diff options
Diffstat (limited to 'ansible_collections/netapp')
236 files changed, 0 insertions, 30139 deletions
diff --git a/ansible_collections/netapp/aws/.github/ISSUE_TEMPLATE/bug_report.yml b/ansible_collections/netapp/aws/.github/ISSUE_TEMPLATE/bug_report.yml deleted file mode 100644 index 2767dba53..000000000 --- a/ansible_collections/netapp/aws/.github/ISSUE_TEMPLATE/bug_report.yml +++ /dev/null @@ -1,210 +0,0 @@ ---- -name: 🐛 Bug report -description: Create a report to help us improve - -body: -- type: markdown - attributes: - value: > - **Thank you for wanting to report a bug in netapp.aws!** - - - ⚠ - Verify first that your issue is not [already reported on - GitHub][issue search] and keep in mind that we may have to keep - the current behavior because [every change breaks someone's - workflow][XKCD 1172]. - We try to be mindful about this. - - Also test if the latest release and devel branch are affected too. - - - **Tip:** If you are seeking community support, please consider - [Join our Slack community][ML||IRC]. - - - - [ML||IRC]: - https://join.slack.com/t/netapppub/shared_invite/zt-njcjx2sh-1VR2mEDvPcJAmPutOnP~mg - - [issue search]: ../search?q=is%3Aissue&type=issues - - [XKCD 1172]: https://xkcd.com/1172/ - - -- type: textarea - attributes: - label: Summary - description: Explain the problem briefly below. - placeholder: >- - When I try to do X with netapp.aws from the devel branch on GitHub, Y - breaks in a way Z under the env E. Here are all the details I know - about this problem... - validations: - required: true - -- type: input - attributes: - label: Component Name - description: > - Write the short name of the rst file, module, plugin, task or - feature below, *use your best guess if unsure*. - - - **Tip:** Cannot find it in this repository? Please be advised that - the source for some parts of the documentation are hosted outside - of this repository. 
If the page you are reporting describes - modules/plugins/etc that are not officially supported by the - Ansible Core Engineering team, there is a good chance that it is - coming from one of the [Ansible Collections maintained by the - community][collections org]. If this is the case, please make sure - to file an issue under the appropriate project there instead. - - - [collections org]: /ansible-collections - placeholder: dnf, apt, yum, pip, user etc. - validations: - required: true - -- type: textarea - attributes: - label: Ansible Version - description: >- - Paste verbatim output from `ansible --version` below, under - the prompt line. Please don't wrap it with tripple backticks — your - whole input will be turned into a code snippet automatically. - render: console - value: | - $ ansible --version - placeholder: | - $ ansible --version - ansible [core 2.11.0b4.post0] (detached HEAD ref: refs/) last updated 2021/04/02 00:33:35 (GMT +200) - config file = None - configured module search path = ['~/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules'] - ansible python module location = ~/src/github/ansible/ansible/lib/ansible - ansible collection location = ~/.ansible/collections:/usr/share/ansible/collections - executable location = bin/ansible - python version = 3.9.0 (default, Oct 26 2020, 13:08:59) [GCC 10.2.0] - jinja version = 2.11.3 - libyaml = True - validations: - required: true - -- type: textarea - attributes: - label: CVS for AWS Collection Version - description: >- - CVS for AWS Collection Version. 
Run `ansible-galaxy collection` and copy the entire output - render: console - value: | - $ ansible-galaxy collection list - validations: - required: true - -- type: textarea - attributes: - label: Playbook - description: >- - The task from the playbook that is give you the issue - render: console - validations: - required: true - -- type: textarea - attributes: - label: Steps to Reproduce - description: | - Describe exactly how to reproduce the problem, using a minimal test-case. It would *really* help us understand your problem if you could also pased any playbooks, configs and commands you used. - - **HINT:** You can paste https://gist.github.com links for larger files. - value: | - <!--- Paste example playbooks or commands between quotes below --> - ```yaml (paste below) - - ``` - placeholder: | - 1. Implement the following playbook: - - ```yaml - --- - # ping.yml - - hosts: all - gather_facts: false - tasks: - - ping: - ... - ``` - 2. Then run `ANSIBLE_DEBUG=1 ansible-playbook ping.yml -vvvvv` - 3. An error occurs. - validations: - required: true - -- type: textarea - attributes: - label: Expected Results - description: >- - Describe what you expected to happen when running the steps above. - placeholder: >- - I expected X to happen because I assumed Y and was shocked - that it did not. - validations: - required: true - -- type: textarea - attributes: - label: Actual Results - description: | - Describe what actually happened. If possible run with extra verbosity (`-vvvv`). - - Paste verbatim command output and don't wrap it with tripple backticks — your - whole input will be turned into a code snippet automatically. - render: console - placeholder: >- - Certificate did not match expected hostname: files.pythonhosted.org. 
Certificate: {'notAfter': 'Apr 28 19:20:25 2021 GMT', 'subjectAltName': ((u'DNS', 'r.ssl.fastly.net'), (u'DNS', '*.catchpoint.com'), (u'DNS', '*.cnn.io'), (u'DNS', '*.dollarshaveclub.com'), (u'DNS', '*.eater.com'), (u'DNS', '*.fastly.picmonkey.com'), (u'DNS', '*.files.saymedia-content.com'), (u'DNS', '*.ft.com'), (u'DNS', '*.meetupstatic.com'), (u'DNS', '*.nfl.com'), (u'DNS', '*.pagar.me'), (u'DNS', '*.picmonkey.com'), (u'DNS', '*.realself.com'), (u'DNS', '*.sbnation.com'), (u'DNS', '*.shakr.com'), (u'DNS', '*.streamable.com'), (u'DNS', '*.surfly.com'), (u'DNS', '*.theverge.com'), (u'DNS', '*.thrillist.com'), (u'DNS', '*.vox-cdn.com'), (u'DNS', '*.vox.com'), (u'DNS', '*.voxmedia.com'), (u'DNS', 'eater.com'), (u'DNS', 'ft.com'), (u'DNS', 'i.gse.io'), (u'DNS', 'picmonkey.com'), (u'DNS', 'realself.com'), (u'DNS', 'static.wixstatic.com'), (u'DNS', 'streamable.com'), (u'DNS', 'surfly.com'), (u'DNS', 'theverge.com'), (u'DNS', 'vox-cdn.com'), (u'DNS', 'vox.com'), (u'DNS', 'www.joyent.com')), 'subject': ((('countryName', u'US'),), (('stateOrProvinceName', u'California'),), (('localityName', u'San Francisco'),), (('organizationName', u'Fastly, Inc'),), (('commonName', u'r.ssl.fastly.net'),))} - Exception: - Traceback (most recent call last): - File "/usr/local/lib/python2.6/dist-packages/pip/basecommand.py", line 215, in main - status = self.run(options, args) - File "/usr/local/lib/python2.6/dist-packages/pip/commands/install.py", line 335, in run - wb.build(autobuilding=True) - File "/usr/local/lib/python2.6/dist-packages/pip/wheel.py", line 749, in build - self.requirement_set.prepare_files(self.finder) - File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 380, in prepare_files - ignore_dependencies=self.ignore_dependencies)) - File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 620, in _prepare_file - session=self.session, hashes=hashes) - File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 821, in unpack_url - 
hashes=hashes - File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 659, in unpack_http_url - hashes) - File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 853, in _download_http_url - stream=True, - File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 521, in get - return self.request('GET', url, **kwargs) - File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 386, in request - return super(PipSession, self).request(method, url, *args, **kwargs) - File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 508, in request - resp = self.send(prep, **send_kwargs) - File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 618, in send - r = adapter.send(request, **kwargs) - File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/cachecontrol/adapter.py", line 47, in send - resp = super(CacheControlAdapter, self).send(request, **kw) - File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/adapters.py", line 506, in send - raise SSLError(e, request=request) - SSLError: HTTPSConnectionPool(host='files.pythonhosted.org', port=443): Max retries exceeded with url: /packages/ef/ab/aa12712415809bf698e719b307419f953e25344e8f42d557533d7a02b276/netapp_lib-2020.7.16-py2-none-any.whl (Caused by SSLError(CertificateError("hostname 'files.pythonhosted.org' doesn't match either of 'r.ssl.fastly.net', '*.catchpoint.com', '*.cnn.io', '*.dollarshaveclub.com', '*.eater.com', '*.fastly.picmonkey.com', '*.files.saymedia-content.com', '*.ft.com', '*.meetupstatic.com', '*.nfl.com', '*.pagar.me', '*.picmonkey.com', '*.realself.com', '*.sbnation.com', '*.shakr.com', '*.streamable.com', '*.surfly.com', '*.theverge.com', '*.thrillist.com', '*.vox-cdn.com', '*.vox.com', '*.voxmedia.com', 'eater.com', 'ft.com', 'i.gse.io', 'picmonkey.com', 'realself.com', 'static.wixstatic.com', 'streamable.com', 'surfly.com', 'theverge.com', 'vox-cdn.com', 'vox.com', 
'www.joyent.com'",),)) - ERROR: Command "/usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 2. - ERROR: Command "docker exec d47eb360db4ce779c1f690db964655b76e68895c4360ff252c46fe7fe6f5c75a /usr/bin/env ANSIBLE_TEST_CONTENT_ROOT=/root/ansible_collections/netapp/ontap LC_ALL=en_US.UTF-8 /usr/bin/python3.6 /root/ansible/bin/ansible-test units --metadata tests/output/.tmp/metadata-9i2qfrcl.json --truncate 200 --redact --color yes --requirements --python default --requirements-mode only" returned exit status 1. - validations: - required: true - - -- type: markdown - attributes: - value: > - *One last thing...* - - - Thank you for your collaboration! - - -... diff --git a/ansible_collections/netapp/aws/.github/ISSUE_TEMPLATE/feature_request.yml b/ansible_collections/netapp/aws/.github/ISSUE_TEMPLATE/feature_request.yml deleted file mode 100644 index c0506a059..000000000 --- a/ansible_collections/netapp/aws/.github/ISSUE_TEMPLATE/feature_request.yml +++ /dev/null @@ -1,100 +0,0 @@ ---- -name: ✨ Feature request -description: Suggest an idea for this project - -body: -- type: markdown - attributes: - value: > - **Thank you for wanting to suggest a feature for netapp.aws!** - - 💡 - Before you go ahead with your request, please first consider if it - would be useful for majority of the netapp.aws users. As a - general rule of thumb, any feature that is only of interest to a - small sub group should be [implemented in a third-party Ansible - Collection][contribute to collections] or maybe even just your - project alone. Be mindful of the fact that the essential - netapp.aws features have a broad impact. - - - <details> - <summary> - ❗ Every change breaks someone's workflow. 
- </summary> - - - [![❗ Every change breaks someone's workflow. - ](https://imgs.xkcd.com/comics/workflow.png) - ](https://xkcd.com/1172/) - </details> - - - ⚠ - Verify first that your idea is not [already requested on - GitHub][issue search]. - - Also test if the main branch does not already implement this. - - -- type: textarea - attributes: - label: Summary - description: > - Describe the new feature/improvement you would like briefly below. - - - What's the problem this feature will solve? - - What are you trying to do, that you are unable to achieve - with netapp.aws as it currently stands? - - - * Provide examples of real-world use cases that this would enable - and how it solves the problem you described. - - * How do you solve this now? - - * Have you tried to work around the problem using other tools? - - * Could there be a different approach to solving this issue? - - placeholder: >- - I am trying to do X with netapp.aws from the devel branch on GitHub and - I think that implementing a feature Y would be very helpful for me and - every other user of netapp.aws because of Z. - validations: - required: true - -- type: input - attributes: - label: Component Name - description: > - Write the short name of the module, plugin, task or feature below, - *use your best guess if unsure*. - - - [collections org]: /ansible-collections - placeholder: dnf, apt, yum, pip, user etc. - validations: - required: true - -- type: textarea - attributes: - label: Additional Information - description: | - Describe how the feature would be used, why it is needed and what it would solve. - - **HINT:** You can paste https://gist.github.com links for larger files. - value: | - <!--- Paste example playbooks or commands between quotes below --> - ```yaml (paste below) - - ``` - placeholder: >- - I asked on https://stackoverflow.com/.... and the community - advised me to do X, Y and Z. - validations: - required: true - -... 
diff --git a/ansible_collections/netapp/aws/.github/workflows/coverage.yml b/ansible_collections/netapp/aws/.github/workflows/coverage.yml deleted file mode 100644 index d254e6081..000000000 --- a/ansible_collections/netapp/aws/.github/workflows/coverage.yml +++ /dev/null @@ -1,45 +0,0 @@ -name: NetApp.aws Ansible Coverage - -on: - push: - pull_request: - schedule: - - cron: '0 6 * * *' - -jobs: - sanity: - name: Coverage on AWS - runs-on: ubuntu-latest - - steps: - - name: Check out code - uses: actions/checkout@v2 - - - name: Set up Python - uses: actions/setup-python@v2 - with: - python-version: 3.8 - - - name: Install ansible stable-2.11 - run: pip install https://github.com/ansible/ansible/archive/stable-2.11.tar.gz --disable-pip-version-check - - - name: Make directory to make ansible-test happy - run: | - pwd - mkdir -p ansible_collections/netapp/aws/ - rsync -av . ansible_collections/netapp/aws/ --exclude ansible_collections/netapp/aws/ - - - name: Run Unit Tests - run: ansible-test units --coverage --color --docker --python 3.8 - working-directory: ansible_collections/netapp/aws/ - - # ansible-test support producing code coverage date - - name: Generate coverage report - run: ansible-test coverage xml -v --requirements --group-by command --group-by version - working-directory: ansible_collections/netapp/aws/ - - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v2 - with: - working-directory: ansible_collections/netapp/aws/ - verbose: true
\ No newline at end of file diff --git a/ansible_collections/netapp/aws/.github/workflows/main.yml b/ansible_collections/netapp/aws/.github/workflows/main.yml deleted file mode 100644 index e59624ece..000000000 --- a/ansible_collections/netapp/aws/.github/workflows/main.yml +++ /dev/null @@ -1,47 +0,0 @@ -name: NetApp.aws Ansible CI - -on: - push: - pull_request: - schedule: - - cron: '0 6 * * *' - -jobs: - sanity: - name: Sanity (${{ matrix.ansible }} on AWS - runs-on: ubuntu-latest - strategy: - matrix: - ansible: - - stable-2.9 - - stable-2.10 - - stable-2.11 - - stable-2.12 - - stable-2.13 - - devel - - steps: - - name: Check out code - uses: actions/checkout@v2 - - - name: Set up Python - uses: actions/setup-python@v2 - with: - # Ansible 2.14 requires 3.9 as a minimum - python-version: 3.9 - - - name: Install ansible (${{ matrix.ansible }}) - run: pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible }}.tar.gz --disable-pip-version-check - - - name: Make directory to make ansible-test happy - run: | - pwd - mkdir -p ansible_collections/netapp/aws/ - rsync -av . ansible_collections/netapp/aws/ --exclude ansible_collections/netapp/aws/ - - name: Run sanity tests AWS - run: ansible-test sanity --docker -v --color - working-directory: ansible_collections/netapp/aws/ - - - name: Run Unit Tests - run: ansible-test units --docker -v --color - working-directory: ansible_collections/netapp/aws/ diff --git a/ansible_collections/netapp/aws/CHANGELOG.rst b/ansible_collections/netapp/aws/CHANGELOG.rst deleted file mode 100644 index 11be8dbab..000000000 --- a/ansible_collections/netapp/aws/CHANGELOG.rst +++ /dev/null @@ -1,90 +0,0 @@ -======================================= -NetApp AWS CVS Collection Release Notes -======================================= - -.. contents:: Topics - - -v21.7.0 -======= - -Minor Changes -------------- - -- PR1 - allow usage of Ansible module group defaults - for Ansible 2.12+. 
- -v21.6.0 -======= - -Minor Changes -------------- - -- all modules - ability to trace API calls and responses. - -Bugfixes --------- - -- all modules - fix traceback TypeError 'NoneType' object is not subscriptable when URL points to a web server. - -v21.2.0 -======= - -Bugfixes --------- - -- all modules - disable logging for ``api_key`` and ``secret_key`` values. -- all modules - prevent infinite loop when asynchronous action fails. -- all modules - report error if response does not contain valid JSON. -- aws_netapp_cvs_filesystems - fix KeyError when exportPolicy is not present. - -v20.9.0 -======= - -Minor Changes -------------- - -- Fix pylint or flake8 warnings reported by galaxy importer. - -v20.8.0 -======= - -Minor Changes -------------- - -- add "elements:" and update "required:" to match module requirements. -- use a three group format for version_added. So 2.7 becomes 2.7.0. Same thing for 2.8 and 2.9. - -v20.6.0 -======= - -Bugfixes --------- - -- galaxy.yml - fix repository and homepage links. - -v20.2.0 -======= - -Bugfixes --------- - -- galaxy.yml - fix path to github repository. - -v19.10.0 -======== - -Minor Changes -------------- - -- refactor existing modules as a collection - -v2.9.0 -====== - -New Modules ------------ - -- netapp.aws.aws_netapp_cvs_active_directory - NetApp AWS CloudVolumes Service Manage Active Directory. -- netapp.aws.aws_netapp_cvs_filesystems - NetApp AWS Cloud Volumes Service Manage FileSystem. -- netapp.aws.aws_netapp_cvs_pool - NetApp AWS Cloud Volumes Service Manage Pools. -- netapp.aws.aws_netapp_cvs_snapshots - NetApp AWS Cloud Volumes Service Manage Snapshots. diff --git a/ansible_collections/netapp/aws/COPYING b/ansible_collections/netapp/aws/COPYING deleted file mode 100644 index 94a9ed024..000000000 --- a/ansible_collections/netapp/aws/COPYING +++ /dev/null @@ -1,674 +0,0 @@ - GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. 
<http://fsf.org/> - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU General Public License is a free, copyleft license for -software and other kinds of works. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -the GNU General Public License is intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. You must make sure that they, too, receive -or can get the source code. And you must show them these terms so they -know their rights. - - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. 
- - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. - - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. -States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. 
- - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. - - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. 
- - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. 
The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. 
- - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. 
- - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. - - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. 
This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. 
For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. - - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. 
Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. 
- - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. 
- - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. - - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. 
Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. 
- - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. - - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. 
- - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. 
You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. - - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single -combined work, and to convey the resulting work. 
The terms of this -License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. 
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. 
- - <one line to give the program's name and a brief idea of what it does.> - Copyright (C) <year> <name of author> - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see <http://www.gnu.org/licenses/>. - -Also add information on how to contact you by electronic and paper mail. - - If the program does terminal interaction, make it output a short -notice like this when it starts in an interactive mode: - - <program> Copyright (C) <year> <name of author> - This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, your program's commands -might be different; for a GUI interface, you would use an "about box". - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU GPL, see -<http://www.gnu.org/licenses/>. - - The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications with -the library. 
If this is what you want to do, use the GNU Lesser General -Public License instead of this License. But first, please read -<http://www.gnu.org/philosophy/why-not-lgpl.html>. diff --git a/ansible_collections/netapp/aws/FILES.json b/ansible_collections/netapp/aws/FILES.json deleted file mode 100644 index ba75d354a..000000000 --- a/ansible_collections/netapp/aws/FILES.json +++ /dev/null @@ -1,383 +0,0 @@ -{ - "files": [ - { - "name": ".", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "requirements.txt", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "ec72420df5dfbdce4111f715c96338df3b7cb75f58e478d2449c9720e560de8c", - "format": 1 - }, - { - "name": "plugins", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "plugins/doc_fragments", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "plugins/doc_fragments/netapp.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "73a260df376d83b7076b7654a10e9f238de604470a6ba309e8c6019c0f710203", - "format": 1 - }, - { - "name": "plugins/module_utils", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "plugins/module_utils/netapp.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "f297bb0e9d58e427eda43771cf2f353ced6d4c3c71291832bae701bd94582643", - "format": 1 - }, - { - "name": "plugins/module_utils/netapp_module.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "2d69e77a6e5b76dc8909149c8c364454e80fb42631af7d889dfb6e2ff0438c3e", - "format": 1 - }, - { - "name": "plugins/modules", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "plugins/modules/aws_netapp_cvs_filesystems.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "600bccc2f8464217ff4672ba52c160fdcbdc27d40ae33b29f8944af10b14af18", - 
"format": 1 - }, - { - "name": "plugins/modules/aws_netapp_cvs_active_directory.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "a040e165f265ad4cfc04672506be81a07a032a9e2769f5d84b2a77c3be81fce0", - "format": 1 - }, - { - "name": "plugins/modules/aws_netapp_cvs_snapshots.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "8edfd787384f01ef37a0032e60898b0253472355a32e420b439e1dbb4d385e85", - "format": 1 - }, - { - "name": "plugins/modules/aws_netapp_cvs_pool.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "7a4b6fc9d48d61cf80a052455334ffd671dd880e7ec476aff6ccae820b658610", - "format": 1 - }, - { - "name": "tests", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/unit", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/unit/compat", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/unit/compat/unittest.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "cba95d18c5b39c6f49714eacf1ac77452c2e32fa087c03cf01aacd19ae597b0f", - "format": 1 - }, - { - "name": "tests/unit/compat/builtins.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "0ca4cac919e166b25e601e11acb01f6957dddd574ff0a62569cb994a5ecb63e1", - "format": 1 - }, - { - "name": "tests/unit/compat/__init__.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "format": 1 - }, - { - "name": "tests/unit/compat/mock.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "0af958450cf6de3fbafe94b1111eae8ba5a8dbe1d785ffbb9df81f26e4946d99", - "format": 1 - }, - { - "name": "tests/unit/requirements.txt", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "68a61b1d58a722f4ffabaa28da01c9837c93a582ea41c1bfb1c1fd54ea2d8fab", - "format": 1 - }, - { - 
"name": "tests/unit/plugins", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/unit/plugins/module_utils", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/unit/plugins/module_utils/test_netapp.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "2d8932ad58a04840536e850abf3f131960e99ec55546081fb63713dbfc3bc05d", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_aws_netapp_cvs_snapshots.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "553b6f1afd566bebf6a90de18f71517efc3a41953bade06cd8972fcbff9ea1fb", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_aws_netapp_cvs_active_directory.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "3b42a059c1dfd757cd6294ca9ebce74f1e3ce6690bcddcdca2cdb4e6b8ac771b", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_aws_netapp_cvs_filesystems.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "f74d3b3ecbaf71433fcf569a9a09d134f241c8eb5a8e2ed373eeb5638fc79b2e", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_aws_netapp_cvs_pool.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "64f0c7995f02daaf91d3d6cc15f2347ecba5253a2dc13f7a7df6a880c0926fcf", - "format": 1 - }, - { - "name": "meta", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "meta/execution-environment.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "db75f5fcae43fd2db36d3c9a004748dd1ec4165a6e2ebb36ada6943a8b440f4a", - "format": 1 - }, - { - "name": "meta/runtime.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "8c043a6db126f7b26a926814bf1485e24518dce3eb66607273cbd15835ffa29b", - "format": 
1 - }, - { - "name": "changelogs", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "changelogs/fragments", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "changelogs/fragments/20.9.0.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "adc1cab2c7625a1e299876d9d622eb1e7529af59042268e088673f408c1d1dce", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-3569.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "e3e0500c584d187e1b339a612f800540a39cddcebe5c3e1c8c2e134af0b2baf6", - "format": 1 - }, - { - "name": "changelogs/fragments/2019.10.0.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "7b1a5ef7df5f1e6e66ddc013149aea0480eb79f911a0563e2e6d7d9af79d5572", - "format": 1 - }, - { - "name": "changelogs/fragments/20.2.0.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "91918f48a406834778ff7163c92c12dd1802c0620cb681ee66f8a4709444cf5e", - "format": 1 - }, - { - "name": "changelogs/fragments/20.8.0.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "a91cbe5f46d2bae6af1eb3693470bdfaf04e5fbd6cdc76882e674389f4352f16", - "format": 1 - }, - { - "name": "changelogs/fragments/20.6.0.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "6192b3cccdc7c1e1eb0d61a49dd20c6f234499b6dd9b52b2f974b673e99f7a47", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-3644.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "7eaf09f11cdf4fd2628e29124ce128dd984340ee65a233acdde77369ebf08ada", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-4416.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "4224db573f34caeeb956c8728eb343a47bc2729d898001a4c6a671b780dae1bf", - "format": 1 - }, - { - "name": "changelogs/config.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": 
"b0bb3c0938ee0599c727ceef11d224bd771e9db9dc7a0bca162d786c2933ea89", - "format": 1 - }, - { - "name": "changelogs/changelog.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "8f1d175c82536c75b0c17c289a6aa7e9bd2faeea39485d571cea6cba5c86d9aa", - "format": 1 - }, - { - "name": "README.md", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "dedd9d245c2817b6873d338acf855078c7b36b9232b0300c23524a0315beb778", - "format": 1 - }, - { - "name": "COPYING", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "8ceb4b9ee5adedde47b31e975c1d90c73ad27b6b165a1dcd80c7c545eb65b903", - "format": 1 - }, - { - "name": ".github", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": ".github/workflows", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": ".github/workflows/coverage.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "876f2d16a1c4169c208ddec8702048f376eeacd4f697a10cfe4a948444ce3f4e", - "format": 1 - }, - { - "name": ".github/workflows/main.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "86f3c8498a18904b255b96834893257285f8538e413334fe72ccae163ac070a9", - "format": 1 - }, - { - "name": ".github/ISSUE_TEMPLATE", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": ".github/ISSUE_TEMPLATE/feature_request.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "4204f720b07bf0636be6a9c39717e84e59dc1a0b36425bf0f10fc9817131d3e7", - "format": 1 - }, - { - "name": ".github/ISSUE_TEMPLATE/bug_report.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "1bcc8888ddd84ef0fc9efe03e784278a748a71ec3e4fffa62dc4a8ad02007760", - "format": 1 - }, - { - "name": "CHANGELOG.rst", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "c9a90849f79ddb81dc0f31e6cad9e6273e82d60b8651404fa31e40f0a6ee66b1", - "format": 1 - } - ], - 
"format": 1 -}
\ No newline at end of file diff --git a/ansible_collections/netapp/aws/MANIFEST.json b/ansible_collections/netapp/aws/MANIFEST.json deleted file mode 100644 index 262e3a3df..000000000 --- a/ansible_collections/netapp/aws/MANIFEST.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "collection_info": { - "namespace": "netapp", - "name": "aws", - "version": "21.7.1", - "authors": [ - "NetApp Ansible Team <ng-ansibleteam@netapp.com>" - ], - "readme": "README.md", - "tags": [ - "storage", - "cloud", - "netapp", - "cvs", - "amazon", - "aws" - ], - "description": "Cloud Volumes Service (CVS) for AWS", - "license": [], - "license_file": "COPYING", - "dependencies": {}, - "repository": "https://github.com/ansible-collections/netapp.aws", - "documentation": null, - "homepage": "https://netapp.io/configuration-management-and-automation/", - "issues": "https://github.com/ansible-collections/netapp.aws/issues" - }, - "file_manifest_file": { - "name": "FILES.json", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "288000ca8e2cebddf0a0b8cd43548750171e8049bfa7805c756d432867da9dc9", - "format": 1 - }, - "format": 1 -}
\ No newline at end of file diff --git a/ansible_collections/netapp/aws/README.md b/ansible_collections/netapp/aws/README.md deleted file mode 100644 index d6a9d0fa9..000000000 --- a/ansible_collections/netapp/aws/README.md +++ /dev/null @@ -1,87 +0,0 @@ -[![Documentation](https://img.shields.io/badge/docs-brightgreen.svg)](https://docs.ansible.com/ansible/devel/collections/netapp/aws/index.html) -![example workflow](https://github.com/ansible-collections/netapp.aws/actions/workflows/main.yml/badge.svg) -[![codecov](https://codecov.io/gh/ansible-collections/netapp.aws/branch/main/graph/badge.svg?token=weBYkksxSi)](https://codecov.io/gh/ansible-collections/netapp.aws) -[![Discord](https://img.shields.io/discord/855068651522490400)](https://discord.gg/NetApp) - - -============================================================= - -netapp.aws - -NetApp AWS CVS Collection - -Copyright (c) 2019 NetApp, Inc. All rights reserved. -Specifications subject to change without notice. - -============================================================= - -# Installation -```bash -ansible-galaxy collection install netapp.aws -``` -To use Collection add the following to the top of your playbook, with out this you will be using Ansible 2.9 version of the module -``` -collections: - - netapp.aws -``` - -# Module documentation -https://docs.ansible.com/ansible/devel/collections/netapp/aws/ - -# Need help -Join our [Discord](https://discord.gg/NetApp) and look for our #ansible channel. - -# Notes - -These Ansible modules are supporting NetApp Cloud Volumes Service for AWS. - -They require a subscription to the service and your API access keys. - -The modules currently support Active Directory, Pool, FileSystem (Volume), and Snapshot services. - -# Release Notes - - -## 21.7.0 - -### Minor Changes -- all modules - allow usage of Ansible module group defaults - for Ansible 2.12+. - -## 21.6.0 - -### Minor Changes -- all modules - ability to trace API calls and responses. 
- -### Bug Fixes -- all modules - fix traceback TypeError 'NoneType' object is not subscriptable when URL points to a web server. - -## 21.2.0 - -### Bug Fixes -- aws_netapp_cvs_filesystems - fix KeyError when exportPolicy is not present. -- all modules - disable logging for `api_key` and `secret_key` values. -- all modules - report error if response does not contain valid JSON. -- all modules - prevent infinite loop when asynchornous action fails. - -## 20.9.0 - -Fix pylint or flake8 warnings reported by galaxy importer. - -## 20.8.0 - -### Module documentation changes -- use a three group format for `version_added`. So 2.7 becomes 2.7.0. Same thing for 2.8 and 2.9. -- add `elements:` and update `required:` to match module requirements. - -## 20.6.0 - -### Bug Fixes -- galaxy.xml: fix repository and homepage links. - -## 20.2.0 - -### Bug Fixes -- galaxy.yml: fix path to github repository. - -## 19.11.0 -- Initial release as a collection. diff --git a/ansible_collections/netapp/aws/changelogs/changelog.yaml b/ansible_collections/netapp/aws/changelogs/changelog.yaml deleted file mode 100644 index ba907fcd5..000000000 --- a/ansible_collections/netapp/aws/changelogs/changelog.yaml +++ /dev/null @@ -1,81 +0,0 @@ -ancestor: null -releases: - 19.10.0: - changes: - minor_changes: - - refactor existing modules as a collection - fragments: - - 2019.10.0.yaml - release_date: '2019-11-14' - 2.9.0: - modules: - - description: NetApp AWS CloudVolumes Service Manage Active Directory. - name: aws_netapp_cvs_active_directory - namespace: '' - - description: NetApp AWS Cloud Volumes Service Manage FileSystem. - name: aws_netapp_cvs_filesystems - namespace: '' - - description: NetApp AWS Cloud Volumes Service Manage Pools. - name: aws_netapp_cvs_pool - namespace: '' - - description: NetApp AWS Cloud Volumes Service Manage Snapshots. 
- name: aws_netapp_cvs_snapshots - namespace: '' - release_date: '2019-11-13' - 20.2.0: - changes: - bugfixes: - - galaxy.yml - fix path to github repository. - fragments: - - 20.2.0.yaml - release_date: '2020-02-05' - 20.6.0: - changes: - bugfixes: - - galaxy.yml - fix repository and homepage links. - fragments: - - 20.6.0.yaml - release_date: '2020-06-03' - 20.8.0: - changes: - minor_changes: - - add "elements:" and update "required:" to match module requirements. - - use a three group format for version_added. So 2.7 becomes 2.7.0. Same thing - for 2.8 and 2.9. - fragments: - - 20.8.0.yaml - release_date: '2020-08-05' - 20.9.0: - changes: - minor_changes: - - Fix pylint or flake8 warnings reported by galaxy importer. - fragments: - - 20.9.0.yaml - release_date: '2020-09-02' - 21.2.0: - changes: - bugfixes: - - all modules - disable logging for ``api_key`` and ``secret_key`` values. - - all modules - prevent infinite loop when asynchronous action fails. - - all modules - report error if response does not contain valid JSON. - - aws_netapp_cvs_filesystems - fix KeyError when exportPolicy is not present. - fragments: - - DEVOPS-3644.yaml - release_date: '2021-02-04' - 21.6.0: - changes: - bugfixes: - - all modules - fix traceback TypeError 'NoneType' object is not subscriptable - when URL points to a web server. - minor_changes: - - all modules - ability to trace API calls and responses. - fragments: - - DEVOPS-3569.yaml - release_date: '2021-07-14' - 21.7.0: - changes: - minor_changes: - - PR1 - allow usage of Ansible module group defaults - for Ansible 2.12+. 
- fragments: - - DEVOPS-4416.yaml - release_date: '2021-11-03' diff --git a/ansible_collections/netapp/aws/changelogs/config.yaml b/ansible_collections/netapp/aws/changelogs/config.yaml deleted file mode 100644 index 5f649c68c..000000000 --- a/ansible_collections/netapp/aws/changelogs/config.yaml +++ /dev/null @@ -1,32 +0,0 @@ -changelog_filename_template: ../CHANGELOG.rst -changelog_filename_version_depth: 0 -changes_file: changelog.yaml -changes_format: combined -ignore_other_fragment_extensions: true -keep_fragments: true -mention_ancestor: true -new_plugins_after_name: removed_features -notesdir: fragments -prelude_section_name: release_summary -prelude_section_title: Release Summary -sanitize_changelog: true -sections: -- - major_changes - - Major Changes -- - minor_changes - - Minor Changes -- - breaking_changes - - Breaking Changes / Porting Guide -- - deprecated_features - - Deprecated Features -- - removed_features - - Removed Features (previously deprecated) -- - security_fixes - - Security Fixes -- - bugfixes - - Bugfixes -- - known_issues - - Known Issues -title: NetApp AWS CVS Collection -trivial_section_name: trivial -use_fqcn: true diff --git a/ansible_collections/netapp/aws/changelogs/fragments/20.2.0.yaml b/ansible_collections/netapp/aws/changelogs/fragments/20.2.0.yaml deleted file mode 100644 index 3f764c1c9..000000000 --- a/ansible_collections/netapp/aws/changelogs/fragments/20.2.0.yaml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: - - galaxy.yml - fix path to github repository. diff --git a/ansible_collections/netapp/aws/changelogs/fragments/20.6.0.yaml b/ansible_collections/netapp/aws/changelogs/fragments/20.6.0.yaml deleted file mode 100644 index fcd0d11ee..000000000 --- a/ansible_collections/netapp/aws/changelogs/fragments/20.6.0.yaml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: - - galaxy.yml - fix repository and homepage links. 
diff --git a/ansible_collections/netapp/aws/changelogs/fragments/20.8.0.yaml b/ansible_collections/netapp/aws/changelogs/fragments/20.8.0.yaml deleted file mode 100644 index c92e9e41b..000000000 --- a/ansible_collections/netapp/aws/changelogs/fragments/20.8.0.yaml +++ /dev/null @@ -1,3 +0,0 @@ -minor_changes: - - use a three group format for version_added. So 2.7 becomes 2.7.0. Same thing for 2.8 and 2.9. - - add "elements:" and update "required:" to match module requirements. diff --git a/ansible_collections/netapp/aws/changelogs/fragments/20.9.0.yaml b/ansible_collections/netapp/aws/changelogs/fragments/20.9.0.yaml deleted file mode 100644 index c7328c5eb..000000000 --- a/ansible_collections/netapp/aws/changelogs/fragments/20.9.0.yaml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - Fix pylint or flake8 warnings reported by galaxy importer. diff --git a/ansible_collections/netapp/aws/changelogs/fragments/2019.10.0.yaml b/ansible_collections/netapp/aws/changelogs/fragments/2019.10.0.yaml deleted file mode 100644 index 5723daa11..000000000 --- a/ansible_collections/netapp/aws/changelogs/fragments/2019.10.0.yaml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - refactor existing modules as a collection diff --git a/ansible_collections/netapp/aws/changelogs/fragments/DEVOPS-3569.yaml b/ansible_collections/netapp/aws/changelogs/fragments/DEVOPS-3569.yaml deleted file mode 100644 index 19ba55d8d..000000000 --- a/ansible_collections/netapp/aws/changelogs/fragments/DEVOPS-3569.yaml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - all modules - ability to trace API calls and responses. -bugfixes: - - all modules - fix traceback TypeError 'NoneType' object is not subscriptable when URL points to a web server. 
diff --git a/ansible_collections/netapp/aws/changelogs/fragments/DEVOPS-3644.yaml b/ansible_collections/netapp/aws/changelogs/fragments/DEVOPS-3644.yaml deleted file mode 100644 index 2c7e83f31..000000000 --- a/ansible_collections/netapp/aws/changelogs/fragments/DEVOPS-3644.yaml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: - - aws_netapp_cvs_filesystems - fix KeyError when exportPolicy is not present. - - all modules - disable logging for ``api_key`` and ``secret_key`` values. - - all modules - report error if response does not contain valid JSON. - - all modules - prevent infinite loop when asynchronous action fails. diff --git a/ansible_collections/netapp/aws/changelogs/fragments/DEVOPS-4416.yaml b/ansible_collections/netapp/aws/changelogs/fragments/DEVOPS-4416.yaml deleted file mode 100644 index 6b4b660a0..000000000 --- a/ansible_collections/netapp/aws/changelogs/fragments/DEVOPS-4416.yaml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - PR1 - allow usage of Ansible module group defaults - for Ansible 2.12+. 
diff --git a/ansible_collections/netapp/aws/meta/execution-environment.yml b/ansible_collections/netapp/aws/meta/execution-environment.yml deleted file mode 100644 index 315d71a13..000000000 --- a/ansible_collections/netapp/aws/meta/execution-environment.yml +++ /dev/null @@ -1,3 +0,0 @@ -version: 1 -dependencies: - python: ../requirements.txt diff --git a/ansible_collections/netapp/aws/meta/runtime.yml b/ansible_collections/netapp/aws/meta/runtime.yml deleted file mode 100644 index b6b0adc8e..000000000 --- a/ansible_collections/netapp/aws/meta/runtime.yml +++ /dev/null @@ -1,8 +0,0 @@ ---- -requires_ansible: ">=2.13" -action_groups: - netapp_aws: - - aws_netapp_cvs_active_directory - - aws_netapp_cvs_filesystems - - aws_netapp_cvs_pool - - aws_netapp_cvs_snapshots diff --git a/ansible_collections/netapp/aws/plugins/doc_fragments/netapp.py b/ansible_collections/netapp/aws/plugins/doc_fragments/netapp.py deleted file mode 100644 index aff60719f..000000000 --- a/ansible_collections/netapp/aws/plugins/doc_fragments/netapp.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright: (c) 2019, NetApp Ansible Team <ng-ansibleteam@netapp.com> -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - - -class ModuleDocFragment(object): - - DOCUMENTATION = r''' -options: - - See respective platform section for more details -requirements: - - See respective platform section for more details -notes: - - This is documentation for NetApp's AWS CVS modules. -''' - - # Documentation fragment for AWSCVS - AWSCVS = """ -options: - api_key: - required: true - type: str - description: - - The access key to authenticate with the AWSCVS Web Services Proxy or Embedded Web Services API. - secret_key: - required: true - type: str - description: - - The secret_key to authenticate with the AWSCVS Web Services Proxy or Embedded Web Services API. 
- api_url: - required: true - type: str - description: - - The url to the AWSCVS Web Services Proxy or Embedded Web Services API. - validate_certs: - required: false - default: true - description: - - Should https certificates be validated? - type: bool - feature_flags: - description: - - Enable or disable a new feature. - - This can be used to enable an experimental feature or disable a new feature that breaks backward compatibility. - - Supported keys and values are subject to change without notice. Unknown keys are ignored. - - trace_apis can be set to true to enable tracing, data is written to /tmp/um_apis.log. - type: dict - version_added: 21.6.0 -notes: - - The modules prefixed with aws\\_cvs\\_netapp are built to Manage AWS Cloud Volumes Service . -""" diff --git a/ansible_collections/netapp/aws/plugins/module_utils/netapp.py b/ansible_collections/netapp/aws/plugins/module_utils/netapp.py deleted file mode 100644 index 271b66a5d..000000000 --- a/ansible_collections/netapp/aws/plugins/module_utils/netapp.py +++ /dev/null @@ -1,241 +0,0 @@ -# This code is part of Ansible, but is an independent component. -# This particular file snippet, and this file snippet only, is BSD licensed. -# Modules you write using this snippet, which is embedded dynamically by Ansible -# still belong to the author of the module, and may assign their own license -# to the complete work. -# -# Copyright (c) 2019, NetApp Ansible Team <ng-ansibleteam@netapp.com> -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without modification, -# are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. 
-# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. -# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, -# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE -# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -''' -netapp.py -Support methods and class for AWS CVS modules -''' - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import logging -import time -from ansible.module_utils.basic import missing_required_lib - -try: - from ansible.module_utils.ansible_release import __version__ as ansible_version -except ImportError: - ansible_version = 'unknown' - -COLLECTION_VERSION = "21.7.1" - -try: - import requests - HAS_REQUESTS = True -except ImportError: - HAS_REQUESTS = False - - -POW2_BYTE_MAP = dict( - # Here, 1 kb = 1024 - bytes=1, - b=1, - kb=1024, - mb=1024 ** 2, - gb=1024 ** 3, - tb=1024 ** 4, - pb=1024 ** 5, - eb=1024 ** 6, - zb=1024 ** 7, - yb=1024 ** 8 -) - -LOG = logging.getLogger(__name__) -LOG_FILE = '/tmp/aws_cvs_apis.log' - - -def aws_cvs_host_argument_spec(): - - return dict( - api_url=dict(required=True, type='str'), - validate_certs=dict(required=False, type='bool', default=True), - api_key=dict(required=True, type='str', no_log=True), - secret_key=dict(required=True, type='str', no_log=True), - feature_flags=dict(required=False, type='dict', default=dict()), - ) - - -def has_feature(module, feature_name): - feature = get_feature(module, 
feature_name) - if isinstance(feature, bool): - return feature - module.fail_json(msg="Error: expected bool type for feature flag: %s" % feature_name) - - -def get_feature(module, feature_name): - ''' if the user has configured the feature, use it - otherwise, use our default - ''' - default_flags = dict( - strict_json_check=True, # if true, fail if response.content in not empty and is not valid json - trace_apis=False, # if true, append REST requests/responses to LOG_FILE - ) - - if module.params['feature_flags'] is not None and feature_name in module.params['feature_flags']: - return module.params['feature_flags'][feature_name] - if feature_name in default_flags: - return default_flags[feature_name] - module.fail_json(msg="Internal error: unexpected feature flag: %s" % feature_name) - - -class AwsCvsRestAPI(object): - ''' wraps requests methods to interface with AWS CVS REST APIs ''' - def __init__(self, module, timeout=60): - self.module = module - self.api_key = self.module.params['api_key'] - self.secret_key = self.module.params['secret_key'] - self.api_url = self.module.params['api_url'] - self.verify = self.module.params['validate_certs'] - self.timeout = timeout - self.url = 'https://' + self.api_url + '/v1/' - self.errors = list() - self.debug_logs = list() - self.check_required_library() - if has_feature(module, 'trace_apis'): - logging.basicConfig(filename=LOG_FILE, level=logging.DEBUG, format='%(asctime)s %(levelname)-8s %(message)s') - - def check_required_library(self): - if not HAS_REQUESTS: - self.module.fail_json(msg=missing_required_lib('requests')) - - def send_request(self, method, api, params, json=None): - ''' send http request and process reponse, including error conditions ''' - if params is not None: - self.module.fail_json(msg='params is not implemented. 
api=%s, params=%s' % (api, repr(params))) - url = self.url + api - status_code = None - content = None - json_dict = None - json_error = None - error_details = None - headers = { - 'Content-type': "application/json", - 'api-key': self.api_key, - 'secret-key': self.secret_key, - 'Cache-Control': "no-cache", - } - - def check_contents(response): - '''json() may fail on an empty value, but it's OK if no response is expected. - To avoid false positives, only report an issue when we expect to read a value. - The first get will see it. - ''' - if method == 'GET' and has_feature(self.module, 'strict_json_check'): - contents = response.content - if len(contents) > 0: - raise ValueError("Expecting json, got: %s" % contents) - - def get_json(response): - ''' extract json, and error message if present ''' - error = None - try: - json = response.json() - except ValueError: - check_contents(response) - return None, None - success_code = [200, 201, 202] - if response.status_code not in success_code: - error = json.get('message') - return json, error - - def sanitize(value, key=None): - if isinstance(value, dict): - new_dict = dict() - for key, value in value.items(): - new_dict[key] = sanitize(value, key) - return new_dict - else: - if key in ['api-key', 'secret-key', 'password']: - return '*' * 12 - else: - return value - - self.log_debug('sending', repr(sanitize(dict(method=method, url=url, verify=self.verify, params=params, - timeout=self.timeout, json=json, headers=headers)))) - try: - response = requests.request(method, url, headers=headers, timeout=self.timeout, json=json) - content = response.content # for debug purposes - status_code = response.status_code - # If the response was successful, no Exception will be raised - response.raise_for_status() - json_dict, json_error = get_json(response) - except requests.exceptions.HTTPError as err: - __, json_error = get_json(response) - if json_error is None: - self.log_error(status_code, 'HTTP error: %s' % err) - error_details = 
str(err) - # If an error was reported in the json payload, it is handled below - except requests.exceptions.ConnectionError as err: - self.log_error(status_code, 'Connection error: %s' % err) - error_details = str(err) - except Exception as err: - self.log_error(status_code, 'Other error: %s' % err) - error_details = 'general exception: %s' % str(err) - if json_error is not None: - self.log_error(status_code, 'Endpoint error: %d: %s' % (status_code, json_error)) - error_details = json_error - self.log_debug(status_code, content) - return json_dict, error_details - - def get(self, api, params=None): - method = 'GET' - return self.send_request(method, api, params) - - def post(self, api, data, params=None): - method = 'POST' - return self.send_request(method, api, params, json=data) - - def patch(self, api, data, params=None): - method = 'PATCH' - return self.send_request(method, api, params, json=data) - - def put(self, api, data, params=None): - method = 'PUT' - return self.send_request(method, api, params, json=data) - - def delete(self, api, data, params=None): - method = 'DELETE' - return self.send_request(method, api, params, json=data) - - def get_state(self, job_id): - """ Method to get the state of the job """ - response, dummy = self.get('Jobs/%s' % job_id) - while str(response['state']) == 'ongoing': - time.sleep(15) - response, dummy = self.get('Jobs/%s' % job_id) - return str(response['state']) - - def log_error(self, status_code, message): - LOG.error("%s: %s", status_code, message) - self.errors.append(message) - self.debug_logs.append((status_code, message)) - - def log_debug(self, status_code, content): - LOG.debug("%s: %s", status_code, content) - self.debug_logs.append((status_code, content)) diff --git a/ansible_collections/netapp/aws/plugins/module_utils/netapp_module.py b/ansible_collections/netapp/aws/plugins/module_utils/netapp_module.py deleted file mode 100644 index 3e31ae989..000000000 --- 
a/ansible_collections/netapp/aws/plugins/module_utils/netapp_module.py +++ /dev/null @@ -1,142 +0,0 @@ -# This code is part of Ansible, but is an independent component. -# This particular file snippet, and this file snippet only, is BSD licensed. -# Modules you write using this snippet, which is embedded dynamically by Ansible -# still belong to the author of the module, and may assign their own license -# to the complete work. -# -# Copyright (c) 2018, Laurent Nicolas <laurentn@netapp.com> -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without modification, -# are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. -# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, -# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE -# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -''' Support class for NetApp ansible modules ''' - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - - -def cmp(a, b): - """ - Python 3 does not have a cmp function, this will do the cmp. - :param a: first object to check - :param b: second object to check - :return: - """ - # convert to lower case for string comparison. - if a is None: - return -1 - if isinstance(a, str) and isinstance(b, str): - a = a.lower() - b = b.lower() - # if list has string element, convert string to lower case. - if isinstance(a, list) and isinstance(b, list): - a = [x.lower() if isinstance(x, str) else x for x in a] - b = [x.lower() if isinstance(x, str) else x for x in b] - a.sort() - b.sort() - return (a > b) - (a < b) - - -class NetAppModule(object): - ''' - Common class for NetApp modules - set of support functions to derive actions based - on the current state of the system, and a desired state - ''' - - def __init__(self): - self.log = list() - self.changed = False - self.parameters = {'name': 'not intialized'} - - def set_parameters(self, ansible_params): - self.parameters = dict() - for param in ansible_params: - if ansible_params[param] is not None: - self.parameters[param] = ansible_params[param] - return self.parameters - - def get_cd_action(self, current, desired): - ''' takes a desired state and a current state, and return an action: - create, delete, None - eg: - is_present = 'absent' - some_object = self.get_object(source) - if some_object is not None: - is_present = 'present' - action = cd_action(current=is_present, desired = self.desired.state()) - ''' - if 'state' in desired: - desired_state = desired['state'] - else: - desired_state = 'present' - - if current is None and desired_state == 'absent': - return None - if current is not None and desired_state == 'present': - return None - # change in state - self.changed = True - if current is not None: - return 'delete' - return 'create' - - def compare_and_update_values(self, 
current, desired, keys_to_compare): - updated_values = dict() - is_changed = False - for key in keys_to_compare: - if key in current: - if key in desired and desired[key] is not None: - if current[key] != desired[key]: - updated_values[key] = desired[key] - is_changed = True - else: - updated_values[key] = current[key] - else: - updated_values[key] = current[key] - - return updated_values, is_changed - - def is_rename_action(self, source, target): - ''' takes a source and target object, and returns True - if a rename is required - eg: - source = self.get_object(source_name) - target = self.get_object(target_name) - action = is_rename_action(source, target) - :return: None for error, True for rename action, False otherwise - ''' - if source is None and target is None: - # error, do nothing - # cannot rename an non existent resource - # alternatively we could create B - return None - if source is not None and target is not None: - # error, do nothing - # idempotency (or) new_name_is_already_in_use - # alternatively we could delete B and rename A to B - return False - if source is None and target is not None: - # do nothing, maybe the rename was already done - return False - # source is not None and target is None: - # rename is in order - self.changed = True - return True diff --git a/ansible_collections/netapp/aws/plugins/modules/aws_netapp_cvs_active_directory.py b/ansible_collections/netapp/aws/plugins/modules/aws_netapp_cvs_active_directory.py deleted file mode 100644 index b64c877b4..000000000 --- a/ansible_collections/netapp/aws/plugins/modules/aws_netapp_cvs_active_directory.py +++ /dev/null @@ -1,276 +0,0 @@ -#!/usr/bin/python - -# (c) 2019, NetApp Inc. 
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -"""AWS Cloud Volumes Services - Manage ActiveDirectory""" - -from __future__ import absolute_import, division, print_function - -__metaclass__ = type - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' - -module: aws_netapp_cvs_active_directory - -short_description: NetApp AWS CloudVolumes Service Manage Active Directory. -extends_documentation_fragment: - - netapp.aws.netapp.awscvs -version_added: 2.9.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> -description: - - Create, Update, Delete ActiveDirectory on AWS Cloud Volumes Service. - -options: - state: - description: - - Whether the specified ActiveDirectory should exist or not. - choices: ['present', 'absent'] - required: true - type: str - - region: - description: - - The region to which the Active Directory credentials are associated. - required: true - type: str - - domain: - description: - - Name of the Active Directory domain - type: str - - DNS: - description: - - DNS server address for the Active Directory domain - - Required when C(state=present) - - Required when C(state=present), to modify ActiveDirectory properties. - type: str - - netBIOS: - description: - - NetBIOS name of the server. - type: str - - username: - description: - - Username of the Active Directory domain administrator - type: str - - password: - description: - - Password of the Active Directory domain administrator - - Required when C(state=present), to modify ActiveDirectory properties - type: str -''' - -EXAMPLES = """ - - name: Create Active Directory - aws_netapp_cvs_active_directory.py: - state: present - region: us-east-1 - DNS: 101.102.103.123 - domain: mydomain.com - password: netapp1! 
- netBIOS: testing - username: user1 - api_url : My_CVS_Hostname - api_key: My_API_Key - secret_key : My_Secret_Key - - - name: Update Active Directory - aws_netapp_cvs_active_directory.py: - state: present - region: us-east-1 - DNS: 101.102.103.123 - domain: mydomain.com - password: netapp2! - netBIOS: testingBIOS - username: user2 - api_url : My_CVS_Hostname - api_key: My_API_Key - secret_key : My_Secret_Key - - - name: Delete Active Directory - aws_netapp_cvs_active_directory.py: - state: absent - region: us-east-1 - domain: mydomain.com - api_url : My_CVS_Hostname - api_key: My_API_Key - secret_key : My_Secret_Key -""" - -RETURN = ''' -''' - -from ansible.module_utils.basic import AnsibleModule -import ansible_collections.netapp.aws.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.aws.plugins.module_utils.netapp_module import NetAppModule -from ansible_collections.netapp.aws.plugins.module_utils.netapp import AwsCvsRestAPI - - -class AwsCvsNetappActiveDir(object): - """ - Contains methods to parse arguments, - derive details of AWS_CVS objects - and send requests to AWS CVS via - the restApi - """ - - def __init__(self): - """ - Parse arguments, setup state variables, - check paramenters and ensure request module is installed - """ - self.argument_spec = netapp_utils.aws_cvs_host_argument_spec() - self.argument_spec.update(dict( - state=dict(required=True, choices=['present', 'absent'], type='str'), - region=dict(required=True, type='str'), - DNS=dict(required=False, type='str'), - domain=dict(required=False, type='str'), - password=dict(required=False, type='str', no_log=True), - netBIOS=dict(required=False, type='str'), - username=dict(required=False, type='str') - )) - - self.module = AnsibleModule( - argument_spec=self.argument_spec, - required_if=[ - ('state', 'present', ['domain', 'password']), - ], - supports_check_mode=True - ) - - self.na_helper = NetAppModule() - - # set up state variables - self.parameters = 
self.na_helper.set_parameters(self.module.params) - # Calling generic AWSCVS restApi class - self.rest_api = AwsCvsRestAPI(self.module) - - def get_activedirectory_id(self): - # Check if ActiveDirectory exists - # Return UUID for ActiveDirectory is found, None otherwise - try: - list_activedirectory, error = self.rest_api.get('Storage/ActiveDirectory') - except Exception: - return None - if error is not None: - self.module.fail_json(msg='Error calling list_activedirectory: %s' % error) - - for activedirectory in list_activedirectory: - if activedirectory['region'] == self.parameters['region']: - return activedirectory['UUID'] - return None - - def get_activedirectory(self, activedirectory_id=None): - if activedirectory_id is None: - return None - else: - activedirectory_info, error = self.rest_api.get('Storage/ActiveDirectory/%s' % activedirectory_id) - if not error: - return activedirectory_info - return None - - def create_activedirectory(self): - # Create ActiveDirectory - api = 'Storage/ActiveDirectory' - data = {"region": self.parameters['region'], "DNS": self.parameters['DNS'], "domain": self.parameters['domain'], - "username": self.parameters['username'], "password": self.parameters['password'], "netBIOS": self.parameters['netBIOS']} - - response, error = self.rest_api.post(api, data) - - if not error: - return response - else: - self.module.fail_json(msg=response['message']) - - def delete_activedirectory(self): - activedirectory_id = self.get_activedirectory_id() - # Delete ActiveDirectory - - if activedirectory_id: - api = 'Storage/ActiveDirectory/' + activedirectory_id - data = None - response, error = self.rest_api.delete(api, data) - if not error: - return response - else: - self.module.fail_json(msg=response['message']) - - else: - self.module.fail_json(msg="Active Directory does not exist") - - def update_activedirectory(self, activedirectory_id, updated_activedirectory): - # Update ActiveDirectory - api = 'Storage/ActiveDirectory/' + 
activedirectory_id - data = { - "region": self.parameters['region'], - "DNS": updated_activedirectory['DNS'], - "domain": updated_activedirectory['domain'], - "username": updated_activedirectory['username'], - "password": updated_activedirectory['password'], - "netBIOS": updated_activedirectory['netBIOS'] - } - - response, error = self.rest_api.put(api, data) - if not error: - return response - else: - self.module.fail_json(msg=response['message']) - - def apply(self): - """ - Perform pre-checks, call functions and exit - """ - modify = False - activedirectory_id = self.get_activedirectory_id() - current = self.get_activedirectory(activedirectory_id) - cd_action = self.na_helper.get_cd_action(current, self.parameters) - - if current and self.parameters['state'] != 'absent': - keys_to_check = ['DNS', 'domain', 'username', 'netBIOS'] - updated_active_directory, modify = self.na_helper.compare_and_update_values(current, self.parameters, keys_to_check) - - if self.parameters['password']: - modify = True - updated_active_directory['password'] = self.parameters['password'] - - if modify is True: - self.na_helper.changed = True - if 'domain' in self.parameters and self.parameters['domain'] is not None: - ad_exists = self.get_activedirectory(updated_active_directory['domain']) - if ad_exists: - modify = False - self.na_helper.changed = False - - if self.na_helper.changed: - if self.module.check_mode: - pass - else: - if modify is True: - self.update_activedirectory(activedirectory_id, updated_active_directory) - elif cd_action == 'create': - self.create_activedirectory() - elif cd_action == 'delete': - self.delete_activedirectory() - - self.module.exit_json(changed=self.na_helper.changed) - - -def main(): - """ - Main function - """ - aws_netapp_cvs_active_directory = AwsCvsNetappActiveDir() - aws_netapp_cvs_active_directory.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/aws/plugins/modules/aws_netapp_cvs_filesystems.py 
b/ansible_collections/netapp/aws/plugins/modules/aws_netapp_cvs_filesystems.py deleted file mode 100644 index 09190b39e..000000000 --- a/ansible_collections/netapp/aws/plugins/modules/aws_netapp_cvs_filesystems.py +++ /dev/null @@ -1,362 +0,0 @@ -#!/usr/bin/python - -# (c) 2019, NetApp Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -"""AWS Cloud Volumes Services - Manage fileSystem""" - -from __future__ import absolute_import, division, print_function - -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'community'} - - -DOCUMENTATION = ''' - -module: aws_netapp_cvs_filesystems - -short_description: NetApp AWS Cloud Volumes Service Manage FileSystem. -extends_documentation_fragment: - - netapp.aws.netapp.awscvs -version_added: 2.9.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> -description: -- Create, Update, Delete fileSystem on AWS Cloud Volumes Service. - -options: - state: - description: - - Whether the specified fileSystem should exist or not. - required: true - choices: ['present', 'absent'] - type: str - - region: - description: - - The region to which the filesystem belongs to. - required: true - type: str - - creationToken: - description: - - Name of the filesystem - required: true - type: str - - quotaInBytes: - description: - - Size of the filesystem - - Required for create - type: int - - serviceLevel: - description: - - Service Level of a filesystem. 
- choices: ['standard', 'premium', 'extreme'] - type: str - - exportPolicy: - description: - - The policy rules to export the filesystem - type: dict - suboptions: - rules: - description: - - Set of rules to export the filesystem - - Requires allowedClients, access and protocol - type: list - elements: dict - suboptions: - allowedClients: - description: - - Comma separated list of ip address blocks of the clients to access the fileSystem - - Each address block contains the starting IP address and size for the block - type: str - - cifs: - description: - - Enable or disable cifs filesystem - type: bool - - nfsv3: - description: - - Enable or disable nfsv3 fileSystem - type: bool - - nfsv4: - description: - - Enable or disable nfsv4 filesystem - type: bool - - ruleIndex: - description: - - Index number of the rule - type: int - - unixReadOnly: - description: - - Should fileSystem have read only permission or not - type: bool - - unixReadWrite: - description: - - Should fileSystem have read write permission or not - type: bool -''' - -EXAMPLES = """ -- name: Create FileSystem - aws_netapp_cvs_filesystems: - state: present - region: us-east-1 - creationToken: newVolume-1 - exportPolicy: - rules: - - allowedClients: 172.16.0.4 - cifs: False - nfsv3: True - nfsv4: True - ruleIndex: 1 - unixReadOnly: True - unixReadWrite: False - quotaInBytes: 100000000000 - api_url : cds-aws-bundles.netapp.com:8080 - api_key: My_API_Key - secret_key : My_Secret_Key - -- name: Update FileSystem - aws_netapp_cvs_filesystems: - state: present - region: us-east-1 - creationToken: newVolume-1 - exportPolicy: - rules: - - allowedClients: 172.16.0.4 - cifs: False - nfsv3: True - nfsv4: True - ruleIndex: 1 - unixReadOnly: True - unixReadWrite: False - quotaInBytes: 200000000000 - api_url : cds-aws-bundles.netapp.com:8080 - api_key: My_API_Key - secret_key : My_Secret_Key - -- name: Delete FileSystem - aws_netapp_cvs_filesystems: - state: present - region: us-east-1 - creationToken: newVolume-1 - 
quotaInBytes: 100000000000 - api_url : cds-aws-bundles.netapp.com:8080 - api_key: My_API_Key - secret_key : My_Secret_Key -""" - -RETURN = """ -""" - -from ansible.module_utils.basic import AnsibleModule -import ansible_collections.netapp.aws.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.aws.plugins.module_utils.netapp_module import NetAppModule -from ansible_collections.netapp.aws.plugins.module_utils.netapp import AwsCvsRestAPI - - -class AwsCvsNetappFileSystem(object): - """ - Contains methods to parse arguments, - derive details of AWS_CVS objects - and send requests to AWS CVS via - the restApi - """ - - def __init__(self): - """ - Parse arguments, setup state variables, - check paramenters and ensure request module is installed - """ - self.argument_spec = netapp_utils.aws_cvs_host_argument_spec() - self.argument_spec.update(dict( - state=dict(required=True, choices=['present', 'absent']), - region=dict(required=True, type='str'), - creationToken=dict(required=True, type='str', no_log=False), - quotaInBytes=dict(required=False, type='int'), - serviceLevel=dict(required=False, choices=['standard', 'premium', 'extreme']), - exportPolicy=dict( - type='dict', - options=dict( - rules=dict( - type='list', - elements='dict', - options=dict( - allowedClients=dict(required=False, type='str'), - cifs=dict(required=False, type='bool'), - nfsv3=dict(required=False, type='bool'), - nfsv4=dict(required=False, type='bool'), - ruleIndex=dict(required=False, type='int'), - unixReadOnly=dict(required=False, type='bool'), - unixReadWrite=dict(required=False, type='bool') - ) - ) - ) - ), - )) - - self.module = AnsibleModule( - argument_spec=self.argument_spec, - required_if=[ - ('state', 'present', ['region', 'creationToken', 'quotaInBytes']), - ], - supports_check_mode=True - ) - - self.na_helper = NetAppModule() - - # set up state variables - self.parameters = self.na_helper.set_parameters(self.module.params) - - # Calling generic AWSCVS restApi 
class - self.rest_api = AwsCvsRestAPI(self.module) - - self.data = {} - for key in self.parameters.keys(): - self.data[key] = self.parameters[key] - - def get_filesystem_id(self): - # Check given FileSystem is exists - # Return fileSystemId is found, None otherwise - list_filesystem, error = self.rest_api.get('FileSystems') - if error: - self.module.fail_json(msg=error) - - for filesystem in list_filesystem: - if filesystem['creationToken'] == self.parameters['creationToken']: - return filesystem['fileSystemId'] - return None - - def get_filesystem(self, filesystem_id): - # Get FileSystem information by fileSystemId - # Return fileSystem Information - filesystem_info, error = self.rest_api.get('FileSystems/%s' % filesystem_id) - if error: - self.module.fail_json(msg=error) - else: - return filesystem_info - return None - - def is_job_done(self, response): - # check jobId is present and equal to 'done' - # return True on success, False otherwise - try: - job_id = response['jobs'][0]['jobId'] - except TypeError: - job_id = None - - if job_id is not None and self.rest_api.get_state(job_id) == 'done': - return True - return False - - def create_filesystem(self): - # Create fileSystem - api = 'FileSystems' - response, error = self.rest_api.post(api, self.data) - if not error: - if self.is_job_done(response): - return - error = "Error: unexpected response on FileSystems create: %s" % str(response) - self.module.fail_json(msg=error) - - def delete_filesystem(self, filesystem_id): - # Delete FileSystem - api = 'FileSystems/' + filesystem_id - self.data = None - response, error = self.rest_api.delete(api, self.data) - if not error: - if self.is_job_done(response): - return - error = "Error: unexpected response on FileSystems delete: %s" % str(response) - self.module.fail_json(msg=error) - - def update_filesystem(self, filesystem_id): - # Update FileSystem - api = 'FileSystems/' + filesystem_id - response, error = self.rest_api.put(api, self.data) - if not error: - if 
self.is_job_done(response): - return - error = "Error: unexpected response on FileSystems update: %s" % str(response) - self.module.fail_json(msg=error) - - def apply(self): - """ - Perform pre-checks, call functions and exit - """ - - filesystem = None - filesystem_id = self.get_filesystem_id() - - if filesystem_id: - # Getting the FileSystem details - filesystem = self.get_filesystem(filesystem_id) - - cd_action = self.na_helper.get_cd_action(filesystem, self.parameters) - - if cd_action is None and self.parameters['state'] == 'present': - # Check if we need to update the fileSystem - update_filesystem = False - if filesystem['quotaInBytes'] is not None and 'quotaInBytes' in self.parameters \ - and filesystem['quotaInBytes'] != self.parameters['quotaInBytes']: - update_filesystem = True - elif filesystem['creationToken'] is not None and 'creationToken' in self.parameters \ - and filesystem['creationToken'] != self.parameters['creationToken']: - update_filesystem = True - elif filesystem['serviceLevel'] is not None and 'serviceLevel' in self.parameters \ - and filesystem['serviceLevel'] != self.parameters['serviceLevel']: - update_filesystem = True - elif 'exportPolicy' in filesystem and filesystem['exportPolicy']['rules'] is not None and 'exportPolicy' in self.parameters: - for rule_org in filesystem['exportPolicy']['rules']: - for rule in self.parameters['exportPolicy']['rules']: - if rule_org['allowedClients'] != rule['allowedClients']: - update_filesystem = True - elif rule_org['unixReadOnly'] != rule['unixReadOnly']: - update_filesystem = True - elif rule_org['unixReadWrite'] != rule['unixReadWrite']: - update_filesystem = True - - if update_filesystem: - self.na_helper.changed = True - - result_message = "" - - if self.na_helper.changed: - if self.module.check_mode: - # Skip changes - result_message = "Check mode, skipping changes" - else: - if cd_action == "create": - self.create_filesystem() - result_message = "FileSystem Created" - elif cd_action == 
"delete": - self.delete_filesystem(filesystem_id) - result_message = "FileSystem Deleted" - else: # modify - self.update_filesystem(filesystem_id) - result_message = "FileSystem Updated" - self.module.exit_json(changed=self.na_helper.changed, msg=result_message) - - -def main(): - """ - Main function - """ - aws_cvs_netapp_filesystem = AwsCvsNetappFileSystem() - aws_cvs_netapp_filesystem.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/aws/plugins/modules/aws_netapp_cvs_pool.py b/ansible_collections/netapp/aws/plugins/modules/aws_netapp_cvs_pool.py deleted file mode 100644 index fa4818a3b..000000000 --- a/ansible_collections/netapp/aws/plugins/modules/aws_netapp_cvs_pool.py +++ /dev/null @@ -1,267 +0,0 @@ -#!/usr/bin/python - -# (c) 2019, NetApp Inc. -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -"""AWS Cloud Volumes Services - Manage Pools""" - -from __future__ import absolute_import, division, print_function - -__metaclass__ = type - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'community'} - - -DOCUMENTATION = ''' - -module: aws_netapp_cvs_pool - -short_description: NetApp AWS Cloud Volumes Service Manage Pools. -extends_documentation_fragment: - - netapp.aws.netapp.awscvs -version_added: 2.9.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> -description: - - Create, Update, Delete Pool on AWS Cloud Volumes Service. - -options: - state: - description: - - Whether the specified pool should exist or not. - choices: ['present', 'absent'] - required: true - type: str - region: - description: - - The region to which the Pool is associated. 
- required: true - type: str - name: - description: - - pool name ( The human readable name of the Pool ) - - name can be used for create, update and delete operations - required: true - type: str - serviceLevel: - description: - - The service level of the Pool - - can be used with pool create, update operations - choices: ['basic', 'standard', 'extreme'] - type: str - sizeInBytes: - description: - - Size of the Pool in bytes - - can be used with pool create, update operations - - minimum value is 4000000000000 bytes - type: int - vendorID: - description: - - A vendor ID for the Pool. E.g. an ID allocated by a vendor service for the Pool. - - can be used with pool create, update operations - - must be unique - type: str - from_name: - description: - - rename the existing pool name ( The human readable name of the Pool ) - - I(from_name) is the existing name, and I(name) the new name - - can be used with update operation - type: str -''' - -EXAMPLES = """ -- name: Create a new Pool - aws_netapp_cvs_pool: - state: present - name: TestPoolBB12 - serviceLevel: extreme - sizeInBytes: 4000000000000 - vendorID: ansiblePoolTestVendorBB12 - region: us-east-1 - api_url: cds-aws-bundles.netapp.com - api_key: MyAPiKey - secret_key: MySecretKey - -- name: Delete a Pool - aws_netapp_cvs_pool: - state: absent - name: TestPoolBB7 - region: us-east-1 - api_url: cds-aws-bundles.netapp.com - api_key: MyAPiKey - secret_key: MySecretKey - -- name: Update a Pool - aws_netapp_cvs_pool: - state: present - from_name: TestPoolBB12 - name: Mynewpool7 - vendorID: ansibleVendorMynewpool15 - serviceLevel: extreme - sizeInBytes: 4000000000000 - region: us-east-1 - api_url: cds-aws-bundles.netapp.com - api_key: MyAPiKey - secret_key: MySecretKey - -""" - -RETURN = ''' -''' - -from ansible.module_utils.basic import AnsibleModule -import ansible_collections.netapp.aws.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.aws.plugins.module_utils.netapp_module import 
NetAppModule -from ansible_collections.netapp.aws.plugins.module_utils.netapp import AwsCvsRestAPI - - -class NetAppAWSCVS(object): - '''Class for Pool operations ''' - - def __init__(self): - """ - Parse arguments, setup state variables, - """ - self.argument_spec = netapp_utils.aws_cvs_host_argument_spec() - self.argument_spec.update(dict( - state=dict(required=True, choices=['present', 'absent']), - region=dict(required=True, type='str'), - name=dict(required=True, type='str'), - from_name=dict(required=False, type='str'), - serviceLevel=dict(required=False, choices=['basic', 'standard', 'extreme'], type='str'), - sizeInBytes=dict(required=False, type='int'), - vendorID=dict(required=False, type='str'), - )) - self.module = AnsibleModule( - argument_spec=self.argument_spec, - supports_check_mode=True - ) - - self.na_helper = NetAppModule() - self.parameters = self.na_helper.set_parameters(self.module.params) - self.rest_api = AwsCvsRestAPI(self.module) - self.sizeinbytes_min_value = 4000000000000 - - def get_aws_netapp_cvs_pool(self, name=None): - """ - Returns Pool object if exists else Return None - """ - pool_info = None - - if name is None: - name = self.parameters['name'] - - pools, error = self.rest_api.get('Pools') - - if error is None and pools is not None: - for pool in pools: - if 'name' in pool and pool['region'] == self.parameters['region']: - if pool['name'] == name: - pool_info = pool - break - - return pool_info - - def create_aws_netapp_cvs_pool(self): - """ - Create a pool - """ - api = 'Pools' - - for key in ['serviceLevel', 'sizeInBytes', 'vendorID']: - if key not in self.parameters.keys() or self.parameters[key] is None: - self.module.fail_json(changed=False, msg="Mandatory key '%s' required" % (key)) - - pool = { - "name": self.parameters['name'], - "region": self.parameters['region'], - "serviceLevel": self.parameters['serviceLevel'], - "sizeInBytes": self.parameters['sizeInBytes'], - "vendorID": self.parameters['vendorID'] - } - - dummy, 
error = self.rest_api.post(api, pool) - if error is not None: - self.module.fail_json(changed=False, msg=error) - - def update_aws_netapp_cvs_pool(self, update_pool_info, pool_id): - """ - Update a pool - """ - api = 'Pools/' + pool_id - - pool = { - "name": update_pool_info['name'], - "region": self.parameters['region'], - "serviceLevel": update_pool_info['serviceLevel'], - "sizeInBytes": update_pool_info['sizeInBytes'], - "vendorID": update_pool_info['vendorID'] - } - - dummy, error = self.rest_api.put(api, pool) - if error is not None: - self.module.fail_json(changed=False, msg=error) - - def delete_aws_netapp_cvs_pool(self, pool_id): - """ - Delete a pool - """ - api = 'Pools/' + pool_id - data = None - dummy, error = self.rest_api.delete(api, data) - - if error is not None: - self.module.fail_json(changed=False, msg=error) - - def apply(self): - """ - Perform pre-checks, call functions and exit - """ - update_required = False - cd_action = None - - if 'sizeInBytes' in self.parameters.keys() and self.parameters['sizeInBytes'] < self.sizeinbytes_min_value: - self.module.fail_json(changed=False, msg="sizeInBytes should be greater than or equal to %d" % (self.sizeinbytes_min_value)) - - current = self.get_aws_netapp_cvs_pool() - if self.parameters.get('from_name'): - existing = self.get_aws_netapp_cvs_pool(self.parameters['from_name']) - rename = self.na_helper.is_rename_action(existing, current) - if rename is None: - self.module.fail_json(changed=False, msg="unable to rename pool: '%s' does not exist" % self.parameters['from_name']) - if rename: - current = existing - else: - cd_action = self.na_helper.get_cd_action(current, self.parameters) - - if cd_action is None and self.parameters['state'] == 'present': - keys_to_check = ['name', 'vendorID', 'sizeInBytes', 'serviceLevel'] - update_pool_info, update_required = self.na_helper.compare_and_update_values(current, self.parameters, keys_to_check) - - if update_required is True: - self.na_helper.changed = True - 
cd_action = 'update' - - if self.na_helper.changed: - if self.module.check_mode: - pass - else: - if cd_action == 'update': - self.update_aws_netapp_cvs_pool(update_pool_info, current['poolId']) - elif cd_action == 'create': - self.create_aws_netapp_cvs_pool() - elif cd_action == 'delete': - self.delete_aws_netapp_cvs_pool(current['poolId']) - - self.module.exit_json(changed=self.na_helper.changed) - - -def main(): - '''Main Function''' - aws_cvs_netapp_pool = NetAppAWSCVS() - aws_cvs_netapp_pool.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/aws/plugins/modules/aws_netapp_cvs_snapshots.py b/ansible_collections/netapp/aws/plugins/modules/aws_netapp_cvs_snapshots.py deleted file mode 100644 index fa5c5f87c..000000000 --- a/ansible_collections/netapp/aws/plugins/modules/aws_netapp_cvs_snapshots.py +++ /dev/null @@ -1,245 +0,0 @@ -#!/usr/bin/python - -# (c) 2019, NetApp Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -"""AWS Cloud Volumes Services - Manage Snapshots""" - -from __future__ import absolute_import, division, print_function - -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'community'} - - -DOCUMENTATION = ''' - -module: aws_netapp_cvs_snapshots - -short_description: NetApp AWS Cloud Volumes Service Manage Snapshots. -extends_documentation_fragment: - - netapp.aws.netapp.awscvs -version_added: 2.9.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> -description: -- Create, Update, Delete Snapshot on AWS Cloud Volumes Service. - -options: - state: - description: - - Whether the specified snapshot should exist or not. - required: true - type: str - choices: ['present', 'absent'] - - region: - description: - - The region to which the snapshot belongs to. 
- required: true - type: str - - name: - description: - - Name of the snapshot - required: true - type: str - - fileSystemId: - description: - - Name or Id of the filesystem. - - Required for create operation - type: str - - from_name: - description: - - ID or Name of the snapshot to rename. - - Required to create an snapshot called 'name' by renaming 'from_name'. - type: str -''' - -EXAMPLES = """ -- name: Create Snapshot - aws_netapp_cvs_snapshots: - state: present - region: us-east-1 - name: testSnapshot - fileSystemId: testVolume - api_url : cds-aws-bundles.netapp.com - api_key: myApiKey - secret_key : mySecretKey - -- name: Update Snapshot - aws_netapp_cvs_snapshots: - state: present - region: us-east-1 - name: testSnapshot - renamed - from_name: testSnapshot - fileSystemId: testVolume - api_url : cds-aws-bundles.netapp.com - api_key: myApiKey - secret_key : mySecretKey - -- name: Delete Snapshot - aws_netapp_cvs_snapshots: - state: absent - region: us-east-1 - name: testSnapshot - api_url : cds-aws-bundles.netapp.com - api_key: myApiKey - secret_key : mySecretKey -""" - -RETURN = """ -""" - -from ansible.module_utils.basic import AnsibleModule -import ansible_collections.netapp.aws.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.aws.plugins.module_utils.netapp_module import NetAppModule -from ansible_collections.netapp.aws.plugins.module_utils.netapp import AwsCvsRestAPI - - -class AwsCvsNetappSnapshot(object): - """ - Contains methods to parse arguments, - derive details of AWS_CVS objects - and send requests to AWS CVS via - the restApi - """ - - def __init__(self): - """ - Parse arguments, setup state variables, - check paramenters and ensure request module is installed - """ - self.argument_spec = netapp_utils.aws_cvs_host_argument_spec() - self.argument_spec.update(dict( - state=dict(required=True, choices=['present', 'absent']), - region=dict(required=True, type='str'), - name=dict(required=True, type='str'), - 
from_name=dict(required=False, type='str'), - fileSystemId=dict(required=False, type='str') - )) - - self.module = AnsibleModule( - argument_spec=self.argument_spec, - required_if=[ - ('state', 'present', ['fileSystemId']), - ], - supports_check_mode=True - ) - - self.na_helper = NetAppModule() - - # set up state variables - self.parameters = self.na_helper.set_parameters(self.module.params) - # Calling generic AWSCVS restApi class - self.rest_api = AwsCvsRestAPI(self.module) - - # Checking for the parameters passed and create new parameters list - self.data = {} - for key in self.parameters.keys(): - self.data[key] = self.parameters[key] - - def get_snapshot_id(self, name): - # Check if snapshot exists - # Return snpashot Id If Snapshot is found, None otherwise - list_snapshots, error = self.rest_api.get('Snapshots') - - if error: - self.module.fail_json(msg=error) - - for snapshot in list_snapshots: - if snapshot['name'] == name: - return snapshot['snapshotId'] - return None - - def get_filesystem_id(self): - # Check given FileSystem is exists - # Return fileSystemId is found, None otherwise - list_filesystem, error = self.rest_api.get('FileSystems') - - if error: - self.module.fail_json(msg=error) - for filesystem in list_filesystem: - if filesystem['fileSystemId'] == self.parameters['fileSystemId']: - return filesystem['fileSystemId'] - elif filesystem['creationToken'] == self.parameters['fileSystemId']: - return filesystem['fileSystemId'] - return None - - def create_snapshot(self): - # Create Snapshot - api = 'Snapshots' - dummy, error = self.rest_api.post(api, self.data) - if error: - self.module.fail_json(msg=error) - - def rename_snapshot(self, snapshot_id): - # Rename Snapshot - api = 'Snapshots/' + snapshot_id - dummy, error = self.rest_api.put(api, self.data) - if error: - self.module.fail_json(msg=error) - - def delete_snapshot(self, snapshot_id): - # Delete Snapshot - api = 'Snapshots/' + snapshot_id - dummy, error = self.rest_api.delete(api, 
self.data) - if error: - self.module.fail_json(msg=error) - - def apply(self): - """ - Perform pre-checks, call functions and exit - """ - self.snapshot_id = self.get_snapshot_id(self.data['name']) - - if self.snapshot_id is None and 'fileSystemId' in self.data: - self.filesystem_id = self.get_filesystem_id() - self.data['fileSystemId'] = self.filesystem_id - if self.filesystem_id is None: - self.module.fail_json(msg='Error: Specified filesystem id %s does not exist ' % self.data['fileSystemId']) - - cd_action = self.na_helper.get_cd_action(self.snapshot_id, self.data) - result_message = "" - if self.na_helper.changed: - if self.module.check_mode: - # Skip changes - result_message = "Check mode, skipping changes" - else: - if cd_action == "delete": - self.delete_snapshot(self.snapshot_id) - result_message = "Snapshot Deleted" - - elif cd_action == "create": - if 'from_name' in self.data: - # If cd_action is craete and from_name is given - snapshot_id = self.get_snapshot_id(self.data['from_name']) - if snapshot_id is not None: - # If resource pointed by from_name exists, rename the snapshot to name - self.rename_snapshot(snapshot_id) - result_message = "Snapshot Updated" - else: - # If resource pointed by from_name does not exists, error out - self.module.fail_json(msg="Resource does not exist : %s" % self.data['from_name']) - else: - self.create_snapshot() - # If from_name is not defined, Create from scratch. - result_message = "Snapshot Created" - - self.module.exit_json(changed=self.na_helper.changed, msg=result_message) - - -def main(): - """ - Main function - """ - aws_netapp_cvs_snapshots = AwsCvsNetappSnapshot() - aws_netapp_cvs_snapshots.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/aws/requirements.txt b/ansible_collections/netapp/aws/requirements.txt deleted file mode 100644 index 663bd1f6a..000000000 --- a/ansible_collections/netapp/aws/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -requests
\ No newline at end of file diff --git a/ansible_collections/netapp/aws/tests/unit/compat/__init__.py b/ansible_collections/netapp/aws/tests/unit/compat/__init__.py deleted file mode 100644 index e69de29bb..000000000 --- a/ansible_collections/netapp/aws/tests/unit/compat/__init__.py +++ /dev/null diff --git a/ansible_collections/netapp/aws/tests/unit/compat/builtins.py b/ansible_collections/netapp/aws/tests/unit/compat/builtins.py deleted file mode 100644 index f60ee6782..000000000 --- a/ansible_collections/netapp/aws/tests/unit/compat/builtins.py +++ /dev/null @@ -1,33 +0,0 @@ -# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com> -# -# This file is part of Ansible -# -# Ansible is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Ansible is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Ansible. If not, see <http://www.gnu.org/licenses/>. 
- -# Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -# -# Compat for python2.7 -# - -# One unittest needs to import builtins via __import__() so we need to have -# the string that represents it -try: - import __builtin__ -except ImportError: - BUILTINS = 'builtins' -else: - BUILTINS = '__builtin__' diff --git a/ansible_collections/netapp/aws/tests/unit/compat/mock.py b/ansible_collections/netapp/aws/tests/unit/compat/mock.py deleted file mode 100644 index 0972cd2e8..000000000 --- a/ansible_collections/netapp/aws/tests/unit/compat/mock.py +++ /dev/null @@ -1,122 +0,0 @@ -# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com> -# -# This file is part of Ansible -# -# Ansible is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Ansible is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Ansible. If not, see <http://www.gnu.org/licenses/>. - -# Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -''' -Compat module for Python3.x's unittest.mock module -''' -import sys - -# Python 2.7 - -# Note: Could use the pypi mock library on python3.x as well as python2.x. 
It -# is the same as the python3 stdlib mock library - -try: - # Allow wildcard import because we really do want to import all of mock's - # symbols into this compat shim - # pylint: disable=wildcard-import,unused-wildcard-import - from unittest.mock import * -except ImportError: - # Python 2 - # pylint: disable=wildcard-import,unused-wildcard-import - try: - from mock import * - except ImportError: - print('You need the mock library installed on python2.x to run tests') - - -# Prior to 3.4.4, mock_open cannot handle binary read_data -if sys.version_info >= (3,) and sys.version_info < (3, 4, 4): - file_spec = None - - def _iterate_read_data(read_data): - # Helper for mock_open: - # Retrieve lines from read_data via a generator so that separate calls to - # readline, read, and readlines are properly interleaved - sep = b'\n' if isinstance(read_data, bytes) else '\n' - data_as_list = [l + sep for l in read_data.split(sep)] - - if data_as_list[-1] == sep: - # If the last line ended in a newline, the list comprehension will have an - # extra entry that's just a newline. Remove this. - data_as_list = data_as_list[:-1] - else: - # If there wasn't an extra newline by itself, then the file being - # emulated doesn't have a newline to end the last line remove the - # newline that our naive format() added - data_as_list[-1] = data_as_list[-1][:-1] - - for line in data_as_list: - yield line - - def mock_open(mock=None, read_data=''): - """ - A helper function to create a mock to replace the use of `open`. It works - for `open` called directly or used as a context manager. - - The `mock` argument is the mock object to configure. If `None` (the - default) then a `MagicMock` will be created for you, with the API limited - to methods or attributes available on standard file handles. - - `read_data` is a string for the `read` methoddline`, and `readlines` of the - file handle to return. This is an empty string by default. 
- """ - def _readlines_side_effect(*args, **kwargs): - if handle.readlines.return_value is not None: - return handle.readlines.return_value - return list(_data) - - def _read_side_effect(*args, **kwargs): - if handle.read.return_value is not None: - return handle.read.return_value - return type(read_data)().join(_data) - - def _readline_side_effect(): - if handle.readline.return_value is not None: - while True: - yield handle.readline.return_value - for line in _data: - yield line - - global file_spec - if file_spec is None: - import _io - file_spec = list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO)))) - - if mock is None: - mock = MagicMock(name='open', spec=open) - - handle = MagicMock(spec=file_spec) - handle.__enter__.return_value = handle - - _data = _iterate_read_data(read_data) - - handle.write.return_value = None - handle.read.return_value = None - handle.readline.return_value = None - handle.readlines.return_value = None - - handle.read.side_effect = _read_side_effect - handle.readline.side_effect = _readline_side_effect() - handle.readlines.side_effect = _readlines_side_effect - - mock.return_value = handle - return mock diff --git a/ansible_collections/netapp/aws/tests/unit/compat/unittest.py b/ansible_collections/netapp/aws/tests/unit/compat/unittest.py deleted file mode 100644 index 73a20cf8c..000000000 --- a/ansible_collections/netapp/aws/tests/unit/compat/unittest.py +++ /dev/null @@ -1,44 +0,0 @@ -# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com> -# -# This file is part of Ansible -# -# Ansible is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Ansible is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Ansible. If not, see <http://www.gnu.org/licenses/>. - -# Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -''' -Compat module for Python2.7's unittest module -''' - -import sys - -import pytest - -# Allow wildcard import because we really do want to import all of -# unittests's symbols into this compat shim -# pylint: disable=wildcard-import,unused-wildcard-import -if sys.version_info < (2, 7): - try: - # Need unittest2 on python2.6 - from unittest2 import * - except ImportError: - print('You need unittest2 installed on python2.6.x to run tests') - - class TestCase: - """ skip everything """ - pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as unittest2 may not be available') -else: - from unittest import * diff --git a/ansible_collections/netapp/aws/tests/unit/plugins/module_utils/test_netapp.py b/ansible_collections/netapp/aws/tests/unit/plugins/module_utils/test_netapp.py deleted file mode 100644 index 2fb3b7ba0..000000000 --- a/ansible_collections/netapp/aws/tests/unit/plugins/module_utils/test_netapp.py +++ /dev/null @@ -1,195 +0,0 @@ -# Copyright (c) 2018 NetApp -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -''' unit tests for module_utils netapp.py ''' -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import json -import os.path -import sys -import tempfile - -import pytest - -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -from ansible_collections.netapp.aws.tests.unit.compat.mock import patch - -import ansible_collections.netapp.aws.plugins.module_utils.netapp as netapp_utils - - -if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7): - pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 
as requests is not be available') - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -SRR = { - 'empty_good': (dict(), None), - 'get_data': (dict(records=['data1', 'data2']), None), - 'end_of_sequence': (None, "Unexpected call to send_request"), - 'generic_error': (None, "Expected error"), -} - - -def mock_args(feature_flags=None): - args = { - 'api_key': 'api_key', - 'api_url': 'api_url', - 'secret_key': 'secret_key!', - } - if feature_flags is not None: - args.update({'feature_flags': feature_flags}) - return args - - -def create_module(args): - argument_spec = netapp_utils.aws_cvs_host_argument_spec() - set_module_args(args) - module = basic.AnsibleModule(argument_spec) - return module - - -def create_restapi_object(args): - module = create_module(args) - module.fail_json = fail_json - rest_api = netapp_utils.AwsCvsRestAPI(module) - return rest_api - - -class mockResponse: - def __init__(self, json_data, status_code, raise_action=None): - self.json_data = json_data - self.status_code = status_code - self.content = json_data - self.raise_action = raise_action - - def raise_for_status(self): - pass - - def json(self): - if self.raise_action == 'bad_json': - raise ValueError(self.raise_action) - return self.json_data - - -@patch('ansible_collections.netapp.aws.plugins.module_utils.netapp.AwsCvsRestAPI.send_request') -def test_empty_get(mock_request): - ''' get with no data ''' - mock_request.side_effect = [ - SRR['empty_good'], - 
SRR['end_of_sequence'], - ] - rest_api = create_restapi_object(mock_args()) - message, error = rest_api.get('api', None) - assert not error - assert len(message) == 0 - - -@patch('ansible_collections.netapp.aws.plugins.module_utils.netapp.AwsCvsRestAPI.send_request') -def test_get_data(mock_request): - ''' get with data ''' - mock_request.side_effect = [ - SRR['get_data'], - SRR['end_of_sequence'], - ] - rest_api = create_restapi_object(mock_args()) - message, error = rest_api.get('api', None) - assert not error - print('get:', message) - assert message['records'] == SRR['get_data'][0]['records'] - - -def test_has_feature_success_default_0(): - ''' existing feature_flag with default of False''' - flag = 'trace_apis' - module = create_module(mock_args()) - value = netapp_utils.has_feature(module, flag) - assert not value - - -def test_has_feature_success_default_1(): - ''' existing feature_flag with default of True''' - flag = 'strict_json_check' - module = create_module(mock_args()) - value = netapp_utils.has_feature(module, flag) - assert value - - -def test_has_feature_success_user_true(): - ''' existing feature_flag with value set to True ''' - flag = 'user_deprecation_warning' - args = dict(mock_args({flag: True})) - module = create_module(args) - value = netapp_utils.has_feature(module, flag) - assert value - - -def test_has_feature_success_user_false(): - ''' existing feature_flag with value set to False ''' - flag = 'user_deprecation_warning' - args = dict(mock_args({flag: False})) - print(args) - module = create_module(args) - value = netapp_utils.has_feature(module, flag) - assert not value - - -def test_has_feature_invalid_key(): - ''' existing feature_flag with unknown key ''' - flag = 'deprecation_warning_bad_key' - module = create_module(mock_args()) - # replace ANsible fail method with ours - module.fail_json = fail_json - with pytest.raises(AnsibleFailJson) as exc: - netapp_utils.has_feature(module, flag) - msg = 'Internal error: unexpected feature 
flag: %s' % flag - assert exc.value.args[0]['msg'] == msg - - -@patch('requests.request') -def test_empty_get_sent(mock_request): - ''' get with no data ''' - mock_request.return_value = mockResponse(json_data=dict(_links='me'), status_code=200) - rest_api = create_restapi_object(mock_args()) - message, error = rest_api.get('api', None) - assert not error - # only one key (_links) - assert len(message) == 1 - - -@patch('requests.request') -def test_empty_get_sent_bad_json(mock_request): - ''' get with no data ''' - mock_request.return_value = mockResponse(json_data='anything', status_code=200, raise_action='bad_json') - rest_api = create_restapi_object(mock_args()) - message, error = rest_api.get('api', None) - assert error - assert 'Expecting json, got: anything' in error - print('errors:', rest_api.errors) - print('debug:', rest_api.debug_logs) - - -@patch('requests.request') -def test_empty_get_sent_bad_but_empty_json(mock_request): - ''' get with no data ''' - mock_request.return_value = mockResponse(json_data='', status_code=200, raise_action='bad_json') - rest_api = create_restapi_object(mock_args()) - message, error = rest_api.get('api', None) - assert error is None - assert message is None diff --git a/ansible_collections/netapp/aws/tests/unit/plugins/modules/test_aws_netapp_cvs_active_directory.py b/ansible_collections/netapp/aws/tests/unit/plugins/modules/test_aws_netapp_cvs_active_directory.py deleted file mode 100644 index 98755b939..000000000 --- a/ansible_collections/netapp/aws/tests/unit/plugins/modules/test_aws_netapp_cvs_active_directory.py +++ /dev/null @@ -1,117 +0,0 @@ -# (c) 2019, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -''' unit tests ONTAP Ansible module: ''' - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type -import json -import pytest -import sys - -from ansible.module_utils import basic -from ansible.module_utils._text import 
to_bytes -from ansible_collections.netapp.aws.tests.unit.compat import unittest -from ansible_collections.netapp.aws.tests.unit.compat.mock import patch -import ansible_collections.netapp.aws.plugins.module_utils.netapp as netapp_utils - -from ansible_collections.netapp.aws.plugins.modules.aws_netapp_cvs_active_directory \ - import AwsCvsNetappActiveDir as ad_module - - -if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7): - pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not be available') - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - - def set_default_args_fail_check(self): - return dict({ - 'state': 'present', - 'DNS': '101.102.103.123', - 'domain': 'mydomain.com', - 'password': 'netapp1!', - 'username': 'myuser', - 'api_url': 'myapiurl.com', - 'secret_key': 
'mysecretkey', - 'api_key': 'myapikey' - }) - - def set_default_args_pass_check(self): - return dict({ - 'state': 'present', - 'DNS': '101.102.103.123', - 'domain': 'mydomain.com', - 'password': 'netapp1!', - 'region': 'us-east-1', - 'netBIOS': 'testing', - 'username': 'myuser', - 'api_url': 'myapiurl.com', - 'secret_key': 'mysecretkey', - 'api_key': 'myapikey' - }) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args(self.set_default_args_fail_check()) - ad_module() - print('Info: %s' % exc.value.args[0]['msg']) - - def test_module_fail_when_required_args_present(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleExitJson) as exc: - set_module_args(self.set_default_args_pass_check()) - ad_module() - exit_json(changed=True, msg="TestCase Fail when required ars are present") - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.aws.plugins.modules.aws_netapp_cvs_active_directory.AwsCvsNetappActiveDir.get_activedirectory_id') - @patch('ansible_collections.netapp.aws.plugins.modules.aws_netapp_cvs_active_directory.AwsCvsNetappActiveDir.get_activedirectory') - @patch('ansible_collections.netapp.aws.plugins.module_utils.netapp.AwsCvsRestAPI.post') - def test_create_aws_netapp_cvs_activedir(self, get_post_api, get_aws_api, get_ad_id): - set_module_args(self.set_default_args_pass_check()) - my_obj = ad_module() - - get_aws_api.return_value = None - get_post_api.return_value = None, None - get_ad_id.return_value = "123" - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print('Info: test_create_aws_netapp_cvs_active_directory: %s' % repr(exc.value)) - assert exc.value.args[0]['changed'] diff --git a/ansible_collections/netapp/aws/tests/unit/plugins/modules/test_aws_netapp_cvs_filesystems.py 
b/ansible_collections/netapp/aws/tests/unit/plugins/modules/test_aws_netapp_cvs_filesystems.py deleted file mode 100644 index b5a4bad84..000000000 --- a/ansible_collections/netapp/aws/tests/unit/plugins/modules/test_aws_netapp_cvs_filesystems.py +++ /dev/null @@ -1,155 +0,0 @@ -# (c) 2019, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -''' unit tests AWS CVS FileSystems Ansible module: aws_netapp_cvs_filesystems''' - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import json -import pytest -import sys - -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -from ansible_collections.netapp.aws.tests.unit.compat import unittest -from ansible_collections.netapp.aws.tests.unit.compat.mock import patch -import ansible_collections.netapp.aws.plugins.module_utils.netapp as netapp_utils - -from ansible_collections.netapp.aws.plugins.modules.aws_netapp_cvs_filesystems \ - import AwsCvsNetappFileSystem as fileSystem_module - - -if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7): - pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not be available') - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # 
pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - - def set_default_args_fail_check(self): - return dict({ - 'creationToken': 'TestFilesystem', - 'region': 'us-east-1', - 'quotaInBytes': 3424, - 'serviceLevel': 'standard', - 'api_url': 'hostname.com', - 'api_key': 'myapikey', - 'secret_key': 'mysecretkey' - }) - - def set_default_args_pass_check(self): - return dict({ - 'state': 'present', - 'creationToken': 'TestFilesystem', - 'region': 'us-east-1', - 'quotaInBytes': 3424, - 'serviceLevel': 'standard', - 'api_url': 'hostname.com', - 'api_key': 'myapikey', - 'secret_key': 'mysecretkey' - }) - - def set_args_create_aws_netapp_cvs_filesystems(self): - return dict({ - 'state': 'present', - 'creationToken': 'TestFilesystem', - 'region': 'us-east-1', - 'quotaInBytes': 3424, - 'serviceLevel': 'standard', - 'api_url': 'hostname.com', - 'api_key': 'myapikey', - 'secret_key': 'mysecretkey' - }) - - def set_args_delete_aws_netapp_cvs_filesystems(self): - return dict({ - 'state': 'absent', - 'creationToken': 'TestFilesystem', - 'region': 'us-east-1', - 'api_url': 'hostname.com', - 'api_key': 'myapikey', - 'secret_key': 'mysecretkey' - }) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args(self.set_default_args_fail_check()) - fileSystem_module() - print('Info: test_module_fail_when_required_args_missing: %s' % exc.value.args[0]['msg']) - - def test_module_fail_when_required_args_present(self): - ''' required arguments are reported as errors ''' 
- with pytest.raises(AnsibleExitJson) as exc: - set_module_args(self.set_default_args_pass_check()) - fileSystem_module() - exit_json(changed=True, msg="Induced arguments check") - print('Info: test_module_fail_when_required_args_present: %s' % exc.value.args[0]['msg']) - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.aws.plugins.modules.aws_netapp_cvs_filesystems.AwsCvsNetappFileSystem.get_filesystem_id') - @patch('ansible_collections.netapp.aws.plugins.module_utils.netapp.AwsCvsRestAPI.get_state') - @patch('ansible_collections.netapp.aws.plugins.module_utils.netapp.AwsCvsRestAPI.post') - def test_create_aws_netapp_cvs_snapshots_pass(self, get_post_api, get_state_api, get_filesystem_id): - set_module_args(self.set_args_create_aws_netapp_cvs_filesystems()) - my_obj = fileSystem_module() - get_filesystem_id.return_value = None - get_state_api.return_value = 'done' - response = {'jobs': [{'jobId': 'dummy'}]} - get_post_api.return_value = response, None - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print('Info: test_create_aws_netapp_cvs_filesystem_pass: %s' % repr(exc.value.args[0])) - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.aws.plugins.modules.aws_netapp_cvs_filesystems.AwsCvsNetappFileSystem.get_filesystem_id') - @patch('ansible_collections.netapp.aws.plugins.modules.aws_netapp_cvs_filesystems.AwsCvsNetappFileSystem.get_filesystem') - @patch('ansible_collections.netapp.aws.plugins.module_utils.netapp.AwsCvsRestAPI.get_state') - @patch('ansible_collections.netapp.aws.plugins.module_utils.netapp.AwsCvsRestAPI.delete') - def test_delete_aws_netapp_cvs_snapshots_pass(self, get_post_api, get_state_api, get_filesystem, get_filesystem_id): - set_module_args(self.set_args_delete_aws_netapp_cvs_filesystems()) - my_obj = fileSystem_module() - get_filesystem_id.return_value = '432-432-532423-4232' - get_filesystem.return_value = 'dummy' - get_state_api.return_value = 'done' - response = {'jobs': 
[{'jobId': 'dummy'}]} - get_post_api.return_value = response, None - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print('Info: test_create_aws_netapp_cvs_filesyste_pass: %s' % repr(exc.value.args[0])) - assert exc.value.args[0]['changed'] diff --git a/ansible_collections/netapp/aws/tests/unit/plugins/modules/test_aws_netapp_cvs_pool.py b/ansible_collections/netapp/aws/tests/unit/plugins/modules/test_aws_netapp_cvs_pool.py deleted file mode 100644 index 26e822de7..000000000 --- a/ansible_collections/netapp/aws/tests/unit/plugins/modules/test_aws_netapp_cvs_pool.py +++ /dev/null @@ -1,258 +0,0 @@ -# (c) 2019, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -''' Unit tests for AWS Cloud Volumes Services - Manage Pools ''' - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type -import json -import pytest -import sys - -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -from ansible_collections.netapp.aws.tests.unit.compat import unittest -from ansible_collections.netapp.aws.tests.unit.compat.mock import patch -import ansible_collections.netapp.aws.plugins.module_utils.netapp as netapp_utils - -from ansible_collections.netapp.aws.plugins.modules.aws_netapp_cvs_pool \ - import NetAppAWSCVS as pool_module - - -if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7): - pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not be available') - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught 
by the test case""" - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - - def set_default_args_fail_check(self): - return dict({ - 'from_name': 'TestPoolAA', - 'name': 'TestPoolAA_new', - 'serviceLevel': 'standard', - 'sizeInBytes': 4000000000000, - 'vendorID': 'ansiblePoolTestVendorA', - 'region': 'us-east-1', - 'api_url': 'hostname.invalid', - 'api_key': 'myapikey', - 'secret_key': 'mysecretkey' - }) - - def set_default_args_pass_check(self): - return dict({ - 'state': 'present', - 'from_name': 'TestPoolAA', - 'name': 'TestPoolAA_new', - 'serviceLevel': 'standard', - 'sizeInBytes': 4000000000000, - 'vendorID': 'ansiblePoolTestVendorA', - 'region': 'us-east-1', - 'api_url': 'hostname.invalid', - 'api_key': 'myapikey', - 'secret_key': 'mysecretkey' - }) - - def set_args_create_aws_netapp_cvs_pool(self): - return dict({ - 'state': 'present', - 'name': 'TestPoolAA', - 'serviceLevel': 'standard', - 'sizeInBytes': 4000000000000, - 'vendorID': 'ansiblePoolTestVendorA', - 'region': 'us-east-1', - 'api_url': 'hostname.invalid', - 'api_key': 'myapikey', - 'secret_key': 'mysecretkey' - }) - - def set_args_update_aws_netapp_cvs_pool(self): - return dict({ - 'state': 'present', - 'from_name': 'TestPoolAA', - 'name': 'TestPoolAA_new', - 'serviceLevel': 'standard', - 'sizeInBytes': 
4000000000000, - 'vendorID': 'ansiblePoolTestVendorA', - 'region': 'us-east-1', - 'api_url': 'hostname.invalid', - 'api_key': 'myapikey', - 'secret_key': 'mysecretkey' - }) - - def set_args_delete_aws_netapp_cvs_pool(self): - return dict({ - 'state': 'absent', - 'name': 'TestPoolAA', - 'region': 'us-east-1', - 'api_url': 'hostname.invalid', - 'api_key': 'myapikey', - 'secret_key': 'mysecretkey' - }) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args(self.set_default_args_fail_check()) - pool_module() - print('Info: test_module_fail_when_required_args_missing: %s' % exc.value.args[0]['msg']) - - def test_module_pass_when_required_args_present(self): - ''' required arguments are present ''' - with pytest.raises(AnsibleExitJson) as exc: - set_module_args(self.set_default_args_pass_check()) - pool_module() - exit_json(changed=True, msg="Induced arguments check") - print('Info: test_module_pass_when_required_args_present: %s' % exc.value.args[0]['msg']) - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.aws.plugins.modules.aws_netapp_cvs_pool.NetAppAWSCVS.get_aws_netapp_cvs_pool') - @patch('ansible_collections.netapp.aws.plugins.module_utils.netapp.AwsCvsRestAPI.put') - def test_update_aws_netapp_cvs_pool_pass(self, get_put_api, get_aws_api): - set_module_args(self.set_args_update_aws_netapp_cvs_pool()) - my_obj = pool_module() - my_pool = { - "name": "Dummyname", - "poolId": "1f63b3d0-4fd4-b4fe-1ed6-c62f5f20d975", - "region": "us-east-1", - "serviceLevel": "extreme", - "sizeInBytes": 40000000000000000, - "state": "available", - "vendorID": "Dummy" - } - get_aws_api.return_value = my_pool - get_put_api.return_value = my_pool, None - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print('Info: test_update_aws_netapp_cvs_pool_pass: %s' % repr(exc.value)) - assert exc.value.args[0]['changed'] - - 
@patch('ansible_collections.netapp.aws.plugins.modules.aws_netapp_cvs_pool.NetAppAWSCVS.get_aws_netapp_cvs_pool') - @patch('ansible_collections.netapp.aws.plugins.module_utils.netapp.AwsCvsRestAPI.put') - def test_update_aws_netapp_cvs_pool_fail(self, get_put_api, get_aws_api): - set_module_args(self.set_args_update_aws_netapp_cvs_pool()) - my_obj = pool_module() - my_pool = { - "name": "Dummyname", - "poolId": "1f63b3d0-4fd4-b4fe-1ed6-c62f5f20d975", - "region": "us-east-1", - "serviceLevel": "extreme", - "sizeInBytes": 40000000000000000, - "state": "available", - "vendorID": "Dummy" - } - get_put_api.return_value = my_pool, "Error" - get_aws_api.return_value = my_pool - with pytest.raises(AnsibleFailJson) as exc: - my_obj.apply() - print('Info: test_update_aws_netapp_cvs_pool_fail: %s' % repr(exc.value)) - assert exc.value.args[0]['msg'] is not None - - @patch('ansible_collections.netapp.aws.plugins.modules.aws_netapp_cvs_pool.NetAppAWSCVS.get_aws_netapp_cvs_pool') - @patch('ansible_collections.netapp.aws.plugins.module_utils.netapp.AwsCvsRestAPI.post') - def test_create_aws_netapp_cvs_pool_pass(self, get_post_api, get_aws_api): - set_module_args(self.set_args_create_aws_netapp_cvs_pool()) - my_obj = pool_module() - get_aws_api.return_value = None - get_post_api.return_value = None, None - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print('Info: test_create_aws_netapp_cvs_pool_pass: %s' % repr(exc.value)) - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.aws.plugins.modules.aws_netapp_cvs_pool.NetAppAWSCVS.get_aws_netapp_cvs_pool') - @patch('ansible_collections.netapp.aws.plugins.module_utils.netapp.AwsCvsRestAPI.post') - def test_create_aws_netapp_cvs_pool_fail(self, get_post_api, get_aws_api): - set_module_args(self.set_args_create_aws_netapp_cvs_pool()) - my_obj = pool_module() - my_pool = { - "name": "Dummyname", - "poolId": "1f63b3d0-4fd4-b4fe-1ed6-c62f5f20d975", - "region": "us-east-1", - "serviceLevel": 
"extreme", - "sizeInBytes": 40000000000000000, - "state": "available", - "vendorID": "Dummy" - } - get_post_api.return_value = my_pool, "Error" - get_aws_api.return_value = None - with pytest.raises(AnsibleFailJson) as exc: - my_obj.apply() - print('Info: test_create_aws_netapp_cvs_pool_fail: %s' % repr(exc.value)) - assert exc.value.args[0]['msg'] is not None - - @patch('ansible_collections.netapp.aws.plugins.modules.aws_netapp_cvs_pool.NetAppAWSCVS.get_aws_netapp_cvs_pool') - @patch('ansible_collections.netapp.aws.plugins.module_utils.netapp.AwsCvsRestAPI.delete') - def test_delete_aws_netapp_cvs_pool_pass(self, get_delete_api, get_aws_api): - set_module_args(self.set_args_delete_aws_netapp_cvs_pool()) - my_obj = pool_module() - my_pool = { - "name": "Dummyname", - "poolId": "1f63b3d0-4fd4-b4fe-1ed6-c62f5f20d975", - "region": "us-east-1", - "serviceLevel": "extreme", - "sizeInBytes": 40000000000000000, - "state": "available", - "vendorID": "Dummy" - } - get_aws_api.return_value = my_pool - get_delete_api.return_value = None, None - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print('Info: test_delete_aws_netapp_cvs_pool_pass: %s' % repr(exc.value)) - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.aws.plugins.modules.aws_netapp_cvs_pool.NetAppAWSCVS.get_aws_netapp_cvs_pool') - @patch('ansible_collections.netapp.aws.plugins.module_utils.netapp.AwsCvsRestAPI.delete') - def test_delete_aws_netapp_cvs_pool_fail(self, get_delete_api, get_aws_api): - set_module_args(self.set_args_delete_aws_netapp_cvs_pool()) - my_obj = pool_module() - my_pool = { - "name": "Dummyname", - "poolId": "1f63b3d0-4fd4-b4fe-1ed6-c62f5f20d975", - "region": "us-east-1", - "serviceLevel": "extreme", - "sizeInBytes": 40000000000000000, - "state": "available", - "vendorID": "Dummy" - } - get_delete_api.return_value = my_pool, "Error" - get_aws_api.return_value = my_pool - with pytest.raises(AnsibleFailJson) as exc: - my_obj.apply() - print('Info: 
test_delete_aws_netapp_cvs_pool_fail: %s' % repr(exc.value)) - assert exc.value.args[0]['msg'] is not None diff --git a/ansible_collections/netapp/aws/tests/unit/plugins/modules/test_aws_netapp_cvs_snapshots.py b/ansible_collections/netapp/aws/tests/unit/plugins/modules/test_aws_netapp_cvs_snapshots.py deleted file mode 100644 index bb825a2ee..000000000 --- a/ansible_collections/netapp/aws/tests/unit/plugins/modules/test_aws_netapp_cvs_snapshots.py +++ /dev/null @@ -1,147 +0,0 @@ -# (c) 2019, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -''' unit tests NetApp AWS CVS Snapshots Ansible module: aws_netapp_cvs_snapshots''' - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type -import json -import pytest -import sys - -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -from ansible_collections.netapp.aws.tests.unit.compat import unittest -from ansible_collections.netapp.aws.tests.unit.compat.mock import patch -import ansible_collections.netapp.aws.plugins.module_utils.netapp as netapp_utils - -from ansible_collections.netapp.aws.plugins.modules.aws_netapp_cvs_snapshots \ - import AwsCvsNetappSnapshot as snapshot_module - - -if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7): - pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not be available') - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - 
"""function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - - def set_default_args_fail_check(self): - return dict({ - 'name': 'TestFilesystem', - 'fileSystemId': 'standard', - 'from_name': 'from_TestFilesystem', - 'region': 'us-east-1', - 'api_url': 'hostname.com', - 'api_key': 'myapikey', - 'secret_key': 'mysecretkey' - }) - - def set_default_args_pass_check(self): - return dict({ - 'state': 'present', - 'name': 'testSnapshot', - 'fileSystemId': 'standard', - 'from_name': 'from_TestFilesystem', - 'region': 'us-east-1', - 'api_url': 'hostname.com', - 'api_key': 'myapikey', - 'secret_key': 'mysecretkey' - }) - - def set_args_create_aws_netapp_cvs_snapshots(self): - return dict({ - 'state': 'present', - 'name': 'testSnapshot', - 'fileSystemId': '123-4213-432-432', - 'region': 'us-east-1', - 'api_url': 'hostname.com', - 'api_key': 'myapikey', - 'secret_key': 'mysecretkey' - }) - - def set_args_delete_aws_netapp_cvs_snapshots(self): - return dict({ - 'state': 'absent', - 'name': 'testSnapshot', - 'region': 'us-east-1', - 'api_url': 'hostname.com', - 'api_key': 'myapikey', - 'secret_key': 'mysecretkey' - }) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args(self.set_default_args_fail_check()) - snapshot_module() - print('Info: 
test_module_fail_when_required_args_missing: %s' % exc.value.args[0]['msg']) - - def test_module_fail_when_required_args_present(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleExitJson) as exc: - set_module_args(self.set_default_args_pass_check()) - snapshot_module() - exit_json(changed=True, msg="Induced arguments check") - print('Info: test_module_fail_when_required_args_present: %s' % exc.value.args[0]['msg']) - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.aws.plugins.modules.aws_netapp_cvs_snapshots.AwsCvsNetappSnapshot.get_snapshot_id') - @patch('ansible_collections.netapp.aws.plugins.modules.aws_netapp_cvs_snapshots.AwsCvsNetappSnapshot.get_filesystem_id') - @patch('ansible_collections.netapp.aws.plugins.module_utils.netapp.AwsCvsRestAPI.post') - def test_create_aws_netapp_cvs_snapshots_pass(self, get_post_api, get_filesystem_id, get_snapshot_id): - set_module_args(self.set_args_create_aws_netapp_cvs_snapshots()) - my_obj = snapshot_module() - get_filesystem_id.return_value = 'fiesystemName' - get_snapshot_id.return_value = None - get_post_api.return_value = None, None - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print('Info: test_create_aws_netapp_cvs_snapshots_pass: %s' % repr(exc.value.args[0])) - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.aws.plugins.modules.aws_netapp_cvs_snapshots.AwsCvsNetappSnapshot.get_snapshot_id') - @patch('ansible_collections.netapp.aws.plugins.module_utils.netapp.AwsCvsRestAPI.delete') - def test_delete_aws_netapp_cvs_snapshots_pass(self, get_post_api, get_snapshot_id): - set_module_args(self.set_args_delete_aws_netapp_cvs_snapshots()) - my_obj = snapshot_module() - get_snapshot_id.return_value = "1f63b3d0-4fd4-b4fe-1ed6-c62f5f20d975" - get_post_api.return_value = None, None - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print('Info: test_create_aws_netapp_cvs_snapshots_pass: %s' % 
repr(exc.value.args[0])) - assert exc.value.args[0]['changed'] diff --git a/ansible_collections/netapp/aws/tests/unit/requirements.txt b/ansible_collections/netapp/aws/tests/unit/requirements.txt deleted file mode 100644 index b754473a9..000000000 --- a/ansible_collections/netapp/aws/tests/unit/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -requests ; python_version >= '2.7' diff --git a/ansible_collections/netapp/azure/.github/ISSUE_TEMPLATE/bug_report.yml b/ansible_collections/netapp/azure/.github/ISSUE_TEMPLATE/bug_report.yml deleted file mode 100644 index ed35f3ddd..000000000 --- a/ansible_collections/netapp/azure/.github/ISSUE_TEMPLATE/bug_report.yml +++ /dev/null @@ -1,210 +0,0 @@ ---- -name: 🐛 Bug report -description: Create a report to help us improve - -body: -- type: markdown - attributes: - value: > - **Thank you for wanting to report a bug in netapp.azure!** - - - ⚠ - Verify first that your issue is not [already reported on - GitHub][issue search] and keep in mind that we may have to keep - the current behavior because [every change breaks someone's - workflow][XKCD 1172]. - We try to be mindful about this. - - Also test if the latest release and devel branch are affected too. - - - **Tip:** If you are seeking community support, please consider - [Join our Slack community][ML||IRC]. - - - - [ML||IRC]: - https://join.slack.com/t/netapppub/shared_invite/zt-njcjx2sh-1VR2mEDvPcJAmPutOnP~mg - - [issue search]: ../search?q=is%3Aissue&type=issues - - [XKCD 1172]: https://xkcd.com/1172/ - - -- type: textarea - attributes: - label: Summary - description: Explain the problem briefly below. - placeholder: >- - When I try to do X with netapp.azure from the devel branch on GitHub, Y - breaks in a way Z under the env E. Here are all the details I know - about this problem... 
- validations: - required: true - -- type: input - attributes: - label: Component Name - description: > - Write the short name of the rst file, module, plugin, task or - feature below, *use your best guess if unsure*. - - - **Tip:** Cannot find it in this repository? Please be advised that - the source for some parts of the documentation are hosted outside - of this repository. If the page you are reporting describes - modules/plugins/etc that are not officially supported by the - Ansible Core Engineering team, there is a good chance that it is - coming from one of the [Ansible Collections maintained by the - community][collections org]. If this is the case, please make sure - to file an issue under the appropriate project there instead. - - - [collections org]: /ansible-collections - placeholder: dnf, apt, yum, pip, user etc. - validations: - required: true - -- type: textarea - attributes: - label: Ansible Version - description: >- - Paste verbatim output from `ansible --version` below, under - the prompt line. Please don't wrap it with tripple backticks — your - whole input will be turned into a code snippet automatically. - render: console - value: | - $ ansible --version - placeholder: | - $ ansible --version - ansible [core 2.11.0b4.post0] (detached HEAD ref: refs/) last updated 2021/04/02 00:33:35 (GMT +200) - config file = None - configured module search path = ['~/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules'] - ansible python module location = ~/src/github/ansible/ansible/lib/ansible - ansible collection location = ~/.ansible/collections:/usr/share/ansible/collections - executable location = bin/ansible - python version = 3.9.0 (default, Oct 26 2020, 13:08:59) [GCC 10.2.0] - jinja version = 2.11.3 - libyaml = True - validations: - required: true - -- type: textarea - attributes: - label: Azure NetApp Files Collection Version - description: >- - Azure NetApp Files Collection Version. 
Run `ansible-galaxy collection` and copy the entire output - render: console - value: | - $ ansible-galaxy collection list - validations: - required: true - -- type: textarea - attributes: - label: Playbook - description: >- - The task from the playbook that is give you the issue - render: console - validations: - required: true - -- type: textarea - attributes: - label: Steps to Reproduce - description: | - Describe exactly how to reproduce the problem, using a minimal test-case. It would *really* help us understand your problem if you could also pased any playbooks, configs and commands you used. - - **HINT:** You can paste https://gist.github.com links for larger files. - value: | - <!--- Paste example playbooks or commands between quotes below --> - ```yaml (paste below) - - ``` - placeholder: | - 1. Implement the following playbook: - - ```yaml - --- - # ping.yml - - hosts: all - gather_facts: false - tasks: - - ping: - ... - ``` - 2. Then run `ANSIBLE_DEBUG=1 ansible-playbook ping.yml -vvvvv` - 3. An error occurs. - validations: - required: true - -- type: textarea - attributes: - label: Expected Results - description: >- - Describe what you expected to happen when running the steps above. - placeholder: >- - I expected X to happen because I assumed Y and was shocked - that it did not. - validations: - required: true - -- type: textarea - attributes: - label: Actual Results - description: | - Describe what actually happened. If possible run with extra verbosity (`-vvvv`). - - Paste verbatim command output and don't wrap it with tripple backticks — your - whole input will be turned into a code snippet automatically. - render: console - placeholder: >- - Certificate did not match expected hostname: files.pythonhosted.org. 
Certificate: {'notAfter': 'Apr 28 19:20:25 2021 GMT', 'subjectAltName': ((u'DNS', 'r.ssl.fastly.net'), (u'DNS', '*.catchpoint.com'), (u'DNS', '*.cnn.io'), (u'DNS', '*.dollarshaveclub.com'), (u'DNS', '*.eater.com'), (u'DNS', '*.fastly.picmonkey.com'), (u'DNS', '*.files.saymedia-content.com'), (u'DNS', '*.ft.com'), (u'DNS', '*.meetupstatic.com'), (u'DNS', '*.nfl.com'), (u'DNS', '*.pagar.me'), (u'DNS', '*.picmonkey.com'), (u'DNS', '*.realself.com'), (u'DNS', '*.sbnation.com'), (u'DNS', '*.shakr.com'), (u'DNS', '*.streamable.com'), (u'DNS', '*.surfly.com'), (u'DNS', '*.theverge.com'), (u'DNS', '*.thrillist.com'), (u'DNS', '*.vox-cdn.com'), (u'DNS', '*.vox.com'), (u'DNS', '*.voxmedia.com'), (u'DNS', 'eater.com'), (u'DNS', 'ft.com'), (u'DNS', 'i.gse.io'), (u'DNS', 'picmonkey.com'), (u'DNS', 'realself.com'), (u'DNS', 'static.wixstatic.com'), (u'DNS', 'streamable.com'), (u'DNS', 'surfly.com'), (u'DNS', 'theverge.com'), (u'DNS', 'vox-cdn.com'), (u'DNS', 'vox.com'), (u'DNS', 'www.joyent.com')), 'subject': ((('countryName', u'US'),), (('stateOrProvinceName', u'California'),), (('localityName', u'San Francisco'),), (('organizationName', u'Fastly, Inc'),), (('commonName', u'r.ssl.fastly.net'),))} - Exception: - Traceback (most recent call last): - File "/usr/local/lib/python2.6/dist-packages/pip/basecommand.py", line 215, in main - status = self.run(options, args) - File "/usr/local/lib/python2.6/dist-packages/pip/commands/install.py", line 335, in run - wb.build(autobuilding=True) - File "/usr/local/lib/python2.6/dist-packages/pip/wheel.py", line 749, in build - self.requirement_set.prepare_files(self.finder) - File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 380, in prepare_files - ignore_dependencies=self.ignore_dependencies)) - File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 620, in _prepare_file - session=self.session, hashes=hashes) - File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 821, in unpack_url - 
hashes=hashes - File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 659, in unpack_http_url - hashes) - File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 853, in _download_http_url - stream=True, - File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 521, in get - return self.request('GET', url, **kwargs) - File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 386, in request - return super(PipSession, self).request(method, url, *args, **kwargs) - File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 508, in request - resp = self.send(prep, **send_kwargs) - File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 618, in send - r = adapter.send(request, **kwargs) - File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/cachecontrol/adapter.py", line 47, in send - resp = super(CacheControlAdapter, self).send(request, **kw) - File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/adapters.py", line 506, in send - raise SSLError(e, request=request) - SSLError: HTTPSConnectionPool(host='files.pythonhosted.org', port=443): Max retries exceeded with url: /packages/ef/ab/aa12712415809bf698e719b307419f953e25344e8f42d557533d7a02b276/netapp_lib-2020.7.16-py2-none-any.whl (Caused by SSLError(CertificateError("hostname 'files.pythonhosted.org' doesn't match either of 'r.ssl.fastly.net', '*.catchpoint.com', '*.cnn.io', '*.dollarshaveclub.com', '*.eater.com', '*.fastly.picmonkey.com', '*.files.saymedia-content.com', '*.ft.com', '*.meetupstatic.com', '*.nfl.com', '*.pagar.me', '*.picmonkey.com', '*.realself.com', '*.sbnation.com', '*.shakr.com', '*.streamable.com', '*.surfly.com', '*.theverge.com', '*.thrillist.com', '*.vox-cdn.com', '*.vox.com', '*.voxmedia.com', 'eater.com', 'ft.com', 'i.gse.io', 'picmonkey.com', 'realself.com', 'static.wixstatic.com', 'streamable.com', 'surfly.com', 'theverge.com', 'vox-cdn.com', 'vox.com', 
'www.joyent.com'",),)) - ERROR: Command "/usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 2. - ERROR: Command "docker exec d47eb360db4ce779c1f690db964655b76e68895c4360ff252c46fe7fe6f5c75a /usr/bin/env ANSIBLE_TEST_CONTENT_ROOT=/root/ansible_collections/netapp/ontap LC_ALL=en_US.UTF-8 /usr/bin/python3.6 /root/ansible/bin/ansible-test units --metadata tests/output/.tmp/metadata-9i2qfrcl.json --truncate 200 --redact --color yes --requirements --python default --requirements-mode only" returned exit status 1. - validations: - required: true - - -- type: markdown - attributes: - value: > - *One last thing...* - - - Thank you for your collaboration! - - -... diff --git a/ansible_collections/netapp/azure/.github/ISSUE_TEMPLATE/feature_request.yml b/ansible_collections/netapp/azure/.github/ISSUE_TEMPLATE/feature_request.yml deleted file mode 100644 index 621d52529..000000000 --- a/ansible_collections/netapp/azure/.github/ISSUE_TEMPLATE/feature_request.yml +++ /dev/null @@ -1,100 +0,0 @@ ---- -name: ✨ Feature request -description: Suggest an idea for this project - -body: -- type: markdown - attributes: - value: > - **Thank you for wanting to suggest a feature for netapp.azure!** - - 💡 - Before you go ahead with your request, please first consider if it - would be useful for majority of the netapp.azure users. As a - general rule of thumb, any feature that is only of interest to a - small sub group should be [implemented in a third-party Ansible - Collection][contribute to collections] or maybe even just your - project alone. Be mindful of the fact that the essential - netapp.azure features have a broad impact. - - - <details> - <summary> - ❗ Every change breaks someone's workflow. 
- </summary> - - - [![❗ Every change breaks someone's workflow. - ](https://imgs.xkcd.com/comics/workflow.png) - ](https://xkcd.com/1172/) - </details> - - - ⚠ - Verify first that your idea is not [already requested on - GitHub][issue search]. - - Also test if the main branch does not already implement this. - - -- type: textarea - attributes: - label: Summary - description: > - Describe the new feature/improvement you would like briefly below. - - - What's the problem this feature will solve? - - What are you trying to do, that you are unable to achieve - with netapp.azure as it currently stands? - - - * Provide examples of real-world use cases that this would enable - and how it solves the problem you described. - - * How do you solve this now? - - * Have you tried to work around the problem using other tools? - - * Could there be a different approach to solving this issue? - - placeholder: >- - I am trying to do X with netapp.azure from the devel branch on GitHub and - I think that implementing a feature Y would be very helpful for me and - every other user of netapp.azure because of Z. - validations: - required: true - -- type: input - attributes: - label: Component Name - description: > - Write the short name of the module, plugin, task or feature below, - *use your best guess if unsure*. - - - [collections org]: /ansible-collections - placeholder: dnf, apt, yum, pip, user etc. - validations: - required: true - -- type: textarea - attributes: - label: Additional Information - description: | - Describe how the feature would be used, why it is needed and what it would solve. - - **HINT:** You can paste https://gist.github.com links for larger files. - value: | - <!--- Paste example playbooks or commands between quotes below --> - ```yaml (paste below) - - ``` - placeholder: >- - I asked on https://stackoverflow.com/.... and the community - advised me to do X, Y and Z. - validations: - required: true - -... 
diff --git a/ansible_collections/netapp/azure/.github/workflows/codeql-analysis.yml b/ansible_collections/netapp/azure/.github/workflows/codeql-analysis.yml deleted file mode 100644 index eea54a872..000000000 --- a/ansible_collections/netapp/azure/.github/workflows/codeql-analysis.yml +++ /dev/null @@ -1,71 +0,0 @@ -# For most projects, this workflow file will not need changing; you simply need -# to commit it to your repository. -# -# You may wish to alter this file to override the set of languages analyzed, -# or to provide custom queries or build logic. -# -# ******** NOTE ******** -# We have attempted to detect the languages in your repository. Please check -# the `language` matrix defined below to confirm you have the correct set of -# supported CodeQL languages. -# -name: "CodeQL" - -on: - push: - branches: [ main ] - pull_request: - # The branches below must be a subset of the branches above - branches: [ main ] - schedule: - - cron: '34 20 * * 6' - -jobs: - analyze: - name: Analyze - runs-on: ubuntu-latest - permissions: - actions: read - contents: read - security-events: write - - strategy: - fail-fast: false - matrix: - language: [ 'python' ] - # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ] - # Learn more: - # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed - - steps: - - name: Checkout repository - uses: actions/checkout@v2 - - # Initializes the CodeQL tools for scanning. - - name: Initialize CodeQL - uses: github/codeql-action/init@v1 - with: - languages: ${{ matrix.language }} - # If you wish to specify custom queries, you can do so here or in a config file. - # By default, queries listed here will override any specified in a config file. - # Prefix the list here with "+" to use these queries and those in the config file. 
- # queries: ./path/to/local/query, your-org/your-repo/queries@main - - # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). - # If this step fails, then you should remove it and run the build manually (see below) - - name: Autobuild - uses: github/codeql-action/autobuild@v1 - - # ℹ️ Command-line programs to run using the OS shell. - # 📚 https://git.io/JvXDl - - # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines - # and modify them (or add more) to build your code if your project - # uses a compiled language - - #- run: | - # make bootstrap - # make release - - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v1 diff --git a/ansible_collections/netapp/azure/.github/workflows/coverage.yml b/ansible_collections/netapp/azure/.github/workflows/coverage.yml deleted file mode 100644 index 924232e4c..000000000 --- a/ansible_collections/netapp/azure/.github/workflows/coverage.yml +++ /dev/null @@ -1,45 +0,0 @@ -name: NetApp.azure Ansible Coverage - -on: - push: - pull_request: - schedule: - - cron: '0 6 * * *' - -jobs: - sanity: - name: Coverage on Azure - runs-on: ubuntu-latest - - steps: - - name: Check out code - uses: actions/checkout@v2 - - - name: Set up Python - uses: actions/setup-python@v2 - with: - python-version: 3.8 - - - name: Install ansible stable-2.11 - run: pip install https://github.com/ansible/ansible/archive/stable-2.11.tar.gz --disable-pip-version-check - - - name: Make directory to make ansible-test happy - run: | - pwd - mkdir -p ansible_collections/netapp/azure/ - rsync -av . 
ansible_collections/netapp/azure/ --exclude ansible_collections/netapp/azure/ - - - name: Run Unit Tests - run: ansible-test units --coverage --color --docker --python 3.8 - working-directory: ansible_collections/netapp/azure/ - - # ansible-test support producing code coverage date - - name: Generate coverage report - run: ansible-test coverage xml -v --requirements --group-by command --group-by version - working-directory: ansible_collections/netapp/azure/ - - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v2 - with: - working-directory: ansible_collections/netapp/azure/ - verbose: true
\ No newline at end of file diff --git a/ansible_collections/netapp/azure/.github/workflows/main.yml b/ansible_collections/netapp/azure/.github/workflows/main.yml deleted file mode 100644 index 37408fb67..000000000 --- a/ansible_collections/netapp/azure/.github/workflows/main.yml +++ /dev/null @@ -1,67 +0,0 @@ -name: NetApp Ansible CI - -on: - push: - pull_request: - schedule: - - cron: '0 6 * * *' - -jobs: - sanity_azure: - name: Sanity (${{ matrix.ansible }} on Azure ANF) - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - ansible: - - stable-2.9 - - stable-2.10 - - stable-2.11 - - stable-2.12 - - stable-2.13 - - devel - collection: [azure] - - steps: - - name: Check out code - uses: actions/checkout@v2 - - - name: Set up Python - uses: actions/setup-python@v2 - with: - # Ansible 2.14 requires 3.9 as a minimum - python-version: 3.9 - - - name: Install ansible (${{ matrix.ansible }}) - run: pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible }}.tar.gz --disable-pip-version-check - - - name: Delete collection tar file (azure) - run: rm -f /tmp/netapp-azure* - - - name: Check collection path (azure) - run: | - pwd - ls - - - name: Build collection (azure) - run: ansible-galaxy collection build --output-path /tmp . - - - name: Install collection (azure) - run: ansible-galaxy collection install /tmp/netapp-azure* - - - name: Delete collection tar file (azure) - run: rm -f /tmp/netapp-azure* - - - name: Run sanity tests (azure) - run: ansible-test sanity --docker -v --color - working-directory: /home/runner/.ansible/collections/ansible_collections/netapp/azure - - - name: Make directory to make ansible-test happy - run: | - pwd - mkdir -p ansible_collections/netapp/azure/ - rsync -av . 
ansible_collections/netapp/azure/ --exclude ansible_collections/netapp/azure/ - - - name: Run Unit Tests - run: ansible-test units --docker -v --color - working-directory: ansible_collections/netapp/azure/ diff --git a/ansible_collections/netapp/azure/CHANGELOG.rst b/ansible_collections/netapp/azure/CHANGELOG.rst deleted file mode 100644 index fc18835a2..000000000 --- a/ansible_collections/netapp/azure/CHANGELOG.rst +++ /dev/null @@ -1,171 +0,0 @@ -================================================= -Azure NetApp Files (ANF) Collection Release Notes -================================================= - -.. contents:: Topics - - -v21.10.0 -======== - -Minor Changes -------------- - -- PR1 - allow usage of Ansible module group defaults - for Ansible 2.12+. - -v21.9.0 -======= - -Minor Changes -------------- - -- azure_rm_netapp_volume - new option ``feature_flags`` to selectively enable/disable a feature. - -Bugfixes --------- - -- azure_rm_netapp_volume - 'Change Ownership' is not permitted when creating NFSv4.1 volume with latest azure-mgmt-netapp package (4.0.0). - -v21.8.1 -======= - -Bugfixes --------- - -- Hub Automation cannot generate documentation (cannot use doc fragments from another collection). - -v21.8.0 -======= - -Bugfixes --------- - -- fix CI pipeline as azcollection does not support python 2.6. -- fix CI pipeline as ignores are not required with latest azcollection. - -v21.7.0 -======= - -Bugfixes --------- - -- fix CI pipeline to work with azcollection, and isolate UTs from azcollection. - -v21.6.0 -======= - -Minor Changes -------------- - -- azure_rm_netapp_account - support additional authentication schemes provided by AzureRMModuleBase. -- azure_rm_netapp_capacity_pool - support additional authentication schemes provided by AzureRMModuleBase, and tags. -- azure_rm_netapp_capacity_pool - wait for completion when creating, modifying, or deleting a pool. 
-- azure_rm_netapp_snapshot - support additional authentication schemes provided by AzureRMModuleBase. -- azure_rm_netapp_snapshot - wait for completion when creating, modifying, or deleting a pool. -- azure_rm_netapp_volume - support additional authentication schemes provided by AzureRMModuleBase, and tags. - -v21.5.0 -======= - -Minor Changes -------------- - -- azure_rm_netapp_volume - enable changes in volume size. -- azure_rm_netapp_volume - rename msg to mount_path, as documented in RETURN. - -v21.3.0 -======= - -Minor Changes -------------- - -- azure_rm_netapp_account - new option ``active_directories`` to support SMB volumes. -- azure_rm_netapp_account - new suboptions ``ad_name``, ``kdc_ip``, ``service_root_ca_certificate``` for Active Directory. -- azure_rm_netapp_volume - new option ``protocol_types`` to support SMB volumes. - -Bugfixes --------- - -- azure_rm_netapp_account - wait for job completion for asynchroneous requests, and report belated errors. -- support for azure-mgmt-netapp 1.0.0, while maintaining compatibility with 0.10.0. - -v21.2.0 -======= - -Minor Changes -------------- - -- azure_rm_netapp_account - new option ``active_directories`` to support SMB volumes. -- azure_rm_netapp_volume - new option ``protocol_types`` to support SMB volumes. -- azure_rm_netapp_volume - new option ``subnet_name`` as subnet_id is ambiguous. subnet_id is now aliased to subnet_name. - -Bugfixes --------- - -- azure_rm_netapp_volume - fix 'Nonetype' object is not subscriptable exception when mount target is not created. - -v20.8.0 -======= - -Minor Changes -------------- - -- azure_rm_netapp_capacity_pool - Updated ANF capacity pool modify function for size parameter mandatory issue. -- use a three group format for version_added. So 2.7 becomes 2.7.0. Same thing for 2.8 and 2.9. - -v20.7.0 -======= - -Bugfixes --------- - -- azure_rm_netapp_capacity_pool - fixed idempotency for delete operation. 
- -v20.6.0 -======= - -Minor Changes -------------- - -- azure_rm_netapp_capacity_pool - now allows modify for size. -- azure_rm_netapp_volume - now returns complete mount_path of the volume specified. - -v20.5.0 -======= - -Minor Changes -------------- - -- azure_rm_netapp_account - new option ``tags``. -- azure_rm_netapp_capacity_pool - new option ``service_level``. -- azure_rm_netapp_volume - new option ``size``. -- azure_rm_netapp_volume - new option ``vnet_resource_group_for_subnet``, resource group for virtual_network and subnet_id to be used. -- azure_rm_netapp_volume - now returns mount_path of the volume specified. - -v20.4.0 -======= - -Bugfixes --------- - -- fix changes to azure-mgmt-netapp as per new release. -- removed ONTAP dependency import. - -v20.2.0 -======= - -Bugfixes --------- - -- galaxy.yml - fix path to github repository. - -v19.10.0 -======== - -New Modules ------------ - -- netapp.azure.azure_rm_netapp_account - Manage NetApp Azure Files Account -- netapp.azure.azure_rm_netapp_capacity_pool - Manage NetApp Azure Files capacity pool -- netapp.azure.azure_rm_netapp_snapshot - Manage NetApp Azure Files Snapshot -- netapp.azure.azure_rm_netapp_volume - Manage NetApp Azure Files Volume diff --git a/ansible_collections/netapp/azure/COPYING b/ansible_collections/netapp/azure/COPYING deleted file mode 100644 index 94a9ed024..000000000 --- a/ansible_collections/netapp/azure/COPYING +++ /dev/null @@ -1,674 +0,0 @@ - GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/> - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU General Public License is a free, copyleft license for -software and other kinds of works. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. 
By contrast, -the GNU General Public License is intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. You must make sure that they, too, receive -or can get the source code. And you must show them these terms so they -know their rights. - - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. - - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. 
- - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. -States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. 
- - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. 
- - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. 
The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. 
- - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. 
- - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. - - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. 
This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. 
For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. - - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. 
Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. 
- - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. 
- - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. - - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. 
Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. 
- - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. - - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. 
- - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. 
You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. - - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single -combined work, and to convey the resulting work. 
The terms of this -License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. 
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. 
- - <one line to give the program's name and a brief idea of what it does.> - Copyright (C) <year> <name of author> - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see <http://www.gnu.org/licenses/>. - -Also add information on how to contact you by electronic and paper mail. - - If the program does terminal interaction, make it output a short -notice like this when it starts in an interactive mode: - - <program> Copyright (C) <year> <name of author> - This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, your program's commands -might be different; for a GUI interface, you would use an "about box". - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU GPL, see -<http://www.gnu.org/licenses/>. - - The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications with -the library. 
If this is what you want to do, use the GNU Lesser General -Public License instead of this License. But first, please read -<http://www.gnu.org/philosophy/why-not-lgpl.html>. diff --git a/ansible_collections/netapp/azure/FILES.json b/ansible_collections/netapp/azure/FILES.json deleted file mode 100644 index b91cf6173..000000000 --- a/ansible_collections/netapp/azure/FILES.json +++ /dev/null @@ -1,705 +0,0 @@ -{ - "files": [ - { - "name": ".", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "requirements.txt", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "3fc6781dfd99664ea3df19e10ff3fd4cb5fcb9a1ffab3974bf29a072688fac2b", - "format": 1 - }, - { - "name": "plugins", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "plugins/doc_fragments", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "plugins/doc_fragments/netapp.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "05eeac3f09944a70935c66674fecd48fc0992c2706bc4168b10f54dff6517b6e", - "format": 1 - }, - { - "name": "plugins/doc_fragments/azure.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "e2fddafaafe92473737d8a9b23ad23ce82703b0144a9930f8ace78f7138144f9", - "format": 1 - }, - { - "name": "plugins/doc_fragments/azure_tags.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "e8c9d720dbc7605d3ee66799e8e81d0886e404c9a07b6b9b8edc844e0646de64", - "format": 1 - }, - { - "name": "plugins/module_utils", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "plugins/module_utils/azure_rm_netapp_common.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "7dffba3ae74d685e191305887df483f7bcded81cf80618a920c3609de0828777", - "format": 1 - }, - { - "name": "plugins/module_utils/netapp_module.py", - "ftype": "file", - "chksum_type": "sha256", - 
"chksum_sha256": "44ebf2058eef2cc291d84bda55a9d22745a54ea08244d2c3fa498c835a60412f", - "format": 1 - }, - { - "name": "plugins/modules", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "plugins/modules/azure_rm_netapp_volume.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "57f7917d51e630e28625b9aed1e055fc993912079ec84066b9b4dc00a79fc076", - "format": 1 - }, - { - "name": "plugins/modules/azure_rm_netapp_snapshot.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "cb97e5fe758ded5c061c587caa4b5ece7e5093aa8735d28b5915b8ffae10493d", - "format": 1 - }, - { - "name": "plugins/modules/azure_rm_netapp_account.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "371e98c5eb914d5b5b29d5a38a0128a473a56503b24a0117cad094cd35fa4f68", - "format": 1 - }, - { - "name": "plugins/modules/azure_rm_netapp_capacity_pool.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "45ae9bf9be901a5744da83e0476439cc4afc583f996b5116e63a3ff1388789aa", - "format": 1 - }, - { - "name": "tests", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/unit", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/unit/compat", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/unit/compat/unittest.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "cba95d18c5b39c6f49714eacf1ac77452c2e32fa087c03cf01aacd19ae597b0f", - "format": 1 - }, - { - "name": "tests/unit/compat/builtins.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "0ca4cac919e166b25e601e11acb01f6957dddd574ff0a62569cb994a5ecb63e1", - "format": 1 - }, - { - "name": "tests/unit/compat/__init__.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - 
"format": 1 - }, - { - "name": "tests/unit/compat/mock.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "0af958450cf6de3fbafe94b1111eae8ba5a8dbe1d785ffbb9df81f26e4946d99", - "format": 1 - }, - { - "name": "tests/unit/requirements.txt", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "1776552fe8fe8ec7bc9bb0d8323b4892f297ac3a000d438ffce73c82487eb688", - "format": 1 - }, - { - "name": "tests/unit/plugins", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/unit/plugins/module_utils", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/unit/plugins/module_utils/test_netapp_module.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "26d17aa4e1be7ddd99dd8150f3f72726693835df4480f09f63d665ba4568054d", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_azure_rm_netapp_volume_import.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "6fb1715ddd3de877a50b0f8dd97d5b7e4518ca309f81f188eeedf0da7bd8ed6d", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_azure_rm_netapp_capacity_pool.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "35b9437b1e70cb214f9fb2599f48a2d51fb7f025be21487608a527000cdca225", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_azure_rm_netapp_snapshot.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "7fb834aa39535b9eccaab1c67293e4fd44d96c2b8aec2bf963d3dee0e6e939be", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_azure_rm_netapp_volume.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "42e2a55717c6640dc3343055a758b4f930b9010c9d49351dddcc86e828557d5a", - "format": 1 - }, - { - "name": 
"tests/unit/plugins/modules/test_azure_rm_netapp_account.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "d138b9ecf3ad3b50bc4757b2dce610f770523903da67ace63243b66fe1959857", - "format": 1 - }, - { - "name": "tests/integration", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/integration/targets", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/integration/targets/azure_rm_netapp_volume", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/integration/targets/azure_rm_netapp_volume/tasks", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/integration/targets/azure_rm_netapp_volume/tasks/main.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "37ecd68e07721ba9b164a9cf650354e65d9b522a8fa80ee5dd0a0f85ecbbb82c", - "format": 1 - }, - { - "name": "tests/integration/targets/azure_rm_netapp_volume/meta", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/integration/targets/azure_rm_netapp_volume/meta/main.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "d00bd90476595ae7d97d3ca8f8aa7ff475ebc3dd87d7d12e862bf0527f7bd81f", - "format": 1 - }, - { - "name": "tests/integration/targets/azure_rm_netapp_volume/aliases", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "262ad6ab8a8087f2f21d4f26a2c2d02c347a7edf0fb1af8fdb931ab1c1087bbb", - "format": 1 - }, - { - "name": "tests/integration/targets/azure_rm_netapp_capacity_pool", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/integration/targets/azure_rm_netapp_capacity_pool/tasks", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": 
"tests/integration/targets/azure_rm_netapp_capacity_pool/tasks/main.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "725cf3d8200cc95ad524962e33008627f642ac6cb17daee6d833d7ad4990c904", - "format": 1 - }, - { - "name": "tests/integration/targets/azure_rm_netapp_capacity_pool/meta", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/integration/targets/azure_rm_netapp_capacity_pool/meta/main.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "86ade1ce7ec530fef84440672efaf3b60c253946bb82f5f0a134a9691bc6ffad", - "format": 1 - }, - { - "name": "tests/integration/targets/azure_rm_netapp_capacity_pool/aliases", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "6d10b8feb73c18dc63753fe195137e2505f925ba1cbcdf5df43f67ad8c93f7a3", - "format": 1 - }, - { - "name": "tests/integration/targets/azure_rm_netapp_snapshot", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/integration/targets/azure_rm_netapp_snapshot/tasks", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/integration/targets/azure_rm_netapp_snapshot/tasks/main.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "2c1ad3044c2063456e66ffca7f717b0d5b01561685df50819b3ddab8f256f1e3", - "format": 1 - }, - { - "name": "tests/integration/targets/azure_rm_netapp_snapshot/meta", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/integration/targets/azure_rm_netapp_snapshot/meta/main.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "d00bd90476595ae7d97d3ca8f8aa7ff475ebc3dd87d7d12e862bf0527f7bd81f", - "format": 1 - }, - { - "name": "tests/integration/targets/azure_rm_netapp_snapshot/aliases", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": 
"262ad6ab8a8087f2f21d4f26a2c2d02c347a7edf0fb1af8fdb931ab1c1087bbb", - "format": 1 - }, - { - "name": "tests/integration/targets/azure_rm_netapp_account", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/integration/targets/azure_rm_netapp_account/tasks", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/integration/targets/azure_rm_netapp_account/tasks/main.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "af6c1c6108bde6652e3e6c3fa84bbf33b9e8e8d214fd5e6e2096d8a900993adb", - "format": 1 - }, - { - "name": "tests/integration/targets/azure_rm_netapp_account/meta", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/integration/targets/azure_rm_netapp_account/meta/main.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "d00bd90476595ae7d97d3ca8f8aa7ff475ebc3dd87d7d12e862bf0527f7bd81f", - "format": 1 - }, - { - "name": "tests/integration/targets/azure_rm_netapp_account/aliases", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "262ad6ab8a8087f2f21d4f26a2c2d02c347a7edf0fb1af8fdb931ab1c1087bbb", - "format": 1 - }, - { - "name": "tests/runner", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/runner/requirements", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/runner/requirements/integration.cloud.azure.txt", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "48edd11776e45cc283be0d76efed83271645ff082d0d22c23d8a16ede3f13104", - "format": 1 - }, - { - "name": "tests/runner/requirements/unit.cloud.azure.txt", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "48edd11776e45cc283be0d76efed83271645ff082d0d22c23d8a16ede3f13104", - "format": 1 - }, - { - "name": "tests/runner/requirements/requirements-azure.txt", - 
"ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "be5c6dc9b1fe6ffbec88625d6c225df060724e678bf4b179ee4acb6ba18a9fe2", - "format": 1 - }, - { - "name": "tests/runner/requirements/units.txt", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "48edd11776e45cc283be0d76efed83271645ff082d0d22c23d8a16ede3f13104", - "format": 1 - }, - { - "name": "meta", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "meta/runtime.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "afa4e7501eb80db02661630c468c0dd0a410da1ddb0297b1cbb74fe23d1b6951", - "format": 1 - }, - { - "name": "changelogs", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "changelogs/fragments", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-3935.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "275e9147e14cace400fea4ad8d2d88e2a3c41daa65545cf5ade52ffac5bea3be", - "format": 1 - }, - { - "name": "changelogs/fragments/20.5.0.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "e801f1b985b89427664b5bbc3e86c7f94e0515fe71584171438bdd2e6423f802", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-3704.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "7f7bf4dd5809a694c63075c88409b71d97b3c636fb4c60883e23e1d9c6acf256", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-3505.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "2117a356f790e63f26cbe90c1b8cbc8e66bc4ba44a3cec0d0f219bb3dee6a663", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-3949.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "46256df687411ac1d89895e32713a4830ccffcf1bb823fbf88597481881aa67f", - "format": 1 - }, - { - "name": "changelogs/fragments/20.7.0.yaml", - "ftype": "file", - "chksum_type": 
"sha256", - "chksum_sha256": "95ad8a5b1d71afce933fdd69b3116309a6d9a559416d8ab3641470eb5286ee9f", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-3663.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "ede05991f45025041ce7305e06f27f7c7747b0a5e3c61d11e5775796315ad801", - "format": 1 - }, - { - "name": "changelogs/fragments/20.2.0.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "91918f48a406834778ff7163c92c12dd1802c0620cb681ee66f8a4709444cf5e", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-4001.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "c9b63b1230f04b472b8d55d5bd616c1687972ec14126e643de62788c9788708a", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-3526.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "61cb464ce2c8e6aaf9a74beca5e246e4010ee68f2632eba0347e5cf3267e9510", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-4246.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "87733f8557c5f0c13302e47e18712cc094f8e1cf63665519a340c74baee95e1b", - "format": 1 - }, - { - "name": "changelogs/fragments/20.4.0.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "c4fd861b273aceb9ec0a456ddbae2a6f82bdd6a2244b81e9b1e903a59eaa83bf", - "format": 1 - }, - { - "name": "changelogs/fragments/20.8.0.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "266926d348a95b55d65ff99683f9bdfe7ad1302731c08964bb86ce714272d86c", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-4135.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "3b73cac3f25ff245e6156120be0b828339193515410dfe1746e9fe58b2fc5d1a", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-4070.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "ef62606cf209269c701e931090525e063781b9990853a20f718f250bbccd182d", - "format": 1 - }, - { - "name": 
"changelogs/fragments/20.6.0.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "b5988b539e04f6be548b4f07142e208adc5974ed6d67511f2efcd5f721598124", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-3849.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "cfaa25e04a5dcb6f13b27a52d79dd1ee8a06002d028a86a09184b58c431fc5fd", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-4416.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "4224db573f34caeeb956c8728eb343a47bc2729d898001a4c6a671b780dae1bf", - "format": 1 - }, - { - "name": "changelogs/config.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "fdb1023b66dee056bc12c35cf7326a01c0ce7a8c1eceea032270fd24db9e1467", - "format": 1 - }, - { - "name": "changelogs/changelog.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "edbc85fcd35436d9f71c7b5f3247522276ebc4fb00567a74873b163adcadf020", - "format": 1 - }, - { - "name": "README.md", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "c4071e1bc75bf65da6951045ea60d4bde7036937ac56ff73a32a95764fa9a4a3", - "format": 1 - }, - { - "name": "COPYING", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "8ceb4b9ee5adedde47b31e975c1d90c73ad27b6b165a1dcd80c7c545eb65b903", - "format": 1 - }, - { - "name": ".github", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": ".github/workflows", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": ".github/workflows/coverage.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "081b4ec5fb77d7676357b8600b547e7b2cbad8eb340333305ef5d448d832510b", - "format": 1 - }, - { - "name": ".github/workflows/codeql-analysis.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "b67ebd724c02ab0bc49f55fe9928691ce9000511f58d1b182245c8ea9fa4bc89", - "format": 1 - }, - { - "name": 
".github/workflows/main.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "217f80de01b8091549fa87f7a89e7936f1d91abc6e0398aed2c85cac6deb0448", - "format": 1 - }, - { - "name": ".github/ISSUE_TEMPLATE", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": ".github/ISSUE_TEMPLATE/feature_request.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "4ad81e92ae7b1fbef37afde2fc57466d3d7dd20d5ab9bba1e01e0faac83228cf", - "format": 1 - }, - { - "name": ".github/ISSUE_TEMPLATE/bug_report.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "ef5f731a1c7790ce52a685a93370a5d0a5523bf75b9b0a0f0d9cc50171c60ac0", - "format": 1 - }, - { - "name": "CHANGELOG.rst", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "7ee760ffaaed6d3d476d1eb9fda880bbaf3a2247a5014428c058282b597131a7", - "format": 1 - }, - { - "name": "HACK.md", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "0757d4b54e08f27761851d37143cbe15d58c324db2968fe157546992592bb382", - "format": 1 - } - ], - "format": 1 -}
\ No newline at end of file diff --git a/ansible_collections/netapp/azure/HACK.md b/ansible_collections/netapp/azure/HACK.md deleted file mode 100644 index cbd239e0e..000000000 --- a/ansible_collections/netapp/azure/HACK.md +++ /dev/null @@ -1,13 +0,0 @@ -Because of an issue in Ansible, Hub Automation cannot use doc fragments from an external collection as: -``` - - azure.azcollection.azure - - azure.azcollection.azure_tags -``` - -Red Hat asked us to make local copies of the azcollection doc fragments. They are in -``` -ansible_collections/netapp/azure/plugins/doc_fragments/azure.py -ansible_collections/netapp/azure/plugins/doc_fragments/azure_tags.py -``` - -Once the Ansible issue is fixed, we should remove these copies, as they may be out of sync with the azcollection. diff --git a/ansible_collections/netapp/azure/MANIFEST.json b/ansible_collections/netapp/azure/MANIFEST.json deleted file mode 100644 index ecfd4c592..000000000 --- a/ansible_collections/netapp/azure/MANIFEST.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "collection_info": { - "namespace": "netapp", - "name": "azure", - "version": "21.10.1", - "authors": [ - "NetApp Ansible Team <ng-ansibleteam@netapp.com>" - ], - "readme": "README.md", - "tags": [ - "storage", - "cloud", - "netapp", - "cvs", - "anf", - "azure" - ], - "description": "Azure NetApp Files (ANF)", - "license": [], - "license_file": "COPYING", - "dependencies": { - "azure.azcollection": ">=1.0.0" - }, - "repository": "https://github.com/ansible-collections/netapp.azure", - "documentation": null, - "homepage": "https://netapp.io/configuration-management-and-automation/", - "issues": "https://github.com/ansible-collections/netapp.azure/issues" - }, - "file_manifest_file": { - "name": "FILES.json", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "dac3f17953d3c381c15de48087a8d6d80a112c8f172006ed178a081fef9023c6", - "format": 1 - }, - "format": 1 -}
\ No newline at end of file diff --git a/ansible_collections/netapp/azure/README.md b/ansible_collections/netapp/azure/README.md deleted file mode 100644 index 05dcdd6dd..000000000 --- a/ansible_collections/netapp/azure/README.md +++ /dev/null @@ -1,158 +0,0 @@ -[![Documentation](https://img.shields.io/badge/docs-brightgreen.svg)](https://docs.ansible.com/ansible/devel/collections/netapp/azure/index.html) -![example workflow](https://github.com/ansible-collections/netapp.azure/actions/workflows/main.yml/badge.svg) -[![codecov](https://codecov.io/gh/ansible-collections/netapp.azure/branch/main/graph/badge.svg?token=weBYkksxSi)](https://codecov.io/gh/ansible-collections/netapp.azure) -[![Discord](https://img.shields.io/discord/855068651522490400)](https://discord.gg/NetApp) -============================================================= - -netapp.azure - -Azure NetApp Files (ANF) Collection - -Copyright (c) 2019 NetApp, Inc. All rights reserved. -Specifications subject to change without notice. - -============================================================= - -# Installation -```bash -ansible-galaxy collection install netapp.azure -``` -To use Collection add the following to the top of your playbook, with out this you will be using Ansible 2.9 version of the module -``` -collections: - - netapp.azure -``` - -# Module documentation -https://docs.ansible.com/ansible/devel/collections/netapp/azure/ - -# Need help -Join our [Discord](https://discord.gg/NetApp) and look for our #ansible channel. - -# Requirements -- python >= 2.7 -- azure >= 2.0.0 -- Python azure-mgmt. Install using ```pip install azure-mgmt``` -- Python azure-mgmt-netapp. Install using ```pip install azure-mgmt-netapp``` -- For authentication with Azure NetApp log in before you run your tasks or playbook with 'az login'. - -# Code of Conduct -This collection follows the [Ansible project's Code of Conduct](https://docs.ansible.com/ansible/devel/community/code_of_conduct.html). 
- -# Release Notes - -## 21.10.0 - -### Minor changes - - all modules - allow usage of Ansible module group defaults - for Ansible 2.12+. - -## 21.9.0 - -### New Options - - azure_rm_netapp_volume - `feature_flags` to selectively enable/disable a feature. - -### Bug Fixes - - azure_rm_netapp_volume - 'Change Ownership' is not permitted when creating NFSv4.1 volume with latest azure-mgmt-netapp package (4.0.0). - -## 21.8.1 - -### Bug Fixes - - Hub Automation cannot generate documentation (cannot use doc fragments from another collection). - -## 21.8.0 - -### Bug Fixes - -- fix CI pipeline as azcollection does not support python 2.6. -- fix CI pipeline as ignores are not required with latest azcollection. - -## 21.7.0 - -### Bug Fixes - -- fix CI pipeline to work with azcollection, and isolate UTs from azcollection. - -## 21.6.0 - -### Minor changes - - - azure_rm_netapp_account - support additional authentication schemes provided by AzureRMModuleBase. - - azure_rm_netapp_capacity_pool - support additional authentication schemes provided by AzureRMModuleBase, and tags. - - azure_rm_netapp_capacity_pool - wait for completion when creating, modifying, or deleting a pool. - - azure_rm_netapp_snapshot - support additional authentication schemes provided by AzureRMModuleBase. - - azure_rm_netapp_snapshot - wait for completion when creating or deleting a snapshot. - - azure_rm_netapp_volume - support additional authentication schemes provided by AzureRMModuleBase, and tags. - -## 21.5.0 - -### Minor changes - - azure_rm_netapp_volume - enable changes in volume size. - - azure_rm_netapp_volume - rename msg to mount_path, as documented in RETURN. - -## 21.3.0 - -### New Options - - azure_rm_netapp_account - new suboptions `ad_name`, `kdc_ip`, `service_root_ca_certificate` for Active Directory. - -### Bug Fixes - - support for azure-mgmt-netapp 1.0.0, while maintaining compatibility with 0.10.0. 
- - azure_rm_netapp_account - wait for job completion for asynchroneous requests, and report belated errors. - -## 21.2.0 - -### New Options - - azure_rm_netapp_account: new option `active_directories` to support SMB volumes. - - azure_rm_netapp_volume: new option `protocol_types` to support SMB volumes. - -## 21.1.0 - -### New Options - - azure_rm_netapp_volume - new option `subnet_name` as subnet_id is ambiguous. subnet_id is now aliased to subnet_name. - -### Bug Fixes - - azure_rm_netapp_volume - fix 'Nonetype' object is not subscriptable exception when mount target is not created. - -## 20.8.0 - -### Module documentation changes -- azure_rm_netapp_capacity_pool: Updated ANF capacity pool modify function for `size` parameter mandatory issue. -- use a three group format for `version_added`. So 2.7 becomes 2.7.0. Same thing for 2.8 and 2.9. - -## 20.7.0 - -### Bug Fixes -- azure_rm_netapp_capacity_pool: fixed idempotency for delete operation. - -## 20.6.0 - -### New Options -- azure_rm_netapp_capacity_pool: now allows modify for size. -- azure_rm_netapp_volume: now returns complete mount_path of the volume specified. - -## 20.5.0 - -### New Options -- azure_rm_netapp_account: new option `tags`. -- azure_rm_netapp_capacity_pool: new option `service_level`. -- azure_rm_netapp_volume: new option `size`. -- azure_rm_netapp_volume: now returns mount_path of the volume specified. -- azure_rm_netapp_volume: new option `vnet_resource_group_for_subnet`, resource group for virtual_network and subnet_id to be used. - -## 20.4.0 - -### Bug Fixes -- fix changes to azure-mgmt-netapp as per new release. -- removed ONTAP dependency import. - -## 20.2.0 - -### Bug Fixes -- galaxy.yml: fix path to github repository. - -## 19.11.0 -- Initial release. -### New Modules -- azure_rm_netapp_account: create/delete NetApp Azure Files Account. -- azure_rm_netapp_capacity_pool: create/delete NetApp Azure Files capacity pool. 
-- azure_rm_netapp_snapshot: create/delete NetApp Azure Files Snapshot. -- azure_rm_netapp_volume: create/delete NetApp Azure Files volume. diff --git a/ansible_collections/netapp/azure/changelogs/changelog.yaml b/ansible_collections/netapp/azure/changelogs/changelog.yaml deleted file mode 100644 index 71085dedf..000000000 --- a/ansible_collections/netapp/azure/changelogs/changelog.yaml +++ /dev/null @@ -1,169 +0,0 @@ -ancestor: null -releases: - 19.10.0: - modules: - - description: Manage NetApp Azure Files Account - name: azure_rm_netapp_account - namespace: '' - - description: Manage NetApp Azure Files capacity pool - name: azure_rm_netapp_capacity_pool - namespace: '' - - description: Manage NetApp Azure Files Snapshot - name: azure_rm_netapp_snapshot - namespace: '' - - description: Manage NetApp Azure Files Volume - name: azure_rm_netapp_volume - namespace: '' - release_date: '2019-11-14' - 20.2.0: - changes: - bugfixes: - - galaxy.yml - fix path to github repository. - fragments: - - 20.2.0.yaml - release_date: '2020-02-05' - 20.4.0: - changes: - bugfixes: - - fix changes to azure-mgmt-netapp as per new release. - - removed ONTAP dependency import. - fragments: - - 20.4.0.yaml - release_date: '2020-04-21' - 20.5.0: - changes: - minor_changes: - - azure_rm_netapp_account - new option ``tags``. - - azure_rm_netapp_capacity_pool - new option ``service_level``. - - azure_rm_netapp_volume - new option ``size``. - - azure_rm_netapp_volume - new option ``vnet_resource_group_for_subnet``, resource - group for virtual_network and subnet_id to be used. - - azure_rm_netapp_volume - now returns mount_path of the volume specified. - fragments: - - 20.5.0.yaml - release_date: '2020-05-06' - 20.6.0: - changes: - minor_changes: - - azure_rm_netapp_capacity_pool - now allows modify for size. - - azure_rm_netapp_volume - now returns complete mount_path of the volume specified. 
- fragments: - - 20.6.0.yaml - release_date: '2020-06-03' - 20.7.0: - changes: - bugfixes: - - azure_rm_netapp_capacity_pool - fixed idempotency for delete operation. - fragments: - - 20.7.0.yaml - release_date: '2020-06-24' - 20.8.0: - changes: - minor_changes: - - azure_rm_netapp_capacity_pool - Updated ANF capacity pool modify function - for size parameter mandatory issue. - - use a three group format for version_added. So 2.7 becomes 2.7.0. Same thing - for 2.8 and 2.9. - fragments: - - 20.8.0.yaml - release_date: '2020-08-05' - 21.10.0: - changes: - minor_changes: - - PR1 - allow usage of Ansible module group defaults - for Ansible 2.12+. - fragments: - - DEVOPS-4416.yaml - release_date: '2021-11-03' - 21.2.0: - changes: - bugfixes: - - azure_rm_netapp_volume - fix 'Nonetype' object is not subscriptable exception - when mount target is not created. - minor_changes: - - azure_rm_netapp_account - new option ``active_directories`` to support SMB - volumes. - - azure_rm_netapp_volume - new option ``protocol_types`` to support SMB volumes. - - azure_rm_netapp_volume - new option ``subnet_name`` as subnet_id is ambiguous. subnet_id - is now aliased to subnet_name. - fragments: - - DEVOPS-3505.yaml - release_date: '2021-02-04' - 21.3.0: - changes: - bugfixes: - - azure_rm_netapp_account - wait for job completion for asynchroneous requests, - and report belated errors. - - support for azure-mgmt-netapp 1.0.0, while maintaining compatibility with - 0.10.0. - minor_changes: - - azure_rm_netapp_account - new option ``active_directories`` to support SMB - volumes. - - azure_rm_netapp_account - new suboptions ``ad_name``, ``kdc_ip``, ``service_root_ca_certificate``` - for Active Directory. - - azure_rm_netapp_volume - new option ``protocol_types`` to support SMB volumes. - fragments: - - DEVOPS-3526.yaml - - DEVOPS-3663.yaml - - DEVOPS-3704.yaml - release_date: '2021-03-03' - 21.5.0: - changes: - minor_changes: - - azure_rm_netapp_volume - enable changes in volume size. 
- - azure_rm_netapp_volume - rename msg to mount_path, as documented in RETURN. - fragments: - - DEVOPS-3849.yaml - release_date: '2021-04-21' - 21.6.0: - changes: - minor_changes: - - azure_rm_netapp_account - support additional authentication schemes provided - by AzureRMModuleBase. - - azure_rm_netapp_capacity_pool - support additional authentication schemes - provided by AzureRMModuleBase, and tags. - - azure_rm_netapp_capacity_pool - wait for completion when creating, modifying, - or deleting a pool. - - azure_rm_netapp_snapshot - support additional authentication schemes provided - by AzureRMModuleBase. - - azure_rm_netapp_snapshot - wait for completion when creating, modifying, or - deleting a pool. - - azure_rm_netapp_volume - support additional authentication schemes provided - by AzureRMModuleBase, and tags. - fragments: - - DEVOPS-3935.yaml - - DEVOPS-3949.yaml - release_date: '2021-05-06' - 21.7.0: - changes: - bugfixes: - - fix CI pipeline to work with azcollection, and isolate UTs from azcollection. - fragments: - - DEVOPS-4001.yaml - release_date: '2021-06-03' - 21.8.0: - changes: - bugfixes: - - fix CI pipeline as azcollection does not support python 2.6. - - fix CI pipeline as ignores are not required with latest azcollection. - fragments: - - DEVOPS-4070.yaml - release_date: '2021-07-14' - 21.8.1: - changes: - bugfixes: - - Hub Automation cannot generate documentation (cannot use doc fragments from - another collection). - fragments: - - DEVOPS-4135.yaml - release_date: '2021-07-16' - 21.9.0: - changes: - bugfixes: - - azure_rm_netapp_volume - 'Change Ownership' is not permitted when creating - NFSv4.1 volume with latest azure-mgmt-netapp package (4.0.0). - minor_changes: - - azure_rm_netapp_volume - new option ``feature_flags`` to selectively enable/disable - a feature. 
- fragments: - - DEVOPS-4246.yaml - release_date: '2021-09-01' diff --git a/ansible_collections/netapp/azure/changelogs/config.yaml b/ansible_collections/netapp/azure/changelogs/config.yaml deleted file mode 100644 index fcd0312f6..000000000 --- a/ansible_collections/netapp/azure/changelogs/config.yaml +++ /dev/null @@ -1,32 +0,0 @@ -changelog_filename_template: ../CHANGELOG.rst -changelog_filename_version_depth: 0 -changes_file: changelog.yaml -changes_format: combined -ignore_other_fragment_extensions: true -keep_fragments: true -mention_ancestor: false -new_plugins_after_name: removed_features -notesdir: fragments -prelude_section_name: release_summary -prelude_section_title: Release Summary -sanitize_changelog: true -sections: -- - major_changes - - Major Changes -- - minor_changes - - Minor Changes -- - breaking_changes - - Breaking Changes / Porting Guide -- - deprecated_features - - Deprecated Features -- - removed_features - - Removed Features (previously deprecated) -- - security_fixes - - Security Fixes -- - bugfixes - - Bugfixes -- - known_issues - - Known Issues -title: Azure NetApp Files (ANF) Collection -trivial_section_name: trivial -use_fqcn: true diff --git a/ansible_collections/netapp/azure/changelogs/fragments/20.2.0.yaml b/ansible_collections/netapp/azure/changelogs/fragments/20.2.0.yaml deleted file mode 100644 index 3f764c1c9..000000000 --- a/ansible_collections/netapp/azure/changelogs/fragments/20.2.0.yaml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: - - galaxy.yml - fix path to github repository. diff --git a/ansible_collections/netapp/azure/changelogs/fragments/20.4.0.yaml b/ansible_collections/netapp/azure/changelogs/fragments/20.4.0.yaml deleted file mode 100644 index 044018d2c..000000000 --- a/ansible_collections/netapp/azure/changelogs/fragments/20.4.0.yaml +++ /dev/null @@ -1,3 +0,0 @@ -bugfixes: - - fix changes to azure-mgmt-netapp as per new release. - - removed ONTAP dependency import. 
diff --git a/ansible_collections/netapp/azure/changelogs/fragments/20.5.0.yaml b/ansible_collections/netapp/azure/changelogs/fragments/20.5.0.yaml deleted file mode 100644 index a2f16d6b0..000000000 --- a/ansible_collections/netapp/azure/changelogs/fragments/20.5.0.yaml +++ /dev/null @@ -1,6 +0,0 @@ -minor_changes: - - azure_rm_netapp_account - new option ``tags``. - - azure_rm_netapp_capacity_pool - new option ``service_level``. - - azure_rm_netapp_volume - new option ``size``. - - azure_rm_netapp_volume - now returns mount_path of the volume specified. - - azure_rm_netapp_volume - new option ``vnet_resource_group_for_subnet``, resource group for virtual_network and subnet_id to be used. diff --git a/ansible_collections/netapp/azure/changelogs/fragments/20.6.0.yaml b/ansible_collections/netapp/azure/changelogs/fragments/20.6.0.yaml deleted file mode 100644 index 67b15df45..000000000 --- a/ansible_collections/netapp/azure/changelogs/fragments/20.6.0.yaml +++ /dev/null @@ -1,3 +0,0 @@ -minor_changes: - - azure_rm_netapp_capacity_pool - now allows modify for size. - - azure_rm_netapp_volume - now returns complete mount_path of the volume specified. diff --git a/ansible_collections/netapp/azure/changelogs/fragments/20.7.0.yaml b/ansible_collections/netapp/azure/changelogs/fragments/20.7.0.yaml deleted file mode 100644 index e150ea10d..000000000 --- a/ansible_collections/netapp/azure/changelogs/fragments/20.7.0.yaml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: - - azure_rm_netapp_capacity_pool - fixed idempotency for delete operation. diff --git a/ansible_collections/netapp/azure/changelogs/fragments/20.8.0.yaml b/ansible_collections/netapp/azure/changelogs/fragments/20.8.0.yaml deleted file mode 100644 index 68e9d285d..000000000 --- a/ansible_collections/netapp/azure/changelogs/fragments/20.8.0.yaml +++ /dev/null @@ -1,3 +0,0 @@ -minor_changes: - - azure_rm_netapp_capacity_pool - Updated ANF capacity pool modify function for size parameter mandatory issue. 
- - use a three group format for version_added. So 2.7 becomes 2.7.0. Same thing for 2.8 and 2.9. diff --git a/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-3505.yaml b/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-3505.yaml deleted file mode 100644 index 87c49d052..000000000 --- a/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-3505.yaml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - azure_rm_netapp_volume - new option ``subnet_name`` as subnet_id is ambiguous. subnet_id is now aliased to subnet_name. -bugfixes: - - azure_rm_netapp_volume - fix 'Nonetype' object is not subscriptable exception when mount target is not created. diff --git a/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-3526.yaml b/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-3526.yaml deleted file mode 100644 index 720ce523d..000000000 --- a/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-3526.yaml +++ /dev/null @@ -1,3 +0,0 @@ -minor_changes: - - azure_rm_netapp_account - new option ``active_directories`` to support SMB volumes. - - azure_rm_netapp_volume - new option ``protocol_types`` to support SMB volumes. diff --git a/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-3663.yaml b/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-3663.yaml deleted file mode 100644 index e9adbdb9b..000000000 --- a/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-3663.yaml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: - - support for azure-mgmt-netapp 1.0.0, while maintaining compatibility with 0.10.0. 
diff --git a/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-3704.yaml b/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-3704.yaml deleted file mode 100644 index f0c1f6a38..000000000 --- a/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-3704.yaml +++ /dev/null @@ -1,5 +0,0 @@ -minor_changes: - - azure_rm_netapp_account - new suboptions ``ad_name``, ``kdc_ip``, ``service_root_ca_certificate``` for Active Directory. - -bugfixes: - - azure_rm_netapp_account - wait for job completion for asynchroneous requests, and report belated errors. diff --git a/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-3849.yaml b/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-3849.yaml deleted file mode 100644 index 7fc5d9fae..000000000 --- a/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-3849.yaml +++ /dev/null @@ -1,3 +0,0 @@ -minor_changes: - - azure_rm_netapp_volume - enable changes in volume size. - - azure_rm_netapp_volume - rename msg to mount_path, as documented in RETURN. diff --git a/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-3935.yaml b/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-3935.yaml deleted file mode 100644 index c619dbcd9..000000000 --- a/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-3935.yaml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - azure_rm_netapp_account - support additional authentication schemes provided by AzureRMModuleBase. diff --git a/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-3949.yaml b/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-3949.yaml deleted file mode 100644 index 2889546f8..000000000 --- a/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-3949.yaml +++ /dev/null @@ -1,6 +0,0 @@ -minor_changes: - - azure_rm_netapp_capacity_pool - support additional authentication schemes provided by AzureRMModuleBase, and tags. 
- - azure_rm_netapp_capacity_pool - wait for completion when creating, modifying, or deleting a pool. - - azure_rm_netapp_snapshot - support additional authentication schemes provided by AzureRMModuleBase. - - azure_rm_netapp_snapshot - wait for completion when creating, modifying, or deleting a pool. - - azure_rm_netapp_volume - support additional authentication schemes provided by AzureRMModuleBase, and tags. diff --git a/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-4001.yaml b/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-4001.yaml deleted file mode 100644 index 2b09e21ce..000000000 --- a/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-4001.yaml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: - - fix CI pipeline to work with azcollection, and isolate UTs from azcollection. diff --git a/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-4070.yaml b/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-4070.yaml deleted file mode 100644 index 0342115bb..000000000 --- a/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-4070.yaml +++ /dev/null @@ -1,3 +0,0 @@ -bugfixes: - - fix CI pipeline as azcollection does not support python 2.6. - - fix CI pipeline as ignores are not required with latest azcollection. diff --git a/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-4135.yaml b/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-4135.yaml deleted file mode 100644 index 50e2e7d36..000000000 --- a/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-4135.yaml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: - - Hub Automation cannot generate documentation (cannot use doc fragments from another collection). 
diff --git a/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-4246.yaml b/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-4246.yaml deleted file mode 100644 index 781042d6f..000000000 --- a/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-4246.yaml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - azure_rm_netapp_volume - new option ``feature_flags`` to selectively enable/disable a feature. -bugfixes: - - azure_rm_netapp_volume - 'Change Ownership' is not permitted when creating NFSv4.1 volume with latest azure-mgmt-netapp package (4.0.0). diff --git a/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-4416.yaml b/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-4416.yaml deleted file mode 100644 index 6b4b660a0..000000000 --- a/ansible_collections/netapp/azure/changelogs/fragments/DEVOPS-4416.yaml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - PR1 - allow usage of Ansible module group defaults - for Ansible 2.12+. diff --git a/ansible_collections/netapp/azure/meta/runtime.yml b/ansible_collections/netapp/azure/meta/runtime.yml deleted file mode 100644 index fc582847b..000000000 --- a/ansible_collections/netapp/azure/meta/runtime.yml +++ /dev/null @@ -1,8 +0,0 @@ ---- -requires_ansible: ">=2.13" -action_groups: - netapp_azure: - - azure_rm_netapp_account - - azure_rm_netapp_capacity_pool - - azure_rm_netapp_snapshot - - azure_rm_netapp_volume diff --git a/ansible_collections/netapp/azure/plugins/doc_fragments/azure.py b/ansible_collections/netapp/azure/plugins/doc_fragments/azure.py deleted file mode 100644 index 49467db70..000000000 --- a/ansible_collections/netapp/azure/plugins/doc_fragments/azure.py +++ /dev/null @@ -1,129 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright: (c) 2016 Matt Davis, <mdavis@ansible.com> -# Copyright: (c) 2016 Chris Houseknecht, <house@redhat.com> -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -from __future__ import 
absolute_import, division, print_function -__metaclass__ = type - - -class ModuleDocFragment(object): - - # Azure doc fragment - DOCUMENTATION = r''' - -options: - ad_user: - description: - - Active Directory username. Use when authenticating with an Active Directory user rather than service - principal. - type: str - password: - description: - - Active Directory user password. Use when authenticating with an Active Directory user rather than service - principal. - type: str - profile: - description: - - Security profile found in ~/.azure/credentials file. - type: str - subscription_id: - description: - - Your Azure subscription Id. - type: str - client_id: - description: - - Azure client ID. Use when authenticating with a Service Principal. - type: str - secret: - description: - - Azure client secret. Use when authenticating with a Service Principal. - type: str - tenant: - description: - - Azure tenant ID. Use when authenticating with a Service Principal. - type: str - cloud_environment: - description: - - For cloud environments other than the US public cloud, the environment name (as defined by Azure Python SDK, eg, C(AzureChinaCloud), - C(AzureUSGovernment)), or a metadata discovery endpoint URL (required for Azure Stack). Can also be set via credential file profile or - the C(AZURE_CLOUD_ENVIRONMENT) environment variable. - type: str - default: AzureCloud - version_added: '0.0.1' - adfs_authority_url: - description: - - Azure AD authority url. Use when authenticating with Username/password, and has your own ADFS authority. - type: str - version_added: '0.0.1' - cert_validation_mode: - description: - - Controls the certificate validation behavior for Azure endpoints. By default, all modules will validate the server certificate, but - when an HTTPS proxy is in use, or against Azure Stack, it may be necessary to disable this behavior by passing C(ignore). Can also be - set via credential file profile or the C(AZURE_CERT_VALIDATION) environment variable. 
- type: str - choices: [ ignore, validate ] - version_added: '0.0.1' - auth_source: - description: - - Controls the source of the credentials to use for authentication. - - Can also be set via the C(ANSIBLE_AZURE_AUTH_SOURCE) environment variable. - - When set to C(auto) (the default) the precedence is module parameters -> C(env) -> C(credential_file) -> C(cli). - - When set to C(env), the credentials will be read from the environment variables - - When set to C(credential_file), it will read the profile from C(~/.azure/credentials). - - When set to C(cli), the credentials will be sources from the Azure CLI profile. C(subscription_id) or the environment variable - C(AZURE_SUBSCRIPTION_ID) can be used to identify the subscription ID if more than one is present otherwise the default - az cli subscription is used. - - When set to C(msi), the host machine must be an azure resource with an enabled MSI extension. C(subscription_id) or the - environment variable C(AZURE_SUBSCRIPTION_ID) can be used to identify the subscription ID if the resource is granted - access to more than one subscription, otherwise the first subscription is chosen. - - The C(msi) was added in Ansible 2.6. - type: str - default: auto - choices: - - auto - - cli - - credential_file - - env - - msi - version_added: '0.0.1' - api_profile: - description: - - Selects an API profile to use when communicating with Azure services. Default value of C(latest) is appropriate for public clouds; - future values will allow use with Azure Stack. - type: str - default: latest - version_added: '0.0.1' - log_path: - description: - - Parent argument. - type: str - log_mode: - description: - - Parent argument. 
- type: str -requirements: - - python >= 2.7 - - The host that executes this module must have the azure.azcollection collection installed via galaxy - - All python packages listed in collection's requirements-azure.txt must be installed via pip on the host that executes modules from azure.azcollection - - Full installation instructions may be found https://galaxy.ansible.com/azure/azcollection - -notes: - - For authentication with Azure you can pass parameters, set environment variables, use a profile stored - in ~/.azure/credentials, or log in before you run your tasks or playbook with C(az login). - - Authentication is also possible using a service principal or Active Directory user. - - To authenticate via service principal, pass subscription_id, client_id, secret and tenant or set environment - variables AZURE_SUBSCRIPTION_ID, AZURE_CLIENT_ID, AZURE_SECRET and AZURE_TENANT. - - To authenticate via Active Directory user, pass ad_user and password, or set AZURE_AD_USER and - AZURE_PASSWORD in the environment. - - "Alternatively, credentials can be stored in ~/.azure/credentials. This is an ini file containing - a [default] section and the following keys: subscription_id, client_id, secret and tenant or - subscription_id, ad_user and password. It is also possible to add additional profiles. Specify the profile - by passing profile or setting AZURE_PROFILE in the environment." - -seealso: - - name: Sign in with Azure CLI - link: https://docs.microsoft.com/en-us/cli/azure/authenticate-azure-cli?view=azure-cli-latest - description: How to authenticate using the C(az login) command. 
- ''' diff --git a/ansible_collections/netapp/azure/plugins/doc_fragments/azure_tags.py b/ansible_collections/netapp/azure/plugins/doc_fragments/azure_tags.py deleted file mode 100644 index 8edb80eed..000000000 --- a/ansible_collections/netapp/azure/plugins/doc_fragments/azure_tags.py +++ /dev/null @@ -1,31 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright: (c) 2016, Matt Davis, <mdavis@ansible.com> -# Copyright: (c) 2016, Chris Houseknecht, <house@redhat.com> -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -class ModuleDocFragment(object): - - # Azure doc fragment - DOCUMENTATION = r''' -options: - tags: - description: - - Dictionary of string:string pairs to assign as metadata to the object. - - Metadata tags on the object will be updated with any provided values. - - To remove tags set append_tags option to false. - - Currently, Azure DNS zones and Traffic Manager services also don't allow the use of spaces in the tag. - - Azure Front Door doesn't support the use of # in the tag name. - - Azure Automation and Azure CDN only support 15 tags on resources. - type: dict - append_tags: - description: - - Use to control if tags field is canonical or just appends to existing tags. - - When canonical, any tags not found in the tags parameter will be removed from the object's metadata. 
- type: bool - default: yes - ''' diff --git a/ansible_collections/netapp/azure/plugins/doc_fragments/netapp.py b/ansible_collections/netapp/azure/plugins/doc_fragments/netapp.py deleted file mode 100644 index 18e9cc2a2..000000000 --- a/ansible_collections/netapp/azure/plugins/doc_fragments/netapp.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright: (c) 2019, NetApp Ansible Team ng-ansibleteam@netapp.com -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - - -class ModuleDocFragment(object): - - DOCUMENTATION = r''' -options: - - See respective platform section for more details -requirements: - - See respective platform section for more details -notes: - - Ansible modules are available for the following NetApp Storage Platforms: E-Series, ONTAP, SolidFire -''' - - # Documentation fragment for Cloud Volume Services on Azure NetApp (azure_rm_netapp) - AZURE_RM_NETAPP = r''' -options: - resource_group: - description: - - Name of the resource group. - required: true - type: str -requirements: - - python >= 2.7 - - azure >= 2.0.0 - - Python azure-mgmt. Install using 'pip install azure-mgmt' - - Python azure-mgmt-netapp. Install using 'pip install azure-mgmt-netapp' - - For authentication with Azure NetApp log in before you run your tasks or playbook with C(az login). - -notes: - - The modules prefixed with azure_rm_netapp are built to support the Cloud Volume Services for Azure NetApp Files. - -seealso: - - name: Sign in with Azure CLI - link: https://docs.microsoft.com/en-us/cli/azure/authenticate-azure-cli?view=azure-cli-latest - description: How to authenticate using the C(az login) command. 
- ''' diff --git a/ansible_collections/netapp/azure/plugins/module_utils/azure_rm_netapp_common.py b/ansible_collections/netapp/azure/plugins/module_utils/azure_rm_netapp_common.py deleted file mode 100644 index 166327088..000000000 --- a/ansible_collections/netapp/azure/plugins/module_utils/azure_rm_netapp_common.py +++ /dev/null @@ -1,163 +0,0 @@ -# (c) 2019, NetApp, Inc -# GNU General Public License v3.0+ -# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -''' -azure_rm_netapp_common -Wrapper around AzureRMModuleBase base class -''' - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import sys - -HAS_AZURE_COLLECTION = True -NEW_STYLE = None -COLLECTION_VERSION = "21.10.1" -IMPORT_ERRORS = [] -SDK_VERSION = "0.0.0" - -if 'pytest' in sys.modules: - from ansible_collections.netapp.azure.plugins.module_utils.netapp_module import AzureRMModuleBaseMock as AzureRMModuleBase -else: - try: - from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase - except ImportError as exc: - IMPORT_ERRORS.append(str(exc)) - HAS_AZURE_COLLECTION = False - - class AzureRMModuleBase: - def __init__(self, derived_arg_spec, required_if=None, supports_check_mode=False, supports_tags=True, **kwargs): - raise ImportError(IMPORT_ERRORS) - - except SyntaxError as exc: - # importing Azure collection fails with python 2.6 - if sys.version_info < (2, 8): - IMPORT_ERRORS.append(str(exc)) - from ansible_collections.netapp.azure.plugins.module_utils.netapp_module import AzureRMModuleBaseMock as AzureRMModuleBase - HAS_AZURE_COLLECTION = False - else: - raise - -try: - from azure.mgmt.netapp import NetAppManagementClient # 1.0.0 or newer - NEW_STYLE = True -except ImportError as exc: - IMPORT_ERRORS.append(str(exc)) - -if NEW_STYLE is None: - try: - from azure.mgmt.netapp import AzureNetAppFilesManagementClient # 0.10.0 or older - NEW_STYLE = False - except ImportError as exc: - 
IMPORT_ERRORS.append(str(exc)) - -try: - from azure.mgmt.netapp import VERSION as SDK_VERSION -except ImportError as exc: - IMPORT_ERRORS.append(str(exc)) - - -class AzureRMNetAppModuleBase(AzureRMModuleBase): - ''' Wrapper around AzureRMModuleBase base class ''' - def __init__(self, derived_arg_spec, required_if=None, supports_check_mode=False, supports_tags=True): - self._netapp_client = None - self._new_style = NEW_STYLE - self._sdk_version = SDK_VERSION - super(AzureRMNetAppModuleBase, self).__init__(derived_arg_spec=derived_arg_spec, - required_if=required_if, - supports_check_mode=supports_check_mode, - supports_tags=supports_tags) - if not HAS_AZURE_COLLECTION: - self.fail_when_import_errors(IMPORT_ERRORS) - - def get_mgmt_svc_client(self, client_type, base_url=None, api_version=None): - if not self._new_style: - return super(AzureRMNetAppModuleBase, self).get_mgmt_svc_client(client_type, base_url, api_version) - self.log('Getting management service client NetApp {0}'.format(client_type.__name__)) - self.check_client_version(client_type) - - if not base_url: - # most things are resource_manager, don't make everyone specify - base_url = self.azure_auth._cloud_environment.endpoints.resource_manager - - client_kwargs = dict(credential=self.azure_auth.azure_credentials, subscription_id=self.azure_auth.subscription_id, base_url=base_url) - - return client_type(**client_kwargs) - - @property - def netapp_client(self): - self.log('Getting netapp client') - if self._new_style is None: - # note that we always have at least one import error - self.fail_when_import_errors(IMPORT_ERRORS) - if self._netapp_client is None: - if self._new_style: - self._netapp_client = self.get_mgmt_svc_client(NetAppManagementClient) - else: - self._netapp_client = self.get_mgmt_svc_client(AzureNetAppFilesManagementClient, - base_url=self._cloud_environment.endpoints.resource_manager, - api_version='2018-05-01') - return self._netapp_client - - @property - def new_style(self): - return 
self._new_style - - @property - def sdk_version(self): - return self._sdk_version - - def get_method(self, category, name): - try: - methods = getattr(self.netapp_client, category) - except AttributeError as exc: - self.module.fail_json('Error: category %s not found for netapp_client: %s' % (category, str(exc))) - - if self._new_style: - name = 'begin_' + name - try: - method = getattr(methods, name) - except AttributeError as exc: - self.module.fail_json('Error: method %s not found for netapp_client category: %s - %s' % (name, category, str(exc))) - return method - - def fail_when_import_errors(self, import_errors, has_azure_mgmt_netapp=True): - if has_azure_mgmt_netapp and not import_errors: - return - msg = '' - if not has_azure_mgmt_netapp: - msg = "The python azure-mgmt-netapp package is required. " - if hasattr(self, 'module'): - msg += 'Import errors: %s' % str(import_errors) - self.module.fail_json(msg=msg) - msg += str(import_errors) - raise ImportError(msg) - - def has_feature(self, feature_name): - feature = self.get_feature(feature_name) - if isinstance(feature, bool): - return feature - self.module.fail_json(msg="Error: expected bool type for feature flag: %s" % feature_name) - - def get_feature(self, feature_name): - ''' if the user has configured the feature, use it - otherwise, use our default - ''' - default_flags = dict( - # TODO: review need for these - # trace_apis=False, # if true, append REST requests/responses to /tmp/azure_apis.log - # check_required_params_for_none=True, - # deprecation_warning=True, - # show_modified=True, - # - # preview features in ANF - ignore_change_ownership_mode=True - ) - - if self.parameters.get('feature_flags') is not None and feature_name in self.parameters['feature_flags']: - return self.parameters['feature_flags'][feature_name] - if feature_name in default_flags: - return default_flags[feature_name] - self.module.fail_json(msg="Internal error: unexpected feature flag: %s" % feature_name) diff --git 
a/ansible_collections/netapp/azure/plugins/module_utils/netapp_module.py b/ansible_collections/netapp/azure/plugins/module_utils/netapp_module.py deleted file mode 100644 index 9ee758c01..000000000 --- a/ansible_collections/netapp/azure/plugins/module_utils/netapp_module.py +++ /dev/null @@ -1,271 +0,0 @@ -# This code is part of Ansible, but is an independent component. -# This particular file snippet, and this file snippet only, is BSD licensed. -# Modules you write using this snippet, which is embedded dynamically by Ansible -# still belong to the author of the module, and may assign their own license -# to the complete work. -# -# Copyright (c) 2018, Laurent Nicolas <laurentn@netapp.com> -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without modification, -# are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
-# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, -# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE -# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -''' Support class for NetApp ansible modules ''' - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -from ansible.module_utils import basic - - -class AzureRMModuleBaseMock(): - ''' Mock for sanity tests when azcollection is not installed ''' - def __init__(self, derived_arg_spec, required_if=None, supports_check_mode=False, supports_tags=True, **kwargs): - if supports_tags: - derived_arg_spec.update(dict(tags=dict())) - self.module = basic.AnsibleModule( - argument_spec=derived_arg_spec, - required_if=required_if, - supports_check_mode=supports_check_mode - ) - self.module.warn('Running in Unit Test context!') - # the following is done in exec_module() - self.parameters = dict([item for item in self.module.params.items() if item[1] is not None]) - # remove values with a default of None (not required) - self.module_arg_spec = dict([item for item in self.module_arg_spec.items() if item[0] in self.parameters]) - - def update_tags(self, tags): - self.module.log('update_tags called with:', tags) - return None, None - - -def cmp(obj1, obj2): - """ - Python 3 does not have a cmp function, this will do the cmp. - :param a: first object to check - :param b: second object to check - :return: - """ - # convert to lower case for string comparison. 
- if obj1 is None: - return -1 - if isinstance(obj1, str) and isinstance(obj2, str): - obj1 = obj1.lower() - obj2 = obj2.lower() - # if list has string element, convert string to lower case. - if isinstance(obj1, list) and isinstance(obj2, list): - obj1 = [x.lower() if isinstance(x, str) else x for x in obj1] - obj2 = [x.lower() if isinstance(x, str) else x for x in obj2] - obj1.sort() - obj2.sort() - if isinstance(obj1, dict) and isinstance(obj2, dict): - return 0 if obj1 == obj2 else 1 - return (obj1 > obj2) - (obj1 < obj2) - - -class NetAppModule(): - ''' - Common class for NetApp modules - set of support functions to derive actions based - on the current state of the system, and a desired state - ''' - - def __init__(self): - self.log = [] - self.changed = False - self.parameters = {'name': 'not intialized'} - self.zapi_string_keys = dict() - self.zapi_bool_keys = dict() - self.zapi_list_keys = {} - self.zapi_int_keys = {} - self.zapi_required = {} - - def set_parameters(self, ansible_params): - self.parameters = {} - for param in ansible_params: - if ansible_params[param] is not None: - self.parameters[param] = ansible_params[param] - return self.parameters - - def get_cd_action(self, current, desired): - ''' takes a desired state and a current state, and return an action: - create, delete, None - eg: - is_present = 'absent' - some_object = self.get_object(source) - if some_object is not None: - is_present = 'present' - action = cd_action(current=is_present, desired = self.desired.state()) - ''' - desired_state = desired['state'] if 'state' in desired else 'present' - if current is None and desired_state == 'absent': - return None - if current is not None and desired_state == 'present': - return None - # change in state - self.changed = True - if current is not None: - return 'delete' - return 'create' - - def compare_and_update_values(self, current, desired, keys_to_compare): - updated_values = {} - is_changed = False - for key in keys_to_compare: - if key in 
current: - if key in desired and desired[key] is not None: - if current[key] != desired[key]: - updated_values[key] = desired[key] - is_changed = True - else: - updated_values[key] = current[key] - else: - updated_values[key] = current[key] - - return updated_values, is_changed - - @staticmethod - def check_keys(current, desired): - ''' TODO: raise an error if keys do not match - with the exception of: - new_name, state in desired - ''' - - @staticmethod - def compare_lists(current, desired, get_list_diff): - ''' compares two lists and return a list of elements that are either the desired elements or elements that are - modified from the current state depending on the get_list_diff flag - :param: current: current item attribute in ONTAP - :param: desired: attributes from playbook - :param: get_list_diff: specifies whether to have a diff of desired list w.r.t current list for an attribute - :return: list of attributes to be modified - :rtype: list - ''' - desired_diff_list = [item for item in desired if item not in current] # get what in desired and not in current - current_diff_list = [item for item in current if item not in desired] # get what in current but not in desired - - if desired_diff_list or current_diff_list: - # there are changes - if get_list_diff: - return desired_diff_list - else: - return desired - else: - return [] - - def get_modified_attributes(self, current, desired, get_list_diff=False): - ''' takes two dicts of attributes and return a dict of attributes that are - not in the current state - It is expected that all attributes of interest are listed in current and - desired. 
- :param: current: current attributes in ONTAP - :param: desired: attributes from playbook - :param: get_list_diff: specifies whether to have a diff of desired list w.r.t current list for an attribute - :return: dict of attributes to be modified - :rtype: dict - - NOTE: depending on the attribute, the caller may need to do a modify or a - different operation (eg move volume if the modified attribute is an - aggregate name) - ''' - # if the object does not exist, we can't modify it - modified = {} - if current is None: - return modified - - # error out if keys do not match - self.check_keys(current, desired) - - # collect changed attributes - for key, value in current.items(): - if key in desired and desired[key] is not None: - if isinstance(value, list): - modified_list = self.compare_lists(value, desired[key], get_list_diff) # get modified list from current and desired - if modified_list: - modified[key] = modified_list - elif cmp(value, desired[key]) != 0: - modified[key] = desired[key] - if modified: - self.changed = True - return modified - - def is_rename_action(self, source, target): - ''' takes a source and target object, and returns True - if a rename is required - eg: - source = self.get_object(source_name) - target = self.get_object(target_name) - action = is_rename_action(source, target) - :return: None for error, True for rename action, False otherwise - ''' - if source is None and target is None: - # error, do nothing - # cannot rename an non existent resource - # alternatively we could create B - return None - if source is not None and target is not None: - # error, do nothing - # idempotency (or) new_name_is_already_in_use - # alternatively we could delete B and rename A to B - return False - if source is None: - # do nothing, maybe the rename was already done - return False - # source is not None and target is None: - # rename is in order - self.changed = True - return True - - def filter_out_none_entries(self, list_or_dict): - """take a dict or 
list as input and return a dict/list without keys/elements whose values are None - skip empty dicts or lists. - """ - - if isinstance(list_or_dict, dict): - result = {} - for key, value in list_or_dict.items(): - if isinstance(value, (list, dict)): - sub = self.filter_out_none_entries(value) - if sub: - # skip empty dict or list - result[key] = sub - elif value is not None: - # skip None value - result[key] = value - return result - - if isinstance(list_or_dict, list): - alist = [] - for item in list_or_dict: - if isinstance(item, (list, dict)): - sub = self.filter_out_none_entries(item) - if sub: - # skip empty dict or list - alist.append(sub) - elif item is not None: - # skip None value - alist.append(item) - return alist - - raise TypeError('unexpected type %s' % type(list_or_dict)) - - @staticmethod - def get_not_none_values_from_dict(parameters, keys): - # python 2.6 does not support dict comprehension using k: v - return dict((key, value) for key, value in parameters.items() if key in keys and value is not None) diff --git a/ansible_collections/netapp/azure/plugins/modules/azure_rm_netapp_account.py b/ansible_collections/netapp/azure/plugins/modules/azure_rm_netapp_account.py deleted file mode 100644 index c09ade0df..000000000 --- a/ansible_collections/netapp/azure/plugins/modules/azure_rm_netapp_account.py +++ /dev/null @@ -1,404 +0,0 @@ -#!/usr/bin/python -# -# (c) 2019, NetApp, Inc -# GNU General Public License v3.0+ -# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -''' -azure_rm_netapp_account -''' - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - -DOCUMENTATION = ''' ---- -module: azure_rm_netapp_account - -short_description: Manage NetApp Azure Files Account -version_added: 19.10.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> - -description: - - Create and delete NetApp Azure account. - Provide the Resource group name for the NetApp account to be created. 
-extends_documentation_fragment: - - netapp.azure.azure - - netapp.azure.azure_tags - - netapp.azure.netapp.azure_rm_netapp - -options: - name: - description: - - The name of the NetApp account. - required: true - type: str - location: - description: - - Resource location. - - Required for create. - type: str - - active_directories: - description: - - list of active directory dictionaries. - - The list is currently limited to a single active directory (ANF or Azure limit of one AD per subscription). - type: list - elements: dict - version_added: 21.2.0 - suboptions: - active_directory_id: - description: not used for create. Not needed for join. - type: str - dns: - description: list of DNS addresses. Required for create or join. - type: list - elements: str - domain: - description: Fully Qualified Active Directory DNS Domain Name. Required for create or join. - type: str - site: - description: The Active Directory site the service will limit Domain Controller discovery to. - type: str - smb_server_name: - description: Prefix for creating the SMB server's computer account name in the Active Directory domain. Required for create or join. - type: str - organizational_unit: - description: LDAP Path for the Organization Unit where SMB Server machine accounts will be created (i.e. OU=SecondLevel,OU=FirstLevel). - type: str - username: - description: Credentials that have permissions to create SMB server machine account in the AD domain. Required for create or join. - type: str - password: - description: see username. If password is present, the module is not idempotent, as we cannot check the current value. Required for create or join. - type: str - aes_encryption: - description: If enabled, AES encryption will be enabled for SMB communication. - type: bool - ldap_signing: - description: Specifies whether or not the LDAP traffic needs to be signed. - type: bool - ad_name: - description: Name of the active directory machine. Used only while creating kerberos volume. 
- type: str - version_added: 21.3.0 - kdc_ip: - description: kdc server IP addresses for the active directory machine. Used only while creating kerberos volume. - type: str - version_added: 21.3.0 - server_root_ca_certificate: - description: - - When LDAP over SSL/TLS is enabled, the LDAP client is required to have base64 encoded Active Directory Certificate Service's - self-signed root CA certificate, this optional parameter is used only for dual protocol with LDAP user-mapping volumes. - type: str - version_added: 21.3.0 - state: - description: - - State C(present) will check that the NetApp account exists with the requested configuration. - - State C(absent) will delete the NetApp account. - default: present - choices: - - absent - - present - type: str - debug: - description: output details about current account if it exists. - type: bool - default: false - -''' -EXAMPLES = ''' - -- name: Create NetApp Azure Account - netapp.azure.azure_rm_netapp_account: - resource_group: myResourceGroup - name: testaccount - location: eastus - tags: {'abc': 'xyz', 'cba': 'zyx'} - -- name: Modify Azure NetApp account (Join AD) - netapp.azure.azure_rm_netapp_account: - resource_group: myResourceGroup - name: testaccount - location: eastus - active_directories: - - site: ln - dns: 10.10.10.10 - domain: domain.com - smb_server_name: dummy - password: xxxxxx - username: laurentn - -- name: Delete NetApp Azure Account - netapp.azure.azure_rm_netapp_account: - state: absent - resource_group: myResourceGroup - name: testaccount - location: eastus - -- name: Create Azure NetApp account (with AD) - netapp.azure.azure_rm_netapp_account: - resource_group: laurentngroupnodash - name: tests-netapp11 - location: eastus - tags: - creator: laurentn - use: Ansible - active_directories: - - site: ln - dns: 10.10.10.10 - domain: domain.com - smb_server_name: dummy - password: xxxxxx - username: laurentn -''' - -RETURN = ''' -''' - -import traceback - -HAS_AZURE_MGMT_NETAPP = False -IMPORT_ERRORS 
= list() - -try: - from msrestazure.azure_exceptions import CloudError - from azure.core.exceptions import AzureError, ResourceNotFoundError -except ImportError as exc: - IMPORT_ERRORS.append(str(exc)) - -try: - from azure.mgmt.netapp.models import NetAppAccount, NetAppAccountPatch, ActiveDirectory - HAS_AZURE_MGMT_NETAPP = True -except ImportError as exc: - IMPORT_ERRORS.append(str(exc)) - -from ansible.module_utils.basic import to_native -from ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common import AzureRMNetAppModuleBase -from ansible_collections.netapp.azure.plugins.module_utils.netapp_module import NetAppModule - - -class AzureRMNetAppAccount(AzureRMNetAppModuleBase): - ''' create, modify, delete account, including joining AD domain - ''' - def __init__(self): - - self.module_arg_spec = dict( - resource_group=dict(type='str', required=True), - name=dict(type='str', required=True), - location=dict(type='str', required=False), - state=dict(choices=['present', 'absent'], default='present', type='str'), - active_directories=dict(type='list', elements='dict', options=dict( - active_directory_id=dict(type='str'), - dns=dict(type='list', elements='str'), - domain=dict(type='str'), - site=dict(type='str'), - smb_server_name=dict(type='str'), - organizational_unit=dict(type='str'), - username=dict(type='str'), - password=dict(type='str', no_log=True), - aes_encryption=dict(type='bool'), - ldap_signing=dict(type='bool'), - ad_name=dict(type='str'), - kdc_ip=dict(type='str'), - server_root_ca_certificate=dict(type='str', no_log=True), - )), - debug=dict(type='bool', default=False) - ) - - self.na_helper = NetAppModule() - self.parameters = dict() - self.debug = list() - self.warnings = list() - - # import errors are handled in AzureRMModuleBase - super(AzureRMNetAppAccount, self).__init__(derived_arg_spec=self.module_arg_spec, - required_if=[('state', 'present', ['location'])], - supports_check_mode=True) - - def 
get_azure_netapp_account(self): - """ - Returns NetApp Account object for an existing account - Return None if account does not exist - """ - try: - account_get = self.netapp_client.accounts.get(self.parameters['resource_group'], self.parameters['name']) - except (CloudError, ResourceNotFoundError): # account does not exist - return None - account = vars(account_get) - ads = None - if account.get('active_directories') is not None: - ads = list() - for each_ad in account.get('active_directories'): - ad_dict = vars(each_ad) - dns = ad_dict.get('dns') - if dns is not None: - ad_dict['dns'] = sorted(dns.split(',')) - ads.append(ad_dict) - account['active_directories'] = ads - return account - - def create_account_request_body(self, modify=None): - """ - Create an Azure NetApp Account Request Body - :return: None - """ - options = dict() - location = None - for attr in ('location', 'tags', 'active_directories'): - value = self.parameters.get(attr) - if attr == 'location' and modify is None: - location = value - continue - if value is not None: - if modify is None or attr in modify: - if attr == 'active_directories': - ads = list() - for ad_dict in value: - if ad_dict.get('dns') is not None: - # API expects a string of comma separated elements - ad_dict['dns'] = ','.join(ad_dict['dns']) - ads.append(ActiveDirectory(**self.na_helper.filter_out_none_entries(ad_dict))) - value = ads - options[attr] = value - if modify is None: - if location is None: - self.module.fail_json(msg="Error: 'location' is a required parameter") - return NetAppAccount(location=location, **options) - return NetAppAccountPatch(**options) - - def create_azure_netapp_account(self): - """ - Create an Azure NetApp Account - :return: None - """ - account_body = self.create_account_request_body() - try: - response = self.get_method('accounts', 'create_or_update')(body=account_body, - resource_group_name=self.parameters['resource_group'], - account_name=self.parameters['name']) - while response.done() is 
not True: - response.result(10) - except (CloudError, AzureError) as error: - self.module.fail_json(msg='Error creating Azure NetApp account %s: %s' - % (self.parameters['name'], to_native(error)), - exception=traceback.format_exc()) - - def update_azure_netapp_account(self, modify): - """ - Create an Azure NetApp Account - :return: None - """ - account_body = self.create_account_request_body(modify) - try: - response = self.get_method('accounts', 'update')(body=account_body, - resource_group_name=self.parameters['resource_group'], - account_name=self.parameters['name']) - while response.done() is not True: - response.result(10) - except (CloudError, AzureError) as error: - self.module.fail_json(msg='Error creating Azure NetApp account %s: %s' - % (self.parameters['name'], to_native(error)), - exception=traceback.format_exc()) - - def delete_azure_netapp_account(self): - """ - Delete an Azure NetApp Account - :return: None - """ - try: - response = self.get_method('accounts', 'delete')(resource_group_name=self.parameters['resource_group'], - account_name=self.parameters['name']) - while response.done() is not True: - response.result(10) - except (CloudError, AzureError) as error: - self.module.fail_json(msg='Error deleting Azure NetApp account %s: %s' - % (self.parameters['name'], to_native(error)), - exception=traceback.format_exc()) - - def get_changes_in_ads(self, current, desired): - c_ads = current.get('active_directories') - d_ads = desired.get('active_directories') - if not c_ads: - return desired.get('active_directories'), None - if not d_ads: - return None, current.get('active_directories') - if len(c_ads) > 1 or len(d_ads) > 1: - msg = 'Error checking for AD, currently only one AD is supported.' - if len(c_ads) > 1: - msg += ' Current: %s.' % str(c_ads) - if len(d_ads) > 1: - msg += ' Desired: %s.' 
% str(d_ads) - self.module.fail_json(msg='Error checking for AD, currently only one AD is supported') - changed = False - d_ad = d_ads[0] - c_ad = c_ads[0] - for key, value in c_ad.items(): - if key == 'password': - if d_ad.get(key) is None: - continue - self.warnings.append("module is not idempotent if 'password:' is present") - if d_ad.get(key) is None: - d_ad[key] = value - elif d_ad.get(key) != value: - changed = True - self.debug.append("key: %s, value %s" % (key, value)) - if changed: - return [d_ad], None - return None, None - - def exec_module(self, **kwargs): - - # unlikely - self.fail_when_import_errors(IMPORT_ERRORS, HAS_AZURE_MGMT_NETAPP) - - # set up parameters according to our initial list - for key in list(self.module_arg_spec): - self.parameters[key] = kwargs[key] - # and common parameter - for key in ['tags']: - if key in kwargs: - self.parameters[key] = kwargs[key] - - current = self.get_azure_netapp_account() - modify = None - cd_action = self.na_helper.get_cd_action(current, self.parameters) - self.debug.append('current: %s' % str(current)) - if current is not None and cd_action is None: - ads_to_add, ads_to_delete = self.get_changes_in_ads(current, self.parameters) - self.parameters.pop('active_directories', None) - if ads_to_add: - self.parameters['active_directories'] = ads_to_add - if ads_to_delete: - self.module.fail_json(msg="Error: API does not support unjoining an AD", debug=self.debug) - modify = self.na_helper.get_modified_attributes(current, self.parameters) - if 'tags' in modify: - dummy, modify['tags'] = self.update_tags(current.get('tags')) - - if self.na_helper.changed: - if self.module.check_mode: - pass - else: - if cd_action == 'create': - self.create_azure_netapp_account() - elif cd_action == 'delete': - self.delete_azure_netapp_account() - elif modify: - self.update_azure_netapp_account(modify) - results = dict( - changed=self.na_helper.changed, - modify=modify - ) - if self.warnings: - results['warnings'] = self.warnings - 
if self.parameters['debug']: - results['debug'] = self.debug - self.module.exit_json(**results) - - -def main(): - AzureRMNetAppAccount() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/azure/plugins/modules/azure_rm_netapp_capacity_pool.py b/ansible_collections/netapp/azure/plugins/modules/azure_rm_netapp_capacity_pool.py deleted file mode 100644 index 9d099a03f..000000000 --- a/ansible_collections/netapp/azure/plugins/modules/azure_rm_netapp_capacity_pool.py +++ /dev/null @@ -1,259 +0,0 @@ -#!/usr/bin/python -# -# (c) 2019, NetApp, Inc -# GNU General Public License v3.0+ -# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -""" -azure_rm_netapp_capacity_pool -""" - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - -DOCUMENTATION = ''' ---- -module: azure_rm_netapp_capacity_pool - -short_description: Manage NetApp Azure Files capacity pool -version_added: 19.10.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> - -description: - - Create and delete NetApp Azure capacity pool. - Provide the Resource group name for the capacity pool to be created. - - Resize NetApp Azure capacity pool -extends_documentation_fragment: - - netapp.azure.azure - - netapp.azure.azure_tags - - netapp.azure.netapp.azure_rm_netapp - -options: - name: - description: - - The name of the capacity pool. - required: true - type: str - account_name: - description: - - The name of the NetApp account. - required: true - type: str - location: - description: - - Resource location. - - Required for create. - type: str - size: - description: - - Provisioned size of the pool (in chunks). Allowed values are in 4TiB chunks. - - Provide number to be multiplied to 4TiB. - - Required for create. - default: 1 - type: int - service_level: - description: - - The service level of the file system. - - Required for create. 
- choices: ['Standard', 'Premium', 'Ultra'] - type: str - version_added: "20.5.0" - state: - description: - - State C(present) will check that the capacity pool exists with the requested configuration. - - State C(absent) will delete the capacity pool. - default: present - choices: ['present', 'absent'] - type: str - -''' -EXAMPLES = ''' - -- name: Create Azure NetApp capacity pool - netapp.azure.azure_rm_netapp_capacity_pool: - resource_group: myResourceGroup - account_name: tests-netapp - name: tests-pool - location: eastus - size: 2 - service_level: Standard - -- name: Resize Azure NetApp capacity pool - netapp.azure.azure_rm_netapp_capacity_pool: - resource_group: myResourceGroup - account_name: tests-netapp - name: tests-pool - location: eastus - size: 3 - service_level: Standard - -- name: Delete Azure NetApp capacity pool - netapp.azure.azure_rm_netapp_capacity_pool: - state: absent - resource_group: myResourceGroup - account_name: tests-netapp - name: tests-pool - -''' - -RETURN = ''' -''' - -import traceback - -AZURE_OBJECT_CLASS = 'NetAppAccount' -HAS_AZURE_MGMT_NETAPP = False -IMPORT_ERRORS = list() -SIZE_POOL = 4398046511104 - -try: - from msrestazure.azure_exceptions import CloudError - from azure.core.exceptions import AzureError, ResourceNotFoundError -except ImportError as exc: - IMPORT_ERRORS.append(str(exc)) - -try: - from azure.mgmt.netapp.models import CapacityPool - HAS_AZURE_MGMT_NETAPP = True -except ImportError as exc: - IMPORT_ERRORS.append(str(exc)) - -from ansible.module_utils.basic import to_native -from ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common import AzureRMNetAppModuleBase -from ansible_collections.netapp.azure.plugins.module_utils.netapp_module import NetAppModule - - -class AzureRMNetAppCapacityPool(AzureRMNetAppModuleBase): - """ create, modify, delete a capacity pool """ - def __init__(self): - - self.module_arg_spec = dict( - resource_group=dict(type='str', required=True), - 
name=dict(type='str', required=True), - account_name=dict(type='str', required=True), - location=dict(type='str', required=False), - state=dict(choices=['present', 'absent'], default='present', type='str'), - size=dict(type='int', required=False, default=1), - service_level=dict(type='str', required=False, choices=['Standard', 'Premium', 'Ultra']), - ) - - self.na_helper = NetAppModule() - self.parameters = dict() - - # import errors are handled in AzureRMModuleBase - super(AzureRMNetAppCapacityPool, self).__init__(derived_arg_spec=self.module_arg_spec, - required_if=[('state', 'present', ['location', 'service_level'])], - supports_check_mode=True) - - def get_azure_netapp_capacity_pool(self): - """ - Returns capacity pool object for an existing pool - Return None if capacity pool does not exist - """ - try: - capacity_pool_get = self.netapp_client.pools.get(self.parameters['resource_group'], - self.parameters['account_name'], self.parameters['name']) - except (CloudError, ResourceNotFoundError): # capacity pool does not exist - return None - return capacity_pool_get - - def create_azure_netapp_capacity_pool(self): - """ - Create a capacity pool for the given Azure NetApp Account - :return: None - """ - options = self.na_helper.get_not_none_values_from_dict(self.parameters, ['location', 'service_level', 'size', 'tags']) - capacity_pool_body = CapacityPool(**options) - try: - response = self.get_method('pools', 'create_or_update')(body=capacity_pool_body, resource_group_name=self.parameters['resource_group'], - account_name=self.parameters['account_name'], - pool_name=self.parameters['name']) - while response.done() is not True: - response.result(10) - - except (CloudError, AzureError) as error: - self.module.fail_json(msg='Error creating capacity pool %s for Azure NetApp account %s: %s' - % (self.parameters['name'], self.parameters['account_name'], to_native(error)), - exception=traceback.format_exc()) - - def modify_azure_netapp_capacity_pool(self, modify): - """ 
- Modify a capacity pool for the given Azure NetApp Account - :return: None - """ - options = self.na_helper.get_not_none_values_from_dict(self.parameters, ['location', 'service_level', 'size', 'tags']) - capacity_pool_body = CapacityPool(**options) - try: - response = self.get_method('pools', 'update')(body=capacity_pool_body, resource_group_name=self.parameters['resource_group'], - account_name=self.parameters['account_name'], - pool_name=self.parameters['name']) - while response.done() is not True: - response.result(10) - - except (CloudError, AzureError) as error: - self.module.fail_json(msg='Error modifying capacity pool %s for Azure NetApp account %s: %s' - % (self.parameters['name'], self.parameters['account_name'], to_native(error)), - exception=traceback.format_exc()) - - def delete_azure_netapp_capacity_pool(self): - """ - Delete a capacity pool for the given Azure NetApp Account - :return: None - """ - try: - response = self.get_method('pools', 'delete')(resource_group_name=self.parameters['resource_group'], - account_name=self.parameters['account_name'], pool_name=self.parameters['name']) - while response.done() is not True: - response.result(10) - - except (CloudError, AzureError) as error: - self.module.fail_json(msg='Error deleting capacity pool %s for Azure NetApp account %s: %s' - % (self.parameters['name'], self.parameters['name'], to_native(error)), - exception=traceback.format_exc()) - - def exec_module(self, **kwargs): - - # unlikely - self.fail_when_import_errors(IMPORT_ERRORS, HAS_AZURE_MGMT_NETAPP) - - # set up parameters according to our initial list - for key in list(self.module_arg_spec): - self.parameters[key] = kwargs[key] - # and common parameter - for key in ['tags']: - if key in kwargs: - self.parameters[key] = kwargs[key] - if 'size' in self.parameters: - self.parameters['size'] *= SIZE_POOL - - modify = {} - current = self.get_azure_netapp_capacity_pool() - cd_action = self.na_helper.get_cd_action(current, self.parameters) - if 
cd_action is None and self.parameters['state'] == 'present': - current = vars(current) - # get_azure_netapp_capacity_pool() returns pool name with account name appended in front of it like 'account/pool' - current['name'] = self.parameters['name'] - modify = self.na_helper.get_modified_attributes(current, self.parameters) - if 'tags' in modify: - dummy, modify['tags'] = self.update_tags(current.get('tags')) - - if self.na_helper.changed: - if self.module.check_mode: - pass - else: - if cd_action == 'create': - self.create_azure_netapp_capacity_pool() - elif cd_action == 'delete': - self.delete_azure_netapp_capacity_pool() - elif modify: - self.modify_azure_netapp_capacity_pool(modify) - - self.module.exit_json(changed=self.na_helper.changed, modify=modify) - - -def main(): - AzureRMNetAppCapacityPool() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/azure/plugins/modules/azure_rm_netapp_snapshot.py b/ansible_collections/netapp/azure/plugins/modules/azure_rm_netapp_snapshot.py deleted file mode 100644 index 212f10861..000000000 --- a/ansible_collections/netapp/azure/plugins/modules/azure_rm_netapp_snapshot.py +++ /dev/null @@ -1,226 +0,0 @@ -#!/usr/bin/python -# -# (c) 2019, NetApp, Inc -# GNU General Public License v3.0+ -# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -""" -azure_rm_netapp_snapshot -""" - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - -DOCUMENTATION = ''' ---- -module: azure_rm_netapp_snapshot - -short_description: Manage NetApp Azure Files Snapshot -version_added: 19.10.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> - -description: - - Create and delete NetApp Azure Snapshot. -extends_documentation_fragment: - - netapp.azure.azure - - netapp.azure.netapp.azure_rm_netapp - -options: - name: - description: - - The name of the snapshot. - required: true - type: str - volume_name: - description: - - The name of the volume. 
- required: true - type: str - pool_name: - description: - - The name of the capacity pool. - required: true - type: str - account_name: - description: - - The name of the NetApp account. - required: true - type: str - location: - description: - - Resource location. - - Required for create. - type: str - state: - description: - - State C(present) will check that the snapshot exists with the requested configuration. - - State C(absent) will delete the snapshot. - default: present - choices: - - absent - - present - type: str - -''' -EXAMPLES = ''' - -- name: Create Azure NetApp Snapshot - netapp.azure.azure_rm_netapp_snapshot: - resource_group: myResourceGroup - account_name: tests-netapp - pool_name: tests-pool - volume_name: tests-volume2 - name: tests-snapshot - location: eastus - -- name: Delete Azure NetApp Snapshot - netapp.azure.azure_rm_netapp_snapshot: - state: absent - resource_group: myResourceGroup - account_name: tests-netapp - pool_name: tests-pool - volume_name: tests-volume2 - name: tests-snapshot - -''' - -RETURN = ''' -''' - -import traceback - -AZURE_OBJECT_CLASS = 'NetAppAccount' -HAS_AZURE_MGMT_NETAPP = False -IMPORT_ERRORS = list() - -try: - from msrestazure.azure_exceptions import CloudError - from azure.core.exceptions import AzureError, ResourceNotFoundError -except ImportError as exc: - IMPORT_ERRORS.append(str(exc)) - -try: - from azure.mgmt.netapp.models import Snapshot - HAS_AZURE_MGMT_NETAPP = True -except ImportError as exc: - IMPORT_ERRORS.append(str(exc)) - -from ansible.module_utils.basic import to_native -from ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common import AzureRMNetAppModuleBase -from ansible_collections.netapp.azure.plugins.module_utils.netapp_module import NetAppModule - - -class AzureRMNetAppSnapshot(AzureRMNetAppModuleBase): - """ crate or delete snapshots """ - def __init__(self): - - self.module_arg_spec = dict( - resource_group=dict(type='str', required=True), - name=dict(type='str', 
required=True), - volume_name=dict(type='str', required=True), - pool_name=dict(type='str', required=True), - account_name=dict(type='str', required=True), - location=dict(type='str', required=False), - state=dict(choices=['present', 'absent'], default='present', type='str') - ) - self.na_helper = NetAppModule() - self.parameters = dict() - - # import errors are handled in AzureRMModuleBase - super(AzureRMNetAppSnapshot, self).__init__(derived_arg_spec=self.module_arg_spec, - required_if=[('state', 'present', ['location'])], - supports_check_mode=True, - supports_tags=False) - - def get_azure_netapp_snapshot(self): - """ - Returns snapshot object for an existing snapshot - Return None if snapshot does not exist - """ - try: - snapshot_get = self.netapp_client.snapshots.get(self.parameters['resource_group'], self.parameters['account_name'], - self.parameters['pool_name'], self.parameters['volume_name'], - self.parameters['name']) - except (CloudError, ResourceNotFoundError): # snapshot does not exist - return None - return snapshot_get - - def create_azure_netapp_snapshot(self): - """ - Create a snapshot for the given Azure NetApp Account - :return: None - """ - kw_args = dict( - resource_group_name=self.parameters['resource_group'], - account_name=self.parameters['account_name'], - pool_name=self.parameters['pool_name'], - volume_name=self.parameters['volume_name'], - snapshot_name=self.parameters['name'] - ) - if self.new_style: - kw_args['body'] = Snapshot( - location=self.parameters['location'] - ) - else: - kw_args['location'] = self.parameters['location'] - try: - result = self.get_method('snapshots', 'create')(**kw_args) - # waiting till the status turns Succeeded - while result.done() is not True: - result.result(10) - - except (CloudError, AzureError) as error: - self.module.fail_json(msg='Error creating snapshot %s for Azure NetApp account %s: %s' - % (self.parameters['name'], self.parameters['account_name'], to_native(error)), - 
exception=traceback.format_exc()) - - def delete_azure_netapp_snapshot(self): - """ - Delete a snapshot for the given Azure NetApp Account - :return: None - """ - try: - result = self.get_method('snapshots', 'delete')(resource_group_name=self.parameters['resource_group'], - account_name=self.parameters['account_name'], - pool_name=self.parameters['pool_name'], - volume_name=self.parameters['volume_name'], - snapshot_name=self.parameters['name']) - # waiting till the status turns Succeeded - while result.done() is not True: - result.result(10) - - except (CloudError, AzureError) as error: - self.module.fail_json(msg='Error deleting snapshot %s for Azure NetApp account %s: %s' - % (self.parameters['name'], self.parameters['account_name'], to_native(error)), - exception=traceback.format_exc()) - - def exec_module(self, **kwargs): - - # unlikely - self.fail_when_import_errors(IMPORT_ERRORS, HAS_AZURE_MGMT_NETAPP) - - # set up parameters according to our initial list - for key in list(self.module_arg_spec): - self.parameters[key] = kwargs[key] - - current = self.get_azure_netapp_snapshot() - cd_action = self.na_helper.get_cd_action(current, self.parameters) - - if self.na_helper.changed: - if self.module.check_mode: - pass - else: - if cd_action == 'create': - self.create_azure_netapp_snapshot() - elif cd_action == 'delete': - self.delete_azure_netapp_snapshot() - - self.module.exit_json(changed=self.na_helper.changed) - - -def main(): - AzureRMNetAppSnapshot() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/azure/plugins/modules/azure_rm_netapp_volume.py b/ansible_collections/netapp/azure/plugins/modules/azure_rm_netapp_volume.py deleted file mode 100644 index 487787ee7..000000000 --- a/ansible_collections/netapp/azure/plugins/modules/azure_rm_netapp_volume.py +++ /dev/null @@ -1,399 +0,0 @@ -#!/usr/bin/python -# -# (c) 2019, NetApp, Inc -# GNU General Public License v3.0+ -# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - 
-''' -azure_rm_netapp_volume -''' - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - -DOCUMENTATION = ''' ---- -module: azure_rm_netapp_volume - -short_description: Manage NetApp Azure Files Volume -version_added: 19.10.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> - -description: - - Create and delete NetApp Azure volume. -extends_documentation_fragment: - - netapp.azure.azure - - netapp.azure.azure_tags - - netapp.azure.netapp.azure_rm_netapp - -options: - name: - description: - - The name of the volume. - required: true - type: str - file_path: - description: - - A unique file path for the volume. Used when creating mount targets. - type: str - pool_name: - description: - - The name of the capacity pool. - required: true - type: str - account_name: - description: - - The name of the NetApp account. - required: true - type: str - location: - description: - - Resource location. - - Required for create. - type: str - subnet_name: - description: - - Azure resource name for a delegated subnet. Must have the delegation Microsoft.NetApp/volumes. - - Provide name of the subnet ID. - - Required for create. - type: str - aliases: ['subnet_id'] - version_added: 21.1.0 - virtual_network: - description: - - The name of the virtual network required for the subnet to create a volume. - - Required for create. - type: str - service_level: - description: - - The service level of the file system. - - default is Premium. - type: str - choices: ['Premium', 'Standard', 'Ultra'] - vnet_resource_group_for_subnet: - description: - - Only required if virtual_network to be used is of different resource_group. - - Name of the resource group for virtual_network and subnet_name to be used. - type: str - version_added: "20.5.0" - size: - description: - - Provisioned size of the volume (in GiB). - - Minimum size is 100 GiB. Upper limit is 100TiB - - default is 100GiB. 
- version_added: "20.5.0" - type: int - protocol_types: - description: - - Protocol types - NFSv3, NFSv4.1, CIFS (for SMB). - type: list - elements: str - version_added: 21.2.0 - state: - description: - - State C(present) will check that the volume exists with the requested configuration. - - State C(absent) will delete the volume. - default: present - choices: ['present', 'absent'] - type: str - feature_flags: - description: - - Enable or disable a new feature. - - This can be used to enable an experimental feature or disable a new feature that breaks backward compatibility. - - Supported keys and values are subject to change without notice. Unknown keys are ignored. - type: dict - version_added: 21.9.0 -notes: - - feature_flags is setting ignore_change_ownership_mode to true by default to bypass a 'change ownership mode' issue with azure-mgmt-netapp 4.0.0. -''' -EXAMPLES = ''' - -- name: Create Azure NetApp volume - netapp.azure.azure_rm_netapp_volume: - resource_group: myResourceGroup - account_name: tests-netapp - pool_name: tests-pool - name: tests-volume2 - location: eastus - file_path: tests-volume2 - virtual_network: myVirtualNetwork - vnet_resource_group_for_subnet: myVirtualNetworkResourceGroup - subnet_name: test - service_level: Ultra - size: 100 - -- name: Delete Azure NetApp volume - netapp.azure.azure_rm_netapp_volume: - state: absent - resource_group: myResourceGroup - account_name: tests-netapp - pool_name: tests-pool - name: tests-volume2 - -''' - -RETURN = ''' -mount_path: - description: Returns mount_path of the Volume - returned: always - type: str - -''' - -import traceback - -AZURE_OBJECT_CLASS = 'NetAppAccount' -HAS_AZURE_MGMT_NETAPP = False -IMPORT_ERRORS = [] -ONE_GIB = 1073741824 - -try: - from msrestazure.azure_exceptions import CloudError - from msrest.exceptions import ValidationError - from azure.core.exceptions import AzureError, ResourceNotFoundError -except ImportError as exc: - IMPORT_ERRORS.append(str(exc)) - -try: - from 
azure.mgmt.netapp.models import Volume, ExportPolicyRule, VolumePropertiesExportPolicy, VolumePatch - HAS_AZURE_MGMT_NETAPP = True -except ImportError as exc: - IMPORT_ERRORS.append(str(exc)) - -from ansible.module_utils.basic import to_native -from ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common import AzureRMNetAppModuleBase -from ansible_collections.netapp.azure.plugins.module_utils.netapp_module import NetAppModule - - -class AzureRMNetAppVolume(AzureRMNetAppModuleBase): - ''' create or delete a volume ''' - - def __init__(self): - - self.module_arg_spec = dict( - resource_group=dict(type='str', required=True), - name=dict(type='str', required=True), - file_path=dict(type='str', required=False), - pool_name=dict(type='str', required=True), - account_name=dict(type='str', required=True), - location=dict(type='str', required=False), - state=dict(choices=['present', 'absent'], default='present', type='str'), - subnet_name=dict(type='str', required=False, aliases=['subnet_id']), - virtual_network=dict(type='str', required=False), - size=dict(type='int', required=False), - vnet_resource_group_for_subnet=dict(type='str', required=False), - service_level=dict(type='str', required=False, choices=['Premium', 'Standard', 'Ultra']), - protocol_types=dict(type='list', elements='str'), - feature_flags=dict(type='dict') - ) - self.na_helper = NetAppModule() - self.parameters = {} - - # import errors are handled in AzureRMModuleBase - super(AzureRMNetAppVolume, self).__init__(derived_arg_spec=self.module_arg_spec, - required_if=[('state', 'present', ['location', 'file_path', 'subnet_name', 'virtual_network']), - ], - supports_check_mode=True) - - @staticmethod - def dict_from_volume_object(volume_object): - - def replace_list_of_objects_with_list_of_dicts(adict, key): - if adict.get(key): - adict[key] = [vars(x) for x in adict[key]] - - current_dict = vars(volume_object) - attr = 'subnet_id' - if attr in current_dict: - current_dict['subnet_name'] 
= current_dict.pop(attr).split('/')[-1] - attr = 'mount_targets' - replace_list_of_objects_with_list_of_dicts(current_dict, attr) - attr = 'export_policy' - if current_dict.get(attr): - attr_dict = vars(current_dict[attr]) - replace_list_of_objects_with_list_of_dicts(attr_dict, 'rules') - current_dict[attr] = attr_dict - return current_dict - - def get_azure_netapp_volume(self): - """ - Returns volume object for an existing volume - Return None if volume does not exist - """ - try: - volume_get = self.netapp_client.volumes.get(self.parameters['resource_group'], self.parameters['account_name'], - self.parameters['pool_name'], self.parameters['name']) - except (CloudError, ResourceNotFoundError): # volume does not exist - return None - return self.dict_from_volume_object(volume_get) - - def get_export_policy_rules(self): - # ExportPolicyRule(rule_index: int=None, unix_read_only: bool=None, unix_read_write: bool=None, - # kerberos5_read_only: bool=False, kerberos5_read_write: bool=False, kerberos5i_read_only: bool=False, - # kerberos5i_read_write: bool=False, kerberos5p_read_only: bool=False, kerberos5p_read_write: bool=False, - # cifs: bool=None, nfsv3: bool=None, nfsv41: bool=None, allowed_clients: str=None, has_root_access: bool=True - ptypes = self.parameters.get('protocol_types') - if ptypes is None: - return None - ptypes = [x.lower() for x in ptypes] - if 'nfsv4.1' in ptypes: - ptypes.append('nfsv41') - # only create a policy when NFSv4 is used (for now) - if 'nfsv41' not in ptypes: - return None - options = dict( - rule_index=1, - allowed_clients='0.0.0.0/0', - unix_read_write=True) - if self.has_feature('ignore_change_ownership_mode') and self.sdk_version >= '4.0.0': - # https://github.com/Azure/azure-sdk-for-python/issues/20356 - options['chown_mode'] = None - for protocol in ('cifs', 'nfsv3', 'nfsv41'): - options[protocol] = protocol in ptypes - return VolumePropertiesExportPolicy(rules=[ExportPolicyRule(**options)]) - - def 
create_azure_netapp_volume(self): - """ - Create a volume for the given Azure NetApp Account - :return: None - """ - options = self.na_helper.get_not_none_values_from_dict(self.parameters, ['protocol_types', 'service_level', 'tags', 'usage_threshold']) - rules = self.get_export_policy_rules() - if rules is not None: - # TODO: other options to expose ? - # options['throughput_mibps'] = 1.6 - # options['encryption_key_source'] = 'Microsoft.NetApp' - # options['security_style'] = 'Unix' - # options['unix_permissions'] = '0770' - # required for NFSv4 - options['export_policy'] = rules - subnet_id = '/subscriptions/%s/resourceGroups/%s/providers/Microsoft.Network/virtualNetworks/%s/subnets/%s'\ - % (self.azure_auth.subscription_id, - self.parameters['resource_group'] if self.parameters.get('vnet_resource_group_for_subnet') is None - else self.parameters['vnet_resource_group_for_subnet'], - self.parameters['virtual_network'], - self.parameters['subnet_name']) - volume_body = Volume( - location=self.parameters['location'], - creation_token=self.parameters['file_path'], - subnet_id=subnet_id, - **options - ) - try: - result = self.get_method('volumes', 'create_or_update')(body=volume_body, resource_group_name=self.parameters['resource_group'], - account_name=self.parameters['account_name'], - pool_name=self.parameters['pool_name'], volume_name=self.parameters['name']) - # waiting till the status turns Succeeded - while result.done() is not True: - result.result(10) - except (CloudError, ValidationError, AzureError) as error: - self.module.fail_json(msg='Error creating volume %s for Azure NetApp account %s and subnet ID %s: %s' - % (self.parameters['name'], self.parameters['account_name'], subnet_id, to_native(error)), - exception=traceback.format_exc()) - - def modify_azure_netapp_volume(self): - """ - Modify a volume for the given Azure NetApp Account - :return: None - """ - options = self.na_helper.get_not_none_values_from_dict(self.parameters, ['tags', 
'usage_threshold']) - volume_body = VolumePatch( - **options - ) - try: - result = self.get_method('volumes', 'update')(body=volume_body, resource_group_name=self.parameters['resource_group'], - account_name=self.parameters['account_name'], - pool_name=self.parameters['pool_name'], volume_name=self.parameters['name']) - # waiting till the status turns Succeeded - while result.done() is not True: - result.result(10) - except (CloudError, ValidationError, AzureError) as error: - self.module.fail_json(msg='Error modifying volume %s for Azure NetApp account %s: %s' - % (self.parameters['name'], self.parameters['account_name'], to_native(error)), - exception=traceback.format_exc()) - - def delete_azure_netapp_volume(self): - """ - Delete a volume for the given Azure NetApp Account - :return: None - """ - try: - result = self.get_method('volumes', 'delete')(resource_group_name=self.parameters['resource_group'], - account_name=self.parameters['account_name'], - pool_name=self.parameters['pool_name'], volume_name=self.parameters['name']) - # waiting till the status turns Succeeded - while result.done() is not True: - result.result(10) - except (CloudError, AzureError) as error: - self.module.fail_json(msg='Error deleting volume %s for Azure NetApp account %s: %s' - % (self.parameters['name'], self.parameters['account_name'], to_native(error)), - exception=traceback.format_exc()) - - def validate_modify(self, modify, current): - disallowed = dict(modify) - disallowed.pop('tags', None) - disallowed.pop('usage_threshold', None) - if disallowed: - self.module.fail_json(msg="Error: the following properties cannot be modified: %s. 
Current: %s" % (repr(disallowed), repr(current))) - - def exec_module(self, **kwargs): - - # unlikely - self.fail_when_import_errors(IMPORT_ERRORS, HAS_AZURE_MGMT_NETAPP) - - # set up parameters according to our initial list - for key in list(self.module_arg_spec): - self.parameters[key] = kwargs[key] - # and common parameter - for key in ['tags']: - if key in kwargs: - self.parameters[key] = kwargs[key] - - # API is using 'usage_threshold' for 'size', and the unit is bytes - if self.parameters.get('size') is not None: - self.parameters['usage_threshold'] = ONE_GIB * self.parameters.pop('size') - - modify = None - current = self.get_azure_netapp_volume() - cd_action = self.na_helper.get_cd_action(current, self.parameters) - if cd_action is None and current: - # ignore change in name - name = current.pop('name', None) - modify = self.na_helper.get_modified_attributes(current, self.parameters) - if name is not None: - current['name'] = name - if 'tags' in modify: - dummy, modify['tags'] = self.update_tags(current.get('tags')) - self.validate_modify(modify, current) - - if self.na_helper.changed and not self.module.check_mode: - if cd_action == 'create': - self.create_azure_netapp_volume() - elif cd_action == 'delete': - self.delete_azure_netapp_volume() - elif modify: - self.modify_azure_netapp_volume() - - def get_mount_info(return_info): - if return_info is not None and return_info.get('mount_targets'): - return '%s:/%s' % (return_info['mount_targets'][0]['ip_address'], return_info['creation_token']) - return None - - mount_info = '' - if self.parameters['state'] == 'present': - return_info = self.get_azure_netapp_volume() - if return_info is None and not self.module.check_mode: - self.module.fail_json(msg='Error: volume %s was created successfully, but cannot be found.' 
% self.parameters['name']) - mount_info = get_mount_info(return_info) - if mount_info is None and not self.module.check_mode: - self.module.fail_json(msg='Error: volume %s was created successfully, but mount target(s) cannot be found - volume details: %s.' - % (self.parameters['name'], str(return_info))) - self.module.exit_json(changed=self.na_helper.changed, mount_path=mount_info, modify=modify) - - -def main(): - AzureRMNetAppVolume() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/azure/requirements.txt b/ansible_collections/netapp/azure/requirements.txt deleted file mode 100644 index 4badbc6ae..000000000 --- a/ansible_collections/netapp/azure/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -azure-mgmt-netapp -requests -xmltodict
\ No newline at end of file diff --git a/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_account/aliases b/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_account/aliases deleted file mode 100644 index 759eafa2d..000000000 --- a/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_account/aliases +++ /dev/null @@ -1,3 +0,0 @@ -cloud/azure -shippable/azure/group3 -destructive diff --git a/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_account/meta/main.yml b/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_account/meta/main.yml deleted file mode 100644 index 95e1952f9..000000000 --- a/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_account/meta/main.yml +++ /dev/null @@ -1,2 +0,0 @@ -dependencies: - - setup_azure diff --git a/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_account/tasks/main.yml b/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_account/tasks/main.yml deleted file mode 100644 index 9fa627e48..000000000 --- a/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_account/tasks/main.yml +++ /dev/null @@ -1,41 +0,0 @@ -- name: Create Azure NetApp account - azure_rm_netapp_account: - resource_group: laurentngroupnodash - name: tests-netapp1 - location: eastus - tags: {'test1': 'tesssttt', 'abc': 'xyz'} - register: output - -- assert: - that: output.changed - -- name: Create Azure NetApp account (Idempotency) - azure_rm_netapp_account: - resource_group: laurentngroupnodash - name: tests-netapp1 - location: eastus - tags: {'test1': 'tesssttt', 'abc': 'xyz'} - register: output - -- assert: - that: not output.changed - -- name: Delete Azure NetApp account - azure_rm_netapp_account: - state: absent - resource_group: laurentngroupnodash - name: tests-netapp1 - register: output - -- assert: - that: output.changed - -- name: Delete 
Azure NetApp account (Idempotency) - azure_rm_netapp_account: - state: absent - resource_group: laurentngroupnodash - name: tests-netapp1 - register: output - -- assert: - that: not output.changed diff --git a/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_capacity_pool/aliases b/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_capacity_pool/aliases deleted file mode 100644 index 3a0c0dc68..000000000 --- a/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_capacity_pool/aliases +++ /dev/null @@ -1,3 +0,0 @@ -cloud/azure -shippable/azure/group3 -destructive
\ No newline at end of file diff --git a/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_capacity_pool/meta/main.yml b/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_capacity_pool/meta/main.yml deleted file mode 100644 index 48f5726d8..000000000 --- a/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_capacity_pool/meta/main.yml +++ /dev/null @@ -1,2 +0,0 @@ -dependencies: - - setup_azure
\ No newline at end of file diff --git a/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_capacity_pool/tasks/main.yml b/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_capacity_pool/tasks/main.yml deleted file mode 100644 index 9bbae4ae7..000000000 --- a/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_capacity_pool/tasks/main.yml +++ /dev/null @@ -1,47 +0,0 @@ -- name: Create Azure NetApp capacity pool - azure_rm_netapp_capacity_pool: - resource_group: NetworkWatcherRG - account_name: ansibleazure - name: tests-poolsss - location: eastus - size: 2 - service_level: Standard - register: output - -- assert: - that: output.changed - -- name: Create Azure NetApp capacity pool (Idempotency) - azure_rm_netapp_capacity_pool: - resource_group: NetworkWatcherRG - account_name: ansibleazure - name: tests-poolsss - location: eastus - size: 2 - service_level: Standard - register: output - -- assert: - that: not output.changed - -- name: Delete Azure NetApp capacity pool - azure_rm_netapp_capacity_pool: - state: absent - resource_group: NetworkWatcherRG - account_name: ansibleazure - name: tests-poolsss - register: output - -- assert: - that: output.changed - -- name: Delete Azure NetApp capacity pool (Idempotency) - azure_rm_netapp_capacity_pool: - state: absent - resource_group: NetworkWatcherRG - account_name: ansibleazure - name: tests-poolsss - register: output - -- assert: - that: not output.changed
\ No newline at end of file diff --git a/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_snapshot/aliases b/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_snapshot/aliases deleted file mode 100644 index 759eafa2d..000000000 --- a/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_snapshot/aliases +++ /dev/null @@ -1,3 +0,0 @@ -cloud/azure -shippable/azure/group3 -destructive diff --git a/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_snapshot/meta/main.yml b/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_snapshot/meta/main.yml deleted file mode 100644 index 95e1952f9..000000000 --- a/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_snapshot/meta/main.yml +++ /dev/null @@ -1,2 +0,0 @@ -dependencies: - - setup_azure diff --git a/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_snapshot/tasks/main.yml b/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_snapshot/tasks/main.yml deleted file mode 100644 index c1c1cf5c2..000000000 --- a/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_snapshot/tasks/main.yml +++ /dev/null @@ -1,51 +0,0 @@ -- name: Create Azure NetApp snapshot - azure_rm_netapp_snapshot: - resource_group: laurentngroupnodash - account_name: tests-netapp - pool_name: test-pool - volume_name: tes - name: tests-snapshot - location: eastus - register: output - -- assert: - that: output.changed - -- name: Create Azure NetApp snapshot (Idempotency) - azure_rm_netapp_snapshot: - resource_group: laurentngroupnodash - account_name: tests-netapp - pool_name: test-pool - volume_name: tes - name: tests-snapshot - location: eastus - register: output - -- assert: - that: not output.changed - -- name: Delete Azure NetApp snapshot - azure_rm_netapp_snapshot: - state: absent - resource_group: laurentngroupnodash - account_name: 
tests-netapp - pool_name: test-pool - volume_name: tes - name: tests-snapshot - register: output - -- assert: - that: output.changed - -- name: Delete Azure NetApp snapshot (Idempotency) - azure_rm_netapp_snapshot: - state: absent - resource_group: laurentngroupnodash - account_name: tests-netapp - pool_name: test-pool - volume_name: tes - name: tests-snapshot - register: output - -- assert: - that: not output.changed diff --git a/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_volume/aliases b/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_volume/aliases deleted file mode 100644 index 759eafa2d..000000000 --- a/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_volume/aliases +++ /dev/null @@ -1,3 +0,0 @@ -cloud/azure -shippable/azure/group3 -destructive diff --git a/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_volume/meta/main.yml b/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_volume/meta/main.yml deleted file mode 100644 index 95e1952f9..000000000 --- a/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_volume/meta/main.yml +++ /dev/null @@ -1,2 +0,0 @@ -dependencies: - - setup_azure diff --git a/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_volume/tasks/main.yml b/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_volume/tasks/main.yml deleted file mode 100644 index c5b2a4262..000000000 --- a/ansible_collections/netapp/azure/tests/integration/targets/azure_rm_netapp_volume/tasks/main.yml +++ /dev/null @@ -1,57 +0,0 @@ -- name: Create Azure NetApp volume - azure_rm_netapp_volume: - resource_group: NetworkWatcherRG - account_name: ansibleazure - pool_name: tests-pool - name: tests-volume2 - location: eastus - size: 150 - file_path: tests-volume2 - virtual_network: azure_ansible - subnet_id: test - service_level: Standard - register: output - -- 
assert: - that: output.changed - -- name: Create Azure NetApp volume (Idempotency) - azure_rm_netapp_volume: - resource_group: NetworkWatcherRG - account_name: ansibleazure - pool_name: tests-pool - name: tests-volume2 - location: eastus - size: 150 - file_path: tests-volume2 - virtual_network: azure_ansible - subnet_id: test - service_level: Standard - register: output - -- assert: - that: not output.changed - -- name: Delete Azure NetApp volume - azure_rm_netapp_volume: - state: absent - resource_group: NetworkWatcherRG - account_name: ansibleazure - pool_name: tests-pool - name: tests-volume2 - register: output - -- assert: - that: output.changed - -- name: Delete Azure NetApp volume (Idempotency) - azure_rm_netapp_volume: - state: absent - resource_group: NetworkWatcherRG - account_name: ansibleazure - pool_name: tests-pool - name: tests-volume2 - register: output - -- assert: - that: not output.changed diff --git a/ansible_collections/netapp/azure/tests/runner/requirements/integration.cloud.azure.txt b/ansible_collections/netapp/azure/tests/runner/requirements/integration.cloud.azure.txt deleted file mode 100644 index e75cf1e2b..000000000 --- a/ansible_collections/netapp/azure/tests/runner/requirements/integration.cloud.azure.txt +++ /dev/null @@ -1 +0,0 @@ -azure-mgmt-netapp ; python_version >= '2.7' diff --git a/ansible_collections/netapp/azure/tests/runner/requirements/requirements-azure.txt b/ansible_collections/netapp/azure/tests/runner/requirements/requirements-azure.txt deleted file mode 100644 index 11852e2a5..000000000 --- a/ansible_collections/netapp/azure/tests/runner/requirements/requirements-azure.txt +++ /dev/null @@ -1,9 +0,0 @@ -azure-common==1.1.11 -azure-mgmt-compute==4.4.0 -azure-storage==0.35.1 -azure-mgmt-monitor==0.5.2 -azure-mgmt-network==2.3.0 -azure-mgmt-resource==2.1.0 -azure-mgmt-storage==3.1.0 -azure-mgmt-netapp ; python_version >= '2.7' -azure-cli diff --git 
a/ansible_collections/netapp/azure/tests/runner/requirements/unit.cloud.azure.txt b/ansible_collections/netapp/azure/tests/runner/requirements/unit.cloud.azure.txt deleted file mode 100644 index e75cf1e2b..000000000 --- a/ansible_collections/netapp/azure/tests/runner/requirements/unit.cloud.azure.txt +++ /dev/null @@ -1 +0,0 @@ -azure-mgmt-netapp ; python_version >= '2.7' diff --git a/ansible_collections/netapp/azure/tests/runner/requirements/units.txt b/ansible_collections/netapp/azure/tests/runner/requirements/units.txt deleted file mode 100644 index e75cf1e2b..000000000 --- a/ansible_collections/netapp/azure/tests/runner/requirements/units.txt +++ /dev/null @@ -1 +0,0 @@ -azure-mgmt-netapp ; python_version >= '2.7' diff --git a/ansible_collections/netapp/azure/tests/unit/compat/__init__.py b/ansible_collections/netapp/azure/tests/unit/compat/__init__.py deleted file mode 100644 index e69de29bb..000000000 --- a/ansible_collections/netapp/azure/tests/unit/compat/__init__.py +++ /dev/null diff --git a/ansible_collections/netapp/azure/tests/unit/compat/builtins.py b/ansible_collections/netapp/azure/tests/unit/compat/builtins.py deleted file mode 100644 index f60ee6782..000000000 --- a/ansible_collections/netapp/azure/tests/unit/compat/builtins.py +++ /dev/null @@ -1,33 +0,0 @@ -# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com> -# -# This file is part of Ansible -# -# Ansible is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Ansible is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Ansible. 
If not, see <http://www.gnu.org/licenses/>. - -# Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -# -# Compat for python2.7 -# - -# One unittest needs to import builtins via __import__() so we need to have -# the string that represents it -try: - import __builtin__ -except ImportError: - BUILTINS = 'builtins' -else: - BUILTINS = '__builtin__' diff --git a/ansible_collections/netapp/azure/tests/unit/compat/mock.py b/ansible_collections/netapp/azure/tests/unit/compat/mock.py deleted file mode 100644 index 0972cd2e8..000000000 --- a/ansible_collections/netapp/azure/tests/unit/compat/mock.py +++ /dev/null @@ -1,122 +0,0 @@ -# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com> -# -# This file is part of Ansible -# -# Ansible is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Ansible is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Ansible. If not, see <http://www.gnu.org/licenses/>. - -# Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -''' -Compat module for Python3.x's unittest.mock module -''' -import sys - -# Python 2.7 - -# Note: Could use the pypi mock library on python3.x as well as python2.x. 
It -# is the same as the python3 stdlib mock library - -try: - # Allow wildcard import because we really do want to import all of mock's - # symbols into this compat shim - # pylint: disable=wildcard-import,unused-wildcard-import - from unittest.mock import * -except ImportError: - # Python 2 - # pylint: disable=wildcard-import,unused-wildcard-import - try: - from mock import * - except ImportError: - print('You need the mock library installed on python2.x to run tests') - - -# Prior to 3.4.4, mock_open cannot handle binary read_data -if sys.version_info >= (3,) and sys.version_info < (3, 4, 4): - file_spec = None - - def _iterate_read_data(read_data): - # Helper for mock_open: - # Retrieve lines from read_data via a generator so that separate calls to - # readline, read, and readlines are properly interleaved - sep = b'\n' if isinstance(read_data, bytes) else '\n' - data_as_list = [l + sep for l in read_data.split(sep)] - - if data_as_list[-1] == sep: - # If the last line ended in a newline, the list comprehension will have an - # extra entry that's just a newline. Remove this. - data_as_list = data_as_list[:-1] - else: - # If there wasn't an extra newline by itself, then the file being - # emulated doesn't have a newline to end the last line remove the - # newline that our naive format() added - data_as_list[-1] = data_as_list[-1][:-1] - - for line in data_as_list: - yield line - - def mock_open(mock=None, read_data=''): - """ - A helper function to create a mock to replace the use of `open`. It works - for `open` called directly or used as a context manager. - - The `mock` argument is the mock object to configure. If `None` (the - default) then a `MagicMock` will be created for you, with the API limited - to methods or attributes available on standard file handles. - - `read_data` is a string for the `read` methoddline`, and `readlines` of the - file handle to return. This is an empty string by default. 
- """ - def _readlines_side_effect(*args, **kwargs): - if handle.readlines.return_value is not None: - return handle.readlines.return_value - return list(_data) - - def _read_side_effect(*args, **kwargs): - if handle.read.return_value is not None: - return handle.read.return_value - return type(read_data)().join(_data) - - def _readline_side_effect(): - if handle.readline.return_value is not None: - while True: - yield handle.readline.return_value - for line in _data: - yield line - - global file_spec - if file_spec is None: - import _io - file_spec = list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO)))) - - if mock is None: - mock = MagicMock(name='open', spec=open) - - handle = MagicMock(spec=file_spec) - handle.__enter__.return_value = handle - - _data = _iterate_read_data(read_data) - - handle.write.return_value = None - handle.read.return_value = None - handle.readline.return_value = None - handle.readlines.return_value = None - - handle.read.side_effect = _read_side_effect - handle.readline.side_effect = _readline_side_effect() - handle.readlines.side_effect = _readlines_side_effect - - mock.return_value = handle - return mock diff --git a/ansible_collections/netapp/azure/tests/unit/compat/unittest.py b/ansible_collections/netapp/azure/tests/unit/compat/unittest.py deleted file mode 100644 index 73a20cf8c..000000000 --- a/ansible_collections/netapp/azure/tests/unit/compat/unittest.py +++ /dev/null @@ -1,44 +0,0 @@ -# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com> -# -# This file is part of Ansible -# -# Ansible is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Ansible is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Ansible. If not, see <http://www.gnu.org/licenses/>. - -# Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -''' -Compat module for Python2.7's unittest module -''' - -import sys - -import pytest - -# Allow wildcard import because we really do want to import all of -# unittests's symbols into this compat shim -# pylint: disable=wildcard-import,unused-wildcard-import -if sys.version_info < (2, 7): - try: - # Need unittest2 on python2.6 - from unittest2 import * - except ImportError: - print('You need unittest2 installed on python2.6.x to run tests') - - class TestCase: - """ skip everything """ - pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as unittest2 may not be available') -else: - from unittest import * diff --git a/ansible_collections/netapp/azure/tests/unit/plugins/module_utils/test_netapp_module.py b/ansible_collections/netapp/azure/tests/unit/plugins/module_utils/test_netapp_module.py deleted file mode 100644 index fb83c464e..000000000 --- a/ansible_collections/netapp/azure/tests/unit/plugins/module_utils/test_netapp_module.py +++ /dev/null @@ -1,149 +0,0 @@ -# Copyright (c) 2018 NetApp -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -''' unit tests for module_utils netapp_module.py ''' -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -from ansible_collections.netapp.azure.tests.unit.compat import unittest -from ansible_collections.netapp.azure.plugins.module_utils.netapp_module import NetAppModule as na_helper - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - def test_get_cd_action_create(self): - ''' validate cd_action for create ''' - current = None - desired = {'state': 'present'} - my_obj = na_helper() - 
result = my_obj.get_cd_action(current, desired) - assert result == 'create' - - def test_get_cd_action_delete(self): - ''' validate cd_action for delete ''' - current = {'state': 'absent'} - desired = {'state': 'absent'} - my_obj = na_helper() - result = my_obj.get_cd_action(current, desired) - assert result == 'delete' - - def test_get_cd_action(self): - ''' validate cd_action for returning None ''' - current = None - desired = {'state': 'absent'} - my_obj = na_helper() - result = my_obj.get_cd_action(current, desired) - assert result is None - - def test_get_modified_attributes_for_no_data(self): - ''' validate modified attributes when current is None ''' - current = None - desired = {'name': 'test'} - my_obj = na_helper() - result = my_obj.get_modified_attributes(current, desired) - assert result == {} - - def test_get_modified_attributes(self): - ''' validate modified attributes ''' - current = {'name': ['test', 'abcd', 'xyz', 'pqr'], 'state': 'present'} - desired = {'name': ['abcd', 'abc', 'xyz', 'pqr'], 'state': 'absent'} - my_obj = na_helper() - result = my_obj.get_modified_attributes(current, desired) - assert result == desired - - def test_get_modified_attributes_for_intersecting_mixed_list(self): - ''' validate modified attributes for list diff ''' - current = {'name': [2, 'four', 'six', 8]} - desired = {'name': ['a', 8, 'ab', 'four', 'abcd']} - my_obj = na_helper() - result = my_obj.get_modified_attributes(current, desired, True) - assert result == {'name': ['a', 'ab', 'abcd']} - - def test_get_modified_attributes_for_intersecting_list(self): - ''' validate modified attributes for list diff ''' - current = {'name': ['two', 'four', 'six', 'eight']} - desired = {'name': ['a', 'six', 'ab', 'four', 'abc']} - my_obj = na_helper() - result = my_obj.get_modified_attributes(current, desired, True) - assert result == {'name': ['a', 'ab', 'abc']} - - def test_get_modified_attributes_for_nonintersecting_list(self): - ''' validate modified attributes for list diff 
''' - current = {'name': ['two', 'four', 'six', 'eight']} - desired = {'name': ['a', 'ab', 'abd']} - my_obj = na_helper() - result = my_obj.get_modified_attributes(current, desired, True) - assert result == {'name': ['a', 'ab', 'abd']} - - def test_get_modified_attributes_for_list_of_dicts_no_data(self): - ''' validate modified attributes for list diff ''' - current = None - desired = {'address_blocks': [{'start': '10.20.10.40', 'size': 5}]} - my_obj = na_helper() - result = my_obj.get_modified_attributes(current, desired, True) - assert result == {} - - def test_get_modified_attributes_for_intersecting_list_of_dicts(self): - ''' validate modified attributes for list diff ''' - current = {'address_blocks': [{'start': '10.10.10.23', 'size': 5}, {'start': '10.10.10.30', 'size': 5}]} - desired = {'address_blocks': [{'start': '10.10.10.23', 'size': 5}, {'start': '10.10.10.30', 'size': 5}, {'start': '10.20.10.40', 'size': 5}]} - my_obj = na_helper() - result = my_obj.get_modified_attributes(current, desired, True) - assert result == {'address_blocks': [{'start': '10.20.10.40', 'size': 5}]} - - def test_get_modified_attributes_for_nonintersecting_list_of_dicts(self): - ''' validate modified attributes for list diff ''' - current = {'address_blocks': [{'start': '10.10.10.23', 'size': 5}, {'start': '10.10.10.30', 'size': 5}]} - desired = {'address_blocks': [{'start': '10.20.10.23', 'size': 5}, {'start': '10.20.10.30', 'size': 5}, {'start': '10.20.10.40', 'size': 5}]} - my_obj = na_helper() - result = my_obj.get_modified_attributes(current, desired, True) - assert result == {'address_blocks': [{'start': '10.20.10.23', 'size': 5}, {'start': '10.20.10.30', 'size': 5}, {'start': '10.20.10.40', 'size': 5}]} - - def test_get_modified_attributes_for_list_diff(self): - ''' validate modified attributes for list diff ''' - current = {'name': ['test', 'abcd'], 'state': 'present'} - desired = {'name': ['abcd', 'abc'], 'state': 'present'} - my_obj = na_helper() - result = 
my_obj.get_modified_attributes(current, desired, True) - assert result == {'name': ['abc']} - - def test_get_modified_attributes_for_no_change(self): - ''' validate modified attributes for same data in current and desired ''' - current = {'name': 'test'} - desired = {'name': 'test'} - my_obj = na_helper() - result = my_obj.get_modified_attributes(current, desired) - assert result == {} - - def test_is_rename_action_for_empty_input(self): - ''' validate rename action for input None ''' - source = None - target = None - my_obj = na_helper() - result = my_obj.is_rename_action(source, target) - assert result == source - - def test_is_rename_action_for_no_source(self): - ''' validate rename action when source is None ''' - source = None - target = 'test2' - my_obj = na_helper() - result = my_obj.is_rename_action(source, target) - assert result is False - - def test_is_rename_action_for_no_target(self): - ''' validate rename action when target is None ''' - source = 'test2' - target = None - my_obj = na_helper() - result = my_obj.is_rename_action(source, target) - assert result is True - - def test_is_rename_action(self): - ''' validate rename action ''' - source = 'test' - target = 'test2' - my_obj = na_helper() - result = my_obj.is_rename_action(source, target) - assert result is False diff --git a/ansible_collections/netapp/azure/tests/unit/plugins/modules/test_azure_rm_netapp_account.py b/ansible_collections/netapp/azure/tests/unit/plugins/modules/test_azure_rm_netapp_account.py deleted file mode 100644 index 0d140b4a0..000000000 --- a/ansible_collections/netapp/azure/tests/unit/plugins/modules/test_azure_rm_netapp_account.py +++ /dev/null @@ -1,173 +0,0 @@ -# (c) 2019, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -''' unit tests ONTAP Ansible module: azure_rm_netapp_account''' - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type -import json -import sys - -import 
pytest -try: - from requests import Response -except ImportError: - if sys.version_info < (2, 7): - pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not be available') - -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -from ansible_collections.netapp.azure.tests.unit.compat import unittest -from ansible_collections.netapp.azure.tests.unit.compat.mock import patch, Mock - -HAS_AZURE_RMNETAPP_IMPORT = True -try: - # At this point, python believes the module is already loaded, so the import inside azure_rm_netapp_volume will be skipped. - from ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_account \ - import AzureRMNetAppAccount as account_module -except ImportError: - HAS_AZURE_RMNETAPP_IMPORT = False - -HAS_AZURE_CLOUD_ERROR_IMPORT = True -try: - from msrestazure.azure_exceptions import CloudError -except ImportError: - HAS_AZURE_CLOUD_ERROR_IMPORT = False - -if not HAS_AZURE_CLOUD_ERROR_IMPORT and sys.version_info < (3, 5): - pytestmark = pytest.mark.skip('skipping as missing required azure_exceptions on 2.6 and 2.7') - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - 
kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -class MockAzureClient(object): - ''' mock server connection to ONTAP host ''' - def __init__(self): - ''' save arguments ''' - self.valid_accounts = ['test1', 'test2'] - - def get(self, resource_group, account_name): # pylint: disable=unused-argument - if account_name not in self.valid_accounts: - invalid = Response() - invalid.status_code = 404 - raise CloudError(response=invalid) - return Mock(name=account_name) - - def create_or_update(self, body, resource_group, account_name): # pylint: disable=unused-argument,no-self-use - return None - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - self.netapp_client = Mock() - self.netapp_client.accounts = MockAzureClient() - self._netapp_client = None - - def set_default_args(self): - resource_group = 'azure' - name = 'test1' - location = 'abc' - return dict({ - 'resource_group': resource_group, - 'name': name, - 'location': location - }) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - account_module() - print('Info: %s' % exc.value.args[0]['msg']) - - @patch('ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common.AzureRMNetAppModuleBase.netapp_client') - def test_ensure_get_called_valid_account(self, client_f): - set_module_args(self.set_default_args()) - client_f.return_value = Mock() - client_f.side_effect = Mock() - my_obj = account_module() - my_obj.netapp_client.accounts = self.netapp_client.accounts - assert my_obj.get_azure_netapp_account() is not None - - 
@patch('ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common.AzureRMNetAppModuleBase.netapp_client') - def test_ensure_get_called_non_existing_account(self, client_f): - data = self.set_default_args() - data['name'] = 'invalid' - set_module_args(data) - client_f.return_value = Mock() - client_f.side_effect = Mock() - my_obj = account_module() - my_obj.netapp_client.accounts = self.netapp_client.accounts - assert my_obj.get_azure_netapp_account() is None - - @patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_account.AzureRMNetAppAccount.get_azure_netapp_account') - @patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_account.AzureRMNetAppAccount.create_azure_netapp_account') - @patch('ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common.AzureRMNetAppModuleBase.netapp_client') - def test_ensure_create_called(self, client_f, mock_create, mock_get): - data = dict(self.set_default_args()) - data['name'] = 'create' - data['tags'] = {'ttt': 'tesssttt', 'abc': 'xyz'} - set_module_args(data) - mock_get.return_value = None - client_f.return_value = Mock() - client_f.side_effect = Mock() - my_obj = account_module() - my_obj.netapp_client.accounts = self.netapp_client.accounts - with pytest.raises(AnsibleExitJson) as exc: - # add default args for exec_module - data['state'] = 'present' - data['debug'] = False - my_obj.exec_module(**data) - assert exc.value.args[0]['changed'] - mock_create.assert_called_with() - - @patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_account.AzureRMNetAppAccount.get_azure_netapp_account') - @patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_account.AzureRMNetAppAccount.delete_azure_netapp_account') - @patch('ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common.AzureRMNetAppModuleBase.netapp_client') - def test_ensure_delete_called(self, client_f, mock_delete, mock_get): - data = 
dict(self.set_default_args()) - data['state'] = 'absent' - set_module_args(data) - mock_get.return_value = Mock() - client_f.return_value = Mock() - client_f.side_effect = Mock() - my_obj = account_module() - my_obj.netapp_client.accounts = self.netapp_client.accounts - with pytest.raises(AnsibleExitJson) as exc: - # add default args for exec_module - data['debug'] = False - my_obj.exec_module(**data) - assert exc.value.args[0]['changed'] - mock_delete.assert_called_with() diff --git a/ansible_collections/netapp/azure/tests/unit/plugins/modules/test_azure_rm_netapp_capacity_pool.py b/ansible_collections/netapp/azure/tests/unit/plugins/modules/test_azure_rm_netapp_capacity_pool.py deleted file mode 100644 index 91c8eefd6..000000000 --- a/ansible_collections/netapp/azure/tests/unit/plugins/modules/test_azure_rm_netapp_capacity_pool.py +++ /dev/null @@ -1,197 +0,0 @@ -# (c) 2019, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -''' unit tests ONTAP Ansible module: azure_rm_netapp_capacity_pool''' - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type -import json -import sys - -import pytest -try: - from requests import Response -except ImportError: - if sys.version_info < (2, 7): - pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not be available') - -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -from ansible_collections.netapp.azure.tests.unit.compat import unittest -from ansible_collections.netapp.azure.tests.unit.compat.mock import patch, Mock - -HAS_AZURE_RMNETAPP_IMPORT = True -try: - # At this point, python believes the module is already loaded, so the import inside azure_rm_netapp_volume will be skipped. 
- from ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_capacity_pool \ - import AzureRMNetAppCapacityPool as capacity_pool_module -except ImportError: - HAS_AZURE_RMNETAPP_IMPORT = False - -HAS_AZURE_CLOUD_ERROR_IMPORT = True -try: - from msrestazure.azure_exceptions import CloudError -except ImportError: - HAS_AZURE_CLOUD_ERROR_IMPORT = False - -if not HAS_AZURE_CLOUD_ERROR_IMPORT and sys.version_info < (3, 5): - pytestmark = pytest.mark.skip('skipping as missing required azure_exceptions on 2.6 and 2.7') - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -class MockAzureClient(object): - ''' mock server connection to ONTAP host ''' - def __init__(self): - ''' save arguments ''' - self.valid_pools = ['test1', 'test2'] - - def get(self, resource_group, account_name, pool_name): # pylint: disable=unused-argument - if pool_name not in self.valid_pools: - invalid = Response() - invalid.status_code = 404 - raise CloudError(response=invalid) - else: - return Mock(name=pool_name) - - def create_or_update(self, body, resource_group, account_name, pool_name): # pylint: 
disable=unused-argument - return None - - def update(self, body, resource_group, account_name, pool_name): # pylint: disable=unused-argument - return None - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - self.netapp_client = Mock() - self.netapp_client.pools = MockAzureClient() - self._netapp_client = None - - def set_default_args(self): - resource_group = 'azure' - account_name = 'azure' - name = 'test1' - location = 'abc' - size = 1 - service_level = 'Standard' - return dict({ - 'resource_group': resource_group, - 'account_name': account_name, - 'name': name, - 'location': location, - 'size': size, - 'service_level': service_level - }) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - capacity_pool_module() - print('Info: %s' % exc.value.args[0]['msg']) - - @patch('ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common.AzureRMNetAppModuleBase.netapp_client') - def test_ensure_get_called_valid_capacity_pool(self, client_f): - set_module_args(self.set_default_args()) - client_f.return_value = Mock() - my_obj = capacity_pool_module() - my_obj.netapp_client.pools = self.netapp_client.pools - assert my_obj.get_azure_netapp_capacity_pool() is not None - - @patch('ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common.AzureRMNetAppModuleBase.netapp_client') - def test_ensure_get_called_non_existing_capacity_pool(self, client_f): - data = self.set_default_args() - data['name'] = 'invalid' - set_module_args(data) - client_f.return_value = Mock() - my_obj = capacity_pool_module() - my_obj.netapp_client.pools = self.netapp_client.pools - assert 
my_obj.get_azure_netapp_capacity_pool() is None - - @patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_capacity_pool.AzureRMNetAppCapacityPool.get_azure_netapp_capacity_pool') - @patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_capacity_pool.AzureRMNetAppCapacityPool.create_azure_netapp_capacity_pool') - @patch('ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common.AzureRMNetAppModuleBase.netapp_client') - def test_ensure_create_called(self, client_f, mock_create, mock_get): - data = dict(self.set_default_args()) - data['name'] = 'create' - set_module_args(data) - mock_get.return_value = None - client_f.return_value = Mock() - my_obj = capacity_pool_module() - my_obj.netapp_client.pools = self.netapp_client.pools - with pytest.raises(AnsibleExitJson) as exc: - # add default args for exec_module - data['state'] = 'present' - data['debug'] = False - my_obj.exec_module(**data) - assert exc.value.args[0]['changed'] - mock_create.assert_called_with() - - @patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_capacity_pool.AzureRMNetAppCapacityPool.get_azure_netapp_capacity_pool') - @patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_capacity_pool.AzureRMNetAppCapacityPool.create_azure_netapp_capacity_pool') - @patch('ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common.AzureRMNetAppModuleBase.netapp_client') - def test_ensure_modify_called(self, client_f, mock_modify, mock_get): - data = dict(self.set_default_args()) - data['name'] = 'create' - data['size'] = 3 - set_module_args(data) - mock_get.return_value = None - client_f.return_value = Mock() - my_obj = capacity_pool_module() - my_obj.netapp_client.pools = self.netapp_client.pools - with pytest.raises(AnsibleExitJson) as exc: - data['state'] = 'present' - data['debug'] = False - my_obj.exec_module(**data) - assert exc.value.args[0]['changed'] - mock_modify.assert_called_with() - - 
@patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_capacity_pool.AzureRMNetAppCapacityPool.get_azure_netapp_capacity_pool') - @patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_capacity_pool.AzureRMNetAppCapacityPool.delete_azure_netapp_capacity_pool') - @patch('ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common.AzureRMNetAppModuleBase.netapp_client') - def test_ensure_delete_called(self, client_f, mock_delete, mock_get): - data = self.set_default_args() - data['state'] = 'absent' - set_module_args(data) - mock_get.return_value = Mock() - client_f.return_value = Mock() - my_obj = capacity_pool_module() - my_obj.netapp_client.pools = self.netapp_client.pools - with pytest.raises(AnsibleExitJson) as exc: - data['state'] = 'absent' - data['debug'] = False - my_obj.exec_module(**data) - assert exc.value.args[0]['changed'] - mock_delete.assert_called_with() diff --git a/ansible_collections/netapp/azure/tests/unit/plugins/modules/test_azure_rm_netapp_snapshot.py b/ansible_collections/netapp/azure/tests/unit/plugins/modules/test_azure_rm_netapp_snapshot.py deleted file mode 100644 index 0415a4039..000000000 --- a/ansible_collections/netapp/azure/tests/unit/plugins/modules/test_azure_rm_netapp_snapshot.py +++ /dev/null @@ -1,165 +0,0 @@ -# (c) 2019, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -''' unit tests ONTAP Ansible module: azure_rm_netapp_snapshot''' - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type -import json -import sys - -import pytest -try: - from requests import Response -except ImportError: - if sys.version_info < (2, 7): - pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not be available') - -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -from ansible_collections.netapp.azure.tests.unit.compat import unittest -from 
ansible_collections.netapp.azure.tests.unit.compat.mock import patch, Mock - -HAS_AZURE_RMNETAPP_IMPORT = True -try: - # At this point, python believes the module is already loaded, so the import inside azure_rm_netapp_volume will be skipped. - from ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_snapshot \ - import AzureRMNetAppSnapshot as snapshot_module -except ImportError: - HAS_AZURE_RMNETAPP_IMPORT = False - -HAS_AZURE_CLOUD_ERROR_IMPORT = True -try: - from msrestazure.azure_exceptions import CloudError -except ImportError: - HAS_AZURE_CLOUD_ERROR_IMPORT = False - -if not HAS_AZURE_CLOUD_ERROR_IMPORT and sys.version_info < (3, 5): - pytestmark = pytest.mark.skip('skipping as missing required azure_exceptions on 2.6 and 2.7') - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -class MockAzureClient(object): - ''' mock server connection to ONTAP host ''' - def __init__(self): - ''' save arguments ''' - self.valid_snapshots = ['test1', 'test2'] - - def get(self, resource_group, account_name, pool_name, volume_name, snapshot_name): # pylint: disable=unused-argument - if 
snapshot_name not in self.valid_snapshots: - invalid = Response() - invalid.status_code = 404 - raise CloudError(response=invalid) - else: - return Mock(name=snapshot_name) - - def create(self, body, resource_group, account_name, pool_name, volume_name, snapshot_name): # pylint: disable=unused-argument - return None - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - self.netapp_client = Mock() - self.netapp_client.pools = MockAzureClient() - self._netapp_client = None - - def set_default_args(self): - resource_group = 'azure' - account_name = 'azure' - pool_name = 'azure' - volume_name = 'azure' - name = 'test1' - location = 'abc' - return dict({ - 'resource_group': resource_group, - 'account_name': account_name, - 'pool_name': pool_name, - 'volume_name': volume_name, - 'name': name, - 'location': location - }) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - snapshot_module() - print('Info: %s' % exc.value.args[0]['msg']) - - @patch('ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common.AzureRMNetAppModuleBase.netapp_client') - def test_ensure_get_called_valid_snapshot(self, client_f): - set_module_args(self.set_default_args()) - client_f.return_value = Mock() - my_obj = snapshot_module() - my_obj.netapp_client.snapshots = self.netapp_client.snapshots - assert my_obj.get_azure_netapp_snapshot() is not None - - @patch('ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common.AzureRMNetAppModuleBase.netapp_client') - @patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_snapshot.AzureRMNetAppSnapshot.get_azure_netapp_snapshot') - 
@patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_snapshot.AzureRMNetAppSnapshot.create_azure_netapp_snapshot') - def test_ensure_create_called(self, mock_create, mock_get, client_f): - data = dict(self.set_default_args()) - data['name'] = 'create' - set_module_args(data) - mock_get.return_value = None - client_f.return_value = Mock() - my_obj = snapshot_module() - my_obj.netapp_client.snapshots = self.netapp_client.snapshots - with pytest.raises(AnsibleExitJson) as exc: - # add default args for exec_module - data['state'] = 'present' - data['debug'] = False - my_obj.exec_module(**data) - assert exc.value.args[0]['changed'] - mock_create.assert_called_with() - - @patch('ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common.AzureRMNetAppModuleBase.netapp_client') - @patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_snapshot.AzureRMNetAppSnapshot.get_azure_netapp_snapshot') - @patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_snapshot.AzureRMNetAppSnapshot.delete_azure_netapp_snapshot') - def test_ensure_delete_called(self, mock_delete, mock_get, client_f): - data = dict(self.set_default_args()) - data['state'] = 'absent' - set_module_args(data) - client_f.return_value = Mock() - mock_get.return_value = Mock() - my_obj = snapshot_module() - my_obj.netapp_client.snapshots = self.netapp_client.snapshots - with pytest.raises(AnsibleExitJson) as exc: - # add default args for exec_module - data['debug'] = False - my_obj.exec_module(**data) - assert exc.value.args[0]['changed'] - mock_delete.assert_called_with() diff --git a/ansible_collections/netapp/azure/tests/unit/plugins/modules/test_azure_rm_netapp_volume.py b/ansible_collections/netapp/azure/tests/unit/plugins/modules/test_azure_rm_netapp_volume.py deleted file mode 100644 index 83c7f812e..000000000 --- a/ansible_collections/netapp/azure/tests/unit/plugins/modules/test_azure_rm_netapp_volume.py +++ /dev/null @@ -1,501 +0,0 @@ 
-# (c) 2019, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -''' unit tests ONTAP Ansible module: azure_rm_netapp_volume''' - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type -import json -import sys - -import pytest -try: - from requests import Response -except ImportError: - if sys.version_info < (2, 7): - pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not be available') - -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -from ansible_collections.netapp.azure.tests.unit.compat.mock import patch, Mock - -HAS_AZURE_RMNETAPP_IMPORT = True -try: - # At this point, python believes the module is already loaded, so the import inside azure_rm_netapp_volume will be skipped. - from ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_volume \ - import AzureRMNetAppVolume as volume_module -except ImportError: - HAS_AZURE_RMNETAPP_IMPORT = False - -HAS_AZURE_CLOUD_ERROR_IMPORT = True -try: - from msrestazure.azure_exceptions import CloudError -except ImportError: - HAS_AZURE_CLOUD_ERROR_IMPORT = False - -if not HAS_AZURE_CLOUD_ERROR_IMPORT and sys.version_info < (3, 5): - pytestmark = pytest.mark.skip('skipping as missing required azure_exceptions on 2.6 and 2.7') - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 
'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -class MockAzureClient(object): - ''' mock server connection to ONTAP host ''' - def __init__(self): - ''' save arguments ''' - self.valid_volumes = ['test1', 'test2'] - - def get(self, resource_group, account_name, pool_name, volume_name): # pylint: disable=unused-argument - if volume_name in self.valid_volumes: - return Mock(name=volume_name, - subnet_id='/resid/whatever/subnet_name', - mount_targets=[Mock(ip_address='1.2.3.4')] - ) - - invalid = Response() - invalid.status_code = 404 - raise CloudError(response=invalid) - - def create_or_update(self, body, resource_group, account_name, pool_name, volume_name): # pylint: disable=unused-argument - return None - - def begin_create_or_update(self, body, resource_group_name, account_name, pool_name, volume_name): # pylint: disable=unused-argument - return Mock(done=Mock(side_effect=[False, True])) - - def begin_update(self, body, resource_group_name, account_name, pool_name, volume_name): # pylint: disable=unused-argument - return Mock(done=Mock(side_effect=[False, True])) - - def begin_delete(self, resource_group_name, account_name, pool_name, volume_name): # pylint: disable=unused-argument - return Mock(done=Mock(side_effect=[False, True])) - - -class MockAzureClientRaise(MockAzureClient): - ''' mock server connection to ONTAP host ''' - response = Mock(status_code=400, context=None, headers=[], text=lambda: 'Forced exception') - - def begin_create_or_update(self, body, resource_group_name, account_name, pool_name, volume_name): # pylint: disable=unused-argument - raise CloudError(MockAzureClientRaise.response) - - def begin_update(self, body, resource_group_name, account_name, pool_name, volume_name): # pylint: 
disable=unused-argument - raise CloudError(MockAzureClientRaise.response) - - def begin_delete(self, resource_group_name, account_name, pool_name, volume_name): # pylint: disable=unused-argument - raise CloudError(MockAzureClientRaise.response) - - -# using pytest natively, without unittest.TestCase -@pytest.fixture(name="patch_ansible") -def fixture_patch_ansible(): - with patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) as mocks: - yield mocks - - -def set_default_args(): - resource_group = 'azure' - account_name = 'azure' - pool_name = 'azure' - name = 'test1' - location = 'abc' - file_path = 'azure' - subnet_id = 'azure' - virtual_network = 'azure' - size = 100 - return dict({ - 'resource_group': resource_group, - 'account_name': account_name, - 'pool_name': pool_name, - 'name': name, - 'location': location, - 'file_path': file_path, - 'subnet_name': subnet_id, - 'virtual_network': virtual_network, - 'size': size, - 'protocol_types': 'nfs', - 'tags': {'owner': 'laurentn'} - }) - - -def test_module_fail_when_required_args_missing(patch_ansible): # pylint: disable=unused-argument - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - volume_module() - print('Info: %s' % exc.value.args[0]['msg']) - - -@patch('ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common.AzureRMNetAppModuleBase.netapp_client') -def test_ensure_get_called_valid_volume(client_f): - set_module_args(set_default_args()) - client_f.return_value = Mock() - my_obj = volume_module() - my_obj.netapp_client.volumes = MockAzureClient() - assert my_obj.get_azure_netapp_volume() is not None - - -@patch('ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common.AzureRMNetAppModuleBase.netapp_client') -def test_ensure_get_called_non_existing_volume(client_f): - data = dict(set_default_args()) - data['name'] = 'invalid' - set_module_args(data) - client_f.return_value 
= Mock() - my_obj = volume_module() - my_obj.netapp_client.volumes = MockAzureClient() - assert my_obj.get_azure_netapp_volume() is None - - -@patch('ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common.AzureRMNetAppModuleBase.netapp_client') -@patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_volume.AzureRMNetAppVolume.get_azure_netapp_volume') -@patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_volume.AzureRMNetAppVolume.create_azure_netapp_volume') -def test_ensure_create_called(mock_create, mock_get, client_f, patch_ansible): # pylint: disable=unused-argument - data = dict(set_default_args()) - data['name'] = 'create' - set_module_args(data) - mock_get.side_effect = [ - None, # first get - dict(mount_targets=[dict(ip_address='11.22.33.44')], # get after create - creation_token='abcd') - ] - client_f.return_value = Mock() - my_obj = volume_module() - my_obj.netapp_client.volumes = MockAzureClient() - with pytest.raises(AnsibleExitJson) as exc: - # add default args for exec_module - data['state'] = 'present' - data['debug'] = False - my_obj.exec_module(**data) - assert exc.value.args[0]['changed'] - expected_mount_path = '11.22.33.44:/abcd' - assert exc.value.args[0]['mount_path'] == expected_mount_path - mock_create.assert_called_with() - - -@patch('ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common.AzureRMNetAppModuleBase.netapp_client') -@patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_volume.AzureRMNetAppVolume.get_azure_netapp_volume') -def test_create(mock_get, client_f, patch_ansible): # pylint: disable=unused-argument - data = dict(set_default_args()) - data['name'] = 'create' - data['protocol_types'] = ['nfsv4.1'] - set_module_args(data) - mock_get.side_effect = [ - None, # first get - dict(mount_targets=[dict(ip_address='11.22.33.44')], # get after create - creation_token='abcd') - ] - client_f.return_value = Mock() - my_obj = 
volume_module() - my_obj.azure_auth = Mock(subscription_id='1234') - my_obj._new_style = True - my_obj.netapp_client.volumes = MockAzureClient() - with pytest.raises(AnsibleExitJson) as exc: - # add default args for exec_module - data['state'] = 'present' - data['debug'] = False - my_obj.exec_module(**data) - assert exc.value.args[0]['changed'] - expected_mount_path = '11.22.33.44:/abcd' - assert exc.value.args[0]['mount_path'] == expected_mount_path - - -@patch('ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common.AzureRMNetAppModuleBase.netapp_client') -@patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_volume.AzureRMNetAppVolume.get_azure_netapp_volume') -def test_create_exception(mock_get, client_f, patch_ansible): # pylint: disable=unused-argument - data = dict(set_default_args()) - data['name'] = 'create' - data['protocol_types'] = 'nfsv4.1' - set_module_args(data) - mock_get.side_effect = [ - None, # first get - dict(mount_targets=[dict(ip_address='11.22.33.44')], # get after create - creation_token='abcd') - ] - client_f.return_value = Mock() - my_obj = volume_module() - my_obj.azure_auth = Mock(subscription_id='1234') - my_obj._new_style = True - my_obj.netapp_client.volumes = MockAzureClientRaise() - with pytest.raises(AnsibleFailJson) as exc: - # add default args for exec_module - data['state'] = 'present' - data['debug'] = False - my_obj.exec_module(**data) - expected_msg = 'Error creating volume' - assert expected_msg in exc.value.args[0]['msg'] - - -@patch('ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common.AzureRMNetAppModuleBase.netapp_client') -@patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_volume.AzureRMNetAppVolume.get_azure_netapp_volume') -@patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_volume.AzureRMNetAppVolume.create_azure_netapp_volume') -def test_ensure_create_called_but_fail_on_get(mock_create, mock_get, client_f, 
patch_ansible): # pylint: disable=unused-argument - data = dict(set_default_args()) - data['name'] = 'create' - set_module_args(data) - mock_get.side_effect = [ - None, # first get - dict(mount_targets=None, # get after create - creation_token='abcd') - ] - client_f.return_value = Mock() - my_obj = volume_module() - my_obj.netapp_client.volumes = MockAzureClient() - with pytest.raises(AnsibleFailJson) as exc: - # add default args for exec_module - data['state'] = 'present' - data['debug'] = False - my_obj.exec_module(**data) - error = 'Error: volume create was created successfully, but mount target(s) cannot be found - volume details:' - assert exc.value.args[0]['msg'].startswith(error) - mock_create.assert_called_with() - - -@patch('ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common.AzureRMNetAppModuleBase.netapp_client') -@patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_volume.AzureRMNetAppVolume.get_azure_netapp_volume') -@patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_volume.AzureRMNetAppVolume.create_azure_netapp_volume') -def test_ensure_create_called_but_fail_on_mount_target(mock_create, mock_get, client_f, patch_ansible): # pylint: disable=unused-argument - data = dict(set_default_args()) - data['name'] = 'create' - set_module_args(data) - mock_get.return_value = None - client_f.return_value = Mock() - my_obj = volume_module() - my_obj.netapp_client.volumes = MockAzureClient() - with pytest.raises(AnsibleFailJson) as exc: - # add default args for exec_module - data['state'] = 'present' - data['debug'] = False - my_obj.exec_module(**data) - error = 'Error: volume create was created successfully, but cannot be found.' 
- assert exc.value.args[0]['msg'] == error - mock_create.assert_called_with() - - -@patch('ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common.AzureRMNetAppModuleBase.netapp_client') -@patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_volume.AzureRMNetAppVolume.get_azure_netapp_volume') -@patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_volume.AzureRMNetAppVolume.delete_azure_netapp_volume') -def test_ensure_delete_called(mock_delete, mock_get, client_f, patch_ansible): # pylint: disable=unused-argument - data = dict(set_default_args()) - data['state'] = 'absent' - set_module_args(data) - client_f.return_value = Mock() - mock_get.return_value = Mock() - my_obj = volume_module() - my_obj.netapp_client.volumes = MockAzureClient() - with pytest.raises(AnsibleExitJson) as exc: - # add default args for exec_module - data['debug'] = False - my_obj.exec_module(**data) - assert exc.value.args[0]['changed'] - mock_delete.assert_called_with() - - -@patch('ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common.AzureRMNetAppModuleBase.netapp_client') -@patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_volume.AzureRMNetAppVolume.get_azure_netapp_volume') -def test_delete(mock_get, client_f, patch_ansible): # pylint: disable=unused-argument - data = dict(set_default_args()) - data['name'] = 'delete' - data['state'] = 'absent' - set_module_args(data) - mock_get.side_effect = [ - dict(mount_targets=[dict(ip_address='11.22.33.44')], # first get - creation_token='abcd') - ] - client_f.return_value = Mock() - my_obj = volume_module() - my_obj.azure_auth = Mock(subscription_id='1234') - my_obj._new_style = True - my_obj.netapp_client.volumes = MockAzureClient() - with pytest.raises(AnsibleExitJson) as exc: - # add default args for exec_module - data['debug'] = False - my_obj.exec_module(**data) - assert exc.value.args[0]['changed'] - expected_mount_path = '' - assert 
exc.value.args[0]['mount_path'] == expected_mount_path - - -@patch('ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common.AzureRMNetAppModuleBase.netapp_client') -@patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_volume.AzureRMNetAppVolume.get_azure_netapp_volume') -def test_delete_exception(mock_get, client_f, patch_ansible): # pylint: disable=unused-argument - data = dict(set_default_args()) - data['name'] = 'delete' - data['state'] = 'absent' - set_module_args(data) - mock_get.side_effect = [ - dict(mount_targets=[dict(ip_address='11.22.33.44')], # first get - creation_token='abcd') - ] - client_f.return_value = Mock() - my_obj = volume_module() - my_obj.azure_auth = Mock(subscription_id='1234') - my_obj._new_style = True - my_obj.netapp_client.volumes = MockAzureClientRaise() - with pytest.raises(AnsibleFailJson) as exc: - # add default args for exec_module - data['debug'] = False - my_obj.exec_module(**data) - expected_msg = 'Error deleting volume' - assert expected_msg in exc.value.args[0]['msg'] - - -@patch('ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common.AzureRMNetAppModuleBase.netapp_client') -@patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_volume.AzureRMNetAppVolume.get_azure_netapp_volume') -def test_modify(mock_get, client_f, patch_ansible): # pylint: disable=unused-argument - data = dict(set_default_args()) - data['name'] = 'modify' - data['size'] = 200 - data['tags'] = {'added_tag': 'new_tag'} - set_module_args(data) - mock_get.side_effect = [ - dict(mount_targets=[dict(ip_address='11.22.33.44')], # first get - creation_token='abcd', - tags={}, - usage_threshold=0), - dict(mount_targets=[dict(ip_address='11.22.33.44')], # get after modify - creation_token='abcd', - usage_threshold=10000000) - ] - client_f.return_value = Mock() - my_obj = volume_module() - my_obj.azure_auth = Mock(subscription_id='1234') - my_obj._new_style = True - 
my_obj.netapp_client.volumes = MockAzureClient() - with pytest.raises(AnsibleExitJson) as exc: - # add default args for exec_module - data['state'] = 'present' - data['debug'] = False - my_obj.exec_module(**data) - assert exc.value.args[0]['changed'] - print('modify', exc.value.args[0]) - expected_mount_path = '11.22.33.44:/abcd' - assert exc.value.args[0]['mount_path'] == expected_mount_path - - -@patch('ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common.AzureRMNetAppModuleBase.netapp_client') -@patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_volume.AzureRMNetAppVolume.get_azure_netapp_volume') -def test_modify_exception(mock_get, client_f, patch_ansible): # pylint: disable=unused-argument - data = dict(set_default_args()) - data['name'] = 'modify' - data['size'] = 200 - set_module_args(data) - mock_get.side_effect = [ - dict(mount_targets=[dict(ip_address='11.22.33.44')], # first get - creation_token='abcd', - usage_threshold=0), - dict(mount_targets=[dict(ip_address='11.22.33.44')], # get after modify - creation_token='abcd', - usage_threshold=10000000) - ] - client_f.return_value = Mock() - my_obj = volume_module() - my_obj.azure_auth = Mock(subscription_id='1234') - my_obj._new_style = True - my_obj.netapp_client.volumes = MockAzureClientRaise() - with pytest.raises(AnsibleFailJson) as exc: - # add default args for exec_module - data['state'] = 'present' - data['debug'] = False - my_obj.exec_module(**data) - expected_msg = 'Error modifying volume' - assert expected_msg in exc.value.args[0]['msg'] - - -@patch('ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common.AzureRMNetAppModuleBase.netapp_client') -@patch('ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_volume.AzureRMNetAppVolume.get_azure_netapp_volume') -def test_modify_not_supported(mock_get, client_f, patch_ansible): # pylint: disable=unused-argument - data = dict(set_default_args()) - data['name'] = 'modify' - 
data['location'] = 'east' - set_module_args(data) - mock_get.side_effect = [ - dict(mount_targets=[dict(ip_address='11.22.33.44')], # first get - creation_token='abcd', - usage_threshold=0, - location='west', - name='old_name'), - dict(mount_targets=[dict(ip_address='11.22.33.44')], # get after modify - creation_token='abcd', - usage_threshold=10000000) - ] - client_f.return_value = Mock() - my_obj = volume_module() - my_obj.azure_auth = Mock(subscription_id='1234') - my_obj._new_style = True - my_obj.netapp_client.volumes = MockAzureClient() - with pytest.raises(AnsibleFailJson) as exc: - # add default args for exec_module - data['state'] = 'present' - data['debug'] = False - my_obj.exec_module(**data) - expected_msg = "Error: the following properties cannot be modified: {'location': 'east'}" - assert expected_msg in exc.value.args[0]['msg'] - - -@patch('ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common.AzureRMNetAppModuleBase.netapp_client') -def test_get_export_policy_rules(client_f, patch_ansible): - set_module_args(set_default_args()) - client_f.return_value = Mock() - my_obj = volume_module() - my_obj.netapp_client.volumes = MockAzureClient() - rules = my_obj.get_export_policy_rules() - assert rules is None - del my_obj.parameters['protocol_types'] - rules = my_obj.get_export_policy_rules() - assert rules is None - my_obj.parameters['protocol_types'] = ['nFsv4.1'] - rules = my_obj.get_export_policy_rules() - assert rules is not None - rules = vars(rules) - assert 'rules' in rules - rules = rules['rules'] - assert rules - rule = vars(rules[0]) - assert rule['nfsv41'] - assert not rule['cifs'] - - -def test_dict_from_object(): - set_module_args(set_default_args()) - my_obj = volume_module() - # just for fun - module_dict = my_obj.dict_from_volume_object(my_obj) - print('Module dict', module_dict) - - rule_object = Mock() - rule_object.ip_address = '10.10.10.10' - export_policy_object = Mock() - export_policy_object.rules = 
[rule_object] - volume_object = Mock() - volume_object.export_policy = export_policy_object - volume_dict = my_obj.dict_from_volume_object(volume_object) - print('Volume dict', volume_dict) - assert 'export_policy' in volume_dict - assert 'rules' in volume_dict['export_policy'] - assert isinstance(volume_dict['export_policy']['rules'], list) - assert len(volume_dict['export_policy']['rules']) == 1 - assert 'ip_address' in volume_dict['export_policy']['rules'][0] diff --git a/ansible_collections/netapp/azure/tests/unit/plugins/modules/test_azure_rm_netapp_volume_import.py b/ansible_collections/netapp/azure/tests/unit/plugins/modules/test_azure_rm_netapp_volume_import.py deleted file mode 100644 index 13d3bba29..000000000 --- a/ansible_collections/netapp/azure/tests/unit/plugins/modules/test_azure_rm_netapp_volume_import.py +++ /dev/null @@ -1,74 +0,0 @@ -# (c) 2021, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -''' unit tests ONTAP Ansible module: azure_rm_netapp_volume''' - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type -import json -import sys - -import pytest -# from typing import Collection -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -from ansible_collections.netapp.azure.tests.unit.compat.mock import patch - - -if sys.version_info < (3, 5): - pytestmark = pytest.mark.skip('skipping as missing imports on 2.6 and 2.7') - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an 
exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -@pytest.fixture(name="patch_ansible") -def fixture_patch_ansible(): - with patch.multiple(basic.AnsibleModule, - fail_json=fail_json) as mocks: - yield mocks - - -# @patch('ansible_collections.netapp.azure.plugins.module_utils.azure_rm_netapp_common.AzureRMNetAppModuleBase.__init__') -def test_import_error(): - orig_import = __import__ - - def import_mock(name, *args): - print('importing: %s' % name) - if name.startswith('ansible_collections.netapp.azure.plugins.modules'): - # force a relead to go through secondary imports - sys.modules.pop(name, None) - if name in ('azure.core.exceptions', 'azure.mgmt.netapp.models'): - raise ImportError('forced error on %s' % name) - return orig_import(name, *args) - - # mock_base.return_value = Mock() - data = dict() - set_module_args(data) - with patch('builtins.__import__', side_effect=import_mock): - from ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_volume import IMPORT_ERRORS - assert any('azure.core.exceptions' in error for error in IMPORT_ERRORS) - assert any('azure.mgmt.netapp.models' in error for error in IMPORT_ERRORS) - - -def test_main(patch_ansible): # pylint: disable=unused-argument - data = dict() - set_module_args(data) - from ansible_collections.netapp.azure.plugins.modules.azure_rm_netapp_volume import main - with pytest.raises(AnsibleFailJson) as exc: - main() - expected_msg = "missing required arguments:" - assert expected_msg in exc.value.args[0]['msg'] diff --git a/ansible_collections/netapp/azure/tests/unit/requirements.txt b/ansible_collections/netapp/azure/tests/unit/requirements.txt deleted file mode 100644 index 0b89f6365..000000000 --- a/ansible_collections/netapp/azure/tests/unit/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -azure-mgmt-netapp ; python_version >= '2.7' -msrestazure ; python_version >= '3.5' -requests ; python_version >= '2.7' diff --git 
a/ansible_collections/netapp/elementsw/.github/ISSUE_TEMPLATE/bug_report.yml b/ansible_collections/netapp/elementsw/.github/ISSUE_TEMPLATE/bug_report.yml deleted file mode 100644 index 93fbe057a..000000000 --- a/ansible_collections/netapp/elementsw/.github/ISSUE_TEMPLATE/bug_report.yml +++ /dev/null @@ -1,210 +0,0 @@ ---- -name: 🐛 Bug report -description: Create a report to help us improve - -body: -- type: markdown - attributes: - value: > - **Thank you for wanting to report a bug in netapp.elementsw!** - - - ⚠ - Verify first that your issue is not [already reported on - GitHub][issue search] and keep in mind that we may have to keep - the current behavior because [every change breaks someone's - workflow][XKCD 1172]. - We try to be mindful about this. - - Also test if the latest release and devel branch are affected too. - - - **Tip:** If you are seeking community support, please consider - [Join our Slack community][ML||IRC]. - - - - [ML||IRC]: - https://join.slack.com/t/netapppub/shared_invite/zt-njcjx2sh-1VR2mEDvPcJAmPutOnP~mg - - [issue search]: ../search?q=is%3Aissue&type=issues - - [XKCD 1172]: https://xkcd.com/1172/ - - -- type: textarea - attributes: - label: Summary - description: Explain the problem briefly below. - placeholder: >- - When I try to do X with netapp.elementsw from the devel branch on GitHub, Y - breaks in a way Z under the env E. Here are all the details I know - about this problem... - validations: - required: true - -- type: input - attributes: - label: Component Name - description: > - Write the short name of the rst file, module, plugin, task or - feature below, *use your best guess if unsure*. - - - **Tip:** Cannot find it in this repository? Please be advised that - the source for some parts of the documentation are hosted outside - of this repository. 
If the page you are reporting describes - modules/plugins/etc that are not officially supported by the - Ansible Core Engineering team, there is a good chance that it is - coming from one of the [Ansible Collections maintained by the - community][collections org]. If this is the case, please make sure - to file an issue under the appropriate project there instead. - - - [collections org]: /ansible-collections - placeholder: dnf, apt, yum, pip, user etc. - validations: - required: true - -- type: textarea - attributes: - label: Ansible Version - description: >- - Paste verbatim output from `ansible --version` below, under - the prompt line. Please don't wrap it with tripple backticks — your - whole input will be turned into a code snippet automatically. - render: console - value: | - $ ansible --version - placeholder: | - $ ansible --version - ansible [core 2.11.0b4.post0] (detached HEAD ref: refs/) last updated 2021/04/02 00:33:35 (GMT +200) - config file = None - configured module search path = ['~/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules'] - ansible python module location = ~/src/github/ansible/ansible/lib/ansible - ansible collection location = ~/.ansible/collections:/usr/share/ansible/collections - executable location = bin/ansible - python version = 3.9.0 (default, Oct 26 2020, 13:08:59) [GCC 10.2.0] - jinja version = 2.11.3 - libyaml = True - validations: - required: true - -- type: textarea - attributes: - label: ElementSW Collection Version - description: >- - ElementSW Collection Version. 
Run `ansible-galaxy collection` and copy the entire output - render: console - value: | - $ ansible-galaxy collection list - validations: - required: true - -- type: textarea - attributes: - label: Playbook - description: >- - The task from the playbook that is give you the issue - render: console - validations: - required: true - -- type: textarea - attributes: - label: Steps to Reproduce - description: | - Describe exactly how to reproduce the problem, using a minimal test-case. It would *really* help us understand your problem if you could also pased any playbooks, configs and commands you used. - - **HINT:** You can paste https://gist.github.com links for larger files. - value: | - <!--- Paste example playbooks or commands between quotes below --> - ```yaml (paste below) - - ``` - placeholder: | - 1. Implement the following playbook: - - ```yaml - --- - # ping.yml - - hosts: all - gather_facts: false - tasks: - - ping: - ... - ``` - 2. Then run `ANSIBLE_DEBUG=1 ansible-playbook ping.yml -vvvvv` - 3. An error occurs. - validations: - required: true - -- type: textarea - attributes: - label: Expected Results - description: >- - Describe what you expected to happen when running the steps above. - placeholder: >- - I expected X to happen because I assumed Y and was shocked - that it did not. - validations: - required: true - -- type: textarea - attributes: - label: Actual Results - description: | - Describe what actually happened. If possible run with extra verbosity (`-vvvv`). - - Paste verbatim command output and don't wrap it with tripple backticks — your - whole input will be turned into a code snippet automatically. - render: console - placeholder: >- - Certificate did not match expected hostname: files.pythonhosted.org. 
Certificate: {'notAfter': 'Apr 28 19:20:25 2021 GMT', 'subjectAltName': ((u'DNS', 'r.ssl.fastly.net'), (u'DNS', '*.catchpoint.com'), (u'DNS', '*.cnn.io'), (u'DNS', '*.dollarshaveclub.com'), (u'DNS', '*.eater.com'), (u'DNS', '*.fastly.picmonkey.com'), (u'DNS', '*.files.saymedia-content.com'), (u'DNS', '*.ft.com'), (u'DNS', '*.meetupstatic.com'), (u'DNS', '*.nfl.com'), (u'DNS', '*.pagar.me'), (u'DNS', '*.picmonkey.com'), (u'DNS', '*.realself.com'), (u'DNS', '*.sbnation.com'), (u'DNS', '*.shakr.com'), (u'DNS', '*.streamable.com'), (u'DNS', '*.surfly.com'), (u'DNS', '*.theverge.com'), (u'DNS', '*.thrillist.com'), (u'DNS', '*.vox-cdn.com'), (u'DNS', '*.vox.com'), (u'DNS', '*.voxmedia.com'), (u'DNS', 'eater.com'), (u'DNS', 'ft.com'), (u'DNS', 'i.gse.io'), (u'DNS', 'picmonkey.com'), (u'DNS', 'realself.com'), (u'DNS', 'static.wixstatic.com'), (u'DNS', 'streamable.com'), (u'DNS', 'surfly.com'), (u'DNS', 'theverge.com'), (u'DNS', 'vox-cdn.com'), (u'DNS', 'vox.com'), (u'DNS', 'www.joyent.com')), 'subject': ((('countryName', u'US'),), (('stateOrProvinceName', u'California'),), (('localityName', u'San Francisco'),), (('organizationName', u'Fastly, Inc'),), (('commonName', u'r.ssl.fastly.net'),))} - Exception: - Traceback (most recent call last): - File "/usr/local/lib/python2.6/dist-packages/pip/basecommand.py", line 215, in main - status = self.run(options, args) - File "/usr/local/lib/python2.6/dist-packages/pip/commands/install.py", line 335, in run - wb.build(autobuilding=True) - File "/usr/local/lib/python2.6/dist-packages/pip/wheel.py", line 749, in build - self.requirement_set.prepare_files(self.finder) - File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 380, in prepare_files - ignore_dependencies=self.ignore_dependencies)) - File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 620, in _prepare_file - session=self.session, hashes=hashes) - File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 821, in unpack_url - 
hashes=hashes - File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 659, in unpack_http_url - hashes) - File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 853, in _download_http_url - stream=True, - File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 521, in get - return self.request('GET', url, **kwargs) - File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 386, in request - return super(PipSession, self).request(method, url, *args, **kwargs) - File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 508, in request - resp = self.send(prep, **send_kwargs) - File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 618, in send - r = adapter.send(request, **kwargs) - File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/cachecontrol/adapter.py", line 47, in send - resp = super(CacheControlAdapter, self).send(request, **kw) - File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/adapters.py", line 506, in send - raise SSLError(e, request=request) - SSLError: HTTPSConnectionPool(host='files.pythonhosted.org', port=443): Max retries exceeded with url: /packages/ef/ab/aa12712415809bf698e719b307419f953e25344e8f42d557533d7a02b276/netapp_lib-2020.7.16-py2-none-any.whl (Caused by SSLError(CertificateError("hostname 'files.pythonhosted.org' doesn't match either of 'r.ssl.fastly.net', '*.catchpoint.com', '*.cnn.io', '*.dollarshaveclub.com', '*.eater.com', '*.fastly.picmonkey.com', '*.files.saymedia-content.com', '*.ft.com', '*.meetupstatic.com', '*.nfl.com', '*.pagar.me', '*.picmonkey.com', '*.realself.com', '*.sbnation.com', '*.shakr.com', '*.streamable.com', '*.surfly.com', '*.theverge.com', '*.thrillist.com', '*.vox-cdn.com', '*.vox.com', '*.voxmedia.com', 'eater.com', 'ft.com', 'i.gse.io', 'picmonkey.com', 'realself.com', 'static.wixstatic.com', 'streamable.com', 'surfly.com', 'theverge.com', 'vox-cdn.com', 'vox.com', 
'www.joyent.com'",),)) - ERROR: Command "/usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 2. - ERROR: Command "docker exec d47eb360db4ce779c1f690db964655b76e68895c4360ff252c46fe7fe6f5c75a /usr/bin/env ANSIBLE_TEST_CONTENT_ROOT=/root/ansible_collections/netapp/ontap LC_ALL=en_US.UTF-8 /usr/bin/python3.6 /root/ansible/bin/ansible-test units --metadata tests/output/.tmp/metadata-9i2qfrcl.json --truncate 200 --redact --color yes --requirements --python default --requirements-mode only" returned exit status 1. - validations: - required: true - - -- type: markdown - attributes: - value: > - *One last thing...* - - - Thank you for your collaboration! - - -... diff --git a/ansible_collections/netapp/elementsw/.github/ISSUE_TEMPLATE/feature_request.yml b/ansible_collections/netapp/elementsw/.github/ISSUE_TEMPLATE/feature_request.yml deleted file mode 100644 index 8bb6094c7..000000000 --- a/ansible_collections/netapp/elementsw/.github/ISSUE_TEMPLATE/feature_request.yml +++ /dev/null @@ -1,100 +0,0 @@ ---- -name: ✨ Feature request -description: Suggest an idea for this project - -body: -- type: markdown - attributes: - value: > - **Thank you for wanting to suggest a feature for netapp.elementsw!** - - 💡 - Before you go ahead with your request, please first consider if it - would be useful for majority of the netapp.elementsw users. As a - general rule of thumb, any feature that is only of interest to a - small sub group should be [implemented in a third-party Ansible - Collection][contribute to collections] or maybe even just your - project alone. Be mindful of the fact that the essential - netapp.elementsw features have a broad impact. - - - <details> - <summary> - ❗ Every change breaks someone's workflow. 
- </summary> - - - [![❗ Every change breaks someone's workflow. - ](https://imgs.xkcd.com/comics/workflow.png) - ](https://xkcd.com/1172/) - </details> - - - ⚠ - Verify first that your idea is not [already requested on - GitHub][issue search]. - - Also test if the main branch does not already implement this. - - -- type: textarea - attributes: - label: Summary - description: > - Describe the new feature/improvement you would like briefly below. - - - What's the problem this feature will solve? - - What are you trying to do, that you are unable to achieve - with netapp.elementsw as it currently stands? - - - * Provide examples of real-world use cases that this would enable - and how it solves the problem you described. - - * How do you solve this now? - - * Have you tried to work around the problem using other tools? - - * Could there be a different approach to solving this issue? - - placeholder: >- - I am trying to do X with netapp.elementsw from the devel branch on GitHub and - I think that implementing a feature Y would be very helpful for me and - every other user of netapp.elementsw because of Z. - validations: - required: true - -- type: input - attributes: - label: Component Name - description: > - Write the short name of the module, plugin, task or feature below, - *use your best guess if unsure*. - - - [collections org]: /ansible-collections - placeholder: dnf, apt, yum, pip, user etc. - validations: - required: true - -- type: textarea - attributes: - label: Additional Information - description: | - Describe how the feature would be used, why it is needed and what it would solve. - - **HINT:** You can paste https://gist.github.com links for larger files. - value: | - <!--- Paste example playbooks or commands between quotes below --> - ```yaml (paste below) - - ``` - placeholder: >- - I asked on https://stackoverflow.com/.... and the community - advised me to do X, Y and Z. - validations: - required: true - -... 
diff --git a/ansible_collections/netapp/elementsw/.github/workflows/coverage.yml b/ansible_collections/netapp/elementsw/.github/workflows/coverage.yml deleted file mode 100644 index 9e2692651..000000000 --- a/ansible_collections/netapp/elementsw/.github/workflows/coverage.yml +++ /dev/null @@ -1,45 +0,0 @@ -name: NetApp.elementsw Ansible Coverage - -on: - push: - pull_request: - schedule: - - cron: '0 6 * * *' - -jobs: - sanity: - name: Coverage on elementsw - runs-on: ubuntu-latest - - steps: - - name: Check out code - uses: actions/checkout@v2 - - - name: Set up Python - uses: actions/setup-python@v2 - with: - python-version: 3.8 - - - name: Install ansible stable-2.11 - run: pip install https://github.com/ansible/ansible/archive/stable-2.11.tar.gz --disable-pip-version-check - - - name: Make directory to make ansible-test happy - run: | - pwd - mkdir -p ansible_collections/netapp/elementsw/ - rsync -av . ansible_collections/netapp/elementsw/ --exclude ansible_collections/netapp/elementsw/ - - - name: Run Unit Tests - run: ansible-test units --coverage --color --docker --python 3.8 - working-directory: ansible_collections/netapp/elementsw/ - - # ansible-test support producing code coverage date - - name: Generate coverage report - run: ansible-test coverage xml -v --requirements --group-by command --group-by version - working-directory: ansible_collections/netapp/elementsw/ - - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v2 - with: - working-directory: ansible_collections/netapp/elementsw/ - verbose: true
\ No newline at end of file diff --git a/ansible_collections/netapp/elementsw/.github/workflows/main.yml b/ansible_collections/netapp/elementsw/.github/workflows/main.yml deleted file mode 100644 index 2b9ec2379..000000000 --- a/ansible_collections/netapp/elementsw/.github/workflows/main.yml +++ /dev/null @@ -1,47 +0,0 @@ -name: NetApp.elementsw Ansible CI - -on: - push: - pull_request: - schedule: - - cron: '0 6 * * *' - -jobs: - sanity: - name: Sanity (${{ matrix.ansible }} on Elementsw - runs-on: ubuntu-latest - strategy: - matrix: - ansible: - - stable-2.9 - - stable-2.10 - - stable-2.11 - - stable-2.12 - - devel - - steps: - - name: Check out code - uses: actions/checkout@v2 - - - name: Set up Python - uses: actions/setup-python@v2 - with: - python-version: 3.8 - - - name: Install ansible (${{ matrix.ansible }}) - run: pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible }}.tar.gz --disable-pip-version-check - - - name: Make directory to make ansible-test happy - run: | - pwd - mkdir -p ansible_collections/netapp/elementsw/ - rsync -av . ansible_collections/netapp/elementsw/ --exclude ansible_collections/netapp/elementsw/ - - - - name: Run sanity tests Elementsw - run: ansible-test sanity --docker -v --color - working-directory: ansible_collections/netapp/elementsw/ - - - name: Run Unit Tests - run: ansible-test units --docker -v --color - working-directory: ansible_collections/netapp/elementsw/ diff --git a/ansible_collections/netapp/elementsw/CHANGELOG.rst b/ansible_collections/netapp/elementsw/CHANGELOG.rst deleted file mode 100644 index a611ba793..000000000 --- a/ansible_collections/netapp/elementsw/CHANGELOG.rst +++ /dev/null @@ -1,192 +0,0 @@ -========================================= -NetApp ElementSW Collection Release Notes -========================================= - -.. contents:: Topics - - -v21.7.0 -======= - -Minor Changes -------------- - -- PR1 - allow usage of Ansible module group defaults - for Ansible 2.12+. 
- -v21.6.1 -======= - -Bugfixes --------- - -- requirements.txt - point to the correct python dependency - -v21.3.0 -======= - -Minor Changes -------------- - -- na_elementsw_info - add ``cluster_nodes`` and ``cluster_drives``. -- na_elementsw_qos_policy - explicitly define ``minIOPS``, ``maxIOPS``, ``burstIOPS`` as int. - -Bugfixes --------- - -- na_elementsw_drive - lastest SDK does not accept ``force_during_bin_sync`` and ``force_during_upgrade``. -- na_elementsw_qos_policy - loop would convert `minIOPS`, `maxIOPS`, `burstIOPS` to str, causing type mismatch issues in comparisons. -- na_elementsw_snapshot_schedule - change of interface in SDK ('ScheduleInfo' object has no attribute 'minutes') - -v20.11.0 -======== - -Minor Changes -------------- - -- na_elementsw_snapshot_schedule - Add ``retention`` in examples. - -Bugfixes --------- - -- na_elementsw_drive - Object of type 'dict_values' is not JSON serializable. - -v20.10.0 -======== - -Minor Changes -------------- - -- na_elementsw_cluster - add new options ``encryption``, ``order_number``, and ``serial_number``. -- na_elementsw_network_interfaces - make all options not required, so that only bond_1g can be set for example. -- na_elementsw_network_interfaces - restructure options into 2 dictionaries ``bond_1g`` and ``bond_10g``, so that there is no shared option. Disallow all older options. - -New Modules ------------ - -- netapp.elementsw.na_elementsw_info - NetApp Element Software Info - -v20.9.1 -======= - -Bugfixes --------- - -- na_elementsw_node - improve error reporting when cluster name cannot be set because node is already active. -- na_elementsw_schedule - missing imports TimeIntervalFrequency, Schedule, ScheduleInfo have been added back - -v20.9.0 -======= - -Minor Changes -------------- - -- na_elementsw_node - ``cluster_name`` to set the cluster name on new nodes. -- na_elementsw_node - ``preset_only`` to only set the cluster name before creating a cluster with na_elementsw_cluster. 
-- na_elementsw_volume - ``qos_policy_name`` to provide a QOS policy name or ID. - -Bugfixes --------- - -- na_elementsw_node - fix check_mode so that no action is taken. - -New Modules ------------ - -- netapp.elementsw.na_elementsw_qos_policy - NetApp Element Software create/modify/rename/delete QOS Policy - -v20.8.0 -======= - -Minor Changes -------------- - -- add "required:true" where missing. -- add "type:str" (or int, dict) where missing in documentation section. -- na_elementsw_drive - add all drives in a cluster, allow for a list of nodes or a list of drives. -- remove "required:true" for state and use present as default. -- use a three group format for ``version_added``. So 2.7 becomes 2.7.0. Same thing for 2.8 and 2.9. - -Bugfixes --------- - -- na_elementsw_access_group - fix check_mode so that no action is taken. -- na_elementsw_admin_users - fix check_mode so that no action is taken. -- na_elementsw_cluster - create cluster if it does not exist. Do not expect MVIP or SVIP to exist before create. -- na_elementsw_cluster_snmp - double exception because of AttributeError. -- na_elementsw_drive - node_id or drive_id were not handled properly when using numeric ids. -- na_elementsw_initiators - volume_access_group_id was ignored. volume_access_groups was ignored and redundant. -- na_elementsw_ldap - double exception because of AttributeError. -- na_elementsw_snapshot_schedule - ignore schedules being deleted (idempotency), remove default values and fix documentation. -- na_elementsw_vlan - AttributeError if VLAN already exists. -- na_elementsw_vlan - change in attributes was ignored. -- na_elementsw_vlan - fix check_mode so that no action is taken. -- na_elementsw_volume - Argument '512emulation' in argument_spec is not a valid python identifier - renamed to enable512emulation. -- na_elementsw_volume - double exception because of AttributeError. - -v20.6.0 -======= - -Bugfixes --------- - -- galaxy.yml - fix repository and homepage links. 
- -v20.2.0 -======= - -Bugfixes --------- - -- galaxy.yml - fix path to github repository. -- netapp.py - report error in case of connection error rather than raising a generic exception by default. - -v20.1.0 -======= - -New Modules ------------ - -- netapp.elementsw.na_elementsw_access_group_volumes - NetApp Element Software Add/Remove Volumes to/from Access Group - -v19.10.0 -======== - -Minor Changes -------------- - -- refactor existing modules as a collection - -v2.8.0 -====== - -New Modules ------------ - -- netapp.elementsw.na_elementsw_cluster_config - Configure Element SW Cluster -- netapp.elementsw.na_elementsw_cluster_snmp - Configure Element SW Cluster SNMP -- netapp.elementsw.na_elementsw_initiators - Manage Element SW initiators - -v2.7.0 -====== - -New Modules ------------ - -- netapp.elementsw.na_elementsw_access_group - NetApp Element Software Manage Access Groups -- netapp.elementsw.na_elementsw_account - NetApp Element Software Manage Accounts -- netapp.elementsw.na_elementsw_admin_users - NetApp Element Software Manage Admin Users -- netapp.elementsw.na_elementsw_backup - NetApp Element Software Create Backups -- netapp.elementsw.na_elementsw_check_connections - NetApp Element Software Check connectivity to MVIP and SVIP. 
-- netapp.elementsw.na_elementsw_cluster - NetApp Element Software Create Cluster -- netapp.elementsw.na_elementsw_cluster_pair - NetApp Element Software Manage Cluster Pair -- netapp.elementsw.na_elementsw_drive - NetApp Element Software Manage Node Drives -- netapp.elementsw.na_elementsw_ldap - NetApp Element Software Manage ldap admin users -- netapp.elementsw.na_elementsw_network_interfaces - NetApp Element Software Configure Node Network Interfaces -- netapp.elementsw.na_elementsw_node - NetApp Element Software Node Operation -- netapp.elementsw.na_elementsw_snapshot - NetApp Element Software Manage Snapshots -- netapp.elementsw.na_elementsw_snapshot_restore - NetApp Element Software Restore Snapshot -- netapp.elementsw.na_elementsw_snapshot_schedule - NetApp Element Software Snapshot Schedules -- netapp.elementsw.na_elementsw_vlan - NetApp Element Software Manage VLAN -- netapp.elementsw.na_elementsw_volume - NetApp Element Software Manage Volumes -- netapp.elementsw.na_elementsw_volume_clone - NetApp Element Software Create Volume Clone -- netapp.elementsw.na_elementsw_volume_pair - NetApp Element Software Volume Pair diff --git a/ansible_collections/netapp/elementsw/FILES.json b/ansible_collections/netapp/elementsw/FILES.json deleted file mode 100644 index 7113c56bd..000000000 --- a/ansible_collections/netapp/elementsw/FILES.json +++ /dev/null @@ -1,649 +0,0 @@ -{ - "files": [ - { - "name": ".", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "requirements.txt", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "0bd0735ea0d7847ed0f372da0cf7d7f8a0a2471aec49b5c16901d1c32793e43e", - "format": 1 - }, - { - "name": "plugins", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "plugins/doc_fragments", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "plugins/doc_fragments/netapp.py", - "ftype": "file", 
- "chksum_type": "sha256", - "chksum_sha256": "fd42778f85cd3b989604d0227af4cc90350d94f5864938eb0bd29cf7a66401c3", - "format": 1 - }, - { - "name": "plugins/module_utils", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "plugins/module_utils/netapp.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "cc9a4b7d4d77cf221f256e5972707d08f424f319b856ef4a8fdd0dbe9a3dc322", - "format": 1 - }, - { - "name": "plugins/module_utils/netapp_module.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "a98ea2d0aec17e10c6b5a956cfaa1dcddbd336b674079a1f86e85429381a49e7", - "format": 1 - }, - { - "name": "plugins/module_utils/netapp_elementsw_module.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "33132c95ba546d56bf953e1613dd39ad8a258379b3a32120f7be8b19e2c0d8a2", - "format": 1 - }, - { - "name": "plugins/modules", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "plugins/modules/na_elementsw_initiators.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "4a0e280ee9ef13b994f98c848524dc53b3a3a16559e3d1e22be6573272327c8c", - "format": 1 - }, - { - "name": "plugins/modules/na_elementsw_qos_policy.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "4934c116271845de9f5da2f9747042601e961bc929f3a22397961313b3888e06", - "format": 1 - }, - { - "name": "plugins/modules/na_elementsw_cluster_snmp.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "1ee85a0b9e6ac2b0151a52b7722a43ea3e358d48f48816f5fac597151fd58d93", - "format": 1 - }, - { - "name": "plugins/modules/na_elementsw_snapshot.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "25b0f4b869b1b814160da50df5b7b06d0e5d3eb83ca8887a0fead337699d6c62", - "format": 1 - }, - { - "name": "plugins/modules/na_elementsw_volume.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": 
"a4b329b6f3c13f500a95ad0fb40eba4db5873b78b0c137997c858229336011af", - "format": 1 - }, - { - "name": "plugins/modules/na_elementsw_access_group_volumes.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "532fbf39ed0ee98af0e9323f037ab0e0f52d5eac9179a82eeb169a5a48cdfd3e", - "format": 1 - }, - { - "name": "plugins/modules/na_elementsw_snapshot_schedule.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "6a07aa78ae73ec965592b77bad72bbedd724b519e82f51805d5fd414d3f9c414", - "format": 1 - }, - { - "name": "plugins/modules/na_elementsw_node.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "6882747383c770c6ec43585e3a4db0081c8de165415d40941532324208e3aa4e", - "format": 1 - }, - { - "name": "plugins/modules/na_elementsw_access_group.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "7099bfffb1ec35ed7c0a40c0708cb4d1d79f6267b16fcc71f759796add15edaf", - "format": 1 - }, - { - "name": "plugins/modules/na_elementsw_cluster_pair.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "ddd54266eb0a3ebf891d8c1310059b40cfbad7679db3d7f2b9c600baf31e42ca", - "format": 1 - }, - { - "name": "plugins/modules/na_elementsw_volume_pair.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "ead937f30287dfd02521b4fdda1e0a128cd1d3ba8db4a721330ff4bbfb76e284", - "format": 1 - }, - { - "name": "plugins/modules/na_elementsw_cluster_config.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "6dc94b752a4931e30ea169f61aec3919a7cd7636ce3aeff4764094d2adc355f7", - "format": 1 - }, - { - "name": "plugins/modules/na_elementsw_info.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "983415a406d31e2edd3e06b64745363e0d1c5ee7575058298bfdce6919522e31", - "format": 1 - }, - { - "name": "plugins/modules/na_elementsw_ldap.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "8b8a59c8c45c1aa147c2d90b01654135f31ac4a1e31c643ce3b07007d6f28ea9", - "format": 1 - 
}, - { - "name": "plugins/modules/na_elementsw_vlan.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "39414c4cb613271d96220d275f027404e41e4b5dd61db5c7ad6eb3f70bc3243b", - "format": 1 - }, - { - "name": "plugins/modules/na_elementsw_cluster.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "d42be06f947c782d42fdd9141daeb87374855fc996ecfc53a450e20216cc6e05", - "format": 1 - }, - { - "name": "plugins/modules/na_elementsw_volume_clone.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "05f518bb36b88476c0a6dc329587400937c88c64bb335bd0f3ad279c79cf845e", - "format": 1 - }, - { - "name": "plugins/modules/na_elementsw_check_connections.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "54458477eb0807256e663f64924d88cf5a5cb8058c0e7212a155a4aff9f87997", - "format": 1 - }, - { - "name": "plugins/modules/na_elementsw_drive.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "5d7a53bf79e58150eff5f6979890afb54a6859597121a4cee0e7b4e6020f0eb0", - "format": 1 - }, - { - "name": "plugins/modules/na_elementsw_account.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "7dbfc7b05e3c69ebbb1723314094d62e07a4b328cba09db899808fd50d38bc15", - "format": 1 - }, - { - "name": "plugins/modules/na_elementsw_snapshot_restore.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "0d70395bc1a83498c08081aaa31fa4e5bb8ebfccbc03b7c9f1cb0aa6a4d132c9", - "format": 1 - }, - { - "name": "plugins/modules/na_elementsw_backup.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "b545334782c314c7c2c8e857f85838859b461176369ed002f3fba7414062b809", - "format": 1 - }, - { - "name": "plugins/modules/na_elementsw_network_interfaces.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "d045d9768f1b469c3aeda533dbfdcbdb5a2f51a2d9949c59a3f73b56959ca082", - "format": 1 - }, - { - "name": "plugins/modules/na_elementsw_admin_users.py", - "ftype": "file", - 
"chksum_type": "sha256", - "chksum_sha256": "b822e729b9e40361b148fd9739fddf1c26705597a092b5d967e29676eed9fb66", - "format": 1 - }, - { - "name": "tests", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/unit", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/unit/compat", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/unit/compat/unittest.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "cba95d18c5b39c6f49714eacf1ac77452c2e32fa087c03cf01aacd19ae597b0f", - "format": 1 - }, - { - "name": "tests/unit/compat/builtins.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "0ca4cac919e166b25e601e11acb01f6957dddd574ff0a62569cb994a5ecb63e1", - "format": 1 - }, - { - "name": "tests/unit/compat/__init__.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "format": 1 - }, - { - "name": "tests/unit/compat/mock.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "0af958450cf6de3fbafe94b1111eae8ba5a8dbe1d785ffbb9df81f26e4946d99", - "format": 1 - }, - { - "name": "tests/unit/requirements.txt", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "da9e4399d51f4aa7e39d11a4c8adb3ea291252334eeebc6e5569777c717739da", - "format": 1 - }, - { - "name": "tests/unit/plugins", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/unit/plugins/modules", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_na_elementsw_cluster.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "66d9f46f9b572b24f6465f43d2aebfb43f3fe2858ad528472559ba089dc2fb3c", - "format": 1 - }, - { - "name": 
"tests/unit/plugins/modules/test_na_elementsw_access_group_volumes.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "f6aa0100e51bbe54b6e9edeb072b7de526542e55da1cede0d1ae5f4367ec89eb", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_na_elementsw_volume.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "d910be3c377edddb04f6f74c3e4908a9d6d32c71ec251cf74e9eaa6711b1bffe", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_na_elementsw_vlan.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "9390907ec097add3aa2d936dd95f63d05bfac2b5b730ae12df50d14c5a18e0c1", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_na_elementsw_nodes.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "b563b9adab2f4c7a67354fa2b7a2e3468cf68b041ba51c788e0e082e4b50b7ba", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_na_elementsw_cluster_config.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "ae4c8e648a16dfa704964ef0f3782ea27adec2f1c0ceb5fca84ab86e888caffa", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_na_elementsw_qos_policy.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "189242c5691fba4c436403cbfeb512fdab01c8bd35b028d7262b4cdeca9c7376", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_na_elementsw_account.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "5002081bc3177a94e5b2911259138ba80b2cf03006c6333c78cc50731f89fbbe", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_na_elementsw_initiators.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "f5cc8b59e5120ff8f6b51a9b2085d336f63c5b91d7d3f21db629176c92c2f011", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_na_elementsw_cluster_snmp.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": 
"655c454425b97c72bb924b5def11e8dc65dd9dc4cd40cf00df66ae85120ba40f", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_na_elementsw_template.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "fb1802b2cd87193966ccc7d8b0c6c94522d7954bfada73febb8aeae77367322c", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_na_elementsw_network_interfaces.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "489f21207a0de4f7ab263096c0f2d2c674cb9a334b45edb76165f7a933b13c5e", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_na_elementsw_access_group.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "4682bf1c6d258032a9a9b001254246a2993e006ab2aa32463e42bed5e192e09f", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_na_elementsw_info.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "9a138bc7c455af917d85a69c4e010ae92cda34cff767fe7d0514806ab82d22b0", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules_utils", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/unit/plugins/modules_utils/test_netapp_module.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "a40d8651793b9771d6f56d5e8b52772597a77e317002a9f9bf3400cffd014d60", - "format": 1 - }, - { - "name": "meta", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "meta/runtime.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "2a2a08d11b2cf3859e796da8a7928461df41efdd14abbc7e4234a37da5ca19c4", - "format": 1 - }, - { - "name": "changelogs", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "changelogs/fragments", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-3734.yaml", - "ftype": "file", - "chksum_type": "sha256", - 
"chksum_sha256": "419f9e02843f2fc7b584c8d3a4160769b1939784dbc0f726c55daeca0bc6bef9", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-3324.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "784b39c5d9440affb1dbab3ba8769ec1e88e7570798448c238a77d32dbf6e505", - "format": 1 - }, - { - "name": "changelogs/fragments/20.9.0.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "56bed0aab9696af7068eb1bb743eb316ab23c3200ac6faa715a303e5f33f0973", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-3196.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "94573d6e6ddde5f8a053d72a7e49d87d13c4274f5ea5c24c6c0a95947215977b", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-3800.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "6fc0ea3ba25f76222015eba223c4a88c7d36b52cb5d767a5c3a9374746532a5e", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-3733.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "9a1ce243b30c79588a96fac9c050487d9b9ea63208e9c30934b7af77cc24dfe4", - "format": 1 - }, - { - "name": "changelogs/fragments/2019.10.0.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "7b1a5ef7df5f1e6e66ddc013149aea0480eb79f911a0563e2e6d7d9af79d5572", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-3174.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "7cfc4addbf3343a3ce121f5de6cc2cc8244ad7b62a7429c2694543dabc2a8ccf", - "format": 1 - }, - { - "name": "changelogs/fragments/20.2.0.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "2c98764e792ed6c6d9cee6df80b9fff8f4fcadaf765c0aa0f0ed3dd5e3080fec", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-3117.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "242f770eafb49994810a3263e23e1d342aeb36396819045c48f491810aab6908", - "format": 1 - }, - { - "name": 
"changelogs/fragments/DEVOPS-3731.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "5f92782e45a47a3439f8a858c3f283879fdc070422109d5a9ab2fdaa7ca56293", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-3310.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "8132aa931d13a49ba1a3c0fee131c048c6767ce17b3d9cabafa7e34f3c7c239a", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-3235.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "cddb1135b1c15ca3c8f130bcc439d73ac819c7a3e0472c9ff358c75405bd8cb3", - "format": 1 - }, - { - "name": "changelogs/fragments/20.8.0.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "b13007f7b14dd35357ec0fb06b0e89cf5fee56036b0a6004dfb21c46010cb7c1", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-3188.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "0efa05e4cca58b1bfe30a60673adc266e7598d841065486b5b29c7e7a8b29bf4", - "format": 1 - }, - { - "name": "changelogs/fragments/20.6.0.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "6192b3cccdc7c1e1eb0d61a49dd20c6f234499b6dd9b52b2f974b673e99f7a47", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-4416.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "4224db573f34caeeb956c8728eb343a47bc2729d898001a4c6a671b780dae1bf", - "format": 1 - }, - { - "name": "changelogs/config.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "70f470630a3fb893540ad9060634bfd0955e4a3371ab1a921e44bdc6b5ea1ba5", - "format": 1 - }, - { - "name": "changelogs/changelog.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "ad8dbe639e83e6feef631362bf2d78cde3c51c093203c0de8113b0d1cbc7756d", - "format": 1 - }, - { - "name": "README.md", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "ada0df4adf6ff17cdb5493e6050ec750fa13347ea71a6122a7e139f65f842b50", - "format": 1 - }, - { - 
"name": ".github", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": ".github/workflows", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": ".github/workflows/coverage.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "5fef29bf470c1567ed5ba3e3d5f227d21db4d23455c4fd12628e3e3ad80ddd76", - "format": 1 - }, - { - "name": ".github/workflows/main.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "140dc9b99f730080720586330df5ee7ef8f5e74b5898738d2b269ac52bbe4666", - "format": 1 - }, - { - "name": ".github/ISSUE_TEMPLATE", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": ".github/ISSUE_TEMPLATE/feature_request.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "1c8be00f495b1a0e20d3e4c2bca809b9eda7d2ab92e838bfad951dfa37e7b3d2", - "format": 1 - }, - { - "name": ".github/ISSUE_TEMPLATE/bug_report.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "b59987ccd30474cf321e36496cc8b30464bdd816c5b3860d659356bc3e2a2a7f", - "format": 1 - }, - { - "name": "CHANGELOG.rst", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "1818f97ced0b9d61cd4d65742e14cb618a333be7f734c1fee8bb420323e5373d", - "format": 1 - } - ], - "format": 1 -}
\ No newline at end of file diff --git a/ansible_collections/netapp/elementsw/MANIFEST.json b/ansible_collections/netapp/elementsw/MANIFEST.json deleted file mode 100644 index fda95d344..000000000 --- a/ansible_collections/netapp/elementsw/MANIFEST.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "collection_info": { - "namespace": "netapp", - "name": "elementsw", - "version": "21.7.0", - "authors": [ - "NetApp Ansible Team <ng-ansibleteam@netapp.com>" - ], - "readme": "README.md", - "tags": [ - "storage", - "netapp", - "solidfire" - ], - "description": "Netapp ElementSW (Solidfire) Collection", - "license": [ - "GPL-2.0-or-later" - ], - "license_file": null, - "dependencies": {}, - "repository": "https://github.com/ansible-collections/netapp.elementsw", - "documentation": null, - "homepage": "https://netapp.io/configuration-management-and-automation/", - "issues": "https://github.com/ansible-collections/netapp.elementsw/issues" - }, - "file_manifest_file": { - "name": "FILES.json", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "472a7d73c3fe2719a7c500eadc92b8f89ca852d2c5aee2b71d7afb688c97dc8c", - "format": 1 - }, - "format": 1 -}
\ No newline at end of file diff --git a/ansible_collections/netapp/elementsw/README.md b/ansible_collections/netapp/elementsw/README.md deleted file mode 100644 index 96b62e64d..000000000 --- a/ansible_collections/netapp/elementsw/README.md +++ /dev/null @@ -1,133 +0,0 @@ -[![Documentation](https://img.shields.io/badge/docs-brightgreen.svg)](https://docs.ansible.com/ansible/devel/collections/netapp/elementsw/index.html) -![example workflow](https://github.com/ansible-collections/netapp.elementsw/actions/workflows/main.yml/badge.svg) -[![codecov](https://codecov.io/gh/ansible-collections/netapp.elementsw/branch/main/graph/badge.svg?token=weBYkksxSi)](https://codecov.io/gh/ansible-collections/netapp.elementsw) - - -netapp.elementSW - -NetApp ElementSW Collection - -Copyright (c) 2019 NetApp, Inc. All rights reserved. -Specifications subject to change without notice. - -# Installation -```bash -ansible-galaxy collection install netapp.elementsw -``` -To use Collection add the following to the top of your playbook, with out this you will be using Ansible 2.9 version of the module -``` -collections: - - netapp.elementsw -``` - -# Module documentation -https://docs.ansible.com/ansible/devel/collections/netapp/elementsw/ - -# Need help -Join our Slack Channel at [Netapp.io](http://netapp.io/slack) - -# Release Notes - -## 21.7.0 - -### Minor changes - - all modules - enable usage of Ansible module group defaults - for Ansible 2.12+. - -## 21.6.1 -### Bug Fixes - - requirements.txt: point to the correct python dependency - -## 21.3.0 - -### New Options - - na_elementsw_qos_policy: explicitly define `minIOPS`, `maxIOPS`, `burstIOPS` as int. - -### Minor changes - - na_elementsw_info - add `cluster_nodes` and `cluster_drives`. - -### Bug Fixes - - na_elementsw_drive - latest SDK does not accept ``force_during_bin_sync`` and ``force_during_upgrade``. 
- - na_elementsw_qos_policy - loop would convert `minIOPS`, `maxIOPS`, `burstIOPS` to str, causing type mismatch issues in comparisons. - - na_elementsw_snapshot_schedule - change of interface in SDK ('ScheduleInfo' object has no attribute 'minutes') - -## 20.11.0 - -### Minor changes -- na_elementsw_snapshot_schedule - Add `retention` in examples. - -### Bug Fixes -- na_elementsw_drive - Object of type 'dict_values' is not JSON serializable. - -## 20.10.0 - -### New Modules -- na_elementsw_info: support for two subsets `cluster_accounts`, `node_config`. - -### New Options -- na_elementsw_cluster: `encryption` to enable encryption at rest. `order_number` and `serial_number` for demo purposes. -- na_elementsw_network_interfaces: restructure options, into 2 dictionaries `bond_1g` and `bond_10g`, so that there is no shared option. Disallow all older options. -- na_elementsw_network_interfaces: make all options not required, so that only bond_1g can be set for example. - -## 20.9.1 - -### Bug Fixes -- na_elementsw_node: improve error reporting when cluster name cannot be set because node is already active. -- na_elementsw_schedule - missing imports TimeIntervalFrequency, Schedule, ScheduleInfo have been added back - -## 20.9.0 - -### New Modules -- na_elementsw_qos_policy: create, modify, rename, or delete QOS policy. - -### New Options -- na_elementsw_node: `cluster_name` to set the cluster name on new nodes. -- na_elementsw_node: `preset_only` to only set the cluster name before creating a cluster with na_elementsw_cluster. -- na_elementsw_volume: `qos_policy_name` to provide a QOS policy name or ID. - -### Bug Fixes -- na_elementsw_node: fix check_mode so that no action is taken. - -## 20.8.0 - -### New Options -- na_elementsw_drive: add all drives in a cluster, allow for a list of nodes or a list of drives. - -### Bug Fixes -- na_elementsw_access_group: fix check_mode so that no action is taken. 
-- na_elementsw_admin_users: fix check_mode so that no action is taken. -- na_elementsw_cluster: create cluster if it does not exist. Do not expect MVIP or SVIP to exist before create. -- na_elementsw_cluster_snmp: double exception because of AttributeError. -- na_elementsw_drive: node_id or drive_id were not handled properly when using numeric ids. -- na_elementsw_initiators: volume_access_group_id was ignored. volume_access_groups was ignored and redundant. -- na_elementsw_ldap: double exception because of AttributeError. -- na_elementsw_snapshot_schedule: ignore schedules being deleted (idempotency), remove default values and fix documentation. -- na_elementsw_vlan: AttributeError if VLAN already exists. -- na_elementsw_vlan: fix check_mode so that no action is taken. -- na_elementsw_vlan: change in attributes was ignored. -- na_elementsw_volume: double exception because of AttributeError. -- na_elementsw_volume: Argument '512emulation' in argument_spec is not a valid python identifier - renamed to enable512emulation. - -### Module documentation changes -- use a three group format for `version_added`. So 2.7 becomes 2.7.0. Same thing for 2.8 and 2.9. -- add type: str (or int, dict) where missing in documentation section. -- add required: true where missing. -- remove required: true for state and use present as default. - -## 20.6.0 -### Bug Fixes -- galaxy.xml: fix repository and homepage links - -## 20.2.0 -### Bug Fixes -- galaxy.yml: fix path to github repository. -- netapp.py: report error in case of connection error rather than raising a generic exception by default. 
- -## 20.1.0 -### New Module -- na_elementsw_access_group_volumes: add/remove volumes to/from existing access group - -## 19.11.0 -## 19.10.0 -Changes in 19.10.0 and September collection releases compared to Ansible 2.9 -### Documentation Fixes: -- na_elementsw_drive: na_elementsw_drive was documented as na_element_drive diff --git a/ansible_collections/netapp/elementsw/changelogs/changelog.yaml b/ansible_collections/netapp/elementsw/changelogs/changelog.yaml deleted file mode 100644 index 97d921301..000000000 --- a/ansible_collections/netapp/elementsw/changelogs/changelog.yaml +++ /dev/null @@ -1,221 +0,0 @@ -ancestor: null -releases: - 19.10.0: - changes: - minor_changes: - - refactor existing modules as a collection - fragments: - - 2019.10.0.yaml - release_date: '2019-11-14' - 2.7.0: - modules: - - description: NetApp Element Software Manage Access Groups - name: na_elementsw_access_group - namespace: '' - - description: NetApp Element Software Manage Accounts - name: na_elementsw_account - namespace: '' - - description: NetApp Element Software Manage Admin Users - name: na_elementsw_admin_users - namespace: '' - - description: NetApp Element Software Create Backups - name: na_elementsw_backup - namespace: '' - - description: NetApp Element Software Check connectivity to MVIP and SVIP. 
- name: na_elementsw_check_connections - namespace: '' - - description: NetApp Element Software Create Cluster - name: na_elementsw_cluster - namespace: '' - - description: NetApp Element Software Manage Cluster Pair - name: na_elementsw_cluster_pair - namespace: '' - - description: NetApp Element Software Manage Node Drives - name: na_elementsw_drive - namespace: '' - - description: NetApp Element Software Manage ldap admin users - name: na_elementsw_ldap - namespace: '' - - description: NetApp Element Software Configure Node Network Interfaces - name: na_elementsw_network_interfaces - namespace: '' - - description: NetApp Element Software Node Operation - name: na_elementsw_node - namespace: '' - - description: NetApp Element Software Manage Snapshots - name: na_elementsw_snapshot - namespace: '' - - description: NetApp Element Software Restore Snapshot - name: na_elementsw_snapshot_restore - namespace: '' - - description: NetApp Element Software Snapshot Schedules - name: na_elementsw_snapshot_schedule - namespace: '' - - description: NetApp Element Software Manage VLAN - name: na_elementsw_vlan - namespace: '' - - description: NetApp Element Software Manage Volumes - name: na_elementsw_volume - namespace: '' - - description: NetApp Element Software Create Volume Clone - name: na_elementsw_volume_clone - namespace: '' - - description: NetApp Element Software Volume Pair - name: na_elementsw_volume_pair - namespace: '' - release_date: '2018-09-21' - 2.8.0: - modules: - - description: Configure Element SW Cluster - name: na_elementsw_cluster_config - namespace: '' - - description: Configure Element SW Cluster SNMP - name: na_elementsw_cluster_snmp - namespace: '' - - description: Manage Element SW initiators - name: na_elementsw_initiators - namespace: '' - release_date: '2019-04-11' - 20.1.0: - modules: - - description: NetApp Element Software Add/Remove Volumes to/from Access Group - name: na_elementsw_access_group_volumes - namespace: '' - release_date: 
'2020-01-08' - 20.10.0: - changes: - minor_changes: - - na_elementsw_cluster - add new options ``encryption``, ``order_number``, and - ``serial_number``. - - na_elementsw_network_interfaces - make all options not required, so that only - bond_1g can be set for example. - - na_elementsw_network_interfaces - restructure options into 2 dictionaries - ``bond_1g`` and ``bond_10g``, so that there is no shared option. Disallow - all older options. - fragments: - - DEVOPS-3117.yaml - - DEVOPS-3196.yaml - - DEVOPS-3235.yaml - modules: - - description: NetApp Element Software Info - name: na_elementsw_info - namespace: '' - release_date: '2020-10-08' - 20.11.0: - changes: - bugfixes: - - na_elementsw_drive - Object of type 'dict_values' is not JSON serializable. - minor_changes: - - na_elementsw_snapshot_schedule - Add ``retention`` in examples. - fragments: - - DEVOPS-3310.yml - - DEVOPS-3324.yaml - release_date: '2020-11-05' - 20.2.0: - changes: - bugfixes: - - galaxy.yml - fix path to github repository. - - netapp.py - report error in case of connection error rather than raising a - generic exception by default. - fragments: - - 20.2.0.yaml - release_date: '2020-02-05' - 20.6.0: - changes: - bugfixes: - - galaxy.yml - fix repository and homepage links. - fragments: - - 20.6.0.yaml - release_date: '2020-06-03' - 20.8.0: - changes: - bugfixes: - - na_elementsw_access_group - fix check_mode so that no action is taken. - - na_elementsw_admin_users - fix check_mode so that no action is taken. - - na_elementsw_cluster - create cluster if it does not exist. Do not expect - MVIP or SVIP to exist before create. - - na_elementsw_cluster_snmp - double exception because of AttributeError. - - na_elementsw_drive - node_id or drive_id were not handled properly when using - numeric ids. - - na_elementsw_initiators - volume_access_group_id was ignored. volume_access_groups - was ignored and redundant. - - na_elementsw_ldap - double exception because of AttributeError. 
- - na_elementsw_snapshot_schedule - ignore schedules being deleted (idempotency), - remove default values and fix documentation. - - na_elementsw_vlan - AttributeError if VLAN already exists. - - na_elementsw_vlan - change in attributes was ignored. - - na_elementsw_vlan - fix check_mode so that no action is taken. - - na_elementsw_volume - Argument '512emulation' in argument_spec is not a valid - python identifier - renamed to enable512emulation. - - na_elementsw_volume - double exception because of AttributeError. - minor_changes: - - add "required:true" where missing. - - add "type:str" (or int, dict) where missing in documentation section. - - na_elementsw_drive - add all drives in a cluster, allow for a list of nodes - or a list of drives. - - remove "required:true" for state and use present as default. - - use a three group format for ``version_added``. So 2.7 becomes 2.7.0. Same - thing for 2.8 and 2.9. - fragments: - - 20.8.0.yaml - release_date: '2020-08-05' - 20.9.0: - changes: - bugfixes: - - na_elementsw_node - fix check_mode so that no action is taken. - minor_changes: - - na_elementsw_node - ``cluster_name`` to set the cluster name on new nodes. - - na_elementsw_node - ``preset_only`` to only set the cluster name before creating - a cluster with na_elementsw_cluster. - - na_elementsw_volume - ``qos_policy_name`` to provide a QOS policy name or - ID. - fragments: - - 20.9.0.yaml - modules: - - description: NetApp Element Software create/modify/rename/delete QOS Policy - name: na_elementsw_qos_policy - namespace: '' - release_date: '2020-09-02' - 20.9.1: - changes: - bugfixes: - - na_elementsw_node - improve error reporting when cluster name cannot be set - because node is already active. 
- - na_elementsw_schedule - missing imports TimeIntervalFrequency, Schedule, ScheduleInfo - have been added back - fragments: - - DEVOPS-3174.yaml - - DEVOPS-3188.yaml - release_date: '2020-09-08' - 21.3.0: - changes: - bugfixes: - - na_elementsw_drive - lastest SDK does not accept ``force_during_bin_sync`` - and ``force_during_upgrade``. - - na_elementsw_qos_policy - loop would convert `minIOPS`, `maxIOPS`, `burstIOPS` - to str, causing type mismatch issues in comparisons. - - na_elementsw_snapshot_schedule - change of interface in SDK ('ScheduleInfo' - object has no attribute 'minutes') - minor_changes: - - na_elementsw_info - add ``cluster_nodes`` and ``cluster_drives``. - - na_elementsw_qos_policy - explicitly define ``minIOPS``, ``maxIOPS``, ``burstIOPS`` - as int. - fragments: - - DEVOPS-3731.yaml - - DEVOPS-3733.yaml - - DEVOPS-3734.yaml - release_date: '2021-03-03' - 21.6.1: - changes: - bugfixes: - - requirements.txt - point to the correct python dependency - fragments: - - DEVOPS-3800.yaml - release_date: '2021-05-18' - 21.7.0: - changes: - minor_changes: - - PR1 - allow usage of Ansible module group defaults - for Ansible 2.12+. 
- fragments: - - DEVOPS-4416.yaml - release_date: '2021-11-03' diff --git a/ansible_collections/netapp/elementsw/changelogs/config.yaml b/ansible_collections/netapp/elementsw/changelogs/config.yaml deleted file mode 100644 index 2d637df5c..000000000 --- a/ansible_collections/netapp/elementsw/changelogs/config.yaml +++ /dev/null @@ -1,32 +0,0 @@ -changelog_filename_template: ../CHANGELOG.rst -changelog_filename_version_depth: 0 -changes_file: changelog.yaml -changes_format: combined -ignore_other_fragment_extensions: true -keep_fragments: true -mention_ancestor: true -new_plugins_after_name: removed_features -notesdir: fragments -prelude_section_name: release_summary -prelude_section_title: Release Summary -sanitize_changelog: true -sections: -- - major_changes - - Major Changes -- - minor_changes - - Minor Changes -- - breaking_changes - - Breaking Changes / Porting Guide -- - deprecated_features - - Deprecated Features -- - removed_features - - Removed Features (previously deprecated) -- - security_fixes - - Security Fixes -- - bugfixes - - Bugfixes -- - known_issues - - Known Issues -title: NetApp ElementSW Collection -trivial_section_name: trivial -use_fqcn: true diff --git a/ansible_collections/netapp/elementsw/changelogs/fragments/20.2.0.yaml b/ansible_collections/netapp/elementsw/changelogs/fragments/20.2.0.yaml deleted file mode 100644 index 832b5f56f..000000000 --- a/ansible_collections/netapp/elementsw/changelogs/fragments/20.2.0.yaml +++ /dev/null @@ -1,3 +0,0 @@ -bugfixes: - - galaxy.yml - fix path to github repository. - - netapp.py - report error in case of connection error rather than raising a generic exception by default. 
diff --git a/ansible_collections/netapp/elementsw/changelogs/fragments/20.6.0.yaml b/ansible_collections/netapp/elementsw/changelogs/fragments/20.6.0.yaml deleted file mode 100644 index fcd0d11ee..000000000 --- a/ansible_collections/netapp/elementsw/changelogs/fragments/20.6.0.yaml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: - - galaxy.yml - fix repository and homepage links. diff --git a/ansible_collections/netapp/elementsw/changelogs/fragments/20.8.0.yaml b/ansible_collections/netapp/elementsw/changelogs/fragments/20.8.0.yaml deleted file mode 100644 index 5c959531a..000000000 --- a/ansible_collections/netapp/elementsw/changelogs/fragments/20.8.0.yaml +++ /dev/null @@ -1,21 +0,0 @@ -minor_changes: - - na_elementsw_drive - add all drives in a cluster, allow for a list of nodes or a list of drives. - - use a three group format for ``version_added``. So 2.7 becomes 2.7.0. Same thing for 2.8 and 2.9. - - add "type:str" (or int, dict) where missing in documentation section. - - add "required:true" where missing. - - remove "required:true" for state and use present as default. - -bugfixes: - - na_elementsw_access_group - fix check_mode so that no action is taken. - - na_elementsw_admin_users - fix check_mode so that no action is taken. - - na_elementsw_cluster - create cluster if it does not exist. Do not expect MVIP or SVIP to exist before create. - - na_elementsw_cluster_snmp - double exception because of AttributeError. - - na_elementsw_drive - node_id or drive_id were not handled properly when using numeric ids. - - na_elementsw_initiators - volume_access_group_id was ignored. volume_access_groups was ignored and redundant. - - na_elementsw_ldap - double exception because of AttributeError. - - na_elementsw_snapshot_schedule - ignore schedules being deleted (idempotency), remove default values and fix documentation. - - na_elementsw_vlan - AttributeError if VLAN already exists. - - na_elementsw_vlan - fix check_mode so that no action is taken. 
- - na_elementsw_vlan - change in attributes was ignored. - - na_elementsw_volume - double exception because of AttributeError. - - na_elementsw_volume - Argument '512emulation' in argument_spec is not a valid python identifier - renamed to enable512emulation. diff --git a/ansible_collections/netapp/elementsw/changelogs/fragments/20.9.0.yaml b/ansible_collections/netapp/elementsw/changelogs/fragments/20.9.0.yaml deleted file mode 100644 index a406c9c2d..000000000 --- a/ansible_collections/netapp/elementsw/changelogs/fragments/20.9.0.yaml +++ /dev/null @@ -1,7 +0,0 @@ -minor_changes: - - na_elementsw_node - ``cluster_name`` to set the cluster name on new nodes. - - na_elementsw_node - ``preset_only`` to only set the cluster name before creating a cluster with na_elementsw_cluster. - - na_elementsw_volume - ``qos_policy_name`` to provide a QOS policy name or ID. - -bugfixes: - - na_elementsw_node - fix check_mode so that no action is taken. diff --git a/ansible_collections/netapp/elementsw/changelogs/fragments/2019.10.0.yaml b/ansible_collections/netapp/elementsw/changelogs/fragments/2019.10.0.yaml deleted file mode 100644 index 5723daa11..000000000 --- a/ansible_collections/netapp/elementsw/changelogs/fragments/2019.10.0.yaml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - refactor existing modules as a collection diff --git a/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3117.yaml b/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3117.yaml deleted file mode 100644 index 23a6cafa4..000000000 --- a/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3117.yaml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - na_elementsw_cluster - add new options ``encryption``, ``order_number``, and ``serial_number``. 
diff --git a/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3174.yaml b/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3174.yaml deleted file mode 100644 index 01e754719..000000000 --- a/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3174.yaml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: - - na_elementsw_node - improve error reporting when cluster name cannot be set because node is already active. diff --git a/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3188.yaml b/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3188.yaml deleted file mode 100644 index ad5d8bee7..000000000 --- a/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3188.yaml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: - - na_elementsw_schedule - missing imports TimeIntervalFrequency, Schedule, ScheduleInfo have been added back
\ No newline at end of file diff --git a/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3196.yaml b/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3196.yaml deleted file mode 100644 index 21a70b02c..000000000 --- a/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3196.yaml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - na_elementsw_network_interfaces - make all options not required, so that only bond_1g can be set for example. diff --git a/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3235.yaml b/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3235.yaml deleted file mode 100644 index 8a2f82f34..000000000 --- a/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3235.yaml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - na_elementsw_network_interfaces - restructure options into 2 dictionaries ``bond_1g`` and ``bond_10g``, so that there is no shared option. Disallow all older options. diff --git a/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3310.yml b/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3310.yml deleted file mode 100644 index 729e6d062..000000000 --- a/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3310.yml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - na_elementsw_snapshot_schedule - Add ``retention`` in examples. diff --git a/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3324.yaml b/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3324.yaml deleted file mode 100644 index b87e308d8..000000000 --- a/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3324.yaml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: - - na_elementsw_drive - Object of type 'dict_values' is not JSON serializable. 
diff --git a/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3731.yaml b/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3731.yaml deleted file mode 100644 index a4e43ed45..000000000 --- a/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3731.yaml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - na_elementsw_qos_policy - explicitly define ``minIOPS``, ``maxIOPS``, ``burstIOPS`` as int. -bugfixes: - - na_elementsw_qos_policy - loop would convert `minIOPS`, `maxIOPS`, `burstIOPS` to str, causing type mismatch issues in comparisons. diff --git a/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3733.yaml b/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3733.yaml deleted file mode 100644 index 7310f3b75..000000000 --- a/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3733.yaml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - na_elementsw_info - add ``cluster_nodes`` and ``cluster_drives``. -bugfixes: - - na_elementsw_drive - lastest SDK does not accept ``force_during_bin_sync`` and ``force_during_upgrade``. 
diff --git a/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3734.yaml b/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3734.yaml deleted file mode 100644 index 08c5bf552..000000000 --- a/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3734.yaml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: - - na_elementsw_snapshot_schedule - change of interface in SDK ('ScheduleInfo' object has no attribute 'minutes') diff --git a/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3800.yaml b/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3800.yaml deleted file mode 100644 index b6e57d046..000000000 --- a/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-3800.yaml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: - - requirements.txt - point to the correct python dependency diff --git a/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-4416.yaml b/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-4416.yaml deleted file mode 100644 index 6b4b660a0..000000000 --- a/ansible_collections/netapp/elementsw/changelogs/fragments/DEVOPS-4416.yaml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - PR1 - allow usage of Ansible module group defaults - for Ansible 2.12+. 
diff --git a/ansible_collections/netapp/elementsw/meta/runtime.yml b/ansible_collections/netapp/elementsw/meta/runtime.yml deleted file mode 100644 index 05a30f02f..000000000 --- a/ansible_collections/netapp/elementsw/meta/runtime.yml +++ /dev/null @@ -1,28 +0,0 @@ ---- -requires_ansible: ">=2.9.10" -action_groups: - netapp_elementsw: - - na_elementsw_access_group - - na_elementsw_access_group_volumes - - na_elementsw_account - - na_elementsw_admin_users - - na_elementsw_backup - - na_elementsw_check_connections - - na_elementsw_cluster_config - - na_elementsw_cluster_pair - - na_elementsw_cluster - - na_elementsw_cluster_snmp - - na_elementsw_drive - - na_elementsw_info - - na_elementsw_initiators - - na_elementsw_ldap - - na_elementsw_network_interfaces - - na_elementsw_node - - na_elementsw_qos_policy - - na_elementsw_snapshot - - na_elementsw_snapshot_restore - - na_elementsw_snapshot_schedule - - na_elementsw_vlan - - na_elementsw_volume_clone - - na_elementsw_volume_pair - - na_elementsw_volume diff --git a/ansible_collections/netapp/elementsw/plugins/doc_fragments/netapp.py b/ansible_collections/netapp/elementsw/plugins/doc_fragments/netapp.py deleted file mode 100644 index 229d03f7d..000000000 --- a/ansible_collections/netapp/elementsw/plugins/doc_fragments/netapp.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright: (c) 2018, NetApp Ansible Team <ng-ansibleteam@netapp.com> -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - - -class ModuleDocFragment(object): - - DOCUMENTATION = r''' -options: - - See respective platform section for more details -requirements: - - See respective platform section for more details -notes: - - Ansible modules are available for the following NetApp Storage Platforms: E-Series, ONTAP, SolidFire -''' - - # Documentation fragment for SolidFire - SOLIDFIRE = r''' -options: - 
hostname: - required: true - description: - - The hostname or IP address of the SolidFire cluster. - - For na_elementsw_cluster, the Management IP (MIP) or hostname of the node to initiate the cluster creation from. - type: str - username: - required: true - description: - - Please ensure that the user has the adequate permissions. For more information, please read the official documentation - U(https://mysupport.netapp.com/documentation/docweb/index.html?productID=62636&language=en-US). - aliases: ['user'] - type: str - password: - required: true - description: - - Password for the specified user. - aliases: ['pass'] - type: str - -requirements: - - The modules were developed with SolidFire 10.1 - - solidfire-sdk-python (1.1.0.92) or greater. Install using 'pip install solidfire-sdk-python' - -notes: - - The modules prefixed with na\\_elementsw are built to support the SolidFire storage platform. - -''' diff --git a/ansible_collections/netapp/elementsw/plugins/module_utils/netapp.py b/ansible_collections/netapp/elementsw/plugins/module_utils/netapp.py deleted file mode 100644 index 4121bf8e7..000000000 --- a/ansible_collections/netapp/elementsw/plugins/module_utils/netapp.py +++ /dev/null @@ -1,107 +0,0 @@ -# This code is part of Ansible, but is an independent component. -# This particular file snippet, and this file snippet only, is BSD licensed. -# Modules you write using this snippet, which is embedded dynamically by Ansible -# still belong to the author of the module, and may assign their own license -# to the complete work. -# -# Copyright (c) 2017, Sumit Kumar <sumit4@netapp.com> -# Copyright (c) 2017, Michael Price <michael.price@netapp.com> -# Copyright: (c) 2018, NetApp Ansible Team <ng-ansibleteam@netapp.com> -# All rights reserved. 
-# -# Redistribution and use in source and binary forms, with or without modification, -# are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. -# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, -# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE -# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -''' -Common methods and constants -''' - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -HAS_SF_SDK = False -SF_BYTE_MAP = dict( - # Management GUI displays 1024 ** 3 as 1.1 GB, thus use 1000. 
- bytes=1, - b=1, - kb=1000, - mb=1000 ** 2, - gb=1000 ** 3, - tb=1000 ** 4, - pb=1000 ** 5, - eb=1000 ** 6, - zb=1000 ** 7, - yb=1000 ** 8 -) - -# uncomment this to log API calls -# import logging - -try: - from solidfire.factory import ElementFactory - import solidfire.common - HAS_SF_SDK = True -except ImportError: - HAS_SF_SDK = False - -COLLECTION_VERSION = "21.7.0" - - -def has_sf_sdk(): - return HAS_SF_SDK - - -def ontap_sf_host_argument_spec(): - - return dict( - hostname=dict(required=True, type='str'), - username=dict(required=True, type='str', aliases=['user']), - password=dict(required=True, type='str', aliases=['pass'], no_log=True) - ) - - -def create_sf_connection(module, hostname=None, port=None, raise_on_connection_error=False, timeout=None): - if hostname is None: - hostname = module.params['hostname'] - username = module.params['username'] - password = module.params['password'] - options = dict() - if port is not None: - options['port'] = port - if timeout is not None: - options['timeout'] = timeout - - if not HAS_SF_SDK: - module.fail_json(msg="the python SolidFire SDK module is required") - - try: - logging.basicConfig(filename='/tmp/elementsw_apis.log', level=logging.DEBUG, format='%(asctime)s %(levelname)-8s %(message)s') - except NameError: - # logging was not imported - pass - - try: - return_val = ElementFactory.create(hostname, username, password, **options) - except (solidfire.common.ApiConnectionError, solidfire.common.ApiServerError) as exc: - if raise_on_connection_error: - raise exc - module.fail_json(msg=repr(exc)) - except Exception as exc: - raise Exception("Unable to create SF connection: %s" % repr(exc)) - return return_val diff --git a/ansible_collections/netapp/elementsw/plugins/module_utils/netapp_elementsw_module.py b/ansible_collections/netapp/elementsw/plugins/module_utils/netapp_elementsw_module.py deleted file mode 100644 index 2d8b92cfa..000000000 --- 
a/ansible_collections/netapp/elementsw/plugins/module_utils/netapp_elementsw_module.py +++ /dev/null @@ -1,206 +0,0 @@ -# This code is part of Ansible, but is an independent component. -# This particular file snippet, and this file snippet only, is BSD licensed. -# Copyright: (c) 2018, NetApp Ansible Team <ng-ansibleteam@netapp.com> - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -from ansible.module_utils._text import to_native - -HAS_SF_SDK = False -try: - import solidfire.common - HAS_SF_SDK = True -except ImportError: - HAS_SF_SDK = False - - -def has_sf_sdk(): - return HAS_SF_SDK - - -class NaElementSWModule(object): - ''' Support class for common or shared functions ''' - def __init__(self, elem): - self.elem_connect = elem - self.parameters = dict() - - def get_volume(self, volume_id): - """ - Return volume details if volume exists for given volume_id - - :param volume_id: volume ID - :type volume_id: int - :return: Volume dict if found, None if not found - :rtype: dict - """ - volume_list = self.elem_connect.list_volumes(volume_ids=[volume_id]) - for volume in volume_list.volumes: - if volume.volume_id == volume_id: - if str(volume.delete_time) == "": - return volume - return None - - def get_volume_id(self, vol_name, account_id): - """ - Return volume id from the given (valid) account_id if found - Return None if not found - - :param vol_name: Name of the volume - :type vol_name: str - :param account_id: Account ID - :type account_id: int - - :return: Volume ID of the first matching volume if found. None if not found. 
- :rtype: int - """ - volume_list = self.elem_connect.list_volumes_for_account(account_id=account_id) - for volume in volume_list.volumes: - if volume.name == vol_name: - # return volume_id - if str(volume.delete_time) == "": - return volume.volume_id - return None - - def volume_id_exists(self, volume_id): - """ - Return volume_id if volume exists for given volume_id - - :param volume_id: volume ID - :type volume_id: int - :return: Volume ID if found, None if not found - :rtype: int - """ - volume_list = self.elem_connect.list_volumes(volume_ids=[volume_id]) - for volume in volume_list.volumes: - if volume.volume_id == volume_id: - if str(volume.delete_time) == "": - return volume.volume_id - return None - - def volume_exists(self, volume, account_id): - """ - Return volume_id if exists, None if not found - - :param volume: Volume ID or Name - :type volume: str - :param account_id: Account ID (valid) - :type account_id: int - :return: Volume ID if found, None if not found - """ - # If volume is an integer, get_by_id - if str(volume).isdigit(): - volume_id = int(volume) - try: - if self.volume_id_exists(volume_id): - return volume_id - except solidfire.common.ApiServerError: - # don't fail, continue and try get_by_name - pass - # get volume by name - volume_id = self.get_volume_id(volume, account_id) - return volume_id - - def get_snapshot(self, snapshot_id, volume_id): - """ - Return snapshot details if found - - :param snapshot_id: Snapshot ID or Name - :type snapshot_id: str - :param volume_id: Account ID (valid) - :type volume_id: int - :return: Snapshot dict if found, None if not found - :rtype: dict - """ - # mandate src_volume_id although not needed by sdk - snapshot_list = self.elem_connect.list_snapshots( - volume_id=volume_id) - for snapshot in snapshot_list.snapshots: - # if actual id is provided - if str(snapshot_id).isdigit() and snapshot.snapshot_id == int(snapshot_id): - return snapshot - # if snapshot name is provided - elif snapshot.name == 
snapshot_id: - return snapshot - return None - - @staticmethod - def map_qos_obj_to_dict(qos_obj): - ''' Take a QOS object and return a key, normalize the key names - Interestingly, the APIs are using different ids for create and get - ''' - mappings = [ - ('burst_iops', 'burstIOPS'), - ('min_iops', 'minIOPS'), - ('max_iops', 'maxIOPS'), - ] - qos_dict = vars(qos_obj) - # Align names to create API and module interface - for read, send in mappings: - if read in qos_dict: - qos_dict[send] = qos_dict.pop(read) - return qos_dict - - def get_qos_policy(self, name): - """ - Get QOS Policy - :description: Get QOS Policy object for a given name - :return: object, error - Policy object converted to dict if found, else None - Error text if error, else None - :rtype: dict/None, str/None - """ - try: - qos_policy_list_obj = self.elem_connect.list_qos_policies() - except (solidfire.common.ApiServerError, solidfire.common.ApiConnectionError) as exc: - error = "Error getting list of qos policies: %s" % to_native(exc) - return None, error - - policy_dict = dict() - if hasattr(qos_policy_list_obj, 'qos_policies'): - for policy in qos_policy_list_obj.qos_policies: - # Check and get policy object for a given name - if str(policy.qos_policy_id) == name: - policy_dict = vars(policy) - elif policy.name == name: - policy_dict = vars(policy) - if 'qos' in policy_dict: - policy_dict['qos'] = self.map_qos_obj_to_dict(policy_dict['qos']) - - return policy_dict if policy_dict else None, None - - def account_exists(self, account): - """ - Return account_id if account exists for given account id or name - Raises an exception if account does not exist - - :param account: Account ID or Name - :type account: str - :return: Account ID if found, None if not found - """ - # If account is an integer, get_by_id - if account.isdigit(): - account_id = int(account) - try: - result = self.elem_connect.get_account_by_id(account_id=account_id) - if result.account.account_id == account_id: - return account_id 
- except solidfire.common.ApiServerError: - # don't fail, continue and try get_by_name - pass - # get account by name, the method returns an Exception if account doesn't exist - result = self.elem_connect.get_account_by_name(username=account) - return result.account.account_id - - def set_element_attributes(self, source): - """ - Return telemetry attributes for the current execution - - :param source: name of the module - :type source: str - :return: a dict containing telemetry attributes - """ - attributes = {} - attributes['config-mgmt'] = 'ansible' - attributes['event-source'] = source - return attributes diff --git a/ansible_collections/netapp/elementsw/plugins/module_utils/netapp_module.py b/ansible_collections/netapp/elementsw/plugins/module_utils/netapp_module.py deleted file mode 100644 index c2b02d3d2..000000000 --- a/ansible_collections/netapp/elementsw/plugins/module_utils/netapp_module.py +++ /dev/null @@ -1,225 +0,0 @@ -# This code is part of Ansible, but is an independent component. -# This particular file snippet, and this file snippet only, is BSD licensed. -# Modules you write using this snippet, which is embedded dynamically by Ansible -# still belong to the author of the module, and may assign their own license -# to the complete work. -# -# Copyright (c) 2018, NetApp Ansible Team <ng-ansibleteam@netapp.com> -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without modification, -# are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. 
-# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. -# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, -# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE -# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -''' Support class for NetApp ansible modules ''' - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - - -def cmp(a, b): - """ - Python 3 does not have a cmp function, this will do the cmp. - :param a: first object to check - :param b: second object to check - :return: - """ - # convert to lower case for string comparison. - if a is None: - return -1 - if type(a) is str and type(b) is str: - a = a.lower() - b = b.lower() - # if list has string element, convert string to lower case. 
- if type(a) is list and type(b) is list: - a = [x.lower() if type(x) is str else x for x in a] - b = [x.lower() if type(x) is str else x for x in b] - a.sort() - b.sort() - return (a > b) - (a < b) - - -class NetAppModule(object): - ''' - Common class for NetApp modules - set of support functions to derive actions based - on the current state of the system, and a desired state - ''' - - def __init__(self): - self.log = list() - self.changed = False - self.parameters = {'name': 'not intialized'} - # self.debug = list() - - def set_parameters(self, ansible_params): - self.parameters = dict() - for param in ansible_params: - if ansible_params[param] is not None: - self.parameters[param] = ansible_params[param] - return self.parameters - - def get_cd_action(self, current, desired): - ''' takes a desired state and a current state, and return an action: - create, delete, None - eg: - is_present = 'absent' - some_object = self.get_object(source) - if some_object is not None: - is_present = 'present' - action = cd_action(current=is_present, desired = self.desired.state()) - ''' - if 'state' in desired: - desired_state = desired['state'] - else: - desired_state = 'present' - - if current is None and desired_state == 'absent': - return None - if current is not None and desired_state == 'present': - return None - # change in state - self.changed = True - if current is not None: - return 'delete' - return 'create' - - def compare_and_update_values(self, current, desired, keys_to_compare): - updated_values = dict() - is_changed = False - for key in keys_to_compare: - if key in current: - if key in desired and desired[key] is not None: - if current[key] != desired[key]: - updated_values[key] = desired[key] - is_changed = True - else: - updated_values[key] = current[key] - else: - updated_values[key] = current[key] - - return updated_values, is_changed - - @staticmethod - def check_keys(current, desired): - ''' TODO: raise an error if keys do not match - with the exception of: - 
new_name, state in desired - ''' - pass - - @staticmethod - def compare_lists(current, desired, get_list_diff): - ''' compares two lists and return a list of elements that are either the desired elements or elements that are - modified from the current state depending on the get_list_diff flag - :param: current: current item attribute in ONTAP - :param: desired: attributes from playbook - :param: get_list_diff: specifies whether to have a diff of desired list w.r.t current list for an attribute - :return: list of attributes to be modified - :rtype: list - ''' - desired_diff_list = [item for item in desired if item not in current] # get what in desired and not in current - current_diff_list = [item for item in current if item not in desired] # get what in current but not in desired - - if desired_diff_list or current_diff_list: - # there are changes - if get_list_diff: - return desired_diff_list - else: - return desired - else: - return [] - - def get_modified_attributes(self, current, desired, get_list_diff=False, additional_keys=False): - ''' takes two dicts of attributes and return a dict of attributes that are - not in the current state - It is expected that all attributes of interest are listed in current and - desired. - The same assumption holds true for any nested directory. - TODO: This is actually not true for the ElementSW 'attributes' directory. - Practically it means you cannot add or remove a key in a modify. 
- :param: current: current attributes in ONTAP - :param: desired: attributes from playbook - :param: get_list_diff: specifies whether to have a diff of desired list w.r.t current list for an attribute - :return: dict of attributes to be modified - :rtype: dict - - NOTE: depending on the attribute, the caller may need to do a modify or a - different operation (eg move volume if the modified attribute is an - aggregate name) - ''' - # uncomment these 2 lines if needed - # self.log.append('current: %s' % repr(current)) - # self.log.append('desired: %s' % repr(desired)) - # if the object does not exist, we can't modify it - modified = dict() - if current is None: - return modified - - # error out if keys do not match - self.check_keys(current, desired) - - # collect changed attributes - for key, value in current.items(): - if key in desired and desired[key] is not None: - if type(value) is list: - modified_list = self.compare_lists(value, desired[key], get_list_diff) # get modified list from current and desired - if modified_list: - modified[key] = modified_list - elif type(value) is dict: - modified_dict = self.get_modified_attributes(value, desired[key], get_list_diff, additional_keys=True) - if modified_dict: - modified[key] = modified_dict - elif cmp(value, desired[key]) != 0: - modified[key] = desired[key] - if additional_keys: - for key, value in desired.items(): - if key not in current: - modified[key] = desired[key] - if modified: - self.changed = True - # Uncomment this line if needed - # self.log.append('modified: %s' % repr(modified)) - return modified - - def is_rename_action(self, source, target): - ''' takes a source and target object, and returns True - if a rename is required - eg: - source = self.get_object(source_name) - target = self.get_object(target_name) - action = is_rename_action(source, target) - :return: None for error, True for rename action, False otherwise - ''' - if source is None and target is None: - # error, do nothing - # cannot rename 
an non existent resource - # alternatively we could create B - return None - if source is not None and target is not None: - # error, do nothing - # idempotency (or) new_name_is_already_in_use - # alternatively we could delete B and rename A to B - return False - if source is None and target is not None: - # do nothing, maybe the rename was already done - return False - # source is not None and target is None: - # rename is in order - self.changed = True - return True diff --git a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_access_group.py b/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_access_group.py deleted file mode 100644 index 467ca415c..000000000 --- a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_access_group.py +++ /dev/null @@ -1,397 +0,0 @@ -#!/usr/bin/python - -# (c) 2018, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or -# https://www.gnu.org/licenses/gpl-3.0.txt) - -""" -Element Software Access Group Manager -""" - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' - -module: na_elementsw_access_group - -short_description: NetApp Element Software Manage Access Groups -extends_documentation_fragment: - - netapp.elementsw.netapp.solidfire -version_added: 2.7.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> -description: -- Create, destroy, or update access groups on Element Software Cluster. - -options: - - state: - description: - - Whether the specified access group should exist or not. - choices: ['present', 'absent'] - default: present - type: str - - from_name: - description: - - ID or Name of the access group to rename. - - Required to create a new access group called 'name' by renaming 'from_name'. 
- version_added: 2.8.0 - type: str - - name: - description: - - Name for the access group for create, modify and delete operations. - required: True - aliases: - - src_access_group_id - type: str - - initiators: - description: - - List of initiators to include in the access group. If unspecified, the access group will start out without configured initiators. - type: list - elements: str - - volumes: - description: - - List of volumes to initially include in the volume access group. If unspecified, the access group will start without any volumes. - - It accepts either volume_name or volume_id - type: list - elements: str - - account_id: - description: - - Account ID for the owner of this volume. - - It accepts either account_name or account_id - - if account_id is digit, it will consider as account_id - - If account_id is string, it will consider as account_name - version_added: 2.8.0 - type: str - - virtual_network_id: - description: - - The ID of the Element SW Software Cluster Virtual Network to associate the access group with. - type: int - - virtual_network_tags: - description: - - The tags of VLAN Virtual Network Tag to associate the access group with. - type: list - elements: str - - attributes: - description: List of Name/Value pairs in JSON object format. 
- type: dict - -''' - -EXAMPLES = """ - - name: Create Access Group - na_elementsw_access_group: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: present - name: AnsibleAccessGroup - volumes: [7,8] - account_id: 1 - - - name: Modify Access Group - na_elementsw_access_group: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: present - name: AnsibleAccessGroup-Renamed - account_id: 1 - attributes: {"volumes": [1,2,3], "virtual_network_id": 12345} - - - name: Rename Access Group - na_elementsw_access_group: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: present - from_name: AnsibleAccessGroup - name: AnsibleAccessGroup-Renamed - - - name: Delete Access Group - na_elementsw_access_group: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: absent - name: 1 -""" - - -RETURN = """ - -msg: - description: Success message - returned: success - type: str - -""" -import traceback - -from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils._text import to_native -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.elementsw.plugins.module_utils.netapp_elementsw_module import NaElementSWModule - -HAS_SF_SDK = netapp_utils.has_sf_sdk() -try: - import solidfire.common -except ImportError: - HAS_SF_SDK = False - - -class ElementSWAccessGroup(object): - """ - Element Software Volume Access Group - """ - - def __init__(self): - - self.argument_spec = netapp_utils.ontap_sf_host_argument_spec() - self.argument_spec.update(dict( - state=dict(required=False, type='str', choices=['present', 'absent'], default='present'), - from_name=dict(required=False, type='str'), - name=dict(required=True, 
aliases=["src_access_group_id"], type='str'), - initiators=dict(required=False, type='list', elements='str'), - volumes=dict(required=False, type='list', elements='str'), - account_id=dict(required=False, type='str'), - virtual_network_id=dict(required=False, type='int'), - virtual_network_tags=dict(required=False, type='list', elements='str'), - attributes=dict(required=False, type='dict'), - )) - - self.module = AnsibleModule( - argument_spec=self.argument_spec, - required_if=[ - ('state', 'present', ['account_id']) - ], - supports_check_mode=True - ) - - input_params = self.module.params - - # Set up state variables - self.state = input_params['state'] - self.from_name = input_params['from_name'] - self.access_group_name = input_params['name'] - self.initiators = input_params['initiators'] - self.volumes = input_params['volumes'] - self.account_id = input_params['account_id'] - self.virtual_network_id = input_params['virtual_network_id'] - self.virtual_network_tags = input_params['virtual_network_tags'] - self.attributes = input_params['attributes'] - - if HAS_SF_SDK is False: - self.module.fail_json(msg="Unable to import the SolidFire Python SDK") - else: - self.sfe = netapp_utils.create_sf_connection(module=self.module) - - self.elementsw_helper = NaElementSWModule(self.sfe) - - # add telemetry attributes - if self.attributes is not None: - self.attributes.update(self.elementsw_helper.set_element_attributes(source='na_elementsw_access_group')) - else: - self.attributes = self.elementsw_helper.set_element_attributes(source='na_elementsw_access_group') - - def get_access_group(self, name): - """ - Get Access Group - :description: Get Access Group object for a given name - - :return: object (Group object) - :rtype: object (Group object) - """ - access_groups_list = self.sfe.list_volume_access_groups() - group_obj = None - - for group in access_groups_list.volume_access_groups: - # Check and get access_group object for a given name - if 
str(group.volume_access_group_id) == name: - group_obj = group - elif group.name == name: - group_obj = group - - return group_obj - - def get_account_id(self): - # Validate account id - # Return account_id if found, None otherwise - try: - account_id = self.elementsw_helper.account_exists(self.account_id) - return account_id - except solidfire.common.ApiServerError: - return None - - def get_volume_ids(self): - # Validate volume_ids - # Return volume ids if found, fail if not found - volume_ids = [] - for volume in self.volumes: - volume_id = self.elementsw_helper.volume_exists(volume, self.account_id) - if volume_id: - volume_ids.append(volume_id) - else: - self.module.fail_json(msg='Specified volume %s does not exist' % volume) - return volume_ids - - def create_access_group(self): - """ - Create the Access Group - """ - try: - self.sfe.create_volume_access_group(name=self.access_group_name, - initiators=self.initiators, - volumes=self.volumes, - virtual_network_id=self.virtual_network_id, - virtual_network_tags=self.virtual_network_tags, - attributes=self.attributes) - except Exception as e: - self.module.fail_json(msg="Error creating volume access group %s: %s" % - (self.access_group_name, to_native(e)), exception=traceback.format_exc()) - - def delete_access_group(self): - """ - Delete the Access Group - """ - try: - self.sfe.delete_volume_access_group(volume_access_group_id=self.group_id) - - except Exception as e: - self.module.fail_json(msg="Error deleting volume access group %s: %s" % - (self.access_group_name, to_native(e)), - exception=traceback.format_exc()) - - def update_access_group(self): - """ - Update the Access Group if the access_group already exists - """ - try: - self.sfe.modify_volume_access_group(volume_access_group_id=self.group_id, - virtual_network_id=self.virtual_network_id, - virtual_network_tags=self.virtual_network_tags, - initiators=self.initiators, - volumes=self.volumes, - attributes=self.attributes) - except Exception as e: - 
self.module.fail_json(msg="Error updating volume access group %s: %s" % - (self.access_group_name, to_native(e)), exception=traceback.format_exc()) - - def rename_access_group(self): - """ - Rename the Access Group to the new name - """ - try: - self.sfe.modify_volume_access_group(volume_access_group_id=self.from_group_id, - virtual_network_id=self.virtual_network_id, - virtual_network_tags=self.virtual_network_tags, - name=self.access_group_name, - initiators=self.initiators, - volumes=self.volumes, - attributes=self.attributes) - except Exception as e: - self.module.fail_json(msg="Error updating volume access group %s: %s" % - (self.from_name, to_native(e)), exception=traceback.format_exc()) - - def apply(self): - """ - Process the access group operation on the Element Software Cluster - """ - changed = False - action = None - - input_account_id = self.account_id - if self.account_id is not None: - self.account_id = self.get_account_id() - if self.state == 'present' and self.volumes is not None: - if self.account_id: - self.volumes = self.get_volume_ids() - else: - self.module.fail_json(msg='Error: Specified account id "%s" does not exist.' % str(input_account_id)) - - group_detail = self.get_access_group(self.access_group_name) - - if group_detail is not None: - # If access group found - self.group_id = group_detail.volume_access_group_id - - if self.state == "absent": - action = 'delete' - changed = True - else: - # If state - present, check for any parameter of exising group needs modification. 
- if self.volumes is not None and len(self.volumes) > 0: - # Compare the volume list - if not group_detail.volumes: - # If access group does not have any volume attached - action = 'update' - changed = True - else: - for volumeID in group_detail.volumes: - if volumeID not in self.volumes: - action = 'update' - changed = True - break - - elif self.initiators is not None and group_detail.initiators != self.initiators: - action = 'update' - changed = True - - elif self.virtual_network_id is not None or self.virtual_network_tags is not None: - action = 'update' - changed = True - - else: - # access_group does not exist - if self.state == "present" and self.from_name is not None: - group_detail = self.get_access_group(self.from_name) - if group_detail is not None: - # If resource pointed by from_name exists, rename the access_group to name - self.from_group_id = group_detail.volume_access_group_id - action = 'rename' - changed = True - else: - # If resource pointed by from_name does not exists, error out - self.module.fail_json(msg="Resource does not exist : %s" % self.from_name) - elif self.state == "present": - # If from_name is not defined, Create from scratch. 
- action = 'create' - changed = True - - if changed and not self.module.check_mode: - if action == 'create': - self.create_access_group() - elif action == 'rename': - self.rename_access_group() - elif action == 'update': - self.update_access_group() - elif action == 'delete': - self.delete_access_group() - - self.module.exit_json(changed=changed) - - -def main(): - """ - Main function - """ - na_elementsw_access_group = ElementSWAccessGroup() - na_elementsw_access_group.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_access_group_volumes.py b/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_access_group_volumes.py deleted file mode 100644 index af9053a13..000000000 --- a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_access_group_volumes.py +++ /dev/null @@ -1,247 +0,0 @@ -#!/usr/bin/python - -# (c) 2019, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or -# https://www.gnu.org/licenses/gpl-3.0.txt) - -""" -Element Software Access Group Volumes -""" - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' - -module: na_elementsw_access_group_volumes - -short_description: NetApp Element Software Add/Remove Volumes to/from Access Group -extends_documentation_fragment: - - netapp.elementsw.netapp.solidfire -version_added: 20.1.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> -description: -- Add or remove volumes to/from access group on Element Software Cluster. - -options: - - state: - description: - - Whether the specified volumes should exist or not for this access group. 
- choices: ['present', 'absent'] - default: present - type: str - - access_group: - description: - - Name or id for the access group to add volumes to, or remove volumes from - required: true - type: str - - volumes: - description: - - List of volumes to add/remove from/to the access group. - - It accepts either volume_name or volume_id - required: True - type: list - elements: str - - account_id: - description: - - Account ID for the owner of this volume. - - It accepts either account_name or account_id - - if account_id is numeric, look up for account_id first, then look up for account_name - - If account_id is not numeric, look up for account_name - required: true - type: str -''' - -EXAMPLES = """ - - name: Add Volumes to Access Group - na_elementsw_access_group: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: present - access_group: AnsibleAccessGroup - volumes: ['vol7','vol8','vol9'] - account_id: '1' - - - name: Remove Volumes from Access Group - na_elementsw_access_group: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: absent - access_group: AnsibleAccessGroup - volumes: ['vol7','vol9'] - account_id: '1' -""" - - -RETURN = """ - -msg: - description: Success message - returned: success - type: str - -""" -import traceback - -from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils._text import to_native -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.elementsw.plugins.module_utils.netapp_elementsw_module import NaElementSWModule - -HAS_SF_SDK = netapp_utils.has_sf_sdk() -try: - import solidfire.common -except ImportError: - HAS_SF_SDK = False - - -class ElementSWAccessGroupVolumes(object): - """ - Element Software Access Group Volumes - """ - - def __init__(self): - - self.argument_spec = 
netapp_utils.ontap_sf_host_argument_spec() - self.argument_spec.update(dict( - state=dict(required=False, type='str', choices=['present', 'absent'], default='present'), - access_group=dict(required=True, type='str'), - volumes=dict(required=True, type='list', elements='str'), - account_id=dict(required=True, type='str'), - )) - - self.module = AnsibleModule( - argument_spec=self.argument_spec, - supports_check_mode=True - ) - - input_params = self.module.params - - # Set up state variables - self.state = input_params['state'] - self.access_group_name = input_params['access_group'] - self.volumes = input_params['volumes'] - self.account_id = input_params['account_id'] - - if HAS_SF_SDK is False: - self.module.fail_json(msg="Unable to import the SolidFire Python SDK") - else: - self.sfe = netapp_utils.create_sf_connection(module=self.module) - - self.elementsw_helper = NaElementSWModule(self.sfe) - - # add telemetry attributes - self.attributes = self.elementsw_helper.set_element_attributes(source='na_elementsw_access_group') - - def get_access_group(self, name): - """ - Get Access Group - :description: Get Access Group object for a given name - - :return: object (Group object) - :rtype: object (Group object) - """ - access_groups_list = self.sfe.list_volume_access_groups() - group_obj = None - - for group in access_groups_list.volume_access_groups: - # Check and get access_group object for a given name - if str(group.volume_access_group_id) == name: - group_obj = group - elif group.name == name: - group_obj = group - - return group_obj - - def get_account_id(self): - # Validate account id - # Return account_id if found, None otherwise - try: - account_id = self.elementsw_helper.account_exists(self.account_id) - return account_id - except solidfire.common.ApiServerError: - return None - - def get_volume_ids(self): - # Validate volume_ids - # Return volume ids if found, fail if not found - volume_ids = [] - for volume in self.volumes: - volume_id = 
self.elementsw_helper.volume_exists(volume, self.account_id) - if volume_id: - volume_ids.append(volume_id) - else: - self.module.fail_json(msg='Error: Specified volume %s does not exist' % volume) - return volume_ids - - def update_access_group(self, volumes): - """ - Update the Access Group if the access_group already exists - """ - try: - self.sfe.modify_volume_access_group(volume_access_group_id=self.group_id, - volumes=volumes) - except Exception as e: - self.module.fail_json(msg="Error updating volume access group %s: %s" % - (self.access_group_name, to_native(e)), exception=traceback.format_exc()) - - def apply(self): - """ - Process the volume add/remove operations for the access group on the Element Software Cluster - """ - changed = False - input_account_id = self.account_id - - if self.account_id is not None: - self.account_id = self.get_account_id() - if self.account_id is None: - self.module.fail_json(msg='Error: Specified account id "%s" does not exist.' % str(input_account_id)) - - # get volume data - self.volume_ids = self.get_volume_ids() - group_detail = self.get_access_group(self.access_group_name) - if group_detail is None: - self.module.fail_json(msg='Error: Specified access group "%s" does not exist for account id: %s.' 
% (self.access_group_name, str(input_account_id))) - self.group_id = group_detail.volume_access_group_id - volumes = group_detail.volumes - - # compare expected list of volumes to existing one - if self.state == "absent": - # remove volumes if present in access group - volumes = [vol for vol in group_detail.volumes if vol not in self.volume_ids] - else: - # add volumes if not already present - volumes = [vol for vol in self.volume_ids if vol not in group_detail.volumes] - volumes.extend(group_detail.volumes) - - # update if there is a change - if len(volumes) != len(group_detail.volumes): - if not self.module.check_mode: - self.update_access_group(volumes) - changed = True - - self.module.exit_json(changed=changed) - - -def main(): - """ - Main function - """ - na_elementsw_access_group_volumes = ElementSWAccessGroupVolumes() - na_elementsw_access_group_volumes.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_account.py b/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_account.py deleted file mode 100644 index 862753747..000000000 --- a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_account.py +++ /dev/null @@ -1,340 +0,0 @@ -#!/usr/bin/python - -# (c) 2018, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -""" -Element Software Account Manager -""" - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' - -module: na_elementsw_account - -short_description: NetApp Element Software Manage Accounts -extends_documentation_fragment: - - netapp.elementsw.netapp.solidfire -version_added: 2.7.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> -description: -- Create, destroy, or update accounts on Element SW - 
-options: - - state: - description: - - Whether the specified account should exist or not. - choices: ['present', 'absent'] - default: present - type: str - - element_username: - description: - - Unique username for this account. (May be 1 to 64 characters in length). - required: true - aliases: - - account_id - type: str - - from_name: - description: - - ID or Name of the account to rename. - - Required to create an account called 'element_username' by renaming 'from_name'. - version_added: 2.8.0 - type: str - - initiator_secret: - description: - - CHAP secret to use for the initiator. Should be 12-16 characters long and impenetrable. - - The CHAP initiator secrets must be unique and cannot be the same as the target CHAP secret. - - If not specified, a random secret is created. - type: str - - target_secret: - description: - - CHAP secret to use for the target (mutual CHAP authentication). - - Should be 12-16 characters long and impenetrable. - - The CHAP target secrets must be unique and cannot be the same as the initiator CHAP secret. - - If not specified, a random secret is created. - type: str - - attributes: - description: List of Name/Value pairs in JSON object format. - type: dict - - status: - description: - - Status of the account. 
- type: str - -''' - -EXAMPLES = """ -- name: Create Account - na_elementsw_account: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: present - element_username: TenantA - -- name: Modify Account - na_elementsw_account: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: present - status: locked - element_username: TenantA - -- name: Rename Account - na_elementsw_account: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: present - element_username: TenantA_Renamed - from_name: TenantA - -- name: Rename and modify Account - na_elementsw_account: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: present - status: locked - element_username: TenantA_Renamed - from_name: TenantA - -- name: Delete Account - na_elementsw_account: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: absent - element_username: TenantA_Renamed -""" - -RETURN = """ - -""" -import traceback - -from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils._text import to_native -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.elementsw.plugins.module_utils.netapp_elementsw_module import NaElementSWModule - - -HAS_SF_SDK = netapp_utils.has_sf_sdk() - - -class ElementSWAccount(object): - """ - Element SW Account - """ - - def __init__(self): - self.argument_spec = netapp_utils.ontap_sf_host_argument_spec() - self.argument_spec.update(dict( - state=dict(required=False, type='str', choices=['present', 'absent'], default='present'), - element_username=dict(required=True, aliases=["account_id"], type='str'), - from_name=dict(required=False, 
default=None), - initiator_secret=dict(required=False, type='str', no_log=True), - target_secret=dict(required=False, type='str', no_log=True), - attributes=dict(required=False, type='dict'), - status=dict(required=False, type='str'), - )) - - self.module = AnsibleModule( - argument_spec=self.argument_spec, - supports_check_mode=True - ) - - params = self.module.params - - # set up state variables - self.state = params.get('state') - self.element_username = params.get('element_username') - self.from_name = params.get('from_name') - self.initiator_secret = params.get('initiator_secret') - self.target_secret = params.get('target_secret') - self.attributes = params.get('attributes') - self.status = params.get('status') - - if HAS_SF_SDK is False: - self.module.fail_json(msg="Unable to import the Element SW Python SDK") - else: - self.sfe = netapp_utils.create_sf_connection(module=self.module) - - self.elementsw_helper = NaElementSWModule(self.sfe) - - # add telemetry attributes - if self.attributes is not None: - self.attributes.update(self.elementsw_helper.set_element_attributes(source='na_elementsw_account')) - else: - self.attributes = self.elementsw_helper.set_element_attributes(source='na_elementsw_account') - - def get_account(self, username): - """ - Get Account - :description: Get Account object from account id or name - - :return: Details about the account. None if not found. 
- :rtype: object (Account object) - """ - - account_list = self.sfe.list_accounts() - - for account in account_list.accounts: - # Check and get account object for a given name - if str(account.account_id) == username: - return account - elif account.username == username: - return account - return None - - def create_account(self): - """ - Create the Account - """ - try: - self.sfe.add_account(username=self.element_username, - initiator_secret=self.initiator_secret, - target_secret=self.target_secret, - attributes=self.attributes) - except Exception as e: - self.module.fail_json(msg='Error creating account %s: %s' % (self.element_username, to_native(e)), - exception=traceback.format_exc()) - - def delete_account(self): - """ - Delete the Account - """ - try: - self.sfe.remove_account(account_id=self.account_id) - - except Exception as e: - self.module.fail_json(msg='Error deleting account %s: %s' % (self.account_id, to_native(e)), - exception=traceback.format_exc()) - - def rename_account(self): - """ - Rename the Account - """ - try: - self.sfe.modify_account(account_id=self.account_id, - username=self.element_username, - status=self.status, - initiator_secret=self.initiator_secret, - target_secret=self.target_secret, - attributes=self.attributes) - - except Exception as e: - self.module.fail_json(msg='Error renaming account %s: %s' % (self.account_id, to_native(e)), - exception=traceback.format_exc()) - - def update_account(self): - """ - Update the Account if account already exists - """ - try: - self.sfe.modify_account(account_id=self.account_id, - status=self.status, - initiator_secret=self.initiator_secret, - target_secret=self.target_secret, - attributes=self.attributes) - - except Exception as e: - self.module.fail_json(msg='Error updating account %s: %s' % (self.account_id, to_native(e)), - exception=traceback.format_exc()) - - def apply(self): - """ - Process the account operation on the Element OS Cluster - """ - changed = False - update_account = False - 
account_detail = self.get_account(self.element_username) - - if account_detail is None and self.state == 'present': - changed = True - - elif account_detail is not None: - # If account found - self.account_id = account_detail.account_id - - if self.state == 'absent': - changed = True - else: - # If state - present, check for any parameter of exising account needs modification. - if account_detail.username is not None and self.element_username is not None and \ - account_detail.username != self.element_username: - update_account = True - changed = True - elif account_detail.status is not None and self.status is not None \ - and account_detail.status != self.status: - update_account = True - changed = True - - elif account_detail.initiator_secret is not None and self.initiator_secret is not None \ - and account_detail.initiator_secret != self.initiator_secret: - update_account = True - changed = True - - elif account_detail.target_secret is not None and self.target_secret is not None \ - and account_detail.target_secret != self.target_secret: - update_account = True - changed = True - - elif account_detail.attributes is not None and self.attributes is not None \ - and account_detail.attributes != self.attributes: - update_account = True - changed = True - if changed: - if self.module.check_mode: - # Skipping the changes - pass - else: - if self.state == 'present': - if update_account: - self.update_account() - else: - if self.from_name is not None: - # If from_name is defined - account_exists = self.get_account(self.from_name) - if account_exists is not None: - # If resource pointed by from_name exists, rename the account to name - self.account_id = account_exists.account_id - self.rename_account() - else: - # If resource pointed by from_name does not exists, error out - self.module.fail_json(msg="Resource does not exist : %s" % self.from_name) - else: - # If from_name is not defined, create from scratch. 
- self.create_account() - elif self.state == 'absent': - self.delete_account() - - self.module.exit_json(changed=changed) - - -def main(): - """ - Main function - """ - na_elementsw_account = ElementSWAccount() - na_elementsw_account.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_admin_users.py b/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_admin_users.py deleted file mode 100644 index 7ad46648a..000000000 --- a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_admin_users.py +++ /dev/null @@ -1,233 +0,0 @@ -#!/usr/bin/python - -# (c) 2017, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' - -module: na_elementsw_admin_users - -short_description: NetApp Element Software Manage Admin Users -extends_documentation_fragment: - - netapp.elementsw.netapp.solidfire -version_added: 2.7.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> -description: -- Create, destroy, or update admin users on SolidFire - -options: - - state: - description: - - Whether the specified account should exist or not. - choices: ['present', 'absent'] - default: present - type: str - - element_username: - description: - - Unique username for this account. (May be 1 to 64 characters in length). - required: true - type: str - - element_password: - description: - - The password for the new admin account. 
Setting the password attribute will always reset your password, even if the password is the same - type: str - - acceptEula: - description: - - Boolean, true for accepting Eula, False Eula - type: bool - - access: - description: - - A list of types the admin has access to - type: list - elements: str -''' - -EXAMPLES = """ - - name: Add admin user - na_elementsw_admin_users: - state: present - username: "{{ admin_user_name }}" - password: "{{ admin_password }}" - hostname: "{{ hostname }}" - element_username: carchi8py - element_password: carchi8py - acceptEula: True - access: accounts,drives - - - name: modify admin user - na_elementsw_admin_users: - state: present - username: "{{ admin_user_name }}" - password: "{{ admin_password }}" - hostname: "{{ hostname }}" - element_username: carchi8py - element_password: carchi8py12 - acceptEula: True - access: accounts,drives,nodes - - - name: delete admin user - na_elementsw_admin_users: - state: absent - username: "{{ admin_user_name }}" - password: "{{ admin_password }}" - hostname: "{{ hostname }}" - element_username: carchi8py -""" - -RETURN = """ - -""" - -from ansible.module_utils.basic import AnsibleModule -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.elementsw.plugins.module_utils.netapp_elementsw_module import NaElementSWModule - - -HAS_SF_SDK = netapp_utils.has_sf_sdk() - - -class NetAppElementSWAdminUser(object): - """ - Class to set, modify and delete admin users on ElementSW box - """ - - def __init__(self): - """ - Initialize the NetAppElementSWAdminUser class. 
- """ - self.argument_spec = netapp_utils.ontap_sf_host_argument_spec() - self.argument_spec.update(dict( - state=dict(required=False, type='str', choices=['present', 'absent'], default='present'), - element_username=dict(required=True, type='str'), - element_password=dict(required=False, type='str', no_log=True), - acceptEula=dict(required=False, type='bool'), - access=dict(required=False, type='list', elements='str') - )) - - self.module = AnsibleModule( - argument_spec=self.argument_spec, - supports_check_mode=True - ) - - param = self.module.params - # set up state variables - self.state = param['state'] - self.element_username = param['element_username'] - self.element_password = param['element_password'] - self.acceptEula = param['acceptEula'] - self.access = param['access'] - - if HAS_SF_SDK is False: - self.module.fail_json(msg="Unable to import the SolidFire Python SDK") - else: - self.sfe = netapp_utils.create_sf_connection(module=self.module) - - self.elementsw_helper = NaElementSWModule(self.sfe) - - # add telemetry attributes - self.attributes = self.elementsw_helper.set_element_attributes(source='na_elementsw_admin_users') - - def does_admin_user_exist(self): - """ - Checks to see if an admin user exists or not - :return: True if the user exist, False if it dose not exist - """ - admins_list = self.sfe.list_cluster_admins() - for admin in admins_list.cluster_admins: - if admin.username == self.element_username: - return True - return False - - def get_admin_user(self): - """ - Get the admin user object - :return: the admin user object - """ - admins_list = self.sfe.list_cluster_admins() - for admin in admins_list.cluster_admins: - if admin.username == self.element_username: - return admin - return None - - def modify_admin_user(self): - """ - Modify a admin user. 
If a password is set the user will be modified as there is no way to - compare a new password with an existing one - :return: if a user was modified or not - """ - changed = False - admin_user = self.get_admin_user() - if self.access is not None and len(self.access) > 0: - for access in self.access: - if access not in admin_user.access: - changed = True - if changed and not self.module.check_mode: - self.sfe.modify_cluster_admin(cluster_admin_id=admin_user.cluster_admin_id, - access=self.access, - password=self.element_password, - attributes=self.attributes) - - return changed - - def add_admin_user(self): - """ - Add's a new admin user to the element cluster - :return: nothing - """ - self.sfe.add_cluster_admin(username=self.element_username, - password=self.element_password, - access=self.access, - accept_eula=self.acceptEula, - attributes=self.attributes) - - def delete_admin_user(self): - """ - Deletes an existing admin user from the element cluster - :return: nothing - """ - admin_user = self.get_admin_user() - self.sfe.remove_cluster_admin(cluster_admin_id=admin_user.cluster_admin_id) - - def apply(self): - """ - determines which method to call to set, delete or modify admin users - :return: - """ - changed = False - if self.state == "present": - if self.does_admin_user_exist(): - changed = self.modify_admin_user() - else: - if not self.module.check_mode: - self.add_admin_user() - changed = True - else: - if self.does_admin_user_exist(): - if not self.module.check_mode: - self.delete_admin_user() - changed = True - - self.module.exit_json(changed=changed) - - -def main(): - v = NetAppElementSWAdminUser() - v.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_backup.py b/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_backup.py deleted file mode 100644 index e81e7c5ea..000000000 --- a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_backup.py +++ 
/dev/null @@ -1,243 +0,0 @@ -#!/usr/bin/python -# (c) 2018, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -""" -Element Software Backup Manager -""" -from __future__ import absolute_import, division, print_function - -__metaclass__ = type - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - -DOCUMENTATION = ''' - -module: na_elementsw_backup - -short_description: NetApp Element Software Create Backups -extends_documentation_fragment: - - netapp.elementsw.netapp.solidfire -version_added: 2.7.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> -description: -- Create backup - -options: - - src_volume_id: - description: - - ID of the backup source volume. - required: true - aliases: - - volume_id - type: str - - dest_hostname: - description: - - hostname for the backup source cluster - - will be set equal to hostname if not specified - required: false - type: str - - dest_username: - description: - - username for the backup destination cluster - - will be set equal to username if not specified - required: false - type: str - - dest_password: - description: - - password for the backup destination cluster - - will be set equal to password if not specified - required: false - type: str - - dest_volume_id: - description: - - ID of the backup destination volume - required: true - type: str - - format: - description: - - Backup format to use - choices: ['native','uncompressed'] - required: false - default: 'native' - type: str - - script: - description: - - the backup script to be executed - required: false - type: str - - script_parameters: - description: - - the backup script parameters - required: false - type: dict - -''' - -EXAMPLES = """ -na_elementsw_backup: - hostname: "{{ source_cluster_hostname }}" - username: "{{ source_cluster_username }}" - password: "{{ source_cluster_password }}" - src_volume_id: 1 - dest_hostname: "{{ 
destination_cluster_hostname }}" - dest_username: "{{ destination_cluster_username }}" - dest_password: "{{ destination_cluster_password }}" - dest_volume_id: 3 - format: native -""" - -RETURN = """ - -""" - -from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils._text import to_native -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.elementsw.plugins.module_utils.netapp_elementsw_module import NaElementSWModule -import time - -HAS_SF_SDK = netapp_utils.has_sf_sdk() -try: - import solidfire.common -except ImportError: - HAS_SF_SDK = False - - -class ElementSWBackup(object): - ''' class to handle backup operations ''' - - def __init__(self): - """ - Setup Ansible parameters and SolidFire connection - """ - self.argument_spec = netapp_utils.ontap_sf_host_argument_spec() - - self.argument_spec.update(dict( - - src_volume_id=dict(aliases=['volume_id'], required=True, type='str'), - dest_hostname=dict(required=False, type='str'), - dest_username=dict(required=False, type='str'), - dest_password=dict(required=False, type='str', no_log=True), - dest_volume_id=dict(required=True, type='str'), - format=dict(required=False, choices=['native', 'uncompressed'], default='native'), - script=dict(required=False, type='str'), - script_parameters=dict(required=False, type='dict') - - - )) - - self.module = AnsibleModule( - argument_spec=self.argument_spec, - required_together=[['script', 'script_parameters']], - supports_check_mode=True - ) - if HAS_SF_SDK is False: - self.module.fail_json(msg="Unable to import the SolidFire Python SDK") - - # If destination cluster details are not specified , set the destination to be the same as the source - if self.module.params["dest_hostname"] is None: - self.module.params["dest_hostname"] = self.module.params["hostname"] - if self.module.params["dest_username"] is None: - self.module.params["dest_username"] = self.module.params["username"] - if 
self.module.params["dest_password"] is None: - self.module.params["dest_password"] = self.module.params["password"] - - params = self.module.params - - # establish a connection to both source and destination elementsw clusters - self.src_connection = netapp_utils.create_sf_connection(self.module) - self.module.params["username"] = params["dest_username"] - self.module.params["password"] = params["dest_password"] - self.module.params["hostname"] = params["dest_hostname"] - self.dest_connection = netapp_utils.create_sf_connection(self.module) - - self.elementsw_helper = NaElementSWModule(self.src_connection) - - # add telemetry attributes - self.attributes = self.elementsw_helper.set_element_attributes(source='na_elementsw_backup') - - def apply(self): - """ - Apply backup creation logic - """ - self.create_backup() - self.module.exit_json(changed=True) - - def create_backup(self): - """ - Create backup - """ - - # Start volume write on destination cluster - - try: - write_obj = self.dest_connection.start_bulk_volume_write(volume_id=self.module.params["dest_volume_id"], - format=self.module.params["format"], - attributes=self.attributes) - write_key = write_obj.key - except solidfire.common.ApiServerError as err: - self.module.fail_json(msg="Error starting bulk write on destination cluster", exception=to_native(err)) - - # Set script parameters if not passed by user - # These parameters are equivalent to the options used when a backup is executed via the GUI - - if self.module.params["script"] is None and self.module.params["script_parameters"] is None: - - self.module.params["script"] = 'bv_internal.py' - self.module.params["script_parameters"] = {"write": { - "mvip": self.module.params["dest_hostname"], - "username": self.module.params["dest_username"], - "password": self.module.params["dest_password"], - "key": write_key, - "endpoint": "solidfire", - "format": self.module.params["format"]}, - "range": {"lba": 0, "blocks": 244224}} - - # Start volume read on source 
cluster - - try: - read_obj = self.src_connection.start_bulk_volume_read(self.module.params["src_volume_id"], - self.module.params["format"], - script=self.module.params["script"], - script_parameters=self.module.params["script_parameters"], - attributes=self.attributes) - except solidfire.common.ApiServerError as err: - self.module.fail_json(msg="Error starting bulk read on source cluster", exception=to_native(err)) - - # Poll job status until it has completed - # SF will automatically timeout if the job is not successful after certain amount of time - - completed = False - while completed is not True: - # Sleep between polling iterations to reduce api load - time.sleep(2) - try: - result = self.src_connection.get_async_result(read_obj.async_handle, True) - except solidfire.common.ApiServerError as err: - self.module.fail_json(msg="Unable to check backup job status", exception=to_native(err)) - - if result["status"] != 'running': - completed = True - if 'error' in result: - self.module.fail_json(msg=result['error']['message']) - - -def main(): - """ Run backup operation""" - vol_obj = ElementSWBackup() - vol_obj.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_check_connections.py b/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_check_connections.py deleted file mode 100644 index 2f288dc3a..000000000 --- a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_check_connections.py +++ /dev/null @@ -1,154 +0,0 @@ -#!/usr/bin/python - -# (c) 2018, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' - -module: na_elementsw_check_connections - -short_description: NetApp Element Software 
Check connectivity to MVIP and SVIP. -extends_documentation_fragment: - - netapp.elementsw.netapp.solidfire -version_added: 2.7.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> -description: -- Used to test the management connection to the cluster. -- The test pings the MVIP and SVIP, and executes a simple API method to verify connectivity. - -options: - - skip: - description: - - Skip checking connection to SVIP or MVIP. - choices: ['svip', 'mvip'] - type: str - - mvip: - description: - - Optionally, use to test connection of a different MVIP. - - This is not needed to test the connection to the target cluster. - type: str - - svip: - description: - - Optionally, use to test connection of a different SVIP. - - This is not needed to test the connection to the target cluster. - type: str - -''' - - -EXAMPLES = """ - - name: Check connections to MVIP and SVIP - na_elementsw_check_connections: - hostname: "{{ solidfire_hostname }}" - username: "{{ solidfire_username }}" - password: "{{ solidfire_password }}" -""" - -RETURN = """ - -""" - -from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils._text import to_native -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.elementsw.plugins.module_utils.netapp_module import NetAppModule - - -HAS_SF_SDK = netapp_utils.has_sf_sdk() - - -class NaElementSWConnection(object): - - def __init__(self): - self.argument_spec = netapp_utils.ontap_sf_host_argument_spec() - self.argument_spec.update(dict( - skip=dict(required=False, type='str', default=None, choices=['mvip', 'svip']), - mvip=dict(required=False, type='str', default=None), - svip=dict(required=False, type='str', default=None) - )) - - self.module = AnsibleModule( - argument_spec=self.argument_spec, - required_if=[ - ('skip', 'svip', ['mvip']), - ('skip', 'mvip', ['svip']) - ], - supports_check_mode=True - ) - - self.na_helper = NetAppModule() - 
self.parameters = self.module.params.copy() - self.msg = "" - - if HAS_SF_SDK is False: - self.module.fail_json(msg="Unable to import the ElementSW Python SDK") - else: - self.elem = netapp_utils.create_sf_connection(self.module, port=442) - - def check_mvip_connection(self): - """ - Check connection to MVIP - - :return: true if connection was successful, false otherwise. - :rtype: bool - """ - try: - test = self.elem.test_connect_mvip(mvip=self.parameters['mvip']) - # Todo - Log details about the test - return test.details.connected - - except Exception as e: - self.msg += 'Error checking connection to MVIP: %s' % to_native(e) - return False - - def check_svip_connection(self): - """ - Check connection to SVIP - - :return: true if connection was successful, false otherwise. - :rtype: bool - """ - try: - test = self.elem.test_connect_svip(svip=self.parameters['svip']) - # Todo - Log details about the test - return test.details.connected - except Exception as e: - self.msg += 'Error checking connection to SVIP: %s' % to_native(e) - return False - - def apply(self): - passed = False - if self.parameters.get('skip') is None: - # Set failed and msg - passed = self.check_mvip_connection() - # check if both connections have passed - passed &= self.check_svip_connection() - elif self.parameters['skip'] == 'mvip': - passed |= self.check_svip_connection() - elif self.parameters['skip'] == 'svip': - passed |= self.check_mvip_connection() - if not passed: - self.module.fail_json(msg=self.msg) - else: - self.module.exit_json() - - -def main(): - connect_obj = NaElementSWConnection() - connect_obj.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_cluster.py b/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_cluster.py deleted file mode 100644 index ede60cae3..000000000 --- a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_cluster.py +++ /dev/null @@ -1,372 +0,0 @@ 
-#!/usr/bin/python -# (c) 2018, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or -# https://www.gnu.org/licenses/gpl-3.0.txt) - -''' -Element Software Initialize Cluster -''' -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' - -module: na_elementsw_cluster - -short_description: NetApp Element Software Create Cluster -extends_documentation_fragment: - - netapp.elementsw.netapp.solidfire -version_added: 2.7.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> -description: - - Initialize Element Software node ownership to form a cluster. - - If the cluster does not exist, username/password are still required but ignored for initial creation. - - username/password are used as the node credentials to see if the cluster already exists. - - username/password can also be used to set the cluster credentials. - - If the cluster already exists, no error is returned, but changed is set to false. - - Cluster modifications are not supported and are ignored. - -options: - management_virtual_ip: - description: - - Floating (virtual) IP address for the cluster on the management network. - required: true - type: str - - storage_virtual_ip: - description: - - Floating (virtual) IP address for the cluster on the storage (iSCSI) network. - required: true - type: str - - replica_count: - description: - - Number of replicas of each piece of data to store in the cluster. - default: 2 - type: int - - cluster_admin_username: - description: - - Username for the cluster admin. - - If not provided, default to username. - type: str - - cluster_admin_password: - description: - - Initial password for the cluster admin account. - - If not provided, default to password. 
- type: str - - accept_eula: - description: - - Required to indicate your acceptance of the End User License Agreement when creating this cluster. - - To accept the EULA, set this parameter to true. - type: bool - - nodes: - description: - - Storage IP (SIP) addresses of the initial set of nodes making up the cluster. - - nodes IP must be in the list. - required: true - type: list - elements: str - - attributes: - description: - - List of name-value pairs in JSON object format. - type: dict - - timeout: - description: - - Time to wait for cluster creation to complete. - default: 100 - type: int - version_added: 20.8.0 - - fail_if_cluster_already_exists_with_larger_ensemble: - description: - - If the cluster exists, the default is to verify that I(nodes) is a superset of the existing ensemble. - - A superset is accepted because some nodes may have a different role. - - But the module reports an error if the existing ensemble contains a node not listed in I(nodes). - - This checker is disabled when this option is set to false. 
- default: true - type: bool - version_added: 20.8.0 - - encryption: - description: to enable or disable encryption at rest - type: bool - version_added: 20.10.0 - - order_number: - description: (experimental) order number as provided by NetApp - type: str - version_added: 20.10.0 - - serial_number: - description: (experimental) serial number as provided by NetApp - type: str - version_added: 20.10.0 -''' - -EXAMPLES = """ - - - name: Initialize new cluster - tags: - - elementsw_cluster - na_elementsw_cluster: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - management_virtual_ip: 10.226.108.32 - storage_virtual_ip: 10.226.109.68 - replica_count: 2 - accept_eula: true - nodes: - - 10.226.109.72 - - 10.226.109.74 -""" - -RETURN = """ - -msg: - description: Success message - returned: success - type: str - -""" -import traceback - -from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils._text import to_native -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.elementsw.plugins.module_utils.netapp_elementsw_module import NaElementSWModule - -HAS_SF_SDK = netapp_utils.has_sf_sdk() - - -class ElementSWCluster(object): - """ - Element Software Initialize node with ownership for cluster formation - """ - - def __init__(self): - self.argument_spec = netapp_utils.ontap_sf_host_argument_spec() - self.argument_spec.update(dict( - management_virtual_ip=dict(required=True, type='str'), - storage_virtual_ip=dict(required=True, type='str'), - replica_count=dict(required=False, type='int', default=2), - cluster_admin_username=dict(required=False, type='str'), - cluster_admin_password=dict(required=False, type='str', no_log=True), - accept_eula=dict(required=False, type='bool'), - nodes=dict(required=True, type='list', elements='str'), - attributes=dict(required=False, type='dict', default=None), - 
timeout=dict(required=False, type='int', default=100), - fail_if_cluster_already_exists_with_larger_ensemble=dict(required=False, type='bool', default=True), - encryption=dict(required=False, type='bool'), - order_number=dict(required=False, type='str'), - serial_number=dict(required=False, type='str'), - )) - - self.module = AnsibleModule( - argument_spec=self.argument_spec, - supports_check_mode=True - ) - - input_params = self.module.params - - self.management_virtual_ip = input_params['management_virtual_ip'] - self.storage_virtual_ip = input_params['storage_virtual_ip'] - self.replica_count = input_params['replica_count'] - self.accept_eula = input_params.get('accept_eula') - self.attributes = input_params.get('attributes') - self.nodes = input_params['nodes'] - self.cluster_admin_username = input_params['username'] if input_params.get('cluster_admin_username') is None else input_params['cluster_admin_username'] - self.cluster_admin_password = input_params['password'] if input_params.get('cluster_admin_password') is None else input_params['cluster_admin_password'] - self.fail_if_cluster_already_exists_with_larger_ensemble = input_params['fail_if_cluster_already_exists_with_larger_ensemble'] - self.encryption = input_params['encryption'] - self.order_number = input_params['order_number'] - self.serial_number = input_params['serial_number'] - self.debug = list() - - if HAS_SF_SDK is False: - self.module.fail_json(msg="Unable to import the SolidFire Python SDK") - - # 442 for node APIs, 443 (default) for cluster APIs - for role, port in [('node', 442), ('cluster', 443)]: - try: - # even though username/password should be optional, create_sf_connection fails if not set - conn = netapp_utils.create_sf_connection(module=self.module, raise_on_connection_error=True, port=port, timeout=input_params['timeout']) - if role == 'node': - self.sfe_node = conn - else: - self.sfe_cluster = conn - except netapp_utils.solidfire.common.ApiConnectionError as exc: - if str(exc) == 
"Bad Credentials": - msg = 'Most likely the cluster is already created.' - msg += ' Make sure to use valid %s credentials for username and password.' % 'node' if port == 442 else 'cluster' - msg += ' Even though credentials are not required for the first create, they are needed to check whether the cluster already exists.' - msg += ' Cluster reported: %s' % repr(exc) - else: - msg = 'Failed to create connection: %s' % repr(exc) - self.module.fail_json(msg=msg) - except Exception as exc: - self.module.fail_json(msg='Failed to connect: %s' % repr(exc)) - - self.elementsw_helper = NaElementSWModule(self.sfe_cluster) - - # add telemetry attributes - if self.attributes is not None: - self.attributes.update(self.elementsw_helper.set_element_attributes(source='na_elementsw_cluster')) - else: - self.attributes = self.elementsw_helper.set_element_attributes(source='na_elementsw_cluster') - - def get_node_cluster_info(self): - """ - Get Cluster Info - using node API - """ - try: - info = self.sfe_node.get_config() - self.debug.append(repr(info.config.cluster)) - return info.config.cluster - except Exception as exc: - self.debug.append("port: %s, %s" % (str(self.sfe_node._port), repr(exc))) - return None - - def check_cluster_exists(self): - """ - validate if cluster exists with list of nodes - error out if something is found but with different nodes - return a tuple (found, info) - found is True if found, False if not found - """ - info = self.get_node_cluster_info() - if info is None: - return False - ensemble = getattr(info, 'ensemble', None) - if not ensemble: - return False - # format is 'id:IP' - nodes = [x.split(':', 1)[1] for x in ensemble] - current_ensemble_nodes = set(nodes) if ensemble else set() - requested_nodes = set(self.nodes) if self.nodes else set() - extra_ensemble_nodes = current_ensemble_nodes - requested_nodes - # TODO: the cluster may have more nodes than what is reported in ensemble: - # nodes_not_in_ensemble = requested_nodes - current_ensemble_nodes 
- # So it's OK to find some missing nodes, but not very deterministic. - # eg some kind of backup nodes could be in nodes_not_in_ensemble. - if extra_ensemble_nodes and self.fail_if_cluster_already_exists_with_larger_ensemble: - msg = 'Error: found existing cluster with more nodes in ensemble. Cluster: %s, extra nodes: %s' %\ - (getattr(info, 'cluster', 'not found'), extra_ensemble_nodes) - msg += '. Cluster info: %s' % repr(info) - self.module.fail_json(msg=msg) - if extra_ensemble_nodes: - self.debug.append("Extra ensemble nodes: %s" % extra_ensemble_nodes) - nodes_not_in_ensemble = requested_nodes - current_ensemble_nodes - if nodes_not_in_ensemble: - self.debug.append("Extra requested nodes not in ensemble: %s" % nodes_not_in_ensemble) - return True - - def create_cluster_api(self, options): - ''' Call send_request directly rather than using the SDK if new fields are present - The new SDK will support these in version 1.17 (Nov or Feb) - ''' - extra_options = ['enableSoftwareEncryptionAtRest', 'orderNumber', 'serialNumber'] - if not any((item in options for item in extra_options)): - # use SDK - return self.sfe_cluster.create_cluster(**options) - - # call directly the API as the SDK is not updated yet - params = { - "mvip": options['mvip'], - "svip": options['svip'], - "repCount": options['rep_count'], - "username": options['username'], - "password": options['password'], - "nodes": options['nodes'], - } - if options['accept_eula'] is not None: - params["acceptEula"] = options['accept_eula'] - if options['attributes'] is not None: - params["attributes"] = options['attributes'] - for option in extra_options: - if options.get(option): - params[option] = options[option] - - # There is no adaptor. 
- return self.sfe_cluster.send_request( - 'CreateCluster', - netapp_utils.solidfire.CreateClusterResult, - params, - since=None - ) - - def create_cluster(self): - """ - Create Cluster - """ - options = { - 'mvip': self.management_virtual_ip, - 'svip': self.storage_virtual_ip, - 'rep_count': self.replica_count, - 'accept_eula': self.accept_eula, - 'nodes': self.nodes, - 'attributes': self.attributes, - 'username': self.cluster_admin_username, - 'password': self.cluster_admin_password - } - if self.encryption is not None: - options['enableSoftwareEncryptionAtRest'] = self.encryption - if self.order_number is not None: - options['orderNumber'] = self.order_number - if self.serial_number is not None: - options['serialNumber'] = self.serial_number - - return_msg = 'created' - try: - # does not work as node even though documentation says otherwise - # running as node, this error is reported: 500 xUnknownAPIMethod method=CreateCluster - self.create_cluster_api(options) - except netapp_utils.solidfire.common.ApiServerError as exc: - # not sure how this can happen, but the cluster may already exists - if 'xClusterAlreadyCreated' not in str(exc.message): - self.module.fail_json(msg='Error creating cluster %s' % to_native(exc), exception=traceback.format_exc()) - return_msg = 'already_exists: %s' % str(exc.message) - except Exception as exc: - self.module.fail_json(msg='Error creating cluster %s' % to_native(exc), exception=traceback.format_exc()) - return return_msg - - def apply(self): - """ - Check connection and initialize node with cluster ownership - """ - changed = False - result_message = None - exists = self.check_cluster_exists() - if exists: - result_message = "cluster already exists" - else: - changed = True - if not self.module.check_mode: - result_message = self.create_cluster() - if result_message.startswith('already_exists:'): - changed = False - self.module.exit_json(changed=changed, msg=result_message, debug=self.debug) - - -def main(): - """ - Main 
function - """ - na_elementsw_cluster = ElementSWCluster() - na_elementsw_cluster.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_cluster_config.py b/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_cluster_config.py deleted file mode 100644 index 94b5c17dc..000000000 --- a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_cluster_config.py +++ /dev/null @@ -1,331 +0,0 @@ -#!/usr/bin/python -# (c) 2018, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or -# https://www.gnu.org/licenses/gpl-3.0.txt) - -''' -Element Software Configure cluster -''' -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'community'} - - -DOCUMENTATION = ''' - -module: na_elementsw_cluster_config - -short_description: Configure Element SW Cluster -extends_documentation_fragment: - - netapp.elementsw.netapp.solidfire -version_added: 2.8.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> -description: -- Configure Element Software cluster. 
- -options: - modify_cluster_full_threshold: - description: - - The capacity level at which the cluster generates an event - - Requires a stage3_block_threshold_percent or - - max_metadata_over_provision_factor or - - stage2_aware_threshold - suboptions: - stage3_block_threshold_percent: - description: - - The percentage below the "Error" threshold that triggers a cluster "Warning" alert - type: int - max_metadata_over_provision_factor: - description: - - The number of times metadata space can be overprovisioned relative to the amount of space available - type: int - stage2_aware_threshold: - description: - - The number of nodes of capacity remaining in the cluster before the system triggers a notification - type: int - type: dict - - encryption_at_rest: - description: - - enable or disable the Advanced Encryption Standard (AES) 256-bit encryption at rest on the cluster - choices: ['present', 'absent'] - type: str - - set_ntp_info: - description: - - configure NTP on cluster node - - Requires a list of one or more ntp_servers - suboptions: - ntp_servers: - description: - - list of NTP servers to add to each nodes NTP configuration - type: list - elements: str - broadcastclient: - type: bool - default: False - description: - - Enables every node in the cluster as a broadcast client - type: dict - - enable_virtual_volumes: - type: bool - default: True - description: - - Enable the NetApp SolidFire VVols cluster feature -''' - -EXAMPLES = """ - - - name: Configure cluster - tags: - - elementsw_cluster_config - na_elementsw_cluster_config: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - modify_cluster_full_threshold: - stage2_aware_threshold: 2 - stage3_block_threshold_percent: 10 - max_metadata_over_provision_factor: 2 - encryption_at_rest: absent - set_ntp_info: - broadcastclient: False - ntp_servers: - - 1.1.1.1 - - 2.2.2.2 - enable_virtual_volumes: True -""" - -RETURN = """ - -msg: - 
description: Success message - returned: success - type: str - -""" -import traceback - -from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils._text import to_native -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.elementsw.plugins.module_utils.netapp_module import NetAppModule - -HAS_SF_SDK = netapp_utils.has_sf_sdk() - - -class ElementSWClusterConfig(object): - """ - Element Software Configure Element SW Cluster - """ - def __init__(self): - self.argument_spec = netapp_utils.ontap_sf_host_argument_spec() - - self.argument_spec.update(dict( - modify_cluster_full_threshold=dict( - type='dict', - options=dict( - stage2_aware_threshold=dict(type='int', default=None), - stage3_block_threshold_percent=dict(type='int', default=None), - max_metadata_over_provision_factor=dict(type='int', default=None) - ) - ), - encryption_at_rest=dict(type='str', choices=['present', 'absent']), - set_ntp_info=dict( - type='dict', - options=dict( - broadcastclient=dict(type='bool', default=False), - ntp_servers=dict(type='list', elements='str') - ) - ), - enable_virtual_volumes=dict(type='bool', default=True) - )) - - self.module = AnsibleModule( - argument_spec=self.argument_spec, - supports_check_mode=True - ) - - self.na_helper = NetAppModule() - self.parameters = self.na_helper.set_parameters(self.module.params) - - if HAS_SF_SDK is False: - self.module.fail_json(msg="Unable to import the SolidFire Python SDK") - else: - self.sfe = netapp_utils.create_sf_connection(module=self.module) - - def get_ntp_details(self): - """ - get ntp info - """ - # Get ntp details - ntp_details = self.sfe.get_ntp_info() - return ntp_details - - def cmp(self, provided_ntp_servers, existing_ntp_servers): - # As python3 doesn't have default cmp function, defining manually to provide same fuctionality. 
- return (provided_ntp_servers > existing_ntp_servers) - (provided_ntp_servers < existing_ntp_servers) - - def get_cluster_details(self): - """ - get cluster info - """ - cluster_details = self.sfe.get_cluster_info() - return cluster_details - - def get_vvols_status(self): - """ - get vvols status - """ - feature_status = self.sfe.get_feature_status(feature='vvols') - if feature_status is not None: - return feature_status.features[0].enabled - return None - - def get_cluster_full_threshold_status(self): - """ - get cluster full threshold - """ - cluster_full_threshold_status = self.sfe.get_cluster_full_threshold() - return cluster_full_threshold_status - - def setup_ntp_info(self, servers, broadcastclient=None): - """ - configure ntp - """ - # Set ntp servers - try: - self.sfe.set_ntp_info(servers, broadcastclient) - except Exception as exception_object: - self.module.fail_json(msg='Error configuring ntp %s' % (to_native(exception_object)), - exception=traceback.format_exc()) - - def set_encryption_at_rest(self, state=None): - """ - enable/disable encryption at rest - """ - try: - if state == 'present': - encryption_state = 'enable' - self.sfe.enable_encryption_at_rest() - elif state == 'absent': - encryption_state = 'disable' - self.sfe.disable_encryption_at_rest() - except Exception as exception_object: - self.module.fail_json(msg='Failed to %s rest encryption %s' % (encryption_state, - to_native(exception_object)), - exception=traceback.format_exc()) - - def enable_feature(self, feature): - """ - enable feature - """ - try: - self.sfe.enable_feature(feature=feature) - except Exception as exception_object: - self.module.fail_json(msg='Error enabling %s %s' % (feature, to_native(exception_object)), - exception=traceback.format_exc()) - - def set_cluster_full_threshold(self, stage2_aware_threshold=None, - stage3_block_threshold_percent=None, - max_metadata_over_provision_factor=None): - """ - modify cluster full threshold - """ - try: - 
self.sfe.modify_cluster_full_threshold(stage2_aware_threshold=stage2_aware_threshold, - stage3_block_threshold_percent=stage3_block_threshold_percent, - max_metadata_over_provision_factor=max_metadata_over_provision_factor) - except Exception as exception_object: - self.module.fail_json(msg='Failed to modify cluster full threshold %s' % (to_native(exception_object)), - exception=traceback.format_exc()) - - def apply(self): - """ - Cluster configuration - """ - changed = False - result_message = None - - if self.parameters.get('modify_cluster_full_threshold') is not None: - # get cluster full threshold - cluster_full_threshold_details = self.get_cluster_full_threshold_status() - # maxMetadataOverProvisionFactor - current_mmopf = cluster_full_threshold_details.max_metadata_over_provision_factor - # stage3BlockThresholdPercent - current_s3btp = cluster_full_threshold_details.stage3_block_threshold_percent - # stage2AwareThreshold - current_s2at = cluster_full_threshold_details.stage2_aware_threshold - - # is cluster full threshold state change required? 
- if self.parameters.get("modify_cluster_full_threshold")['max_metadata_over_provision_factor'] is not None and \ - current_mmopf != self.parameters['modify_cluster_full_threshold']['max_metadata_over_provision_factor'] or \ - self.parameters.get("modify_cluster_full_threshold")['stage3_block_threshold_percent'] is not None and \ - current_s3btp != self.parameters['modify_cluster_full_threshold']['stage3_block_threshold_percent'] or \ - self.parameters.get("modify_cluster_full_threshold")['stage2_aware_threshold'] is not None and \ - current_s2at != self.parameters['modify_cluster_full_threshold']['stage2_aware_threshold']: - changed = True - self.set_cluster_full_threshold(self.parameters['modify_cluster_full_threshold']['stage2_aware_threshold'], - self.parameters['modify_cluster_full_threshold']['stage3_block_threshold_percent'], - self.parameters['modify_cluster_full_threshold']['max_metadata_over_provision_factor']) - - if self.parameters.get('encryption_at_rest') is not None: - # get all cluster info - cluster_info = self.get_cluster_details() - # register rest state - current_encryption_at_rest_state = cluster_info.cluster_info.encryption_at_rest_state - - # is encryption state change required? - if current_encryption_at_rest_state == 'disabled' and self.parameters['encryption_at_rest'] == 'present' or \ - current_encryption_at_rest_state == 'enabled' and self.parameters['encryption_at_rest'] == 'absent': - changed = True - self.set_encryption_at_rest(self.parameters['encryption_at_rest']) - - if self.parameters.get('set_ntp_info') is not None: - # get all ntp details - ntp_details = self.get_ntp_details() - # register list of ntp servers - ntp_servers = ntp_details.servers - # broadcastclient - broadcast_client = ntp_details.broadcastclient - - # has either the broadcastclient or the ntp server list changed? 
- - if self.parameters.get('set_ntp_info')['broadcastclient'] != broadcast_client or \ - self.cmp(self.parameters.get('set_ntp_info')['ntp_servers'], ntp_servers) != 0: - changed = True - self.setup_ntp_info(self.parameters.get('set_ntp_info')['ntp_servers'], - self.parameters.get('set_ntp_info')['broadcastclient']) - - if self.parameters.get('enable_virtual_volumes') is not None: - # check vvols status - current_vvols_status = self.get_vvols_status() - - # has the vvols state changed? - if current_vvols_status is False and self.parameters.get('enable_virtual_volumes') is True: - changed = True - self.enable_feature('vvols') - elif current_vvols_status is True and self.parameters.get('enable_virtual_volumes') is not True: - # vvols, once enabled, cannot be disabled - self.module.fail_json(msg='Error disabling vvols: this feature cannot be undone') - - if self.module.check_mode is True: - result_message = "Check mode, skipping changes" - self.module.exit_json(changed=changed, msg=result_message) - - -def main(): - """ - Main function - """ - na_elementsw_cluster_config = ElementSWClusterConfig() - na_elementsw_cluster_config.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_cluster_pair.py b/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_cluster_pair.py deleted file mode 100644 index af064e214..000000000 --- a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_cluster_pair.py +++ /dev/null @@ -1,206 +0,0 @@ -#!/usr/bin/python -# (c) 2018, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' - -module: na_elementsw_cluster_pair - -short_description: NetApp Element Software Manage 
Cluster Pair -extends_documentation_fragment: - - netapp.elementsw.netapp.solidfire -version_added: 2.7.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> -description: -- Create, delete cluster pair - -options: - - state: - description: - - Whether the specified cluster pair should exist or not. - choices: ['present', 'absent'] - default: present - type: str - - dest_mvip: - description: - - Destination IP address of the cluster to be paired. - required: true - type: str - - dest_username: - description: - - Destination username for the cluster to be paired. - - Optional if this is same as source cluster username. - type: str - - dest_password: - description: - - Destination password for the cluster to be paired. - - Optional if this is same as source cluster password. - type: str - -''' - -EXAMPLES = """ - - name: Create cluster pair - na_elementsw_cluster_pair: - hostname: "{{ src_hostname }}" - username: "{{ src_username }}" - password: "{{ src_password }}" - state: present - dest_mvip: "{{ dest_hostname }}" - - - name: Delete cluster pair - na_elementsw_cluster_pair: - hostname: "{{ src_hostname }}" - username: "{{ src_username }}" - password: "{{ src_password }}" - state: absent - dest_mvip: "{{ dest_hostname }}" - dest_username: "{{ dest_username }}" - dest_password: "{{ dest_password }}" - -""" - -RETURN = """ - -""" - -from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils._text import to_native -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.elementsw.plugins.module_utils.netapp_elementsw_module import NaElementSWModule -from ansible_collections.netapp.elementsw.plugins.module_utils.netapp_module import NetAppModule - -HAS_SF_SDK = netapp_utils.has_sf_sdk() -try: - import solidfire.common -except ImportError: - HAS_SF_SDK = False - - -class ElementSWClusterPair(object): - """ class to handle cluster pairing operations """ - - def 
__init__(self): - """ - Setup Ansible parameters and ElementSW connection - """ - self.argument_spec = netapp_utils.ontap_sf_host_argument_spec() - self.argument_spec.update(dict( - state=dict(required=False, choices=['present', 'absent'], - default='present'), - dest_mvip=dict(required=True, type='str'), - dest_username=dict(required=False, type='str'), - dest_password=dict(required=False, type='str', no_log=True) - )) - - self.module = AnsibleModule( - argument_spec=self.argument_spec, - supports_check_mode=True - ) - - if HAS_SF_SDK is False: - self.module.fail_json(msg="Unable to import the SolidFire Python SDK") - else: - self.elem = netapp_utils.create_sf_connection(module=self.module) - - self.elementsw_helper = NaElementSWModule(self.elem) - self.na_helper = NetAppModule() - self.parameters = self.na_helper.set_parameters(self.module.params) - # get element_sw_connection for destination cluster - # overwrite existing source host, user and password with destination credentials - self.module.params['hostname'] = self.parameters['dest_mvip'] - # username and password is same as source, - # if dest_username and dest_password aren't specified - if self.parameters.get('dest_username'): - self.module.params['username'] = self.parameters['dest_username'] - if self.parameters.get('dest_password'): - self.module.params['password'] = self.parameters['dest_password'] - self.dest_elem = netapp_utils.create_sf_connection(module=self.module) - self.dest_elementsw_helper = NaElementSWModule(self.dest_elem) - - def check_if_already_paired(self, paired_clusters, hostname): - for pair in paired_clusters.cluster_pairs: - if pair.mvip == hostname: - return pair.cluster_pair_id - return None - - def get_src_pair_id(self): - """ - Check for idempotency - """ - # src cluster and dest cluster exist - paired_clusters = self.elem.list_cluster_pairs() - return self.check_if_already_paired(paired_clusters, self.parameters['dest_mvip']) - - def get_dest_pair_id(self): - """ - Getting 
destination cluster_pair_id - """ - paired_clusters = self.dest_elem.list_cluster_pairs() - return self.check_if_already_paired(paired_clusters, self.parameters['hostname']) - - def pair_clusters(self): - """ - Start cluster pairing on source, and complete on target cluster - """ - try: - pair_key = self.elem.start_cluster_pairing() - self.dest_elem.complete_cluster_pairing( - cluster_pairing_key=pair_key.cluster_pairing_key) - except solidfire.common.ApiServerError as err: - self.module.fail_json(msg="Error pairing cluster %s and %s" - % (self.parameters['hostname'], - self.parameters['dest_mvip']), - exception=to_native(err)) - - def unpair_clusters(self, pair_id_source, pair_id_dest): - """ - Delete cluster pair - """ - try: - self.elem.remove_cluster_pair(cluster_pair_id=pair_id_source) - self.dest_elem.remove_cluster_pair(cluster_pair_id=pair_id_dest) - except solidfire.common.ApiServerError as err: - self.module.fail_json(msg="Error unpairing cluster %s and %s" - % (self.parameters['hostname'], - self.parameters['dest_mvip']), - exception=to_native(err)) - - def apply(self): - """ - Call create / delete cluster pair methods - """ - pair_id_source = self.get_src_pair_id() - # If already paired, find the cluster_pair_id of destination cluster - if pair_id_source: - pair_id_dest = self.get_dest_pair_id() - # calling helper to determine action - cd_action = self.na_helper.get_cd_action(pair_id_source, self.parameters) - if cd_action == "create": - self.pair_clusters() - elif cd_action == "delete": - self.unpair_clusters(pair_id_source, pair_id_dest) - self.module.exit_json(changed=self.na_helper.changed) - - -def main(): - """ Apply cluster pair actions """ - cluster_obj = ElementSWClusterPair() - cluster_obj.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_cluster_snmp.py b/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_cluster_snmp.py deleted file mode 100644 index 
847700197..000000000 --- a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_cluster_snmp.py +++ /dev/null @@ -1,365 +0,0 @@ -#!/usr/bin/python -# (c) 2019, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or -# https://www.gnu.org/licenses/gpl-3.0.txt) - -''' -Element Software Configure SNMP -''' -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'community'} - - -DOCUMENTATION = ''' - -module: na_elementsw_cluster_snmp - -short_description: Configure Element SW Cluster SNMP -extends_documentation_fragment: - - netapp.elementsw.netapp.solidfire -version_added: 2.8.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> -description: -- Configure Element Software cluster SNMP. - -options: - - state: - description: - - This module enables you to enable SNMP on cluster nodes. When you enable SNMP, \ - the action applies to all nodes in the cluster, and the values that are passed replace, \ - in whole, all values set in any previous call to this module. - choices: ['present', 'absent'] - default: present - type: str - - snmp_v3_enabled: - description: - - Which version of SNMP has to be enabled. - type: bool - - networks: - description: - - List of networks and what type of access they have to the SNMP servers running on the cluster nodes. - - This parameter is required if SNMP v3 is disabled. - suboptions: - access: - description: - - ro for read-only access. - - rw for read-write access. - - rosys for read-only access to a restricted set of system information. - choices: ['ro', 'rw', 'rosys'] - type: str - cidr: - description: - - A CIDR network mask. This network mask must be an integer greater than or equal to 0, \ - and less than or equal to 32. It must also not be equal to 31. - type: int - community: - description: - - SNMP community string. 
- type: str - network: - description: - - This parameter along with the cidr variable is used to control which network the access and \ - community string apply to. - - The special value of 'default' is used to specify an entry that applies to all networks. - - The cidr mask is ignored when network value is either a host name or default. - type: str - type: dict - - usm_users: - description: - - List of users and the type of access they have to the SNMP servers running on the cluster nodes. - - This parameter is required if SNMP v3 is enabled. - suboptions: - access: - description: - - rouser for read-only access. - - rwuser for read-write access. - - rosys for read-only access to a restricted set of system information. - choices: ['rouser', 'rwuser', 'rosys'] - type: str - name: - description: - - The name of the user. Must contain at least one character, but no more than 32 characters. - - Blank spaces are not allowed. - type: str - password: - description: - - The password of the user. Must be between 8 and 255 characters long (inclusive). - - Blank spaces are not allowed. - - Required if 'secLevel' is 'auth' or 'priv.' - type: str - passphrase: - description: - - The passphrase of the user. Must be between 8 and 255 characters long (inclusive). - - Blank spaces are not allowed. - - Required if 'secLevel' is 'priv.' - type: str - secLevel: - description: - - To define the security level of a user. 
- choices: ['noauth', 'auth', 'priv'] - type: str - type: dict - -''' - -EXAMPLES = """ - - - name: configure SnmpNetwork - tags: - - elementsw_cluster_snmp - na_elementsw_cluster_snmp: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: present - snmp_v3_enabled: True - usm_users: - access: rouser - name: testuser - password: ChangeMe123 - passphrase: ChangeMe123 - secLevel: auth - networks: - access: ro - cidr: 24 - community: TestNetwork - network: 192.168.0.1 - - - name: Disable SnmpNetwork - tags: - - elementsw_cluster_snmp - na_elementsw_cluster_snmp: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: absent - -""" - -RETURN = """ - -msg: - description: Success message - returned: success - type: str - -""" - -import traceback - -from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils._text import to_native -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.elementsw.plugins.module_utils.netapp_module import NetAppModule - -HAS_SF_SDK = netapp_utils.has_sf_sdk() - - -class ElementSWClusterSnmp(object): - """ - Element Software Configure Element SW Cluster SnmpNetwork - """ - def __init__(self): - self.argument_spec = netapp_utils.ontap_sf_host_argument_spec() - - self.argument_spec.update(dict( - state=dict(type='str', choices=['present', 'absent'], default='present'), - snmp_v3_enabled=dict(type='bool'), - networks=dict( - type='dict', - options=dict( - access=dict(type='str', choices=['ro', 'rw', 'rosys']), - cidr=dict(type='int', default=None), - community=dict(type='str', default=None), - network=dict(type='str', default=None) - ) - ), - usm_users=dict( - type='dict', - options=dict( - access=dict(type='str', choices=['rouser', 'rwuser', 'rosys']), - name=dict(type='str', default=None), - password=dict(type='str', 
default=None, no_log=True), - passphrase=dict(type='str', default=None, no_log=True), - secLevel=dict(type='str', choices=['auth', 'noauth', 'priv']) - ) - ), - )) - - self.module = AnsibleModule( - argument_spec=self.argument_spec, - required_if=[ - ('state', 'present', ['snmp_v3_enabled']), - ('snmp_v3_enabled', True, ['usm_users']), - ('snmp_v3_enabled', False, ['networks']) - ], - supports_check_mode=True - ) - - self.na_helper = NetAppModule() - self.parameters = self.na_helper.set_parameters(self.module.params) - - if self.parameters.get('state') == "present": - if self.parameters.get('usm_users') is not None: - # Getting the configuration details to configure SNMP Version3 - self.access_usm = self.parameters.get('usm_users')['access'] - self.name = self.parameters.get('usm_users')['name'] - self.password = self.parameters.get('usm_users')['password'] - self.passphrase = self.parameters.get('usm_users')['passphrase'] - self.secLevel = self.parameters.get('usm_users')['secLevel'] - if self.parameters.get('networks') is not None: - # Getting the configuration details to configure SNMP Version2 - self.access_network = self.parameters.get('networks')['access'] - self.cidr = self.parameters.get('networks')['cidr'] - self.community = self.parameters.get('networks')['community'] - self.network = self.parameters.get('networks')['network'] - - if HAS_SF_SDK is False: - self.module.fail_json(msg="Unable to import the SolidFire Python SDK") - else: - self.sfe = netapp_utils.create_sf_connection(module=self.module) - - def enable_snmp(self): - """ - enable snmp feature - """ - try: - self.sfe.enable_snmp(snmp_v3_enabled=self.parameters.get('snmp_v3_enabled')) - except Exception as exception_object: - self.module.fail_json(msg='Error enabling snmp feature %s' % to_native(exception_object), - exception=traceback.format_exc()) - - def disable_snmp(self): - """ - disable snmp feature - """ - try: - self.sfe.disable_snmp() - except Exception as exception_object: - 
self.module.fail_json(msg='Error disabling snmp feature %s' % to_native(exception_object), - exception=traceback.format_exc()) - - def configure_snmp(self, actual_networks, actual_usm_users): - """ - Configure snmp - """ - try: - self.sfe.set_snmp_acl(networks=[actual_networks], usm_users=[actual_usm_users]) - - except Exception as exception_object: - self.module.fail_json(msg='Error Configuring snmp feature %s' % to_native(exception_object), - exception=traceback.format_exc()) - - def apply(self): - """ - Cluster SNMP configuration - """ - changed = False - result_message = None - update_required = False - version_change = False - is_snmp_enabled = self.sfe.get_snmp_state().enabled - - if is_snmp_enabled is True: - # IF SNMP is already enabled - if self.parameters.get('state') == 'absent': - # Checking for state change(s) here, and applying it later in the code allows us to support - # check_mode - changed = True - - elif self.parameters.get('state') == 'present': - # Checking if SNMP configuration needs to be updated, - is_snmp_v3_enabled = self.sfe.get_snmp_state().snmp_v3_enabled - - if is_snmp_v3_enabled != self.parameters.get('snmp_v3_enabled'): - # Checking if there any version changes required - version_change = True - changed = True - - if is_snmp_v3_enabled is True: - # Checking If snmp configuration for usm_users needs modification - if len(self.sfe.get_snmp_info().usm_users) == 0: - # If snmp is getting configured for first time - update_required = True - changed = True - else: - for usm_user in self.sfe.get_snmp_info().usm_users: - if usm_user.access != self.access_usm or usm_user.name != self.name or usm_user.password != self.password or \ - usm_user.passphrase != self.passphrase or usm_user.sec_level != self.secLevel: - update_required = True - changed = True - else: - # Checking If snmp configuration for networks needs modification - for snmp_network in self.sfe.get_snmp_info().networks: - if snmp_network.access != self.access_network or 
snmp_network.cidr != self.cidr or \ - snmp_network.community != self.community or snmp_network.network != self.network: - update_required = True - changed = True - - else: - if self.parameters.get('state') == 'present': - changed = True - - result_message = "" - - if changed: - if self.module.check_mode is True: - result_message = "Check mode, skipping changes" - - else: - if self.parameters.get('state') == "present": - # IF snmp is not enabled, then enable and configure snmp - if self.parameters.get('snmp_v3_enabled') is True: - # IF SNMP is enabled with version 3 - usm_users = {'access': self.access_usm, - 'name': self.name, - 'password': self.password, - 'passphrase': self.passphrase, - 'secLevel': self.secLevel} - networks = None - else: - # IF SNMP is enabled with version 2 - usm_users = None - networks = {'access': self.access_network, - 'cidr': self.cidr, - 'community': self.community, - 'network': self.network} - - if is_snmp_enabled is False or version_change is True: - # Enable and configure snmp - self.enable_snmp() - self.configure_snmp(networks, usm_users) - result_message = "SNMP is enabled and configured" - - elif update_required is True: - # If snmp is already enabled, update the configuration if required - self.configure_snmp(networks, usm_users) - result_message = "SNMP is configured" - - elif is_snmp_enabled is True and self.parameters.get('state') == "absent": - # If snmp is enabled and state is absent, disable snmp - self.disable_snmp() - result_message = "SNMP is disabled" - - self.module.exit_json(changed=changed, msg=result_message) - - -def main(): - """ - Main function - """ - na_elementsw_cluster_snmp = ElementSWClusterSnmp() - na_elementsw_cluster_snmp.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_drive.py b/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_drive.py deleted file mode 100644 index f0fd7e38b..000000000 --- 
a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_drive.py +++ /dev/null @@ -1,368 +0,0 @@ -#!/usr/bin/python -# (c) 2018, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or -# https://www.gnu.org/licenses/gpl-3.0.txt) - -''' -Element Software Node Drives -''' -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' - -module: na_elementsw_drive - -short_description: NetApp Element Software Manage Node Drives -extends_documentation_fragment: - - netapp.elementsw.netapp.solidfire -version_added: 2.7.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> -description: - - Add, Erase or Remove drive for nodes on Element Software Cluster. - -options: - drive_ids: - description: - - List of Drive IDs or Serial Names of Node drives. - - If not specified, add and remove action will be performed on all drives of node_id - type: list - elements: str - aliases: ['drive_id'] - - state: - description: - - Element SW Storage Drive operation state. - - present - To add drive of node to participate in cluster data storage. - - absent - To remove the drive from being part of active cluster. - - clean - Clean-up any residual data persistent on a *removed* drive in a secured method. - choices: ['present', 'absent', 'clean'] - default: 'present' - type: str - - node_ids: - description: - - List of IDs or Names of cluster nodes. - - If node_ids and drive_ids are not specified, all available drives in the cluster are added if state is present. - - If node_ids and drive_ids are not specified, all active drives in the cluster are removed if state is absent. - required: false - type: list - elements: str - aliases: ['node_id'] - - force_during_upgrade: - description: - - Flag to force drive operation during upgrade. 
- - Not supported with latest version of SolidFire SDK (1.7.0.152) - type: 'bool' - - force_during_bin_sync: - description: - - Flag to force during a bin sync operation. - - Not supported with latest version of SolidFire SDK (1.7.0.152) - type: 'bool' -''' - -EXAMPLES = """ - - name: Add drive with status available to cluster - tags: - - elementsw_add_drive - na_elementsw_drive: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: present - drive_ids: scsi-SATA_SAMSUNG_MZ7LM48S2UJNX0J3221807 - force_during_upgrade: false - force_during_bin_sync: false - node_ids: sf4805-meg-03 - - - name: Remove active drive from cluster - tags: - - elementsw_remove_drive - na_elementsw_drive: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: absent - force_during_upgrade: false - drive_ids: scsi-SATA_SAMSUNG_MZ7LM48S2UJNX0J321208 - - - name: Secure Erase drive - tags: - - elemensw_clean_drive - na_elementsw_drive: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: clean - drive_ids: scsi-SATA_SAMSUNG_MZ7LM48S2UJNX0J432109 - node_ids: sf4805-meg-03 - - - name: Add all the drives of all nodes to cluster - tags: - - elementsw_add_node - na_elementsw_drive: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: present - force_during_upgrade: false - force_during_bin_sync: false - -""" - - -RETURN = """ - -msg: - description: Success message - returned: success - type: str - -""" -import traceback - -from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils._text import to_native -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils - -HAS_SF_SDK = netapp_utils.has_sf_sdk() - - -class ElementSWDrive(object): - """ - Element Software 
Storage Drive operations - """ - - def __init__(self): - self.argument_spec = netapp_utils.ontap_sf_host_argument_spec() - self.argument_spec.update(dict( - state=dict(required=False, choices=['present', 'absent', 'clean'], default='present'), - drive_ids=dict(required=False, type='list', elements='str', aliases=['drive_id']), - node_ids=dict(required=False, type='list', elements='str', aliases=['node_id']), - force_during_upgrade=dict(required=False, type='bool'), - force_during_bin_sync=dict(required=False, type='bool') - )) - - self.module = AnsibleModule( - argument_spec=self.argument_spec, - supports_check_mode=True - ) - - input_params = self.module.params - - self.state = input_params['state'] - self.drive_ids = input_params['drive_ids'] - self.node_ids = input_params['node_ids'] - self.force_during_upgrade = input_params['force_during_upgrade'] - self.force_during_bin_sync = input_params['force_during_bin_sync'] - self.list_nodes = None - self.debug = list() - - if HAS_SF_SDK is False: - self.module.fail_json( - msg="Unable to import the SolidFire Python SDK") - else: - # increase timeout, as removing a disk takes some time - self.sfe = netapp_utils.create_sf_connection(module=self.module, timeout=120) - - def get_node_id(self, node_id): - """ - Get Node ID - :description: Find and retrieve node_id from the active cluster - - :return: node_id (None if not found) - :rtype: node_id - """ - if self.list_nodes is None: - self.list_nodes = self.sfe.list_active_nodes() - for current_node in self.list_nodes.nodes: - if node_id == str(current_node.node_id): - return current_node.node_id - elif node_id == current_node.name: - return current_node.node_id - self.module.fail_json(msg='unable to find node for node_id=%s' % node_id) - - def get_drives_listby_status(self, node_num_ids): - """ - Capture list of drives based on status for a given node_id - :description: Capture list of active, failed and available drives from a given node_id - - :return: None - """ - 
self.active_drives = dict() - self.available_drives = dict() - self.other_drives = dict() - self.all_drives = self.sfe.list_drives() - - for drive in self.all_drives.drives: - # get all drives if no node is given, or match the node_ids - if node_num_ids is None or drive.node_id in node_num_ids: - if drive.status in ['active', 'failed']: - self.active_drives[drive.serial] = drive.drive_id - elif drive.status == "available": - self.available_drives[drive.serial] = drive.drive_id - else: - self.other_drives[drive.serial] = (drive.drive_id, drive.status) - - self.debug.append('available: %s' % self.available_drives) - self.debug.append('active: %s' % self.active_drives) - self.debug.append('other: %s' % self.other_drives) - - def get_drive_id(self, drive_id, node_num_ids): - """ - Get Drive ID - :description: Find and retrieve drive_id from the active cluster - Assumes self.all_drives is already populated - - :return: node_id (None if not found) - :rtype: node_id - """ - for drive in self.all_drives.drives: - if drive_id == str(drive.drive_id): - break - if drive_id == drive.serial: - break - else: - self.module.fail_json(msg='unable to find drive for drive_id=%s. Debug=%s' % (drive_id, self.debug)) - if node_num_ids and drive.node_id not in node_num_ids: - self.module.fail_json(msg='drive for drive_id=%s belongs to another node, with node_id=%d. 
Debug=%s' % (drive_id, drive.node_id, self.debug)) - return drive.drive_id, drive.status - - def get_active_drives(self, drives): - """ - return a list of active drives - if drives is specified, only [] or a subset of disks in drives are returned - else all available drives for this node or cluster are returned - """ - if drives is None: - return list(self.active_drives.values()) - return [drive_id for drive_id, status in drives if status in ['active', 'failed']] - - def get_available_drives(self, drives, action): - """ - return a list of available drives (not active) - if drives is specified, only [] or a subset of disks in drives are returned - else all available drives for this node or cluster are returned - """ - if drives is None: - return list(self.available_drives.values()) - action_list = list() - for drive_id, drive_status in drives: - if drive_status == 'available': - action_list.append(drive_id) - elif drive_status in ['active', 'failed']: - # already added - pass - elif drive_status == 'erasing' and action == 'erase': - # already erasing - pass - elif drive_status == 'removing': - self.module.fail_json(msg='Error - cannot %s drive while it is being removed. Debug: %s' % (action, self.debug)) - elif drive_status == 'erasing' and action == 'add': - self.module.fail_json(msg='Error - cannot %s drive while it is being erased. Debug: %s' % (action, self.debug)) - else: - self.module.fail_json(msg='Error - cannot %s drive while it is in %s state. 
Debug: %s' % (action, drive_status, self.debug)) - return action_list - - def add_drive(self, drives=None): - """ - Add Drive available for Cluster storage expansion - """ - kwargs = dict() - if self.force_during_upgrade is not None: - kwargs['force_during_upgrade'] = self.force_during_upgrade - if self.force_during_bin_sync is not None: - kwargs['force_during_bin_sync'] = self.force_during_bin_sync - try: - self.sfe.add_drives(drives, **kwargs) - except Exception as exception_object: - self.module.fail_json(msg='Error adding drive%s: %s: %s' % - ('s' if len(drives) > 1 else '', - str(drives), - to_native(exception_object)), - exception=traceback.format_exc()) - - def remove_drive(self, drives=None): - """ - Remove Drive active in Cluster - """ - kwargs = dict() - if self.force_during_upgrade is not None: - kwargs['force_during_upgrade'] = self.force_during_upgrade - try: - self.sfe.remove_drives(drives, **kwargs) - except Exception as exception_object: - self.module.fail_json(msg='Error removing drive%s: %s: %s' % - ('s' if len(drives) > 1 else '', - str(drives), - to_native(exception_object)), - exception=traceback.format_exc()) - - def secure_erase(self, drives=None): - """ - Secure Erase any residual data existing on a drive - """ - try: - self.sfe.secure_erase_drives(drives) - except Exception as exception_object: - self.module.fail_json(msg='Error cleaning data from drive%s: %s: %s' % - ('s' if len(drives) > 1 else '', - str(drives), - to_native(exception_object)), - exception=traceback.format_exc()) - - def apply(self): - """ - Check, process and initiate Drive operation - """ - changed = False - - action_list = [] - node_num_ids = None - drives = None - if self.node_ids: - node_num_ids = [self.get_node_id(node_id) for node_id in self.node_ids] - - self.get_drives_listby_status(node_num_ids) - if self.drive_ids: - drives = [self.get_drive_id(drive_id, node_num_ids) for drive_id in self.drive_ids] - - if self.state == "present": - action_list = 
self.get_available_drives(drives, 'add') - elif self.state == "absent": - action_list = self.get_active_drives(drives) - elif self.state == "clean": - action_list = self.get_available_drives(drives, 'erase') - - if len(action_list) > 0: - changed = True - if not self.module.check_mode and changed: - if self.state == "present": - self.add_drive(action_list) - elif self.state == "absent": - self.remove_drive(action_list) - elif self.state == "clean": - self.secure_erase(action_list) - - self.module.exit_json(changed=changed) - - -def main(): - """ - Main function - """ - - na_elementsw_drive = ElementSWDrive() - na_elementsw_drive.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_info.py b/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_info.py deleted file mode 100644 index fde928784..000000000 --- a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_info.py +++ /dev/null @@ -1,272 +0,0 @@ -#!/usr/bin/python -# (c) 2020, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -''' -Element Software Info -''' -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' - -module: na_elementsw_info -short_description: NetApp Element Software Info -extends_documentation_fragment: - - netapp.elementsw.netapp.solidfire -version_added: 20.10.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> -description: - - Collect cluster and node information. - - Use a MVIP as hostname for cluster and node scope. - - Use a MIP as hostname for node scope. 
- - When using MIPs, cluster APIs are expected to fail with 'xUnknownAPIMethod method=ListAccounts' - -options: - gather_subsets: - description: - - list of subsets to gather from target cluster or node - - supported values - - node_config, cluster_accounts, cluster_nodes, cluster_drives. - - additional values - - all - for all subsets, - - all_clusters - all subsets at cluster scope, - - all_nodes - all subsets at node scope - type: list - elements: str - default: ['all'] - aliases: ['gather_subset'] - - filter: - description: - - When a list of records is returned, this can be used to limit the records to be returned. - - If more than one key is used, all keys must match. - type: dict - - fail_on_error: - description: - - by default, errors are not fatal when collecting a subset. The subset will show on error in the info output. - - if set to True, the module fails on the first error. - type: bool - default: false - - fail_on_key_not_found: - description: - - force an error when filter is used and a key is not present in records. - type: bool - default: true - - fail_on_record_not_found: - description: - - force an error when filter is used and no record is matched. 
- type: bool - default: false -''' - -EXAMPLES = """ - - - name: get all available subsets - na_elementsw_info: - hostname: "{{ elementsw_mvip }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - gather_subsets: all - register: result - - - name: collect data for elementsw accounts using a filter - na_elementsw_info: - hostname: "{{ elementsw_mvip }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - gather_subsets: 'cluster_accounts' - filter: - username: "{{ username_to_find }}" - register: result -""" - -RETURN = """ - -info: - description: - - a dictionary of collected subsets - - each subset if in JSON format - returned: success - type: dict - -debug: - description: - - a list of detailed error messages if some subsets cannot be collected - returned: success - type: list - -""" -from ansible.module_utils.basic import AnsibleModule - -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.elementsw.plugins.module_utils.netapp_module import NetAppModule - -HAS_SF_SDK = netapp_utils.has_sf_sdk() - - -class ElementSWInfo(object): - ''' - Element Software Initialize node with ownership for cluster formation - ''' - - def __init__(self): - self.argument_spec = netapp_utils.ontap_sf_host_argument_spec() - self.argument_spec.update(dict( - gather_subsets=dict(type='list', elements='str', aliases=['gather_subset'], default='all'), - filter=dict(type='dict'), - fail_on_error=dict(type='bool', default=False), - fail_on_key_not_found=dict(type='bool', default=True), - fail_on_record_not_found=dict(type='bool', default=False), - )) - self.module = AnsibleModule( - argument_spec=self.argument_spec, - supports_check_mode=True - ) - - self.na_helper = NetAppModule() - self.parameters = self.na_helper.set_parameters(self.module.params) - self.debug = list() - - if HAS_SF_SDK is False: - self.module.fail_json(msg="Unable to import the SolidFire Python 
SDK") - - # 442 for node APIs, 443 (default) for cluster APIs - for role, port in [('node', 442), ('cluster', 443)]: - try: - conn = netapp_utils.create_sf_connection(module=self.module, raise_on_connection_error=True, port=port) - if role == 'node': - self.sfe_node = conn - else: - self.sfe_cluster = conn - except netapp_utils.solidfire.common.ApiConnectionError as exc: - if str(exc) == "Bad Credentials": - msg = ' Make sure to use valid %s credentials for username and password.' % 'node' if port == 442 else 'cluster' - msg += '%s reported: %s' % ('Node' if port == 442 else 'Cluster', repr(exc)) - else: - msg = 'Failed to create connection for %s:%d - %s' % (self.parameters['hostname'], port, repr(exc)) - self.module.fail_json(msg=msg) - except Exception as exc: - self.module.fail_json(msg='Failed to connect for %s:%d - %s' % (self.parameters['hostname'], port, repr(exc))) - - # TODO: add new node methods here - self.node_methods = dict( - node_config=self.sfe_node.get_config, - ) - # TODO: add new cluster methods here - self.cluster_methods = dict( - cluster_accounts=self.sfe_cluster.list_accounts, - cluster_drives=self.sfe_cluster.list_drives, - cluster_nodes=self.sfe_cluster.list_all_nodes - ) - self.methods = dict(self.node_methods) - self.methods.update(self.cluster_methods) - - # add telemetry attributes - does not matter if we are using cluster or node here - # TODO: most if not all get and list APIs do not have an attributes parameter - - def get_info(self, name): - ''' - Get Element Info - run a cluster or node list method - return output as json - ''' - info = None - if name not in self.methods: - msg = 'Error: unknown subset %s.' 
% name - msg += ' Known_subsets: %s' % ', '.join(self.methods.keys()) - self.module.fail_json(msg=msg, debug=self.debug) - try: - info = self.methods[name]() - return info.to_json() - except netapp_utils.solidfire.common.ApiServerError as exc: - # the new SDK rearranged the fields in a different order - if all(x in str(exc) for x in ('err_json', '500', 'xUnknownAPIMethod', 'method=')): - info = 'Error (API not in scope?)' - else: - info = 'Error' - msg = '%s for subset: %s: %s' % (info, name, repr(exc)) - if self.parameters['fail_on_error']: - self.module.fail_json(msg=msg) - self.debug.append(msg) - return info - - def filter_list_of_dict_by_key(self, records, key, value): - matched = list() - for record in records: - if key in record and record[key] == value: - matched.append(record) - if key not in record and self.parameters['fail_on_key_not_found']: - msg = 'Error: key %s not found in %s' % (key, repr(record)) - self.module.fail_json(msg=msg) - return matched - - def filter_records(self, records, filter_dict): - - if isinstance(records, dict): - if len(records) == 1: - key, value = list(records.items())[0] - return dict({key: self.filter_records(value, filter_dict)}) - if not isinstance(records, list): - return records - matched = records - for key, value in filter_dict.items(): - matched = self.filter_list_of_dict_by_key(matched, key, value) - if self.parameters['fail_on_record_not_found'] and len(matched) == 0: - msg = 'Error: no match for %s out of %d records' % (repr(self.parameters['filter']), len(records)) - self.debug.append('Unmatched records: %s' % repr(records)) - self.module.fail_json(msg=msg, debug=self.debug) - return matched - - def get_and_filter_info(self, name): - ''' - Get data - If filter is present, only return the records that are matched - return output as json - ''' - records = self.get_info(name) - if self.parameters.get('filter') is None: - return records - matched = self.filter_records(records, self.parameters.get('filter')) - return 
matched - - def apply(self): - ''' - Check connection and initialize node with cluster ownership - ''' - changed = False - info = dict() - my_subsets = ('all', 'all_clusters', 'all_nodes') - if any(x in self.parameters['gather_subsets'] for x in my_subsets) and len(self.parameters['gather_subsets']) > 1: - msg = 'When any of %s is used, no other subset is allowed' % repr(my_subsets) - self.module.fail_json(msg=msg) - if 'all' in self.parameters['gather_subsets']: - self.parameters['gather_subsets'] = self.methods.keys() - if 'all_clusters' in self.parameters['gather_subsets']: - self.parameters['gather_subsets'] = self.cluster_methods.keys() - if 'all_nodes' in self.parameters['gather_subsets']: - self.parameters['gather_subsets'] = self.node_methods.keys() - for name in self.parameters['gather_subsets']: - info[name] = self.get_and_filter_info(name) - self.module.exit_json(changed=changed, info=info, debug=self.debug) - - -def main(): - ''' - Main function - ''' - na_elementsw_cluster = ElementSWInfo() - na_elementsw_cluster.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_initiators.py b/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_initiators.py deleted file mode 100644 index 9bef345b4..000000000 --- a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_initiators.py +++ /dev/null @@ -1,343 +0,0 @@ -#!/usr/bin/python -# (c) 2018, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or -# https://www.gnu.org/licenses/gpl-3.0.txt) - -''' -Element Software manage initiators -''' -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'community'} - - -DOCUMENTATION = ''' - -module: na_elementsw_initiators - -short_description: Manage Element SW initiators -extends_documentation_fragment: - - 
netapp.elementsw.netapp.solidfire -version_added: 2.8.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> -description: -- Manage Element Software initiators that allow external clients access to volumes. - -options: - initiators: - description: A list of objects containing characteristics of each initiator. - suboptions: - name: - description: The name of the initiator. - type: str - required: true - - alias: - description: The friendly name assigned to this initiator. - type: str - - initiator_id: - description: The numeric ID of the initiator. - type: int - - volume_access_group_id: - description: volumeAccessGroupID to which this initiator belongs. - type: int - - attributes: - description: A set of JSON attributes to assign to this initiator. - type: dict - type: list - elements: dict - - state: - description: - - Whether the specified initiator should exist or not. - choices: ['present', 'absent'] - default: present - type: str -''' - -EXAMPLES = """ - - - name: Manage initiators - tags: - - na_elementsw_initiators - na_elementsw_initiators: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - initiators: - - name: a - alias: a1 - initiator_id: 1 - volume_access_group_id: 1 - attributes: {"key": "value"} - - name: b - alias: b2 - initiator_id: 2 - volume_access_group_id: 2 - state: present -""" - -RETURN = """ - -msg: - description: Success message - returned: success - type: str - -""" -import traceback - -from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils._text import to_native -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.elementsw.plugins.module_utils.netapp_elementsw_module import NaElementSWModule -from ansible_collections.netapp.elementsw.plugins.module_utils.netapp_module import NetAppModule -HAS_SF_SDK = netapp_utils.has_sf_sdk() -if HAS_SF_SDK: - from 
solidfire.models import ModifyInitiator - - -class ElementSWInitiators(object): - """ - Element Software Manage Element SW initiators - """ - def __init__(self): - self.argument_spec = netapp_utils.ontap_sf_host_argument_spec() - - self.argument_spec.update(dict( - initiators=dict( - type='list', - elements='dict', - options=dict( - name=dict(type='str', required=True), - alias=dict(type='str', default=None), - initiator_id=dict(type='int', default=None), - volume_access_group_id=dict(type='int', default=None), - attributes=dict(type='dict', default=None), - ) - ), - state=dict(choices=['present', 'absent'], default='present'), - )) - - self.module = AnsibleModule( - argument_spec=self.argument_spec, - supports_check_mode=True - ) - - self.na_helper = NetAppModule() - self.parameters = self.na_helper.set_parameters(self.module.params) - self.debug = list() - - if HAS_SF_SDK is False: - self.module.fail_json(msg="Unable to import the SolidFire Python SDK") - else: - self.sfe = netapp_utils.create_sf_connection(module=self.module) - - self.elementsw_helper = NaElementSWModule(self.sfe) - - # iterate over each user-provided initiator - for initiator in self.parameters.get('initiators'): - # add telemetry attributes - if 'attributes' in initiator and initiator['attributes']: - initiator['attributes'].update(self.elementsw_helper.set_element_attributes(source='na_elementsw_initiators')) - else: - initiator['attributes'] = self.elementsw_helper.set_element_attributes(source='na_elementsw_initiators') - - def compare_initiators(self, user_initiator, existing_initiator): - """ - compare user input initiator with existing dict - :return: True if matched, False otherwise - """ - if user_initiator is None or existing_initiator is None: - return False - changed = False - for param in user_initiator: - # lookup initiator_name instead of name - if param == 'name': - if user_initiator['name'] == existing_initiator['initiator_name']: - pass - elif param == 'initiator_id': - # 
can't change the key - pass - elif user_initiator[param] == existing_initiator[param]: - pass - else: - self.debug.append('Initiator: %s. Changed: %s from: %s to %s' % - (user_initiator['name'], param, str(existing_initiator[param]), str(user_initiator[param]))) - changed = True - return changed - - def initiator_to_dict(self, initiator_obj): - """ - converts initiator class object to dict - :return: reconstructed initiator dict - """ - known_params = ['initiator_name', - 'alias', - 'initiator_id', - 'volume_access_groups', - 'volume_access_group_id', - 'attributes'] - initiator_dict = {} - - # missing parameter cause error - # so assign defaults - for param in known_params: - initiator_dict[param] = getattr(initiator_obj, param, None) - if initiator_dict['volume_access_groups'] is not None: - if len(initiator_dict['volume_access_groups']) == 1: - initiator_dict['volume_access_group_id'] = initiator_dict['volume_access_groups'][0] - elif len(initiator_dict['volume_access_groups']) > 1: - self.module.fail_json(msg="Only 1 access group is supported, found: %s" % repr(initiator_obj)) - del initiator_dict['volume_access_groups'] - return initiator_dict - - def find_initiator(self, id=None, name=None): - """ - find a specific initiator - :return: initiator dict - """ - initiator_details = None - if self.all_existing_initiators is None: - return initiator_details - for initiator in self.all_existing_initiators: - # if name is provided or - # if id is provided - if name is not None: - if initiator.initiator_name == name: - initiator_details = self.initiator_to_dict(initiator) - elif id is not None: - if initiator.initiator_id == id: - initiator_details = self.initiator_to_dict(initiator) - else: - # if neither id nor name provided - # return everything - initiator_details = self.all_existing_initiators - return initiator_details - - @staticmethod - def rename_key(obj, old_name, new_name): - obj[new_name] = obj.pop(old_name) - - def create_initiator(self, initiator): - """ 
- create initiator - """ - # SF SDK is using camelCase for this one - self.rename_key(initiator, 'volume_access_group_id', 'volumeAccessGroupID') - # create_initiators needs an array - initiator_list = [initiator] - try: - self.sfe.create_initiators(initiator_list) - except Exception as exception_object: - self.module.fail_json(msg='Error creating initiator %s' % (to_native(exception_object)), - exception=traceback.format_exc()) - - def delete_initiator(self, initiator): - """ - delete initiator - """ - # delete_initiators needs an array - initiator_id_array = [initiator] - try: - self.sfe.delete_initiators(initiator_id_array) - except Exception as exception_object: - self.module.fail_json(msg='Error deleting initiator %s' % (to_native(exception_object)), - exception=traceback.format_exc()) - - def modify_initiator(self, initiator, existing_initiator): - """ - modify initiator - """ - # create the new initiator dict - # by merging old and new values - merged_initiator = existing_initiator.copy() - # can't change the key - del initiator['initiator_id'] - merged_initiator.update(initiator) - - # we MUST create an object before sending - # the new initiator to modify_initiator - initiator_object = ModifyInitiator(initiator_id=merged_initiator['initiator_id'], - alias=merged_initiator['alias'], - volume_access_group_id=merged_initiator['volume_access_group_id'], - attributes=merged_initiator['attributes']) - initiator_list = [initiator_object] - try: - self.sfe.modify_initiators(initiators=initiator_list) - except Exception as exception_object: - self.module.fail_json(msg='Error modifying initiator: %s' % (to_native(exception_object)), - exception=traceback.format_exc()) - - def apply(self): - """ - configure initiators - """ - changed = False - result_message = None - - # get all user provided initiators - input_initiators = self.parameters.get('initiators') - - # get all initiators - # store in a cache variable - self.all_existing_initiators = 
self.sfe.list_initiators().initiators - - # iterate over each user-provided initiator - for in_initiator in input_initiators: - if self.parameters.get('state') == 'present': - # check if initiator_id is provided and exists - if 'initiator_id' in in_initiator and in_initiator['initiator_id'] is not None and \ - self.find_initiator(id=in_initiator['initiator_id']) is not None: - if self.compare_initiators(in_initiator, self.find_initiator(id=in_initiator['initiator_id'])): - changed = True - result_message = 'modifying initiator(s)' - self.modify_initiator(in_initiator, self.find_initiator(id=in_initiator['initiator_id'])) - # otherwise check if name is provided and exists - elif 'name' in in_initiator and in_initiator['name'] is not None and self.find_initiator(name=in_initiator['name']) is not None: - if self.compare_initiators(in_initiator, self.find_initiator(name=in_initiator['name'])): - changed = True - result_message = 'modifying initiator(s)' - self.modify_initiator(in_initiator, self.find_initiator(name=in_initiator['name'])) - # this is a create op if initiator doesn't exist - else: - changed = True - result_message = 'creating initiator(s)' - self.create_initiator(in_initiator) - elif self.parameters.get('state') == 'absent': - # delete_initiators only processes ids - # so pass ids of initiators to method - if 'name' in in_initiator and in_initiator['name'] is not None and \ - self.find_initiator(name=in_initiator['name']) is not None: - changed = True - result_message = 'deleting initiator(s)' - self.delete_initiator(self.find_initiator(name=in_initiator['name'])['initiator_id']) - elif 'initiator_id' in in_initiator and in_initiator['initiator_id'] is not None and \ - self.find_initiator(id=in_initiator['initiator_id']) is not None: - changed = True - result_message = 'deleting initiator(s)' - self.delete_initiator(in_initiator['initiator_id']) - if self.module.check_mode is True: - result_message = "Check mode, skipping changes" - if self.debug: - 
result_message += ". %s" % self.debug - self.module.exit_json(changed=changed, msg=result_message) - - -def main(): - """ - Main function - """ - na_elementsw_initiators = ElementSWInitiators() - na_elementsw_initiators.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_ldap.py b/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_ldap.py deleted file mode 100644 index a71ddf564..000000000 --- a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_ldap.py +++ /dev/null @@ -1,254 +0,0 @@ -#!/usr/bin/python - -# (c) 2017, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' - -module: na_elementsw_ldap - -short_description: NetApp Element Software Manage ldap admin users -extends_documentation_fragment: - - netapp.elementsw.netapp.solidfire -version_added: 2.7.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> -description: -- Enable, disable ldap, and add ldap users - -options: - - state: - description: - - Whether the specified volume should exist or not. - choices: ['present', 'absent'] - default: present - type: str - - authType: - description: - - Identifies which user authentication method to use. 
- choices: ['DirectBind', 'SearchAndBind'] - type: str - - groupSearchBaseDn: - description: - - The base DN of the tree to start the group search (will do a subtree search from here) - type: str - - groupSearchType: - description: - - Controls the default group search filter used - choices: ['NoGroup', 'ActiveDirectory', 'MemberDN'] - type: str - - serverURIs: - description: - - A comma-separated list of LDAP server URIs - type: str - - userSearchBaseDN: - description: - - The base DN of the tree to start the search (will do a subtree search from here) - type: str - - searchBindDN: - description: - - A dully qualified DN to log in with to perform an LDAp search for the user (needs read access to the LDAP directory). - type: str - - searchBindPassword: - description: - - The password for the searchBindDN account used for searching - type: str - - userSearchFilter: - description: - - the LDAP Filter to use - type: str - - userDNTemplate: - description: - - A string that is used form a fully qualified user DN. 
- type: str - - groupSearchCustomFilter: - description: - - For use with the CustomFilter Search type - type: str -''' - -EXAMPLES = """ - - name: disable ldap authentication - na_elementsw_ldap: - state: absent - username: "{{ admin username }}" - password: "{{ admin password }}" - hostname: "{{ hostname }}" - - - name: Enable ldap authentication - na_elementsw_ldap: - state: present - username: "{{ admin username }}" - password: "{{ admin password }}" - hostname: "{{ hostname }}" - authType: DirectBind - serverURIs: ldap://svmdurlabesx01spd_ldapclnt - groupSearchType: MemberDN - userDNTemplate: uid=%USERNAME%,cn=users,cn=accounts,dc=corp,dc="{{ company name }}",dc=com - - -""" - -RETURN = """ - -""" -import traceback - -from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils._text import to_native -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils - -HAS_SF_SDK = netapp_utils.has_sf_sdk() -try: - import solidfire.common -except Exception: - HAS_SF_SDK = False - - -class NetappElementLdap(object): - - def __init__(self): - self.argument_spec = netapp_utils.ontap_sf_host_argument_spec() - self.argument_spec.update( - state=dict(required=False, type='str', choices=['present', 'absent'], default='present'), - authType=dict(type='str', choices=['DirectBind', 'SearchAndBind']), - groupSearchBaseDn=dict(type='str'), - groupSearchType=dict(type='str', choices=['NoGroup', 'ActiveDirectory', 'MemberDN']), - serverURIs=dict(type='str'), - userSearchBaseDN=dict(type='str'), - searchBindDN=dict(type='str'), - searchBindPassword=dict(type='str', no_log=True), - userSearchFilter=dict(type='str'), - userDNTemplate=dict(type='str'), - groupSearchCustomFilter=dict(type='str'), - ) - - self.module = AnsibleModule( - argument_spec=self.argument_spec, - supports_check_mode=True, - ) - - param = self.module.params - - # set up state variables - self.state = param['state'] - self.authType = param['authType'] - 
self.groupSearchBaseDn = param['groupSearchBaseDn'] - self.groupSearchType = param['groupSearchType'] - self.serverURIs = param['serverURIs'] - if self.serverURIs is not None: - self.serverURIs = self.serverURIs.split(',') - self.userSearchBaseDN = param['userSearchBaseDN'] - self.searchBindDN = param['searchBindDN'] - self.searchBindPassword = param['searchBindPassword'] - self.userSearchFilter = param['userSearchFilter'] - self.userDNTemplate = param['userDNTemplate'] - self.groupSearchCustomFilter = param['groupSearchCustomFilter'] - - if HAS_SF_SDK is False: - self.module.fail_json(msg="Unable to import the SolidFire Python SDK") - else: - self.sfe = netapp_utils.create_sf_connection(module=self.module) - - def get_ldap_configuration(self): - """ - Return ldap configuration if found - - :return: Details about the ldap configuration. None if not found. - :rtype: solidfire.models.GetLdapConfigurationResult - """ - ldap_config = self.sfe.get_ldap_configuration() - return ldap_config - - def enable_ldap(self): - """ - Enable LDAP - :return: nothing - """ - try: - self.sfe.enable_ldap_authentication(self.serverURIs, auth_type=self.authType, - group_search_base_dn=self.groupSearchBaseDn, - group_search_type=self.groupSearchType, - group_search_custom_filter=self.groupSearchCustomFilter, - search_bind_dn=self.searchBindDN, - search_bind_password=self.searchBindPassword, - user_search_base_dn=self.userSearchBaseDN, - user_search_filter=self.userSearchFilter, - user_dntemplate=self.userDNTemplate) - except solidfire.common.ApiServerError as error: - self.module.fail_json(msg='Error enabling LDAP: %s' % (to_native(error)), - exception=traceback.format_exc()) - - def check_config(self, ldap_config): - """ - Check to see if the ldap config has been modified. 
- :param ldap_config: The LDAP configuration - :return: False if the config is the same as the playbook, True if it is not - """ - if self.authType != ldap_config.ldap_configuration.auth_type: - return True - if self.serverURIs != ldap_config.ldap_configuration.server_uris: - return True - if self.groupSearchBaseDn != ldap_config.ldap_configuration.group_search_base_dn: - return True - if self.groupSearchType != ldap_config.ldap_configuration.group_search_type: - return True - if self.groupSearchCustomFilter != ldap_config.ldap_configuration.group_search_custom_filter: - return True - if self.searchBindDN != ldap_config.ldap_configuration.search_bind_dn: - return True - if self.searchBindPassword != ldap_config.ldap_configuration.search_bind_password: - return True - if self.userSearchBaseDN != ldap_config.ldap_configuration.user_search_base_dn: - return True - if self.userSearchFilter != ldap_config.ldap_configuration.user_search_filter: - return True - if self.userDNTemplate != ldap_config.ldap_configuration.user_dntemplate: - return True - return False - - def apply(self): - changed = False - ldap_config = self.get_ldap_configuration() - if self.state == 'absent': - if ldap_config and ldap_config.ldap_configuration.enabled: - changed = True - if self.state == 'present' and self.check_config(ldap_config): - changed = True - if changed: - if self.module.check_mode: - pass - else: - if self.state == 'present': - self.enable_ldap() - elif self.state == 'absent': - self.sfe.disable_ldap_authentication() - - self.module.exit_json(changed=changed) - - -def main(): - v = NetappElementLdap() - v.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_network_interfaces.py b/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_network_interfaces.py deleted file mode 100644 index a9151a620..000000000 --- 
a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_network_interfaces.py +++ /dev/null @@ -1,423 +0,0 @@ -#!/usr/bin/python -# (c) 2018, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or -# https://www.gnu.org/licenses/gpl-3.0.txt) - -''' -Element Software Node Network Interfaces - Bond 1G and 10G configuration -''' -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' - -module: na_elementsw_network_interfaces - -short_description: NetApp Element Software Configure Node Network Interfaces -extends_documentation_fragment: - - netapp.elementsw.netapp.solidfire -version_added: 2.7.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> -description: - - Configure Element SW Node Network Interfaces for Bond 1G and 10G IP addresses. - - This module does not create interfaces, it expects the interfaces to already exists and can only modify them. - - This module cannot set or modify the method (Loopback, manual, dhcp, static). - - This module is not idempotent and does not support check_mode. - -options: - method: - description: - - deprecated, this option would trigger a 'updated failed' error - type: str - - ip_address_1g: - description: - - deprecated, use bond_1g option. - type: str - - ip_address_10g: - description: - - deprecated, use bond_10g option. - type: str - - subnet_1g: - description: - - deprecated, use bond_1g option. - type: str - - subnet_10g: - description: - - deprecated, use bond_10g option. - type: str - - gateway_address_1g: - description: - - deprecated, use bond_1g option. - type: str - - gateway_address_10g: - description: - - deprecated, use bond_10g option. - type: str - - mtu_1g: - description: - - deprecated, use bond_1g option. - type: str - - mtu_10g: - description: - - deprecated, use bond_10g option. 
- type: str - - dns_nameservers: - description: - - deprecated, use bond_1g and bond_10g options. - type: list - elements: str - - dns_search_domains: - description: - - deprecated, use bond_1g and bond_10g options. - type: list - elements: str - - bond_mode_1g: - description: - - deprecated, use bond_1g option. - type: str - - bond_mode_10g: - description: - - deprecated, use bond_10g option. - type: str - - lacp_1g: - description: - - deprecated, use bond_1g option. - type: str - - lacp_10g: - description: - - deprecated, use bond_10g option. - type: str - - virtual_network_tag: - description: - - deprecated, use bond_1g and bond_10g options. - type: str - - bond_1g: - description: - - settings for the Bond1G interface. - type: dict - suboptions: - address: - description: - - IP address for the interface. - type: str - netmask: - description: - - subnet mask for the interface. - type: str - gateway: - description: - - IP router network address to send packets out of the local network. - type: str - mtu: - description: - - The largest packet size (in bytes) that the interface can transmit.. - - Must be greater than or equal to 1500 bytes. - type: str - dns_nameservers: - description: - - List of addresses for domain name servers. - type: list - elements: str - dns_search: - description: - - List of DNS search domains. - type: list - elements: str - bond_mode: - description: - - Bonding mode. - choices: ['ActivePassive', 'ALB', 'LACP'] - type: str - bond_lacp_rate: - description: - - Link Aggregation Control Protocol - useful only if LACP is selected as the Bond Mode. - - Slow - Packets are transmitted at 30 second intervals. - - Fast - Packets are transmitted in 1 second intervals. - choices: ['Fast', 'Slow'] - type: str - virtual_network_tag: - description: - - The virtual network identifier of the interface (VLAN tag). - type: str - - bond_10g: - description: - - settings for the Bond10G interface. 
- type: dict - suboptions: - address: - description: - - IP address for the interface. - type: str - netmask: - description: - - subnet mask for the interface. - type: str - gateway: - description: - - IP router network address to send packets out of the local network. - type: str - mtu: - description: - - The largest packet size (in bytes) that the interface can transmit.. - - Must be greater than or equal to 1500 bytes. - type: str - dns_nameservers: - description: - - List of addresses for domain name servers. - type: list - elements: str - dns_search: - description: - - List of DNS search domains. - type: list - elements: str - bond_mode: - description: - - Bonding mode. - choices: ['ActivePassive', 'ALB', 'LACP'] - type: str - bond_lacp_rate: - description: - - Link Aggregation Control Protocol - useful only if LACP is selected as the Bond Mode. - - Slow - Packets are transmitted at 30 second intervals. - - Fast - Packets are transmitted in 1 second intervals. - choices: ['Fast', 'Slow'] - type: str - virtual_network_tag: - description: - - The virtual network identifier of the interface (VLAN tag). 
- type: str - -''' - -EXAMPLES = """ - - - name: Set Node network interfaces configuration for Bond 1G and 10G properties - tags: - - elementsw_network_interfaces - na_elementsw_network_interfaces: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - bond_1g: - address: 10.253.168.131 - netmask: 255.255.248.0 - gateway: 10.253.168.1 - mtu: '1500' - bond_mode: ActivePassive - dns_nameservers: dns1,dns2 - dns_search: domain1,domain2 - bond_10g: - address: 10.253.1.202 - netmask: 255.255.255.192 - gateway: 10.253.1.193 - mtu: '9000' - bond_mode: LACP - bond_lacp_rate: Fast - virtual_network_tag: vnet_tag -""" - -RETURN = """ - -msg: - description: Success message - returned: success - type: str - -""" -import traceback - -from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils._text import to_native -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils - -HAS_SF_SDK = netapp_utils.has_sf_sdk() - -try: - from solidfire.models import Network, NetworkConfig - from solidfire.common import ApiConnectionError as sf_ApiConnectionError, ApiServerError as sf_ApiServerError - HAS_SF_SDK = True -except ImportError: - HAS_SF_SDK = False - - -class ElementSWNetworkInterfaces(object): - """ - Element Software Network Interfaces - Bond 1G and 10G Network configuration - """ - - def __init__(self): - self.argument_spec = netapp_utils.ontap_sf_host_argument_spec() - self.argument_spec.update(dict( - method=dict(required=False, type='str'), - ip_address_1g=dict(required=False, type='str'), - ip_address_10g=dict(required=False, type='str'), - subnet_1g=dict(required=False, type='str'), - subnet_10g=dict(required=False, type='str'), - gateway_address_1g=dict(required=False, type='str'), - gateway_address_10g=dict(required=False, type='str'), - mtu_1g=dict(required=False, type='str'), - mtu_10g=dict(required=False, type='str'), - dns_nameservers=dict(required=False, 
type='list', elements='str'), - dns_search_domains=dict(required=False, type='list', elements='str'), - bond_mode_1g=dict(required=False, type='str'), - bond_mode_10g=dict(required=False, type='str'), - lacp_1g=dict(required=False, type='str'), - lacp_10g=dict(required=False, type='str'), - virtual_network_tag=dict(required=False, type='str'), - bond_1g=dict(required=False, type='dict', options=dict( - address=dict(required=False, type='str'), - netmask=dict(required=False, type='str'), - gateway=dict(required=False, type='str'), - mtu=dict(required=False, type='str'), - dns_nameservers=dict(required=False, type='list', elements='str'), - dns_search=dict(required=False, type='list', elements='str'), - bond_mode=dict(required=False, type='str', choices=['ActivePassive', 'ALB', 'LACP']), - bond_lacp_rate=dict(required=False, type='str', choices=['Fast', 'Slow']), - virtual_network_tag=dict(required=False, type='str'), - )), - bond_10g=dict(required=False, type='dict', options=dict( - address=dict(required=False, type='str'), - netmask=dict(required=False, type='str'), - gateway=dict(required=False, type='str'), - mtu=dict(required=False, type='str'), - dns_nameservers=dict(required=False, type='list', elements='str'), - dns_search=dict(required=False, type='list', elements='str'), - bond_mode=dict(required=False, type='str', choices=['ActivePassive', 'ALB', 'LACP']), - bond_lacp_rate=dict(required=False, type='str', choices=['Fast', 'Slow']), - virtual_network_tag=dict(required=False, type='str'), - )), - )) - - self.module = AnsibleModule( - argument_spec=self.argument_spec, - supports_check_mode=False - ) - - input_params = self.module.params - self.fail_when_deprecated_options_are_set(input_params) - - self.bond1g = input_params['bond_1g'] - self.bond10g = input_params['bond_10g'] - - if HAS_SF_SDK is False: - self.module.fail_json(msg="Unable to import the SolidFire Python SDK") - # increase time out, as it may take 30 seconds when making a change - self.sfe = 
netapp_utils.create_sf_connection(module=self.module, port=442, timeout=90) - - def fail_when_deprecated_options_are_set(self, input_params): - ''' report an error and exit if any deprecated options is set ''' - - dparms_1g = [x for x in ('ip_address_1g', 'subnet_1g', 'gateway_address_1g', 'mtu_1g', 'bond_mode_1g', 'lacp_1g') - if input_params[x] is not None] - dparms_10g = [x for x in ('ip_address_10g', 'subnet_10g', 'gateway_address_10g', 'mtu_10g', 'bond_mode_10g', 'lacp_10g') - if input_params[x] is not None] - dparms_common = [x for x in ('dns_nameservers', 'dns_search_domains', 'virtual_network_tag') - if input_params[x] is not None] - - error_msg = '' - if dparms_1g and dparms_10g: - error_msg = 'Please use the new bond_1g and bond_10g options to configure the bond interfaces.' - elif dparms_1g: - error_msg = 'Please use the new bond_1g option to configure the bond 1G interface.' - elif dparms_10g: - error_msg = 'Please use the new bond_10g option to configure the bond 10G interface.' - elif dparms_common: - error_msg = 'Please use the new bond_1g or bond_10g options to configure the bond interfaces.' - if input_params['method']: - error_msg = 'This module cannot set or change "method". 
' + error_msg - dparms_common.append('method') - if error_msg: - error_msg += ' The following parameters are deprecated and cannot be used: ' - dparms = dparms_1g - dparms.extend(dparms_10g) - dparms.extend(dparms_common) - error_msg += ', '.join(dparms) - self.module.fail_json(msg=error_msg) - - def set_network_config(self, network_object): - """ - set network configuration - """ - try: - self.sfe.set_network_config(network=network_object) - except (sf_ApiConnectionError, sf_ApiServerError) as exception_object: - self.module.fail_json(msg='Error setting network config for node %s' % (to_native(exception_object)), - exception=traceback.format_exc()) - - def set_network_config_object(self, network_params): - ''' set SolidFire network config object ''' - network_config = dict() - if network_params is not None: - for key in network_params: - if network_params[key] is not None: - network_config[key] = network_params[key] - if network_config: - return NetworkConfig(**network_config) - return None - - def set_network_object(self): - """ - Set Element SW Network object - :description: set Network object - - :return: Network object - :rtype: object(Network object) - """ - bond_1g_network = self.set_network_config_object(self.bond1g) - bond_10g_network = self.set_network_config_object(self.bond10g) - network_object = None - if bond_1g_network is not None or bond_10g_network is not None: - network_object = Network(bond1_g=bond_1g_network, - bond10_g=bond_10g_network) - return network_object - - def apply(self): - """ - Check connection and initialize node with cluster ownership - """ - changed = False - result_message = None - network_object = self.set_network_object() - if network_object is not None: - if not self.module.check_mode: - self.set_network_config(network_object) - changed = True - else: - result_message = "Skipping changes, No change requested" - self.module.exit_json(changed=changed, msg=result_message) - - -def main(): - """ - Main function - """ - 
elementsw_network_interfaces = ElementSWNetworkInterfaces() - elementsw_network_interfaces.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_node.py b/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_node.py deleted file mode 100644 index d1412f2d4..000000000 --- a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_node.py +++ /dev/null @@ -1,357 +0,0 @@ -#!/usr/bin/python -# (c) 2018, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or -# https://www.gnu.org/licenses/gpl-3.0.txt) - -''' -Element Software Node Operation -''' -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' - -module: na_elementsw_node - -short_description: NetApp Element Software Node Operation -extends_documentation_fragment: - - netapp.elementsw.netapp.solidfire -version_added: 2.7.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> -description: - - Add, remove cluster node on Element Software Cluster. - - Set cluster name on node. - - When using the preset_only option, hostname/username/password are required but not used. - -options: - state: - description: - - Element Software Storage Node operation state. - - present - To add pending node to participate in cluster data storage. - - absent - To remove node from active cluster. A node cannot be removed if active drives are present. - choices: ['present', 'absent'] - default: 'present' - type: str - - node_ids: - description: - - List of IDs or Names or IP Addresses of nodes to add or remove. - - If cluster_name is set, node MIPs are required. 
- type: list - elements: str - required: true - aliases: ['node_id'] - - cluster_name: - description: - - If set, the current node configuration is updated with this name before adding the node to the cluster. - - This requires the node_ids to be specified as MIPs (Management IP Adresses) - type: str - version_added: 20.9.0 - - preset_only: - description: - - If true and state is 'present', set the cluster name for each node in node_ids, but do not add the nodes. - - They can be added using na_elementsw_cluster for initial cluster creation. - - If false, proceed with addition/removal. - type: bool - default: false - version_added: 20.9.0 -''' - -EXAMPLES = """ - - name: Add node from pending to active cluster - tags: - - elementsw_add_node - na_elementsw_node: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: present - node_id: sf4805-meg-03 - - - name: Remove active node from cluster - tags: - - elementsw_remove_node - na_elementsw_node: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: absent - node_id: 13 - - - name: Add node from pending to active cluster using node IP - tags: - - elementsw_add_node_ip - na_elementsw_node: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: present - node_id: 10.109.48.65 - cluster_name: sfcluster01 - - - name: Only set cluster name - tags: - - elementsw_add_node_ip - na_elementsw_node: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: present - node_ids: 10.109.48.65,10.109.48.66 - cluster_name: sfcluster01 - preset_only: true -""" - - -RETURN = """ - -msg: - description: Success message - returned: success - type: str - -""" -import traceback - -from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils._text 
import to_native -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils - - -HAS_SF_SDK = netapp_utils.has_sf_sdk() - - -class ElementSWNode(object): - """ - Element SW Storage Node operations - """ - - def __init__(self): - self.argument_spec = netapp_utils.ontap_sf_host_argument_spec() - self.argument_spec.update(dict( - state=dict(required=False, choices=['present', 'absent'], default='present'), - node_ids=dict(required=True, type='list', elements='str', aliases=['node_id']), - cluster_name=dict(required=False, type='str'), - preset_only=dict(required=False, type='bool', default=False), - )) - - self.module = AnsibleModule( - argument_spec=self.argument_spec, - supports_check_mode=True - ) - - input_params = self.module.params - - self.state = input_params['state'] - self.node_ids = input_params['node_ids'] - self.cluster_name = input_params['cluster_name'] - self.preset_only = input_params['preset_only'] - - if HAS_SF_SDK is False: - self.module.fail_json( - msg="Unable to import the SolidFire Python SDK") - elif not self.preset_only: - # Cluster connection is only needed for add/delete operations - self.sfe = netapp_utils.create_sf_connection(module=self.module) - - def check_node_has_active_drives(self, node_id=None): - """ - Check if node has active drives attached to cluster - :description: Validate if node have active drives in cluster - - :return: True or False - :rtype: bool - """ - if node_id is not None: - cluster_drives = self.sfe.list_drives() - for drive in cluster_drives.drives: - if drive.node_id == node_id and drive.status == "active": - return True - return False - - @staticmethod - def extract_node_info(node_list): - summary = list() - for node in node_list: - node_dict = dict() - for key, value in vars(node).items(): - if key in ['assigned_node_id', 'cip', 'mip', 'name', 'node_id', 'pending_node_id', 'sip']: - node_dict[key] = value - summary.append(node_dict) - return summary - - def get_node_list(self): - 
""" - Get Node List - :description: Find and retrieve node_ids from the active cluster - - :return: None - :rtype: None - """ - action_nodes_list = list() - if len(self.node_ids) > 0: - unprocessed_node_list = list(self.node_ids) - list_nodes = [] - try: - all_nodes = self.sfe.list_all_nodes() - except netapp_utils.solidfire.common.ApiServerError as exception_object: - self.module.fail_json(msg='Error getting list of nodes from cluster: %s' % to_native(exception_object), - exception=traceback.format_exc()) - - # For add operation lookup for nodes list with status pendingNodes list - # else nodes will have to be traverse through active cluster - if self.state == "present": - list_nodes = all_nodes.pending_nodes - else: - list_nodes = all_nodes.nodes - - for current_node in list_nodes: - if self.state == "absent" and \ - (str(current_node.node_id) in self.node_ids or current_node.name in self.node_ids or current_node.mip in self.node_ids): - if self.check_node_has_active_drives(current_node.node_id): - self.module.fail_json(msg='Error deleting node %s: node has active drives' % current_node.name) - else: - action_nodes_list.append(current_node.node_id) - if self.state == "present" and \ - (str(current_node.pending_node_id) in self.node_ids or current_node.name in self.node_ids or current_node.mip in self.node_ids): - action_nodes_list.append(current_node.pending_node_id) - - # report an error if state == present and node is unknown - if self.state == "present": - for current_node in all_nodes.nodes: - if str(current_node.node_id) in unprocessed_node_list: - unprocessed_node_list.remove(str(current_node.node_id)) - elif current_node.name in unprocessed_node_list: - unprocessed_node_list.remove(current_node.name) - elif current_node.mip in unprocessed_node_list: - unprocessed_node_list.remove(current_node.mip) - for current_node in all_nodes.pending_nodes: - if str(current_node.pending_node_id) in unprocessed_node_list: - 
unprocessed_node_list.remove(str(current_node.pending_node_id)) - elif current_node.name in unprocessed_node_list: - unprocessed_node_list.remove(current_node.name) - elif current_node.mip in unprocessed_node_list: - unprocessed_node_list.remove(current_node.mip) - if len(unprocessed_node_list) > 0: - summary = dict( - nodes=self.extract_node_info(all_nodes.nodes), - pending_nodes=self.extract_node_info(all_nodes.pending_nodes), - pending_active_nodes=self.extract_node_info(all_nodes.pending_active_nodes) - ) - self.module.fail_json(msg='Error adding nodes %s: nodes not in pending or active lists: %s' % - (to_native(unprocessed_node_list), repr(summary))) - return action_nodes_list - - def add_node(self, nodes_list=None): - """ - Add Node that are on PendingNodes list available on Cluster - """ - try: - self.sfe.add_nodes(nodes_list, auto_install=True) - except Exception as exception_object: - self.module.fail_json(msg='Error adding nodes %s to cluster: %s' % (nodes_list, to_native(exception_object)), - exception=traceback.format_exc()) - - def remove_node(self, nodes_list=None): - """ - Remove active node from Cluster - """ - try: - self.sfe.remove_nodes(nodes_list) - except Exception as exception_object: - self.module.fail_json(msg='Error removing nodes %s from cluster %s' % (nodes_list, to_native(exception_object)), - exception=traceback.format_exc()) - - def set_cluster_name(self, node): - ''' set up cluster name for the node using its MIP ''' - cluster = dict(cluster=self.cluster_name) - port = 442 - try: - node_cx = netapp_utils.create_sf_connection(module=self.module, raise_on_connection_error=True, hostname=node, port=port) - except netapp_utils.solidfire.common.ApiConnectionError as exc: - if str(exc) == "Bad Credentials": - msg = 'Most likely the node %s is already in a cluster.' % node - msg += ' Make sure to use valid node credentials for username and password.' 
- msg += ' Node reported: %s' % repr(exc) - else: - msg = 'Failed to create connection: %s' % repr(exc) - self.module.fail_json(msg=msg) - except Exception as exc: - self.module.fail_json(msg='Failed to connect to %s:%d - %s' % (node, port, to_native(exc)), - exception=traceback.format_exc()) - - try: - cluster_config = node_cx.get_cluster_config() - except netapp_utils.solidfire.common.ApiServerError as exc: - self.module.fail_json(msg='Error getting cluster config: %s' % to_native(exc), - exception=traceback.format_exc()) - - if cluster_config.cluster.cluster == self.cluster_name: - return False - if cluster_config.cluster.state == 'Active': - self.module.fail_json(msg="Error updating cluster name for node %s, already in 'Active' state" - % node, cluster_config=repr(cluster_config)) - if self.module.check_mode: - return True - - try: - node_cx.set_cluster_config(cluster) - except netapp_utils.solidfire.common.ApiServerError as exc: - self.module.fail_json(msg='Error updating cluster name: %s' % to_native(exc), - cluster_config=repr(cluster_config), - exception=traceback.format_exc()) - return True - - def apply(self): - """ - Check, process and initiate Cluster Node operation - """ - changed = False - updated_nodes = list() - result_message = '' - if self.state == "present" and self.cluster_name is not None: - for node in self.node_ids: - if self.set_cluster_name(node): - changed = True - updated_nodes.append(node) - if not self.preset_only: - # let's see if there is anything to add or remove - action_nodes_list = self.get_node_list() - action = None - if self.state == "present" and len(action_nodes_list) > 0: - changed = True - action = 'added' - if not self.module.check_mode: - self.add_node(action_nodes_list) - elif self.state == "absent" and len(action_nodes_list) > 0: - changed = True - action = 'removed' - if not self.module.check_mode: - self.remove_node(action_nodes_list) - if action: - result_message = 'List of %s nodes: %s - requested: %s' % (action, 
to_native(action_nodes_list), to_native(self.node_ids)) - if updated_nodes: - result_message += '\n' if result_message else '' - result_message += 'List of updated nodes with %s: %s' % (self.cluster_name, updated_nodes) - self.module.exit_json(changed=changed, msg=result_message) - - -def main(): - """ - Main function - """ - - na_elementsw_node = ElementSWNode() - na_elementsw_node.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_qos_policy.py b/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_qos_policy.py deleted file mode 100644 index 9d9e16994..000000000 --- a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_qos_policy.py +++ /dev/null @@ -1,270 +0,0 @@ -#!/usr/bin/python - -# (c) 2020, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or -# https://www.gnu.org/licenses/gpl-3.0.txt) - -""" -Element Software QOS Policy -""" - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' - -module: na_elementsw_qos_policy - -short_description: NetApp Element Software create/modify/rename/delete QOS Policy -extends_documentation_fragment: - - netapp.elementsw.netapp.solidfire -version_added: 20.9.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> -description: -- Create, modify, rename, or delete QOS policy on Element Software Cluster. - -options: - - state: - description: - - Whether the specified QOS policy should exist or not. - choices: ['present', 'absent'] - default: present - type: str - - name: - description: - - Name or id for the QOS policy. - required: true - type: str - - from_name: - description: - - Name or id for the QOS policy to be renamed. - type: str - - qos: - description: - - The quality of service (QQOS) for the policy. 
- - Required for create - - Supported keys are minIOPS, maxIOPS, burstIOPS - type: dict - suboptions: - minIOPS: - description: The minimum number of IOPS guaranteed for the volume. - type: int - version_added: 21.3.0 - maxIOPS: - description: The maximum number of IOPS allowed for the volume. - type: int - version_added: 21.3.0 - burstIOPS: - description: The maximum number of IOPS allowed over a short period of time for the volume. - type: int - version_added: 21.3.0 - debug: - description: report additional information when set to true. - type: bool - default: false - version_added: 21.3.0 -''' - -EXAMPLES = """ - - name: Add QOS Policy - na_elementsw_qos_policy: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: present - name: gold - qos: {minIOPS: 1000, maxIOPS: 20000, burstIOPS: 50000} - - - name: Modify QOS Policy - na_elementsw_qos_policy: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: absent - name: gold - qos: {minIOPS: 100, maxIOPS: 5000, burstIOPS: 20000} - - - name: Rename QOS Policy - na_elementsw_qos_policy: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: absent - from_name: gold - name: silver - - - name: Remove QOS Policy - na_elementsw_qos_policy: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: absent - name: silver -""" - - -RETURN = """ -""" - -import traceback - -from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils._text import to_native -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.elementsw.plugins.module_utils.netapp_elementsw_module import NaElementSWModule -from ansible_collections.netapp.elementsw.plugins.module_utils.netapp_module 
import NetAppModule - - -HAS_SF_SDK = netapp_utils.has_sf_sdk() -try: - import solidfire.common -except ImportError: - HAS_SF_SDK = False - - -class ElementSWQosPolicy(object): - """ - Element Software QOS Policy - """ - - def __init__(self): - - self.argument_spec = netapp_utils.ontap_sf_host_argument_spec() - self.argument_spec.update(dict( - state=dict(required=False, type='str', choices=['present', 'absent'], default='present'), - name=dict(required=True, type='str'), - from_name=dict(required=False, type='str'), - qos=dict(required=False, type='dict', options=dict( - minIOPS=dict(type='int'), - maxIOPS=dict(type='int'), - burstIOPS=dict(type='int'), - )), - debug=dict(required=False, type='bool', default=False) - )) - - self.module = AnsibleModule( - argument_spec=self.argument_spec, - supports_check_mode=True - ) - - # Set up state variables - self.na_helper = NetAppModule() - self.parameters = self.na_helper.set_parameters(self.module.params) - self.qos_policy_id = None - self.debug = dict() - - if HAS_SF_SDK is False: - self.module.fail_json(msg="Unable to import the SolidFire Python SDK") - else: - self.sfe = netapp_utils.create_sf_connection(module=self.module) - - self.elementsw_helper = NaElementSWModule(self.sfe) - - # add telemetry attributes - self.attributes = self.elementsw_helper.set_element_attributes(source='na_elementsw_qos_policy') - - def get_qos_policy(self, name): - """ - Get QOS Policy - """ - policy, error = self.elementsw_helper.get_qos_policy(name) - if error is not None: - self.module.fail_json(msg=error, exception=traceback.format_exc()) - self.debug['current_policy'] = policy - return policy - - def create_qos_policy(self, name, qos): - """ - Create the QOS Policy - """ - try: - self.sfe.create_qos_policy(name=name, qos=qos) - except (solidfire.common.ApiServerError, solidfire.common.ApiConnectionError) as exc: - self.module.fail_json(msg="Error creating qos policy: %s: %s" % - (name, to_native(exc)), 
exception=traceback.format_exc()) - - def update_qos_policy(self, qos_policy_id, modify, name=None): - """ - Update the QOS Policy if the policy already exists - """ - options = dict( - qos_policy_id=qos_policy_id - ) - if name is not None: - options['name'] = name - if 'qos' in modify: - options['qos'] = modify['qos'] - - try: - self.sfe.modify_qos_policy(**options) - except (solidfire.common.ApiServerError, solidfire.common.ApiConnectionError) as exc: - self.module.fail_json(msg="Error updating qos policy: %s: %s" % - (self.parameters['from_name'] if name is not None else self.parameters['name'], to_native(exc)), - exception=traceback.format_exc()) - - def delete_qos_policy(self, qos_policy_id): - """ - Delete the QOS Policy - """ - try: - self.sfe.delete_qos_policy(qos_policy_id=qos_policy_id) - except (solidfire.common.ApiServerError, solidfire.common.ApiConnectionError) as exc: - self.module.fail_json(msg="Error deleting qos policy: %s: %s" % - (self.parameters['name'], to_native(exc)), exception=traceback.format_exc()) - - def apply(self): - """ - Process the create/delete/rename/modify actions for qos policy on the Element Software Cluster - """ - modify = dict() - current = self.get_qos_policy(self.parameters['name']) - qos_policy_id = None if current is None else current['qos_policy_id'] - cd_action = self.na_helper.get_cd_action(current, self.parameters) - modify = self.na_helper.get_modified_attributes(current, self.parameters) - if cd_action == 'create' and self.parameters.get('from_name') is not None: - from_qos_policy = self.get_qos_policy(self.parameters['from_name']) - if from_qos_policy is None: - self.module.fail_json(msg="Error renaming qos policy, no existing policy with name/id: %s" % self.parameters['from_name']) - cd_action = 'rename' - qos_policy_id = from_qos_policy['qos_policy_id'] - self.na_helper.changed = True - modify = self.na_helper.get_modified_attributes(from_qos_policy, self.parameters) - if cd_action == 'create' and 'qos' not in 
self.parameters: - self.module.fail_json(msg="Error creating qos policy: %s, 'qos:' option is required" % self.parameters['name']) - self.debug['modify'] = modify - - if not self.module.check_mode: - if cd_action == 'create': - self.create_qos_policy(self.parameters['name'], self.parameters['qos']) - elif cd_action == 'delete': - self.delete_qos_policy(qos_policy_id) - elif cd_action == 'rename': - self.update_qos_policy(qos_policy_id, modify, name=self.parameters['name']) - elif modify: - self.update_qos_policy(qos_policy_id, modify) - - results = dict(changed=self.na_helper.changed) - if self.parameters['debug']: - results['debug'] = self.debug - self.module.exit_json(**results) - - -def main(): - """ - Main function - """ - na_elementsw_qos_policy = ElementSWQosPolicy() - na_elementsw_qos_policy.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_snapshot.py b/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_snapshot.py deleted file mode 100644 index 23144e42e..000000000 --- a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_snapshot.py +++ /dev/null @@ -1,369 +0,0 @@ -#!/usr/bin/python -# (c) 2018, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or -# https://www.gnu.org/licenses/gpl-3.0.txt) - -''' -Element OS Software Snapshot Manager -''' -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' - -module: na_elementsw_snapshot - -short_description: NetApp Element Software Manage Snapshots -extends_documentation_fragment: - - netapp.elementsw.netapp.solidfire -version_added: 2.7.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> -description: - - Create, Modify or Delete Snapshot on Element OS Cluster. 
- -options: - name: - description: - - Name of new snapshot create. - - If unspecified, date and time when the snapshot was taken is used. - type: str - - state: - description: - - Whether the specified snapshot should exist or not. - choices: ['present', 'absent'] - default: 'present' - type: str - - src_volume_id: - description: - - ID or Name of active volume. - required: true - type: str - - account_id: - description: - - Account ID or Name of Parent/Source Volume. - required: true - type: str - - retention: - description: - - Retention period for the snapshot. - - Format is 'HH:mm:ss'. - type: str - - src_snapshot_id: - description: - - ID or Name of an existing snapshot. - - Required when C(state=present), to modify snapshot properties. - - Required when C(state=present), to create snapshot from another snapshot in the volume. - - Required when C(state=absent), to delete snapshot. - type: str - - enable_remote_replication: - description: - - Flag, whether to replicate the snapshot created to a remote replication cluster. - - To enable specify 'true' value. - type: bool - - snap_mirror_label: - description: - - Label used by SnapMirror software to specify snapshot retention policy on SnapMirror endpoint. - type: str - - expiration_time: - description: - - The date and time (format ISO 8601 date string) at which this snapshot will expire. 
- type: str -''' - -EXAMPLES = """ - - name: Create snapshot - tags: - - elementsw_create_snapshot - na_elementsw_snapshot: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: present - src_volume_id: 118 - account_id: sagarsh - name: newsnapshot-1 - - - name: Modify Snapshot - tags: - - elementsw_modify_snapshot - na_elementsw_snapshot: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: present - src_volume_id: sagarshansivolume - src_snapshot_id: test1 - account_id: sagarsh - expiration_time: '2018-06-16T12:24:56Z' - enable_remote_replication: false - - - name: Delete Snapshot - tags: - - elementsw_delete_snapshot - na_elementsw_snapshot: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: absent - src_snapshot_id: deltest1 - account_id: sagarsh - src_volume_id: sagarshansivolume -""" - - -RETURN = """ - -msg: - description: Success message - returned: success - type: str - -""" -import traceback - -from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils._text import to_native -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.elementsw.plugins.module_utils.netapp_elementsw_module import NaElementSWModule - - -HAS_SF_SDK = netapp_utils.has_sf_sdk() - - -class ElementOSSnapshot(object): - """ - Element OS Snapshot Manager - """ - - def __init__(self): - self.argument_spec = netapp_utils.ontap_sf_host_argument_spec() - self.argument_spec.update(dict( - state=dict(required=False, choices=['present', 'absent'], default='present'), - account_id=dict(required=True, type='str'), - name=dict(required=False, type='str'), - src_volume_id=dict(required=True, type='str'), - retention=dict(required=False, type='str'), - src_snapshot_id=dict(required=False, 
type='str'), - enable_remote_replication=dict(required=False, type='bool'), - expiration_time=dict(required=False, type='str'), - snap_mirror_label=dict(required=False, type='str') - )) - - self.module = AnsibleModule( - argument_spec=self.argument_spec, - supports_check_mode=True - ) - - input_params = self.module.params - - self.state = input_params['state'] - self.name = input_params['name'] - self.account_id = input_params['account_id'] - self.src_volume_id = input_params['src_volume_id'] - self.src_snapshot_id = input_params['src_snapshot_id'] - self.retention = input_params['retention'] - self.properties_provided = False - - self.expiration_time = input_params['expiration_time'] - if input_params['expiration_time'] is not None: - self.properties_provided = True - - self.enable_remote_replication = input_params['enable_remote_replication'] - if input_params['enable_remote_replication'] is not None: - self.properties_provided = True - - self.snap_mirror_label = input_params['snap_mirror_label'] - if input_params['snap_mirror_label'] is not None: - self.properties_provided = True - - if self.state == 'absent' and self.src_snapshot_id is None: - self.module.fail_json( - msg="Please provide required parameter : snapshot_id") - - if HAS_SF_SDK is False: - self.module.fail_json( - msg="Unable to import the SolidFire Python SDK") - else: - self.sfe = netapp_utils.create_sf_connection(module=self.module) - - self.elementsw_helper = NaElementSWModule(self.sfe) - - # add telemetry attributes - self.attributes = self.elementsw_helper.set_element_attributes(source='na_elementsw_snapshot') - - def get_account_id(self): - """ - Return account id if found - """ - try: - # Update and return self.account_id - self.account_id = self.elementsw_helper.account_exists(self.account_id) - return self.account_id - except Exception as err: - self.module.fail_json(msg="Error: account_id %s does not exist" % self.account_id, exception=to_native(err)) - - def get_src_volume_id(self): - 
""" - Return volume id if found - """ - src_vol_id = self.elementsw_helper.volume_exists(self.src_volume_id, self.account_id) - if src_vol_id is not None: - # Update and return self.volume_id - self.src_volume_id = src_vol_id - # Return src_volume_id - return self.src_volume_id - return None - - def get_snapshot(self, name=None): - """ - Return snapshot details if found - """ - src_snapshot = None - if name is not None: - src_snapshot = self.elementsw_helper.get_snapshot(name, self.src_volume_id) - elif self.src_snapshot_id is not None: - src_snapshot = self.elementsw_helper.get_snapshot(self.src_snapshot_id, self.src_volume_id) - if src_snapshot is not None: - # Update self.src_snapshot_id - self.src_snapshot_id = src_snapshot.snapshot_id - # Return src_snapshot - return src_snapshot - - def create_snapshot(self): - """ - Create Snapshot - """ - try: - self.sfe.create_snapshot(volume_id=self.src_volume_id, - snapshot_id=self.src_snapshot_id, - name=self.name, - enable_remote_replication=self.enable_remote_replication, - retention=self.retention, - snap_mirror_label=self.snap_mirror_label, - attributes=self.attributes) - except Exception as exception_object: - self.module.fail_json( - msg='Error creating snapshot %s' % ( - to_native(exception_object)), - exception=traceback.format_exc()) - - def modify_snapshot(self): - """ - Modify Snapshot Properties - """ - try: - self.sfe.modify_snapshot(snapshot_id=self.src_snapshot_id, - expiration_time=self.expiration_time, - enable_remote_replication=self.enable_remote_replication, - snap_mirror_label=self.snap_mirror_label) - except Exception as exception_object: - self.module.fail_json( - msg='Error modify snapshot %s' % ( - to_native(exception_object)), - exception=traceback.format_exc()) - - def delete_snapshot(self): - """ - Delete Snapshot - """ - try: - self.sfe.delete_snapshot(snapshot_id=self.src_snapshot_id) - except Exception as exception_object: - self.module.fail_json( - msg='Error delete snapshot %s' % ( - 
to_native(exception_object)), - exception=traceback.format_exc()) - - def apply(self): - """ - Check, process and initiate snapshot operation - """ - changed = False - result_message = None - self.get_account_id() - - # Dont proceed if source volume is not found - if self.get_src_volume_id() is None: - self.module.fail_json(msg="Volume id not found %s" % self.src_volume_id) - - # Get snapshot details using source volume - snapshot_detail = self.get_snapshot() - - if snapshot_detail: - if self.properties_provided: - if self.expiration_time != snapshot_detail.expiration_time: - changed = True - else: # To preserve value in case parameter expiration_time is not defined/provided. - self.expiration_time = snapshot_detail.expiration_time - - if self.enable_remote_replication != snapshot_detail.enable_remote_replication: - changed = True - else: # To preserve value in case parameter enable_remote_Replication is not defined/provided. - self.enable_remote_replication = snapshot_detail.enable_remote_replication - - if self.snap_mirror_label != snapshot_detail.snap_mirror_label: - changed = True - else: # To preserve value in case parameter snap_mirror_label is not defined/provided. 
- self.snap_mirror_label = snapshot_detail.snap_mirror_label - - if self.account_id is None or self.src_volume_id is None or self.module.check_mode: - changed = False - result_message = "Check mode, skipping changes" - elif self.state == 'absent' and snapshot_detail is not None: - self.delete_snapshot() - changed = True - elif self.state == 'present' and snapshot_detail is not None: - if changed: - self.modify_snapshot() # Modify Snapshot properties - elif not self.properties_provided: - if self.name is not None: - snapshot = self.get_snapshot(self.name) - # If snapshot with name already exists return without performing any action - if snapshot is None: - self.create_snapshot() # Create Snapshot using parent src_snapshot_id - changed = True - else: - self.create_snapshot() - changed = True - elif self.state == 'present': - if self.name is not None: - snapshot = self.get_snapshot(self.name) - # If snapshot with name already exists return without performing any action - if snapshot is None: - self.create_snapshot() # Create Snapshot using parent src_snapshot_id - changed = True - else: - self.create_snapshot() - changed = True - else: - changed = False - result_message = "No changes requested, skipping changes" - - self.module.exit_json(changed=changed, msg=result_message) - - -def main(): - """ - Main function - """ - - na_elementsw_snapshot = ElementOSSnapshot() - na_elementsw_snapshot.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_snapshot_restore.py b/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_snapshot_restore.py deleted file mode 100644 index 1e9d8e59a..000000000 --- a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_snapshot_restore.py +++ /dev/null @@ -1,203 +0,0 @@ -#!/usr/bin/python - -# (c) 2018, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or -# https://www.gnu.org/licenses/gpl-3.0.txt) - -""" -Element Software Snapshot 
Restore -""" - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' - -module: na_elementsw_snapshot_restore - -short_description: NetApp Element Software Restore Snapshot -extends_documentation_fragment: - - netapp.elementsw.netapp.solidfire -version_added: 2.7.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> -description: -- Element OS Cluster restore snapshot to volume. - -options: - - src_volume_id: - description: - - ID or Name of source active volume. - required: true - type: str - - src_snapshot_id: - description: - - ID or Name of an existing snapshot. - required: true - type: str - - dest_volume_name: - description: - - New Name of destination for restoring the snapshot - required: true - type: str - - account_id: - description: - - Account ID or Name of Parent/Source Volume. - required: true - type: str -''' - -EXAMPLES = """ - - name: Restore snapshot to volume - tags: - - elementsw_create_snapshot_restore - na_elementsw_snapshot_restore: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - account_id: ansible-1 - src_snapshot_id: snapshot_20171021 - src_volume_id: volume-playarea - dest_volume_name: dest-volume-area - -""" - - -RETURN = """ - -msg: - description: Success message - returned: success - type: str - -""" -import traceback - -from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils._text import to_native -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.elementsw.plugins.module_utils.netapp_elementsw_module import NaElementSWModule - -HAS_SF_SDK = netapp_utils.has_sf_sdk() - - -class ElementOSSnapshotRestore(object): - """ - Element OS Restore from snapshot - """ - - def __init__(self): - 
self.argument_spec = netapp_utils.ontap_sf_host_argument_spec() - self.argument_spec.update(dict( - account_id=dict(required=True, type='str'), - src_volume_id=dict(required=True, type='str'), - dest_volume_name=dict(required=True, type='str'), - src_snapshot_id=dict(required=True, type='str') - )) - - self.module = AnsibleModule( - argument_spec=self.argument_spec, - supports_check_mode=True - ) - - input_params = self.module.params - - self.account_id = input_params['account_id'] - self.src_volume_id = input_params['src_volume_id'] - self.dest_volume_name = input_params['dest_volume_name'] - self.src_snapshot_id = input_params['src_snapshot_id'] - - if HAS_SF_SDK is False: - self.module.fail_json( - msg="Unable to import the SolidFire Python SDK") - else: - self.sfe = netapp_utils.create_sf_connection(module=self.module) - - self.elementsw_helper = NaElementSWModule(self.sfe) - - # add telemetry attributes - self.attributes = self.elementsw_helper.set_element_attributes(source='na_elementsw_snapshot_restore') - - def get_account_id(self): - """ - Get account id if found - """ - try: - # Update and return self.account_id - self.account_id = self.elementsw_helper.account_exists(self.account_id) - return self.account_id - except Exception as err: - self.module.fail_json(msg="Error: account_id %s does not exist" % self.account_id, exception=to_native(err)) - - def get_snapshot_id(self): - """ - Return snapshot details if found - """ - src_snapshot = self.elementsw_helper.get_snapshot(self.src_snapshot_id, self.src_volume_id) - # Update and return self.src_snapshot_id - if src_snapshot: - self.src_snapshot_id = src_snapshot.snapshot_id - # Return self.src_snapshot_id - return self.src_snapshot_id - return None - - def restore_snapshot(self): - """ - Restore Snapshot to Volume - """ - try: - self.sfe.clone_volume(volume_id=self.src_volume_id, - name=self.dest_volume_name, - snapshot_id=self.src_snapshot_id, - attributes=self.attributes) - except Exception as 
exception_object: - self.module.fail_json( - msg='Error restore snapshot %s' % (to_native(exception_object)), - exception=traceback.format_exc()) - - def apply(self): - """ - Check, process and initiate restore snapshot to volume operation - """ - changed = False - result_message = None - self.get_account_id() - src_vol_id = self.elementsw_helper.volume_exists(self.src_volume_id, self.account_id) - - if src_vol_id is not None: - # Update self.src_volume_id - self.src_volume_id = src_vol_id - if self.get_snapshot_id() is not None: - # Addressing idempotency by comparing volume does not exist with same volume name - if self.elementsw_helper.volume_exists(self.dest_volume_name, self.account_id) is None: - self.restore_snapshot() - changed = True - else: - result_message = "No changes requested, Skipping changes" - else: - self.module.fail_json(msg="Snapshot id not found %s" % self.src_snapshot_id) - else: - self.module.fail_json(msg="Volume id not found %s" % self.src_volume_id) - - self.module.exit_json(changed=changed, msg=result_message) - - -def main(): - """ - Main function - """ - na_elementsw_snapshot_restore = ElementOSSnapshotRestore() - na_elementsw_snapshot_restore.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_snapshot_schedule.py b/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_snapshot_schedule.py deleted file mode 100644 index 2ace1bd4b..000000000 --- a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_snapshot_schedule.py +++ /dev/null @@ -1,586 +0,0 @@ -#!/usr/bin/python -# (c) 2017, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -"""Element SW Software Snapshot Schedule""" - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - 
-DOCUMENTATION = ''' - -module: na_elementsw_snapshot_schedule - -short_description: NetApp Element Software Snapshot Schedules -extends_documentation_fragment: - - netapp.elementsw.netapp.solidfire -version_added: 2.7.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> -description: -- Create, destroy, or update snapshot schedules on ElementSW - -options: - - state: - description: - - Whether the specified schedule should exist or not. - choices: ['present', 'absent'] - default: present - type: str - - paused: - description: - - Pause / Resume a schedule. - type: bool - - recurring: - description: - - Should the schedule recur? - type: bool - - schedule_type: - description: - - Schedule type for creating schedule. - choices: ['DaysOfWeekFrequency','DaysOfMonthFrequency','TimeIntervalFrequency'] - type: str - - time_interval_days: - description: Time interval in days. - type: int - - time_interval_hours: - description: Time interval in hours. - type: int - - time_interval_minutes: - description: Time interval in minutes. - type: int - - days_of_week_weekdays: - description: List of days of the week (Sunday to Saturday) - type: list - elements: str - - days_of_week_hours: - description: Time specified in hours - type: int - - days_of_week_minutes: - description: Time specified in minutes. - type: int - - days_of_month_monthdays: - description: List of days of the month (1-31) - type: list - elements: int - - days_of_month_hours: - description: Time specified in hours - type: int - - days_of_month_minutes: - description: Time specified in minutes. - type: int - - name: - description: - - Name for the snapshot schedule. - - It accepts either schedule_id or schedule_name - - if name is digit, it will consider as schedule_id - - If name is string, it will consider as schedule_name - required: true - type: str - - snapshot_name: - description: - - Name for the created snapshots. 
- type: str - - volumes: - description: - - Volume IDs that you want to set the snapshot schedule for. - - It accepts both volume_name and volume_id - type: list - elements: str - - account_id: - description: - - Account ID for the owner of this volume. - - It accepts either account_name or account_id - - if account_id is digit, it will consider as account_id - - If account_id is string, it will consider as account_name - type: str - - retention: - description: - - Retention period for the snapshot. - - Format is 'HH:mm:ss'. - type: str - - starting_date: - description: - - Starting date for the schedule. - - Required when C(state=present). - - "Format: C(2016-12-01T00:00:00Z)" - type: str -''' - -EXAMPLES = """ - - name: Create Snapshot schedule - na_elementsw_snapshot_schedule: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: present - name: Schedule_A - schedule_type: TimeIntervalFrequency - time_interval_days: 1 - starting_date: '2016-12-01T00:00:00Z' - retention: '24:00:00' - volumes: - - 7 - - test - account_id: 1 - - - name: Update Snapshot schedule - na_elementsw_snapshot_schedule: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: present - name: Schedule_A - schedule_type: TimeIntervalFrequency - time_interval_days: 1 - starting_date: '2016-12-01T00:00:00Z' - retention: '24:00:00' - volumes: - - 8 - - test1 - account_id: 1 - - - name: Delete Snapshot schedule - na_elementsw_snapshot_schedule: - hostname: "{{ elementsw_hostname }}" - username: "{{ elementsw_username }}" - password: "{{ elementsw_password }}" - state: absent - name: 6 -""" - -RETURN = """ - -schedule_id: - description: Schedule ID of the newly created schedule - returned: success - type: str -""" -import traceback -from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils._text import to_native -import 
ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.elementsw.plugins.module_utils.netapp_elementsw_module import NaElementSWModule - -HAS_SF_SDK = netapp_utils.has_sf_sdk() -try: - from solidfire.custom.models import DaysOfWeekFrequency, Weekday, DaysOfMonthFrequency - from solidfire.common import ApiConnectionError, ApiServerError - from solidfire.custom.models import TimeIntervalFrequency - from solidfire.models import Schedule, ScheduleInfo -except ImportError: - HAS_SF_SDK = False - -try: - # Hack to see if we we have the 1.7 version of the SDK, or later - from solidfire.common.model import VER3 - HAS_SF_SDK_1_7 = True - del VER3 -except ImportError: - HAS_SF_SDK_1_7 = False - - -class ElementSWSnapShotSchedule(object): - """ - Contains methods to parse arguments, - derive details of ElementSW objects - and send requests to ElementSW via - the ElementSW SDK - """ - - def __init__(self): - """ - Parse arguments, setup state variables, - check paramenters and ensure SDK is installed - """ - self.argument_spec = netapp_utils.ontap_sf_host_argument_spec() - self.argument_spec.update(dict( - state=dict(required=False, type='str', choices=['present', 'absent'], default='present'), - name=dict(required=True, type='str'), - schedule_type=dict(required=False, choices=['DaysOfWeekFrequency', 'DaysOfMonthFrequency', 'TimeIntervalFrequency']), - - time_interval_days=dict(required=False, type='int'), - time_interval_hours=dict(required=False, type='int'), - time_interval_minutes=dict(required=False, type='int'), - - days_of_week_weekdays=dict(required=False, type='list', elements='str'), - days_of_week_hours=dict(required=False, type='int'), - days_of_week_minutes=dict(required=False, type='int'), - - days_of_month_monthdays=dict(required=False, type='list', elements='int'), - days_of_month_hours=dict(required=False, type='int'), - days_of_month_minutes=dict(required=False, type='int'), - - 
paused=dict(required=False, type='bool'), - recurring=dict(required=False, type='bool'), - - starting_date=dict(required=False, type='str'), - - snapshot_name=dict(required=False, type='str'), - volumes=dict(required=False, type='list', elements='str'), - account_id=dict(required=False, type='str'), - retention=dict(required=False, type='str'), - )) - - self.module = AnsibleModule( - argument_spec=self.argument_spec, - required_if=[ - ('state', 'present', ['account_id', 'volumes', 'schedule_type']), - ('schedule_type', 'DaysOfMonthFrequency', ['days_of_month_monthdays']), - ('schedule_type', 'DaysOfWeekFrequency', ['days_of_week_weekdays']) - - ], - supports_check_mode=True - ) - - param = self.module.params - - # set up state variables - self.state = param['state'] - self.name = param['name'] - self.schedule_type = param['schedule_type'] - self.days_of_week_weekdays = param['days_of_week_weekdays'] - self.days_of_week_hours = param['days_of_week_hours'] - self.days_of_week_minutes = param['days_of_week_minutes'] - self.days_of_month_monthdays = param['days_of_month_monthdays'] - self.days_of_month_hours = param['days_of_month_hours'] - self.days_of_month_minutes = param['days_of_month_minutes'] - self.time_interval_days = param['time_interval_days'] - self.time_interval_hours = param['time_interval_hours'] - self.time_interval_minutes = param['time_interval_minutes'] - self.paused = param['paused'] - self.recurring = param['recurring'] - if self.schedule_type == 'DaysOfWeekFrequency': - # Create self.weekday list if self.schedule_type is days_of_week - if self.days_of_week_weekdays is not None: - # Create self.weekday list if self.schedule_type is days_of_week - self.weekdays = [] - for day in self.days_of_week_weekdays: - if str(day).isdigit(): - # If id specified, return appropriate day - self.weekdays.append(Weekday.from_id(int(day))) - else: - # If name specified, return appropriate day - self.weekdays.append(Weekday.from_name(day.capitalize())) - - if 
self.state == 'present' and self.schedule_type is None: - # Mandate schedule_type for create operation - self.module.fail_json( - msg="Please provide required parameter: schedule_type") - - # Mandate schedule name for delete operation - if self.state == 'absent' and self.name is None: - self.module.fail_json( - msg="Please provide required parameter: name") - - self.starting_date = param['starting_date'] - self.snapshot_name = param['snapshot_name'] - self.volumes = param['volumes'] - self.account_id = param['account_id'] - self.retention = param['retention'] - self.create_schedule_result = None - - if HAS_SF_SDK is False: - # Create ElementSW connection - self.module.fail_json(msg="Unable to import the ElementSW Python SDK") - else: - self.sfe = netapp_utils.create_sf_connection(module=self.module) - self.elementsw_helper = NaElementSWModule(self.sfe) - - def get_schedule(self): - # Checking whether schedule id is exist or not - # Return schedule details if found, None otherwise - # If exist set variable self.name - try: - schedule_list = self.sfe.list_schedules() - except ApiServerError: - return None - - for schedule in schedule_list.schedules: - if schedule.to_be_deleted: - # skip this schedule if it is being deleted, it can as well not exist - continue - if str(schedule.schedule_id) == self.name: - self.name = schedule.name - return schedule - elif schedule.name == self.name: - return schedule - return None - - def get_account_id(self): - # Validate account id - # Return account_id if found, None otherwise - try: - account_id = self.elementsw_helper.account_exists(self.account_id) - return account_id - except ApiServerError: - return None - - def get_volume_id(self): - # Validate volume_ids - # Return volume ids if found, fail if not found - volume_ids = [] - for volume in self.volumes: - volume_id = self.elementsw_helper.volume_exists(volume.strip(), self.account_id) - if volume_id: - volume_ids.append(volume_id) - else: - self.module.fail_json(msg='Specified 
volume %s does not exist' % volume) - return volume_ids - - def get_frequency(self): - # Configuring frequency depends on self.schedule_type - frequency = None - if self.schedule_type is not None and self.schedule_type == 'DaysOfWeekFrequency': - if self.weekdays is not None: - params = dict(weekdays=self.weekdays) - if self.days_of_week_hours is not None: - params['hours'] = self.days_of_week_hours - if self.days_of_week_minutes is not None: - params['minutes'] = self.days_of_week_minutes - frequency = DaysOfWeekFrequency(**params) - elif self.schedule_type is not None and self.schedule_type == 'DaysOfMonthFrequency': - if self.days_of_month_monthdays is not None: - params = dict(monthdays=self.days_of_month_monthdays) - if self.days_of_month_hours is not None: - params['hours'] = self.days_of_month_hours - if self.days_of_month_minutes is not None: - params['minutes'] = self.days_of_month_minutes - frequency = DaysOfMonthFrequency(**params) - elif self.schedule_type is not None and self.schedule_type == 'TimeIntervalFrequency': - params = dict() - if self.time_interval_days is not None: - params['days'] = self.time_interval_days - if self.time_interval_hours is not None: - params['hours'] = self.time_interval_hours - if self.time_interval_minutes is not None: - params['minutes'] = self.time_interval_minutes - if not params or sum(params.values()) == 0: - self.module.fail_json(msg='Specify at least one non zero value with TimeIntervalFrequency.') - frequency = TimeIntervalFrequency(**params) - return frequency - - def is_same_schedule_type(self, schedule_detail): - # To check schedule type is same or not - if str(schedule_detail.frequency).split('(', maxsplit=1)[0] == self.schedule_type: - return True - else: - return False - - def create_schedule(self): - # Create schedule - try: - frequency = self.get_frequency() - if frequency is None: - self.module.fail_json(msg='Failed to create schedule frequency object - type %s parameters' % self.schedule_type) - - # 
Create schedule - name = self.name - schedule_info = ScheduleInfo( - volume_ids=self.volumes, - snapshot_name=self.snapshot_name, - retention=self.retention - ) - if HAS_SF_SDK_1_7: - sched = Schedule(frequency, name, schedule_info) - else: - sched = Schedule(schedule_info, name, frequency) - sched.paused = self.paused - sched.recurring = self.recurring - sched.starting_date = self.starting_date - - self.create_schedule_result = self.sfe.create_schedule(sched) - - except (ApiServerError, ApiConnectionError) as exc: - self.module.fail_json(msg='Error creating schedule %s: %s' % (self.name, to_native(exc)), - exception=traceback.format_exc()) - - def delete_schedule(self, schedule_id): - # delete schedule - try: - get_schedule_result = self.sfe.get_schedule(schedule_id=schedule_id) - sched = get_schedule_result.schedule - sched.to_be_deleted = True - self.sfe.modify_schedule(schedule=sched) - - except (ApiServerError, ApiConnectionError) as exc: - self.module.fail_json(msg='Error deleting schedule %s: %s' % (self.name, to_native(exc)), - exception=traceback.format_exc()) - - def update_schedule(self, schedule_id): - # Update schedule - try: - get_schedule_result = self.sfe.get_schedule(schedule_id=schedule_id) - sched = get_schedule_result.schedule - # Update schedule properties - sched.frequency = self.get_frequency() - if sched.frequency is None: - self.module.fail_json(msg='Failed to create schedule frequency object - type %s parameters' % self.schedule_type) - - if self.volumes is not None and len(self.volumes) > 0: - sched.schedule_info.volume_ids = self.volumes - if self.retention is not None: - sched.schedule_info.retention = self.retention - if self.snapshot_name is not None: - sched.schedule_info.snapshot_name = self.snapshot_name - if self.paused is not None: - sched.paused = self.paused - if self.recurring is not None: - sched.recurring = self.recurring - if self.starting_date is not None: - sched.starting_date = self.starting_date - - # Make API call - 
self.sfe.modify_schedule(schedule=sched) - - except (ApiServerError, ApiConnectionError) as exc: - self.module.fail_json(msg='Error updating schedule %s: %s' % (self.name, to_native(exc)), - exception=traceback.format_exc()) - - def apply(self): - # Perform pre-checks, call functions and exit - - changed = False - update_schedule = False - - if self.account_id is not None: - self.account_id = self.get_account_id() - - if self.state == 'present' and self.volumes is not None: - if self.account_id: - self.volumes = self.get_volume_id() - else: - self.module.fail_json(msg='Specified account id does not exist') - - # Getting the schedule details - schedule_detail = self.get_schedule() - - if schedule_detail is None and self.state == 'present': - if len(self.volumes) > 0: - changed = True - else: - self.module.fail_json(msg='Specified volumes not on cluster') - elif schedule_detail is not None: - # Getting the schedule id - if self.state == 'absent': - changed = True - else: - # Check if we need to update the snapshot schedule - if self.retention is not None and schedule_detail.schedule_info.retention != self.retention: - update_schedule = True - changed = True - elif self.snapshot_name is not None and schedule_detail.schedule_info.snapshot_name != self.snapshot_name: - update_schedule = True - changed = True - elif self.paused is not None and schedule_detail.paused != self.paused: - update_schedule = True - changed = True - elif self.recurring is not None and schedule_detail.recurring != self.recurring: - update_schedule = True - changed = True - elif self.starting_date is not None and schedule_detail.starting_date != self.starting_date: - update_schedule = True - changed = True - elif self.volumes is not None and len(self.volumes) > 0: - for volume_id in schedule_detail.schedule_info.volume_ids: - if volume_id not in self.volumes: - update_schedule = True - changed = True - - temp_frequency = self.get_frequency() - if temp_frequency is not None: - # Checking 
schedule_type changes - if self.is_same_schedule_type(schedule_detail): - # If same schedule type - if self.schedule_type == "TimeIntervalFrequency": - # Check if there is any change in schedule.frequency, If schedule_type is time_interval - if schedule_detail.frequency.days != temp_frequency.days or \ - schedule_detail.frequency.hours != temp_frequency.hours or \ - schedule_detail.frequency.minutes != temp_frequency.minutes: - update_schedule = True - changed = True - elif self.schedule_type == "DaysOfMonthFrequency": - # Check if there is any change in schedule.frequency, If schedule_type is days_of_month - if len(schedule_detail.frequency.monthdays) != len(temp_frequency.monthdays) or \ - schedule_detail.frequency.hours != temp_frequency.hours or \ - schedule_detail.frequency.minutes != temp_frequency.minutes: - update_schedule = True - changed = True - elif len(schedule_detail.frequency.monthdays) == len(temp_frequency.monthdays): - actual_frequency_monthday = schedule_detail.frequency.monthdays - temp_frequency_monthday = temp_frequency.monthdays - for monthday in actual_frequency_monthday: - if monthday not in temp_frequency_monthday: - update_schedule = True - changed = True - elif self.schedule_type == "DaysOfWeekFrequency": - # Check if there is any change in schedule.frequency, If schedule_type is days_of_week - if len(schedule_detail.frequency.weekdays) != len(temp_frequency.weekdays) or \ - schedule_detail.frequency.hours != temp_frequency.hours or \ - schedule_detail.frequency.minutes != temp_frequency.minutes: - update_schedule = True - changed = True - elif len(schedule_detail.frequency.weekdays) == len(temp_frequency.weekdays): - actual_frequency_weekdays = schedule_detail.frequency.weekdays - temp_frequency_weekdays = temp_frequency.weekdays - if len([actual_weekday for actual_weekday, temp_weekday in - zip(actual_frequency_weekdays, temp_frequency_weekdays) if actual_weekday != temp_weekday]) != 0: - update_schedule = True - changed = True - else: 
- update_schedule = True - changed = True - else: - self.module.fail_json(msg='Failed to create schedule frequency object - type %s parameters' % self.schedule_type) - - result_message = " " - if changed: - if self.module.check_mode: - # Skip changes - result_message = "Check mode, skipping changes" - else: - if self.state == 'present': - if update_schedule: - self.update_schedule(schedule_detail.schedule_id) - result_message = "Snapshot Schedule modified" - else: - self.create_schedule() - result_message = "Snapshot Schedule created" - elif self.state == 'absent': - self.delete_schedule(schedule_detail.schedule_id) - result_message = "Snapshot Schedule deleted" - - self.module.exit_json(changed=changed, msg=result_message) - - -def main(): - sss = ElementSWSnapShotSchedule() - sss.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_vlan.py b/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_vlan.py deleted file mode 100644 index 299338ad5..000000000 --- a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_vlan.py +++ /dev/null @@ -1,274 +0,0 @@ -#!/usr/bin/python -# (c) 2018, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' - -module: na_elementsw_vlan - -short_description: NetApp Element Software Manage VLAN -extends_documentation_fragment: - - netapp.elementsw.netapp.solidfire -version_added: 2.7.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> -description: -- Create, delete, modify VLAN - -options: - - state: - description: - - Whether the specified vlan should exist or not. 
- choices: ['present', 'absent'] - default: present - type: str - - vlan_tag: - description: - - Virtual Network Tag - required: true - type: str - - name: - description: - - User defined name for the new VLAN - - Name of the vlan is unique - - Required for create - type: str - - svip: - description: - - Storage virtual IP which is unique - - Required for create - type: str - - address_blocks: - description: - - List of address blocks for the VLAN - - Each address block contains the starting IP address and size for the block - - Required for create - type: list - elements: dict - - netmask: - description: - - Netmask for the VLAN - - Required for create - type: str - - gateway: - description: - - Gateway for the VLAN - type: str - - namespace: - description: - - Enable or disable namespaces - type: bool - - attributes: - description: - - Dictionary of attributes with name and value for each attribute - type: dict - -''' - -EXAMPLES = """ -- name: Create vlan - na_elementsw_vlan: - state: present - name: test - vlan_tag: 1 - svip: "{{ ip address }}" - netmask: "{{ netmask }}" - address_blocks: - - start: "{{ starting ip_address }}" - size: 5 - - start: "{{ starting ip_address }}" - size: 5 - hostname: "{{ netapp_hostname }}" - username: "{{ netapp_username }}" - password: "{{ netapp_password }}" - -- name: Delete Lun - na_elementsw_vlan: - state: absent - vlan_tag: 1 - hostname: "{{ netapp_hostname }}" - username: "{{ netapp_username }}" - password: "{{ netapp_password }}" -""" - -RETURN = """ - -""" - -from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils._text import to_native -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.elementsw.plugins.module_utils.netapp_module import NetAppModule -from ansible_collections.netapp.elementsw.plugins.module_utils.netapp_elementsw_module import NaElementSWModule - -HAS_SF_SDK = netapp_utils.has_sf_sdk() -try: - import 
class ElementSWVlan(object):
    """Manage (create / modify / delete) virtual networks on an ElementSW cluster."""

    def __init__(self):
        """Build the argument spec, validate module input and open the SDK connection."""
        self.argument_spec = netapp_utils.ontap_sf_host_argument_spec()
        self.argument_spec.update(dict(
            state=dict(required=False, choices=['present', 'absent'],
                       default='present'),
            name=dict(required=False, type='str'),
            vlan_tag=dict(required=True, type='str'),
            svip=dict(required=False, type='str'),
            netmask=dict(required=False, type='str'),
            gateway=dict(required=False, type='str'),
            namespace=dict(required=False, type='bool'),
            attributes=dict(required=False, type='dict'),
            address_blocks=dict(required=False, type='list', elements='dict')
        ))

        self.module = AnsibleModule(argument_spec=self.argument_spec,
                                    supports_check_mode=True)

        if not HAS_SF_SDK:
            self.module.fail_json(msg="Unable to import the SolidFire Python SDK")
        else:
            self.elem = netapp_utils.create_sf_connection(module=self.module)

        self.na_helper = NetAppModule()
        self.parameters = self.na_helper.set_parameters(self.module.params)
        self.elementsw_helper = NaElementSWModule(self.elem)

        # Merge telemetry attributes into whatever the caller supplied.
        telemetry = self.elementsw_helper.set_element_attributes(source='na_elementsw_vlan')
        if self.parameters.get('attributes') is not None:
            self.parameters['attributes'].update(telemetry)
        else:
            self.parameters['attributes'] = telemetry

    def validate_keys(self):
        """Fail the module unless every field required to create a VLAN is present."""
        required_keys = ['address_blocks', 'svip', 'netmask', 'name']
        if not all(field in self.parameters for field in required_keys):
            self.module.fail_json(msg="One or more required fields %s for creating VLAN is missing"
                                  % required_keys)
        addr_blk_fields = ['start', 'size']
        for block in self.parameters['address_blocks']:
            if any(field not in block for field in addr_blk_fields):
                self.module.fail_json(msg="One or more required fields %s for address blocks is missing"
                                      % addr_blk_fields)

    def create_network(self):
        """Create the VLAN on the cluster."""
        try:
            self.validate_keys()
            # Connection/state/tag parameters are stripped; everything else is
            # forwarded to the SDK call unchanged.
            excluded = ('username', 'hostname', 'password', 'state', 'vlan_tag')
            payload = dict((key, value) for key, value in self.parameters.items()
                           if key not in excluded)
            self.elem.add_virtual_network(virtual_network_tag=self.parameters['vlan_tag'], **payload)
        except solidfire.common.ApiServerError as err:
            self.module.fail_json(msg="Error creating VLAN %s" % self.parameters['vlan_tag'],
                                  exception=to_native(err))

    def delete_network(self):
        """Remove the VLAN from the cluster."""
        try:
            self.elem.remove_virtual_network(virtual_network_tag=self.parameters['vlan_tag'])
        except solidfire.common.ApiServerError as err:
            self.module.fail_json(msg="Error deleting VLAN %s" % self.parameters['vlan_tag'],
                                  exception=to_native(err))

    def modify_network(self, modify):
        """Apply the changed attributes collected in ``modify`` to the VLAN."""
        try:
            self.elem.modify_virtual_network(virtual_network_tag=self.parameters['vlan_tag'], **modify)
        except solidfire.common.ApiServerError as err:
            self.module.fail_json(msg="Error modifying VLAN %s" % self.parameters['vlan_tag'],
                                  exception=to_native(err))

    def get_network_details(self):
        """Return a dict describing the VLAN with this tag, or None when absent."""
        listing = self.elem.list_virtual_networks(virtual_network_tag=self.parameters['vlan_tag'])
        for vlan in listing.virtual_networks:
            if vlan is None:
                continue
            return {
                'name': vlan.name,
                'address_blocks': [{'start': block.start, 'size': block.size}
                                   for block in vlan.address_blocks],
                'svip': vlan.svip,
                'gateway': vlan.gateway,
                'netmask': vlan.netmask,
                'namespace': vlan.namespace,
                'attributes': vlan.attributes,
            }
        return None

    def apply(self):
        """Determine the needed action (create / delete / modify) and perform it."""
        current = self.get_network_details()
        cd_action = self.na_helper.get_cd_action(current, self.parameters)
        modify = self.na_helper.get_modified_attributes(current, self.parameters)
        if not self.module.check_mode:
            if cd_action == "create":
                self.create_network()
            elif cd_action == "delete":
                self.delete_network()
            elif modify:
                if 'attributes' in modify:
                    # new attributes will replace existing ones
                    modify['attributes'] = self.parameters['attributes']
                self.modify_network(modify)
        self.module.exit_json(changed=self.na_helper.changed)


def main():
    """ Apply vlan actions """
    ElementSWVlan().apply()


if __name__ == '__main__':
    main()
- choices: ['present', 'absent'] - default: present - type: str - - name: - description: - - The name of the volume to manage. - - It accepts volume_name or volume_id - required: true - type: str - - account_id: - description: - - Account ID for the owner of this volume. - - It accepts Account_id or Account_name - required: true - type: str - - enable512e: - description: - - Required when C(state=present) - - Should the volume provide 512-byte sector emulation? - type: bool - aliases: - - enable512emulation - - qos: - description: Initial quality of service settings for this volume. Configure as dict in playbooks. - type: dict - - qos_policy_name: - description: - - Quality of service policy for this volume. - - It can be a name or an id. - - Mutually exclusive with C(qos) option. - type: str - - attributes: - description: A YAML dictionary of attributes that you would like to apply on this volume. - type: dict - - size: - description: - - The size of the volume in (size_unit). - - Required when C(state = present). - type: int - - size_unit: - description: - - The unit used to interpret the size parameter. - choices: ['bytes', 'b', 'kb', 'mb', 'gb', 'tb', 'pb', 'eb', 'zb', 'yb'] - default: 'gb' - type: str - - access: - description: - - Access allowed for the volume. - - readOnly Only read operations are allowed. - - readWrite Reads and writes are allowed. - - locked No reads or writes are allowed. - - replicationTarget Identify a volume as the target volume for a paired set of volumes. - - If the volume is not paired, the access status is locked. - - If unspecified, the access settings of the clone will be the same as the source. 
class ElementSWVolume(object):
    """
    Manage (create / update / delete) volumes on an ElementSW cluster.

    Parses module arguments, derives details of ElementSW objects and sends
    requests to ElementOS via the ElementSW SDK.
    """

    def __init__(self):
        """
        Parse arguments, set up state variables,
        check parameters and ensure the SDK is installed.
        """
        # Maps size-unit suffix ('kb', 'gb', ...) to its byte multiplier.
        self._size_unit_map = netapp_utils.SF_BYTE_MAP

        self.argument_spec = netapp_utils.ontap_sf_host_argument_spec()
        self.argument_spec.update(dict(
            state=dict(required=False, type='str', choices=['present', 'absent'], default='present'),
            name=dict(required=True, type='str'),
            # No type constraint: accepts a numeric account id or an account name.
            account_id=dict(required=True),
            enable512e=dict(required=False, type='bool', aliases=['enable512emulation']),
            qos=dict(required=False, type='dict', default=None),
            qos_policy_name=dict(required=False, type='str', default=None),
            attributes=dict(required=False, type='dict', default=None),
            size=dict(type='int'),
            size_unit=dict(default='gb',
                           choices=['bytes', 'b', 'kb', 'mb', 'gb', 'tb',
                                    'pb', 'eb', 'zb', 'yb'], type='str'),

            access=dict(required=False, type='str', default=None,
                        choices=['readOnly', 'readWrite', 'locked', 'replicationTarget']),
        ))

        self.module = AnsibleModule(
            argument_spec=self.argument_spec,
            required_if=[
                ('state', 'present', ['size', 'enable512e'])
            ],
            mutually_exclusive=[
                ('qos', 'qos_policy_name'),
            ],
            supports_check_mode=True
        )

        param = self.module.params

        # set up state variables
        self.state = param['state']
        self.name = param['name']
        self.account_id = param['account_id']
        self.enable512e = param['enable512e']
        self.qos = param['qos']
        self.qos_policy_name = param['qos_policy_name']
        self.attributes = param['attributes']
        self.access = param['access']
        self.size_unit = param['size_unit']
        # Normalize the requested size to bytes; None means "size not specified".
        if param['size'] is not None:
            self.size = param['size'] * self._size_unit_map[self.size_unit]
        else:
            self.size = None

        if HAS_SF_SDK is False:
            self.module.fail_json(msg="Unable to import the ElementSW Python SDK")
        else:
            try:
                self.sfe = netapp_utils.create_sf_connection(module=self.module)
            except solidfire.common.ApiServerError:
                self.module.fail_json(msg="Unable to create the connection")

        self.elementsw_helper = NaElementSWModule(self.sfe)

        # add telemetry attributes
        if self.attributes is not None:
            self.attributes.update(self.elementsw_helper.set_element_attributes(source='na_elementsw_volume'))
        else:
            self.attributes = self.elementsw_helper.set_element_attributes(source='na_elementsw_volume')

    def get_account_id(self):
        """
        Resolve self.account_id (name or id) to a numeric account id.

        Fails the module when the account does not exist; otherwise updates
        and returns self.account_id.
        """
        try:
            # Update and return self.account_id
            self.account_id = self.elementsw_helper.account_exists(self.account_id)
        except Exception as err:
            self.module.fail_json(msg="Error: account_id %s does not exist" % self.account_id, exception=to_native(err))
        return self.account_id

    def get_qos_policy(self, name):
        """
        Look up a QOS policy by name or id; fail the module on lookup error.

        :param name: policy name or id.
        :return: the policy object, or None when no policy matches.
        """
        policy, error = self.elementsw_helper.get_qos_policy(name)
        if error is not None:
            self.module.fail_json(msg=error)
        return policy

    def get_volume(self):
        """
        Return the existing volume's details, or None when it does not exist.
        """
        # Get volume details
        volume_id = self.elementsw_helper.volume_exists(self.name, self.account_id)

        if volume_id is not None:
            # Return volume_details
            volume_details = self.elementsw_helper.get_volume(volume_id)
            if volume_details is not None:
                return volume_details
        return None

    def create_volume(self, qos_policy_id):
        """
        Create the volume; fails the module on SDK error (returns None).

        :param qos_policy_id: optional id of a QOS policy to attach.
        """
        options = dict(
            name=self.name,
            account_id=self.account_id,
            total_size=self.size,
            enable512e=self.enable512e,
            attributes=self.attributes
        )
        # qos and qos_policy_id are mutually exclusive at the module level,
        # so at most one of these is added.
        if qos_policy_id is not None:
            options['qos_policy_id'] = qos_policy_id
        if self.qos is not None:
            options['qos'] = self.qos
        try:
            self.sfe.create_volume(**options)
        except Exception as err:
            self.module.fail_json(msg="Error provisioning volume: %s of size: %s" % (self.name, self.size),
                                  exception=to_native(err))

    def delete_volume(self, volume_id):
        """
        Delete and purge the volume; fails the module on SDK error.

        :param volume_id: id of the volume to remove.
        """
        try:
            self.sfe.delete_volume(volume_id=volume_id)
            self.sfe.purge_deleted_volume(volume_id=volume_id)
            # Delete method will delete and also purge the volume instead of moving the volume state to inactive.

        except Exception as err:
            # Throwing the exact error message instead of generic error message
            self.module.fail_json(msg='Error deleting volume: %s, %s' % (str(volume_id), to_native(err)),
                                  exception=to_native(err))

    def update_volume(self, volume_id, qos_policy_id):
        """
        Modify the volume with the requested parameters; fails the module on SDK error.

        :param volume_id: id of the volume to modify.
        :param qos_policy_id: optional QOS policy id to attach.
        """
        options = dict(
            attributes=self.attributes
        )
        if self.access is not None:
            options['access'] = self.access
        if self.account_id is not None:
            options['account_id'] = self.account_id
        if self.qos is not None:
            options['qos'] = self.qos
        if qos_policy_id is not None:
            options['qos_policy_id'] = qos_policy_id
        if self.size is not None:
            options['total_size'] = self.size
        try:
            self.sfe.modify_volume(volume_id, **options)
        except Exception as err:
            # Throwing the exact error message instead of generic error message
            self.module.fail_json(msg='Error updating volume: %s, %s' % (str(volume_id), to_native(err)),
                                  exception=to_native(err))

    def apply(self):
        # Perform pre-checks, call functions and exit
        changed = False
        qos_policy_id = None
        action = None

        self.get_account_id()
        volume_detail = self.get_volume()

        # Resolve the QOS policy name to an id up front (only relevant for 'present').
        if self.state == 'present' and self.qos_policy_name is not None:
            policy = self.get_qos_policy(self.qos_policy_name)
            if policy is None:
                error = 'Cannot find qos policy with name/id: %s' % self.qos_policy_name
                self.module.fail_json(msg=error)
            qos_policy_id = policy['qos_policy_id']

        if volume_detail:
            volume_id = volume_detail.volume_id
            if self.state == 'absent':
                action = 'delete'

            elif self.state == 'present':
                # Checking all the params for update operation
                if self.access is not None and volume_detail.access != self.access:
                    action = 'update'

                if self.account_id is not None and volume_detail.account_id != self.account_id:
                    action = 'update'

                if qos_policy_id is not None and volume_detail.qos_policy_id != qos_policy_id:
                    # volume_detail.qos_policy_id may be None if no policy is associated with the volume
                    action = 'update'

                if self.qos is not None and volume_detail.qos_policy_id is not None:
                    # remove qos_policy
                    action = 'update'

                if self.qos is not None:
                    # Actual volume_detail.qos has ['burst_iops', 'burst_time', 'curve', 'max_iops', 'min_iops'] keys.
                    # As only minOPS, maxOPS, burstOPS is important to consider, checking only these values.
                    # NOTE(review): assumes the user-supplied qos dict carries the
                    # minIOPS/maxIOPS/burstIOPS keys — a partial dict raises KeyError; confirm intended.
                    volume_qos = vars(volume_detail.qos)
                    if volume_qos['min_iops'] != self.qos['minIOPS'] or volume_qos['max_iops'] != self.qos['maxIOPS'] \
                            or volume_qos['burst_iops'] != self.qos['burstIOPS']:
                        action = 'update'

                if self.size is not None and volume_detail.total_size is not None and volume_detail.total_size != self.size:
                    size_difference = abs(float(volume_detail.total_size - self.size))
                    # Change size only if difference is bigger than 0.001
                    if size_difference / self.size > 0.001:
                        action = 'update'

                if self.attributes is not None and volume_detail.attributes != self.attributes:
                    action = 'update'

        elif self.state == 'present':
            action = 'create'

        result_message = ""

        if action is not None:
            changed = True
            if self.module.check_mode:
                result_message = "Check mode, skipping changes"
            else:
                if action == 'create':
                    self.create_volume(qos_policy_id)
                    result_message = "Volume created"
                elif action == 'update':
                    self.update_volume(volume_id, qos_policy_id)
                    result_message = "Volume updated"
                elif action == 'delete':
                    self.delete_volume(volume_id)
                    result_message = "Volume deleted"

        self.module.exit_json(changed=changed, msg=result_message)


def main():
    # Create object and call apply
    na_elementsw_volume = ElementSWVolume()
    na_elementsw_volume.apply()


if __name__ == '__main__':
    main()
100644 index 186ca85bc..000000000 --- a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_volume_clone.py +++ /dev/null @@ -1,276 +0,0 @@ -#!/usr/bin/python - -# (c) 2018, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or -# https://www.gnu.org/licenses/gpl-3.0.txt) - -"""Element Software volume clone""" - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' - -module: na_elementsw_volume_clone - -short_description: NetApp Element Software Create Volume Clone -extends_documentation_fragment: - - netapp.elementsw.netapp.solidfire -version_added: 2.7.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> -description: -- Create volume clones on Element OS - -options: - - name: - description: - - The name of the clone. - required: true - type: str - - src_volume_id: - description: - - The id of the src volume to clone. id may be a numeric identifier or a volume name. - required: true - type: str - - src_snapshot_id: - description: - - The id of the snapshot to clone. id may be a numeric identifier or a snapshot name. - type: str - - account_id: - description: - - Account ID for the owner of this cloned volume. id may be a numeric identifier or an account name. - required: true - type: str - - attributes: - description: A YAML dictionary of attributes that you would like to apply on this cloned volume. - type: dict - - size: - description: - - The size of the cloned volume in (size_unit). - type: int - - size_unit: - description: - - The unit used to interpret the size parameter. - choices: ['bytes', 'b', 'kb', 'mb', 'gb', 'tb', 'pb', 'eb', 'zb', 'yb'] - default: 'gb' - type: str - - access: - choices: ['readOnly', 'readWrite', 'locked', 'replicationTarget'] - description: - - Access allowed for the volume. 
class ElementOSVolumeClone(object):
    """
    Create volume clones on an Element OS cluster.

    Parses module arguments, derives details of Element Software objects and
    sends requests to Element OS via the SolidFire SDK.
    """

    def __init__(self):
        """
        Parse arguments, set up state variables,
        check parameters and ensure the SDK is installed.
        """
        # Maps size-unit suffix ('kb', 'gb', ...) to its byte multiplier.
        self._size_unit_map = netapp_utils.SF_BYTE_MAP

        self.argument_spec = netapp_utils.ontap_sf_host_argument_spec()
        self.argument_spec.update(dict(
            # name/src_volume_id/account_id have no type constraint: each
            # accepts a numeric id or a name (resolved later by the helpers).
            name=dict(required=True),
            src_volume_id=dict(required=True),
            src_snapshot_id=dict(),
            account_id=dict(required=True),
            attributes=dict(type='dict', default=None),
            size=dict(type='int'),
            size_unit=dict(default='gb',
                           choices=['bytes', 'b', 'kb', 'mb', 'gb', 'tb',
                                    'pb', 'eb', 'zb', 'yb'], type='str'),
            access=dict(type='str',
                        default=None, choices=['readOnly', 'readWrite',
                                               'locked', 'replicationTarget']),

        ))

        self.module = AnsibleModule(
            argument_spec=self.argument_spec,
            supports_check_mode=True
        )

        parameters = self.module.params

        # set up state variables
        self.name = parameters['name']
        self.src_volume_id = parameters['src_volume_id']
        self.src_snapshot_id = parameters['src_snapshot_id']
        self.account_id = parameters['account_id']
        self.attributes = parameters['attributes']

        self.size_unit = parameters['size_unit']
        # Normalize the requested size to bytes; None means "same as source".
        if parameters['size'] is not None:
            self.size = parameters['size'] * \
                self._size_unit_map[self.size_unit]
        else:
            self.size = None
        self.access = parameters['access']

        if HAS_SF_SDK is False:
            self.module.fail_json(
                msg="Unable to import the SolidFire Python SDK")
        else:
            self.sfe = netapp_utils.create_sf_connection(module=self.module)

        self.elementsw_helper = NaElementSWModule(self.sfe)

        # add telemetry attributes
        if self.attributes is not None:
            self.attributes.update(self.elementsw_helper.set_element_attributes(source='na_elementsw_volume_clone'))
        else:
            self.attributes = self.elementsw_helper.set_element_attributes(source='na_elementsw_volume_clone')

    def get_account_id(self):
        """
        Resolve self.account_id (name or id) to a numeric account id.

        Fails the module when the account does not exist; otherwise updates
        and returns self.account_id.
        """
        try:
            # Update and return self.account_id
            self.account_id = self.elementsw_helper.account_exists(self.account_id)
            return self.account_id
        except Exception as err:
            self.module.fail_json(msg="Error: account_id %s does not exist" % self.account_id, exception=to_native(err))

    def get_snapshot_id(self):
        """
        Resolve self.src_snapshot_id (name or id) to a numeric snapshot id.

        :return: the snapshot id, or None when no matching snapshot exists.
        """
        src_snapshot = self.elementsw_helper.get_snapshot(self.src_snapshot_id, self.src_volume_id)
        # Update and return self.src_snapshot_id
        if src_snapshot is not None:
            self.src_snapshot_id = src_snapshot.snapshot_id
            # Return src_snapshot
            return self.src_snapshot_id
        return None

    def get_src_volume_id(self):
        """
        Resolve self.src_volume_id (name or id) to a numeric volume id.

        :return: the volume id, or None when the source volume does not exist.
        """
        src_vol_id = self.elementsw_helper.volume_exists(self.src_volume_id, self.account_id)
        if src_vol_id is not None:
            # Update and return self.volume_id
            self.src_volume_id = src_vol_id
            # Return src_volume_id
            return self.src_volume_id
        return None

    def clone_volume(self):
        """Clone Volume from source; fails the module on SDK error."""
        try:
            self.sfe.clone_volume(volume_id=self.src_volume_id,
                                  name=self.name,
                                  new_account_id=self.account_id,
                                  new_size=self.size,
                                  access=self.access,
                                  snapshot_id=self.src_snapshot_id,
                                  attributes=self.attributes)

        except Exception as err:
            self.module.fail_json(msg="Error creating clone %s of size %s" % (self.name, self.size), exception=to_native(err))

    def apply(self):
        """Perform pre-checks, call functions and exit"""
        changed = False
        result_message = ""

        if self.get_account_id() is None:
            self.module.fail_json(msg="Account id not found: %s" % (self.account_id))

        # there is only one state. other operations
        # are part of the volume module

        # ensure that a volume with the clone name
        # isn't already present
        if self.elementsw_helper.volume_exists(self.name, self.account_id) is None:
            # check for the source volume
            if self.get_src_volume_id() is not None:
                # check for a valid snapshot
                if self.src_snapshot_id and not self.get_snapshot_id():
                    self.module.fail_json(msg="Snapshot id not found: %s" % (self.src_snapshot_id))
                # change required
                changed = True
            else:
                self.module.fail_json(msg="Volume id not found %s" % (self.src_volume_id))

        if changed:
            if self.module.check_mode:
                result_message = "Check mode, skipping changes"
            else:
                self.clone_volume()
                result_message = "Volume cloned"

        self.module.exit_json(changed=changed, msg=result_message)


def main():
    """Create object and call apply"""
    volume_clone = ElementOSVolumeClone()
    volume_clone.apply()


if __name__ == '__main__':
    main()
a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_volume_pair.py b/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_volume_pair.py deleted file mode 100644 index 0d5b38a0d..000000000 --- a/ansible_collections/netapp/elementsw/plugins/modules/na_elementsw_volume_pair.py +++ /dev/null @@ -1,293 +0,0 @@ -#!/usr/bin/python -# (c) 2017, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -from __future__ import absolute_import, division, print_function - -__metaclass__ = type - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - -DOCUMENTATION = ''' - -module: na_elementsw_volume_pair - -short_description: NetApp Element Software Volume Pair -extends_documentation_fragment: - - netapp.elementsw.netapp.solidfire -version_added: 2.7.0 -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> -description: -- Create, delete volume pair - -options: - - state: - description: - - Whether the specified volume pair should exist or not. - choices: ['present', 'absent'] - default: present - type: str - - src_volume: - description: - - Source volume name or volume ID - required: true - type: str - - src_account: - description: - - Source account name or ID - required: true - type: str - - dest_volume: - description: - - Destination volume name or volume ID - required: true - type: str - - dest_account: - description: - - Destination account name or ID - required: true - type: str - - mode: - description: - - Mode to start the volume pairing - choices: ['async', 'sync', 'snapshotsonly'] - default: async - type: str - - dest_mvip: - description: - - Destination IP address of the paired cluster. - required: true - type: str - - dest_username: - description: - - Destination username for the paired cluster - - Optional if this is same as source cluster username. 
class ElementSWVolumePair(object):
    ''' class to handle volume pairing operations '''

    def __init__(self):
        """
        Setup Ansible parameters and SolidFire connections to both the source
        and the destination cluster.
        """
        self.argument_spec = netapp_utils.ontap_sf_host_argument_spec()
        self.argument_spec.update(dict(
            state=dict(required=False, choices=['present', 'absent'],
                       default='present'),
            src_volume=dict(required=True, type='str'),
            src_account=dict(required=True, type='str'),
            dest_volume=dict(required=True, type='str'),
            dest_account=dict(required=True, type='str'),
            mode=dict(required=False, type='str',
                      choices=['async', 'sync', 'snapshotsonly'],
                      default='async'),
            dest_mvip=dict(required=True, type='str'),
            dest_username=dict(required=False, type='str'),
            dest_password=dict(required=False, type='str', no_log=True)
        ))

        self.module = AnsibleModule(
            argument_spec=self.argument_spec,
            supports_check_mode=True
        )

        if HAS_SF_SDK is False:
            self.module.fail_json(msg="Unable to import the SolidFire Python SDK")
        else:
            self.elem = netapp_utils.create_sf_connection(module=self.module)

        self.elementsw_helper = NaElementSWModule(self.elem)
        self.na_helper = NetAppModule()
        self.parameters = self.na_helper.set_parameters(self.module.params)
        # get element_sw_connection for destination cluster
        # overwrite existing source host, user and password with destination credentials
        self.module.params['hostname'] = self.parameters['dest_mvip']
        # username and password is same as source,
        # if dest_username and dest_password aren't specified
        if self.parameters.get('dest_username'):
            self.module.params['username'] = self.parameters['dest_username']
        if self.parameters.get('dest_password'):
            self.module.params['password'] = self.parameters['dest_password']
        self.dest_elem = netapp_utils.create_sf_connection(module=self.module)
        self.dest_elementsw_helper = NaElementSWModule(self.dest_elem)

    def check_if_already_paired(self, vol_id, elem=None):
        """
        Check for idempotency
        A volume can have only one pair
        Return paired-volume-id if volume is paired already
        None if volume is not paired

        :param vol_id: id of the volume to inspect.
        :param elem: SDK connection to query; defaults to the source cluster
                     connection, which keeps the original call signature working.
        """
        if elem is None:
            elem = self.elem
        paired_volumes = elem.list_volumes(volume_ids=[vol_id],
                                           is_paired=True)
        for vol in paired_volumes.volumes:
            for pair in vol.volume_pairs:
                if pair is not None:
                    return pair.remote_volume_id
        return None

    def pair_volumes(self):
        """
        Start volume pairing on source, and complete on target volume
        """
        try:
            pair_key = self.elem.start_volume_pairing(
                volume_id=self.parameters['src_vol_id'],
                mode=self.parameters['mode'])
            self.dest_elem.complete_volume_pairing(
                volume_pairing_key=pair_key.volume_pairing_key,
                volume_id=self.parameters['dest_vol_id'])
        except solidfire.common.ApiServerError as err:
            self.module.fail_json(msg="Error pairing volume id %s"
                                      % (self.parameters['src_vol_id']),
                                  exception=to_native(err))

    def pairing_exists(self, src_id, dest_id):
        """
        Return True when either volume is already paired, None otherwise.

        None (not False) marks the unpaired case because the result feeds
        NetAppModule.get_cd_action(), which treats None as "resource absent".
        """
        # Bug fix: use the ids passed in (previously both arguments were
        # ignored in favor of self.parameters) and query the destination
        # volume on the destination cluster rather than the source cluster.
        src_paired = self.check_if_already_paired(src_id)
        dest_paired = self.check_if_already_paired(dest_id, self.dest_elem)
        if src_paired is not None or dest_paired is not None:
            return True
        return None

    def unpair_volumes(self):
        """
        Delete volume pair
        """
        try:
            self.elem.remove_volume_pair(volume_id=self.parameters['src_vol_id'])
            self.dest_elem.remove_volume_pair(volume_id=self.parameters['dest_vol_id'])
        except solidfire.common.ApiServerError as err:
            self.module.fail_json(msg="Error unpairing volume ids %s and %s"
                                      % (self.parameters['src_vol_id'],
                                         self.parameters['dest_vol_id']),
                                  exception=to_native(err))

    def get_account_id(self, account, type):
        """
        Resolve an account name/id on the requested cluster and store it in
        self.parameters['src_account_id' / 'dest_account_id'].

        :param account: account name or id.
        :param type: 'src' or 'dest'. (Parameter name shadows the builtin but
                     is kept to preserve the keyword-argument interface.)
        """
        try:
            if type == 'src':
                self.parameters['src_account_id'] = self.elementsw_helper.account_exists(account)
            elif type == 'dest':
                self.parameters['dest_account_id'] = self.dest_elementsw_helper.account_exists(account)
        except solidfire.common.ApiServerError as err:
            self.module.fail_json(msg="Error: either account %s or %s does not exist"
                                      % (self.parameters['src_account'],
                                         self.parameters['dest_account']),
                                  exception=to_native(err))

    def get_volume_id(self, volume, type):
        """
        Resolve a volume name/id on the requested cluster and store it in
        self.parameters['src_vol_id' / 'dest_vol_id']; fail when it is absent.

        :param volume: volume name or id.
        :param type: 'src' or 'dest' (see note on get_account_id).
        """
        if type == 'src':
            self.parameters['src_vol_id'] = self.elementsw_helper.volume_exists(volume, self.parameters['src_account_id'])
            if self.parameters['src_vol_id'] is None:
                self.module.fail_json(msg="Error: source volume %s does not exist"
                                          % (self.parameters['src_volume']))
        elif type == 'dest':
            self.parameters['dest_vol_id'] = self.dest_elementsw_helper.volume_exists(volume, self.parameters['dest_account_id'])
            if self.parameters['dest_vol_id'] is None:
                self.module.fail_json(msg="Error: destination volume %s does not exist"
                                          % (self.parameters['dest_volume']))

    def get_ids(self):
        """
        Get IDs for volumes and accounts
        """
        self.get_account_id(self.parameters['src_account'], 'src')
        self.get_account_id(self.parameters['dest_account'], 'dest')
        self.get_volume_id(self.parameters['src_volume'], 'src')
        self.get_volume_id(self.parameters['dest_volume'], 'dest')

    def apply(self):
        """
        Call create / delete volume pair methods
        """
        self.get_ids()
        paired = self.pairing_exists(self.parameters['src_vol_id'],
                                     self.parameters['dest_vol_id'])
        # calling helper to determine action
        cd_action = self.na_helper.get_cd_action(paired, self.parameters)
        # Bug fix: honor check mode (the module declares supports_check_mode=True
        # but previously paired/unpaired volumes even in check mode).
        if not self.module.check_mode:
            if cd_action == "create":
                self.pair_volumes()
            elif cd_action == "delete":
                self.unpair_volumes()
        self.module.exit_json(changed=self.na_helper.changed)


def main():
    """ Apply volume pair actions """
    vol_obj = ElementSWVolumePair()
    vol_obj.apply()


if __name__ == '__main__':
    main()
\ No newline at end of file diff --git a/ansible_collections/netapp/elementsw/tests/unit/compat/__init__.py b/ansible_collections/netapp/elementsw/tests/unit/compat/__init__.py deleted file mode 100644 index e69de29bb..000000000 --- a/ansible_collections/netapp/elementsw/tests/unit/compat/__init__.py +++ /dev/null diff --git a/ansible_collections/netapp/elementsw/tests/unit/compat/builtins.py b/ansible_collections/netapp/elementsw/tests/unit/compat/builtins.py deleted file mode 100644 index f60ee6782..000000000 --- a/ansible_collections/netapp/elementsw/tests/unit/compat/builtins.py +++ /dev/null @@ -1,33 +0,0 @@ -# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com> -# -# This file is part of Ansible -# -# Ansible is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Ansible is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Ansible. If not, see <http://www.gnu.org/licenses/>. 
- -# Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -# -# Compat for python2.7 -# - -# One unittest needs to import builtins via __import__() so we need to have -# the string that represents it -try: - import __builtin__ -except ImportError: - BUILTINS = 'builtins' -else: - BUILTINS = '__builtin__' diff --git a/ansible_collections/netapp/elementsw/tests/unit/compat/mock.py b/ansible_collections/netapp/elementsw/tests/unit/compat/mock.py deleted file mode 100644 index 0972cd2e8..000000000 --- a/ansible_collections/netapp/elementsw/tests/unit/compat/mock.py +++ /dev/null @@ -1,122 +0,0 @@ -# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com> -# -# This file is part of Ansible -# -# Ansible is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Ansible is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Ansible. If not, see <http://www.gnu.org/licenses/>. - -# Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -''' -Compat module for Python3.x's unittest.mock module -''' -import sys - -# Python 2.7 - -# Note: Could use the pypi mock library on python3.x as well as python2.x. 
It -# is the same as the python3 stdlib mock library - -try: - # Allow wildcard import because we really do want to import all of mock's - # symbols into this compat shim - # pylint: disable=wildcard-import,unused-wildcard-import - from unittest.mock import * -except ImportError: - # Python 2 - # pylint: disable=wildcard-import,unused-wildcard-import - try: - from mock import * - except ImportError: - print('You need the mock library installed on python2.x to run tests') - - -# Prior to 3.4.4, mock_open cannot handle binary read_data -if sys.version_info >= (3,) and sys.version_info < (3, 4, 4): - file_spec = None - - def _iterate_read_data(read_data): - # Helper for mock_open: - # Retrieve lines from read_data via a generator so that separate calls to - # readline, read, and readlines are properly interleaved - sep = b'\n' if isinstance(read_data, bytes) else '\n' - data_as_list = [l + sep for l in read_data.split(sep)] - - if data_as_list[-1] == sep: - # If the last line ended in a newline, the list comprehension will have an - # extra entry that's just a newline. Remove this. - data_as_list = data_as_list[:-1] - else: - # If there wasn't an extra newline by itself, then the file being - # emulated doesn't have a newline to end the last line remove the - # newline that our naive format() added - data_as_list[-1] = data_as_list[-1][:-1] - - for line in data_as_list: - yield line - - def mock_open(mock=None, read_data=''): - """ - A helper function to create a mock to replace the use of `open`. It works - for `open` called directly or used as a context manager. - - The `mock` argument is the mock object to configure. If `None` (the - default) then a `MagicMock` will be created for you, with the API limited - to methods or attributes available on standard file handles. - - `read_data` is a string for the `read` methoddline`, and `readlines` of the - file handle to return. This is an empty string by default. 
- """ - def _readlines_side_effect(*args, **kwargs): - if handle.readlines.return_value is not None: - return handle.readlines.return_value - return list(_data) - - def _read_side_effect(*args, **kwargs): - if handle.read.return_value is not None: - return handle.read.return_value - return type(read_data)().join(_data) - - def _readline_side_effect(): - if handle.readline.return_value is not None: - while True: - yield handle.readline.return_value - for line in _data: - yield line - - global file_spec - if file_spec is None: - import _io - file_spec = list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO)))) - - if mock is None: - mock = MagicMock(name='open', spec=open) - - handle = MagicMock(spec=file_spec) - handle.__enter__.return_value = handle - - _data = _iterate_read_data(read_data) - - handle.write.return_value = None - handle.read.return_value = None - handle.readline.return_value = None - handle.readlines.return_value = None - - handle.read.side_effect = _read_side_effect - handle.readline.side_effect = _readline_side_effect() - handle.readlines.side_effect = _readlines_side_effect - - mock.return_value = handle - return mock diff --git a/ansible_collections/netapp/elementsw/tests/unit/compat/unittest.py b/ansible_collections/netapp/elementsw/tests/unit/compat/unittest.py deleted file mode 100644 index 73a20cf8c..000000000 --- a/ansible_collections/netapp/elementsw/tests/unit/compat/unittest.py +++ /dev/null @@ -1,44 +0,0 @@ -# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com> -# -# This file is part of Ansible -# -# Ansible is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Ansible is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Ansible. If not, see <http://www.gnu.org/licenses/>. - -# Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -''' -Compat module for Python2.7's unittest module -''' - -import sys - -import pytest - -# Allow wildcard import because we really do want to import all of -# unittests's symbols into this compat shim -# pylint: disable=wildcard-import,unused-wildcard-import -if sys.version_info < (2, 7): - try: - # Need unittest2 on python2.6 - from unittest2 import * - except ImportError: - print('You need unittest2 installed on python2.6.x to run tests') - - class TestCase: - """ skip everything """ - pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as unittest2 may not be available') -else: - from unittest import * diff --git a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_access_group.py b/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_access_group.py deleted file mode 100644 index 0bd1e2550..000000000 --- a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_access_group.py +++ /dev/null @@ -1,175 +0,0 @@ -''' unit test for Ansible module: na_elementsw_account.py ''' - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - -import json -import pytest - -from ansible_collections.netapp.elementsw.tests.unit.compat import unittest -from ansible_collections.netapp.elementsw.tests.unit.compat.mock import patch -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils - -if not netapp_utils.has_sf_sdk(): - pytestmark = pytest.mark.skip('skipping as missing required SolidFire Python SDK') - -from 
ansible_collections.netapp.elementsw.plugins.modules.na_elementsw_access_group \ - import ElementSWAccessGroup as my_module # module under test - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - pass - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - pass - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -ADD_ERROR = 'some_error_in_add_access_group' - - -class MockSFConnection(object): - ''' mock connection to ElementSW host ''' - - class Bunch(object): # pylint: disable=too-few-public-methods - ''' create object with arbitrary attributes ''' - def __init__(self, **kw): - ''' called with (k1=v1, k2=v2), creates obj.k1, obj.k2 with values v1, v2 ''' - setattr(self, '__dict__', kw) - - def __init__(self, force_error=False, where=None): - ''' save arguments ''' - self.force_error = force_error - self.where = where - - def list_volume_access_groups(self, *args, **kwargs): # pylint: disable=unused-argument - ''' build access_group list: access_groups.name, access_groups.account_id ''' - access_groups = list() - access_group_list = self.Bunch(volume_access_groups=access_groups) - return access_group_list - - def create_volume_access_group(self, *args, **kwargs): # pylint: disable=unused-argument 
- ''' We don't check the return code, but could force an exception ''' - if self.force_error and 'add' in self.where: - # The module does not check for a specific exception :( - raise OSError(ADD_ERROR) - - def get_account_by_name(self, *args, **kwargs): # pylint: disable=unused-argument - ''' returns account_id ''' - if self.force_error and 'account_id' in self.where: - account_id = None - else: - account_id = 1 - print('account_id', account_id) - account = self.Bunch(account_id=account_id) - result = self.Bunch(account=account) - return result - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - my_module() - print('Info: %s' % exc.value.args[0]['msg']) - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_ensure_command_called(self, mock_create_sf_connection): - ''' a more interesting test ''' - set_module_args({ - 'state': 'present', - 'name': 'element_groupname', - 'account_id': 'element_account_id', - 'hostname': 'hostname', - 'username': 'username', - 'password': 'password', - }) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - # It may not be a good idea to start with apply - # More atomic methods can be easier to mock - my_obj.apply() - print(exc.value.args[0]) - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def 
test_check_error_reporting_on_add_exception(self, mock_create_sf_connection): - ''' a more interesting test ''' - set_module_args({ - 'state': 'present', - 'name': 'element_groupname', - 'account_id': 'element_account_id', - 'hostname': 'hostname', - 'username': 'username', - 'password': 'password', - }) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(force_error=True, where=['add']) - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - # It may not be a good idea to start with apply - # More atomic methods can be easier to mock - # apply() is calling list_accounts() and add_account() - my_obj.apply() - print(exc.value.args[0]) - message = 'Error creating volume access group element_groupname: %s' % ADD_ERROR - assert exc.value.args[0]['msg'] == message - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_check_error_reporting_on_invalid_account_id(self, mock_create_sf_connection): - ''' a more interesting test ''' - set_module_args({ - 'state': 'present', - 'name': 'element_groupname', - 'account_id': 'element_account_id', - 'volumes': ['volume1'], - 'hostname': 'hostname', - 'username': 'username', - 'password': 'password', - }) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(force_error=True, where=['account_id']) - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - # It may not be a good idea to start with apply - # More atomic methods can be easier to mock - # apply() is calling list_accounts() and add_account() - my_obj.apply() - print(exc.value.args[0]) - message = 'Error: Specified account id "%s" does not exist.' 
% 'element_account_id' - assert exc.value.args[0]['msg'] == message diff --git a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_access_group_volumes.py b/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_access_group_volumes.py deleted file mode 100644 index fb78ad78a..000000000 --- a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_access_group_volumes.py +++ /dev/null @@ -1,245 +0,0 @@ -''' unit test for Ansible module: na_elementsw_access_group_volumes.py ''' - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - -import json -import pytest - -from ansible_collections.netapp.elementsw.tests.unit.compat import unittest -from ansible_collections.netapp.elementsw.tests.unit.compat.mock import patch -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils - -if not netapp_utils.has_sf_sdk(): - pytestmark = pytest.mark.skip('skipping as missing required SolidFire Python SDK') - -from ansible_collections.netapp.elementsw.plugins.modules.na_elementsw_access_group_volumes \ - import ElementSWAccessGroupVolumes as my_module # module under test - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - pass - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - pass - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - 
kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -MODIFY_ERROR = 'some_error_in_modify_access_group' - -VOLUME_ID = 777 - - -class MockSFConnection(object): - ''' mock connection to ElementSW host ''' - - class Bunch(object): # pylint: disable=too-few-public-methods - ''' create object with arbitrary attributes ''' - def __init__(self, **kw): - ''' called with (k1=v1, k2=v2), creates obj.k1, obj.k2 with values v1, v2 ''' - setattr(self, '__dict__', kw) - - def __init__(self, force_error=False, where=None, volume_id=None): - ''' save arguments ''' - self.force_error = force_error - self.where = where - self.volume_id = volume_id - - def list_volume_access_groups(self, *args, **kwargs): # pylint: disable=unused-argument - ''' build access_group list: access_groups.name, access_groups.account_id ''' - group_name = 'element_groupname' - if self.volume_id is None: - volume_list = list() - else: - volume_list = [self.volume_id] - access_group = self.Bunch(name=group_name, volume_access_group_id=888, volumes=volume_list) - access_groups = [access_group] - access_group_list = self.Bunch(volume_access_groups=access_groups) - return access_group_list - - def list_volumes_for_account(self, *args, **kwargs): # pylint: disable=unused-argument - ''' build volume list: volume.name, volume.id ''' - volume = self.Bunch(name='element_volumename', volume_id=VOLUME_ID, delete_time='') - volumes = [volume] - volume_list = self.Bunch(volumes=volumes) - return volume_list - - def modify_volume_access_group(self, *args, **kwargs): # pylint: disable=unused-argument - ''' We don't check the return code, but could force an exception ''' - if self.force_error and 'modify_exception' in self.where: - # The module does not check for a specific exception :( - raise 
OSError(MODIFY_ERROR) - - def get_account_by_name(self, *args, **kwargs): # pylint: disable=unused-argument - ''' returns account_id ''' - if self.force_error and 'get_account_id' in self.where: - account_id = None - else: - account_id = 1 - print('account_id', account_id) - account = self.Bunch(account_id=account_id) - result = self.Bunch(account=account) - return result - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - ARGS = { - 'state': 'present', - 'access_group': 'element_groupname', - 'volumes': 'element_volumename', - 'account_id': 'element_account_id', - 'hostname': 'hostname', - 'username': 'username', - 'password': 'password', - } - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - my_module() - print('Info: %s' % exc.value.args[0]['msg']) - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_add_volume(self, mock_create_sf_connection): - ''' adding a volume ''' - args = dict(self.ARGS) # deep copy as other tests can modify args - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_add_volume_idempotent(self, mock_create_sf_connection): - ''' adding a volume that is already in the access group ''' - args = dict(self.ARGS) - set_module_args(args) - # my_obj.sfe will be assigned a 
MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(volume_id=VOLUME_ID) - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert not exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_remove_volume(self, mock_create_sf_connection): - ''' removing a volume that is in the access group ''' - args = dict(self.ARGS) - args['state'] = 'absent' - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(volume_id=VOLUME_ID) - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_remove_volume_idempotent(self, mock_create_sf_connection): - ''' removing a volume that is not in the access group ''' - args = dict(self.ARGS) - args['state'] = 'absent' - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert not exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_check_error_reporting_on_modify_exception(self, mock_create_sf_connection): - ''' modify does not return anything but can raise an exception ''' - args = dict(self.ARGS) - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(force_error=True, where=['modify_exception']) - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.apply() - 
print(exc.value.args[0]) - message = 'Error updating volume access group element_groupname: %s' % MODIFY_ERROR - assert exc.value.args[0]['msg'] == message - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_check_error_reporting_on_invalid_volume_name(self, mock_create_sf_connection): - ''' report error if volume does not exist ''' - args = dict(self.ARGS) - args['volumes'] = ['volume1'] - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - message = 'Error: Specified volume %s does not exist' % 'volume1' - assert exc.value.args[0]['msg'] == message - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_check_error_reporting_on_invalid_account_group_name(self, mock_create_sf_connection): - ''' report error if access group does not exist ''' - args = dict(self.ARGS) - args['access_group'] = 'something_else' - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - message = 'Error: Specified access group "%s" does not exist for account id: %s.' 
% ('something_else', 'element_account_id') - assert exc.value.args[0]['msg'] == message - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_check_error_reporting_on_invalid_account_id(self, mock_create_sf_connection): - ''' report error if account id is not found ''' - args = dict(self.ARGS) - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(force_error=True, where='get_account_id') - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - message = 'Error: Specified account id "%s" does not exist.' % 'element_account_id' - assert exc.value.args[0]['msg'] == message diff --git a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_account.py b/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_account.py deleted file mode 100644 index 8075ba5c4..000000000 --- a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_account.py +++ /dev/null @@ -1,137 +0,0 @@ -''' unit test for Ansible module: na_elementsw_account.py ''' - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type -import json -import pytest - -from ansible_collections.netapp.elementsw.tests.unit.compat import unittest -from ansible_collections.netapp.elementsw.tests.unit.compat.mock import patch -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils - -if not netapp_utils.has_sf_sdk(): - pytestmark = pytest.mark.skip('skipping as missing required SolidFire Python SDK') - -from ansible_collections.netapp.elementsw.plugins.modules.na_elementsw_account \ - import ElementSWAccount as my_module # module under test - - -def set_module_args(args): - """prepare 
arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - pass - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - pass - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -ADD_ERROR = 'some_error_in_add_account' - - -class MockSFConnection(object): - ''' mock connection to ElementSW host ''' - - class Bunch(object): # pylint: disable=too-few-public-methods - ''' create object with arbitrary attributes ''' - def __init__(self, **kw): - ''' called with (k1=v1, k2=v2), creates obj.k1, obj.k2 with values v1, v2 ''' - setattr(self, '__dict__', kw) - - def __init__(self, force_error=False, where=None): - ''' save arguments ''' - self.force_error = force_error - self.where = where - - def list_accounts(self, *args, **kwargs): # pylint: disable=unused-argument - ''' build account list: account.username, account.account_id ''' - accounts = list() - account_list = self.Bunch(accounts=accounts) - return account_list - - def add_account(self, *args, **kwargs): # pylint: disable=unused-argument - ''' We don't check the return code, but could force an exception ''' - if self.force_error and 'add' in self.where: - # The module does not check for a specific exception :( - raise OSError(ADD_ERROR) - - -class TestMyModule(unittest.TestCase): - ''' a group of 
related Unit Tests ''' - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - my_module() - print('Info: %s' % exc.value.args[0]['msg']) - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_ensure_command_called(self, mock_create_sf_connection): - ''' a more interesting test ''' - set_module_args({ - 'state': 'present', - 'element_username': 'element_username', - 'hostname': 'hostname', - 'username': 'username', - 'password': 'password', - }) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - # It may not be a good idea to start with apply - # More atomic methods can be easier to mock - my_obj.apply() - print(exc.value.args[0]) - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_check_error_reporting_on_add_exception(self, mock_create_sf_connection): - ''' a more interesting test ''' - set_module_args({ - 'state': 'present', - 'element_username': 'element_username', - 'hostname': 'hostname', - 'username': 'username', - 'password': 'password', - }) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(force_error=True, where=['add']) - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - # It may not be a good idea to start with apply - # More atomic methods can be easier to mock - # apply() is calling list_accounts() and add_account() - my_obj.apply() - 
print(exc.value.args[0]) - message = 'Error creating account element_username: %s' % ADD_ERROR - assert exc.value.args[0]['msg'] == message diff --git a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_cluster.py b/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_cluster.py deleted file mode 100644 index 6624f374d..000000000 --- a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_cluster.py +++ /dev/null @@ -1,228 +0,0 @@ -''' unit test for Ansible module: na_elementsw_cluster.py ''' - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - -import inspect -import json -import pytest - -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -from ansible_collections.netapp.elementsw.tests.unit.compat import unittest -from ansible_collections.netapp.elementsw.tests.unit.compat.mock import patch -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils - -if not netapp_utils.has_sf_sdk(): - pytestmark = pytest.mark.skip('skipping as missing required SolidFire Python SDK') - -from ansible_collections.netapp.elementsw.plugins.modules.na_elementsw_cluster \ - import ElementSWCluster as my_module # module under test - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - 
kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -NODE_ID1 = 777 -NODE_ID2 = 888 -NODE_ID3 = 999 - - -class MockSFConnection(object): - ''' mock connection to ElementSW host ''' - - class Bunch(object): # pylint: disable=too-few-public-methods - ''' create object with arbitrary attributes ''' - def __init__(self, **kw): - ''' called with (k1=v1, k2=v2), creates obj.k1, obj.k2 with values v1, v2 ''' - setattr(self, '__dict__', kw) - - def __repr__(self): - results = dict() - for key, value in vars(self).items(): - results[key] = repr(value) - return repr(results) - - def __init__(self, force_error=False, where=None, nodes=None): - ''' save arguments ''' - self.force_error = force_error - self.where = where - self.nodes = nodes - self._port = 442 - self.called = list() - - def record(self, args, kwargs): - name = inspect.stack()[1][3] # caller function name - print('%s: , args: %s, kwargs: %s' % (name, args, kwargs)) - self.called.append(name) - - def create_cluster(self, *args, **kwargs): # pylint: disable=unused-argument - self.record(repr(args), repr(kwargs)) - - def send_request(self, *args, **kwargs): # pylint: disable=unused-argument - self.record(repr(args), repr(kwargs)) - - def get_config(self, *args, **kwargs): # pylint: disable=unused-argument - self.record(repr(args), repr(kwargs)) - if self.force_error and self.where == 'get_config_exception': - raise ConnectionError - if self.nodes is not None: - nodes = ['%d:%s' % (i, node) for i, node in enumerate(self.nodes)] - else: - nodes = list() - cluster = self.Bunch(ensemble=nodes, cluster='cl_name') - config = self.Bunch(cluster=cluster) - return self.Bunch(config=config) - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - ARGS = { - # 'state': 'present', 
- 'management_virtual_ip': '10.10.10.10', - 'storage_virtual_ip': '10.10.10.11', - 'nodes': [NODE_ID1, NODE_ID2], - 'hostname': 'hostname', - 'username': 'username', - 'password': 'password', - } - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - my_module() - print('Info: %s' % exc.value.args[0]['msg']) - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_create(self, mock_create_sf_connection): - ''' create cluster basic ''' - args = dict(self.ARGS) # deep copy as other tests can modify args - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(force_error=True, where='get_config_exception') - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert exc.value.args[0]['changed'] - msg = 'created' - assert msg in exc.value.args[0]['msg'] - assert 'create_cluster' in my_obj.sfe_node.called - assert 'send_request' not in my_obj.sfe_node.called - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_create_extra_parms(self, mock_create_sf_connection): - ''' force a direct call to send_request ''' - args = dict(self.ARGS) # deep copy as other tests can modify args - args['order_number'] = '12345' - args['serial_number'] = '54321' - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(force_error=True, where='get_config_exception') - my_obj = my_module() - with 
pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert exc.value.args[0]['changed'] - assert 'send_request' in my_obj.sfe_node.called - assert 'create_cluster' not in my_obj.sfe_node.called - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_create_idempotent(self, mock_create_sf_connection): - ''' cluster already exists with same nodes ''' - args = dict(self.ARGS) - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(nodes=[NODE_ID1, NODE_ID2]) - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert not exc.value.args[0]['changed'] - assert 'send_request' not in my_obj.sfe_node.called - assert 'create_cluster' not in my_obj.sfe_node.called - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_create_idempotent_extra_nodes(self, mock_create_sf_connection): - ''' cluster already exists with more nodes ''' - args = dict(self.ARGS) - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(nodes=[NODE_ID1, NODE_ID2, NODE_ID3]) - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - msg = 'Error: found existing cluster with more nodes in ensemble.' 
- assert msg in exc.value.args[0]['msg'] - assert 'send_request' not in my_obj.sfe_node.called - assert 'create_cluster' not in my_obj.sfe_node.called - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_create_idempotent_extra_nodes_ok(self, mock_create_sf_connection): - ''' cluster already exists with more nodes but we're OK with a superset ''' - args = dict(self.ARGS) - args['fail_if_cluster_already_exists_with_larger_ensemble'] = False - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(nodes=[NODE_ID1, NODE_ID2, NODE_ID3]) - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert not exc.value.args[0]['changed'] - msg = 'cluster already exists' - assert msg in exc.value.args[0]['msg'] - assert 'send_request' not in my_obj.sfe_node.called - assert 'create_cluster' not in my_obj.sfe_node.called - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_create_idempotent_missing_nodes(self, mock_create_sf_connection): - ''' cluster already exists with fewer nodes. 
- Since not every node is lister in the ensemble, we can't tell if it's an error or not ''' - args = dict(self.ARGS) - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(nodes=[NODE_ID1]) - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert not exc.value.args[0]['changed'] - msg = 'cluster already exists' - assert msg in exc.value.args[0]['msg'] - assert 'send_request' not in my_obj.sfe_node.called - assert 'create_cluster' not in my_obj.sfe_node.called diff --git a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_cluster_config.py b/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_cluster_config.py deleted file mode 100644 index 79f461ccc..000000000 --- a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_cluster_config.py +++ /dev/null @@ -1,157 +0,0 @@ -''' unit test for Ansible module: na_elementsw_cluster_config.py ''' - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type -import json -import pytest - -from ansible_collections.netapp.elementsw.tests.unit.compat import unittest -from ansible_collections.netapp.elementsw.tests.unit.compat.mock import patch -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils - -if not netapp_utils.has_sf_sdk(): - pytestmark = pytest.mark.skip('skipping as missing required SolidFire Python SDK') - -from ansible_collections.netapp.elementsw.plugins.modules.na_elementsw_cluster_config \ - import ElementSWClusterConfig as my_module # module under test - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - 
basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - pass - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - pass - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -GET_ERROR = 'some_error_in_get_ntp_info' - - -class MockSFConnection(object): - ''' mock connection to ElementSW host ''' - - class Bunch(object): # pylint: disable=too-few-public-methods - ''' create object with arbitrary attributes ''' - - def __init__(self, **kw): - ''' called with (k1=v1, k2=v2), creates obj.k1, obj.k2 with values v1, v2 ''' - setattr(self, '__dict__', kw) - - def __init__(self, force_error=False, where=None): - ''' save arguments ''' - self.force_error = force_error - self.where = where - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - - def set_default_args(self): - return dict({ - 'hostname': '10.253.168.129', - 'username': 'namburu', - 'password': 'SFlab1234', - }) - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_module_fail_when_required_args_missing(self, mock_create_sf_connection): - ''' required arguments are reported as errors ''' - with 
pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - my_module() - print('Info: %s' % exc.value.args[0]['msg']) - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_ensure_setup_ntp_info_called(self, mock_create_sf_connection): - ''' test if setup_ntp_info is called ''' - module_args = {} - module_args.update(self.set_default_args()) - ntp_dict = {'set_ntp_info': {'broadcastclient': None, - 'ntp_servers': ['1.1.1.1']}} - module_args.update(ntp_dict) - set_module_args(module_args) - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print('Info: test_setup_ntp_info: %s' % repr(exc.value)) - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_ensure_set_encryption_at_rest_called(self, mock_create_sf_connection): - ''' test if set_encryption_at_rest is called ''' - module_args = {} - module_args.update(self.set_default_args()) - module_args.update({'encryption_at_rest': 'present'}) - set_module_args(module_args) - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print('Info: test_set_encryption_at_rest enable: %s' % repr(exc.value)) - assert not exc.value.args[0]['changed'] - module_args.update({'encryption_at_rest': 'absent'}) - set_module_args(module_args) - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print('Info: test_set_encryption_at_rest disable: %s' % repr(exc.value)) - assert not exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_ensure_enable_feature_called(self, mock_create_sf_connection): - ''' test if enable_feature for vvols is called ''' - module_args = {} - module_args.update(self.set_default_args()) - module_args.update({'enable_virtual_volumes': True}) - set_module_args(module_args) - my_obj 
= my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print('Info: test_enable_feature: %s' % repr(exc.value)) - assert not exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_ensure_set_cluster_full_threshold_called(self, mock_create_sf_connection): - ''' test if set_cluster_full threshold is called ''' - module_args = {} - module_args.update(self.set_default_args()) - cluster_mod_dict = \ - {'modify_cluster_full_threshold': {'stage2_aware_threshold': 2, - 'stage3_block_threshold_percent': 2, - 'max_metadata_over_provision_factor': 2}} - module_args.update(cluster_mod_dict) - set_module_args(module_args) - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print('Info: test_set_cluster_full_threshold: %s' % repr(exc.value)) - assert exc.value.args[0]['changed'] diff --git a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_cluster_snmp.py b/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_cluster_snmp.py deleted file mode 100644 index 9236daa04..000000000 --- a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_cluster_snmp.py +++ /dev/null @@ -1,176 +0,0 @@ -''' unit test for Ansible module: na_elementsw_cluster_snmp.py ''' - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type -import json -import pytest - -from ansible_collections.netapp.elementsw.tests.unit.compat import unittest -from ansible_collections.netapp.elementsw.tests.unit.compat.mock import patch -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils - -if not netapp_utils.has_sf_sdk(): - pytestmark = pytest.mark.skip('skipping as missing required SolidFire Python SDK') - -from 
ansible_collections.netapp.elementsw.plugins.modules.na_elementsw_cluster_snmp \ - import ElementSWClusterSnmp as my_module # module under test - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - pass - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - pass - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -GET_ERROR = 'some_error_in_get_snmp_info' - - -class MockSFConnection(object): - ''' mock connection to ElementSW host ''' - - class Bunch(object): # pylint: disable=too-few-public-methods - ''' create object with arbitrary attributes ''' - def __init__(self, **kw): - ''' called with (k1=v1, k2=v2), creates obj.k1, obj.k2 with values v1, v2 ''' - setattr(self, '__dict__', kw) - - def __init__(self, force_error=False, where=None): - ''' save arguments ''' - self.force_error = force_error - self.where = where - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - - def set_default_args(self): - return dict({ - 'hostname': '10.117.78.131', - 
'username': 'admin', - 'password': 'netapp1!', - }) - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_module_fail_when_required_args_missing(self, mock_create_sf_connection): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - my_module() - print('Info: %s' % exc.value) - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_ensure_enable_snmp_called(self, mock_create_sf_connection): - ''' test if enable_snmp is called ''' - module_args = {} - module_args.update(self.set_default_args()) - module_args.update({'snmp_v3_enabled': True, - 'state': 'present'}) - module_args.update({'usm_users': {'access': 'rouser', - 'name': 'TestUser', - 'password': 'ChangeMe@123', - 'passphrase': 'ChangeMe@123', - 'secLevel': 'auth', }}) - - module_args.update({'networks': {'access': 'ro', - 'cidr': 24, - 'community': 'TestNetwork', - 'network': '192.168.0.1', }}) - set_module_args(module_args) - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print('Info: test_if_enable_snmp_called: %s' % repr(exc.value)) - assert exc.value - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_ensure_configure_snmp_from_version_3_TO_version_2_called(self, mock_create_sf_connection): - ''' test if configure snmp from version_3 to version_2''' - module_args = {} - module_args.update(self.set_default_args()) - module_args.update({'snmp_v3_enabled': False, - 'state': 'present'}) - module_args.update({'usm_users': {'access': 'rouser', - 'name': 'TestUser', - 'password': 'ChangeMe@123', - 'passphrase': 'ChangeMe@123', - 'secLevel': 'auth', }}) - - module_args.update({'networks': {'access': 'ro', - 'cidr': 24, - 'community': 'TestNetwork', - 'network': '192.168.0.1', }}) - set_module_args(module_args) - my_obj = my_module() - with 
pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print('Info: test_ensure_configure_snmp_from_version_3_TO_version_2_called: %s' % repr(exc.value)) - assert exc.value - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_ensure_configure_snmp_from_version_2_TO_version_3_called(self, mock_create_sf_connection): - ''' test if configure snmp from version_2 to version_3''' - module_args = {} - module_args.update(self.set_default_args()) - module_args.update({'snmp_v3_enabled': True, - 'state': 'present'}) - module_args.update({'usm_users': {'access': 'rouser', - 'name': 'TestUser_sample', - 'password': 'ChangeMe@123', - 'passphrase': 'ChangeMe@123', - 'secLevel': 'auth', }}) - - module_args.update({'networks': {'access': 'ro', - 'cidr': 24, - 'community': 'TestNetwork', - 'network': '192.168.0.1', }}) - set_module_args(module_args) - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print('Info: test_ensure_configure_snmp_from_version_2_TO_version_3_called: %s' % repr(exc.value)) - assert exc.value - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_ensure_disable_snmp_called(self, mock_create_sf_connection): - ''' test if disable_snmp is called ''' - module_args = {} - module_args.update(self.set_default_args()) - module_args.update({'state': 'absent'}) - set_module_args(module_args) - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print('Info: test_if_disable_snmp_called: %s' % repr(exc.value)) - assert exc.value diff --git a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_info.py b/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_info.py deleted file mode 100644 index dc8fd5e23..000000000 --- a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_info.py +++ /dev/null @@ 
-1,344 +0,0 @@ -''' unit tests for Ansible module: na_elementsw_info.py ''' - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - -import inspect -import json -import pytest - -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -from ansible_collections.netapp.elementsw.tests.unit.compat import unittest -from ansible_collections.netapp.elementsw.tests.unit.compat.mock import patch -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils - -if not netapp_utils.has_sf_sdk(): - pytestmark = pytest.mark.skip('skipping as missing required SolidFire Python SDK') - -from ansible_collections.netapp.elementsw.plugins.modules.na_elementsw_info \ - import ElementSWInfo as my_module # module under test - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -NODE_ID1 = 777 -NODE_ID2 = 888 -NODE_ID3 = 999 - - -class MockSFConnection(object): - ''' mock connection to ElementSW host ''' - - class Bunch(object): # pylint: disable=too-few-public-methods - ''' create object with arbitrary attributes ''' - def 
__init__(self, **kw): - ''' called with (k1=v1, k2=v2), creates obj.k1, obj.k2 with values v1, v2 ''' - setattr(self, '__dict__', kw) - - def __repr__(self): - results = dict() - for key, value in vars(self).items(): - results[key] = repr(value) - return repr(results) - - def to_json(self): - return json.loads(json.dumps(self, default=lambda x: x.__dict__)) - - def __init__(self, force_error=False, where=None): - ''' save arguments ''' - self.force_error = force_error - self.where = where - self.nodes = [NODE_ID1, NODE_ID2, NODE_ID3] - self._port = 442 - self.called = list() - if force_error and where == 'cx': - raise netapp_utils.solidfire.common.ApiConnectionError('testme') - - def record(self, args, kwargs): - name = inspect.stack()[1][3] # caller function name - print('%s: , args: %s, kwargs: %s' % (name, args, kwargs)) - self.called.append(name) - - def list_accounts(self, *args, **kwargs): # pylint: disable=unused-argument - ''' build account list: account.username, account.account_id ''' - self.record(repr(args), repr(kwargs)) - accounts = list() - accounts.append({'username': 'user1'}) - account_list = self.Bunch(accounts=accounts) - return account_list - - def list_all_nodes(self, *args, **kwargs): # pylint: disable=unused-argument - ''' build all_node list: all_node.name, all_node.all_node_id ''' - self.record(repr(args), repr(kwargs)) - all_nodes = list() - all_nodes.append({'id': 123}) - all_node_list = self.Bunch(all_nodes=all_nodes) - return all_node_list - - def list_drives(self, *args, **kwargs): # pylint: disable=unused-argument - ''' build drive list: drive.name, drive.drive_id ''' - self.record(repr(args), repr(kwargs)) - drives = list() - drives.append({'id': 123}) - drive_list = self.Bunch(drives=drives) - return drive_list - - def get_config(self, *args, **kwargs): # pylint: disable=unused-argument - self.record(repr(args), repr(kwargs)) - if self.force_error and self.where == 'get_config_exception': - raise ConnectionError - if self.nodes is 
not None: - nodes = ['%d:%s' % (i, node) for i, node in enumerate(self.nodes)] - else: - nodes = list() - cluster = self.Bunch(ensemble=nodes, cluster='cl_name') - config = self.Bunch(cluster=cluster) - return self.Bunch(config=config) - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - ARGS = { - # 'state': 'present', - 'hostname': 'hostname', - 'username': 'username', - 'password': 'password', - } - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - my_module() - print('Info: %s' % exc.value.args[0]['msg']) - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_info_all_default(self, mock_create_sf_connection): - ''' gather all by default ''' - args = dict(self.ARGS) # deep copy as other tests can modify args - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert not exc.value.args[0]['changed'] - assert 'cluster_accounts' in exc.value.args[0]['info'] - assert 'node_config' in exc.value.args[0]['info'] - username = exc.value.args[0]['info']['cluster_accounts']['accounts'][0]['username'] - assert username == 'user1' - assert 'list_accounts' in my_obj.sfe_node.called - assert 'get_config' in my_obj.sfe_node.called - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_info_all_all(self, mock_create_sf_connection): - ''' gather all explictly ''' - args = dict(self.ARGS) # deep 
copy as other tests can modify args - args['gather_subsets'] = ['all'] - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert not exc.value.args[0]['changed'] - assert 'list_accounts' in my_obj.sfe_node.called - assert 'get_config' in my_obj.sfe_node.called - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_info_all_clusters(self, mock_create_sf_connection): - ''' gather all cluster scoped subsets ''' - args = dict(self.ARGS) # deep copy as other tests can modify args - args['gather_subsets'] = ['all_clusters'] - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert not exc.value.args[0]['changed'] - assert 'cluster_accounts' in exc.value.args[0]['info'] - accounts = exc.value.args[0]['info']['cluster_accounts'] - print('accounts: >>%s<<' % accounts, type(accounts)) - print(my_obj.sfe_node.called) - assert 'list_accounts' in my_obj.sfe_node.called - assert 'get_config' not in my_obj.sfe_node.called - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_info_all_nodes(self, mock_create_sf_connection): - ''' gather all node scoped subsets ''' - args = dict(self.ARGS) # deep copy as other tests can modify args - args['gather_subsets'] = ['all_nodes'] - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert not 
exc.value.args[0]['changed'] - assert 'node_config' in exc.value.args[0]['info'] - config = exc.value.args[0]['info']['node_config'] - print('config: >>%s<<' % config, type(config)) - print(my_obj.sfe_node.called) - assert 'list_accounts' not in my_obj.sfe_node.called - assert 'get_config' in my_obj.sfe_node.called - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_info_all_nodes_not_alone(self, mock_create_sf_connection): - ''' gather all node scoped subsets but fail as another subset is present ''' - args = dict(self.ARGS) # deep copy as other tests can modify args - args['gather_subsets'] = ['all_nodes', 'dummy'] - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - msg = 'no other subset is allowed' - assert msg in exc.value.args[0]['msg'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_info_filter_success(self, mock_create_sf_connection): - ''' filter on key, value - succesful match ''' - args = dict(self.ARGS) # deep copy as other tests can modify args - args['gather_subsets'] = ['cluster_accounts'] - args['filter'] = dict(username='user1') - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - username = exc.value.args[0]['info']['cluster_accounts']['accounts'][0]['username'] - assert username == 'user1' - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_info_filter_bad_key(self, mock_create_sf_connection): - ''' filter on key, value - key not found ''' - args = 
dict(self.ARGS) # deep copy as other tests can modify args - args['gather_subsets'] = ['cluster_accounts'] - args['filter'] = dict(bad_key='user1') - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - msg = 'Error: key bad_key not found in' - assert msg in exc.value.args[0]['msg'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_info_filter_bad_key_ignored(self, mock_create_sf_connection): - ''' filter on key, value - key not found - ignore error ''' - args = dict(self.ARGS) # deep copy as other tests can modify args - args['gather_subsets'] = ['cluster_accounts'] - args['filter'] = dict(bad_key='user1') - args['fail_on_key_not_found'] = False - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert exc.value.args[0]['info']['cluster_accounts']['accounts'] == list() - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_info_filter_record_not_found(self, mock_create_sf_connection): - ''' filter on key, value - no match ''' - args = dict(self.ARGS) # deep copy as other tests can modify args - args['gather_subsets'] = ['cluster_accounts'] - args['filter'] = dict(bad_key='user1') - args['fail_on_key_not_found'] = False - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert exc.value.args[0]['info']['cluster_accounts']['accounts'] == 
list() - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_info_filter_record_not_found_error(self, mock_create_sf_connection): - ''' filter on key, value - no match - force error on empty ''' - args = dict(self.ARGS) # deep copy as other tests can modify args - args['gather_subsets'] = ['cluster_accounts'] - args['filter'] = dict(username='user111') - args['fail_on_record_not_found'] = True - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - msg = 'Error: no match for' - assert msg in exc.value.args[0]['msg'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_connection_error(self, mock_create_sf_connection): - ''' filter on key, value - no match - force error on empty ''' - args = dict(self.ARGS) # deep copy as other tests can modify args - set_module_args(args) - # force a connection exception - mock_create_sf_connection.side_effect = netapp_utils.solidfire.common.ApiConnectionError('testme') - with pytest.raises(AnsibleFailJson) as exc: - my_module() - print(exc.value.args[0]) - msg = 'Failed to create connection for hostname:442' - assert msg in exc.value.args[0]['msg'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_other_connection_error(self, mock_create_sf_connection): - ''' filter on key, value - no match - force error on empty ''' - args = dict(self.ARGS) # deep copy as other tests can modify args - set_module_args(args) - # force a connection exception - mock_create_sf_connection.side_effect = KeyError('testme') - with pytest.raises(AnsibleFailJson) as exc: - my_module() - print(exc.value.args[0]) - msg = 'Failed to connect for hostname:442' - assert msg in 
exc.value.args[0]['msg'] diff --git a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_initiators.py b/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_initiators.py deleted file mode 100644 index ee5ff85db..000000000 --- a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_initiators.py +++ /dev/null @@ -1,201 +0,0 @@ -''' unit test for Ansible module: na_elementsw_initiators.py ''' - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type -import json -import pytest - -from ansible_collections.netapp.elementsw.tests.unit.compat import unittest -from ansible_collections.netapp.elementsw.tests.unit.compat.mock import patch -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils - -if not netapp_utils.has_sf_sdk(): - pytestmark = pytest.mark.skip('skipping as missing required SolidFire Python SDK') - -from ansible_collections.netapp.elementsw.plugins.modules.na_elementsw_initiators \ - import ElementSWInitiators as my_module # module under test - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - pass - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - pass - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: 
disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -class MockSFConnection(object): - ''' mock connection to ElementSW host ''' - - class Bunch(object): # pylint: disable=too-few-public-methods - ''' create object with arbitrary attributes ''' - def __init__(self, **kw): - ''' called with (k1=v1, k2=v2), creates obj.k1, obj.k2 with values v1, v2 ''' - setattr(self, '__dict__', kw) - - class Initiator(object): - def __init__(self, entries): - self.__dict__.update(entries) - - def list_initiators(self): - ''' build initiator Obj ''' - initiator = self.Bunch( - initiator_name="a", - initiator_id=13, - alias="a2", - # Note: 'config-mgmt' and 'event-source' are added for telemetry - attributes={'key': 'value', 'config-mgmt': 'ansible', 'event-source': 'na_elementsw_initiators'}, - volume_access_groups=[1] - ) - initiators = self.Bunch( - initiators=[initiator] - ) - return initiators - - def create_initiators(self, *args, **kwargs): # pylint: disable=unused-argument - ''' mock method ''' - pass - - def delete_initiators(self, *args, **kwargs): # pylint: disable=unused-argument - ''' mock method ''' - pass - - def modify_initiators(self, *args, **kwargs): # pylint: disable=unused-argument - ''' mock method ''' - pass - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - - def set_default_args(self): - return dict({ - 'hostname': '10.253.168.129', - 'username': 'namburu', - 'password': 'SFlab1234', - }) - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_module_fail_when_required_args_missing(self, mock_create_sf_connection): - ''' required arguments are 
reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - my_module() - print('Info: %s' % exc.value.args[0]['msg']) - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_create_initiator(self, mock_create_sf_connection): - ''' test if create initiator is called ''' - module_args = {} - module_args.update(self.set_default_args()) - initiator_dict = { - "state": "present", - "initiators": [{ - "name": "newinitiator1", - "alias": "newinitiator1alias", - "attributes": {"key1": "value1"} - }] - } - module_args.update(initiator_dict) - set_module_args(module_args) - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print('Info: test_create_initiators: %s' % repr(exc.value)) - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_delete_initiator(self, mock_create_sf_connection): - ''' test if delete initiator is called ''' - module_args = {} - module_args.update(self.set_default_args()) - initiator_dict = { - "state": "absent", - "initiators": [{ - "name": "a" - }] - } - module_args.update(initiator_dict) - set_module_args(module_args) - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print('Info: test_delete_initiators: %s' % repr(exc.value)) - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_modify_initiator(self, mock_create_sf_connection): - ''' test if modify initiator is called ''' - module_args = {} - module_args.update(self.set_default_args()) - initiator_dict = { - "state": "present", - "initiators": [{ - "name": "a", - "alias": "a3", - "attributes": {"key": "value"} - }] - } - 
module_args.update(initiator_dict) - set_module_args(module_args) - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print('Info: test_modify_initiators: %s' % repr(exc.value)) - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_modify_initiator_idempotent(self, mock_create_sf_connection): - ''' test if modify initiator is called ''' - module_args = {} - module_args.update(self.set_default_args()) - initiator_dict = { - "state": "present", - "initiators": [{ - "name": "a", - "alias": "a2", - "attributes": {"key": "value"}, - "volume_access_group_id": 1 - }] - } - module_args.update(initiator_dict) - set_module_args(module_args) - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print('Info: test_modify_initiators: %s' % repr(exc.value)) - assert not exc.value.args[0]['changed'] diff --git a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_network_interfaces.py b/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_network_interfaces.py deleted file mode 100644 index 5364a4e76..000000000 --- a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_network_interfaces.py +++ /dev/null @@ -1,293 +0,0 @@ -''' unit tests for Ansible module: na_elementsw_info.py ''' - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - -import inspect -import json -import pytest - -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -from ansible_collections.netapp.elementsw.tests.unit.compat import unittest -from ansible_collections.netapp.elementsw.tests.unit.compat.mock import patch -import 
ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils - -if not netapp_utils.has_sf_sdk(): - pytestmark = pytest.mark.skip('skipping as missing required SolidFire Python SDK') - -from ansible_collections.netapp.elementsw.plugins.modules.na_elementsw_network_interfaces \ - import ElementSWNetworkInterfaces as my_module # module under test - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -NODE_ID1 = 777 -NODE_ID2 = 888 -NODE_ID3 = 999 - -MAPPING = dict( - bond_mode='bond-mode', - bond_lacp_rate='bond-lacp_rate', - dns_nameservers='dns-nameservers', - dns_search='dns-search', - virtual_network_tag='virtualNetworkTag', -) - - -def mapkey(key): - if key in MAPPING: - return MAPPING[key] - return key - - -class MockSFConnection(object): - ''' mock connection to ElementSW host ''' - - class Bunch(object): # pylint: disable=too-few-public-methods - ''' create object with arbitrary attributes ''' - def __init__(self, **kw): - ''' called with (k1=v1, k2=v2), creates obj.k1, obj.k2 with values v1, v2 ''' - setattr(self, '__dict__', kw) - - def __repr__(self): - results = 
dict() - for key, value in vars(self).items(): - results[key] = repr(value) - return repr(results) - - def to_json(self): - return json.loads(json.dumps(self, default=lambda x: x.__dict__)) - - def __init__(self, force_error=False, where=None): - ''' save arguments ''' - self.force_error = force_error - self.where = where - # self._port = 442 - self.called = list() - self.set_network_config_args = dict() - if force_error and where == 'cx': - raise netapp_utils.solidfire.common.ApiConnectionError('testme') - - def record(self, args, kwargs): # pylint: disable=unused-argument - name = inspect.stack()[1][3] # caller function name - # print('%s: , args: %s, kwargs: %s' % (name, args, kwargs)) - self.called.append(name) - - def set_network_config(self, *args, **kwargs): # pylint: disable=unused-argument - self.record(repr(args), repr(kwargs)) - print('network:', kwargs['network'].to_json()) - self.set_network_config_args = kwargs['network'].to_json() - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - DEPRECATED_ARGS = { - 'ip_address_1g': 'ip_address_1g', - 'subnet_1g': 'subnet_1g', - 'gateway_address_1g': 'gateway_address_1g', - 'mtu_1g': 'mtu_1g', # make sure the use a value != from default - 'bond_mode_1g': 'ALB', # make sure the use a value != from default - 'lacp_1g': 'Fast', # make sure the use a value != from default - 'ip_address_10g': 'ip_address_10g', - 'subnet_10g': 'subnet_10g', - 'gateway_address_10g': 'gateway_address_10g', - 'mtu_10g': 'mtu_10g', # make sure the use a value != from default - 'bond_mode_10g': 'LACP', # make sure the use a value != from default - 'lacp_10g': 'Fast', # make sure the use a value != from default - 'method': 'static', - 'dns_nameservers': 'dns_nameservers', - 'dns_search_domains': 'dns_search_domains', - 'virtual_network_tag': 'virtual_network_tag', - 'hostname': 'hostname', - 'username': 'username', - 'password': 'password', - } - - ARGS = { - 'bond_1g': { - 'address': '10.10.10.10', - 
'netmask': '255.255.255.0', - 'gateway': '10.10.10.1', - 'mtu': '1500', - 'bond_mode': 'ActivePassive', - 'dns_nameservers': ['dns_nameservers'], - 'dns_search': ['dns_search_domains'], - 'virtual_network_tag': 'virtual_network_tag', - }, - 'bond_10g': { - 'bond_mode': 'LACP', - 'bond_lacp_rate': 'Fast', - }, - 'hostname': 'hostname', - 'username': 'username', - 'password': 'password', - } - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - my_module() - print('Info: %s' % exc.value.args[0]['msg']) - - def test_deprecated_nothing(self): - ''' deprecated without 1g or 10g options ''' - args = dict(self.DEPRECATED_ARGS) # deep copy as other tests can modify args - for key in list(args): - if '1g' in key or '10g' in key: - del args[key] - set_module_args(args) - with pytest.raises(AnsibleFailJson) as exc: - my_module() - msg = 'Please use the new bond_1g or bond_10g options to configure the bond interfaces.' - assert msg in exc.value.args[0]['msg'] - msg = 'This module cannot set or change "method"' - assert msg in exc.value.args[0]['msg'] - - def test_deprecated_all(self): - ''' deprecated with all options ''' - args = dict(self.DEPRECATED_ARGS) # deep copy as other tests can modify args - set_module_args(args) - with pytest.raises(AnsibleFailJson) as exc: - my_module() - msg = 'Please use the new bond_1g and bond_10g options to configure the bond interfaces.' 
- assert msg in exc.value.args[0]['msg'] - msg = 'This module cannot set or change "method"' - assert msg in exc.value.args[0]['msg'] - - def test_deprecated_1g_only(self): - ''' deprecated with 1g options only ''' - args = dict(self.DEPRECATED_ARGS) # deep copy as other tests can modify args - for key in list(args): - if '10g' in key: - del args[key] - set_module_args(args) - with pytest.raises(AnsibleFailJson) as exc: - my_module() - msg = 'Please use the new bond_1g option to configure the bond 1G interface.' - assert msg in exc.value.args[0]['msg'] - msg = 'This module cannot set or change "method"' - assert msg in exc.value.args[0]['msg'] - - def test_deprecated_10g_only(self): - ''' deprecated with 10g options only ''' - args = dict(self.DEPRECATED_ARGS) # deep copy as other tests can modify args - for key in list(args): - if '1g' in key: - del args[key] - set_module_args(args) - with pytest.raises(AnsibleFailJson) as exc: - my_module() - msg = 'Please use the new bond_10g option to configure the bond 10G interface.' 
- assert msg in exc.value.args[0]['msg'] - msg = 'This module cannot set or change "method"' - assert msg in exc.value.args[0]['msg'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_modify_nothing(self, mock_create_sf_connection): - ''' modify without 1g or 10g options ''' - args = dict(self.ARGS) # deep copy as other tests can modify args - for key in list(args): - if '1g' in key or '10g' in key: - del args[key] - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - print('LN:', my_obj.module.params) - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert not exc.value.args[0]['changed'] - assert len(my_obj.sfe.set_network_config_args) == 0 - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_modify_all(self, mock_create_sf_connection): - ''' modify with all options ''' - args = dict(self.ARGS) # deep copy as other tests can modify args - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert exc.value.args[0]['changed'] - assert 'Bond1G' in my_obj.sfe.set_network_config_args - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_modify_1g_only(self, mock_create_sf_connection): - ''' modify with 1g options only ''' - args = dict(self.ARGS) # deep copy as other tests can modify args - for key in list(args): - if '10g' in key: - del args[key] - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with 
pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert exc.value.args[0]['changed'] - assert 'Bond1G' in my_obj.sfe.set_network_config_args - assert 'Bond10G' not in my_obj.sfe.set_network_config_args - print(my_obj.sfe.set_network_config_args['Bond1G']) - for key in args['bond_1g']: - if key != 'bond_lacp_rate': - assert my_obj.sfe.set_network_config_args['Bond1G'][mapkey(key)] == args['bond_1g'][key] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_modify_10g_only(self, mock_create_sf_connection): - ''' modify with 10g options only ''' - args = dict(self.ARGS) # deep copy as other tests can modify args - for key in list(args): - if '1g' in key: - del args[key] - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert exc.value.args[0]['changed'] - assert 'Bond1G' not in my_obj.sfe.set_network_config_args - assert 'Bond10G' in my_obj.sfe.set_network_config_args - assert my_obj.sfe.set_network_config_args['Bond10G']['bond-lacp_rate'] == args['bond_10g']['bond_lacp_rate'] - for key in args['bond_10g']: - assert my_obj.sfe.set_network_config_args['Bond10G'][mapkey(key)] == args['bond_10g'][key] diff --git a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_nodes.py b/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_nodes.py deleted file mode 100644 index 3e163d000..000000000 --- a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_nodes.py +++ /dev/null @@ -1,324 +0,0 @@ -''' unit test for Ansible module: na_elementsw_node.py ''' - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - -import json -import pytest - -from 
ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -from ansible_collections.netapp.elementsw.tests.unit.compat import unittest -from ansible_collections.netapp.elementsw.tests.unit.compat.mock import patch -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils - -if not netapp_utils.has_sf_sdk(): - pytestmark = pytest.mark.skip('skipping as missing required SolidFire Python SDK') - -from ansible_collections.netapp.elementsw.plugins.modules.na_elementsw_node \ - import ElementSWNode as my_module # module under test - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -MODIFY_ERROR = 'some_error_in_modify_access_group' - -NODE_ID1 = 777 -NODE_ID2 = 888 -NODE_NAME1 = 'node_name1' -NODE_NAME2 = 'node_name2' - - -class MockSFConnection(object): - ''' mock connection to ElementSW host ''' - - class Bunch(object): # pylint: disable=too-few-public-methods - ''' create object with arbitrary attributes ''' - def __init__(self, **kw): - ''' called with (k1=v1, k2=v2), creates obj.k1, obj.k2 with values v1, v2 ''' - setattr(self, '__dict__', 
kw) - - def __init__(self, force_error=False, where=None, node_id=None, cluster_name='', node_state='Pending'): - ''' save arguments ''' - self.force_error = force_error - self.where = where - self.node_id = node_id - self.cluster_name = cluster_name - self.node_state = node_state - - def list_all_nodes(self, *args, **kwargs): # pylint: disable=unused-argument - ''' build access_group list: access_groups.name, access_groups.account_id ''' - nodes = list() - pending_nodes = list() - active_pending_nodes = list() - if self.node_id is None: - node_list = list() - else: - node_list = [self.node_id] - attrs1 = dict(mip='10.10.10.101', name=NODE_NAME1, node_id=NODE_ID1) - attrs2 = dict(mip='10.10.10.101', name=NODE_NAME2, node_id=NODE_ID2) - if self.where == 'pending': - attrs1['pending_node_id'] = NODE_ID1 - attrs2['pending_node_id'] = NODE_ID2 - node1 = self.Bunch(**attrs1) - node2 = self.Bunch(**attrs2) - if self.where == 'nodes': - nodes = [node1, node2] - elif self.where == 'pending': - pending_nodes = [node1, node2] - elif self.where == 'active_pending': - active_pending_nodes = [node1, node2] - node_list = self.Bunch(nodes=nodes, pending_nodes=pending_nodes, pending_active_nodes=active_pending_nodes) - return node_list - - def add_nodes(self, *args, **kwargs): # pylint: disable=unused-argument - print('adding_node: ', repr(args), repr(kwargs)) - - def remove_nodes(self, *args, **kwargs): # pylint: disable=unused-argument - print('adding_node: ', repr(args), repr(kwargs)) - - def get_cluster_config(self, *args, **kwargs): # pylint: disable=unused-argument - print('get_cluster_config: ', repr(args), repr(kwargs)) - cluster = self.Bunch(cluster=self.cluster_name, state=self.node_state) - return self.Bunch(cluster=cluster) - - def set_cluster_config(self, *args, **kwargs): # pylint: disable=unused-argument - print('set_cluster_config: ', repr(args), repr(kwargs)) - - def list_drives(self, *args, **kwargs): # pylint: disable=unused-argument - print('list_drives: ', 
repr(args), repr(kwargs)) - drive = self.Bunch(node_id=self.node_id, status="active") - return self.Bunch(drives=[drive]) - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - ARGS = { - 'state': 'present', - 'node_ids': [NODE_ID1, NODE_ID2], - 'hostname': 'hostname', - 'username': 'username', - 'password': 'password', - } - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - my_module() - print('Info: %s' % exc.value.args[0]['msg']) - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_add_node_fail_not_pending(self, mock_create_sf_connection): - ''' adding a node - fails as these nodes are unknown ''' - args = dict(self.ARGS) # deep copy as other tests can modify args - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - msg = 'nodes not in pending or active lists' - assert msg in exc.value.args[0]['msg'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_add_node(self, mock_create_sf_connection): - ''' adding a node ''' - args = dict(self.ARGS) # deep copy as other tests can modify args - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(where='pending') - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert 
exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_add_node_idempotent(self, mock_create_sf_connection): - ''' adding a node that is already in the cluster ''' - args = dict(self.ARGS) - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(where='nodes') - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert not exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_remove_node(self, mock_create_sf_connection): - ''' removing a node that is in the cluster ''' - args = dict(self.ARGS) - args['state'] = 'absent' - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(where='nodes') - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_remove_node_idempotent(self, mock_create_sf_connection): - ''' removing a node that is not in the cluster ''' - args = dict(self.ARGS) - args['state'] = 'absent' - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert not exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_remove_node_with_active_drive(self, mock_create_sf_connection): - ''' removing a node that is in the cluster but still associated with a drive ''' - args = 
dict(self.ARGS) - args['state'] = 'absent' - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(node_id=NODE_ID1, where='nodes') - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - msg = 'Error deleting node %s: node has active drives' % NODE_NAME1 - assert msg in exc.value.args[0]['msg'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_set_cluster_name_only(self, mock_create_sf_connection): - ''' set cluster name without adding the node ''' - args = dict(self.ARGS) - args['preset_only'] = True - args['cluster_name'] = 'cluster_name' - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert exc.value.args[0]['changed'] - message = 'List of updated nodes with cluster_name:' - assert message in exc.value.args[0]['msg'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_set_cluster_name_only_idempotent(self, mock_create_sf_connection): - ''' set cluster name without adding the node - name already set ''' - args = dict(self.ARGS) - args['preset_only'] = True - args['cluster_name'] = 'cluster_name' - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(cluster_name=args['cluster_name']) - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert not exc.value.args[0]['changed'] - message = '' - assert message == exc.value.args[0]['msg'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def 
test_set_cluster_name_and_add(self, mock_create_sf_connection): - ''' set cluster name and add the node ''' - args = dict(self.ARGS) - args['cluster_name'] = 'cluster_name' - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(where='pending') - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert exc.value.args[0]['changed'] - message = 'List of updated nodes with cluster_name:' - assert message in exc.value.args[0]['msg'] - message = 'List of added nodes: ' - assert message in exc.value.args[0]['msg'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_set_cluster_name_and_add_idempotent(self, mock_create_sf_connection): - ''' set cluster name and add the node ''' - args = dict(self.ARGS) - args['cluster_name'] = 'cluster_name' - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(where='nodes', cluster_name=args['cluster_name']) - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert not exc.value.args[0]['changed'] - message = '' - assert message == exc.value.args[0]['msg'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_set_cluster_name_already_active_no_change(self, mock_create_sf_connection): - ''' set cluster name fails because node state is 'Active' ''' - args = dict(self.ARGS) - args['cluster_name'] = 'cluster_name' - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(where='nodes', cluster_name=args['cluster_name'], node_state='Active') - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - 
print(exc.value.args[0]) - assert not exc.value.args[0]['changed'] - message = '' - assert message == exc.value.args[0]['msg'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_set_cluster_name_already_active_change_not_allowed(self, mock_create_sf_connection): - ''' set cluster name fails because node state is 'Active' ''' - args = dict(self.ARGS) - args['cluster_name'] = 'new_cluster_name' - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(where='nodes', cluster_name='old_cluster_name', node_state='Active') - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - message = "Error updating cluster name for node %s, already in 'Active' state" % NODE_ID1 - assert message == exc.value.args[0]['msg'] diff --git a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_qos_policy.py b/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_qos_policy.py deleted file mode 100644 index 83ac3711a..000000000 --- a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_qos_policy.py +++ /dev/null @@ -1,300 +0,0 @@ -''' unit test for Ansible module: na_elementsw_qos_policy.py ''' - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - -import json -import pytest - -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -from ansible_collections.netapp.elementsw.tests.unit.compat import unittest -from ansible_collections.netapp.elementsw.tests.unit.compat.mock import patch -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils - -if not netapp_utils.has_sf_sdk(): - pytestmark = pytest.mark.skip('skipping as missing required SolidFire Python SDK') - -from 
ansible_collections.netapp.elementsw.plugins.modules.na_elementsw_qos_policy \ - import ElementSWQosPolicy as my_module # module under test - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -CREATE_ERROR = 'create', 'some_error_in_create_qos_policy' -MODIFY_ERROR = 'modify', 'some_error_in_modify_qos_policy' -DELETE_ERROR = 'delete', 'some_error_in_delete_qos_policy' - -POLICY_ID = 888 -POLICY_NAME = 'element_qos_policy_name' - - -class MockSFConnection(object): - ''' mock connection to ElementSW host ''' - - class Bunch(object): # pylint: disable=too-few-public-methods - ''' create object with arbitrary attributes ''' - def __init__(self, **kw): - ''' called with (k1=v1, k2=v2), creates obj.k1, obj.k2 with values v1, v2 ''' - setattr(self, '__dict__', kw) - - def __init__(self, force_error=False, where=None, qos_policy_name=None): - ''' save arguments ''' - self.force_error = force_error - self.where = where - self.policy_name = qos_policy_name - - def list_qos_policies(self, *args, **kwargs): # pylint: disable=unused-argument - ''' build qos_policy list: qos_policy.name, 
qos_policy.account_id ''' - if self.policy_name: - qos_policy_name = self.policy_name - else: - qos_policy_name = POLICY_NAME - qos = self.Bunch(min_iops=1000, max_iops=20000, burst_iops=20000) - qos_policy = self.Bunch(name=qos_policy_name, qos_policy_id=POLICY_ID, qos=qos) - qos_policies = [qos_policy] - qos_policy_list = self.Bunch(qos_policies=qos_policies) - return qos_policy_list - - def create_qos_policy(self, *args, **kwargs): # pylint: disable=unused-argument - ''' We don't check the return code, but could force an exception ''' - if self.force_error and 'create_exception' in self.where: - raise netapp_utils.solidfire.common.ApiServerError(*CREATE_ERROR) - - def modify_qos_policy(self, *args, **kwargs): # pylint: disable=unused-argument - ''' We don't check the return code, but could force an exception ''' - if self.force_error and 'modify_exception' in self.where: - raise netapp_utils.solidfire.common.ApiServerError(*MODIFY_ERROR) - - def delete_qos_policy(self, *args, **kwargs): # pylint: disable=unused-argument - ''' We don't check the return code, but could force an exception ''' - if self.force_error and 'delete_exception' in self.where: - raise netapp_utils.solidfire.common.ApiServerError(*DELETE_ERROR) - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - ARGS = { - 'state': 'present', - 'name': 'element_qos_policy_name', - 'qos': {'minIOPS': 1000, 'maxIOPS': 20000, 'burstIOPS': 20000}, - 'hostname': 'hostname', - 'username': 'username', - 'password': 'password', - } - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - my_module() - print('Info: %s' % exc.value.args[0]['msg']) - - 
@patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_add_qos_policy(self, mock_create_sf_connection): - ''' adding a qos_policy ''' - args = dict(self.ARGS) # deep copy as other tests can modify args - args['name'] += '_1' # new name to force a create - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_add_qos_policy_idempotent(self, mock_create_sf_connection): - ''' adding a qos_policy ''' - args = dict(self.ARGS) - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert not exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_delete_qos_policy(self, mock_create_sf_connection): - ''' removing a qos policy ''' - args = dict(self.ARGS) - args['state'] = 'absent' - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_delete_qos_policy_idempotent(self, mock_create_sf_connection): - ''' removing a qos policy ''' - args = dict(self.ARGS) - args['state'] = 'absent' - args['name'] += '_1' # new name to force idempotency - 
set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert not exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_modify_qos_policy(self, mock_create_sf_connection): - ''' modifying a qos policy ''' - args = dict(self.ARGS) - args['qos'] = {'minIOPS': 2000} - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_rename_qos_policy(self, mock_create_sf_connection): - ''' renaming a qos policy ''' - args = dict(self.ARGS) - args['from_name'] = args['name'] - args['name'] = 'a_new_name' - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_rename_modify_qos_policy_idempotent(self, mock_create_sf_connection): - ''' renaming a qos policy ''' - args = dict(self.ARGS) - args['from_name'] = 'some_older_name' - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert not 
exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_create_qos_policy_exception(self, mock_create_sf_connection): - ''' creating a qos policy can raise an exception ''' - args = dict(self.ARGS) - args['name'] += '_1' # new name to force a create - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(force_error=True, where=['create_exception']) - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - message = 'Error creating qos policy: %s' % POLICY_NAME - assert exc.value.args[0]['msg'].startswith(message) - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_modify_qos_policy_exception(self, mock_create_sf_connection): - ''' modifying a qos policy can raise an exception ''' - args = dict(self.ARGS) - args['qos'] = {'minIOPS': 2000} - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(force_error=True, where=['modify_exception']) - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - message = 'Error updating qos policy: %s' % POLICY_NAME - assert exc.value.args[0]['msg'].startswith(message) - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_delete_qos_policy_exception(self, mock_create_sf_connection): - ''' deleting a qos policy can raise an exception ''' - args = dict(self.ARGS) - args['state'] = 'absent' - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(force_error=True, where=['delete_exception']) - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - 
my_obj.apply() - print(exc.value.args[0]) - message = 'Error deleting qos policy: %s' % POLICY_NAME - assert exc.value.args[0]['msg'].startswith(message) - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_check_error_reporting_on_missing_qos_option(self, mock_create_sf_connection): - ''' report error if qos option is not given on create ''' - args = dict(self.ARGS) - args['name'] += '_1' # new name to force a create - args.pop('qos') - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - message = "Error creating qos policy: %s, 'qos:' option is required" % args['name'] - assert exc.value.args[0]['msg'] == message - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_check_error_reporting_on_missing_from_name_policy(self, mock_create_sf_connection): - ''' report error if qos policy to rename does not exist ''' - args = dict(self.ARGS) - args['name'] += '_1' # new name to force a create - args['from_name'] = 'something_not_likely_to_exist' - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - message = "Error renaming qos policy, no existing policy with name/id: %s" % args['from_name'] - assert exc.value.args[0]['msg'] == message diff --git a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_template.py b/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_template.py deleted file mode 100644 index 7dc6e2d6b..000000000 --- 
a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_template.py +++ /dev/null @@ -1,138 +0,0 @@ -''' unit test for Ansible module: na_elementsw_account.py ''' - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type -import json -import pytest - -from ansible_collections.netapp.elementsw.tests.unit.compat import unittest -from ansible_collections.netapp.elementsw.tests.unit.compat.mock import patch -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils - -if not netapp_utils.has_sf_sdk(): - pytestmark = pytest.mark.skip('skipping as missing required SolidFire Python SDK') - -from ansible_collections.netapp.elementsw.plugins.modules.na_elementsw_account \ - import ElementSWAccount as my_module # module under test - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - pass - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - pass - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -ADD_ERROR = 'some_error_in_add_account' - - -class MockSFConnection(object): - ''' mock connection to ElementSW host ''' - - class 
Bunch(object): # pylint: disable=too-few-public-methods - ''' create object with arbitrary attributes ''' - def __init__(self, **kw): - ''' called with (k1=v1, k2=v2), creates obj.k1, obj.k2 with values v1, v2 ''' - setattr(self, '__dict__', kw) - - def __init__(self, force_error=False, where=None): - ''' save arguments ''' - self.force_error = force_error - self.where = where - -# TODO: replace list_accounts and add_account as needed - def list_accounts(self, *args, **kwargs): # pylint: disable=unused-argument - ''' build account list: account.username, account.account_id ''' - accounts = list() - account_list = self.Bunch(accounts=accounts) - return account_list - - def add_account(self, *args, **kwargs): # pylint: disable=unused-argument - ''' We don't check the return code, but could force an exception ''' - if self.force_error and 'add' in self.where: - # The module does not check for a specific exception :( - raise OSError(ADD_ERROR) - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - my_module() - print('Info: %s' % exc.value.args[0]['msg']) - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_ensure_command_called(self, mock_create_sf_connection): - ''' a more interesting test ''' - set_module_args({ - 'state': 'present', - 'element_username': 'element_username', - 'hostname': 'hostname', - 'username': 'username', - 'password': 'password', - }) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = 
my_module() - with pytest.raises(AnsibleExitJson) as exc: - # It may not be a good idea to start with apply - # More atomic methods can be easier to mock - my_obj.apply() - print(exc.value.args[0]) - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_check_error_reporting_on_add_exception(self, mock_create_sf_connection): - ''' a more interesting test ''' - set_module_args({ - 'state': 'present', - 'element_username': 'element_username', - 'hostname': 'hostname', - 'username': 'username', - 'password': 'password', - }) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(force_error=True, where=['add']) - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - # It may not be a good idea to start with apply - # More atomic methods can be easier to mock - # apply() is calling list_accounts() and add_account() - my_obj.apply() - print(exc.value.args[0]) - message = 'Error creating account element_username: %s' % ADD_ERROR - assert exc.value.args[0]['msg'] == message diff --git a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_vlan.py b/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_vlan.py deleted file mode 100644 index e2dc51f79..000000000 --- a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_vlan.py +++ /dev/null @@ -1,343 +0,0 @@ -''' unit test for Ansible module: na_elementsw_account.py ''' - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type -import json -import pytest - -from ansible_collections.netapp.elementsw.tests.unit.compat import unittest -from ansible_collections.netapp.elementsw.tests.unit.compat.mock import patch, Mock -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -import 
ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils - -if not netapp_utils.has_sf_sdk(): - pytestmark = pytest.mark.skip('skipping as missing required SolidFire Python SDK') - -from ansible_collections.netapp.elementsw.plugins.modules.na_elementsw_vlan \ - import ElementSWVlan as vlan # module under test - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - pass - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - pass - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -ADD_ERROR = 'some_error_in_add_account' - - -class MockSFConnection(object): - ''' mock connection to ElementSW host ''' - - class Bunch(object): # pylint: disable=too-few-public-methods - ''' create object with arbitrary attributes ''' - def __init__(self, **kw): - ''' called with (k1=v1, k2=v2), creates obj.k1, obj.k2 with values v1, v2 ''' - setattr(self, '__dict__', kw) - - class Vlan(object): - def __init__(self, entries): - self.__dict__.update(entries) - - def __init__(self, force_error=False, where=None): - ''' save arguments ''' - self.force_error = force_error - self.where = where - - def list_virtual_networks(self, virtual_network_tag=None): # pylint: disable=unused-argument - ''' list of 
vlans ''' - if virtual_network_tag == '1': - add1 = self.Bunch( - start='2.2.2.2', - size=4 - ) - add2 = self.Bunch( - start='3.3.3.3', - size=4 - ) - vlan = self.Bunch( - attributes={'key': 'value', 'config-mgmt': 'ansible', 'event-source': 'na_elementsw_vlan'}, - name="test", - address_blocks=[ - add1, - add2 - ], - svip='192.168.1.2', - gateway='0.0.0.0', - netmask='255.255.248.0', - namespace=False - ) - vlans = self.Bunch( - virtual_networks=[vlan] - ) - else: - vlans = self.Bunch( - virtual_networks=[] - ) - return vlans - - def add_virtual_network(self, virtual_network_tag=None, **create): # pylint: disable=unused-argument - ''' We don't check the return code, but could force an exception ''' - if self.force_error and 'add' in self.where: - # The module does not check for a specific exception :( - raise OSError(ADD_ERROR) - - def remove_virtual_network(self, virtual_network_tag=None): # pylint: disable=unused-argument - ''' We don't check the return code, but could force an exception ''' - if self.force_error and 'remove' in self.where: - # The module does not check for a specific exception :( - raise OSError(ADD_ERROR) - - def modify_virtual_network(self, virtual_network_tag=None, **modify): # pylint: disable=unused-argument - ''' We don't check the return code, but could force an exception ''' - if self.force_error and 'modify' in self.where: - # The module does not check for a specific exception :( - raise OSError(ADD_ERROR) - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - vlan() - print('Info: %s' % exc.value.args[0]['msg']) - - def 
mock_args(self): - args = { - 'state': 'present', - 'name': 'test', - 'vlan_tag': 1, - 'address_blocks': [ - {'start': '192.168.1.2', 'size': 5} - ], - 'hostname': 'hostname', - 'username': 'username', - 'password': 'password', - 'netmask': '255.255.248.0', - 'gateway': '0.0.0.0', - 'namespace': False, - 'svip': '192.168.1.2' - } - return dict(args) - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp_elementsw_module.NaElementSWModule.set_element_attributes') - def test_successful_create(self, mock_set_attributes, mock_create_sf_connection): - ''' successful create''' - mock_set_attributes.return_value = {'key': 'new_value'} - data = self.mock_args() - data['vlan_tag'] = '3' - set_module_args(data) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = vlan() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_successful_delete(self, mock_create_sf_connection): - ''' successful delete''' - data = self.mock_args() - data['state'] = 'absent' - set_module_args(data) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = vlan() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_successful_modify(self, mock_create_sf_connection): - ''' successful modify''' - data = self.mock_args() - data['svip'] = '3.4.5.6' - set_module_args(data) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = vlan() - with 
pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - @patch('ansible_collections.netapp.elementsw.plugins.modules.na_elementsw_vlan.ElementSWVlan.get_network_details') - def test_successful_modify_address_blocks_same_length(self, mock_get, mock_create_sf_connection): - ''' successful modify''' - mock_get.return_value = { - 'address_blocks': [ - {'start': '10.10.10.20', 'size': 5}, - {'start': '10.10.10.40', 'size': 5} - ] - } - data = self.mock_args() - data['address_blocks'] = [{'start': '10.10.10.20', 'size': 5}, - {'start': '10.20.10.50', 'size': 5}] - set_module_args(data) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = vlan() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - @patch('ansible_collections.netapp.elementsw.plugins.modules.na_elementsw_vlan.ElementSWVlan.get_network_details') - def test_successful_modify_address_blocks_different_length_1(self, mock_get, mock_create_sf_connection): - ''' successful modify''' - mock_get.return_value = { - 'address_blocks': [ - {'start': '10.10.10.20', 'size': 5}, - {'start': '10.20.10.30', 'size': 5} - ] - } - data = self.mock_args() - data['address_blocks'] = [{'start': '10.10.10.20', 'size': 5}, - {'start': '10.20.10.30', 'size': 5}, - {'start': '10.20.10.50', 'size': 5}] - set_module_args(data) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = vlan() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - 
@patch('ansible_collections.netapp.elementsw.plugins.modules.na_elementsw_vlan.ElementSWVlan.get_network_details') - def test_successful_modify_address_blocks_different_length_2(self, mock_get, mock_create_sf_connection): - ''' successful modify''' - mock_get.return_value = { - 'address_blocks': [ - {'start': '10.10.10.20', 'size': 5}, - {'start': '10.20.10.30', 'size': 5}, - {'start': '10.20.10.40', 'size': 5} - ] - } - data = self.mock_args() - data['address_blocks'] = [{'start': '10.10.10.20', 'size': 5}, - {'start': '10.20.10.40', 'size': 5}, - {'start': '10.20.10.30', 'size': 5}] - set_module_args(data) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = vlan() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - assert not exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - @patch('ansible_collections.netapp.elementsw.plugins.modules.na_elementsw_vlan.ElementSWVlan.get_network_details') - def test_successful_modify_address_blocks_different_length_3(self, mock_get, mock_create_sf_connection): - ''' successful modify''' - mock_get.return_value = { - 'address_blocks': [ - {'start': '10.10.10.20', 'size': 5}, - {'start': '10.10.10.30', 'size': 5}, - {'start': '10.20.10.40', 'size': 5} - ] - } - data = self.mock_args() - data['address_blocks'] = [{'start': '10.10.10.20', 'size': 5}, - {'start': '10.20.10.40', 'size': 5}, - {'start': '10.20.10.30', 'size': 5}] - set_module_args(data) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = vlan() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_helper_validate_keys(self, mock_create_sf_connection): - '''test 
validate_keys()''' - data = self.mock_args() - del data['svip'] - set_module_args(data) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = vlan() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.validate_keys() - msg = "One or more required fields ['address_blocks', 'svip', 'netmask', 'name'] for creating VLAN is missing" - assert exc.value.args[0]['msg'] == msg - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_successful_modify_idempotent(self, mock_create_sf_connection): - ''' successful modify''' - data = self.mock_args() - data['address_blocks'] = [{'start': '2.2.2.2', 'size': 4}, - {'start': '3.3.3.3', 'size': 4}] - set_module_args(data) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = vlan() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - assert not exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_successful_modify_attribute_value(self, mock_create_sf_connection): - ''' successful modify''' - data = self.mock_args() - data['address_blocks'] = [{'start': '2.2.2.2', 'size': 4}, - {'start': '3.3.3.3', 'size': 4}] - data['attributes'] = {'key': 'value2'} - set_module_args(data) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = vlan() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_successful_modify_attribute_key(self, mock_create_sf_connection): - ''' successful modify''' - data = self.mock_args() - data['address_blocks'] = [{'start': '2.2.2.2', 'size': 4}, - {'start': '3.3.3.3', 'size': 
4}] - data['attributes'] = {'key2': 'value2'} - set_module_args(data) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = vlan() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - assert exc.value.args[0]['changed'] diff --git a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_volume.py b/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_volume.py deleted file mode 100644 index 926dda90b..000000000 --- a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules/test_na_elementsw_volume.py +++ /dev/null @@ -1,364 +0,0 @@ -''' unit test for Ansible module: na_elementsw_volume.py ''' - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - -import json -import pytest - -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -from ansible_collections.netapp.elementsw.tests.unit.compat import unittest -from ansible_collections.netapp.elementsw.tests.unit.compat.mock import patch -import ansible_collections.netapp.elementsw.plugins.module_utils.netapp as netapp_utils - -if not netapp_utils.has_sf_sdk(): - pytestmark = pytest.mark.skip('skipping as missing required SolidFire Python SDK') - -from ansible_collections.netapp.elementsw.plugins.modules.na_elementsw_volume \ - import ElementSWVolume as my_module # module under test - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - - -def exit_json(*args, **kwargs): # 
pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -CREATE_ERROR = 'create', 'some_error_in_create_volume' -MODIFY_ERROR = 'modify', 'some_error_in_modify_volume' -DELETE_ERROR = 'delete', 'some_error_in_delete_volume' - -POLICY_ID = 888 -POLICY_NAME = 'element_qos_policy_name' -VOLUME_ID = 777 -VOLUME_NAME = 'element_volume_name' - - -class MockSFConnection(object): - ''' mock connection to ElementSW host ''' - - class Bunch(object): # pylint: disable=too-few-public-methods - ''' create object with arbitrary attributes ''' - def __init__(self, **kw): - ''' called with (k1=v1, k2=v2), creates obj.k1, obj.k2 with values v1, v2 ''' - setattr(self, '__dict__', kw) - - def __init__(self, force_error=False, where=None, with_qos_policy_id=True): - ''' save arguments ''' - self.force_error = force_error - self.where = where - self.with_qos_policy_id = with_qos_policy_id - - def list_qos_policies(self, *args, **kwargs): # pylint: disable=unused-argument - ''' build qos_policy list ''' - qos_policy_name = POLICY_NAME - qos = self.Bunch(min_iops=1000, max_iops=20000, burst_iops=20000) - qos_policy = self.Bunch(name=qos_policy_name, qos_policy_id=POLICY_ID, qos=qos) - qos_policy_1 = self.Bunch(name=qos_policy_name + '_1', qos_policy_id=POLICY_ID + 1, qos=qos) - qos_policies = [qos_policy, qos_policy_1] - qos_policy_list = self.Bunch(qos_policies=qos_policies) - return qos_policy_list - - def list_volumes_for_account(self, *args, **kwargs): # pylint: disable=unused-argument - ''' build volume list: volume.name, volume.id ''' - volume = self.Bunch(name=VOLUME_NAME, volume_id=VOLUME_ID, delete_time='') - volumes = 
[volume] - volume_list = self.Bunch(volumes=volumes) - return volume_list - - def list_volumes(self, *args, **kwargs): # pylint: disable=unused-argument - ''' build volume details: volume.name, volume.id ''' - if self.with_qos_policy_id: - qos_policy_id = POLICY_ID - else: - qos_policy_id = None - qos = self.Bunch(min_iops=1000, max_iops=20000, burst_iops=20000) - volume = self.Bunch(name=VOLUME_NAME, volume_id=VOLUME_ID, delete_time='', access='rw', - account_id=1, qos=qos, qos_policy_id=qos_policy_id, total_size=1000000000, - attributes={'config-mgmt': 'ansible', 'event-source': 'na_elementsw_volume'} - ) - volumes = [volume] - volume_list = self.Bunch(volumes=volumes) - return volume_list - - def get_account_by_name(self, *args, **kwargs): # pylint: disable=unused-argument - ''' returns account_id ''' - if self.force_error and 'get_account_id' in self.where: - account_id = None - else: - account_id = 1 - account = self.Bunch(account_id=account_id) - result = self.Bunch(account=account) - return result - - def create_volume(self, *args, **kwargs): # pylint: disable=unused-argument - ''' We don't check the return code, but could force an exception ''' - if self.force_error and 'create_exception' in self.where: - raise netapp_utils.solidfire.common.ApiServerError(*CREATE_ERROR) - - def modify_volume(self, *args, **kwargs): # pylint: disable=unused-argument - ''' We don't check the return code, but could force an exception ''' - print("modify: %s, %s " % (repr(args), repr(kwargs))) - if self.force_error and 'modify_exception' in self.where: - raise netapp_utils.solidfire.common.ApiServerError(*MODIFY_ERROR) - - def delete_volume(self, *args, **kwargs): # pylint: disable=unused-argument - ''' We don't check the return code, but could force an exception ''' - if self.force_error and 'delete_exception' in self.where: - raise netapp_utils.solidfire.common.ApiServerError(*DELETE_ERROR) - - def purge_deleted_volume(self, *args, **kwargs): # pylint: disable=unused-argument 
- ''' We don't check the return code, but could force an exception ''' - if self.force_error and 'delete_exception' in self.where: - raise netapp_utils.solidfire.common.ApiServerError(*DELETE_ERROR) - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - ARGS = { - 'state': 'present', - 'name': VOLUME_NAME, - 'account_id': 'element_account_id', - 'qos': {'minIOPS': 1000, 'maxIOPS': 20000, 'burstIOPS': 20000}, - 'qos_policy_name': POLICY_NAME, - 'size': 1, - 'enable512e': True, - 'hostname': 'hostname', - 'username': 'username', - 'password': 'password', - } - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - my_module() - print('Info: %s' % exc.value.args[0]['msg']) - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_add_volume(self, mock_create_sf_connection): - ''' adding a volume ''' - args = dict(self.ARGS) # deep copy as other tests can modify args - args['name'] += '_1' # new name to force a create - args.pop('qos') # parameters are mutually exclusive: qos|qos_policy_name - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_add_or_modify_volume_idempotent_qos_policy(self, mock_create_sf_connection): - ''' adding a volume ''' - args = dict(self.ARGS) - args.pop('qos') # parameters are 
mutually exclusive: qos|qos_policy_name - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert not exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_add_or_modify_volume_idempotent_qos(self, mock_create_sf_connection): - ''' adding a volume ''' - args = dict(self.ARGS) - args.pop('qos_policy_name') # parameters are mutually exclusive: qos|qos_policy_name - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(with_qos_policy_id=False) - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert not exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_delete_volume(self, mock_create_sf_connection): - ''' removing a volume ''' - args = dict(self.ARGS) - args['state'] = 'absent' - args.pop('qos') # parameters are mutually exclusive: qos|qos_policy_name - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_delete_volume_idempotent(self, mock_create_sf_connection): - ''' removing a volume ''' - args = dict(self.ARGS) - args['state'] = 'absent' - args['name'] += '_1' # new name to force idempotency - args.pop('qos') # parameters are mutually exclusive: qos|qos_policy_name - set_module_args(args) - 
# my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert not exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_modify_volume_qos(self, mock_create_sf_connection): - ''' modifying a volume ''' - args = dict(self.ARGS) - args['qos'] = {'minIOPS': 2000} - args.pop('qos_policy_name') # parameters are mutually exclusive: qos|qos_policy_name - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(with_qos_policy_id=False) - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_modify_volume_qos_policy_to_qos(self, mock_create_sf_connection): - ''' modifying a volume ''' - args = dict(self.ARGS) - args['qos'] = {'minIOPS': 2000} - args.pop('qos_policy_name') # parameters are mutually exclusive: qos|qos_policy_name - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_modify_volume_qos_policy(self, mock_create_sf_connection): - ''' modifying a volume ''' - args = dict(self.ARGS) - args['qos_policy_name'] += '_1' - args.pop('qos') # parameters are mutually exclusive: qos|qos_policy_name - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - 
mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_modify_volume_qos_to_qos_policy(self, mock_create_sf_connection): - ''' modifying a volume ''' - args = dict(self.ARGS) - args.pop('qos') # parameters are mutually exclusive: qos|qos_policy_name - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(with_qos_policy_id=False) - my_obj = my_module() - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_create_volume_exception(self, mock_create_sf_connection): - ''' creating a volume can raise an exception ''' - args = dict(self.ARGS) - args['name'] += '_1' # new name to force a create - args.pop('qos') # parameters are mutually exclusive: qos|qos_policy_name - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(force_error=True, where=['create_exception']) - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - message = 'Error provisioning volume: %s' % args['name'] - assert exc.value.args[0]['msg'].startswith(message) - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_modify_volume_exception(self, mock_create_sf_connection): - ''' modifying a volume can raise an exception ''' - args = dict(self.ARGS) - args['qos'] = {'minIOPS': 2000} - args.pop('qos_policy_name') # parameters are mutually exclusive: qos|qos_policy_name - 
set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(force_error=True, where=['modify_exception']) - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - message = 'Error updating volume: %s' % VOLUME_ID - assert exc.value.args[0]['msg'].startswith(message) - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_delete_volume_exception(self, mock_create_sf_connection): - ''' deleting a volume can raise an exception ''' - args = dict(self.ARGS) - args['state'] = 'absent' - args.pop('qos') # parameters are mutually exclusive: qos|qos_policy_name - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection(force_error=True, where=['delete_exception']) - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - message = 'Error deleting volume: %s' % VOLUME_ID - assert exc.value.args[0]['msg'].startswith(message) - - @patch('ansible_collections.netapp.elementsw.plugins.module_utils.netapp.create_sf_connection') - def test_check_error_reporting_on_non_existent_qos_policy(self, mock_create_sf_connection): - ''' report error if qos option is not given on create ''' - args = dict(self.ARGS) - args['name'] += '_1' # new name to force a create - args.pop('qos') - args['qos_policy_name'] += '_2' - set_module_args(args) - # my_obj.sfe will be assigned a MockSFConnection object: - mock_create_sf_connection.return_value = MockSFConnection() - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.apply() - print(exc.value.args[0]) - message = "Cannot find qos policy with name/id: %s" % args['qos_policy_name'] - assert exc.value.args[0]['msg'] == message diff --git 
a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules_utils/test_netapp_module.py b/ansible_collections/netapp/elementsw/tests/unit/plugins/modules_utils/test_netapp_module.py deleted file mode 100644 index 171a7bae5..000000000 --- a/ansible_collections/netapp/elementsw/tests/unit/plugins/modules_utils/test_netapp_module.py +++ /dev/null @@ -1,149 +0,0 @@ -# Copyright (c) 2018 NetApp -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -''' unit tests for module_utils netapp_module.py ''' -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -from ansible_collections.netapp.elementsw.tests.unit.compat import unittest -from ansible_collections.netapp.elementsw.plugins.module_utils.netapp_module import NetAppModule as na_helper - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - def test_get_cd_action_create(self): - ''' validate cd_action for create ''' - current = None - desired = {'state': 'present'} - my_obj = na_helper() - result = my_obj.get_cd_action(current, desired) - assert result == 'create' - - def test_get_cd_action_delete(self): - ''' validate cd_action for delete ''' - current = {'state': 'absent'} - desired = {'state': 'absent'} - my_obj = na_helper() - result = my_obj.get_cd_action(current, desired) - assert result == 'delete' - - def test_get_cd_action(self): - ''' validate cd_action for returning None ''' - current = None - desired = {'state': 'absent'} - my_obj = na_helper() - result = my_obj.get_cd_action(current, desired) - assert result is None - - def test_get_modified_attributes_for_no_data(self): - ''' validate modified attributes when current is None ''' - current = None - desired = {'name': 'test'} - my_obj = na_helper() - result = my_obj.get_modified_attributes(current, desired) - assert result == {} - - def test_get_modified_attributes(self): - ''' validate modified attributes ''' - current = {'name': ['test', 
'abcd', 'xyz', 'pqr'], 'state': 'present'} - desired = {'name': ['abcd', 'abc', 'xyz', 'pqr'], 'state': 'absent'} - my_obj = na_helper() - result = my_obj.get_modified_attributes(current, desired) - assert result == desired - - def test_get_modified_attributes_for_intersecting_mixed_list(self): - ''' validate modified attributes for list diff ''' - current = {'name': [2, 'four', 'six', 8]} - desired = {'name': ['a', 8, 'ab', 'four', 'abcd']} - my_obj = na_helper() - result = my_obj.get_modified_attributes(current, desired, True) - assert result == {'name': ['a', 'ab', 'abcd']} - - def test_get_modified_attributes_for_intersecting_list(self): - ''' validate modified attributes for list diff ''' - current = {'name': ['two', 'four', 'six', 'eight']} - desired = {'name': ['a', 'six', 'ab', 'four', 'abc']} - my_obj = na_helper() - result = my_obj.get_modified_attributes(current, desired, True) - assert result == {'name': ['a', 'ab', 'abc']} - - def test_get_modified_attributes_for_nonintersecting_list(self): - ''' validate modified attributes for list diff ''' - current = {'name': ['two', 'four', 'six', 'eight']} - desired = {'name': ['a', 'ab', 'abd']} - my_obj = na_helper() - result = my_obj.get_modified_attributes(current, desired, True) - assert result == {'name': ['a', 'ab', 'abd']} - - def test_get_modified_attributes_for_list_of_dicts_no_data(self): - ''' validate modified attributes for list diff ''' - current = None - desired = {'address_blocks': [{'start': '10.20.10.40', 'size': 5}]} - my_obj = na_helper() - result = my_obj.get_modified_attributes(current, desired, True) - assert result == {} - - def test_get_modified_attributes_for_intersecting_list_of_dicts(self): - ''' validate modified attributes for list diff ''' - current = {'address_blocks': [{'start': '10.10.10.23', 'size': 5}, {'start': '10.10.10.30', 'size': 5}]} - desired = {'address_blocks': [{'start': '10.10.10.23', 'size': 5}, {'start': '10.10.10.30', 'size': 5}, {'start': '10.20.10.40', 'size': 
5}]} - my_obj = na_helper() - result = my_obj.get_modified_attributes(current, desired, True) - assert result == {'address_blocks': [{'start': '10.20.10.40', 'size': 5}]} - - def test_get_modified_attributes_for_nonintersecting_list_of_dicts(self): - ''' validate modified attributes for list diff ''' - current = {'address_blocks': [{'start': '10.10.10.23', 'size': 5}, {'start': '10.10.10.30', 'size': 5}]} - desired = {'address_blocks': [{'start': '10.20.10.23', 'size': 5}, {'start': '10.20.10.30', 'size': 5}, {'start': '10.20.10.40', 'size': 5}]} - my_obj = na_helper() - result = my_obj.get_modified_attributes(current, desired, True) - assert result == {'address_blocks': [{'start': '10.20.10.23', 'size': 5}, {'start': '10.20.10.30', 'size': 5}, {'start': '10.20.10.40', 'size': 5}]} - - def test_get_modified_attributes_for_list_diff(self): - ''' validate modified attributes for list diff ''' - current = {'name': ['test', 'abcd'], 'state': 'present'} - desired = {'name': ['abcd', 'abc'], 'state': 'present'} - my_obj = na_helper() - result = my_obj.get_modified_attributes(current, desired, True) - assert result == {'name': ['abc']} - - def test_get_modified_attributes_for_no_change(self): - ''' validate modified attributes for same data in current and desired ''' - current = {'name': 'test'} - desired = {'name': 'test'} - my_obj = na_helper() - result = my_obj.get_modified_attributes(current, desired) - assert result == {} - - def test_is_rename_action_for_empty_input(self): - ''' validate rename action for input None ''' - source = None - target = None - my_obj = na_helper() - result = my_obj.is_rename_action(source, target) - assert result == source - - def test_is_rename_action_for_no_source(self): - ''' validate rename action when source is None ''' - source = None - target = 'test2' - my_obj = na_helper() - result = my_obj.is_rename_action(source, target) - assert result is False - - def test_is_rename_action_for_no_target(self): - ''' validate rename action when 
target is None ''' - source = 'test2' - target = None - my_obj = na_helper() - result = my_obj.is_rename_action(source, target) - assert result is True - - def test_is_rename_action(self): - ''' validate rename action ''' - source = 'test' - target = 'test2' - my_obj = na_helper() - result = my_obj.is_rename_action(source, target) - assert result is False diff --git a/ansible_collections/netapp/elementsw/tests/unit/requirements.txt b/ansible_collections/netapp/elementsw/tests/unit/requirements.txt deleted file mode 100644 index dde1958f1..000000000 --- a/ansible_collections/netapp/elementsw/tests/unit/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -solidfire-sdk-python ; python_version >= '2.7' diff --git a/ansible_collections/netapp/um_info/.github/ISSUE_TEMPLATE/bug_report.yml b/ansible_collections/netapp/um_info/.github/ISSUE_TEMPLATE/bug_report.yml deleted file mode 100644 index 71c875355..000000000 --- a/ansible_collections/netapp/um_info/.github/ISSUE_TEMPLATE/bug_report.yml +++ /dev/null @@ -1,210 +0,0 @@ ---- -name: 🐛 Bug report -description: Create a report to help us improve - -body: -- type: markdown - attributes: - value: > - **Thank you for wanting to report a bug in netapp.um_info!** - - - ⚠ - Verify first that your issue is not [already reported on - GitHub][issue search] and keep in mind that we may have to keep - the current behavior because [every change breaks someone's - workflow][XKCD 1172]. - We try to be mindful about this. - - Also test if the latest release and devel branch are affected too. - - - **Tip:** If you are seeking community support, please consider - [Join our Slack community][ML||IRC]. - - - - [ML||IRC]: - https://join.slack.com/t/netapppub/shared_invite/zt-njcjx2sh-1VR2mEDvPcJAmPutOnP~mg - - [issue search]: ../search?q=is%3Aissue&type=issues - - [XKCD 1172]: https://xkcd.com/1172/ - - -- type: textarea - attributes: - label: Summary - description: Explain the problem briefly below. 
- placeholder: >- - When I try to do X with netapp.um_info from the devel branch on GitHub, Y - breaks in a way Z under the env E. Here are all the details I know - about this problem... - validations: - required: true - -- type: input - attributes: - label: Component Name - description: > - Write the short name of the rst file, module, plugin, task or - feature below, *use your best guess if unsure*. - - - **Tip:** Cannot find it in this repository? Please be advised that - the source for some parts of the documentation are hosted outside - of this repository. If the page you are reporting describes - modules/plugins/etc that are not officially supported by the - Ansible Core Engineering team, there is a good chance that it is - coming from one of the [Ansible Collections maintained by the - community][collections org]. If this is the case, please make sure - to file an issue under the appropriate project there instead. - - - [collections org]: /ansible-collections - placeholder: dnf, apt, yum, pip, user etc. - validations: - required: true - -- type: textarea - attributes: - label: Ansible Version - description: >- - Paste verbatim output from `ansible --version` below, under - the prompt line. Please don't wrap it with tripple backticks — your - whole input will be turned into a code snippet automatically. 
- render: console - value: | - $ ansible --version - placeholder: | - $ ansible --version - ansible [core 2.11.0b4.post0] (detached HEAD ref: refs/) last updated 2021/04/02 00:33:35 (GMT +200) - config file = None - configured module search path = ['~/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules'] - ansible python module location = ~/src/github/ansible/ansible/lib/ansible - ansible collection location = ~/.ansible/collections:/usr/share/ansible/collections - executable location = bin/ansible - python version = 3.9.0 (default, Oct 26 2020, 13:08:59) [GCC 10.2.0] - jinja version = 2.11.3 - libyaml = True - validations: - required: true - -- type: textarea - attributes: - label: UM_Info Collection Version - description: >- - UM_Info Collection Version. Run `ansible-galaxy collection` and copy the entire output - render: console - value: | - $ ansible-galaxy collection list - validations: - required: true - -- type: textarea - attributes: - label: Playbook - description: >- - The task from the playbook that is give you the issue - render: console - validations: - required: true - -- type: textarea - attributes: - label: Steps to Reproduce - description: | - Describe exactly how to reproduce the problem, using a minimal test-case. It would *really* help us understand your problem if you could also pased any playbooks, configs and commands you used. - - **HINT:** You can paste https://gist.github.com links for larger files. - value: | - <!--- Paste example playbooks or commands between quotes below --> - ```yaml (paste below) - - ``` - placeholder: | - 1. Implement the following playbook: - - ```yaml - --- - # ping.yml - - hosts: all - gather_facts: false - tasks: - - ping: - ... - ``` - 2. Then run `ANSIBLE_DEBUG=1 ansible-playbook ping.yml -vvvvv` - 3. An error occurs. - validations: - required: true - -- type: textarea - attributes: - label: Expected Results - description: >- - Describe what you expected to happen when running the steps above. 
- placeholder: >- - I expected X to happen because I assumed Y and was shocked - that it did not. - validations: - required: true - -- type: textarea - attributes: - label: Actual Results - description: | - Describe what actually happened. If possible run with extra verbosity (`-vvvv`). - - Paste verbatim command output and don't wrap it with tripple backticks — your - whole input will be turned into a code snippet automatically. - render: console - placeholder: >- - Certificate did not match expected hostname: files.pythonhosted.org. Certificate: {'notAfter': 'Apr 28 19:20:25 2021 GMT', 'subjectAltName': ((u'DNS', 'r.ssl.fastly.net'), (u'DNS', '*.catchpoint.com'), (u'DNS', '*.cnn.io'), (u'DNS', '*.dollarshaveclub.com'), (u'DNS', '*.eater.com'), (u'DNS', '*.fastly.picmonkey.com'), (u'DNS', '*.files.saymedia-content.com'), (u'DNS', '*.ft.com'), (u'DNS', '*.meetupstatic.com'), (u'DNS', '*.nfl.com'), (u'DNS', '*.pagar.me'), (u'DNS', '*.picmonkey.com'), (u'DNS', '*.realself.com'), (u'DNS', '*.sbnation.com'), (u'DNS', '*.shakr.com'), (u'DNS', '*.streamable.com'), (u'DNS', '*.surfly.com'), (u'DNS', '*.theverge.com'), (u'DNS', '*.thrillist.com'), (u'DNS', '*.vox-cdn.com'), (u'DNS', '*.vox.com'), (u'DNS', '*.voxmedia.com'), (u'DNS', 'eater.com'), (u'DNS', 'ft.com'), (u'DNS', 'i.gse.io'), (u'DNS', 'picmonkey.com'), (u'DNS', 'realself.com'), (u'DNS', 'static.wixstatic.com'), (u'DNS', 'streamable.com'), (u'DNS', 'surfly.com'), (u'DNS', 'theverge.com'), (u'DNS', 'vox-cdn.com'), (u'DNS', 'vox.com'), (u'DNS', 'www.joyent.com')), 'subject': ((('countryName', u'US'),), (('stateOrProvinceName', u'California'),), (('localityName', u'San Francisco'),), (('organizationName', u'Fastly, Inc'),), (('commonName', u'r.ssl.fastly.net'),))} - Exception: - Traceback (most recent call last): - File "/usr/local/lib/python2.6/dist-packages/pip/basecommand.py", line 215, in main - status = self.run(options, args) - File "/usr/local/lib/python2.6/dist-packages/pip/commands/install.py", line 335, 
in run - wb.build(autobuilding=True) - File "/usr/local/lib/python2.6/dist-packages/pip/wheel.py", line 749, in build - self.requirement_set.prepare_files(self.finder) - File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 380, in prepare_files - ignore_dependencies=self.ignore_dependencies)) - File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 620, in _prepare_file - session=self.session, hashes=hashes) - File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 821, in unpack_url - hashes=hashes - File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 659, in unpack_http_url - hashes) - File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 853, in _download_http_url - stream=True, - File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 521, in get - return self.request('GET', url, **kwargs) - File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 386, in request - return super(PipSession, self).request(method, url, *args, **kwargs) - File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 508, in request - resp = self.send(prep, **send_kwargs) - File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 618, in send - r = adapter.send(request, **kwargs) - File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/cachecontrol/adapter.py", line 47, in send - resp = super(CacheControlAdapter, self).send(request, **kw) - File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/adapters.py", line 506, in send - raise SSLError(e, request=request) - SSLError: HTTPSConnectionPool(host='files.pythonhosted.org', port=443): Max retries exceeded with url: /packages/ef/ab/aa12712415809bf698e719b307419f953e25344e8f42d557533d7a02b276/netapp_lib-2020.7.16-py2-none-any.whl (Caused by SSLError(CertificateError("hostname 'files.pythonhosted.org' doesn't match either of 'r.ssl.fastly.net', 
'*.catchpoint.com', '*.cnn.io', '*.dollarshaveclub.com', '*.eater.com', '*.fastly.picmonkey.com', '*.files.saymedia-content.com', '*.ft.com', '*.meetupstatic.com', '*.nfl.com', '*.pagar.me', '*.picmonkey.com', '*.realself.com', '*.sbnation.com', '*.shakr.com', '*.streamable.com', '*.surfly.com', '*.theverge.com', '*.thrillist.com', '*.vox-cdn.com', '*.vox.com', '*.voxmedia.com', 'eater.com', 'ft.com', 'i.gse.io', 'picmonkey.com', 'realself.com', 'static.wixstatic.com', 'streamable.com', 'surfly.com', 'theverge.com', 'vox-cdn.com', 'vox.com', 'www.joyent.com'",),)) - ERROR: Command "/usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 2. - ERROR: Command "docker exec d47eb360db4ce779c1f690db964655b76e68895c4360ff252c46fe7fe6f5c75a /usr/bin/env ANSIBLE_TEST_CONTENT_ROOT=/root/ansible_collections/netapp/ontap LC_ALL=en_US.UTF-8 /usr/bin/python3.6 /root/ansible/bin/ansible-test units --metadata tests/output/.tmp/metadata-9i2qfrcl.json --truncate 200 --redact --color yes --requirements --python default --requirements-mode only" returned exit status 1. - validations: - required: true - - -- type: markdown - attributes: - value: > - *One last thing...* - - - Thank you for your collaboration! - - -... 
diff --git a/ansible_collections/netapp/um_info/.github/ISSUE_TEMPLATE/feature_request.yml b/ansible_collections/netapp/um_info/.github/ISSUE_TEMPLATE/feature_request.yml deleted file mode 100644 index 126b4b9cd..000000000 --- a/ansible_collections/netapp/um_info/.github/ISSUE_TEMPLATE/feature_request.yml +++ /dev/null @@ -1,100 +0,0 @@ ---- -name: ✨ Feature request -description: Suggest an idea for this project - -body: -- type: markdown - attributes: - value: > - **Thank you for wanting to suggest a feature for netapp.um_info!** - - 💡 - Before you go ahead with your request, please first consider if it - would be useful for majority of the netapp.um_info users. As a - general rule of thumb, any feature that is only of interest to a - small sub group should be [implemented in a third-party Ansible - Collection][contribute to collections] or maybe even just your - project alone. Be mindful of the fact that the essential - netapp.um_info features have a broad impact. - - - <details> - <summary> - ❗ Every change breaks someone's workflow. - </summary> - - - [![❗ Every change breaks someone's workflow. - ](https://imgs.xkcd.com/comics/workflow.png) - ](https://xkcd.com/1172/) - </details> - - - ⚠ - Verify first that your idea is not [already requested on - GitHub][issue search]. - - Also test if the main branch does not already implement this. - - -- type: textarea - attributes: - label: Summary - description: > - Describe the new feature/improvement you would like briefly below. - - - What's the problem this feature will solve? - - What are you trying to do, that you are unable to achieve - with netapp.um_info as it currently stands? - - - * Provide examples of real-world use cases that this would enable - and how it solves the problem you described. - - * How do you solve this now? - - * Have you tried to work around the problem using other tools? - - * Could there be a different approach to solving this issue? 
- - placeholder: >- - I am trying to do X with netapp.um_info from the devel branch on GitHub and - I think that implementing a feature Y would be very helpful for me and - every other user of netapp.um_info because of Z. - validations: - required: true - -- type: input - attributes: - label: Component Name - description: > - Write the short name of the module, plugin, task or feature below, - *use your best guess if unsure*. - - - [collections org]: /ansible-collections - placeholder: dnf, apt, yum, pip, user etc. - validations: - required: true - -- type: textarea - attributes: - label: Additional Information - description: | - Describe how the feature would be used, why it is needed and what it would solve. - - **HINT:** You can paste https://gist.github.com links for larger files. - value: | - <!--- Paste example playbooks or commands between quotes below --> - ```yaml (paste below) - - ``` - placeholder: >- - I asked on https://stackoverflow.com/.... and the community - advised me to do X, Y and Z. - validations: - required: true - -... diff --git a/ansible_collections/netapp/um_info/.github/workflows/coverage.yml b/ansible_collections/netapp/um_info/.github/workflows/coverage.yml deleted file mode 100644 index 39d5818c9..000000000 --- a/ansible_collections/netapp/um_info/.github/workflows/coverage.yml +++ /dev/null @@ -1,45 +0,0 @@ -name: NetApp.um_info Ansible Coverage - -on: - push: - pull_request: - schedule: - - cron: '0 6 * * *' - -jobs: - sanity: - name: Coverage on UM_INFO - runs-on: ubuntu-latest - - steps: - - name: Check out code - uses: actions/checkout@v2 - - - name: Set up Python - uses: actions/setup-python@v2 - with: - python-version: 3.8 - - - name: Install ansible stable-2.11 - run: pip install https://github.com/ansible/ansible/archive/stable-2.11.tar.gz --disable-pip-version-check - - - name: Make directory to make ansible-test happy - run: | - pwd - mkdir -p ansible_collections/netapp/um_info/ - rsync -av . 
ansible_collections/netapp/um_info/ --exclude ansible_collections/netapp/um_info/ - - - name: Run Unit Tests - run: ansible-test units --coverage --color --docker --python 3.8 - working-directory: ansible_collections/netapp/um_info/ - - # ansible-test support producing code coverage date - - name: Generate coverage report - run: ansible-test coverage xml -v --requirements --group-by command --group-by version - working-directory: ansible_collections/netapp/um_info/ - - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v2 - with: - working-directory: ansible_collections/netapp/um_info/ - verbose: true
\ No newline at end of file diff --git a/ansible_collections/netapp/um_info/.github/workflows/main.yml b/ansible_collections/netapp/um_info/.github/workflows/main.yml deleted file mode 100644 index 9294078a2..000000000 --- a/ansible_collections/netapp/um_info/.github/workflows/main.yml +++ /dev/null @@ -1,49 +0,0 @@ -name: NetApp.um_info Ansible CI - -on: - push: - pull_request: - schedule: - - cron: '0 6 * * *' - -jobs: - sanity: - name: Sanity (${{ matrix.ansible }} on Um_info - runs-on: ubuntu-latest - strategy: - matrix: - ansible: - - stable-2.9 - - stable-2.10 - - stable-2.11 - - stable-2.12 - - stable-2.13 - - devel - - steps: - - name: Check out code - uses: actions/checkout@v2 - - - name: Set up Python - uses: actions/setup-python@v2 - with: - # Ansible 2.14 requires 3.9 as a minimum - python-version: 3.9 - - - name: Install ansible (${{ matrix.ansible }}) - run: pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible }}.tar.gz --disable-pip-version-check - - - name: Make directory to make ansible-test happy - run: | - pwd - mkdir -p ansible_collections/netapp/um_info/ - rsync -av . ansible_collections/netapp/um_info/ --exclude ansible_collections/netapp/um_info/ - - - - name: Run sanity tests Um_info - run: ansible-test sanity --docker -v --color - working-directory: ansible_collections/netapp/um_info/ - - - name: Run Unit Tests - run: ansible-test units --docker -v --color - working-directory: ansible_collections/netapp/um_info/ diff --git a/ansible_collections/netapp/um_info/CHANGELOG.rst b/ansible_collections/netapp/um_info/CHANGELOG.rst deleted file mode 100644 index f5d538d11..000000000 --- a/ansible_collections/netapp/um_info/CHANGELOG.rst +++ /dev/null @@ -1,78 +0,0 @@ -==================================================== -NetApp Unified Manager Info Collection Release Notes -==================================================== - -.. 
contents:: Topics - - -v21.8.0 -======= - -Minor Changes -------------- - -- PR1 - allow usage of Ansible module group defaults - for Ansible 2.12+. - -v21.7.0 -======= - -Minor Changes -------------- - -- all modules - ability to trace API calls and responses. -- all modules - new ``max_records`` option to limit the amount of data in a single GET response. - -Bugfixes --------- - -- all modules - report error when connecting to a server that does not run AIQUM. -- all modules - return all records rather than the first 1000 records (mostly for volumes). -- rename na_um_list_volumes.p to na_um_list_volumes.py - -v21.6.0 -======= - -Minor Changes -------------- - -- na_um_list_aggregates has been renamed na_um_aggregates_info. -- na_um_list_clusters has been renamed na_um_clusters_info. -- na_um_list_nodes has been renamed na_um_nodes_info. -- na_um_list_svms has been renamed na_um_svms_info. -- na_um_list_volumes has been renamed na_um_volumes_info. - -v21.5.0 -======= - -Minor Changes -------------- - -- minor changes to meet Red Hat requirements to be certified. - -v20.7.0 -======= - -Minor Changes -------------- - -- na_um_list_aggregates - Now sort by performance_capacity.used -- na_um_list_nodes - Now sort by performance_capacity.used - -v20.6.0 -======= - -New Modules ------------ - -- netapp.um_info.na_um_list_volumes - NetApp Unified Manager list volumes. - -v20.5.0 -======= - -New Modules ------------ - -- netapp.um_info.na_um_list_aggregates - NetApp Unified Manager list aggregates. -- netapp.um_info.na_um_list_clusters - NetApp Unified Manager list cluster. -- netapp.um_info.na_um_list_nodes - NetApp Unified Manager list nodes. -- netapp.um_info.na_um_list_svms - NetApp Unified Manager list svms. 
diff --git a/ansible_collections/netapp/um_info/COPYING b/ansible_collections/netapp/um_info/COPYING deleted file mode 100644 index 94a9ed024..000000000 --- a/ansible_collections/netapp/um_info/COPYING +++ /dev/null @@ -1,674 +0,0 @@ - GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/> - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU General Public License is a free, copyleft license for -software and other kinds of works. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -the GNU General Public License is intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. 
You must make sure that they, too, receive -or can get the source code. And you must show them these terms so they -know their rights. - - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. - - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. - - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. -States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU General Public License. 
- - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. - - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. 
- - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. - - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. 
Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. 
- - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. - - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. 
This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. - - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. 
- - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. 
- - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. 
- - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. 
If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. - - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. 
If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. - - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. 
- - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. 
For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. - - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. - - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. 
- - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. 
You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. - - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single -combined work, and to convey the resulting work. 
The terms of this -License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. 
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. 
- - <one line to give the program's name and a brief idea of what it does.> - Copyright (C) <year> <name of author> - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see <http://www.gnu.org/licenses/>. - -Also add information on how to contact you by electronic and paper mail. - - If the program does terminal interaction, make it output a short -notice like this when it starts in an interactive mode: - - <program> Copyright (C) <year> <name of author> - This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, your program's commands -might be different; for a GUI interface, you would use an "about box". - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU GPL, see -<http://www.gnu.org/licenses/>. - - The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications with -the library. 
If this is what you want to do, use the GNU Lesser General -Public License instead of this License. But first, please read -<http://www.gnu.org/philosophy/why-not-lgpl.html>. diff --git a/ansible_collections/netapp/um_info/FILES.json b/ansible_collections/netapp/um_info/FILES.json deleted file mode 100644 index 769a883a4..000000000 --- a/ansible_collections/netapp/um_info/FILES.json +++ /dev/null @@ -1,467 +0,0 @@ -{ - "files": [ - { - "name": ".", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "requirements.txt", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "ec72420df5dfbdce4111f715c96338df3b7cb75f58e478d2449c9720e560de8c", - "format": 1 - }, - { - "name": "plugins", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "plugins/doc_fragments", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "plugins/doc_fragments/netapp.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "bd4a43b072697053c41ac2c6979513bd8fadd8c80eece1ca2a5454f24ecf85da", - "format": 1 - }, - { - "name": "plugins/module_utils", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "plugins/module_utils/netapp.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "a255d934e0f25750f739d26bc124f7542db92b11385d5a3f350409ed6ae3fc2f", - "format": 1 - }, - { - "name": "plugins/module_utils/netapp_module.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "cf7052585943d6e39a9b671538947f8de77411805f659c148267099603d26bef", - "format": 1 - }, - { - "name": "plugins/modules", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "plugins/modules/na_um_list_volumes.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "e155cc127f6b1fb1a7512a52b15077e8bcc71cd2bd36fceb8e5811b43fa6c647", - 
"format": 1 - }, - { - "name": "plugins/modules/na_um_list_nodes.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "c9dc5fee102f858e25be36bd6a4cbc72732fd4d87f5693d9dc9c070360b05b3b", - "format": 1 - }, - { - "name": "plugins/modules/na_um_svms_info.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "810aed45c718629fc7629558a6f1de69bcb30206dbe1b04b8aff4284512b910f", - "format": 1 - }, - { - "name": "plugins/modules/na_um_clusters_info.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "fa552cbf9cc2684c8378023e0bd8b338d33f8f826c05812addc5867830999b97", - "format": 1 - }, - { - "name": "plugins/modules/na_um_aggregates_info.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "b65dde8d0d786b63ea89d411024d5c36dc56fe45d9472eec7f5c1d3dba47fff8", - "format": 1 - }, - { - "name": "plugins/modules/na_um_list_clusters.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "fa552cbf9cc2684c8378023e0bd8b338d33f8f826c05812addc5867830999b97", - "format": 1 - }, - { - "name": "plugins/modules/na_um_list_svms.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "810aed45c718629fc7629558a6f1de69bcb30206dbe1b04b8aff4284512b910f", - "format": 1 - }, - { - "name": "plugins/modules/na_um_nodes_info.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "c9dc5fee102f858e25be36bd6a4cbc72732fd4d87f5693d9dc9c070360b05b3b", - "format": 1 - }, - { - "name": "plugins/modules/na_um_volumes_info.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "e155cc127f6b1fb1a7512a52b15077e8bcc71cd2bd36fceb8e5811b43fa6c647", - "format": 1 - }, - { - "name": "plugins/modules/na_um_list_aggregates.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "b65dde8d0d786b63ea89d411024d5c36dc56fe45d9472eec7f5c1d3dba47fff8", - "format": 1 - }, - { - "name": "tests", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": 
"tests/unit", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/unit/compat", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/unit/compat/unittest.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "cba95d18c5b39c6f49714eacf1ac77452c2e32fa087c03cf01aacd19ae597b0f", - "format": 1 - }, - { - "name": "tests/unit/compat/builtins.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "0ca4cac919e166b25e601e11acb01f6957dddd574ff0a62569cb994a5ecb63e1", - "format": 1 - }, - { - "name": "tests/unit/compat/__init__.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "format": 1 - }, - { - "name": "tests/unit/compat/mock.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "0af958450cf6de3fbafe94b1111eae8ba5a8dbe1d785ffbb9df81f26e4946d99", - "format": 1 - }, - { - "name": "tests/unit/requirements.txt", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "68a61b1d58a722f4ffabaa28da01c9837c93a582ea41c1bfb1c1fd54ea2d8fab", - "format": 1 - }, - { - "name": "tests/unit/plugins", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/unit/plugins/module_utils", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/unit/plugins/module_utils/test_netapp.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "a4de097829490ea8016a6227b340a27e23a38f40189f12f80217caa199c608ec", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_na_um_volumes_info.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": 
"b8a2d4ca0f304a588de4d642da415362f3b15b2926fa12a90117b58b9f71d6d9", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_na_um_clusters_info.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "11f9aa85947d440b6a647a2cc6be1cf16d93d69b08ed288a33fa8168836d2521", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_na_um_list_svms.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "7933824403b197ec756f540be054a5e2c75b5a3a28cf60280cd11493f4c4b235", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_na_um_list_aggregates.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "ba3687cb122aa7452f21052b5a6f26448df8356e92e4b78c20cce55c66ea3026", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_na_um_list_clusters.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "11f9aa85947d440b6a647a2cc6be1cf16d93d69b08ed288a33fa8168836d2521", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_na_um_nodes_info.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "cf4ded8134d30ed7b82769252addf2094d07b6bf3ec81e7aba0615b290558cfb", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_na_um_aggregates_info.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "ba3687cb122aa7452f21052b5a6f26448df8356e92e4b78c20cce55c66ea3026", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_na_um_list_volumes.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "b8a2d4ca0f304a588de4d642da415362f3b15b2926fa12a90117b58b9f71d6d9", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_na_um_svms_info.py", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "7933824403b197ec756f540be054a5e2c75b5a3a28cf60280cd11493f4c4b235", - "format": 1 - }, - { - "name": "tests/unit/plugins/modules/test_na_um_list_nodes.py", - "ftype": "file", - "chksum_type": "sha256", - 
"chksum_sha256": "cf4ded8134d30ed7b82769252addf2094d07b6bf3ec81e7aba0615b290558cfb", - "format": 1 - }, - { - "name": "meta", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "meta/execution-environment.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "db75f5fcae43fd2db36d3c9a004748dd1ec4165a6e2ebb36ada6943a8b440f4a", - "format": 1 - }, - { - "name": "meta/runtime.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "2fe9f7286aadaf2d0c4dbd2a0f118f155f564496bbc1bc742478ef7e8ece8269", - "format": 1 - }, - { - "name": "changelogs", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "changelogs/fragments", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-3962.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "f4f9cd51e1471197180cbedd7a89fc1ebbbd5f3daed2ac50a63e1df54e28c0b7", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-4087.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "549499c7518f654d08f85c8d2774506fa8206db9cfad997a620a874ba55a0b24", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-4059.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "a77590a8f2eefce57127281e067aa0a873c7dee6c29c64b9f24a6aa44ed559fc", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-2952.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "df2ee1a655d129670751fac00f64cb82d73b66b4b2e4babf1c03eef1958f6784", - "format": 1 - }, - { - "name": "changelogs/fragments/20.7.0.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "fc97a5a80d92a3fa6228c00d686e4ce5173facb9ce1d2282905883eb4bec385a", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-3920.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": 
"819a48b8af6f9be3dc5978af0e690830f15f3090cc9ce1e86532d726809cfb2c", - "format": 1 - }, - { - "name": "changelogs/fragments/DEVOPS-4416.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "4224db573f34caeeb956c8728eb343a47bc2729d898001a4c6a671b780dae1bf", - "format": 1 - }, - { - "name": "changelogs/config.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "603b6174cae76f0b6d21432d05ba48ec1332b3a24bb910a81d75b1d8c0942914", - "format": 1 - }, - { - "name": "changelogs/changelog.yaml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "dfd5a84377e87d5a796d1b6fe3c534fe0bceb63b8bc1dcd94a8bbf9ea9c86947", - "format": 1 - }, - { - "name": "README.md", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "69664a93902d708863a70abc626b771889dce8cd6682efc449bb01a0efcc9dca", - "format": 1 - }, - { - "name": "COPYING", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "8ceb4b9ee5adedde47b31e975c1d90c73ad27b6b165a1dcd80c7c545eb65b903", - "format": 1 - }, - { - "name": "metadata-29PbAy.json", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "424c5b110233e003275aaa5d6886c892f39c23b72a69bbf72e9a3172933691a6", - "format": 1 - }, - { - "name": ".github", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": ".github/workflows", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": ".github/workflows/coverage.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "9e4fe7f09274f317cf0b299f8c35a3df98f8d48e5014a7c7523c587593e1a63c", - "format": 1 - }, - { - "name": ".github/workflows/main.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "e7781e9cc514eb5cb80bbee9a821af8661cbd39542ddfe6aa59e811afbabdf13", - "format": 1 - }, - { - "name": ".github/ISSUE_TEMPLATE", - "ftype": "dir", - "chksum_type": null, - "chksum_sha256": null, - "format": 1 - }, - { - "name": 
".github/ISSUE_TEMPLATE/feature_request.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "faa5043f7628d0fa7975b9feb2c4d72e73965d0e6f072ca5fa13b00055f773e3", - "format": 1 - }, - { - "name": ".github/ISSUE_TEMPLATE/bug_report.yml", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "5b1080b0ad861868077511d5a8fa8b3a84c5abb2cfc13cf4fa10eeaffb2c0957", - "format": 1 - }, - { - "name": "CHANGELOG.rst", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "eff94a97af8456b8fcf2677bf5e05db854239b496bc787834c562f015937fbcf", - "format": 1 - } - ], - "format": 1 -}
\ No newline at end of file diff --git a/ansible_collections/netapp/um_info/MANIFEST.json b/ansible_collections/netapp/um_info/MANIFEST.json deleted file mode 100644 index 5683ca1df..000000000 --- a/ansible_collections/netapp/um_info/MANIFEST.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "collection_info": { - "namespace": "netapp", - "name": "um_info", - "version": "21.8.1", - "authors": [ - "NetApp Ansible Team <ng-ansibleteam@netapp.com>" - ], - "readme": "README.md", - "tags": [ - "storage", - "netapp", - "aiqum", - "um", - "ontap" - ], - "description": "NetApp Unified Manager(AIQUM 9.7) Collection", - "license": [], - "license_file": "COPYING", - "dependencies": {}, - "repository": "https://github.com/ansible-collections/netapp.um_info", - "documentation": null, - "homepage": "https://netapp.io/configuration-management-and-automation/", - "issues": "https://github.com/ansible-collections/netapp.um_info/issues" - }, - "file_manifest_file": { - "name": "FILES.json", - "ftype": "file", - "chksum_type": "sha256", - "chksum_sha256": "e33360247a1805f8191170f1cba1aa27a57e3abac652d447b398169adb14a626", - "format": 1 - }, - "format": 1 -}
\ No newline at end of file diff --git a/ansible_collections/netapp/um_info/README.md b/ansible_collections/netapp/um_info/README.md deleted file mode 100644 index c1ad29008..000000000 --- a/ansible_collections/netapp/um_info/README.md +++ /dev/null @@ -1,84 +0,0 @@ -[![Documentation](https://img.shields.io/badge/docs-brightgreen.svg)](https://docs.ansible.com/ansible/devel/collections/netapp/um_info/index.html) -![example workflow](https://github.com/ansible-collections/netapp.um_info/actions/workflows/main.yml/badge.svg) -[![codecov](https://codecov.io/gh/ansible-collections/netapp.um_info/branch/main/graph/badge.svg?token=weBYkksxSi)](https://codecov.io/gh/ansible-collections/netapp.um_info) -[![Discord](https://img.shields.io/discord/855068651522490400)](https://discord.gg/NetApp) - - -============================================================= - - netapp.um_info - - NetApp Unified Manager(AIQUM 9.7) Collection - - Copyright (c) 2020 NetApp, Inc. All rights reserved. - Specifications subject to change without notice. - -============================================================= -# Installation -```bash -ansible-galaxy collection install netapp.um_info -``` -To use Collection add the following to the top of your playbook, with out this you will be using Ansible 2.9 version of the module -``` -collections: - - netapp.um_info -``` - -# Module documentation -https://docs.ansible.com/ansible/devel/collections/netapp/um_info/ - -# Code of Conduct -This collection follows the [Ansible project's Code of Conduct](https://docs.ansible.com/ansible/devel/community/code_of_conduct.html). - -# Need help -Join our [Discord](https://discord.gg/NetApp) - -# Release Notes - -## 21.8.0 - -#### Minor changes - - all modules - enable usage of Ansible module group defaults - for Ansible 2.12+. - -## 21.7.0 - -#### Minor changes - - all modules - ability to trace API calls and responses. 
- - all modules - new `max_records` option to limit the amount of data in a single GET response. - -### Bux fixes - - all modules - report error when connecting to a server that does not run AIQUM. - - all modules - return all records rather than the first 1000 records (mostly for volumes). - - rename na_um_list_volumes.p to na_um_list_volumes.py. - -## 21.6.0 -### Minor changes -- na_um_list_aggregates has been renamed na_um_aggregates_info -- na_um_list_clusters has been renamed na_um_clusters_info -- na_um_list_nodes has been renamed na_um_nodes_info -- na_um_list_svms has been renamed na_um_svms_info -- na_um_list_volumes has been renamed na_um_volumes_info - -## 21.5.0 - -### Minor changes -- minor changes to meet Red Hat requirements to be certified. - -## 20.7.0 - -### Minor changes -- na_um_list_aggregates: Now sort by performance_capacity.used -- na_um_list_nodes: Now sort by performance_capacity.used - -## 20.6.0 - -### New Modules -- na_um_list_volumes: list volumes. - -## 20.5.0 - -### New Modules -- na_um_list_aggregates: list aggregates. -- na_um_list_clusters: list clusters. -- na_um_list_nodes: list nodes. -- na_um_list_svms: list svms. diff --git a/ansible_collections/netapp/um_info/changelogs/changelog.yaml b/ansible_collections/netapp/um_info/changelogs/changelog.yaml deleted file mode 100644 index c5d56b307..000000000 --- a/ansible_collections/netapp/um_info/changelogs/changelog.yaml +++ /dev/null @@ -1,72 +0,0 @@ -ancestor: null -releases: - 20.5.0: - modules: - - description: NetApp Unified Manager list aggregates. - name: na_um_list_aggregates - namespace: '' - - description: NetApp Unified Manager list cluster. - name: na_um_list_clusters - namespace: '' - - description: NetApp Unified Manager list nodes. - name: na_um_list_nodes - namespace: '' - - description: NetApp Unified Manager list svms. - name: na_um_list_svms - namespace: '' - release_date: '2020-05-06' - 20.6.0: - modules: - - description: NetApp Unified Manager list volumes. 
- name: na_um_list_volumes - namespace: '' - release_date: '2020-06-03' - 20.7.0: - changes: - minor_changes: - - na_um_list_aggregates - Now sort by performance_capacity.used - - na_um_list_nodes - Now sort by performance_capacity.used - fragments: - - 20.7.0.yaml - release_date: '2020-06-24' - 21.5.0: - changes: - minor_changes: - - minor changes to meet Red Hat requirements to be certified. - fragments: - - DEVOPS-3920.yaml - release_date: '2021-04-21' - 21.6.0: - changes: - minor_changes: - - na_um_list_aggregates has been renamed na_um_aggregates_info. - - na_um_list_clusters has been renamed na_um_clusters_info. - - na_um_list_nodes has been renamed na_um_nodes_info. - - na_um_list_svms has been renamed na_um_svms_info. - - na_um_list_volumes has been renamed na_um_volumes_info. - fragments: - - DEVOPS-3962.yaml - release_date: '2021-05-06' - 21.7.0: - changes: - bugfixes: - - all modules - report error when connecting to a server that does not run AIQUM. - - all modules - return all records rather than the first 1000 records (mostly - for volumes). - - rename na_um_list_volumes.p to na_um_list_volumes.py - minor_changes: - - all modules - ability to trace API calls and responses. - - all modules - new ``max_records`` option to limit the amount of data in a - single GET response. - fragments: - - DEVOPS-2952.yaml - - DEVOPS-4059.yaml - - DEVOPS-4087.yaml - release_date: '2021-07-14' - 21.8.0: - changes: - minor_changes: - - PR1 - allow usage of Ansible module group defaults - for Ansible 2.12+. 
- fragments: - - DEVOPS-4416.yaml - release_date: '2021-11-03' diff --git a/ansible_collections/netapp/um_info/changelogs/config.yaml b/ansible_collections/netapp/um_info/changelogs/config.yaml deleted file mode 100644 index a39ec8cc2..000000000 --- a/ansible_collections/netapp/um_info/changelogs/config.yaml +++ /dev/null @@ -1,32 +0,0 @@ -changelog_filename_template: ../CHANGELOG.rst -changelog_filename_version_depth: 0 -changes_file: changelog.yaml -changes_format: combined -ignore_other_fragment_extensions: true -keep_fragments: true -mention_ancestor: true -new_plugins_after_name: removed_features -notesdir: fragments -prelude_section_name: release_summary -prelude_section_title: Release Summary -sanitize_changelog: true -sections: -- - major_changes - - Major Changes -- - minor_changes - - Minor Changes -- - breaking_changes - - Breaking Changes / Porting Guide -- - deprecated_features - - Deprecated Features -- - removed_features - - Removed Features (previously deprecated) -- - security_fixes - - Security Fixes -- - bugfixes - - Bugfixes -- - known_issues - - Known Issues -title: NetApp Unified Manager Info Collection -trivial_section_name: trivial -use_fqcn: true diff --git a/ansible_collections/netapp/um_info/changelogs/fragments/20.7.0.yaml b/ansible_collections/netapp/um_info/changelogs/fragments/20.7.0.yaml deleted file mode 100644 index 75f9b4e46..000000000 --- a/ansible_collections/netapp/um_info/changelogs/fragments/20.7.0.yaml +++ /dev/null @@ -1,3 +0,0 @@ -minor_changes: - - na_um_list_aggregates - Now sort by performance_capacity.used - - na_um_list_nodes - Now sort by performance_capacity.used diff --git a/ansible_collections/netapp/um_info/changelogs/fragments/DEVOPS-2952.yaml b/ansible_collections/netapp/um_info/changelogs/fragments/DEVOPS-2952.yaml deleted file mode 100644 index 0d0699cc9..000000000 --- a/ansible_collections/netapp/um_info/changelogs/fragments/DEVOPS-2952.yaml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - all modules - new 
``max_records`` option to limit the amount of data in a single GET response. -bugfixes: - - all modules - return all records rather than the first 1000 records (mostly for volumes). diff --git a/ansible_collections/netapp/um_info/changelogs/fragments/DEVOPS-3920.yaml b/ansible_collections/netapp/um_info/changelogs/fragments/DEVOPS-3920.yaml deleted file mode 100644 index c3c7f1224..000000000 --- a/ansible_collections/netapp/um_info/changelogs/fragments/DEVOPS-3920.yaml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - minor changes to meet Red Hat requirements to be certified. diff --git a/ansible_collections/netapp/um_info/changelogs/fragments/DEVOPS-3962.yaml b/ansible_collections/netapp/um_info/changelogs/fragments/DEVOPS-3962.yaml deleted file mode 100644 index 1d3ef3f86..000000000 --- a/ansible_collections/netapp/um_info/changelogs/fragments/DEVOPS-3962.yaml +++ /dev/null @@ -1,6 +0,0 @@ -minor_changes: -- na_um_list_aggregates has been renamed na_um_aggregates_info. -- na_um_list_clusters has been renamed na_um_clusters_info. -- na_um_list_nodes has been renamed na_um_nodes_info. -- na_um_list_svms has been renamed na_um_svms_info. -- na_um_list_volumes has been renamed na_um_volumes_info. 
diff --git a/ansible_collections/netapp/um_info/changelogs/fragments/DEVOPS-4059.yaml b/ansible_collections/netapp/um_info/changelogs/fragments/DEVOPS-4059.yaml deleted file mode 100644 index 824f55f1e..000000000 --- a/ansible_collections/netapp/um_info/changelogs/fragments/DEVOPS-4059.yaml +++ /dev/null @@ -1,2 +0,0 @@ -bugfixes: - - rename na_um_list_volumes.p to na_um_list_volumes.py diff --git a/ansible_collections/netapp/um_info/changelogs/fragments/DEVOPS-4087.yaml b/ansible_collections/netapp/um_info/changelogs/fragments/DEVOPS-4087.yaml deleted file mode 100644 index 836a4b1b2..000000000 --- a/ansible_collections/netapp/um_info/changelogs/fragments/DEVOPS-4087.yaml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - all modules - ability to trace API calls and responses. -bugfixes: - - all modules - report error when connecting to a server that does not run AIQUM. diff --git a/ansible_collections/netapp/um_info/changelogs/fragments/DEVOPS-4416.yaml b/ansible_collections/netapp/um_info/changelogs/fragments/DEVOPS-4416.yaml deleted file mode 100644 index 6b4b660a0..000000000 --- a/ansible_collections/netapp/um_info/changelogs/fragments/DEVOPS-4416.yaml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - PR1 - allow usage of Ansible module group defaults - for Ansible 2.12+. 
diff --git a/ansible_collections/netapp/um_info/meta/execution-environment.yml b/ansible_collections/netapp/um_info/meta/execution-environment.yml deleted file mode 100644 index 315d71a13..000000000 --- a/ansible_collections/netapp/um_info/meta/execution-environment.yml +++ /dev/null @@ -1,3 +0,0 @@ -version: 1 -dependencies: - python: ../requirements.txt diff --git a/ansible_collections/netapp/um_info/meta/runtime.yml b/ansible_collections/netapp/um_info/meta/runtime.yml deleted file mode 100644 index cc45f44f7..000000000 --- a/ansible_collections/netapp/um_info/meta/runtime.yml +++ /dev/null @@ -1,9 +0,0 @@ ---- -requires_ansible: ">=2.13" -action_groups: - netapp_um_info: - - na_um_aggregates_info - - na_um_clusters_info - - na_um_nodes_info - - na_um_svms_info - - na_um_volumes_info diff --git a/ansible_collections/netapp/um_info/metadata-29PbAy.json b/ansible_collections/netapp/um_info/metadata-29PbAy.json deleted file mode 100644 index 7ccf48221..000000000 --- a/ansible_collections/netapp/um_info/metadata-29PbAy.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "change_description": { - "changed_paths": [], - "command": "", - "deleted_paths": [], - "focused_command_targets": {}, - "no_integration_paths": [], - "regular_command_targets": {} - }, - "changes": {}, - "ci_provider": "", - "cloud_config": null, - "instance_config": null -} diff --git a/ansible_collections/netapp/um_info/plugins/doc_fragments/netapp.py b/ansible_collections/netapp/um_info/plugins/doc_fragments/netapp.py deleted file mode 100644 index 0790f109a..000000000 --- a/ansible_collections/netapp/um_info/plugins/doc_fragments/netapp.py +++ /dev/null @@ -1,74 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright: (c) 2020, Suhas Bangalore Shekar <bsuhas@netapp.com> -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - - -class ModuleDocFragment(object): - - DOCUMENTATION = 
r''' -options: - - See respective platform section for more details -requirements: - - See respective platform section for more details -notes: - - Ansible modules are available for the following NetApp Storage Management Platforms: AIQUM 9.7 -''' - - # Documentation fragment for AIQUM (um) - UM = r''' -options: - hostname: - description: - - The hostname or IP address of the Unified Manager instance. - type: str - required: true - username: - description: - - username of the Unified Manager instance. - type: str - required: true - password: - description: - - Password for the specified user. - type: str - required: true - validate_certs: - description: - - If set to C(False), the SSL certificates will not be validated. - - This should only set to C(False) used on personally controlled sites using self-signed certificates. - type: bool - default: True - http_port: - description: - - Override the default port (443) with this port - type: int - feature_flags: - description: - - Enable or disable a new feature. - - This can be used to enable an experimental feature or disable a new feature that breaks backward compatibility. - - Supported keys and values are subject to change without notice. Unknown keys are ignored. - - trace_apis can be set to true to enable tracing, data is written to /tmp/um_apis.log. - type: dict - version_added: 21.7.0 - max_records: - description: - - Maximum number of records retrieved in a single GET request. - - This module loops on GET requests until all available records are fetched. - - If absent, AIQUM uses 1000. - type: int - version_added: 21.7.0 - - -requirements: - - A AIQUM 9.7 system. - - Ansible 2.9 or later. - -notes: - - With the 21.6.0 release, all modules have been renamed to na_um_<module>_info. The old ones will continue to work but will be depecrated in the future. - - The modules prefixed with na_um are built to support the AIQUM 9.7 platform. - - Supports check_mode. 
-''' diff --git a/ansible_collections/netapp/um_info/plugins/module_utils/netapp.py b/ansible_collections/netapp/um_info/plugins/module_utils/netapp.py deleted file mode 100644 index 15a113ae4..000000000 --- a/ansible_collections/netapp/um_info/plugins/module_utils/netapp.py +++ /dev/null @@ -1,246 +0,0 @@ -# This code is part of Ansible, but is an independent component. -# This particular file snippet, and this file snippet only, is BSD licensed. -# Modules you write using this snippet, which is embedded dynamically by Ansible -# still belong to the author of the module, and may assign their own license -# to the complete work. -# -# Copyright (c) 2017, Sumit Kumar <sumit4@netapp.com> -# Copyright (c) 2017, Michael Price <michael.price@netapp.com> -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without modification, -# are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
-# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, -# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE -# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -''' -common routines for um_info -''' - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import logging -from ansible.module_utils.basic import missing_required_lib -from ansible.module_utils._text import to_native - -try: - from ansible.module_utils.ansible_release import __version__ as ansible_version -except ImportError: - ansible_version = 'unknown' - -COLLECTION_VERSION = "21.8.1" - -try: - import requests - HAS_REQUESTS = True -except ImportError: - HAS_REQUESTS = False - -ERROR_MSG = dict( - no_cserver='This module is expected to run as cluster admin' -) - -LOG = logging.getLogger(__name__) -LOG_FILE = '/tmp/um_apis.log' - - -def na_um_host_argument_spec(): - - return dict( - hostname=dict(required=True, type='str'), - username=dict(required=True, type='str'), - password=dict(required=True, type='str', no_log=True), - validate_certs=dict(required=False, type='bool', default=True), - http_port=dict(required=False, type='int'), - feature_flags=dict(required=False, type='dict', default=dict()), - max_records=dict(required=False, type='int') - ) - - -def has_feature(module, feature_name): - feature = get_feature(module, feature_name) - if isinstance(feature, bool): - return feature - module.fail_json(msg="Error: expected bool type for feature flag: %s" % feature_name) - - -def get_feature(module, feature_name): - ''' if the user has configured the feature, use it - otherwise, use our default - 
''' - default_flags = dict( - strict_json_check=True, # if true, fail if response.content in not empty and is not valid json - trace_apis=False, # if true, append REST requests/responses to LOG_FILE - - ) - - if module.params['feature_flags'] is not None and feature_name in module.params['feature_flags']: - return module.params['feature_flags'][feature_name] - if feature_name in default_flags: - return default_flags[feature_name] - module.fail_json(msg="Internal error: unexpected feature flag: %s" % feature_name) - - -class UMRestAPI(object): - ''' send REST request and process response ''' - def __init__(self, module, timeout=60): - self.module = module - self.username = self.module.params['username'] - self.password = self.module.params['password'] - self.hostname = self.module.params['hostname'] - self.verify = self.module.params['validate_certs'] - self.max_records = self.module.params['max_records'] - self.timeout = timeout - if self.module.params.get('http_port') is not None: - self.url = 'https://%s:%d' % (self.hostname, self.module.params['http_port']) - else: - self.url = 'https://%s' % self.hostname - self.errors = list() - self.debug_logs = list() - self.check_required_library() - if has_feature(module, 'trace_apis'): - logging.basicConfig(filename=LOG_FILE, level=logging.DEBUG, format='%(asctime)s %(levelname)-8s %(message)s') - - def check_required_library(self): - if not HAS_REQUESTS: - self.module.fail_json(msg=missing_required_lib('requests')) - - def get_records(self, message, api): - records = list() - try: - if message['total_records'] > 0: - records = message['records'] - if message['total_records'] != len(records): - self.module.warn('Mismatch between received: %d and expected: %d records.' 
% (len(records), message['total_records'])) - except KeyError as exc: - self.module.fail_json(msg='Error: unexpected response from %s: %s - expecting key: %s' - % (api, message, to_native(exc))) - return records - - def send_request(self, method, api, params, json=None, accept=None): - ''' send http request and process response, including error conditions ''' - url = self.url + api - status_code = None - content = None - json_dict = None - json_error = None - error_details = None - headers = None - if accept is not None: - headers = dict() - # accept is used to turn on/off HAL linking - if accept is not None: - headers['accept'] = accept - - def check_contents(response): - '''json() may fail on an empty value, but it's OK if no response is expected. - To avoid false positives, only report an issue when we expect to read a value. - The first get will see it. - ''' - if method == 'GET' and has_feature(self.module, 'strict_json_check'): - contents = response.content - if len(contents) > 0: - raise ValueError("Expecting json, got: %s" % contents) - - def get_json(response): - ''' extract json, and error message if present ''' - try: - json = response.json() - except ValueError: - check_contents(response) - return None, None - error = json.get('error') - return json, error - - self.log_debug('sending', repr(dict(method=method, url=url, verify=self.verify, params=params, - timeout=self.timeout, json=json, headers=headers))) - try: - response = requests.request(method, url, verify=self.verify, auth=(self.username, self.password), - params=params, timeout=self.timeout, json=json, headers=headers) - content = response.content # for debug purposes - status_code = response.status_code - # If the response was successful, no Exception will be raised - response.raise_for_status() - json_dict, json_error = get_json(response) - except requests.exceptions.HTTPError as err: - __, json_error = get_json(response) - if json_error is None: - self.log_error(status_code, 'HTTP error: %s' 
% err) - error_details = str(err) - # If an error was reported in the json payload, it is handled below - except requests.exceptions.ConnectionError as err: - self.log_error(status_code, 'Connection error: %s' % err) - error_details = str(err) - except Exception as err: - self.log_error(status_code, 'Other error: %s' % err) - error_details = str(err) - if json_error is not None: - self.log_error(status_code, 'Endpoint error: %d: %s' % (status_code, json_error)) - error_details = json_error - self.log_debug(status_code, content) - return json_dict, error_details - - def get(self, api, params): - - def get_next_api(message): - '''make sure _links is present, and href is present if next is present - return api if next is present, None otherwise - return error if _links or href are missing - ''' - api, error = None, None - if message is None or '_links' not in message: - error = 'Expecting _links key in %s' % message - elif 'next' in message['_links']: - if 'href' in message['_links']['next']: - api = message['_links']['next']['href'] - else: - error = 'Expecting href key in %s' % message['_links']['next'] - return api, error - - method = 'GET' - records = list() - if self.max_records is not None: - if params and 'max_records' not in params: - params['max_records'] = self.max_records - else: - params = dict(max_records=self.max_records) - api = '/api/%s' % api - - while api: - message, error = self.send_request(method, api, params) - if error: - return message, error - api, error = get_next_api(message) - if error: - return message, error - if 'records' in message: - records.extend(message['records']) - params = None # already included in the next link - - if records: - message['records'] = records - return message, error - - def log_error(self, status_code, message): - LOG.error("%s: %s", status_code, message) - self.errors.append(message) - self.debug_logs.append((status_code, message)) - - def log_debug(self, status_code, content): - LOG.debug("%s: %s", status_code, 
content) - self.debug_logs.append((status_code, content)) diff --git a/ansible_collections/netapp/um_info/plugins/module_utils/netapp_module.py b/ansible_collections/netapp/um_info/plugins/module_utils/netapp_module.py deleted file mode 100644 index f3b95800e..000000000 --- a/ansible_collections/netapp/um_info/plugins/module_utils/netapp_module.py +++ /dev/null @@ -1,51 +0,0 @@ -# This code is part of Ansible, but is an independent component. -# This particular file snippet, and this file snippet only, is BSD licensed. -# Modules you write using this snippet, which is embedded dynamically by Ansible -# still belong to the author of the module, and may assign their own license -# to the complete work. -# -# Copyright (c) 2020, Laurent Nicolas <laurentn@netapp.com> -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without modification, -# are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
-# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, -# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE -# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -''' Support class for NetApp ansible modules ''' - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - - -class NetAppModule(object): - ''' - Common class for NetApp modules - set of support functions to derive actions based - on the current state of the system, and a desired state - ''' - - def __init__(self): - self.changed = False - self.parameters = {} - - def set_parameters(self, ansible_params): - self.parameters = dict() - for param in ansible_params: - if ansible_params[param] is not None: - self.parameters[param] = ansible_params[param] - return self.parameters diff --git a/ansible_collections/netapp/um_info/plugins/modules/na_um_aggregates_info.py b/ansible_collections/netapp/um_info/plugins/modules/na_um_aggregates_info.py deleted file mode 100644 index 10a34cfdf..000000000 --- a/ansible_collections/netapp/um_info/plugins/modules/na_um_aggregates_info.py +++ /dev/null @@ -1,163 +0,0 @@ -#!/usr/bin/python - -# (c) 2020, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -''' -na_um_list_aggregates -''' - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' -module: na_um_aggregates_info -short_description: NetApp Unified Manager list aggregates. 
-extends_documentation_fragment: - - netapp.um_info.netapp.um -version_added: '20.5.0' -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> - -description: -- List Aggregates on AIQUM. -''' - -EXAMPLES = """ -- name: List Aggregates - netapp.um_info.na_um_aggregates_info: - hostname: "{{ hostname }}" - username: "{{ username }}" - password: "{{ password }}" -""" - -RETURN = """ -records: - description: Returns list of Aggregates information - returned: always - type: list - sample: [{'node': - {'_links': - {'self': - {'href': '...' - } - }, - 'uuid': '...', - 'key': '...', - 'name': '...' - }, - 'snaplock_type': '...', - 'uuid': '...', - 'space': - {'block_storage': - {'available': ..., - 'used': ..., - 'size': ... - }, - 'efficiency': - {'savings': ..., - 'logical_used': ... - } - }, - 'block_storage': - {'hybrid_cache': - {'enabled': ..., - 'size': ... - }, - 'primary': - {'raid_size': ..., - 'raid_type': '...' - }, - 'mirror': - {'state': '...' - } - }, - 'data_encryption': - {'software_encryption_enabled': ... - }, - 'cluster': - {'_links': - {'self': - {'href': '...' - } - }, - 'uuid': '...', - 'key': '...', - 'name': '...' - }, - 'state': '...', - 'create_time': '...', - '_links': - {'self': - {'href': '...' - } - }, - 'key': '...', - 'type': '...', - 'name': '...' 
- } - ] -""" - -from ansible.module_utils.basic import AnsibleModule -import ansible_collections.netapp.um_info.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.um_info.plugins.module_utils.netapp_module import NetAppModule -from ansible_collections.netapp.um_info.plugins.module_utils.netapp import UMRestAPI - - -class NetAppUMAggregate(object): - ''' aggregates initialize and class methods ''' - - def __init__(self): - self.argument_spec = netapp_utils.na_um_host_argument_spec() - self.module = AnsibleModule( - argument_spec=self.argument_spec, - supports_check_mode=True - ) - - self.na_helper = NetAppModule() - self.parameters = self.na_helper.set_parameters(self.module.params) - - self.rest_api = UMRestAPI(self.module) - - def get_aggregates(self): - """ - Fetch details of aggregates. - :return: - Dictionary of current details if aggregates found - None if aggregates is not found - """ - data = {} - api = "datacenter/storage/aggregates?order_by=performance_capacity.used" - message, error = self.rest_api.get(api, data) - if error: - self.module.fail_json(msg=error) - return self.rest_api.get_records(message, api) - - def apply(self): - """ - Apply action to the aggregates listing - :return: None - """ - current = self.get_aggregates() - if current is not None: - self.na_helper.changed = True - self.module.exit_json(changed=self.na_helper.changed, msg=current) - - -def main(): - """ - Create Aggregate class instance and invoke apply - :return: None - """ - list_aggregates_obj = NetAppUMAggregate() - list_aggregates_obj.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/um_info/plugins/modules/na_um_clusters_info.py b/ansible_collections/netapp/um_info/plugins/modules/na_um_clusters_info.py deleted file mode 100644 index 60baa7a48..000000000 --- a/ansible_collections/netapp/um_info/plugins/modules/na_um_clusters_info.py +++ /dev/null @@ -1,152 +0,0 @@ -#!/usr/bin/python - -# (c) 2020, NetApp, Inc -# 
GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -''' -na_um_list_clusters -''' - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' -module: na_um_clusters_info -short_description: NetApp Unified Manager list cluster. -extends_documentation_fragment: - - netapp.um_info.netapp.um -version_added: '20.5.0' -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> - -description: -- List Cluster on AIQUM. -''' - -EXAMPLES = """ -- name: List Clusters - netapp.um_info.na_um_clusters_info: - hostname: "{{ hostname }}" - username: "{{ username }}" - password: "{{ password }}" -""" - -RETURN = """ -records: - description: Returns list of Clusters information - returned: always - type: list - sample: [{ - 'name': '...', - 'version': - { - 'generation': ..., - 'major': ..., - 'full': '...', - 'minor': ... - }, - 'management_ip': '...', - 'contact': ..., - '_links': - { - 'self': - { - 'href': '...' - } - }, - 'location': '...', - 'key': '', - 'nodes': - [ - { - 'uptime': ..., - 'uuid': '...', - 'version': - { - 'generation': ..., - 'major': ..., - 'full': '...', - 'minor': ... - }, - '_links': - { - 'self': - { - 'href': '...' - } - }, - 'location': '...', - 'key': '...', - 'serial_number': '...', - 'model': '...', - 'name': '...' - } - ], - 'isSanOptimized': ..., - 'uuid': '...' 
- } - ] -""" - -from ansible.module_utils.basic import AnsibleModule -import ansible_collections.netapp.um_info.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.um_info.plugins.module_utils.netapp_module import NetAppModule -from ansible_collections.netapp.um_info.plugins.module_utils.netapp import UMRestAPI - - -class NetAppUMCluster(object): - ''' cluster initialize and class methods ''' - - def __init__(self): - self.argument_spec = netapp_utils.na_um_host_argument_spec() - self.module = AnsibleModule( - argument_spec=self.argument_spec, - supports_check_mode=True - ) - - self.na_helper = NetAppModule() - self.parameters = self.na_helper.set_parameters(self.module.params) - - self.rest_api = UMRestAPI(self.module) - - def get_clusters(self): - """ - Fetch details of clusters. - :return: - Dictionary of current details if clusters found - None if clusters is not found - """ - data = {} - api = "datacenter/cluster/clusters" - message, error = self.rest_api.get(api, data) - if error: - self.module.fail_json(msg=error) - return self.rest_api.get_records(message, api) - - def apply(self): - """ - Apply action to the cluster listing - :return: None - """ - current = self.get_clusters() - if current is not None: - self.na_helper.changed = True - self.module.exit_json(changed=self.na_helper.changed, msg=current) - - -def main(): - """ - Create Cluster class instance and invoke apply - :return: None - """ - list_cluster_obj = NetAppUMCluster() - list_cluster_obj.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/um_info/plugins/modules/na_um_list_aggregates.py b/ansible_collections/netapp/um_info/plugins/modules/na_um_list_aggregates.py deleted file mode 100644 index 10a34cfdf..000000000 --- a/ansible_collections/netapp/um_info/plugins/modules/na_um_list_aggregates.py +++ /dev/null @@ -1,163 +0,0 @@ -#!/usr/bin/python - -# (c) 2020, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or 
https://www.gnu.org/licenses/gpl-3.0.txt) - -''' -na_um_list_aggregates -''' - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' -module: na_um_aggregates_info -short_description: NetApp Unified Manager list aggregates. -extends_documentation_fragment: - - netapp.um_info.netapp.um -version_added: '20.5.0' -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> - -description: -- List Aggregates on AIQUM. -''' - -EXAMPLES = """ -- name: List Aggregates - netapp.um_info.na_um_aggregates_info: - hostname: "{{ hostname }}" - username: "{{ username }}" - password: "{{ password }}" -""" - -RETURN = """ -records: - description: Returns list of Aggregates information - returned: always - type: list - sample: [{'node': - {'_links': - {'self': - {'href': '...' - } - }, - 'uuid': '...', - 'key': '...', - 'name': '...' - }, - 'snaplock_type': '...', - 'uuid': '...', - 'space': - {'block_storage': - {'available': ..., - 'used': ..., - 'size': ... - }, - 'efficiency': - {'savings': ..., - 'logical_used': ... - } - }, - 'block_storage': - {'hybrid_cache': - {'enabled': ..., - 'size': ... - }, - 'primary': - {'raid_size': ..., - 'raid_type': '...' - }, - 'mirror': - {'state': '...' - } - }, - 'data_encryption': - {'software_encryption_enabled': ... - }, - 'cluster': - {'_links': - {'self': - {'href': '...' - } - }, - 'uuid': '...', - 'key': '...', - 'name': '...' - }, - 'state': '...', - 'create_time': '...', - '_links': - {'self': - {'href': '...' - } - }, - 'key': '...', - 'type': '...', - 'name': '...' 
- } - ] -""" - -from ansible.module_utils.basic import AnsibleModule -import ansible_collections.netapp.um_info.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.um_info.plugins.module_utils.netapp_module import NetAppModule -from ansible_collections.netapp.um_info.plugins.module_utils.netapp import UMRestAPI - - -class NetAppUMAggregate(object): - ''' aggregates initialize and class methods ''' - - def __init__(self): - self.argument_spec = netapp_utils.na_um_host_argument_spec() - self.module = AnsibleModule( - argument_spec=self.argument_spec, - supports_check_mode=True - ) - - self.na_helper = NetAppModule() - self.parameters = self.na_helper.set_parameters(self.module.params) - - self.rest_api = UMRestAPI(self.module) - - def get_aggregates(self): - """ - Fetch details of aggregates. - :return: - Dictionary of current details if aggregates found - None if aggregates is not found - """ - data = {} - api = "datacenter/storage/aggregates?order_by=performance_capacity.used" - message, error = self.rest_api.get(api, data) - if error: - self.module.fail_json(msg=error) - return self.rest_api.get_records(message, api) - - def apply(self): - """ - Apply action to the aggregates listing - :return: None - """ - current = self.get_aggregates() - if current is not None: - self.na_helper.changed = True - self.module.exit_json(changed=self.na_helper.changed, msg=current) - - -def main(): - """ - Create Aggregate class instance and invoke apply - :return: None - """ - list_aggregates_obj = NetAppUMAggregate() - list_aggregates_obj.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/um_info/plugins/modules/na_um_list_clusters.py b/ansible_collections/netapp/um_info/plugins/modules/na_um_list_clusters.py deleted file mode 100644 index 60baa7a48..000000000 --- a/ansible_collections/netapp/um_info/plugins/modules/na_um_list_clusters.py +++ /dev/null @@ -1,152 +0,0 @@ -#!/usr/bin/python - -# (c) 2020, NetApp, Inc -# 
GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -''' -na_um_list_clusters -''' - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' -module: na_um_clusters_info -short_description: NetApp Unified Manager list cluster. -extends_documentation_fragment: - - netapp.um_info.netapp.um -version_added: '20.5.0' -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> - -description: -- List Cluster on AIQUM. -''' - -EXAMPLES = """ -- name: List Clusters - netapp.um_info.na_um_clusters_info: - hostname: "{{ hostname }}" - username: "{{ username }}" - password: "{{ password }}" -""" - -RETURN = """ -records: - description: Returns list of Clusters information - returned: always - type: list - sample: [{ - 'name': '...', - 'version': - { - 'generation': ..., - 'major': ..., - 'full': '...', - 'minor': ... - }, - 'management_ip': '...', - 'contact': ..., - '_links': - { - 'self': - { - 'href': '...' - } - }, - 'location': '...', - 'key': '', - 'nodes': - [ - { - 'uptime': ..., - 'uuid': '...', - 'version': - { - 'generation': ..., - 'major': ..., - 'full': '...', - 'minor': ... - }, - '_links': - { - 'self': - { - 'href': '...' - } - }, - 'location': '...', - 'key': '...', - 'serial_number': '...', - 'model': '...', - 'name': '...' - } - ], - 'isSanOptimized': ..., - 'uuid': '...' 
- } - ] -""" - -from ansible.module_utils.basic import AnsibleModule -import ansible_collections.netapp.um_info.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.um_info.plugins.module_utils.netapp_module import NetAppModule -from ansible_collections.netapp.um_info.plugins.module_utils.netapp import UMRestAPI - - -class NetAppUMCluster(object): - ''' cluster initialize and class methods ''' - - def __init__(self): - self.argument_spec = netapp_utils.na_um_host_argument_spec() - self.module = AnsibleModule( - argument_spec=self.argument_spec, - supports_check_mode=True - ) - - self.na_helper = NetAppModule() - self.parameters = self.na_helper.set_parameters(self.module.params) - - self.rest_api = UMRestAPI(self.module) - - def get_clusters(self): - """ - Fetch details of clusters. - :return: - Dictionary of current details if clusters found - None if clusters is not found - """ - data = {} - api = "datacenter/cluster/clusters" - message, error = self.rest_api.get(api, data) - if error: - self.module.fail_json(msg=error) - return self.rest_api.get_records(message, api) - - def apply(self): - """ - Apply action to the cluster listing - :return: None - """ - current = self.get_clusters() - if current is not None: - self.na_helper.changed = True - self.module.exit_json(changed=self.na_helper.changed, msg=current) - - -def main(): - """ - Create Cluster class instance and invoke apply - :return: None - """ - list_cluster_obj = NetAppUMCluster() - list_cluster_obj.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/um_info/plugins/modules/na_um_list_nodes.py b/ansible_collections/netapp/um_info/plugins/modules/na_um_list_nodes.py deleted file mode 100644 index 27e81ec2e..000000000 --- a/ansible_collections/netapp/um_info/plugins/modules/na_um_list_nodes.py +++ /dev/null @@ -1,145 +0,0 @@ -#!/usr/bin/python - -# (c) 2020, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or 
https://www.gnu.org/licenses/gpl-3.0.txt) - -''' -na_um_list_nodes -''' - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' -module: na_um_nodes_info -short_description: NetApp Unified Manager list nodes. -extends_documentation_fragment: - - netapp.um_info.netapp.um -version_added: '20.5.0' -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> - -description: -- List Nodes on AIQUM. -''' - -EXAMPLES = """ -- name: List Nodes - netapp.um_info.na_um_nodes_info: - hostname: "{{ hostname }}" - username: "{{ username }}" - password: "{{ password }}" -""" - -RETURN = """ -records: - description: Returns list of Nodes information - returned: always - type: list - sample: [{'allFlashOptimized': ..., - 'uptime': ..., - 'vendor': '...', - 'uuid': '...', - 'nvramid': '...', - '_links': - {'self': - {'href': '...' - } - }, - 'cluster': - {'_links': - {'self': - {'href': '...' - } - }, - 'uuid': '...', - 'key': '...', - 'name': '...' - }, - 'version': - {'generation': ..., - 'major': ..., - 'full': '...', - 'minor': ... - }, - 'systemid': '...', - 'location': '...', - 'key': ...', - 'is_all_flash_optimized': ..., - 'serial_number': '...', - 'model': '...', - 'ha': - {'partners': - [{'_links': {}, - 'uuid': ..., - 'key': ..., - 'name': ... - }] - }, - 'health': ..., - 'name': '...' 
- }] -""" - -from ansible.module_utils.basic import AnsibleModule -import ansible_collections.netapp.um_info.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.um_info.plugins.module_utils.netapp_module import NetAppModule -from ansible_collections.netapp.um_info.plugins.module_utils.netapp import UMRestAPI - - -class NetAppUMNode(object): - ''' nodes initialize and class methods ''' - - def __init__(self): - self.argument_spec = netapp_utils.na_um_host_argument_spec() - self.module = AnsibleModule( - argument_spec=self.argument_spec, - supports_check_mode=True - ) - - self.na_helper = NetAppModule() - self.parameters = self.na_helper.set_parameters(self.module.params) - - self.rest_api = UMRestAPI(self.module) - - def get_nodes(self): - """ - Fetch details of nodes. - :return: - Dictionary of current details if nodes found - None if nodes is not found - """ - data = {} - api = "datacenter/cluster/nodes?order_by=performance_capacity.used" - message, error = self.rest_api.get(api, data) - if error: - self.module.fail_json(msg=error) - return self.rest_api.get_records(message, api) - - def apply(self): - """ - Apply action to the nodes listing - :return: None - """ - current = self.get_nodes() - if current is not None: - self.na_helper.changed = True - self.module.exit_json(changed=self.na_helper.changed, msg=current) - - -def main(): - """ - Create Node class instance and invoke apply - :return: None - """ - list_nodes_obj = NetAppUMNode() - list_nodes_obj.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/um_info/plugins/modules/na_um_list_svms.py b/ansible_collections/netapp/um_info/plugins/modules/na_um_list_svms.py deleted file mode 100644 index 2722e9ef6..000000000 --- a/ansible_collections/netapp/um_info/plugins/modules/na_um_list_svms.py +++ /dev/null @@ -1,174 +0,0 @@ -#!/usr/bin/python - -# (c) 2020, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or 
https://www.gnu.org/licenses/gpl-3.0.txt) - -''' -na_um_list_svms -''' - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' -module: na_um_svms_info -short_description: NetApp Unified Manager list svms. -extends_documentation_fragment: - - netapp.um_info.netapp.um -version_added: '20.5.0' -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> - -description: -- List SVMs on AIQUM. -''' - -EXAMPLES = """ -- name: List SVMs - netapp.um_info.na_um_svms_info: - hostname: "{{ hostname }}" - username: "{{ username }}" - password: "{{ password }}" -""" - -RETURN = """ -records: - description: Returns list of SVMs information - returned: always - type: list - sample: [{'fcp': - {'enabled': ... - }, - 'dns': ..., - 'snapshot_policy': - {'_links': {}, - 'uuid': ..., - 'key': '...', - 'name': '...' - }, - 'language': '...', - 'subtype': 'default', - 'aggregates': - [{'_links': - {'self': - {'href': '...' - } - }, - 'uuid': '...', - 'key': '...', - 'name': '...' - }], - 'nvme': - {'enabled': ... - }, - 'ipspace': - {'_links': {}, - 'uuid': '...', - 'key': '...', - 'name': '...' - }, - 'uuid': '...', - 'cluster': - {'_links': - {'self': - {'href': '...' - } - }, - 'uuid': '...', - 'key': '...', - 'name': '...' - }, - 'state': '...', - '_links': - {'self': - {'href': '...' - } - }, - 'key': '...', - 'ldap': - {'enabled': ... - }, - 'nis': - {'domain': ..., - 'enabled': ..., - 'servers': ... - }, - 'cifs': - {'enabled': ..., - 'name': ..., - 'ad_domain': - {'fqdn': ... - } - }, - 'iscsi': - {'enabled': ... - }, - 'nfs': - {'enabled': ... - }, - 'name': '...' 
- }] -""" - -from ansible.module_utils.basic import AnsibleModule -import ansible_collections.netapp.um_info.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.um_info.plugins.module_utils.netapp_module import NetAppModule -from ansible_collections.netapp.um_info.plugins.module_utils.netapp import UMRestAPI - - -class NetAppUMSVM(object): - ''' svms initialize and class methods ''' - - def __init__(self): - self.argument_spec = netapp_utils.na_um_host_argument_spec() - self.module = AnsibleModule( - argument_spec=self.argument_spec, - supports_check_mode=True - ) - - self.na_helper = NetAppModule() - self.parameters = self.na_helper.set_parameters(self.module.params) - - self.rest_api = UMRestAPI(self.module) - - def get_svms(self): - """ - Fetch details of svms. - :return: - Dictionary of current details if svms found - None if svms is not found - """ - data = {} - api = "datacenter/svm/svms" - message, error = self.rest_api.get(api, data) - if error: - self.module.fail_json(msg=error) - return self.rest_api.get_records(message, api) - - def apply(self): - """ - Apply action to the svms listing - :return: None - """ - current = self.get_svms() - if current is not None: - self.na_helper.changed = True - self.module.exit_json(changed=self.na_helper.changed, msg=current) - - -def main(): - """ - Create SVM class instance and invoke apply - :return: None - """ - list_svms_obj = NetAppUMSVM() - list_svms_obj.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/um_info/plugins/modules/na_um_list_volumes.py b/ansible_collections/netapp/um_info/plugins/modules/na_um_list_volumes.py deleted file mode 100644 index 099213226..000000000 --- a/ansible_collections/netapp/um_info/plugins/modules/na_um_list_volumes.py +++ /dev/null @@ -1,133 +0,0 @@ -#!/usr/bin/python - -# (c) 2020, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -''' -na_um_list_volumes -''' 
- -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' -module: na_um_volumes_info -short_description: NetApp Unified Manager list volumes. -extends_documentation_fragment: - - netapp.um_info.netapp.um -version_added: '20.6.0' -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> - -description: -- List Volumes on AIQUM. -''' - -EXAMPLES = """ -- name: List Volumes - netapp.um_info.na_um_volumes_info: - hostname: "{{ hostname }}" - username: "{{ username }}" - password: "{{ password }}" -""" - -RETURN = """ -records: - description: Returns list of Volumes information - returned: always - type: list - sample: [{'style': '...', - 'svm': - {'_links': - {'self': {...} - }, - '...' - }, - 'qos': {...}, - 'name': '...', - 'language': '...', - 'space': {...}, - 'aggregates': - [ - {...} - ], - 'tiering': {...}, - 'autosize': {...}, - 'cluster': {...}, - 'state': '...', - 'create_time': '...', - '_links': - {'self': - {'href': '...' - } - }, - 'key': '...', - 'snapmirror': {...}, - 'snapshot_policy': {...}, - 'type': '...', - 'uuid': '...' 
- }] -""" - -from ansible.module_utils.basic import AnsibleModule -import ansible_collections.netapp.um_info.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.um_info.plugins.module_utils.netapp_module import NetAppModule -from ansible_collections.netapp.um_info.plugins.module_utils.netapp import UMRestAPI - - -class NetAppUMVolume(object): - ''' volumes initialize and class methods ''' - - def __init__(self): - self.argument_spec = netapp_utils.na_um_host_argument_spec() - self.module = AnsibleModule( - argument_spec=self.argument_spec, - supports_check_mode=True - ) - - self.na_helper = NetAppModule() - self.parameters = self.na_helper.set_parameters(self.module.params) - - self.rest_api = UMRestAPI(self.module) - - def get_volumes(self): - """ - Fetch details of volumes. - :return: - Dictionary of current details if volumes found - None if volumes is not found - """ - data = {} - api = "datacenter/storage/volumes" - message, error = self.rest_api.get(api, data) - if error: - self.module.fail_json(msg=error) - return self.rest_api.get_records(message, api) - - def apply(self): - """ - Apply action to the volumes listing - :return: None - """ - current = self.get_volumes() - if current is not None: - self.na_helper.changed = True - self.module.exit_json(changed=self.na_helper.changed, msg=current) - - -def main(): - """ - Create Volume class instance and invoke apply - :return: None - """ - list_volumes_obj = NetAppUMVolume() - list_volumes_obj.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/um_info/plugins/modules/na_um_nodes_info.py b/ansible_collections/netapp/um_info/plugins/modules/na_um_nodes_info.py deleted file mode 100644 index 27e81ec2e..000000000 --- a/ansible_collections/netapp/um_info/plugins/modules/na_um_nodes_info.py +++ /dev/null @@ -1,145 +0,0 @@ -#!/usr/bin/python - -# (c) 2020, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or 
https://www.gnu.org/licenses/gpl-3.0.txt) - -''' -na_um_list_nodes -''' - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' -module: na_um_nodes_info -short_description: NetApp Unified Manager list nodes. -extends_documentation_fragment: - - netapp.um_info.netapp.um -version_added: '20.5.0' -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> - -description: -- List Nodes on AIQUM. -''' - -EXAMPLES = """ -- name: List Nodes - netapp.um_info.na_um_nodes_info: - hostname: "{{ hostname }}" - username: "{{ username }}" - password: "{{ password }}" -""" - -RETURN = """ -records: - description: Returns list of Nodes information - returned: always - type: list - sample: [{'allFlashOptimized': ..., - 'uptime': ..., - 'vendor': '...', - 'uuid': '...', - 'nvramid': '...', - '_links': - {'self': - {'href': '...' - } - }, - 'cluster': - {'_links': - {'self': - {'href': '...' - } - }, - 'uuid': '...', - 'key': '...', - 'name': '...' - }, - 'version': - {'generation': ..., - 'major': ..., - 'full': '...', - 'minor': ... - }, - 'systemid': '...', - 'location': '...', - 'key': ...', - 'is_all_flash_optimized': ..., - 'serial_number': '...', - 'model': '...', - 'ha': - {'partners': - [{'_links': {}, - 'uuid': ..., - 'key': ..., - 'name': ... - }] - }, - 'health': ..., - 'name': '...' 
- }] -""" - -from ansible.module_utils.basic import AnsibleModule -import ansible_collections.netapp.um_info.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.um_info.plugins.module_utils.netapp_module import NetAppModule -from ansible_collections.netapp.um_info.plugins.module_utils.netapp import UMRestAPI - - -class NetAppUMNode(object): - ''' nodes initialize and class methods ''' - - def __init__(self): - self.argument_spec = netapp_utils.na_um_host_argument_spec() - self.module = AnsibleModule( - argument_spec=self.argument_spec, - supports_check_mode=True - ) - - self.na_helper = NetAppModule() - self.parameters = self.na_helper.set_parameters(self.module.params) - - self.rest_api = UMRestAPI(self.module) - - def get_nodes(self): - """ - Fetch details of nodes. - :return: - Dictionary of current details if nodes found - None if nodes is not found - """ - data = {} - api = "datacenter/cluster/nodes?order_by=performance_capacity.used" - message, error = self.rest_api.get(api, data) - if error: - self.module.fail_json(msg=error) - return self.rest_api.get_records(message, api) - - def apply(self): - """ - Apply action to the nodes listing - :return: None - """ - current = self.get_nodes() - if current is not None: - self.na_helper.changed = True - self.module.exit_json(changed=self.na_helper.changed, msg=current) - - -def main(): - """ - Create Node class instance and invoke apply - :return: None - """ - list_nodes_obj = NetAppUMNode() - list_nodes_obj.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/um_info/plugins/modules/na_um_svms_info.py b/ansible_collections/netapp/um_info/plugins/modules/na_um_svms_info.py deleted file mode 100644 index 2722e9ef6..000000000 --- a/ansible_collections/netapp/um_info/plugins/modules/na_um_svms_info.py +++ /dev/null @@ -1,174 +0,0 @@ -#!/usr/bin/python - -# (c) 2020, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or 
https://www.gnu.org/licenses/gpl-3.0.txt) - -''' -na_um_list_svms -''' - -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' -module: na_um_svms_info -short_description: NetApp Unified Manager list svms. -extends_documentation_fragment: - - netapp.um_info.netapp.um -version_added: '20.5.0' -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> - -description: -- List SVMs on AIQUM. -''' - -EXAMPLES = """ -- name: List SVMs - netapp.um_info.na_um_svms_info: - hostname: "{{ hostname }}" - username: "{{ username }}" - password: "{{ password }}" -""" - -RETURN = """ -records: - description: Returns list of SVMs information - returned: always - type: list - sample: [{'fcp': - {'enabled': ... - }, - 'dns': ..., - 'snapshot_policy': - {'_links': {}, - 'uuid': ..., - 'key': '...', - 'name': '...' - }, - 'language': '...', - 'subtype': 'default', - 'aggregates': - [{'_links': - {'self': - {'href': '...' - } - }, - 'uuid': '...', - 'key': '...', - 'name': '...' - }], - 'nvme': - {'enabled': ... - }, - 'ipspace': - {'_links': {}, - 'uuid': '...', - 'key': '...', - 'name': '...' - }, - 'uuid': '...', - 'cluster': - {'_links': - {'self': - {'href': '...' - } - }, - 'uuid': '...', - 'key': '...', - 'name': '...' - }, - 'state': '...', - '_links': - {'self': - {'href': '...' - } - }, - 'key': '...', - 'ldap': - {'enabled': ... - }, - 'nis': - {'domain': ..., - 'enabled': ..., - 'servers': ... - }, - 'cifs': - {'enabled': ..., - 'name': ..., - 'ad_domain': - {'fqdn': ... - } - }, - 'iscsi': - {'enabled': ... - }, - 'nfs': - {'enabled': ... - }, - 'name': '...' 
- }] -""" - -from ansible.module_utils.basic import AnsibleModule -import ansible_collections.netapp.um_info.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.um_info.plugins.module_utils.netapp_module import NetAppModule -from ansible_collections.netapp.um_info.plugins.module_utils.netapp import UMRestAPI - - -class NetAppUMSVM(object): - ''' svms initialize and class methods ''' - - def __init__(self): - self.argument_spec = netapp_utils.na_um_host_argument_spec() - self.module = AnsibleModule( - argument_spec=self.argument_spec, - supports_check_mode=True - ) - - self.na_helper = NetAppModule() - self.parameters = self.na_helper.set_parameters(self.module.params) - - self.rest_api = UMRestAPI(self.module) - - def get_svms(self): - """ - Fetch details of svms. - :return: - Dictionary of current details if svms found - None if svms is not found - """ - data = {} - api = "datacenter/svm/svms" - message, error = self.rest_api.get(api, data) - if error: - self.module.fail_json(msg=error) - return self.rest_api.get_records(message, api) - - def apply(self): - """ - Apply action to the svms listing - :return: None - """ - current = self.get_svms() - if current is not None: - self.na_helper.changed = True - self.module.exit_json(changed=self.na_helper.changed, msg=current) - - -def main(): - """ - Create SVM class instance and invoke apply - :return: None - """ - list_svms_obj = NetAppUMSVM() - list_svms_obj.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/um_info/plugins/modules/na_um_volumes_info.py b/ansible_collections/netapp/um_info/plugins/modules/na_um_volumes_info.py deleted file mode 100644 index 099213226..000000000 --- a/ansible_collections/netapp/um_info/plugins/modules/na_um_volumes_info.py +++ /dev/null @@ -1,133 +0,0 @@ -#!/usr/bin/python - -# (c) 2020, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -''' -na_um_list_volumes -''' 
- -from __future__ import absolute_import, division, print_function -__metaclass__ = type - - -ANSIBLE_METADATA = {'metadata_version': '1.1', - 'status': ['preview'], - 'supported_by': 'certified'} - - -DOCUMENTATION = ''' -module: na_um_volumes_info -short_description: NetApp Unified Manager list volumes. -extends_documentation_fragment: - - netapp.um_info.netapp.um -version_added: '20.6.0' -author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> - -description: -- List Volumes on AIQUM. -''' - -EXAMPLES = """ -- name: List Volumes - netapp.um_info.na_um_volumes_info: - hostname: "{{ hostname }}" - username: "{{ username }}" - password: "{{ password }}" -""" - -RETURN = """ -records: - description: Returns list of Volumes information - returned: always - type: list - sample: [{'style': '...', - 'svm': - {'_links': - {'self': {...} - }, - '...' - }, - 'qos': {...}, - 'name': '...', - 'language': '...', - 'space': {...}, - 'aggregates': - [ - {...} - ], - 'tiering': {...}, - 'autosize': {...}, - 'cluster': {...}, - 'state': '...', - 'create_time': '...', - '_links': - {'self': - {'href': '...' - } - }, - 'key': '...', - 'snapmirror': {...}, - 'snapshot_policy': {...}, - 'type': '...', - 'uuid': '...' 
- }] -""" - -from ansible.module_utils.basic import AnsibleModule -import ansible_collections.netapp.um_info.plugins.module_utils.netapp as netapp_utils -from ansible_collections.netapp.um_info.plugins.module_utils.netapp_module import NetAppModule -from ansible_collections.netapp.um_info.plugins.module_utils.netapp import UMRestAPI - - -class NetAppUMVolume(object): - ''' volumes initialize and class methods ''' - - def __init__(self): - self.argument_spec = netapp_utils.na_um_host_argument_spec() - self.module = AnsibleModule( - argument_spec=self.argument_spec, - supports_check_mode=True - ) - - self.na_helper = NetAppModule() - self.parameters = self.na_helper.set_parameters(self.module.params) - - self.rest_api = UMRestAPI(self.module) - - def get_volumes(self): - """ - Fetch details of volumes. - :return: - Dictionary of current details if volumes found - None if volumes is not found - """ - data = {} - api = "datacenter/storage/volumes" - message, error = self.rest_api.get(api, data) - if error: - self.module.fail_json(msg=error) - return self.rest_api.get_records(message, api) - - def apply(self): - """ - Apply action to the volumes listing - :return: None - """ - current = self.get_volumes() - if current is not None: - self.na_helper.changed = True - self.module.exit_json(changed=self.na_helper.changed, msg=current) - - -def main(): - """ - Create Volume class instance and invoke apply - :return: None - """ - list_volumes_obj = NetAppUMVolume() - list_volumes_obj.apply() - - -if __name__ == '__main__': - main() diff --git a/ansible_collections/netapp/um_info/requirements.txt b/ansible_collections/netapp/um_info/requirements.txt deleted file mode 100644 index 663bd1f6a..000000000 --- a/ansible_collections/netapp/um_info/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -requests
\ No newline at end of file diff --git a/ansible_collections/netapp/um_info/tests/unit/compat/__init__.py b/ansible_collections/netapp/um_info/tests/unit/compat/__init__.py deleted file mode 100644 index e69de29bb..000000000 --- a/ansible_collections/netapp/um_info/tests/unit/compat/__init__.py +++ /dev/null diff --git a/ansible_collections/netapp/um_info/tests/unit/compat/builtins.py b/ansible_collections/netapp/um_info/tests/unit/compat/builtins.py deleted file mode 100644 index f60ee6782..000000000 --- a/ansible_collections/netapp/um_info/tests/unit/compat/builtins.py +++ /dev/null @@ -1,33 +0,0 @@ -# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com> -# -# This file is part of Ansible -# -# Ansible is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Ansible is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Ansible. If not, see <http://www.gnu.org/licenses/>. 
- -# Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -# -# Compat for python2.7 -# - -# One unittest needs to import builtins via __import__() so we need to have -# the string that represents it -try: - import __builtin__ -except ImportError: - BUILTINS = 'builtins' -else: - BUILTINS = '__builtin__' diff --git a/ansible_collections/netapp/um_info/tests/unit/compat/mock.py b/ansible_collections/netapp/um_info/tests/unit/compat/mock.py deleted file mode 100644 index 0972cd2e8..000000000 --- a/ansible_collections/netapp/um_info/tests/unit/compat/mock.py +++ /dev/null @@ -1,122 +0,0 @@ -# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com> -# -# This file is part of Ansible -# -# Ansible is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Ansible is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Ansible. If not, see <http://www.gnu.org/licenses/>. - -# Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -''' -Compat module for Python3.x's unittest.mock module -''' -import sys - -# Python 2.7 - -# Note: Could use the pypi mock library on python3.x as well as python2.x. 
It -# is the same as the python3 stdlib mock library - -try: - # Allow wildcard import because we really do want to import all of mock's - # symbols into this compat shim - # pylint: disable=wildcard-import,unused-wildcard-import - from unittest.mock import * -except ImportError: - # Python 2 - # pylint: disable=wildcard-import,unused-wildcard-import - try: - from mock import * - except ImportError: - print('You need the mock library installed on python2.x to run tests') - - -# Prior to 3.4.4, mock_open cannot handle binary read_data -if sys.version_info >= (3,) and sys.version_info < (3, 4, 4): - file_spec = None - - def _iterate_read_data(read_data): - # Helper for mock_open: - # Retrieve lines from read_data via a generator so that separate calls to - # readline, read, and readlines are properly interleaved - sep = b'\n' if isinstance(read_data, bytes) else '\n' - data_as_list = [l + sep for l in read_data.split(sep)] - - if data_as_list[-1] == sep: - # If the last line ended in a newline, the list comprehension will have an - # extra entry that's just a newline. Remove this. - data_as_list = data_as_list[:-1] - else: - # If there wasn't an extra newline by itself, then the file being - # emulated doesn't have a newline to end the last line remove the - # newline that our naive format() added - data_as_list[-1] = data_as_list[-1][:-1] - - for line in data_as_list: - yield line - - def mock_open(mock=None, read_data=''): - """ - A helper function to create a mock to replace the use of `open`. It works - for `open` called directly or used as a context manager. - - The `mock` argument is the mock object to configure. If `None` (the - default) then a `MagicMock` will be created for you, with the API limited - to methods or attributes available on standard file handles. - - `read_data` is a string for the `read` methoddline`, and `readlines` of the - file handle to return. This is an empty string by default. 
- """ - def _readlines_side_effect(*args, **kwargs): - if handle.readlines.return_value is not None: - return handle.readlines.return_value - return list(_data) - - def _read_side_effect(*args, **kwargs): - if handle.read.return_value is not None: - return handle.read.return_value - return type(read_data)().join(_data) - - def _readline_side_effect(): - if handle.readline.return_value is not None: - while True: - yield handle.readline.return_value - for line in _data: - yield line - - global file_spec - if file_spec is None: - import _io - file_spec = list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO)))) - - if mock is None: - mock = MagicMock(name='open', spec=open) - - handle = MagicMock(spec=file_spec) - handle.__enter__.return_value = handle - - _data = _iterate_read_data(read_data) - - handle.write.return_value = None - handle.read.return_value = None - handle.readline.return_value = None - handle.readlines.return_value = None - - handle.read.side_effect = _read_side_effect - handle.readline.side_effect = _readline_side_effect() - handle.readlines.side_effect = _readlines_side_effect - - mock.return_value = handle - return mock diff --git a/ansible_collections/netapp/um_info/tests/unit/compat/unittest.py b/ansible_collections/netapp/um_info/tests/unit/compat/unittest.py deleted file mode 100644 index 73a20cf8c..000000000 --- a/ansible_collections/netapp/um_info/tests/unit/compat/unittest.py +++ /dev/null @@ -1,44 +0,0 @@ -# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com> -# -# This file is part of Ansible -# -# Ansible is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Ansible is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Ansible. If not, see <http://www.gnu.org/licenses/>. - -# Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -''' -Compat module for Python2.7's unittest module -''' - -import sys - -import pytest - -# Allow wildcard import because we really do want to import all of -# unittests's symbols into this compat shim -# pylint: disable=wildcard-import,unused-wildcard-import -if sys.version_info < (2, 7): - try: - # Need unittest2 on python2.6 - from unittest2 import * - except ImportError: - print('You need unittest2 installed on python2.6.x to run tests') - - class TestCase: - """ skip everything """ - pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as unittest2 may not be available') -else: - from unittest import * diff --git a/ansible_collections/netapp/um_info/tests/unit/plugins/module_utils/test_netapp.py b/ansible_collections/netapp/um_info/tests/unit/plugins/module_utils/test_netapp.py deleted file mode 100644 index eefca9041..000000000 --- a/ansible_collections/netapp/um_info/tests/unit/plugins/module_utils/test_netapp.py +++ /dev/null @@ -1,236 +0,0 @@ -# Copyright (c) 2018 NetApp -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -''' unit tests for module_utils netapp.py ''' -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import json -import os.path -import sys -import tempfile - -import pytest - -from ansible.module_utils.ansible_release import __version__ as ansible_version -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -from ansible_collections.netapp.um_info.plugins.module_utils.netapp import COLLECTION_VERSION -from ansible_collections.netapp.um_info.tests.unit.compat.mock import patch - -import 
ansible_collections.netapp.um_info.plugins.module_utils.netapp as netapp_utils - - -if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7): - pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not be available') - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -SRR = { - # common responses - 'is_rest': (200, {}, None), - 'is_zapi': (400, {}, "Unreachable"), - # 'empty_good': ({}, None), - 'empty_good': (dict(_links=dict(self='me')), None), - 'get_next': (dict(_links=dict(self='me', next=dict(href='next_records'))), None), - 'get_data': (dict(_links=dict(self='me'), records=['data1', 'data2']), None), - 'end_of_sequence': (None, "Unexpected call to send_request"), - 'generic_error': (None, "Expected error"), - 'no__links_error': (dict(), None), - 'no_href_error': (dict(_links=dict(self='me', next=dict())), None), -} - - -def mock_args(feature_flags=None): - args = { - 'hostname': 'test', - 'username': 'test_user', - 'password': 'test_pass!', - } - if feature_flags is not None: - args.update({'feature_flags': feature_flags}) - return args - - -def create_module(args): - argument_spec = netapp_utils.na_um_host_argument_spec() - set_module_args(args) - module = basic.AnsibleModule(argument_spec) - return module - - -def create_restapi_object(args): - module = create_module(args) - module.fail_json = fail_json - rest_api = netapp_utils.UMRestAPI(module) - return rest_api - - -class mockResponse: - def __init__(self, 
json_data, status_code, raise_action=None): - self.json_data = json_data - self.status_code = status_code - self.content = json_data - self.raise_action = raise_action - - def raise_for_status(self): - pass - - def json(self): - if self.raise_action == 'bad_json': - raise ValueError(self.raise_action) - return self.json_data - - -@patch('ansible_collections.netapp.um_info.plugins.module_utils.netapp.UMRestAPI.send_request') -def test_empty_get(mock_request): - ''' get with no data ''' - mock_request.side_effect = [ - SRR['empty_good'], - SRR['end_of_sequence'], - ] - rest_api = create_restapi_object(mock_args()) - message, error = rest_api.get('api', None) - assert not error - # only one key (_links) - assert len(message) == 1 - - -@patch('ansible_collections.netapp.um_info.plugins.module_utils.netapp.UMRestAPI.send_request') -def test_get_next(mock_request): - ''' get with a next href ''' - mock_request.side_effect = [ - SRR['get_next'], - SRR['get_data'], - SRR['end_of_sequence'], - ] - rest_api = create_restapi_object(mock_args()) - message, error = rest_api.get('api', None) - assert not error - print('empty_get:', message) - assert message['records'] == SRR['get_data'][0]['records'] - - -@patch('ansible_collections.netapp.um_info.plugins.module_utils.netapp.UMRestAPI.send_request') -def test_negative_get_next_no__links(mock_request): - ''' get with a next href ''' - mock_request.side_effect = [ - SRR['no__links_error'], - SRR['end_of_sequence'], - ] - rest_api = create_restapi_object(mock_args()) - message, error = rest_api.get('api', None) - print('error:', error) - assert error - assert 'Expecting _links key in' in error - - -@patch('ansible_collections.netapp.um_info.plugins.module_utils.netapp.UMRestAPI.send_request') -def test_negative_get_next_no_href(mock_request): - ''' get with a next href ''' - mock_request.side_effect = [ - SRR['no_href_error'], - SRR['end_of_sequence'], - ] - rest_api = create_restapi_object(mock_args()) - message, error = 
rest_api.get('api', None) - print('error:', error) - assert error - assert 'Expecting href key in' in error - - -def test_has_feature_success_default_0(): - ''' existing feature_flag with default of False''' - flag = 'trace_apis' - module = create_module(mock_args()) - value = netapp_utils.has_feature(module, flag) - assert not value - - -def test_has_feature_success_default_1(): - ''' existing feature_flag with default of True''' - flag = 'strict_json_check' - module = create_module(mock_args()) - value = netapp_utils.has_feature(module, flag) - assert value - - -def test_has_feature_success_user_true(): - ''' existing feature_flag with value set to True ''' - flag = 'user_deprecation_warning' - args = dict(mock_args({flag: True})) - module = create_module(args) - value = netapp_utils.has_feature(module, flag) - assert value - - -def test_has_feature_success_user_false(): - ''' existing feature_flag with value set to False ''' - flag = 'user_deprecation_warning' - args = dict(mock_args({flag: False})) - print(args) - module = create_module(args) - value = netapp_utils.has_feature(module, flag) - assert not value - - -def test_has_feature_invalid_key(): - ''' existing feature_flag with unknown key ''' - flag = 'deprecation_warning_bad_key' - module = create_module(mock_args()) - # replace ANsible fail method with ours - module.fail_json = fail_json - with pytest.raises(AnsibleFailJson) as exc: - netapp_utils.has_feature(module, flag) - msg = 'Internal error: unexpected feature flag: %s' % flag - assert exc.value.args[0]['msg'] == msg - - -@patch('requests.request') -def test_empty_get_sent(mock_request): - ''' get with no data ''' - mock_request.return_value = mockResponse(json_data=dict(_links='me'), status_code=200) - rest_api = create_restapi_object(mock_args()) - message, error = rest_api.get('api', None) - assert not error - # only one key (_links) - assert len(message) == 1 - - -@patch('requests.request') -def test_empty_get_sent_bad_json(mock_request): - ''' 
get with no data ''' - mock_request.return_value = mockResponse(json_data='anything', status_code=200, raise_action='bad_json') - rest_api = create_restapi_object(mock_args()) - message, error = rest_api.get('api', None) - assert error - assert 'Expecting json, got: anything' in error - print('errors:', rest_api.errors) - print('debug:', rest_api.debug_logs) - - -@patch('requests.request') -def test_empty_get_sent_bad_but_empty_json(mock_request): - ''' get with no data ''' - mock_request.return_value = mockResponse(json_data='', status_code=200, raise_action='bad_json') - rest_api = create_restapi_object(mock_args()) - message, error = rest_api.get('api', None) - assert error - assert 'Expecting _links key in None' in error - print('errors:', rest_api.errors) - print('debug:', rest_api.debug_logs) diff --git a/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_aggregates_info.py b/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_aggregates_info.py deleted file mode 100644 index 9d2479484..000000000 --- a/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_aggregates_info.py +++ /dev/null @@ -1,159 +0,0 @@ -# (c) 2020, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -""" unit tests for Ansible module: na_um_aggregates_info """ - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type -import json -import pytest -import sys - -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -from ansible_collections.netapp.um_info.tests.unit.compat import unittest -from ansible_collections.netapp.um_info.tests.unit.compat.mock import patch, Mock -import ansible_collections.netapp.um_info.plugins.module_utils.netapp as netapp_utils - -from ansible_collections.netapp.um_info.plugins.modules.na_um_aggregates_info\ - import NetAppUMAggregate as my_module # module under test - - -if not 
netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7): - pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not be available') - - -# REST API canned responses when mocking send_request -SRR = { - # common responses - 'empty_not_so_good': ({}, None), - 'end_of_sequence': (None, "Unexpected call to send_request"), - 'generic_error': (None, "Expected error"), - 'get_next': (dict(_links=dict(self='me', next=dict(href='next_records'))), None), - 'get_data': (dict(_links=dict(self='me'), records=['data1', 'data2'], total_records=2), None), - 'get_data_missing_field': (dict(_links=dict(self='me'), records=['data1', 'data2']), None), - # module specific responses - 'get_aggregates': {'name': 'ansible'} -} - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - pass - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - pass - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -class MockUMConnection(object): - ''' mock server connection to Unified Manager host ''' - - def __init__(self): - ''' pass init ''' - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - 
exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - self.server = MockUMConnection() - # whether to use a mock or a simulator - self.onbox = False - - def set_default_args(self): - if self.onbox: - hostname = '10.10.10.10' - username = 'admin' - password = 'password' - else: - hostname = 'hostname' - username = 'username' - password = 'password' - return dict({ - 'hostname': hostname, - 'username': username, - 'password': password, - }) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - my_module() - print('Info: %s' % exc.value.args[0]['msg']) - - @patch('ansible_collections.netapp.um_info.plugins.modules.na_um_aggregates_info.NetAppUMAggregate.get_aggregates') - def test_ensure_list_aggregates_get_called(self, get_aggregates): - ''' fetching details of aggregates ''' - set_module_args(self.set_default_args()) - my_obj = my_module() - my_obj.server = self.server - my_obj.get_aggregates = Mock(return_value=SRR['get_aggregates']) - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - assert exc.value.args[0]['changed'] - # to reset na_helper from remembering the previous 'changed' value - - @patch('ansible_collections.netapp.um_info.plugins.modules.na_um_aggregates_info.NetAppUMAggregate.get_aggregates') - def test_ensure_get_called_existing(self, get_aggregates): - ''' test for existing aggregates''' - set_module_args(self.set_default_args()) - my_obj = my_module() - my_obj.get_aggregates = Mock(return_value=SRR['get_aggregates']) - assert my_obj.get_aggregates() is not None - - @patch('ansible_collections.netapp.um_info.plugins.module_utils.netapp.UMRestAPI.send_request') - def test_get_next(self, mock_request): - ''' test for existing aggregates''' - set_module_args(self.set_default_args()) - mock_request.side_effect = [ - SRR['get_next'], - 
SRR['get_data'], - SRR['end_of_sequence'], - ] - my_obj = my_module() - assert my_obj.get_aggregates() is not None - - @patch('ansible_collections.netapp.um_info.plugins.module_utils.netapp.UMRestAPI.send_request') - def test_negative_get_next(self, mock_request): - ''' test for existing aggregates''' - set_module_args(self.set_default_args()) - mock_request.side_effect = [ - SRR['get_next'], - SRR['get_data_missing_field'], - SRR['end_of_sequence'], - ] - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.get_aggregates() is not None - print(exc.value.args[0]) - msg = 'unexpected response from datacenter/storage/aggregates?order_by=performance_capacity.used' - assert msg in exc.value.args[0]['msg'] - msg = "expecting key: 'total_records'" - assert msg in exc.value.args[0]['msg'] diff --git a/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_clusters_info.py b/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_clusters_info.py deleted file mode 100644 index c4939adb8..000000000 --- a/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_clusters_info.py +++ /dev/null @@ -1,159 +0,0 @@ -# (c) 2020, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -""" unit tests for Ansible module: na_um_clusters_info """ - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type -import json -import pytest -import sys - -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -from ansible_collections.netapp.um_info.tests.unit.compat import unittest -from ansible_collections.netapp.um_info.tests.unit.compat.mock import patch, Mock -import ansible_collections.netapp.um_info.plugins.module_utils.netapp as netapp_utils - -from ansible_collections.netapp.um_info.plugins.modules.na_um_clusters_info\ - import NetAppUMCluster as my_module # module under test - - -if not 
netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7): - pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not be available') - - -# REST API canned responses when mocking send_request -SRR = { - # common responses - 'empty_good': ({}, None), - 'end_of_sequence': (None, "Unexpected call to send_request"), - 'generic_error': (None, "Expected error"), - 'get_next': (dict(_links=dict(self='me', next=dict(href='next_records'))), None), - 'get_data': (dict(_links=dict(self='me'), records=['data1', 'data2'], total_records=2), None), - 'get_data_missing_field': (dict(_links=dict(self='me'), records=['data1', 'data2']), None), - # module specific responses - 'get_cluster': {'name': 'ansible'} -} - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - pass - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - pass - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -class MockUMConnection(object): - ''' mock server connection to Unified Manager host ''' - - def __init__(self): - ''' pass init ''' - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - 
exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - self.server = MockUMConnection() - # whether to use a mock or a simulator - self.onbox = False - - def set_default_args(self): - if self.onbox: - hostname = '10.10.10.10' - username = 'admin' - password = 'password' - else: - hostname = 'hostname' - username = 'username' - password = 'password' - return dict({ - 'hostname': hostname, - 'username': username, - 'password': password, - }) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - my_module() - print('Info: %s' % exc.value.args[0]['msg']) - - @patch('ansible_collections.netapp.um_info.plugins.modules.na_um_clusters_info.NetAppUMCluster.get_clusters') - def test_ensure_list_cluster_get_called(self, get_cluster): - ''' fetching details of cluster ''' - set_module_args(self.set_default_args()) - my_obj = my_module() - my_obj.server = self.server - my_obj.get_cluster = Mock(return_value=SRR['get_cluster']) - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - assert exc.value.args[0]['changed'] - # to reset na_helper from remembering the previous 'changed' value - - @patch('ansible_collections.netapp.um_info.plugins.modules.na_um_clusters_info.NetAppUMCluster.get_clusters') - def test_ensure_get_called_existing(self, get_cluster): - ''' test for existing cluster''' - set_module_args(self.set_default_args()) - my_obj = my_module() - my_obj.get_cluster = Mock(return_value=SRR['get_cluster']) - assert my_obj.get_cluster() is not None - - @patch('ansible_collections.netapp.um_info.plugins.module_utils.netapp.UMRestAPI.send_request') - def test_get_next(self, mock_request): - ''' test for existing clusters''' - set_module_args(self.set_default_args()) - mock_request.side_effect = [ - SRR['get_next'], - SRR['get_data'], - SRR['end_of_sequence'], - ] - 
my_obj = my_module() - assert my_obj.get_clusters() is not None - - @patch('ansible_collections.netapp.um_info.plugins.module_utils.netapp.UMRestAPI.send_request') - def test_negative_get_next(self, mock_request): - ''' test for existing clusters''' - set_module_args(self.set_default_args()) - mock_request.side_effect = [ - SRR['get_next'], - SRR['get_data_missing_field'], - SRR['end_of_sequence'], - ] - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.get_clusters() is not None - print(exc.value.args[0]) - msg = 'unexpected response from datacenter/cluster/clusters' - assert msg in exc.value.args[0]['msg'] - msg = "expecting key: 'total_records'" - assert msg in exc.value.args[0]['msg'] diff --git a/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_list_aggregates.py b/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_list_aggregates.py deleted file mode 100644 index 9d2479484..000000000 --- a/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_list_aggregates.py +++ /dev/null @@ -1,159 +0,0 @@ -# (c) 2020, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -""" unit tests for Ansible module: na_um_aggregates_info """ - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type -import json -import pytest -import sys - -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -from ansible_collections.netapp.um_info.tests.unit.compat import unittest -from ansible_collections.netapp.um_info.tests.unit.compat.mock import patch, Mock -import ansible_collections.netapp.um_info.plugins.module_utils.netapp as netapp_utils - -from ansible_collections.netapp.um_info.plugins.modules.na_um_aggregates_info\ - import NetAppUMAggregate as my_module # module under test - - -if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7): - pytestmark = 
pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not be available') - - -# REST API canned responses when mocking send_request -SRR = { - # common responses - 'empty_not_so_good': ({}, None), - 'end_of_sequence': (None, "Unexpected call to send_request"), - 'generic_error': (None, "Expected error"), - 'get_next': (dict(_links=dict(self='me', next=dict(href='next_records'))), None), - 'get_data': (dict(_links=dict(self='me'), records=['data1', 'data2'], total_records=2), None), - 'get_data_missing_field': (dict(_links=dict(self='me'), records=['data1', 'data2']), None), - # module specific responses - 'get_aggregates': {'name': 'ansible'} -} - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - pass - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - pass - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -class MockUMConnection(object): - ''' mock server connection to Unified Manager host ''' - - def __init__(self): - ''' pass init ''' - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - 
self.addCleanup(self.mock_module_helper.stop) - self.server = MockUMConnection() - # whether to use a mock or a simulator - self.onbox = False - - def set_default_args(self): - if self.onbox: - hostname = '10.10.10.10' - username = 'admin' - password = 'password' - else: - hostname = 'hostname' - username = 'username' - password = 'password' - return dict({ - 'hostname': hostname, - 'username': username, - 'password': password, - }) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - my_module() - print('Info: %s' % exc.value.args[0]['msg']) - - @patch('ansible_collections.netapp.um_info.plugins.modules.na_um_aggregates_info.NetAppUMAggregate.get_aggregates') - def test_ensure_list_aggregates_get_called(self, get_aggregates): - ''' fetching details of aggregates ''' - set_module_args(self.set_default_args()) - my_obj = my_module() - my_obj.server = self.server - my_obj.get_aggregates = Mock(return_value=SRR['get_aggregates']) - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - assert exc.value.args[0]['changed'] - # to reset na_helper from remembering the previous 'changed' value - - @patch('ansible_collections.netapp.um_info.plugins.modules.na_um_aggregates_info.NetAppUMAggregate.get_aggregates') - def test_ensure_get_called_existing(self, get_aggregates): - ''' test for existing aggregates''' - set_module_args(self.set_default_args()) - my_obj = my_module() - my_obj.get_aggregates = Mock(return_value=SRR['get_aggregates']) - assert my_obj.get_aggregates() is not None - - @patch('ansible_collections.netapp.um_info.plugins.module_utils.netapp.UMRestAPI.send_request') - def test_get_next(self, mock_request): - ''' test for existing aggregates''' - set_module_args(self.set_default_args()) - mock_request.side_effect = [ - SRR['get_next'], - SRR['get_data'], - SRR['end_of_sequence'], - ] - my_obj = my_module() - assert 
my_obj.get_aggregates() is not None - - @patch('ansible_collections.netapp.um_info.plugins.module_utils.netapp.UMRestAPI.send_request') - def test_negative_get_next(self, mock_request): - ''' test for existing aggregates''' - set_module_args(self.set_default_args()) - mock_request.side_effect = [ - SRR['get_next'], - SRR['get_data_missing_field'], - SRR['end_of_sequence'], - ] - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.get_aggregates() is not None - print(exc.value.args[0]) - msg = 'unexpected response from datacenter/storage/aggregates?order_by=performance_capacity.used' - assert msg in exc.value.args[0]['msg'] - msg = "expecting key: 'total_records'" - assert msg in exc.value.args[0]['msg'] diff --git a/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_list_clusters.py b/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_list_clusters.py deleted file mode 100644 index c4939adb8..000000000 --- a/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_list_clusters.py +++ /dev/null @@ -1,159 +0,0 @@ -# (c) 2020, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -""" unit tests for Ansible module: na_um_clusters_info """ - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type -import json -import pytest -import sys - -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -from ansible_collections.netapp.um_info.tests.unit.compat import unittest -from ansible_collections.netapp.um_info.tests.unit.compat.mock import patch, Mock -import ansible_collections.netapp.um_info.plugins.module_utils.netapp as netapp_utils - -from ansible_collections.netapp.um_info.plugins.modules.na_um_clusters_info\ - import NetAppUMCluster as my_module # module under test - - -if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7): - pytestmark = 
pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not be available') - - -# REST API canned responses when mocking send_request -SRR = { - # common responses - 'empty_good': ({}, None), - 'end_of_sequence': (None, "Unexpected call to send_request"), - 'generic_error': (None, "Expected error"), - 'get_next': (dict(_links=dict(self='me', next=dict(href='next_records'))), None), - 'get_data': (dict(_links=dict(self='me'), records=['data1', 'data2'], total_records=2), None), - 'get_data_missing_field': (dict(_links=dict(self='me'), records=['data1', 'data2']), None), - # module specific responses - 'get_cluster': {'name': 'ansible'} -} - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - pass - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - pass - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -class MockUMConnection(object): - ''' mock server connection to Unified Manager host ''' - - def __init__(self): - ''' pass init ''' - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - 
self.addCleanup(self.mock_module_helper.stop) - self.server = MockUMConnection() - # whether to use a mock or a simulator - self.onbox = False - - def set_default_args(self): - if self.onbox: - hostname = '10.10.10.10' - username = 'admin' - password = 'password' - else: - hostname = 'hostname' - username = 'username' - password = 'password' - return dict({ - 'hostname': hostname, - 'username': username, - 'password': password, - }) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - my_module() - print('Info: %s' % exc.value.args[0]['msg']) - - @patch('ansible_collections.netapp.um_info.plugins.modules.na_um_clusters_info.NetAppUMCluster.get_clusters') - def test_ensure_list_cluster_get_called(self, get_cluster): - ''' fetching details of cluster ''' - set_module_args(self.set_default_args()) - my_obj = my_module() - my_obj.server = self.server - my_obj.get_cluster = Mock(return_value=SRR['get_cluster']) - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - assert exc.value.args[0]['changed'] - # to reset na_helper from remembering the previous 'changed' value - - @patch('ansible_collections.netapp.um_info.plugins.modules.na_um_clusters_info.NetAppUMCluster.get_clusters') - def test_ensure_get_called_existing(self, get_cluster): - ''' test for existing cluster''' - set_module_args(self.set_default_args()) - my_obj = my_module() - my_obj.get_cluster = Mock(return_value=SRR['get_cluster']) - assert my_obj.get_cluster() is not None - - @patch('ansible_collections.netapp.um_info.plugins.module_utils.netapp.UMRestAPI.send_request') - def test_get_next(self, mock_request): - ''' test for existing clusters''' - set_module_args(self.set_default_args()) - mock_request.side_effect = [ - SRR['get_next'], - SRR['get_data'], - SRR['end_of_sequence'], - ] - my_obj = my_module() - assert my_obj.get_clusters() is not None - - 
@patch('ansible_collections.netapp.um_info.plugins.module_utils.netapp.UMRestAPI.send_request') - def test_negative_get_next(self, mock_request): - ''' test for existing clusters''' - set_module_args(self.set_default_args()) - mock_request.side_effect = [ - SRR['get_next'], - SRR['get_data_missing_field'], - SRR['end_of_sequence'], - ] - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.get_clusters() is not None - print(exc.value.args[0]) - msg = 'unexpected response from datacenter/cluster/clusters' - assert msg in exc.value.args[0]['msg'] - msg = "expecting key: 'total_records'" - assert msg in exc.value.args[0]['msg'] diff --git a/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_list_nodes.py b/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_list_nodes.py deleted file mode 100644 index e5769d1f1..000000000 --- a/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_list_nodes.py +++ /dev/null @@ -1,158 +0,0 @@ -# (c) 2020, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -""" unit tests for Ansible module: na_um_nodes_info """ - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type -import json -import pytest -import sys - -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -from ansible_collections.netapp.um_info.tests.unit.compat import unittest -from ansible_collections.netapp.um_info.tests.unit.compat.mock import patch, Mock -import ansible_collections.netapp.um_info.plugins.module_utils.netapp as netapp_utils - -from ansible_collections.netapp.um_info.plugins.modules.na_um_nodes_info\ - import NetAppUMNode as my_module # module under test - - -if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7): - pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not be available') - - -# REST API canned responses 
when mocking send_request -SRR = { - # common responses - 'empty_good': ({}, None), - 'end_of_sequence': (None, "Unexpected call to send_request"), - 'generic_error': (None, "Expected error"), - 'get_next': (dict(_links=dict(self='me', next=dict(href='next_records'))), None), - 'get_data': (dict(_links=dict(self='me'), records=['data1', 'data2'], total_records=2), None), - 'get_data_missing_field': (dict(_links=dict(self='me'), records=['data1', 'data2']), None), - # module specific responses - 'get_nodes': {'name': 'ansible'} -} - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - pass - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - pass - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -class MockUMConnection(object): - ''' mock server connection to Unified Manager host ''' - - def __init__(self): - ''' pass init ''' - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - self.server = MockUMConnection() - # whether to use a mock or a simulator - 
self.onbox = False - - def set_default_args(self): - if self.onbox: - hostname = '10.10.10.10' - username = 'admin' - password = 'password' - else: - hostname = 'hostname' - username = 'username' - password = 'password' - return dict({ - 'hostname': hostname, - 'username': username, - 'password': password, - }) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - my_module() - print('Info: %s' % exc.value.args[0]['msg']) - - @patch('ansible_collections.netapp.um_info.plugins.modules.na_um_nodes_info.NetAppUMNode.get_nodes') - def test_ensure_list_nodes_get_called(self, get_nodes): - ''' fetching details of nodes ''' - set_module_args(self.set_default_args()) - my_obj = my_module() - my_obj.server = self.server - my_obj.get_nodes = Mock(return_value=SRR['get_nodes']) - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.um_info.plugins.modules.na_um_nodes_info.NetAppUMNode.get_nodes') - def test_ensure_get_called_existing(self, get_nodes): - ''' test for existing nodes''' - set_module_args(self.set_default_args()) - my_obj = my_module() - my_obj.get_nodes = Mock(return_value=SRR['get_nodes']) - assert my_obj.get_nodes() is not None - - @patch('ansible_collections.netapp.um_info.plugins.module_utils.netapp.UMRestAPI.send_request') - def test_get_next(self, mock_request): - ''' test for existing nodes''' - set_module_args(self.set_default_args()) - mock_request.side_effect = [ - SRR['get_next'], - SRR['get_data'], - SRR['end_of_sequence'], - ] - my_obj = my_module() - assert my_obj.get_nodes() is not None - - @patch('ansible_collections.netapp.um_info.plugins.module_utils.netapp.UMRestAPI.send_request') - def test_negative_get_next(self, mock_request): - ''' test for existing nodes''' - set_module_args(self.set_default_args()) - mock_request.side_effect = [ 
- SRR['get_next'], - SRR['get_data_missing_field'], - SRR['end_of_sequence'], - ] - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.get_nodes() is not None - print(exc.value.args[0]) - msg = 'unexpected response from datacenter/cluster/nodes' - assert msg in exc.value.args[0]['msg'] - msg = "expecting key: 'total_records'" - assert msg in exc.value.args[0]['msg'] diff --git a/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_list_svms.py b/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_list_svms.py deleted file mode 100644 index 2eafd508f..000000000 --- a/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_list_svms.py +++ /dev/null @@ -1,158 +0,0 @@ -# (c) 2020, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -""" unit tests for Ansible module: na_um_svms_info """ - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type -import json -import pytest -import sys - -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -from ansible_collections.netapp.um_info.tests.unit.compat import unittest -from ansible_collections.netapp.um_info.tests.unit.compat.mock import patch, Mock -import ansible_collections.netapp.um_info.plugins.module_utils.netapp as netapp_utils - -from ansible_collections.netapp.um_info.plugins.modules.na_um_svms_info\ - import NetAppUMSVM as my_module # module under test - - -if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7): - pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not be available') - - -# REST API canned responses when mocking send_request -SRR = { - # common responses - 'empty_good': ({}, None), - 'end_of_sequence': (None, "Unexpected call to send_request"), - 'generic_error': (None, "Expected error"), - 'get_next': (dict(_links=dict(self='me', next=dict(href='next_records'))), 
None), - 'get_data': (dict(_links=dict(self='me'), records=['data1', 'data2'], total_records=2), None), - 'get_data_missing_field': (dict(_links=dict(self='me'), records=['data1', 'data2']), None), - # module specific responses - 'get_svms': {'name': 'ansible'} -} - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - pass - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - pass - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -class MockUMConnection(object): - ''' mock server connection to Unified Manager host ''' - - def __init__(self): - ''' pass init ''' - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - self.server = MockUMConnection() - # whether to use a mock or a simulator - self.onbox = False - - def set_default_args(self): - if self.onbox: - hostname = '10.10.10.10' - username = 'admin' - password = 'password' - else: - hostname = 'hostname' - username = 'username' - password = 'password' - return dict({ - 'hostname': hostname, - 
'username': username, - 'password': password, - }) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - my_module() - print('Info: %s' % exc.value.args[0]['msg']) - - @patch('ansible_collections.netapp.um_info.plugins.modules.na_um_svms_info.NetAppUMSVM.get_svms') - def test_ensure_list_svms_get_called(self, get_svms): - ''' fetching details of svms ''' - set_module_args(self.set_default_args()) - my_obj = my_module() - my_obj.server = self.server - my_obj.get_svms = Mock(return_value=SRR['get_svms']) - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.um_info.plugins.modules.na_um_svms_info.NetAppUMSVM.get_svms') - def test_ensure_get_called_existing(self, get_svms): - ''' test for existing svms''' - set_module_args(self.set_default_args()) - my_obj = my_module() - my_obj.get_svms = Mock(return_value=SRR['get_svms']) - assert my_obj.get_svms() is not None - - @patch('ansible_collections.netapp.um_info.plugins.module_utils.netapp.UMRestAPI.send_request') - def test_get_next(self, mock_request): - ''' test for existing svms''' - set_module_args(self.set_default_args()) - mock_request.side_effect = [ - SRR['get_next'], - SRR['get_data'], - SRR['end_of_sequence'], - ] - my_obj = my_module() - assert my_obj.get_svms() is not None - - @patch('ansible_collections.netapp.um_info.plugins.module_utils.netapp.UMRestAPI.send_request') - def test_negative_get_next(self, mock_request): - ''' test for existing svms''' - set_module_args(self.set_default_args()) - mock_request.side_effect = [ - SRR['get_next'], - SRR['get_data_missing_field'], - SRR['end_of_sequence'], - ] - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.get_svms() is not None - print(exc.value.args[0]) - msg = 'unexpected response from datacenter/svm/svms' - assert msg in 
exc.value.args[0]['msg'] - msg = "expecting key: 'total_records'" - assert msg in exc.value.args[0]['msg'] diff --git a/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_list_volumes.py b/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_list_volumes.py deleted file mode 100644 index 4c8a267fb..000000000 --- a/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_list_volumes.py +++ /dev/null @@ -1,158 +0,0 @@ -# (c) 2020, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -""" unit tests for Ansible module: na_um_volumes_info """ - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type -import json -import pytest -import sys - -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -from ansible_collections.netapp.um_info.tests.unit.compat import unittest -from ansible_collections.netapp.um_info.tests.unit.compat.mock import patch, Mock -import ansible_collections.netapp.um_info.plugins.module_utils.netapp as netapp_utils - -from ansible_collections.netapp.um_info.plugins.modules.na_um_volumes_info\ - import NetAppUMVolume as my_module # module under test - - -if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7): - pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not be available') - - -# REST API canned responses when mocking send_request -SRR = { - # common responses - 'empty_good': ({}, None), - 'end_of_sequence': (None, "Unexpected call to send_request"), - 'generic_error': (None, "Expected error"), - 'get_next': (dict(_links=dict(self='me', next=dict(href='next_records'))), None), - 'get_data': (dict(_links=dict(self='me'), records=['data1', 'data2'], total_records=2), None), - 'get_data_missing_field': (dict(_links=dict(self='me'), records=['data1', 'data2']), None), - # module specific responses - 'get_volumes': {'name': 'ansible'} -} - 
- -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - pass - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - pass - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -class MockUMConnection(object): - ''' mock server connection to Unified Manager host ''' - - def __init__(self): - ''' pass init ''' - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - self.server = MockUMConnection() - # whether to use a mock or a simulator - self.onbox = False - - def set_default_args(self): - if self.onbox: - hostname = '10.10.10.10' - username = 'admin' - password = 'password' - else: - hostname = 'hostname' - username = 'username' - password = 'password' - return dict({ - 'hostname': hostname, - 'username': username, - 'password': password, - }) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - my_module() - print('Info: %s' % 
exc.value.args[0]['msg']) - - @patch('ansible_collections.netapp.um_info.plugins.modules.na_um_volumes_info.NetAppUMVolume.get_volumes') - def test_ensure_list_volumes_get_called(self, get_volumes): - ''' fetching details of volumes ''' - set_module_args(self.set_default_args()) - my_obj = my_module() - my_obj.server = self.server - my_obj.get_volumes = Mock(return_value=SRR['get_volumes']) - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.um_info.plugins.modules.na_um_volumes_info.NetAppUMVolume.get_volumes') - def test_ensure_get_called_existing(self, get_volumes): - ''' test for existing volumes''' - set_module_args(self.set_default_args()) - my_obj = my_module() - my_obj.get_volumes = Mock(return_value=SRR['get_volumes']) - assert my_obj.get_volumes() is not None - - @patch('ansible_collections.netapp.um_info.plugins.module_utils.netapp.UMRestAPI.send_request') - def test_get_next(self, mock_request): - ''' test for existing volumes''' - set_module_args(self.set_default_args()) - mock_request.side_effect = [ - SRR['get_next'], - SRR['get_data'], - SRR['end_of_sequence'], - ] - my_obj = my_module() - assert my_obj.get_volumes() is not None - - @patch('ansible_collections.netapp.um_info.plugins.module_utils.netapp.UMRestAPI.send_request') - def test_negative_get_next(self, mock_request): - ''' test for existing volumes''' - set_module_args(self.set_default_args()) - mock_request.side_effect = [ - SRR['get_next'], - SRR['get_data_missing_field'], - SRR['end_of_sequence'], - ] - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.get_volumes() is not None - print(exc.value.args[0]) - msg = 'unexpected response from datacenter/storage/volumes' - assert msg in exc.value.args[0]['msg'] - msg = "expecting key: 'total_records'" - assert msg in exc.value.args[0]['msg'] diff --git 
a/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_nodes_info.py b/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_nodes_info.py deleted file mode 100644 index e5769d1f1..000000000 --- a/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_nodes_info.py +++ /dev/null @@ -1,158 +0,0 @@ -# (c) 2020, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -""" unit tests for Ansible module: na_um_nodes_info """ - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type -import json -import pytest -import sys - -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -from ansible_collections.netapp.um_info.tests.unit.compat import unittest -from ansible_collections.netapp.um_info.tests.unit.compat.mock import patch, Mock -import ansible_collections.netapp.um_info.plugins.module_utils.netapp as netapp_utils - -from ansible_collections.netapp.um_info.plugins.modules.na_um_nodes_info\ - import NetAppUMNode as my_module # module under test - - -if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7): - pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not be available') - - -# REST API canned responses when mocking send_request -SRR = { - # common responses - 'empty_good': ({}, None), - 'end_of_sequence': (None, "Unexpected call to send_request"), - 'generic_error': (None, "Expected error"), - 'get_next': (dict(_links=dict(self='me', next=dict(href='next_records'))), None), - 'get_data': (dict(_links=dict(self='me'), records=['data1', 'data2'], total_records=2), None), - 'get_data_missing_field': (dict(_links=dict(self='me'), records=['data1', 'data2']), None), - # module specific responses - 'get_nodes': {'name': 'ansible'} -} - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = 
json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - pass - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - pass - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -class MockUMConnection(object): - ''' mock server connection to Unified Manager host ''' - - def __init__(self): - ''' pass init ''' - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - self.server = MockUMConnection() - # whether to use a mock or a simulator - self.onbox = False - - def set_default_args(self): - if self.onbox: - hostname = '10.10.10.10' - username = 'admin' - password = 'password' - else: - hostname = 'hostname' - username = 'username' - password = 'password' - return dict({ - 'hostname': hostname, - 'username': username, - 'password': password, - }) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - my_module() - print('Info: %s' % exc.value.args[0]['msg']) - - @patch('ansible_collections.netapp.um_info.plugins.modules.na_um_nodes_info.NetAppUMNode.get_nodes') 
- def test_ensure_list_nodes_get_called(self, get_nodes): - ''' fetching details of nodes ''' - set_module_args(self.set_default_args()) - my_obj = my_module() - my_obj.server = self.server - my_obj.get_nodes = Mock(return_value=SRR['get_nodes']) - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.um_info.plugins.modules.na_um_nodes_info.NetAppUMNode.get_nodes') - def test_ensure_get_called_existing(self, get_nodes): - ''' test for existing nodes''' - set_module_args(self.set_default_args()) - my_obj = my_module() - my_obj.get_nodes = Mock(return_value=SRR['get_nodes']) - assert my_obj.get_nodes() is not None - - @patch('ansible_collections.netapp.um_info.plugins.module_utils.netapp.UMRestAPI.send_request') - def test_get_next(self, mock_request): - ''' test for existing nodes''' - set_module_args(self.set_default_args()) - mock_request.side_effect = [ - SRR['get_next'], - SRR['get_data'], - SRR['end_of_sequence'], - ] - my_obj = my_module() - assert my_obj.get_nodes() is not None - - @patch('ansible_collections.netapp.um_info.plugins.module_utils.netapp.UMRestAPI.send_request') - def test_negative_get_next(self, mock_request): - ''' test for existing nodes''' - set_module_args(self.set_default_args()) - mock_request.side_effect = [ - SRR['get_next'], - SRR['get_data_missing_field'], - SRR['end_of_sequence'], - ] - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.get_nodes() is not None - print(exc.value.args[0]) - msg = 'unexpected response from datacenter/cluster/nodes' - assert msg in exc.value.args[0]['msg'] - msg = "expecting key: 'total_records'" - assert msg in exc.value.args[0]['msg'] diff --git a/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_svms_info.py b/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_svms_info.py deleted file mode 100644 index 2eafd508f..000000000 --- 
a/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_svms_info.py +++ /dev/null @@ -1,158 +0,0 @@ -# (c) 2020, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -""" unit tests for Ansible module: na_um_svms_info """ - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type -import json -import pytest -import sys - -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -from ansible_collections.netapp.um_info.tests.unit.compat import unittest -from ansible_collections.netapp.um_info.tests.unit.compat.mock import patch, Mock -import ansible_collections.netapp.um_info.plugins.module_utils.netapp as netapp_utils - -from ansible_collections.netapp.um_info.plugins.modules.na_um_svms_info\ - import NetAppUMSVM as my_module # module under test - - -if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7): - pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not be available') - - -# REST API canned responses when mocking send_request -SRR = { - # common responses - 'empty_good': ({}, None), - 'end_of_sequence': (None, "Unexpected call to send_request"), - 'generic_error': (None, "Expected error"), - 'get_next': (dict(_links=dict(self='me', next=dict(href='next_records'))), None), - 'get_data': (dict(_links=dict(self='me'), records=['data1', 'data2'], total_records=2), None), - 'get_data_missing_field': (dict(_links=dict(self='me'), records=['data1', 'data2']), None), - # module specific responses - 'get_svms': {'name': 'ansible'} -} - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - pass - - 
-class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - pass - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -class MockUMConnection(object): - ''' mock server connection to Unified Manager host ''' - - def __init__(self): - ''' pass init ''' - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - self.server = MockUMConnection() - # whether to use a mock or a simulator - self.onbox = False - - def set_default_args(self): - if self.onbox: - hostname = '10.10.10.10' - username = 'admin' - password = 'password' - else: - hostname = 'hostname' - username = 'username' - password = 'password' - return dict({ - 'hostname': hostname, - 'username': username, - 'password': password, - }) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - my_module() - print('Info: %s' % exc.value.args[0]['msg']) - - @patch('ansible_collections.netapp.um_info.plugins.modules.na_um_svms_info.NetAppUMSVM.get_svms') - def test_ensure_list_svms_get_called(self, get_svms): - ''' fetching details of svms ''' - set_module_args(self.set_default_args()) - my_obj = my_module() - my_obj.server = self.server - my_obj.get_svms = Mock(return_value=SRR['get_svms']) - with 
pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.um_info.plugins.modules.na_um_svms_info.NetAppUMSVM.get_svms') - def test_ensure_get_called_existing(self, get_svms): - ''' test for existing svms''' - set_module_args(self.set_default_args()) - my_obj = my_module() - my_obj.get_svms = Mock(return_value=SRR['get_svms']) - assert my_obj.get_svms() is not None - - @patch('ansible_collections.netapp.um_info.plugins.module_utils.netapp.UMRestAPI.send_request') - def test_get_next(self, mock_request): - ''' test for existing svms''' - set_module_args(self.set_default_args()) - mock_request.side_effect = [ - SRR['get_next'], - SRR['get_data'], - SRR['end_of_sequence'], - ] - my_obj = my_module() - assert my_obj.get_svms() is not None - - @patch('ansible_collections.netapp.um_info.plugins.module_utils.netapp.UMRestAPI.send_request') - def test_negative_get_next(self, mock_request): - ''' test for existing svms''' - set_module_args(self.set_default_args()) - mock_request.side_effect = [ - SRR['get_next'], - SRR['get_data_missing_field'], - SRR['end_of_sequence'], - ] - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.get_svms() is not None - print(exc.value.args[0]) - msg = 'unexpected response from datacenter/svm/svms' - assert msg in exc.value.args[0]['msg'] - msg = "expecting key: 'total_records'" - assert msg in exc.value.args[0]['msg'] diff --git a/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_volumes_info.py b/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_volumes_info.py deleted file mode 100644 index 4c8a267fb..000000000 --- a/ansible_collections/netapp/um_info/tests/unit/plugins/modules/test_na_um_volumes_info.py +++ /dev/null @@ -1,158 +0,0 @@ -# (c) 2020, NetApp, Inc -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -""" unit tests for Ansible module: 
na_um_volumes_info """ - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type -import json -import pytest -import sys - -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes -from ansible_collections.netapp.um_info.tests.unit.compat import unittest -from ansible_collections.netapp.um_info.tests.unit.compat.mock import patch, Mock -import ansible_collections.netapp.um_info.plugins.module_utils.netapp as netapp_utils - -from ansible_collections.netapp.um_info.plugins.modules.na_um_volumes_info\ - import NetAppUMVolume as my_module # module under test - - -if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7): - pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not be available') - - -# REST API canned responses when mocking send_request -SRR = { - # common responses - 'empty_good': ({}, None), - 'end_of_sequence': (None, "Unexpected call to send_request"), - 'generic_error': (None, "Expected error"), - 'get_next': (dict(_links=dict(self='me', next=dict(href='next_records'))), None), - 'get_data': (dict(_links=dict(self='me'), records=['data1', 'data2'], total_records=2), None), - 'get_data_missing_field': (dict(_links=dict(self='me'), records=['data1', 'data2']), None), - # module specific responses - 'get_volumes': {'name': 'ansible'} -} - - -def set_module_args(args): - """prepare arguments so that they will be picked up during module creation""" - args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) - basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access - - -class AnsibleExitJson(Exception): - """Exception class to be raised by module.exit_json and caught by the test case""" - pass - - -class AnsibleFailJson(Exception): - """Exception class to be raised by module.fail_json and caught by the test case""" - pass - - -def exit_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over exit_json; package return data into an 
exception""" - if 'changed' not in kwargs: - kwargs['changed'] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): # pylint: disable=unused-argument - """function to patch over fail_json; package return data into an exception""" - kwargs['failed'] = True - raise AnsibleFailJson(kwargs) - - -class MockUMConnection(object): - ''' mock server connection to Unified Manager host ''' - - def __init__(self): - ''' pass init ''' - - -class TestMyModule(unittest.TestCase): - ''' a group of related Unit Tests ''' - - def setUp(self): - self.mock_module_helper = patch.multiple(basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json) - self.mock_module_helper.start() - self.addCleanup(self.mock_module_helper.stop) - self.server = MockUMConnection() - # whether to use a mock or a simulator - self.onbox = False - - def set_default_args(self): - if self.onbox: - hostname = '10.10.10.10' - username = 'admin' - password = 'password' - else: - hostname = 'hostname' - username = 'username' - password = 'password' - return dict({ - 'hostname': hostname, - 'username': username, - 'password': password, - }) - - def test_module_fail_when_required_args_missing(self): - ''' required arguments are reported as errors ''' - with pytest.raises(AnsibleFailJson) as exc: - set_module_args({}) - my_module() - print('Info: %s' % exc.value.args[0]['msg']) - - @patch('ansible_collections.netapp.um_info.plugins.modules.na_um_volumes_info.NetAppUMVolume.get_volumes') - def test_ensure_list_volumes_get_called(self, get_volumes): - ''' fetching details of volumes ''' - set_module_args(self.set_default_args()) - my_obj = my_module() - my_obj.server = self.server - my_obj.get_volumes = Mock(return_value=SRR['get_volumes']) - with pytest.raises(AnsibleExitJson) as exc: - my_obj.apply() - assert exc.value.args[0]['changed'] - - @patch('ansible_collections.netapp.um_info.plugins.modules.na_um_volumes_info.NetAppUMVolume.get_volumes') - def 
test_ensure_get_called_existing(self, get_volumes): - ''' test for existing volumes''' - set_module_args(self.set_default_args()) - my_obj = my_module() - my_obj.get_volumes = Mock(return_value=SRR['get_volumes']) - assert my_obj.get_volumes() is not None - - @patch('ansible_collections.netapp.um_info.plugins.module_utils.netapp.UMRestAPI.send_request') - def test_get_next(self, mock_request): - ''' test for existing volumes''' - set_module_args(self.set_default_args()) - mock_request.side_effect = [ - SRR['get_next'], - SRR['get_data'], - SRR['end_of_sequence'], - ] - my_obj = my_module() - assert my_obj.get_volumes() is not None - - @patch('ansible_collections.netapp.um_info.plugins.module_utils.netapp.UMRestAPI.send_request') - def test_negative_get_next(self, mock_request): - ''' test for existing volumes''' - set_module_args(self.set_default_args()) - mock_request.side_effect = [ - SRR['get_next'], - SRR['get_data_missing_field'], - SRR['end_of_sequence'], - ] - my_obj = my_module() - with pytest.raises(AnsibleFailJson) as exc: - my_obj.get_volumes() is not None - print(exc.value.args[0]) - msg = 'unexpected response from datacenter/storage/volumes' - assert msg in exc.value.args[0]['msg'] - msg = "expecting key: 'total_records'" - assert msg in exc.value.args[0]['msg'] diff --git a/ansible_collections/netapp/um_info/tests/unit/requirements.txt b/ansible_collections/netapp/um_info/tests/unit/requirements.txt deleted file mode 100644 index b754473a9..000000000 --- a/ansible_collections/netapp/um_info/tests/unit/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -requests ; python_version >= '2.7' |