author    | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-13 12:04:41 +0000
committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-13 12:04:41 +0000
commit    | 975f66f2eebe9dadba04f275774d4ab83f74cf25 (patch)
tree      | 89bd26a93aaae6a25749145b7e4bca4a1e75b2be /ansible_collections/netapp/storagegrid
parent    | Initial commit. (diff)
download  | ansible-975f66f2eebe9dadba04f275774d4ab83f74cf25.tar.xz ansible-975f66f2eebe9dadba04f275774d4ab83f74cf25.zip
Adding upstream version 7.7.0+dfsg. (upstream/7.7.0+dfsg)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'ansible_collections/netapp/storagegrid')
68 files changed, 15422 insertions, 0 deletions
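
The import places the collection at ansible_collections/netapp/storagegrid inside the ansible source tree, so once the built package is installed its modules are addressed by fully qualified collection name. A minimal sketch, assuming a reachable Admin Node and an auth token registered as in the collection README further down (the endpoint and variable names are placeholders, not part of this commit):

```yaml
# Sketch only: invoking one of the imported modules by FQCN.
# api_url and auth_token are placeholder values; see the README's authorize task below.
- name: Gather StorageGRID grid details with the vendored collection
  netapp.storagegrid.na_sg_grid_info:
    api_url: "https://sgadmin.example.com"
    auth_token: "{{ auth.json.data }}"
    validate_certs: false
  register: sg_info
```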
diff --git a/ansible_collections/netapp/storagegrid/.github/workflows/coverage.yml b/ansible_collections/netapp/storagegrid/.github/workflows/coverage.yml
new file mode 100644
index 000000000..d33950e7a
--- /dev/null
+++ b/ansible_collections/netapp/storagegrid/.github/workflows/coverage.yml
@@ -0,0 +1,45 @@
+name: NetApp.storagegrid Ansible Coverage
+
+on:
+  push:
+  pull_request:
+  schedule:
+    - cron: '0 6 * * *'
+
+jobs:
+  sanity:
+    name: Coverage on StorageGrid
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Check out code
+        uses: actions/checkout@v2
+
+      - name: Set up Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: 3.8
+
+      - name: Install ansible stable-2.11
+        run: pip install https://github.com/ansible/ansible/archive/stable-2.11.tar.gz --disable-pip-version-check
+
+      - name: Make directory to make ansible-test happy
+        run: |
+          pwd
+          mkdir -p ansible_collections/netapp/storagegrid/
+          rsync -av . ansible_collections/netapp/storagegrid/ --exclude ansible_collections/netapp/storagegrid/
+
+      - name: Run Unit Tests
+        run: ansible-test units --coverage --color --docker --python 3.8
+        working-directory: ansible_collections/netapp/storagegrid/
+
+      # ansible-test supports producing code coverage data
+      - name: Generate coverage report
+        run: ansible-test coverage xml -v --requirements --group-by command --group-by version
+        working-directory: ansible_collections/netapp/storagegrid/
+
+      - name: Upload coverage to Codecov
+        uses: codecov/codecov-action@v2
+        with:
+          working-directory: ansible_collections/netapp/storagegrid/
+          verbose: true
\ No newline at end of file
diff --git a/ansible_collections/netapp/storagegrid/.github/workflows/main.yml b/ansible_collections/netapp/storagegrid/.github/workflows/main.yml
new file mode 100644
index 000000000..ec05c061d
--- /dev/null
+++ b/ansible_collections/netapp/storagegrid/.github/workflows/main.yml
@@ -0,0 +1,48 @@
+name: NetApp.storagegrid Ansible CI
+
+on:
+  push:
+  pull_request:
+  schedule:
+    - cron: '0 6 * * *'
+
+jobs:
+  sanity:
+    name: Sanity (${{ matrix.ansible }}) on storagegrid
+    runs-on: ubuntu-latest
+    strategy:
+      fail-fast: false
+      matrix:
+        ansible:
+          - stable-2.9
+          - stable-2.10
+          - stable-2.11
+          - stable-2.12
+          - stable-2.13
+          - devel
+
+    steps:
+      - name: Check out code
+        uses: actions/checkout@v2
+
+      - name: Set up Python
+        uses: actions/setup-python@v2
+        with:
+          # Ansible 2.14 requires 3.9 as a minimum
+          python-version: 3.9
+
+      - name: Install ansible (${{ matrix.ansible }})
+        run: pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible }}.tar.gz --disable-pip-version-check
+
+      - name: Make directory to make ansible-test happy
+        run: |
+          pwd
+          mkdir -p ansible_collections/netapp/storagegrid/
+          rsync -av . ansible_collections/netapp/storagegrid/ --exclude ansible_collections/netapp/storagegrid/
+      - name: Run sanity tests storagegrid
+        run: ansible-test sanity --docker -v --color
+        working-directory: ansible_collections/netapp/storagegrid/
+
+      - name: Run Unit Tests
+        run: ansible-test units --docker -v --color
+        working-directory: ansible_collections/netapp/storagegrid/
diff --git a/ansible_collections/netapp/storagegrid/CHANGELOG.rst b/ansible_collections/netapp/storagegrid/CHANGELOG.rst
new file mode 100644
index 000000000..c3d64be86
--- /dev/null
+++ b/ansible_collections/netapp/storagegrid/CHANGELOG.rst
@@ -0,0 +1,172 @@
+===========================================
+NetApp StorageGRID Collection Release Notes
+===========================================
+
+.. contents:: Topics
+
+
+v21.11.1
+========
+
+Bugfixes
+--------
+
+- na_sg_org_container - fix versioning not enabled on initial bucket creation.
+
+v21.11.0
+========
+
+Minor Changes
+-------------
+
+- na_sg_org_container - supports versioning configuration for S3 buckets available in StorageGRID 11.6+.
+
+New Modules
+-----------
+
+- netapp.storagegrid.na_sg_grid_client_certificate - Manage Client Certificates on StorageGRID.
+
+v21.10.0
+========
+
+Minor Changes
+-------------
+
+- na_sg_grid_gateway - supports specifying HA Groups by name or UUID.
+
+Bugfixes
+--------
+
+- na_sg_org_group - fixed behaviour where update to ``s3_policy`` is ignored if ``management_policy`` is set.
+
+New Modules
+-----------
+
+- netapp.storagegrid.na_sg_grid_ha_group - Manage high availability (HA) group configuration on StorageGRID.
+- netapp.storagegrid.na_sg_grid_traffic_classes - Manage Traffic Classification Policy configuration on StorageGRID.
+
+v21.9.0
+=======
+
+Minor Changes
+-------------
+
+- PR2 - allow usage of Ansible module group defaults - for Ansible 2.12+.
+- na_sg_grid_gateway - supports load balancer endpoint binding available in StorageGRID 11.5+.
+- na_sg_org_container - supports creation of S3 Object Lock buckets available in StorageGRID 11.5+.
+
+Bugfixes
+--------
+
+- na_sg_grid_account - minor documentation fix.
+- na_sg_grid_gateway - existing endpoints matched by ``name`` and ``port``.
+
+v21.8.0
+=======
+
+Minor Changes
+-------------
+
+- PR2 - allow usage of Ansible module group defaults - for Ansible 2.12+.
+ +v21.7.0 +======= + +Minor Changes +------------- + +- Updated documentation - added RETURN block for each module + +New Modules +----------- + +- netapp.storagegrid.na_sg_grid_gateway - Manage Load balancer (gateway) endpoints on StorageGRID. + +v21.6.0 +======= + +Minor Changes +------------- + +- na_sg_org_container - supports deletion of buckets when ``state`` is set to ``absent``. + +Bugfixes +-------- + +- na_sg_org_container - fix issue with applying compliance settings on buckets. + +New Modules +----------- + +- netapp.storagegrid.na_sg_grid_certificate - Manage the Storage API and Grid Management certificates on StorageGRID. +- netapp.storagegrid.na_sg_grid_identity_federation - NetApp StorageGRID manage Grid identity federation. +- netapp.storagegrid.na_sg_org_identity_federation - NetApp StorageGRID manage Tenant identity federation. + +v20.11.0 +======== + +Minor Changes +------------- + +- na_sg_grid_account - New option ``root_access_account`` for granting initial root access permissions for the tenant to an existing federated group + +New Modules +----------- + +- netapp.storagegrid.na_sg_grid_info - NetApp StorageGRID Grid information gatherer +- netapp.storagegrid.na_sg_org_info - NetApp StorageGRID Org information gatherer + +v20.10.0 +======== + +Minor Changes +------------- + +- na_sg_grid_account - new option ``update_password`` for managing Tenant Account root password changes. +- na_sg_grid_user - new option ``password`` and ``update_password`` for setting or updating Grid Admin User passwords. +- na_sg_org_user - new option ``password`` and ``update_password`` for setting or updating Tenant User passwords. + +Breaking Changes / Porting Guide +-------------------------------- + +- This version introduces a breaking change. + All modules have been renamed from ``nac_sg_*`` to ``na_sg_*``. + Playbooks and Roles must be updated to match. + +Bugfixes +-------- + +- na_sg_grid_account - added ``no_log`` flag to password fields. +- na_sg_grid_account - fixed documentation issue. +- na_sg_grid_group - fixed group name parsing. +- na_sg_org_group - fixed group name parsing. + +v20.6.1 +======= + +Minor Changes +------------- + +- Fixed documentation issue in README.md + +Bugfixes +-------- + +- nac_sg_org_container - fixed documentation issue. + +v20.6.0 +======= + +New Modules +----------- + +- netapp.storagegrid.nac_sg_grid_account - NetApp StorageGRID Manage Tenant account. +- netapp.storagegrid.nac_sg_grid_dns - NetApp StorageGRID Manage Grid DNS servers. +- netapp.storagegrid.nac_sg_grid_group - NetApp StorageGRID Manage Grid admin group. +- netapp.storagegrid.nac_sg_grid_ntp - NetApp StorageGRID Manage Grid NTP servers. +- netapp.storagegrid.nac_sg_grid_regions - NetApp StorageGRID Manage Grid Regions. +- netapp.storagegrid.nac_sg_grid_user - NetApp StorageGRID Manage Grid admin user. +- netapp.storagegrid.nac_sg_org_container - NetApp StorageGRID Manage S3 bucket. +- netapp.storagegrid.nac_sg_org_group - NetApp StorageGRID Manage Tenant group. +- netapp.storagegrid.nac_sg_org_user - NetApp StorageGRID Manage Tenant user. +- netapp.storagegrid.nac_sg_org_user_s3_key - NetApp StorageGRID Manage S3 key. diff --git a/ansible_collections/netapp/storagegrid/COPYING b/ansible_collections/netapp/storagegrid/COPYING new file mode 100644 index 000000000..20d40b6bc --- /dev/null +++ b/ansible_collections/netapp/storagegrid/COPYING @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. 
<http://fsf.org/> + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. 
+ + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. 
For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. 
+ + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. 
You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. 
+ + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. 
+ + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. 
+ + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. 
+ + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see <http://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + <program> Copyright (C) <year> <name of author> + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. 
Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +<http://www.gnu.org/licenses/>. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +<http://www.gnu.org/philosophy/why-not-lgpl.html>.
\ No newline at end of file diff --git a/ansible_collections/netapp/storagegrid/FILES.json b/ansible_collections/netapp/storagegrid/FILES.json new file mode 100644 index 000000000..86ed3f4c8 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/FILES.json @@ -0,0 +1,572 @@ +{ + "files": [ + { + "name": ".", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "requirements.txt", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "7cc4959877dbe6b6c63a8eb1bfe3bfb545fa8fe5b28b1b2c13e4a7c1c0d1c4d4", + "format": 1 + }, + { + "name": "plugins", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "plugins/doc_fragments", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "plugins/doc_fragments/netapp.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e790e69d7116516a69110a233da28e21442e5fee8805b3b6f985854f27f26449", + "format": 1 + }, + { + "name": "plugins/module_utils", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "plugins/module_utils/netapp.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9f7a966f44fca740564887beea1232f8e89bad232cde62d9de9a12428ec442c4", + "format": 1 + }, + { + "name": "plugins/module_utils/netapp_module.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b1a4b77fd27fd5bf2810c0db1d4692093ae5b310992fb183e3817e2e3903891a", + "format": 1 + }, + { + "name": "plugins/modules", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "plugins/modules/na_sg_grid_client_certificate.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e0df90a4c30e50481afe5f508798187ecfbe31b1f5bb7e29e3d724d41203373f", + "format": 1 + }, + { + "name": "plugins/modules/na_sg_grid_traffic_classes.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e27c214bb0ff1e9022a575647d1c05d7e0f7bf5c2b35fa49b576f7beb64ce79c", + "format": 1 + }, + { + "name": "plugins/modules/na_sg_org_user_s3_key.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "d71cb0c2c53424b5ce28ead1b06f9c3b3106c709e4d2f0b74879b0eef2d668b6", + "format": 1 + }, + { + "name": "plugins/modules/na_sg_grid_certificate.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "6b10f9a744d3f78024f3b82f082338aed13f31b5aa71efa86f4ba8ed8cce3088", + "format": 1 + }, + { + "name": "plugins/modules/na_sg_org_identity_federation.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "93bf629ba06db59b1c74bfda8422471a64d3804776207dc925b175ed3378ed9c", + "format": 1 + }, + { + "name": "plugins/modules/na_sg_grid_account.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "a20270da62abd5c22c977081e1c4a11b9f96e39d56254711edb21f7c5cbe7f5b", + "format": 1 + }, + { + "name": "plugins/modules/na_sg_grid_user.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "bc4be89bd9b2977d59fa51e1ef72b0522d2aad116e7dcb531ba0b214854d878b", + "format": 1 + }, + { + "name": "plugins/modules/na_sg_org_info.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "a088c55bad0b8b4a1c5e19024ed922ff4388ab92b3edcdcd91353b27cdf3d05a", + "format": 1 + }, + { + "name": "plugins/modules/na_sg_grid_ha_group.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "48017730eff1e3a04c47c9187488eb915b18ef9293ca06b9dd49832eb2877856", + "format": 
1 + }, + { + "name": "plugins/modules/na_sg_org_group.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "6b6e5bff028d0bd999f8b7a6998e59451c7bd4e5c0de63953ddd19ee13fafad5", + "format": 1 + }, + { + "name": "plugins/modules/na_sg_grid_ntp.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9e3659af27a8a5b26b5ff95eb407a1d0b393a74f22b8e9a66a762b75565dab69", + "format": 1 + }, + { + "name": "plugins/modules/na_sg_grid_identity_federation.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "279f42f668b563bf22aa79be77fd3b90de7dab0a8bccbfe4773900acf820e64c", + "format": 1 + }, + { + "name": "plugins/modules/na_sg_grid_group.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "0fe8a0e6ce5aefad01638e493dda3964019360e0c04cd6aa3b1e432d586b00d9", + "format": 1 + }, + { + "name": "plugins/modules/na_sg_org_user.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "5766c6c0c639b848c6ce3007533059d570f66f9c48f477e0e915b3f5c90a96ae", + "format": 1 + }, + { + "name": "plugins/modules/na_sg_grid_info.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "324308668d15dbc88894b4aea9f9838093d3dd38e09307c94038cff4d289a614", + "format": 1 + }, + { + "name": "plugins/modules/na_sg_grid_gateway.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "036c8706d7ff3ca3a5b970e0fa4d10ba4d33f7cfcfa160acf868d5bb9dc51eaf", + "format": 1 + }, + { + "name": "plugins/modules/na_sg_org_container.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "70b7ca86ed2853ca6fa99a5dc712770e342d1d5b95fea154a67d7c7db3df3c8c", + "format": 1 + }, + { + "name": "plugins/modules/na_sg_grid_dns.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "ccc1e086f899279cce569309c183b358bce0620446fbc8602c94f3be1f5b793f", + "format": 1 + }, + { + "name": "plugins/modules/na_sg_grid_regions.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "7aa2472c25390e20db5437cb23e2824546854c18033b7e92e77eb627b92981a7", + "format": 1 + }, + { + "name": "tests", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/unit", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/unit/compat", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/unit/compat/unittest.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "cba95d18c5b39c6f49714eacf1ac77452c2e32fa087c03cf01aacd19ae597b0f", + "format": 1 + }, + { + "name": "tests/unit/compat/builtins.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "ba13a350ade8ef804336f888d5883b8e54f8bddfb9d0fadc10277a8ca6540f4e", + "format": 1 + }, + { + "name": "tests/unit/compat/__init__.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "format": 1 + }, + { + "name": "tests/unit/compat/mock.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "d18988875cf9d824a5089d062935b2750d96e99e47925a2eb62c527d92c9cc13", + "format": 1 + }, + { + "name": "tests/unit/requirements.txt", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "68a61b1d58a722f4ffabaa28da01c9837c93a582ea41c1bfb1c1fd54ea2d8fab", + "format": 1 + }, + { + "name": "tests/unit/plugins", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": 
"tests/unit/plugins/modules", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_na_sg_grid_dns.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "a96a6d0218e6e7764786af0bf8dc8fbb24c3b492d57a627a7cf827bb33b2c327", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_na_sg_grid_regions.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "870928ef29b01babed679fb5d207eb997ed724aa97135528f45cfda0314705c3", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_na_sg_org_group.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "ff42467a461b8a69a2a2895242852444c89e0471bc0101ae10f99bbcf54528cb", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_na_sg_grid_info.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "8b713f3a28e2948c42996d06cc69f316ed2576999d2372ed354c474e0a52472e", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_na_sg_org_user.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "c7fa1a8212a01dd81411129c7a31048d3e154c9729a0ac28a8016ace1fbd6e87", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_na_sg_grid_ntp.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "7299330b0ddfa005c4976c10b3c564b4c84c5f3cc620d566f9baa0ebe63460b1", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_na_sg_grid_group.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "15ad7caefc2c6fc3c9e473d176e032473f2065dfb249f3524263ec6129b61e6c", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_na_sg_grid_identity_federation.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "3943d740fc467010966238cce4d10aa484ea70d6e689704043cee7344e1916b5", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_na_sg_grid_gateway.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "c87e92555fb3aafaa82e197910b8fa61976ce358e220da2e0968469ea0df505c", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_na_sg_grid_client_certificate.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "1ab3f4d2b408d5943dfb334ed64729ba5b9a2974c78ef1c2cb48622289181e52", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_na_sg_grid_certificate.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "d046329c9ba078773c442a8bc47b00c76b49eac762f05f1b43789689056e295a", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_na_sg_org_info.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "3d167cae9bf3aebf2b3b8b6b2736701a37e0c86ebae8ad89223deca373d4e4dd", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_na_sg_grid_account.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "eebfa82a09b7b3413d77749c1afc10a69cde0d90e940b0d454af807cebc476be", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_na_sg_org_identity_federation.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "cda86d8e2632609873d7ccda496ca69e0884ba82712418e23148025f0f3a5482", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_na_sg_grid_traffic_classes.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "4ec3ad3f3716e69cd39a3dddbbffc1796d415b4a8a729f5d61289a3f39967b18", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_na_sg_grid_ha_group.py", 
+ "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "a0782c6e1a6a9185cf4724da8d7e4363f6af13934efd4152779fcbe9aa05c2b7", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_na_sg_org_container.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "9593869384f8f9f0f0de69e63f72a90de414e858dbfead681966e54b9ebd4b23", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_na_sg_org_user_s3_key.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "d39947658ff5fdbc740275959f4fc82fcbdfd84553b771d78632d461e1638b4f", + "format": 1 + }, + { + "name": "tests/unit/plugins/modules/test_na_sg_grid_user.py", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "614c50a7ef9b7fe296025441e0350b78cb5e381f04d336f1cb49512d60404605", + "format": 1 + }, + { + "name": "meta", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "meta/runtime.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "fe82353b93dc1bedb4d4f8695b23d6a33537fd0401c406547d132d4497ff7ca2", + "format": 1 + }, + { + "name": "changelogs", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "changelogs/fragments", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": "changelogs/fragments/github-66.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "49dfee10361876355f72737bd6f8d86ce6cea258bac589e20ec31125d6d4023a", + "format": 1 + }, + { + "name": "changelogs/fragments/github-10.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "82584dd77f4f48618a3ae519fdfe9347774691201e4fd98868d21cc782820ecd", + "format": 1 + }, + { + "name": "changelogs/fragments/20.10.0.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "2567ca0de5c5aa2b12772cf077003ea361b46b046e7fff165fbfb901dc86ff24", + "format": 1 + }, + { + "name": "changelogs/fragments/21.9.0.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "7b2062ed359ef484360534a91969fdb39e59f741cd5aa96b18e08de79bef81f1", + "format": 1 + }, + { + "name": "changelogs/fragments/20.7.0.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e197a5a1f5b3a2e38dfd7456429e59a3a01a8f80e86e445ff1b7d5b5acf7dc3e", + "format": 1 + }, + { + "name": "changelogs/fragments/github-8.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "e99536aa20608eaee59a0f0bc586425750a4561b39b78fa50722be660f4333a3", + "format": 1 + }, + { + "name": "changelogs/fragments/21.11.1.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "c392f498059c8b2db2e7756553dde51d153bb7003f22c379641746e0bcf26188", + "format": 1 + }, + { + "name": "changelogs/fragments/20.6.1.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "54144fc957437d09f4efd29f0dbfd18cfe40e21d7196e1c0d9dca35acc644a70", + "format": 1 + }, + { + "name": "changelogs/fragments/21.6.0.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "1f058dcc3961d0dc37d26b6a3ea6aefd477378bb51f8bdbe0595c2bf1c145b73", + "format": 1 + }, + { + "name": "changelogs/fragments/21.11.0.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "b05d16dd6f1c82d0b43033d464d48f2c487cc90195fd1f0a8d79c4b8f97560a5", + "format": 1 + }, + { + "name": "changelogs/fragments/DEVOPS-4416.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": 
"6742492ce83786ffcddc12818ef3771ef915123fbe3b0518a101044435701af3", + "format": 1 + }, + { + "name": "changelogs/config.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "3c330af70628d6a33029dadb8c4e4aac81eb5e82946651f804cff46bd0736cbe", + "format": 1 + }, + { + "name": "changelogs/changelog.yaml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "2d6b62b96b1a730bbb09d926e37a0dc4f1d3cf9f4218e8a2feb4a00c30c66171", + "format": 1 + }, + { + "name": "README.md", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "1fb8b745f8becd6ea2348808e1979a31486ab9357ec1197cb3136c2727d712b2", + "format": 1 + }, + { + "name": "COPYING", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "c53a65c2fd561c87eaabf1072ef5dcab8653042bc15308465f52413585eb6271", + "format": 1 + }, + { + "name": ".github", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": ".github/workflows", + "ftype": "dir", + "chksum_type": null, + "chksum_sha256": null, + "format": 1 + }, + { + "name": ".github/workflows/coverage.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "faf067634d432b31207f4ad48bdc037d2ec518a7df4377cb0533126fe9a50a21", + "format": 1 + }, + { + "name": ".github/workflows/main.yml", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "07c19a69adcb2c9c482cadf8785c3bc0bf621ff161a592b48db9458e0673b1c5", + "format": 1 + }, + { + "name": "CHANGELOG.rst", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "131e7c4ca2ddb153d23af2174d262f4b0ae62079e892ecf36f062e929d43f729", + "format": 1 + } + ], + "format": 1 +}
\ No newline at end of file diff --git a/ansible_collections/netapp/storagegrid/MANIFEST.json b/ansible_collections/netapp/storagegrid/MANIFEST.json new file mode 100644 index 000000000..53238fc0d --- /dev/null +++ b/ansible_collections/netapp/storagegrid/MANIFEST.json @@ -0,0 +1,32 @@ +{ + "collection_info": { + "namespace": "netapp", + "name": "storagegrid", + "version": "21.11.1", + "authors": [ + "NetApp Ansible Team <ng-ansibleteam@netapp.com>" + ], + "readme": "README.md", + "tags": [ + "storage", + "netapp", + "storagegrid" + ], + "description": "NetApp StorageGRID Collection", + "license": [], + "license_file": "COPYING", + "dependencies": {}, + "repository": "https://github.com/ansible-collections/netapp.storagegrid", + "documentation": null, + "homepage": "https://netapp.io/configuration-management-and-automation/", + "issues": null + }, + "file_manifest_file": { + "name": "FILES.json", + "ftype": "file", + "chksum_type": "sha256", + "chksum_sha256": "c9021a8a169b205830d9914712b6c039123456973f905ab635b85e37192f132c", + "format": 1 + }, + "format": 1 +}
\ No newline at end of file diff --git a/ansible_collections/netapp/storagegrid/README.md b/ansible_collections/netapp/storagegrid/README.md new file mode 100644 index 000000000..582a77329 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/README.md @@ -0,0 +1,199 @@ +![example workflow](https://github.com/ansible-collections/netapp.storagegrid/actions/workflows/main.yml/badge.svg) +[![codecov](https://codecov.io/gh/ansible-collections/netapp.storagegrid/branch/main/graph/badge.svg?token=weBYkksxSi)](https://codecov.io/gh/ansible-collections/netapp.storagegrid) +[![Discord](https://img.shields.io/discord/855068651522490400)](https://discord.gg/NetApp) + + +============================================================= + + netapp.storagegrid + + NetApp StorageGRID Collection + + Copyright (c) 2020 NetApp, Inc. All rights reserved. + Specifications subject to change without notice. + +============================================================= + +# Installation + +```bash +ansible-galaxy collection install netapp.storagegrid +``` +To use this collection add the following to the top of your playbook. +``` +collections: + - netapp.storagegrid +``` + +# Usage + +Each of the StorageGRID modules require an `auth_token` parameter to be specified. This can be obtained by executing a `uri` task against the StorageGRID Authorization API endpoint and registering the output as the first item in a Playbook. + +If you are performing a Tenant operation, ensure that the `accountId` parameter is also specified in the URI body and set to the Tenant Account ID. For example, `"accountId": "01234567890123456789"` + +```yaml +- name: Get Grid Authorization token + uri: + url: "https://sgadmin.example.com/api/v3/authorize" + method: POST + body: { + "username": "root", + "password": "storagegrid123", + "cookie": false, + "csrfToken": false + } + body_format: json + validate_certs: false + register: auth +``` + +Subsequent tasks can leverage the registered auth token. + +```yaml +- name: Create a StorageGRID Tenant Account + netapp.storagegrid.na_sg_grid_account: + api_url: "https://sgadmin.example.com" + auth_token: "{{ auth.json.data }}" + validate_certs: false + state: present + name: AnsibleTenant + protocol: s3 + management: true + use_own_identity_source: true + allow_platform_services: true + password: "mytenantrootpassword" + quota_size: 10 +``` + +# Versioning + +[Releasing, Versioning and Deprecation](https://github.com/ansible-collections/netapp/issues/93) + +# Need help + +Join our [Discord](https://discord.gg/NetApp) + +# Code of Conduct + +This collection follows the [Ansible project's Code of Conduct](https://docs.ansible.com/ansible/devel/community/code_of_conduct.html). + +# Release Notes + +## 21.11.1 + +### Bug Fixes + - na_sg_org_container - fix versioning not enabled on initial bucket creation. + +## 21.11.0 + +### Minor Changes + - na_sg_org_container - supports versioning configuration for S3 buckets available in StorageGRID 11.6+. + +### New Modules + - na_sg_grid_client_certificate - Manage Client Certificates on StorageGRID. + +## 21.10.0 + +### Minor Changes + - na_sg_grid_gateway - supports specifying HA Groups by name or UUID. + +### Bug Fixes + - na_sg_org_group - fixed behaviour where update to ``s3_policy`` is ignored if ``management_policy`` is set. + +### New Modules + - na_sg_grid_ha_group - Manage high availability (HA) group configuration on StorageGRID. + - na_sg_grid_traffic_classes - Manage Traffic Classification Policy configuration on StorageGRID. 
+ +## 21.9.0 + +### Minor Changes + - na_sg_grid_gateway - supports load balancer endpoint binding available in StorageGRID 11.5+. + - na_sg_org_container - supports creation of S3 Object Lock buckets available in StorageGRID 11.5+. + +### Bug Fixes + - na_sg_grid_gateway - existing endpoints matched by ``name`` and ``port``. + - na_sg_grid_account - minor documentation fix. + +## 21.8.0 + +### Minor Changes + - all modules - enable usage of Ansible module group defaults - for Ansible 2.12+. + +## 21.7.0 + +### New Modules + +- na_sg_grid_gateway: Manage Load balancer (gateway) endpoints + +### Minor Changes +- Updated documentation - added RETURN block for each module + +## 21.6.0 + +### New Modules + +- na_sg_grid_certificate: Manage the Storage API and Grid Management certificates on StorageGRID. +- na_sg_grid_identity_federation: Manage Grid identity federation. +- na_sg_org_identity_federation: Manage Tenant identity federation. + +### Minor Changes +- na_sg_org_container - supports deletion of buckets when `state` is set to `absent`. + +### Bug Fixes +- na_sg_org_container - fix issue with applying compliance settings on buckets. + +## 20.11.0 + +### New Modules + +- na_sg_grid_info: Gather StorageGRID Grig subset information +- na_sg_org_info: Gather StorageGRID Org subset information + +### Minor Changes + +- na_sg_grid_account: new option `root_access_account` for granting initial root access permissions for the tenant to an existing federated group + +## 20.10.0 + +### Breaking Changes + +This version introduces a breaking change. All modules have been renamed from `nac_sg_*` to `na_sg_*`. Playbooks and Roles must be updated to match. + +### Bug Fixes + +- na_sg_grid_account: fixed documentation issue. +- na_sg_grid_account: added `no_log` flag to password fields +- na_sg_grid_group: fixed group name parsing +- na_sg_org_group: fixed group name parsing + +### New Options + +- na_sg_grid_account: new option `update_password` for managing Tenant Account root password changes +- na_sg_org_user: new option `password` and `update_password` for setting or updating Tenant User passwords +- na_sg_grid_user: new option `password` and `update_password` for setting or updating Grid Admin User passwords + +## 20.6.1 + +### Minor Changes +- Fixed documentation issue in README.md + +### Bug Fixes +- nac_sg_org_container: fixed documentation issue. + +## 20.6.0 + +Initial release of NetApp StorageGRID Ansible modules + +### New Modules + +- nac_sg_grid_account: create/modify/delete Tenant account +- nac_sg_grid_dns: set Grid DNS servers +- nac_sg_grid_group: create/modify/delete Grid admin group +- nac_sg_grid_ntp: set Grid NTP servers +- nac_sg_grid_regions: set Grid Regions +- nac_sg_grid_user: create/modify/delete Grid admin user +- nac_sg_org_container: create S3 bucket +- nac_sg_org_group: create/modify/delete Tenant group +- nac_sg_org_user: create/modify/delete Tenant user +- nac_sg_org_user_s3_key: create/delete S3 key + diff --git a/ansible_collections/netapp/storagegrid/changelogs/changelog.yaml b/ansible_collections/netapp/storagegrid/changelogs/changelog.yaml new file mode 100644 index 000000000..288cbdc34 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/changelogs/changelog.yaml @@ -0,0 +1,171 @@ +ancestor: null +releases: + 20.10.0: + changes: + breaking_changes: + - 'This version introduces a breaking change. + + All modules have been renamed from ``nac_sg_*`` to ``na_sg_*``. + + Playbooks and Roles must be updated to match.' 
+ bugfixes: + - na_sg_grid_account - added ``no_log`` flag to password fields. + - na_sg_grid_account - fixed documentation issue. + - na_sg_grid_group - fixed group name parsing. + - na_sg_org_group - fixed group name parsing. + minor_changes: + - na_sg_grid_account - new option ``update_password`` for managing Tenant Account + root password changes. + - na_sg_grid_user - new option ``password`` and ``update_password`` for setting + or updating Grid Admin User passwords. + - na_sg_org_user - new option ``password`` and ``update_password`` for setting + or updating Tenant User passwords. + fragments: + - 20.10.0.yaml + release_date: '2020-10-15' + 20.11.0: + changes: + minor_changes: + - na_sg_grid_account - New option ``root_access_account`` for granting initial + root access permissions for the tenant to an existing federated group + fragments: + - github-66.yaml + modules: + - description: NetApp StorageGRID Grid information gatherer + name: na_sg_grid_info + namespace: '' + - description: NetApp StorageGRID Org information gatherer + name: na_sg_org_info + namespace: '' + release_date: '2020-11-18' + 20.6.0: + modules: + - description: NetApp StorageGRID Manage Tenant account. + name: nac_sg_grid_account + namespace: '' + - description: NetApp StorageGRID Manage Grid DNS servers. + name: nac_sg_grid_dns + namespace: '' + - description: NetApp StorageGRID Manage Grid admin group. + name: nac_sg_grid_group + namespace: '' + - description: NetApp StorageGRID Manage Grid NTP servers. + name: nac_sg_grid_ntp + namespace: '' + - description: NetApp StorageGRID Manage Grid Regions. + name: nac_sg_grid_regions + namespace: '' + - description: NetApp StorageGRID Manage Grid admin user. + name: nac_sg_grid_user + namespace: '' + - description: NetApp StorageGRID Manage S3 bucket. + name: nac_sg_org_container + namespace: '' + - description: NetApp StorageGRID Manage Tenant group. + name: nac_sg_org_group + namespace: '' + - description: NetApp StorageGRID Manage Tenant user. + name: nac_sg_org_user + namespace: '' + - description: NetApp StorageGRID Manage S3 key. + name: nac_sg_org_user_s3_key + namespace: '' + release_date: '2020-06-09' + 20.6.1: + changes: + bugfixes: + - nac_sg_org_container - fixed documentation issue. + minor_changes: + - Fixed documentation issue in README.md + fragments: + - 20.6.1.yaml + release_date: '2020-06-09' + 21.10.0: + changes: + bugfixes: + - na_sg_org_group - fixed behaviour where update to ``s3_policy`` is ignored + if ``management_policy`` is set. + minor_changes: + - na_sg_grid_gateway - supports specifying HA Groups by name or UUID. + fragments: + - github-10.yaml + - github-8.yaml + modules: + - description: Manage high availability (HA) group configuration on StorageGRID. + name: na_sg_grid_ha_group + namespace: '' + - description: Manage Traffic Classification Policy configuration on StorageGRID. + name: na_sg_grid_traffic_classes + namespace: '' + release_date: '2022-03-17' + 21.11.0: + changes: + minor_changes: + - na_sg_org_container - supports versioning configuration for S3 buckets available + in StorageGRID 11.6+. + fragments: + - 21.11.0.yaml + modules: + - description: Manage Client Certificates on StorageGRID. + name: na_sg_grid_client_certificate + namespace: '' + release_date: '2022-09-06' + 21.11.1: + changes: + bugfixes: + - na_sg_org_container - fix versioning not enabled on initial bucket creation. 
+ fragments: + - 21.11.1.yaml + release_date: '2022-09-23' + 21.6.0: + changes: + bugfixes: + - na_sg_org_container - fix issue with applying compliance settings on buckets. + minor_changes: + - na_sg_org_container - supports deletion of buckets when ``state`` is set to + ``absent``. + fragments: + - 21.6.0.yaml + modules: + - description: Manage the Storage API and Grid Management certificates on StorageGRID. + name: na_sg_grid_certificate + namespace: '' + - description: NetApp StorageGRID manage Grid identity federation. + name: na_sg_grid_identity_federation + namespace: '' + - description: NetApp StorageGRID manage Tenant identity federation. + name: na_sg_org_identity_federation + namespace: '' + release_date: '2021-06-16' + 21.7.0: + changes: + minor_changes: + - Updated documentation - added RETURN block for each module + fragments: + - 20.7.0.yaml + modules: + - description: Manage Load balancer (gateway) endpoints on StorageGRID. + name: na_sg_grid_gateway + namespace: '' + release_date: '2021-10-05' + 21.8.0: + changes: + minor_changes: + - PR2 - allow usage of Ansible module group defaults - for Ansible 2.12+. + fragments: + - DEVOPS-4416.yaml + release_date: '2021-11-11' + 21.9.0: + changes: + bugfixes: + - na_sg_grid_account - minor documentation fix. + - na_sg_grid_gateway - existing endpoints matched by ``name`` and ``port``. + minor_changes: + - PR2 - allow usage of Ansible module group defaults - for Ansible 2.12+. + - na_sg_grid_gateway - supports load balancer endpoint binding available in + StorageGRID 11.5+. + - na_sg_org_container - supports creation of S3 Object Lock buckets available + in StorageGRID 11.5+. + fragments: + - 21.9.0.yaml + release_date: '2021-12-17' diff --git a/ansible_collections/netapp/storagegrid/changelogs/config.yaml b/ansible_collections/netapp/storagegrid/changelogs/config.yaml new file mode 100644 index 000000000..871e634c1 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/changelogs/config.yaml @@ -0,0 +1,32 @@ +changelog_filename_template: ../CHANGELOG.rst +changelog_filename_version_depth: 0 +changes_file: changelog.yaml +changes_format: combined +ignore_other_fragment_extensions: true +keep_fragments: true +mention_ancestor: true +new_plugins_after_name: removed_features +notesdir: fragments +prelude_section_name: release_summary +prelude_section_title: Release Summary +sanitize_changelog: true +sections: +- - major_changes + - Major Changes +- - minor_changes + - Minor Changes +- - breaking_changes + - Breaking Changes / Porting Guide +- - deprecated_features + - Deprecated Features +- - removed_features + - Removed Features (previously deprecated) +- - security_fixes + - Security Fixes +- - bugfixes + - Bugfixes +- - known_issues + - Known Issues +title: NetApp StorageGRID Collection +trivial_section_name: trivial +use_fqcn: true diff --git a/ansible_collections/netapp/storagegrid/changelogs/fragments/20.10.0.yaml b/ansible_collections/netapp/storagegrid/changelogs/fragments/20.10.0.yaml new file mode 100644 index 000000000..350ee15dc --- /dev/null +++ b/ansible_collections/netapp/storagegrid/changelogs/fragments/20.10.0.yaml @@ -0,0 +1,14 @@ +breaking_changes: + - | + This version introduces a breaking change. + All modules have been renamed from ``nac_sg_*`` to ``na_sg_*``. + Playbooks and Roles must be updated to match. +minor_changes: + - na_sg_grid_account - new option ``update_password`` for managing Tenant Account root password changes. 
+ - na_sg_org_user - new option ``password`` and ``update_password`` for setting or updating Tenant User passwords. + - na_sg_grid_user - new option ``password`` and ``update_password`` for setting or updating Grid Admin User passwords. +bugfixes: + - na_sg_grid_account - fixed documentation issue. + - na_sg_grid_account - added ``no_log`` flag to password fields. + - na_sg_grid_group - fixed group name parsing. + - na_sg_org_group - fixed group name parsing. diff --git a/ansible_collections/netapp/storagegrid/changelogs/fragments/20.6.1.yaml b/ansible_collections/netapp/storagegrid/changelogs/fragments/20.6.1.yaml new file mode 100644 index 000000000..3d1bb11a0 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/changelogs/fragments/20.6.1.yaml @@ -0,0 +1,4 @@ +minor_changes: + - Fixed documentation issue in README.md +bugfixes: + - nac_sg_org_container - fixed documentation issue. diff --git a/ansible_collections/netapp/storagegrid/changelogs/fragments/20.7.0.yaml b/ansible_collections/netapp/storagegrid/changelogs/fragments/20.7.0.yaml new file mode 100644 index 000000000..a91168e87 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/changelogs/fragments/20.7.0.yaml @@ -0,0 +1,2 @@ +minor_changes: + - Updated documentation - added RETURN block for each module diff --git a/ansible_collections/netapp/storagegrid/changelogs/fragments/21.11.0.yaml b/ansible_collections/netapp/storagegrid/changelogs/fragments/21.11.0.yaml new file mode 100644 index 000000000..a8fbff2df --- /dev/null +++ b/ansible_collections/netapp/storagegrid/changelogs/fragments/21.11.0.yaml @@ -0,0 +1,2 @@ +minor_changes: + - na_sg_org_container - supports versioning configuration for S3 buckets available in StorageGRID 11.6+. diff --git a/ansible_collections/netapp/storagegrid/changelogs/fragments/21.11.1.yaml b/ansible_collections/netapp/storagegrid/changelogs/fragments/21.11.1.yaml new file mode 100644 index 000000000..229a6c51f --- /dev/null +++ b/ansible_collections/netapp/storagegrid/changelogs/fragments/21.11.1.yaml @@ -0,0 +1,2 @@ +bugfixes: + - na_sg_org_container - fix versioning not enabled on initial bucket creation. diff --git a/ansible_collections/netapp/storagegrid/changelogs/fragments/21.6.0.yaml b/ansible_collections/netapp/storagegrid/changelogs/fragments/21.6.0.yaml new file mode 100644 index 000000000..6f8a660eb --- /dev/null +++ b/ansible_collections/netapp/storagegrid/changelogs/fragments/21.6.0.yaml @@ -0,0 +1,4 @@ +minor_changes: + - na_sg_org_container - supports deletion of buckets when ``state`` is set to ``absent``. +bugfixes: + - na_sg_org_container - fix issue with applying compliance settings on buckets. diff --git a/ansible_collections/netapp/storagegrid/changelogs/fragments/21.9.0.yaml b/ansible_collections/netapp/storagegrid/changelogs/fragments/21.9.0.yaml new file mode 100644 index 000000000..87aecb16c --- /dev/null +++ b/ansible_collections/netapp/storagegrid/changelogs/fragments/21.9.0.yaml @@ -0,0 +1,6 @@ +minor_changes: + - na_sg_grid_gateway - supports load balancer endpoint binding available in StorageGRID 11.5+. + - na_sg_org_container - supports creation of S3 Object Lock buckets available in StorageGRID 11.5+. +bugfixes: + - na_sg_grid_gateway - existing endpoints matched by ``name`` and ``port``. + - na_sg_grid_account - minor documentation fix. 
diff --git a/ansible_collections/netapp/storagegrid/changelogs/fragments/DEVOPS-4416.yaml b/ansible_collections/netapp/storagegrid/changelogs/fragments/DEVOPS-4416.yaml new file mode 100644 index 000000000..63514c5c5 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/changelogs/fragments/DEVOPS-4416.yaml @@ -0,0 +1,2 @@ +minor_changes: + - PR2 - allow usage of Ansible module group defaults - for Ansible 2.12+. diff --git a/ansible_collections/netapp/storagegrid/changelogs/fragments/github-10.yaml b/ansible_collections/netapp/storagegrid/changelogs/fragments/github-10.yaml new file mode 100644 index 000000000..62e8845c6 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/changelogs/fragments/github-10.yaml @@ -0,0 +1,2 @@ +minor_changes: + - na_sg_grid_gateway - supports specifying HA Groups by name or UUID.
\ No newline at end of file diff --git a/ansible_collections/netapp/storagegrid/changelogs/fragments/github-66.yaml b/ansible_collections/netapp/storagegrid/changelogs/fragments/github-66.yaml new file mode 100644 index 000000000..a515be8c2 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/changelogs/fragments/github-66.yaml @@ -0,0 +1,2 @@ +minor_changes: + - na_sg_grid_account - New option ``root_access_account`` for granting initial root access permissions for the tenant to an existing federated group diff --git a/ansible_collections/netapp/storagegrid/changelogs/fragments/github-8.yaml b/ansible_collections/netapp/storagegrid/changelogs/fragments/github-8.yaml new file mode 100644 index 000000000..f21ecbe46 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/changelogs/fragments/github-8.yaml @@ -0,0 +1,2 @@ +bugfixes: + - na_sg_org_group - fixed behaviour where update to ``s3_policy`` is ignored if ``management_policy`` is set. diff --git a/ansible_collections/netapp/storagegrid/meta/runtime.yml b/ansible_collections/netapp/storagegrid/meta/runtime.yml new file mode 100644 index 000000000..4625ad259 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/meta/runtime.yml @@ -0,0 +1,23 @@ +--- +requires_ansible: ">=2.9.10" +action_groups: + netapp_storagegrid: + - na_sg_grid_account + - na_sg_grid_certificate + - na_sg_grid_client_certificate + - na_sg_grid_dns + - na_sg_grid_gateway + - na_sg_grid_group + - na_sg_grid_ha_group + - na_sg_grid_identity_federation + - na_sg_grid_info + - na_sg_grid_ntp + - na_sg_grid_regions + - na_sg_grid_traffic_classes + - na_sg_grid_user + - na_sg_org_container + - na_sg_org_group + - na_sg_org_identity_federation + - na_sg_org_info + - na_sg_org_user + - na_sg_org_user_s3_key diff --git a/ansible_collections/netapp/storagegrid/plugins/doc_fragments/netapp.py b/ansible_collections/netapp/storagegrid/plugins/doc_fragments/netapp.py new file mode 100644 index 000000000..8c06f9d99 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/plugins/doc_fragments/netapp.py @@ -0,0 +1,41 @@ +# Copyright: (c) 2019, NetApp Ansible Team <ng-ansibleteam@netapp.com> +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + + +class ModuleDocFragment(object): + + DOCUMENTATION = r""" +options: + - See respective platform section for more details +requirements: + - See respective platform section for more details +notes: + - This is documentation for NetApp's StorageGRID modules. +""" + + # Documentation fragment for StorageGRID + SG = """ +options: + auth_token: + required: true + type: str + description: + - The authorization token for the API request + api_url: + required: true + type: str + description: + - The url to the StorageGRID Admin Node REST API. + validate_certs: + required: false + default: true + description: + - Should https certificates be validated? + type: bool +notes: + - The modules prefixed with C(na_sg) are built to manage NetApp StorageGRID. +""" diff --git a/ansible_collections/netapp/storagegrid/plugins/module_utils/netapp.py b/ansible_collections/netapp/storagegrid/plugins/module_utils/netapp.py new file mode 100644 index 000000000..9892ae7b5 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/plugins/module_utils/netapp.py @@ -0,0 +1,211 @@ +# This code is part of Ansible, but is an independent component. +# This particular file snippet, and this file snippet only, is BSD licensed. 
+# Modules you write using this snippet, which is embedded dynamically by Ansible +# still belong to the author of the module, and may assign their own license +# to the complete work. +# +# Copyright (c) 2020, NetApp Ansible Team <ng-ansibleteam@netapp.com> +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without modification, +# are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. +# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +import json +import mimetypes +import os +import random + +from pprint import pformat +from ansible.module_utils import six +from ansible.module_utils.basic import AnsibleModule, missing_required_lib +from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError +from ansible.module_utils.urls import open_url +from ansible.module_utils.api import basic_auth_argument_spec +from ansible.module_utils._text import to_native + +COLLECTION_VERSION = "21.11.1" + +try: + import requests + + HAS_REQUESTS = True +except ImportError: + HAS_REQUESTS = False + +import ssl + +try: + from urlparse import urlparse, urlunparse +except ImportError: + from urllib.parse import urlparse, urlunparse + + +POW2_BYTE_MAP = dict( + # Here, 1 kb = 1024 + bytes=1, + b=1, + kb=1024, + mb=1024**2, + gb=1024**3, + tb=1024**4, + pb=1024**5, + eb=1024**6, + zb=1024**7, + yb=1024**8, +) + + +def na_storagegrid_host_argument_spec(): + + return dict( + api_url=dict(required=True, type="str"), + validate_certs=dict(required=False, type="bool", default=True), + auth_token=dict(required=True, type="str", no_log=True), + ) + + +class SGRestAPI(object): + def __init__(self, module, timeout=60): + self.module = module + self.auth_token = self.module.params["auth_token"] + self.api_url = self.module.params["api_url"] + self.verify = self.module.params["validate_certs"] + self.timeout = timeout + self.check_required_library() + self.sg_version = dict(major=-1, minor=-1, full="", valid=False) + + def check_required_library(self): + if not HAS_REQUESTS: + self.module.fail_json(msg=missing_required_lib("requests")) + + def send_request(self, method, api, params, json=None): + """send http request and process reponse, including error conditions""" + url = "%s/%s" % (self.api_url, api) + status_code = None + content = None + json_dict = None + json_error = None + error_details = None + 
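+ # Default headers for every request: the caller-supplied auth_token is sent
+ # unchanged in the Authorization header and payloads are exchanged as JSON.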
headers = { + "Content-type": "application/json", + "Authorization": self.auth_token, + "Cache-Control": "no-cache", + } + + def get_json(response): + """extract json, and error message if present""" + try: + json = response.json() + + except ValueError: + return None, None + success_code = [200, 201, 202, 204] + if response.status_code not in success_code: + error = json.get("message") + else: + error = None + return json, error + + try: + response = requests.request( + method, + url, + headers=headers, + timeout=self.timeout, + json=json, + verify=self.verify, + params=params, + ) + status_code = response.status_code + # If the response was successful, no Exception will be raised + json_dict, json_error = get_json(response) + except requests.exceptions.HTTPError as err: + __, json_error = get_json(response) + if json_error is None: + error_details = str(err) + except requests.exceptions.ConnectionError as err: + error_details = str(err) + except Exception as err: + error_details = str(err) + if json_error is not None: + error_details = json_error + + return json_dict, error_details + + # If an error was reported in the json payload, it is handled below + def get(self, api, params=None): + method = "GET" + return self.send_request(method, api, params) + + def post(self, api, data, params=None): + method = "POST" + return self.send_request(method, api, params, json=data) + + def patch(self, api, data, params=None): + method = "PATCH" + return self.send_request(method, api, params, json=data) + + def put(self, api, data, params=None): + method = "PUT" + return self.send_request(method, api, params, json=data) + + def delete(self, api, data, params=None): + method = "DELETE" + return self.send_request(method, api, params, json=data) + + def get_sg_product_version(self, api_root="grid"): + method = "GET" + api = "api/v3/%s/config/product-version" % api_root + message, error = self.send_request(method, api, params={}) + if error: + self.module.fail_json(msg=error) + self.set_version(message) + + def set_version(self, message): + try: + product_version = message.get("data", "not found").get("productVersion", "not_found") + except AttributeError: + self.sg_version["valid"] = False + return + + self.sg_version["major"], self.sg_version["minor"] = list(map(int, product_version.split(".")[0:2])) + self.sg_version["full"] = product_version + self.sg_version["valid"] = True + + def get_sg_version(self): + if self.sg_version["valid"]: + return self.sg_version["major"], self.sg_version["minor"] + return -1, -1 + + def meets_sg_minimum_version(self, minimum_major, minimum_minor): + return self.get_sg_version() >= (minimum_major, minimum_minor) + + def requires_sg_version(self, module_or_option, version): + return "%s requires StorageGRID %s or later." % (module_or_option, version) + + def fail_if_not_sg_minimum_version(self, module_or_option, minimum_major, minimum_minor): + version = self.get_sg_version() + if version < (minimum_major, minimum_minor): + msg = "Error: " + self.requires_sg_version(module_or_option, "%d.%d" % (minimum_major, minimum_minor)) + msg += " Found: %s.%s." 
% version + self.module.fail_json(msg=msg) diff --git a/ansible_collections/netapp/storagegrid/plugins/module_utils/netapp_module.py b/ansible_collections/netapp/storagegrid/plugins/module_utils/netapp_module.py new file mode 100644 index 000000000..f562938cf --- /dev/null +++ b/ansible_collections/netapp/storagegrid/plugins/module_utils/netapp_module.py @@ -0,0 +1,237 @@ +# This code is part of Ansible, but is an independent component. +# This particular file snippet, and this file snippet only, is BSD licensed. +# Modules you write using this snippet, which is embedded dynamically by Ansible +# still belong to the author of the module, and may assign their own license +# to the complete work. +# +# Copyright (c) 2018, Laurent Nicolas <laurentn@netapp.com> +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without modification, +# are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. +# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +""" Support class for NetApp ansible modules """ + +from __future__ import absolute_import, division, print_function + +from copy import deepcopy + +__metaclass__ = type + +import ansible_collections.netapp.storagegrid.plugins.module_utils.netapp as netapp_utils + + +def cmp(obj1, obj2): + """ + Python 3 does not have a cmp function, this will do the cmp. + :param obj1: first object to check + :param obj2: second object to check + :return: + """ + # convert to lower case for string comparison. + if obj1 is None: + return -1 + if isinstance(obj1, str) and isinstance(obj2, str): + obj1 = obj1.lower() + obj2 = obj2.lower() + # if list has string element, convert string to lower case. 
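+ # Both lists are also sorted below, so element order does not affect the comparison.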
+ if isinstance(obj1, list) and isinstance(obj2, list): + obj1 = [x.lower() if isinstance(x, str) else x for x in obj1] + obj2 = [x.lower() if isinstance(x, str) else x for x in obj2] + obj1.sort() + obj2.sort() + return (obj1 > obj2) - (obj1 < obj2) + + +class NetAppModule(object): + """ + Common class for NetApp modules + set of support functions to derive actions based + on the current state of the system, and a desired state + """ + + def __init__(self): + self.log = list() + self.changed = False + self.parameters = {"name": "not initialized"} + + def set_parameters(self, ansible_params): + self.parameters = dict() + for param in ansible_params: + if ansible_params[param] is not None: + self.parameters[param] = ansible_params[param] + return self.parameters + + def get_cd_action(self, current, desired): + """ takes a desired state and a current state, and return an action: + create, delete, None + eg: + is_present = 'absent' + some_object = self.get_object(source) + if some_object is not None: + is_present = 'present' + action = cd_action(current=is_present, desired = self.desired.state()) + """ + if "state" in desired: + desired_state = desired["state"] + else: + desired_state = "present" + + if current is None and desired_state == "absent": + return None + if current is not None and desired_state == "present": + return None + # change in state + self.changed = True + if current is not None: + return "delete" + return "create" + + def compare_and_update_values(self, current, desired, keys_to_compare): + updated_values = dict() + is_changed = False + for key in keys_to_compare: + if key in current: + if key in desired and desired[key] is not None: + if current[key] != desired[key]: + updated_values[key] = desired[key] + is_changed = True + else: + updated_values[key] = current[key] + else: + updated_values[key] = current[key] + + return updated_values, is_changed + + @staticmethod + def check_keys(current, desired): + ''' TODO: raise an error if keys do not match + with the exception of: + new_name, state in desired + ''' + + def is_rename_action(self, source, target): + """ takes a source and target object, and returns True + if a rename is required + eg: + source = self.get_object(source_name) + target = self.get_object(target_name) + action = is_rename_action(source, target) + :return: None for error, True for rename action, False otherwise + """ + if source is None and target is None: + # error, do nothing + # cannot rename an non existent resource + # alternatively we could create B + return None + if source is not None and target is not None: + # error, do nothing + # idempotency (or) new_name_is_already_in_use + # alternatively we could delete B and rename A to B + return False + if source is None and target is not None: + # do nothing, maybe the rename was already done + return False + # source is not None and target is None: + # rename is in order + self.changed = True + return True + + @staticmethod + def compare_lists(current, desired, get_list_diff): + ''' compares two lists and return a list of elements that are either the desired elements or elements that are + modified from the current state depending on the get_list_diff flag + :param: current: current item attribute in ONTAP + :param: desired: attributes from playbook + :param: get_list_diff: specifies whether to have a diff of desired list w.r.t current list for an attribute + :return: list of attributes to be modified + :rtype: list + ''' + current_copy = deepcopy(current) + desired_copy = deepcopy(desired) + + # get 
what in desired and not in current + desired_diff_list = list() + for item in desired: + if item in current_copy: + current_copy.remove(item) + else: + desired_diff_list.append(item) + + # get what in current but not in desired + current_diff_list = [] + for item in current: + if item in desired_copy: + desired_copy.remove(item) + else: + current_diff_list.append(item) + + if desired_diff_list or current_diff_list: + # there are changes + if get_list_diff: + return desired_diff_list + else: + return desired + else: + return None + + def get_modified_attributes(self, current, desired, get_list_diff=False): + ''' takes two dicts of attributes and return a dict of attributes that are + not in the current state + It is expected that all attributes of interest are listed in current and + desired. + :param: current: current attributes on StorageGRID + :param: desired: attributes from playbook + :param: get_list_diff: specifies whether to have a diff of desired list w.r.t current list for an attribute + :return: dict of attributes to be modified + :rtype: dict + NOTE: depending on the attribute, the caller may need to do a modify or a + different operation (eg move volume if the modified attribute is an + aggregate name) + ''' + # if the object does not exist, we can't modify it + modified = {} + if current is None: + return modified + + # error out if keys do not match + self.check_keys(current, desired) + + # collect changed attributes + for key, value in current.items(): + if key in desired and desired[key] is not None: + if isinstance(value, list): + modified_list = self.compare_lists(value, desired[key], get_list_diff) # get modified list from current and desired + if modified_list is not None: + modified[key] = modified_list + elif isinstance(value, dict): + modified_dict = self.get_modified_attributes(value, desired[key]) + if modified_dict: + modified[key] = modified_dict + else: + try: + result = cmp(value, desired[key]) + except TypeError as exc: + raise TypeError("%s, key: %s, value: %s, desired: %s" % (repr(exc), key, repr(value), repr(desired[key]))) + else: + if result != 0: + modified[key] = desired[key] + if modified: + self.changed = True + return modified diff --git a/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_account.py b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_account.py new file mode 100644 index 000000000..88943c082 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_account.py @@ -0,0 +1,458 @@ +#!/usr/bin/python + +# (c) 2020, NetApp Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +"""NetApp StorageGRID - Manage Accounts""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + + +ANSIBLE_METADATA = { + "metadata_version": "1.1", + "status": ["preview"], + "supported_by": "community", +} + + +DOCUMENTATION = """ +module: na_sg_grid_account +short_description: NetApp StorageGRID manage accounts. +extends_documentation_fragment: + - netapp.storagegrid.netapp.sg +version_added: '20.6.0' +author: NetApp Ansible Team (@joshedmonds) <ng-ansibleteam@netapp.com> +description: +- Create, Update, Delete Tenant Accounts on NetApp StorageGRID. +options: + state: + description: + - Whether the specified account should exist or not. + - Required for all operations. + type: str + choices: ['present', 'absent'] + default: present + name: + description: + - Name of the tenant. 
+ - Required for create or modify operation. + type: str + account_id: + description: + - Account Id of the tenant. + - May be used for modify or delete operation. + type: str + protocol: + description: + - Object Storage protocol used by the tenancy. + - Required for create operation. + type: str + choices: ['s3', 'swift'] + management: + description: + - Whether the tenant can login to the StorageGRID tenant portal. + type: bool + default: true + use_own_identity_source: + description: + - Whether the tenant account should configure its own identity source. + type: bool + allow_platform_services: + description: + - Allows tenant to use platform services features such as CloudMirror. + type: bool + root_access_group: + description: + - Existing federated group to have initial Root Access permissions for the tenant. + - Must begin with C(federated-group/) + type: str + version_added: 20.11.0 + quota_size: + description: + - Quota to apply to the tenant specified in I(quota_size_unit). + - If you intend to have no limits, assign C(0). + type: int + default: 0 + quota_size_unit: + description: + - The unit used to interpret the size parameter. + choices: ['bytes', 'b', 'kb', 'mb', 'gb', 'tb', 'pb', 'eb', 'zb', 'yb'] + type: str + default: 'gb' + password: + description: + - Root password for tenant account. + - Requires root privilege. + type: str + update_password: + description: + - Choose when to update the password. + - When set to C(always), the password will always be updated. + - When set to C(on_create) the password will only be set upon a new user creation. + default: on_create + choices: + - on_create + - always + type: str +""" + +EXAMPLES = """ + - name: create a tenant account + netapp.storagegrid.na_sg_grid_account: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: present + name: storagegrid-tenant-1 + protocol: s3 + management: true + use_own_identity_source: false + allow_platform_services: false + password: "tenant-password" + quota_size: 0 + + - name: update a tenant account + netapp.storagegrid.na_sg_grid_account: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: present + name: storagegrid-tenant-1 + protocol: s3 + management: true + use_own_identity_source: false + allow_platform_services: true + password: "tenant-password" + quota_size: 10240 + + - name: delete a tenant account + netapp.storagegrid.na_sg_grid_account: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: absent + name: storagegrid-tenant-1 + protocol: s3 +""" + +RETURN = """ +resp: + description: Returns information about the StorageGRID tenant account. 
+ returned: success + type: dict + sample: { + "name": "Example Account", + "capabilities": ["management", "s3"], + "policy": { + "useAccountIdentitySource": true, + "allowPlatformServices": false, + "quotaObjectBytes": 100000000000 + }, + "id": "12345678901234567890" + } +""" + +import ansible_collections.netapp.storagegrid.plugins.module_utils.netapp as netapp_utils +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp_module import ( + NetAppModule, +) +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp import ( + SGRestAPI, +) + + +class SgGridAccount(object): + """ + Create, modify and delete StorageGRID Tenant Account + """ + + def __init__(self): + """ + Parse arguments, setup state variables, + check parameters and ensure request module is installed + """ + self.argument_spec = netapp_utils.na_storagegrid_host_argument_spec() + self.argument_spec.update( + dict( + state=dict(required=False, type="str", choices=["present", "absent"], default="present"), + name=dict(required=False, type="str"), + account_id=dict(required=False, type="str"), + protocol=dict(required=False, choices=["s3", "swift"]), + management=dict(required=False, type="bool", default=True), + use_own_identity_source=dict(required=False, type="bool"), + allow_platform_services=dict(required=False, type="bool"), + root_access_group=dict(required=False, type="str"), + quota_size=dict(required=False, type="int", default=0), + quota_size_unit=dict( + default="gb", + choices=[ + "bytes", + "b", + "kb", + "mb", + "gb", + "tb", + "pb", + "eb", + "zb", + "yb", + ], + type="str", + ), + password=dict(required=False, type="str", no_log=True), + update_password=dict( + default="on_create", choices=["on_create", "always"] + ), + ) + ) + + self.module = AnsibleModule( + argument_spec=self.argument_spec, + required_if=[ + ( + "state", + "present", + [ + "name", + "protocol", + "use_own_identity_source", + "allow_platform_services", + ], + ) + ], + supports_check_mode=True, + ) + + self.na_helper = NetAppModule() + + # set up state variables + self.parameters = self.na_helper.set_parameters(self.module.params) + # Calling generic SG rest_api class + self.rest_api = SGRestAPI(self.module) + + # Checking for the parameters passed and create new parameters list + self.data = {} + self.data["name"] = self.parameters["name"] + self.data["capabilities"] = [self.parameters["protocol"]] + + if self.parameters.get("password") is not None: + self.data["password"] = self.parameters["password"] + + # Append "management" to the capability list only if parameter is True + if self.parameters.get("management"): + self.data["capabilities"].append("management") + + self.data["policy"] = {} + + if "use_own_identity_source" in self.parameters: + self.data["policy"]["useAccountIdentitySource"] = self.parameters[ + "use_own_identity_source" + ] + + if "allow_platform_services" in self.parameters: + self.data["policy"]["allowPlatformServices"] = self.parameters[ + "allow_platform_services" + ] + + if self.parameters.get("root_access_group") is not None: + self.data["grantRootAccessToGroup"] = self.parameters["root_access_group"] + + if self.parameters["quota_size"] > 0: + self.parameters["quota_size"] = ( + self.parameters["quota_size"] + * netapp_utils.POW2_BYTE_MAP[ + self.parameters["quota_size_unit"] + ] + ) + self.data["policy"]["quotaObjectBytes"] = self.parameters[ + "quota_size" + ] + elif self.parameters["quota_size"] == 0: + 
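+ # quota_size of 0 means unlimited, so quotaObjectBytes is sent as None (JSON null).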
self.data["policy"]["quotaObjectBytes"] = None + + self.pw_change = {} + if self.parameters.get("password") is not None: + self.pw_change["password"] = self.parameters["password"] + + def get_tenant_account_id(self): + # Check if tenant account exists + # Return tenant account info if found, or None + api = "api/v3/grid/accounts?limit=350" + + list_accounts, error = self.rest_api.get(api) + + if error: + self.module.fail_json(msg=error) + + for account in list_accounts.get("data"): + if account["name"] == self.parameters["name"]: + return account["id"] + + return None + + def get_tenant_account(self, account_id): + api = "api/v3/grid/accounts/%s" % account_id + account, error = self.rest_api.get(api) + + if error: + self.module.fail_json(msg=error) + else: + return account["data"] + return None + + def create_tenant_account(self): + api = "api/v3/grid/accounts" + + response, error = self.rest_api.post(api, self.data) + + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def delete_tenant_account(self, account_id): + api = "api/v3/grid/accounts/" + account_id + + self.data = None + response, error = self.rest_api.delete(api, self.data) + if error: + self.module.fail_json(msg=error) + + def update_tenant_account(self, account_id): + api = "api/v3/grid/accounts/" + account_id + + if "password" in self.data: + del self.data["password"] + + if "grantRootAccessToGroup" in self.data: + del self.data["grantRootAccessToGroup"] + + response, error = self.rest_api.put(api, self.data) + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def set_tenant_root_password(self, account_id): + api = "api/v3/grid/accounts/%s/change-password" % account_id + response, error = self.rest_api.post(api, self.pw_change) + + if error: + self.module.fail_json(msg=error["text"]) + + def apply(self): + """ + Perform pre-checks, call functions and exit + """ + + tenant_account = None + + if self.parameters.get("account_id"): + tenant_account = self.get_tenant_account( + self.parameters["account_id"] + ) + + else: + tenant_account_id = self.get_tenant_account_id() + if tenant_account_id: + tenant_account = self.get_tenant_account(tenant_account_id) + + cd_action = self.na_helper.get_cd_action( + tenant_account, self.parameters + ) + + if cd_action is None and self.parameters["state"] == "present": + # let's see if we need to update parameters + update = False + + capability_diff = [ + i + for i in self.data["capabilities"] + + tenant_account["capabilities"] + if i not in self.data["capabilities"] + or i not in tenant_account["capabilities"] + ] + + if self.parameters["quota_size"] > 0: + if ( + tenant_account["policy"]["quotaObjectBytes"] + != self.parameters["quota_size"] + ): + update = True + elif ( + self.parameters["quota_size"] == 0 + and tenant_account["policy"]["quotaObjectBytes"] is not None + ): + update = True + + if ( + "use_own_identity_source" in self.parameters + and tenant_account["policy"]["useAccountIdentitySource"] + != self.parameters["use_own_identity_source"] + ): + update = True + + elif ( + "allow_platform_services" in self.parameters + and tenant_account["policy"]["allowPlatformServices"] + != self.parameters["allow_platform_services"] + ): + update = True + + elif capability_diff: + update = True + + if update: + self.na_helper.changed = True + + result_message = "" + resp_data = tenant_account + if self.na_helper.changed: + if self.module.check_mode: + pass + else: + if cd_action == "delete": + self.delete_tenant_account(tenant_account["id"]) + 
result_message = "Tenant Account deleted" + resp_data = None + + elif cd_action == "create": + resp_data = self.create_tenant_account() + result_message = "Tenant Account created" + + else: + resp_data = self.update_tenant_account(tenant_account["id"]) + result_message = "Tenant Account updated" + + # If a password has been set + if self.pw_change: + if self.module.check_mode: + pass + else: + # Only update the password if update_password is always + # On a create action, the password is set directly by the POST /grid/accounts method + if self.parameters["update_password"] == "always" and cd_action != "create": + self.set_tenant_root_password(tenant_account["id"]) + self.na_helper.changed = True + + results = [result_message, "Tenant Account root password updated"] + result_message = "; ".join(filter(None, results)) + + self.module.exit_json( + changed=self.na_helper.changed, msg=result_message, resp=resp_data + ) + + +def main(): + """ + Main function + """ + na_sg_grid_account = SgGridAccount() + na_sg_grid_account.apply() + + +if __name__ == "__main__": + main() diff --git a/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_certificate.py b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_certificate.py new file mode 100644 index 000000000..97f9ab972 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_certificate.py @@ -0,0 +1,226 @@ +#!/usr/bin/python + +# (c) 2021, NetApp Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +"""NetApp StorageGRID - Manage Certificates""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +DOCUMENTATION = """ +module: na_sg_grid_certificate +short_description: Manage the Storage API and Grid Management certificates on StorageGRID. +extends_documentation_fragment: + - netapp.storagegrid.netapp.sg +version_added: '21.6.0' +author: NetApp Ansible Team (@joshedmonds) <ng-ansibleteam@netapp.com> +description: +- Set and update the Storage API and Grid Management certificates on NetApp StorageGRID. +options: + state: + description: + - Whether the specified certificate should be set. + type: str + choices: ['present', 'absent'] + default: present + type: + description: + - Which certificate to update. + type: str + choices: ['storage-api', 'management'] + required: true + server_certificate: + description: + - X.509 server certificate in PEM-encoding. + type: str + ca_bundle: + description: + - Intermediate CA certificate bundle in concatenated PEM-encoding. + - Omit if there is no intermediate CA. + type: str + private_key: + description: + - Certificate private key in PEM-encoding. + - Required if I(server_certificate) is specified. 
+ type: str +""" + +EXAMPLES = """ + - name: set storage API certificate + netapp.storagegrid.na_sg_grid_certificate: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: present + type: storage-api + server_certificate: | + -----BEGIN CERTIFICATE----- + MIIC6DCCAdACCQC7l4WukhKD0zANBgkqhkiG9w0BAQsFADA2MQswCQYDVQQGEwJB + BAMMHnNnYW4wMS5kZXYubWljcm9icmV3Lm5ldGFwcC5hdTCCASIwDQYJKoZIhvcN + AQEBBQADggEPADCCAQoCggEBAMvjm9I35lmKcC7ITVL8+QiZ/klvdkbfZCUQrfdy + 71inP+XmPjs0rnkhICA9ItODteRcVlO+t7nDTfm7HgG0mJFkcJm0ffyEYrcx24qu + S7gXYQjRsJmrep1awoaCa20BMGuqK2WKI3IvZ7YiT22qkBqKJD+hIFffX6u3Jy+B + 77pR6YcATtpMHW/AaOx+OX9l80dIRsRZKMDxYQ== + -----END CERTIFICATE----- + private_key: | + -----BEGIN PRIVATE KEY----- + MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDL45vSN+ZZinAu + L25W0+cz1Oi69AKkI7d9nbFics2ay5+7o+4rKqf3en2R4MSxiJvy+iDlOmATib5O + x8TN5pJ9AgMBAAECggEADDLM8tHXXUoUFihzv+BUwff8p8YcbHcXFcSes+xTd5li + po8lNsx/v2pQx4ByBkuaYLZGIEXOWS6gkp44xhIXgQKBgQD4Hq7862u5HLbmhrV3 + vs8nC69b3QKBgQDacCD8d8JpwPbg8t2VjXM3UvdmgAaLUfU7O1DWV+W3jqzmDOoN + zWVgPbPNj0UmzvLDbgxLoxe77wjn2BHsAJVAfJ9VeQKBgGqFAegYO+wHR8lJUoa5 + ZEe8Upy2oBtvND/0dnwO2ym2FGsBJN0Gr4NKdG5vkzLsthKkcwRm0ikwEUOUZQKE + K8J5yEVeo9K2v3wggtq8fYn6 + -----END PRIVATE KEY----- + +""" + +RETURN = """ +resp: + description: Returns information about the StorageGRID server certificates. + returned: success + type: dict + sample: { + "serverCertificateEncoded": "-----BEGIN CERTIFICATE-----MIIC6DCCAdACCQC7l4WukhKD0zANBgkqhkiG9w0BAQsFADA2MQswCQYDVQQGE...-----END CERTIFICATE-----", + "caBundleEncoded": "-----BEGIN CERTIFICATE-----MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELM...-----END CERTIFICATE-----" + } +""" + +import json + +import ansible_collections.netapp.storagegrid.plugins.module_utils.netapp as netapp_utils +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp_module import NetAppModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp import SGRestAPI + + +class SgGridCertificate: + """ + Update StorageGRID certificates + """ + + def __init__(self): + """ + Parse arguments, setup state variables, + check parameters and ensure request module is installed + """ + self.argument_spec = netapp_utils.na_storagegrid_host_argument_spec() + self.argument_spec.update( + dict( + state=dict(required=False, type="str", choices=["present", "absent"], default="present"), + type=dict(required=True, type="str", choices=["storage-api", "management"]), + server_certificate=dict(required=False, type="str"), + ca_bundle=dict(required=False, type="str"), + private_key=dict(required=False, type="str", no_log=True), + ) + ) + + parameter_map = { + "server_certificate": "serverCertificateEncoded", + "ca_bundle": "caBundleEncoded", + "private_key": "privateKeyEncoded", + } + + self.module = AnsibleModule( + argument_spec=self.argument_spec, + required_if=[("state", "present", ["server_certificate", "private_key"])], + supports_check_mode=True, + ) + + self.na_helper = NetAppModule() + + # set up state variables + self.parameters = self.na_helper.set_parameters(self.module.params) + # Calling generic SG rest_api class + self.rest_api = SGRestAPI(self.module) + # Checking for the parameters passed and create new parameters list + self.data = {} + + if self.parameters["state"] == "present": + for k in parameter_map.keys(): + if self.parameters.get(k) is not None: + self.data[parameter_map[k]] = 
self.parameters[k] + + self.module.fail_json + + def get_grid_certificate(self, cert_type): + api = "api/v3/grid/%s" % cert_type + + response, error = self.rest_api.get(api) + + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def update_grid_certificate(self, cert_type): + api = "api/v3/grid/%s/update" % cert_type + + response, error = self.rest_api.post(api, self.data) + if error: + self.module.fail_json(msg=error) + + def apply(self): + """ + Perform pre-checks, call functions and exit + """ + + cert_type = "" + cd_action = None + + if self.parameters.get("type") == "storage-api": + cert_type = "storage-api-certificate" + elif self.parameters.get("type") == "management": + cert_type = "management-certificate" + + cert_data = self.get_grid_certificate(cert_type) + + if cert_data["serverCertificateEncoded"] is None and cert_data["caBundleEncoded"] is None: + cd_action = self.na_helper.get_cd_action(None, self.parameters) + else: + cd_action = self.na_helper.get_cd_action(cert_data, self.parameters) + + if cd_action is None and self.parameters["state"] == "present": + # let's see if we need to update parameters + update = False + + if self.data.get("serverCertificateEncoded") is not None and self.data.get("privateKeyEncoded") is not None: + for item in ["serverCertificateEncoded", "caBundleEncoded"]: + if self.data.get(item) != cert_data.get(item): + update = True + + if update: + self.na_helper.changed = True + + result_message = "" + resp_data = cert_data + if self.na_helper.changed: + if self.module.check_mode: + pass + else: + if cd_action == "delete": + self.update_grid_certificate(cert_type) + resp_data = self.get_grid_certificate(cert_type) + result_message = "Grid %s removed" % cert_type + + else: + self.update_grid_certificate(cert_type) + resp_data = self.get_grid_certificate(cert_type) + result_message = "Grid %s updated" % cert_type + + self.module.exit_json(changed=self.na_helper.changed, msg=result_message, resp=resp_data) + + +def main(): + """ + Main function + """ + na_sg_grid_certificate = SgGridCertificate() + na_sg_grid_certificate.apply() + + +if __name__ == "__main__": + main() diff --git a/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_client_certificate.py b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_client_certificate.py new file mode 100644 index 000000000..aa381b397 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_client_certificate.py @@ -0,0 +1,265 @@ +#!/usr/bin/python + +# (c) 2022, NetApp Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +"""NetApp StorageGRID - Manage Certificates""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +DOCUMENTATION = """ +module: na_sg_grid_client_certificate +short_description: Manage Client Certificates on StorageGRID +extends_documentation_fragment: + - netapp.storagegrid.netapp.sg +version_added: '21.11.0' +author: NetApp Ansible Team (@joshedmonds) <ng-ansibleteam@netapp.com> +description: +- Create, Update, Delete Client Certificates on NetApp StorageGRID. +options: + state: + description: + - Whether the specified certificate should exist. + type: str + choices: ['present', 'absent'] + default: present + certificate_id: + description: + - ID of the client certificate. + type: str + display_name: + description: + - A display name for the client certificate configuration. 
+ - This parameter can be modified if I(certificate_id) is also specified. + type: str + public_key: + description: + - X.509 client certificate in PEM-encoding. + type: str + allow_prometheus: + description: + - Whether the external monitoring tool can access Prometheus metrics. + type: bool +""" + +EXAMPLES = """ + - name: create client certificate + netapp.storagegrid.na_sg_grid_client_certificate: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: present + display_name: client-cert1 + public_key: | + -----BEGIN CERTIFICATE----- + MIIC6DCCAdACCQC7l4WukhKD0zANBgkqhkiG9w0BAQsFADA2..swCQYDVQQGEwJB + BAMMHnNnYW4wMS5kZXYubWljcm9icmV3Lm5ldGFwcC5hdTCC..IwDQYJKoZIhvcN + AQEBBQADggEPADCCAQoCggEBAMvjm9I35lmKcC7ITVL8+QiZ..lvdkbfZCUQrfdy + 71inP+XmPjs0rnkhICA9ItODteRcVlO+t7nDTfm7HgG0mJFk..m0ffyEYrcx24qu + S7gXYQjRsJmrep1awoaCa20BMGuqK2WKI3IvZ7YiT22qkBqK..+hIFffX6u3Jy+B + 77pR6YcATtpMHW/AaOx+OX9l80dIRsRZKMDxYQ== + -----END CERTIFICATE----- + allow_prometheus: true + + - name: rename client certificate + netapp.storagegrid.na_sg_grid_client_certificate: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: present + certificate_id: 00000000-0000-0000-0000-000000000000 + display_name: client-cert1-rename + public_key: | + -----BEGIN CERTIFICATE----- + MIIC6DCCAdACCQC7l4WukhKD0zANBgkqhkiG9w0BAQsFADA2..swCQYDVQQGEwJB + BAMMHnNnYW4wMS5kZXYubWljcm9icmV3Lm5ldGFwcC5hdTCC..IwDQYJKoZIhvcN + AQEBBQADggEPADCCAQoCggEBAMvjm9I35lmKcC7ITVL8+QiZ..lvdkbfZCUQrfdy + 71inP+XmPjs0rnkhICA9ItODteRcVlO+t7nDTfm7HgG0mJFk..m0ffyEYrcx24qu + S7gXYQjRsJmrep1awoaCa20BMGuqK2WKI3IvZ7YiT22qkBqK..+hIFffX6u3Jy+B + 77pR6YcATtpMHW/AaOx+OX9l80dIRsRZKMDxYQ== + -----END CERTIFICATE----- + allow_prometheus: true + + - name: delete client certificate + netapp.storagegrid.na_sg_grid_client_certificate: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: absent + display_name: client-cert1-rename +""" + +RETURN = """ +resp: + description: Returns information about the StorageGRID server certificates. 
+ returned: success + type: dict + sample: { + "id": "abcABC_01234-0123456789abcABCabc0123456789==", + "displayName": "client-cert1", + "expiryDate": "2024-01-01T00:00:00.000Z", + "publicKey": "-----BEGIN CERTIFICATE-----MIIC6DCCAdACCQC7l4WukhKD0zANBgkqhkiG9w0BAQsFADA2MQswCQYDVQQGE...-----END CERTIFICATE-----", + "allowPrometheus": true + } +""" + +import json + +import ansible_collections.netapp.storagegrid.plugins.module_utils.netapp as netapp_utils +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp_module import NetAppModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp import SGRestAPI + + +class SgGridClientCertificate: + """ + Update StorageGRID client certificates + """ + + def __init__(self): + """ + Parse arguments, setup state variables, + check parameters and ensure request module is installed + """ + self.argument_spec = netapp_utils.na_storagegrid_host_argument_spec() + self.argument_spec.update( + dict( + state=dict(required=False, type="str", choices=["present", "absent"], default="present"), + certificate_id=dict(required=False, type="str"), + display_name=dict(required=False, type="str"), + public_key=dict(required=False, type="str"), + allow_prometheus=dict(required=False, type="bool"), + ) + ) + + parameter_map = { + "display_name": "displayName", + "public_key": "publicKey", + "allow_prometheus": "allowPrometheus", + } + + self.module = AnsibleModule( + argument_spec=self.argument_spec, + required_if=[("state", "present", ["display_name", "public_key"])], + required_one_of=[("display_name", "certificate_id")], + supports_check_mode=True, + ) + + self.na_helper = NetAppModule() + + # set up state variables + self.parameters = self.na_helper.set_parameters(self.module.params) + # Calling generic SG rest_api class + self.rest_api = SGRestAPI(self.module) + # Checking for the parameters passed and create new parameters list + self.data = {} + + if self.parameters["state"] == "present": + for k in parameter_map.keys(): + if self.parameters.get(k) is not None: + self.data[parameter_map[k]] = self.parameters[k] + + self.module.fail_json + + def get_grid_client_certificate_id(self): + # Check if certificate with name exists + # Return certificate ID if found, or None + api = "api/v3/grid/client-certificates" + + response, error = self.rest_api.get(api) + + if error: + self.module.fail_json(msg=error) + + for cert in response.get("data"): + if cert["displayName"] == self.parameters["display_name"]: + return cert["id"] + return None + + def get_grid_client_certificate(self, cert_id): + api = "api/v3/grid/client-certificates/%s" % cert_id + account, error = self.rest_api.get(api) + + if error: + self.module.fail_json(msg=error) + else: + return account["data"] + return None + + def create_grid_client_certificate(self): + api = "api/v3/grid/client-certificates" + + response, error = self.rest_api.post(api, self.data) + + if error: + self.module.fail_json(msg=error["text"]) + + return response["data"] + + def delete_grid_client_certificate(self, cert_id): + api = "api/v3/grid/client-certificates/" + cert_id + + self.data = None + response, error = self.rest_api.delete(api, self.data) + if error: + self.module.fail_json(msg=error) + + def update_grid_client_certificate(self, cert_id): + api = "api/v3/grid/client-certificates/" + cert_id + + response, error = self.rest_api.put(api, self.data) + if error: + self.module.fail_json(msg=error["text"]) + + return response["data"] + + def 
apply(self): + """ + Perform pre-checks, call functions and exit + """ + + client_certificate = None + + if self.parameters.get("certificate_id"): + client_certificate = self.get_grid_client_certificate(self.parameters["certificate_id"]) + + else: + client_cert_id = self.get_grid_client_certificate_id() + if client_cert_id: + client_certificate = self.get_grid_client_certificate(client_cert_id) + + cd_action = self.na_helper.get_cd_action(client_certificate, self.parameters) + + if cd_action is None and self.parameters["state"] == "present": + # let's see if we need to update parameters + modify = self.na_helper.get_modified_attributes(client_certificate, self.data) + + result_message = "" + resp_data = client_certificate + + if self.na_helper.changed and not self.module.check_mode: + if cd_action == "delete": + self.delete_grid_client_certificate(client_certificate["id"]) + result_message = "Client Certificate deleted" + elif cd_action == "create": + resp_data = self.create_grid_client_certificate() + result_message = "Client Certificate created" + elif modify: + resp_data = self.update_grid_client_certificate(client_certificate["id"]) + result_message = "Client Certificate updated" + + self.module.exit_json(changed=self.na_helper.changed, msg=result_message, resp=resp_data) + + +def main(): + """ + Main function + """ + na_sg_grid_certificate = SgGridClientCertificate() + na_sg_grid_certificate.apply() + + +if __name__ == "__main__": + main() diff --git a/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_dns.py b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_dns.py new file mode 100644 index 000000000..95e4e4594 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_dns.py @@ -0,0 +1,163 @@ +#!/usr/bin/python + +# (c) 2020, NetApp Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +"""NetApp StorageGRID - Manage Grid DNS Servers""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + + +ANSIBLE_METADATA = { + "metadata_version": "1.1", + "status": ["preview"], + "supported_by": "community", +} + + +DOCUMENTATION = """ +module: na_sg_grid_dns +short_description: NetApp StorageGRID manage external DNS servers for the grid. +extends_documentation_fragment: + - netapp.storagegrid.netapp.sg +version_added: '20.6.0' +author: NetApp Ansible Team (@joshedmonds) <ng-ansibleteam@netapp.com> +description: +- Update NetApp StorageGRID DNS addresses. +options: + state: + description: + - Whether the specified DNS address should exist or not. + - Required for all operations. + type: str + choices: ['present'] + default: present + dns_servers: + description: + - List of comma separated DNS Addresses to be updated or delete. + type: list + elements: str + required: true +""" + +EXAMPLES = """ + - name: update DNS servers on StorageGRID + netapp.storagegrid.na_sg_grid_dns: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: present + dns_servers: "x.x.x.x,xxx.xxx.xxx.xxx" +""" + +RETURN = """ +resp: + description: Returns information about the configured DNS servers. 
+ returned: success + type: list + elements: str + sample: ["8.8.8.8", "8.8.4.4"] +""" + +import ansible_collections.netapp.storagegrid.plugins.module_utils.netapp as netapp_utils +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp_module import NetAppModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp import SGRestAPI + + +class SgGridDns(object): + """ + Create, modify and delete DNS entries for StorageGRID + """ + + def __init__(self): + """ + Parse arguments, setup state variables, + check parameters and ensure request module is installed + """ + self.argument_spec = netapp_utils.na_storagegrid_host_argument_spec() + self.argument_spec.update( + dict( + state=dict(required=False, type="str", choices=["present"], default="present"), + dns_servers=dict(required=True, type="list", elements="str"), + ) + ) + + self.module = AnsibleModule( + argument_spec=self.argument_spec, + # required_if=[("state", "present", ["state", "name", "protocol"])], + supports_check_mode=True, + ) + + self.na_helper = NetAppModule() + + # set up state variables + self.parameters = self.na_helper.set_parameters(self.module.params) + # Calling generic SG rest_api class + self.rest_api = SGRestAPI(self.module) + # Checking for the parameters passed and create new parameters list + self.data = self.parameters["dns_servers"] + + def get_grid_dns(self): + # Check if tenant account exists + # Return tenant account info if found, or None + api = "api/v3/grid/dns-servers" + + response, error = self.rest_api.get(api) + + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def update_grid_dns(self): + api = "api/v3/grid/dns-servers" + + response, error = self.rest_api.put(api, self.data) + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def apply(self): + """ + Perform pre-checks, call functions and exit + """ + grid_dns = self.get_grid_dns() + + cd_action = self.na_helper.get_cd_action(grid_dns, self.parameters["dns_servers"]) + + if cd_action is None and self.parameters["state"] == "present": + # let's see if we need to update parameters + update = False + + dns_diff = [i for i in self.data + grid_dns if i not in self.data or i not in grid_dns] + if dns_diff: + update = True + + if update: + self.na_helper.changed = True + result_message = "" + resp_data = grid_dns + if self.na_helper.changed: + if self.module.check_mode: + pass + else: + resp_data = self.update_grid_dns() + result_message = "Grid DNS updated" + + self.module.exit_json(changed=self.na_helper.changed, msg=result_message, resp=resp_data) + + +def main(): + """ + Main function + """ + na_sg_grid_dns = SgGridDns() + na_sg_grid_dns.apply() + + +if __name__ == "__main__": + main() diff --git a/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_gateway.py b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_gateway.py new file mode 100644 index 000000000..9202decff --- /dev/null +++ b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_gateway.py @@ -0,0 +1,532 @@ +#!/usr/bin/python + +# (c) 2021, NetApp Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +"""NetApp StorageGRID - Manage Load Balancer Endpoints""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +DOCUMENTATION = """ +module: na_sg_grid_gateway +short_description: Manage Load balancer (gateway) endpoints on 
StorageGRID. +extends_documentation_fragment: + - netapp.storagegrid.netapp.sg +version_added: '21.7.0' +author: NetApp Ansible Team (@jkandati) <ng-sg-ansibleteam@netapp.com> +description: +- Create or Update Load Balancer Endpoints on StorageGRID. +- This module is idempotent if I(private_key) is not specified. +- The module will match an existing config based on I(port) and I(display_name). +- If multiple load balancer endpoints exist utilizing the same port and display name, use I(gateway_id) to select the intended endpoint. +options: + state: + description: + - Whether the specified load balancer endpoint should be configured. + type: str + choices: ['present', 'absent'] + default: present + gateway_id: + description: + - ID of the load balancer endpoint. + type: str + version_added: '21.9.0' + display_name: + description: + - A display name for the configuration. + - This parameter can be modified if I(gateway_id) is also specified. + type: str + port: + description: + - The TCP port to serve traffic on. + - This parameter cannot be modified after the load balancer endpoint has been created. + type: int + required: true + secure: + description: + - Whether the load balancer endpoint serves HTTP or HTTPS traffic. + - This parameter cannot be modified after the load balancer endpoint has been created. + type: bool + default: true + enable_ipv4: + description: + - Indicates whether to listen for connections on IPv4. + type: bool + default: true + enable_ipv6: + description: + - Indicates whether to listen for connections on IPv6. + type: bool + default: true + binding_mode: + description: + - Binding mode to restrict accessibility of the load balancer endpoint. + - A binding mode other than I(global) requires StorageGRID 11.5 or greater. + type: str + choices: ['global', 'ha-groups', 'node-interfaces'] + default: 'global' + version_added: '21.9.0' + ha_groups: + description: + - A set of StorageGRID HA Groups by name or UUID to bind the load balancer endpoint to. + - Option is ignored unless I(binding_mode=ha-groups). + type: list + elements: str + version_added: '21.9.0' + node_interfaces: + description: + - A set of StorageGRID node interfaces to bind the load balancer endpoint to. + type: list + elements: dict + suboptions: + node: + description: + - Name of the StorageGRID node. + type: str + interface: + description: + - The interface to bind to. eth0 corresponds to the Grid Network, eth1 to the Admin Network, and eth2 to the Client Network. + type: str + version_added: '21.9.0' + default_service_type: + description: + - The type of service to proxy through the load balancer. + type: str + choices: ['s3', 'swift'] + default: 's3' + server_certificate: + description: + - X.509 server certificate in PEM-encoding. + - Omit if using default certificates. + type: str + required: false + private_key: + description: + - Certficate private key in PEM-encoding. + - Required if I(server_certificate) is not empty. + type: str + required: false + ca_bundle: + description: + - Intermediate CA certificate bundle in concatenated PEM-encoding. + - Omit when there is no intermediate CA. 
+    type: str
+    required: false
+
+"""
+EXAMPLES = """
+  - name: Create and Upload Certificate to a Gateway Endpoint with global binding
+    netapp.storagegrid.na_sg_grid_gateway:
+      api_url: "https://<storagegrid-endpoint-url>"
+      auth_token: "storagegrid-auth-token"
+      display_name: "FabricPool Endpoint"
+      port: 10443
+      secure: True
+      enable_ipv4: True
+      enable_ipv6: True
+      default_service_type: "s3"
+      server_certificate: |
+        -----BEGIN CERTIFICATE-----
+        MIIC6DCCAdACCQC7l4WukhKD0zANBgkqhkiG9w0BAQsFADA2..swCQYDVQQGEwJB
+        BAMMHnNnYW4wMS5kZXYubWljcm9icmV3Lm5ldGFwcC5hdTCC..IwDQYJKoZIhvcN
+        AQEBBQADggEPADCCAQoCggEBAMvjm9I35lmKcC7ITVL8+QiZ..lvdkbfZCUQrfdy
+        71inP+XmPjs0rnkhICA9ItODteRcVlO+t7nDTfm7HgG0mJFk..m0ffyEYrcx24qu
+        S7gXYQjRsJmrep1awoaCa20BMGuqK2WKI3IvZ7YiT22qkBqK..+hIFffX6u3Jy+B
+        77pR6YcATtpMHW/AaOx+OX9l80dIRsRZKMDxYQ==
+        -----END CERTIFICATE-----
+      private_key: |
+        -----BEGIN PRIVATE KEY-----
+        MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIB..DL45vSN+ZZinAu
+        L25W0+cz1Oi69AKkI7d9nbFics2ay5+7o+4rKqf3en2R4MSx..vy+iDlOmATib5O
+        x8TN5pJ9AgMBAAECggEADDLM8tHXXUoUFihzv+BUwff8p8Yc..cXFcSes+xTd5li
+        po8lNsx/v2pQx4ByBkuaYLZGIEXOWS6gkp44xhIXgQKBgQD4..7862u5HLbmhrV3
+        vs8nC69b3QKBgQDacCD8d8JpwPbg8t2VjXM3UvdmgAaLUfU7..DWV+W3jqzmDOoN
+        zWVgPbPNj0UmzvLDbgxLoxe77wjn2BHsAJVAfJ9VeQKBgGqF..gYO+wHR8lJUoa5
+        ZEe8Upy2oBtvND/0dnwO2ym2FGsBJN0Gr4NKdG5vkzLsthKk..Rm0ikwEUOUZQKE
+        K8J5yEVeo9K2v3wggtq8fYn6
+        -----END PRIVATE KEY-----
+      validate_certs: false
+
+  - name: Create an HTTP Gateway Endpoint with HA Group Binding
+    netapp.storagegrid.na_sg_grid_gateway:
+      api_url: "https://<storagegrid-endpoint-url>"
+      auth_token: "storagegrid-auth-token"
+      display_name: "App Endpoint 1"
+      port: 10501
+      secure: false
+      enable_ipv4: True
+      enable_ipv6: True
+      default_service_type: "s3"
+      binding_mode: ha-groups
+      ha_groups: site1_ha_group
+      validate_certs: false
+
+  - name: Create an HTTP Gateway Endpoint with Node Interface Binding
+    netapp.storagegrid.na_sg_grid_gateway:
+      api_url: "https://<storagegrid-endpoint-url>"
+      auth_token: "storagegrid-auth-token"
+      display_name: "App Endpoint 2"
+      port: 10502
+      secure: false
+      enable_ipv4: True
+      enable_ipv6: True
+      default_service_type: "s3"
+      binding_mode: node-interfaces
+      node_interfaces:
+        - node: SITE1_ADM1
+          interface: eth2
+        - node: SITE2_ADM1
+          interface: eth2
+      validate_certs: false
+
+  - name: Delete Gateway Endpoint
+    netapp.storagegrid.na_sg_grid_gateway:
+      api_url: "https://<storagegrid-endpoint-url>"
+      auth_token: "storagegrid-auth-token"
+      state: absent
+      display_name: "App Endpoint 2"
+      port: 10502
+      default_service_type: "s3"
+      validate_certs: false
+"""
+
+RETURN = """
+resp:
+    description: Returns information about the StorageGRID Load Balancer Endpoint.
+ returned: success + type: dict + sample: { + "id": "ffffffff-ffff-ffff-ffff-ffffffffffff", + "displayName": "ansibletest-secure", + "enableIPv4": True, + "enableIPv6": True, + "port": 10443, + "secure": True, + "accountId": "0", + "defaultServiceType": "s3", + "certSource": "plaintext", + "plaintextCertData": { + "serverCertificateEncoded": "-----BEGIN CERTIFICATE-----MIIC6DCCAdACCQC7l4WukhKD0zANBgkqhkiG9w0BAQsFADA2MQswCQYDVQQGE...-----END CERTIFICATE-----", + "caBundleEncoded": "-----BEGIN CERTIFICATE-----MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELM...-----END CERTIFICATE-----", + "metadata": {...} + } + } +""" + +import ansible_collections.netapp.storagegrid.plugins.module_utils.netapp as netapp_utils +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp_module import NetAppModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp import SGRestAPI + + +class SgGridGateway: + """ + Create, modify and delete Gateway entries for StorageGRID + """ + + def __init__(self): + """ + Parse arguments, setup state variables, + check parameters and ensure request module is installed + """ + self.argument_spec = netapp_utils.na_storagegrid_host_argument_spec() + self.argument_spec.update( + dict( + # Arguments for Creating Gateway Port + state=dict(required=False, type="str", choices=["present", "absent"], default="present"), + gateway_id=dict(required=False, type="str"), + display_name=dict(required=False, type="str"), + port=dict(required=True, type="int"), + secure=dict(required=False, type="bool", default=True), + enable_ipv4=dict(required=False, type="bool", default=True), + enable_ipv6=dict(required=False, type="bool", default=True), + binding_mode=dict( + required=False, type="str", choices=["global", "ha-groups", "node-interfaces"], default="global" + ), + ha_groups=dict(required=False, type="list", elements="str"), + node_interfaces=dict( + required=False, + type="list", + elements="dict", + options=dict( + node=dict(required=False, type="str"), + interface=dict(required=False, type="str"), + ), + ), + # Arguments for setting Gateway Virtual Server + default_service_type=dict(required=False, type="str", choices=["s3", "swift"], default="s3"), + server_certificate=dict(required=False, type="str"), + ca_bundle=dict(required=False, type="str"), + private_key=dict(required=False, type="str", no_log=True), + ) + ) + + parameter_map_gateway = { + "gateway_id": "id", + "display_name": "displayName", + "port": "port", + "secure": "secure", + "enable_ipv4": "enableIPv4", + "enable_ipv6": "enableIPv6", + } + parameter_map_server = { + "server_certificate": "serverCertificateEncoded", + "ca_bundle": "caBundleEncoded", + "private_key": "privateKeyEncoded", + } + self.module = AnsibleModule( + argument_spec=self.argument_spec, + required_if=[("state", "present", ["display_name"])], + supports_check_mode=True, + ) + + self.na_helper = NetAppModule() + + # set up state variables + self.parameters = self.na_helper.set_parameters(self.module.params) + # Calling generic SG rest_api class + self.rest_api = SGRestAPI(self.module) + # Get API version + self.rest_api.get_sg_product_version() + + # Checking for the parameters passed and create new parameters list + + # Parameters for creating a new gateway port configuration + self.data_gateway = {} + self.data_gateway["accountId"] = "0" + + for k in parameter_map_gateway.keys(): + if self.parameters.get(k) is not None: + 
self.data_gateway[parameter_map_gateway[k]] = self.parameters[k] + + # Parameters for setting a gateway virtual server configuration for a gateway port + self.data_server = {} + self.data_server["defaultServiceType"] = self.parameters["default_service_type"] + + if self.parameters["secure"]: + self.data_server["plaintextCertData"] = {} + self.data_server["certSource"] = "plaintext" + + for k in parameter_map_server.keys(): + if self.parameters.get(k) is not None: + self.data_server["plaintextCertData"][parameter_map_server[k]] = self.parameters[k] + + if self.parameters["binding_mode"] != "global": + self.rest_api.fail_if_not_sg_minimum_version("non-global binding mode", 11, 5) + + if self.parameters["binding_mode"] == "ha-groups": + self.data_gateway["pinTargets"] = {} + self.data_gateway["pinTargets"]["haGroups"] = self.build_ha_group_list() + self.data_gateway["pinTargets"]["nodeInterfaces"] = [] + + elif self.parameters["binding_mode"] == "node-interfaces": + self.data_gateway["pinTargets"] = {} + self.data_gateway["pinTargets"]["nodeInterfaces"] = self.build_node_interface_list() + self.data_gateway["pinTargets"]["haGroups"] = [] + + else: + self.data_gateway["pinTargets"] = {} + self.data_gateway["pinTargets"]["haGroups"] = [] + self.data_gateway["pinTargets"]["nodeInterfaces"] = [] + + def build_ha_group_list(self): + ha_group_ids = [] + + api = "api/v3/private/ha-groups" + ha_groups, error = self.rest_api.get(api) + if error: + self.module.fail_json(msg=error) + + for param in self.parameters["ha_groups"]: + ha_group = next( + (item for item in ha_groups["data"] if (item["name"] == param or item["id"] == param)), None + ) + if ha_group is not None: + ha_group_ids.append(ha_group["id"]) + else: + self.module.fail_json(msg="HA Group '%s' is invalid" % param) + + return ha_group_ids + + def build_node_interface_list(self): + node_interfaces = [] + + api = "api/v3/grid/node-health" + nodes, error = self.rest_api.get(api) + + if error: + self.module.fail_json(msg=error) + + for node_interface in self.parameters["node_interfaces"]: + node_dict = {} + node = next((item for item in nodes["data"] if item["name"] == node_interface["node"]), None) + if node is not None: + node_dict["nodeId"] = node["id"] + node_dict["interface"] = node_interface["interface"] + node_interfaces.append(node_dict) + else: + self.module.fail_json(msg="Node '%s' is invalid" % node_interface["node"]) + + return node_interfaces + + def get_grid_gateway_config(self, gateway_id): + api = "api/v3/private/gateway-configs/%s" % gateway_id + response, error = self.rest_api.get(api) + + if error: + self.module.fail_json(msg=error) + + gateway = response["data"] + gateway_config = self.get_grid_gateway_server_config(gateway["id"]) + + return gateway, gateway_config + + def get_grid_gateway_server_config(self, gateway_id): + api = "api/v3/private/gateway-configs/%s/server-config" % gateway_id + response, error = self.rest_api.get(api) + + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def get_grid_gateway_ports(self, target_port): + + configured_ports = [] + gateway = {} + gateway_config = {} + + api = "api/v3/private/gateway-configs" + response, error = self.rest_api.get(api) + + if error: + self.module.fail_json(msg=error) + + grid_gateway_ports = response["data"] + + # Get only a list of used ports + configured_ports = [data["port"] for data in grid_gateway_ports] + + for index, port in enumerate(configured_ports): + # if port already exists then get gateway ID and get the gateway port server 
configs + if target_port == port and grid_gateway_ports[index]["displayName"] == self.parameters["display_name"]: + gateway = grid_gateway_ports[index] + gateway_config = self.get_grid_gateway_server_config(gateway["id"]) + break + + return gateway, gateway_config + + def create_grid_gateway(self): + api = "api/v3/private/gateway-configs" + response, error = self.rest_api.post(api, self.data_gateway) + + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def delete_grid_gateway(self, gateway_id): + api = "api/v3/private/gateway-configs/" + gateway_id + self.data = None + response, error = self.rest_api.delete(api, self.data) + + if error: + self.module.fail_json(msg=error) + + def update_grid_gateway(self, gateway_id): + api = "api/v3/private/gateway-configs/%s" % gateway_id + response, error = self.rest_api.put(api, self.data_gateway) + + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def update_grid_gateway_server(self, gateway_id): + api = "api/v3/private/gateway-configs/%s/server-config" % gateway_id + response, error = self.rest_api.put(api, self.data_server) + + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def apply(self): + gateway = None + gateway_config = None + + update_gateway = False + update_gateway_server = False + + if self.parameters.get("gateway_id"): + gateway, gateway_config = self.get_grid_gateway_config(self.parameters["gateway_id"]) + + else: + # Get list of all gateway port configurations + gateway, gateway_config = self.get_grid_gateway_ports(self.data_gateway["port"]) + + cd_action = self.na_helper.get_cd_action(gateway.get("id"), self.parameters) + + if cd_action is None and self.parameters["state"] == "present": + update = False + + if self.data_server.get("plaintextCertData"): + if self.data_server["plaintextCertData"].get("privateKeyEncoded") is not None: + update = True + self.module.warn("This module is not idempotent when private_key is present.") + + if gateway_config.get("plaintextCertData"): + # If certificate private key supplied, update + if gateway_config["plaintextCertData"].get("metadata"): + # remove metadata because we can't compare that + del gateway_config["plaintextCertData"]["metadata"] + + # compare current and desired state + # gateway config cannot be modified until StorageGRID 11.5 + if self.rest_api.meets_sg_minimum_version(11, 5): + update_gateway = self.na_helper.get_modified_attributes(gateway, self.data_gateway) + update_gateway_server = self.na_helper.get_modified_attributes(gateway_config, self.data_server) + + if update: + self.na_helper.changed = True + + result_message = "" + resp_data = {} + + if self.na_helper.changed and not self.module.check_mode: + if cd_action == "delete": + self.delete_grid_gateway(gateway["id"]) + result_message = "Load Balancer Gateway Port Deleted" + + elif cd_action == "create": + resp_data = self.create_grid_gateway() + gateway["id"] = resp_data["id"] + resp_data_server = self.update_grid_gateway_server(gateway["id"]) + resp_data.update(resp_data_server) + result_message = "Load Balancer Gateway Port Created" + + else: + resp_data = gateway + if update_gateway: + resp_data = self.update_grid_gateway(gateway["id"]) + resp_data.update(gateway_config) + + if update_gateway_server: + resp_data_server = self.update_grid_gateway_server(gateway["id"]) + resp_data.update(resp_data_server) + result_message = "Load Balancer Gateway Port Updated" + + self.module.exit_json(changed=self.na_helper.changed, msg=result_message, 
resp=resp_data) + + +def main(): + """ + Main function + """ + na_sg_grid_gateway = SgGridGateway() + na_sg_grid_gateway.apply() + + +if __name__ == "__main__": + main() diff --git a/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_group.py b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_group.py new file mode 100644 index 000000000..60592c609 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_group.py @@ -0,0 +1,341 @@ +#!/usr/bin/python + +# (c) 2020, NetApp Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +"""NetApp StorageGRID - Manage Grid Groups""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + + +ANSIBLE_METADATA = { + "metadata_version": "1.1", + "status": ["preview"], + "supported_by": "community", +} + + +DOCUMENTATION = """ +module: na_sg_grid_group +short_description: NetApp StorageGRID manage groups. +extends_documentation_fragment: + - netapp.storagegrid.netapp.sg +version_added: '20.6.0' +author: NetApp Ansible Team (@joshedmonds) <ng-ansibleteam@netapp.com> +description: +- Create, Update, Delete Administration Groups within NetApp StorageGRID. +options: + state: + description: + - Whether the specified group should exist or not. + type: str + choices: ['present', 'absent'] + default: present + display_name: + description: + - Name of the group. + - Required for create operation + type: str + unique_name: + description: + - Unique Name for the group. Must begin with C(group/) or C(federated-group/) + - Required for create, modify or delete operation. + type: str + required: true + management_policy: + description: + - Management access controls granted to the group within the tenancy. + type: dict + suboptions: + alarm_acknowledgement: + description: + - Group members can have permission to acknowledge alarms. + required: false + type: bool + other_grid_configuration: + description: + - Need to investigate. + required: false + type: bool + grid_topology_page_configuration: + description: + - Users in this group will have permissions to change grid topology. + required: false + type: bool + tenant_accounts: + description: + - Users in this group will have permissions to manage tenant accounts. + required: false + type: bool + change_tenant_root_password: + description: + - Users in this group will have permissions to change tenant password. + required: false + type: bool + maintenance: + description: + - Users in this group will have permissions to run maintenance tasks on StorageGRID. + required: false + type: bool + metrics_query: + description: + - Users in this group will have permissions to query metrics on StorageGRID. + required: false + type: bool + activate_features: + description: + - Users in this group will have permissions to reactivate features. + required: false + type: bool + ilm: + description: + - Users in this group will have permissions to manage ILM rules on StorageGRID. + required: false + type: bool + object_metadata: + description: + - Users in this group will have permissions to manage object metadata. + required: false + type: bool + root_access: + description: + - Users in this group will have root access. 
+ required: false + type: bool +""" + +EXAMPLES = """ + - name: create a StorageGRID group + netapp.storagegrid.na_sg_grid_group: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: present + display_name: ansiblegroup100 + unique_name: group/ansiblegroup100 + management_policy: + tenant_accounts: true + maintenance: true + root_access: false +""" + +RETURN = """ +resp: + description: Returns information about the StorageGRID group attributes. + returned: success + type: dict + sample: { + "displayName": "Example Group", + "policies": { + "management": { + "alarmAcknowledgment": true, + "manageAlerts": true, + "otherGridConfiguration": true, + "gridTopologyPageConfiguration": true, + "tenantAccounts": true, + "changeTenantRootPassword": true, + "maintenance": true, + "metricsQuery": true, + "activateFeatures": false, + "ilm": true, + "objectMetadata": true, + "storageAdmin": true, + "rootAccess": true + } + }, + "uniqueName": "group/examplegroup", + "accountId": "12345678901234567890", + "id": "00000000-0000-0000-0000-000000000000", + "federated": false, + "groupURN": "urn:sgws:identity::12345678901234567890:group/examplegroup" + } +""" + +import json +import re + +import ansible_collections.netapp.storagegrid.plugins.module_utils.netapp as netapp_utils +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp_module import NetAppModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp import SGRestAPI + + +class SgGridGroup(object): + """ + Create, modify and delete StorageGRID Grid-administration Group + """ + + def __init__(self): + """ + Parse arguments, setup state variables, + check parameters and ensure request module is installed + """ + self.argument_spec = netapp_utils.na_storagegrid_host_argument_spec() + self.argument_spec.update( + dict( + state=dict(required=False, type="str", choices=["present", "absent"], default="present"), + display_name=dict(required=False, type="str"), + unique_name=dict(required=True, type="str"), + management_policy=dict( + required=False, + type="dict", + options=dict( + alarm_acknowledgement=dict(required=False, type="bool"), + other_grid_configuration=dict(required=False, type="bool"), + grid_topology_page_configuration=dict(required=False, type="bool"), + tenant_accounts=dict(required=False, type="bool"), + change_tenant_root_password=dict(required=False, type="bool"), + maintenance=dict(required=False, type="bool"), + metrics_query=dict(required=False, type="bool"), + activate_features=dict(required=False, type="bool"), + ilm=dict(required=False, type="bool"), + object_metadata=dict(required=False, type="bool"), + root_access=dict(required=False, type="bool"), + ), + ), + ) + ) + parameter_map = { + "alarm_acknowledgement": "alarmAcknowledgement", + "other_grid_configuration": "otherGridConfiguration", + "grid_topology_page_configuration": "gridTopologyPageConfiguration", + "tenant_accounts": "tenantAccounts", + "change_tenant_root_password": "changeTenantRootPassword", + "maintenance": "maintenance", + "metrics_query": "metricsQuery", + "activate_features": "activateFeatures", + "ilm": "ilm", + "object_metadata": "objectMetadata", + "root_access": "rootAccess", + } + self.module = AnsibleModule(argument_spec=self.argument_spec, supports_check_mode=True,) + + self.na_helper = NetAppModule() + + # set up state variables + self.parameters = self.na_helper.set_parameters(self.module.params) + 
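+        # For reference, a worked example of the request body assembled below
+        # (an illustration derived from the EXAMPLES task, not output captured from a live grid):
+        # with management_policy {tenant_accounts: true, maintenance: true, root_access: false}
+        # the payload becomes
+        #   {"displayName": "ansiblegroup100",
+        #    "uniqueName": "group/ansiblegroup100",
+        #    "policies": {"management": {"tenantAccounts": True, "maintenance": True}}}
+        # because only truthy management_policy flags are mapped to their camelCase API names.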
# Calling generic SG rest_api class + self.rest_api = SGRestAPI(self.module) + # Checking for the parameters passed and create new parameters list + self.data = {} + self.data["displayName"] = self.parameters.get("display_name") + self.data["uniqueName"] = self.parameters["unique_name"] + # Only add the parameter if value is True, as JSON response does not include non-true objects + self.data["policies"] = {} + + if self.parameters.get("management_policy"): + self.data["policies"] = { + "management": dict( + (parameter_map[k], v) for (k, v) in self.parameters["management_policy"].items() if v + ) + } + if not self.data["policies"].get("management"): + self.data["policies"]["management"] = None + + self.re_local_group = re.compile("^group/") + self.re_fed_group = re.compile("^federated-group/") + + if ( + self.re_local_group.match(self.parameters["unique_name"]) is None + and self.re_fed_group.match(self.parameters["unique_name"]) is None + ): + self.module.fail_json(msg="unique_name must begin with 'group/' or 'federated-group/'") + + def get_grid_group(self, unique_name): + # Use the unique name to check if the group exists + api = "api/v3/grid/groups/%s" % unique_name + response, error = self.rest_api.get(api) + + if error: + if response["code"] != 404: + self.module.fail_json(msg=error) + else: + return response["data"] + return None + + def create_grid_group(self): + api = "api/v3/grid/groups" + + response, error = self.rest_api.post(api, self.data) + + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def delete_grid_group(self, group_id): + api = "api/v3/grid/groups/" + group_id + + self.data = None + response, error = self.rest_api.delete(api, self.data) + if error: + self.module.fail_json(msg=error) + + def update_grid_group(self, group_id): + api = "api/v3/grid/groups/" + group_id + + response, error = self.rest_api.put(api, self.data) + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def apply(self): + """ + Perform pre-checks, call functions and exit + """ + grid_group = self.get_grid_group(self.parameters["unique_name"]) + + cd_action = self.na_helper.get_cd_action(grid_group, self.parameters) + + if cd_action is None and self.parameters["state"] == "present": + # let's see if we need to update parameters + update = False + + if self.parameters.get("management_policy"): + if ( + grid_group.get("policies") is None + or grid_group.get("policies", {}).get("management") != self.data["policies"]["management"] + ): + update = True + + if update: + self.na_helper.changed = True + result_message = "" + resp_data = grid_group + if self.na_helper.changed: + if self.module.check_mode: + pass + else: + if cd_action == "delete": + self.delete_grid_group(grid_group["id"]) + result_message = "Grid Group deleted" + + elif cd_action == "create": + resp_data = self.create_grid_group() + result_message = "Grid Group created" + + else: + # for a federated group, the displayName parameter needs to be specified + # and must match the existing displayName + if self.re_fed_group.match(self.parameters["unique_name"]): + self.data["displayName"] = grid_group["displayName"] + + resp_data = self.update_grid_group(grid_group["id"]) + result_message = "Grid Group updated" + + self.module.exit_json(changed=self.na_helper.changed, msg=result_message, resp=resp_data) + + +def main(): + """ + Main function + """ + na_sg_grid_group = SgGridGroup() + na_sg_grid_group.apply() + + +if __name__ == "__main__": + main() diff --git 
a/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_ha_group.py b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_ha_group.py new file mode 100644 index 000000000..c99719c6d --- /dev/null +++ b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_ha_group.py @@ -0,0 +1,334 @@ +#!/usr/bin/python + +# (c) 2022, NetApp Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +"""NetApp StorageGRID - Manage HA Groups""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +DOCUMENTATION = """ +module: na_sg_grid_ha_group +short_description: Manage high availability (HA) group configuration on StorageGRID. +extends_documentation_fragment: + - netapp.storagegrid.netapp.sg +version_added: '21.10.0' +author: NetApp Ansible Team (@joshedmonds) <ng-ansibleteam@netapp.com> +description: +- Create, Update, Delete HA Groups on NetApp StorageGRID. +options: + state: + description: + - Whether the specified HA Group should exist. + type: str + choices: ['present', 'absent'] + default: present + name: + description: + - Name of the HA Group. + type: str + ha_group_id: + description: + - HA Group ID. + - May be used for modify or delete operation. + type: str + description: + description: + - Description of the HA Group. + type: str + gateway_cidr: + description: + - CIDR for the gateway IP and VIP subnet. + type: str + virtual_ips: + description: + - A list of virtual IP addresses. + type: list + elements: str + interfaces: + description: + - A set of StorageGRID node interface pairs. + - The primary interface is specified first, followed by the other interface pairs in failover order. + type: list + elements: dict + suboptions: + node: + description: + - Name of the StorageGRID node. + type: str + interface: + description: + - The interface to bind to. eth0 corresponds to the Grid Network, eth1 to the Admin Network, and eth2 to the Client Network. 
+ type: str +""" + +EXAMPLES = """ + - name: create HA Group + netapp.storagegrid.na_sg_grid_ha_group: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: present + name: Site1-HA-Group + description: "Site 1 HA Group" + gateway_cidr: 192.168.50.1/24 + virtual_ips: 192.168.50.5 + interfaces: + - node: SITE1-ADM1 + interface: eth2 + - node: SITE1-G1 + interface: eth2 + + - name: add VIP to HA Group + netapp.storagegrid.na_sg_grid_ha_group: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: present + name: Site1-HA-Group + description: "Site 1 HA Group" + gateway_cidr: 192.168.50.1/24 + virtual_ips: 192.168.50.5,192.168.50.6 + interfaces: + - node: SITE1-ADM1 + interface: eth2 + - node: SITE1-G1 + interface: eth2 + + - name: rename HA Group + netapp.storagegrid.na_sg_grid_ha_group: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: present + ha_group_id: 00000000-0000-0000-0000-000000000000 + name: Site1-HA-Group-New-Name + description: "Site 1 HA Group" + gateway_cidr: 192.168.50.1/24 + virtual_ips: 192.168.50.5 + interfaces: + - node: SITE1-ADM1 + interface: eth2 + - node: SITE1-G1 + interface: eth2 + + - name: delete HA Group + netapp.storagegrid.na_sg_grid_ha_group: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: absent + name: Site1-HA-Group +""" + +RETURN = """ +resp: + description: Returns information about the StorageGRID HA Group. + returned: success + type: dict + sample: { + "description": "Site 1 HA Group", + "gatewayCidr": "192.168.50.1/24", + "id": "bb386f30-805d-4fec-a2c5-85790b460db0", + "interfaces": [ + { + "interface": "eth2", + "nodeId": "0b1866ed-d6e7-41b4-815f-bf867348b76b" + }, + { + "interface": "eth2", + "nodeId": "7bb5bf05-a04c-4344-8abd-08c5c4048666" + } + ], + "name": "Site1-HA-Group", + "virtualIps": [ + "192.168.50.5", + "192.168.50.6" + ] + } +""" + +import json + +import ansible_collections.netapp.storagegrid.plugins.module_utils.netapp as netapp_utils +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp_module import NetAppModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp import SGRestAPI + + +class SgGridHaGroup: + """ + Create, modify and delete HA Group configurations for StorageGRID + """ + + def __init__(self): + """ + Parse arguments, setup state variables, + check parameters and ensure request module is installed + """ + self.argument_spec = netapp_utils.na_storagegrid_host_argument_spec() + self.argument_spec.update( + dict( + state=dict(required=False, type="str", choices=["present", "absent"], default="present"), + name=dict(required=False, type="str"), + ha_group_id=dict(required=False, type="str"), + description=dict(required=False, type="str"), + gateway_cidr=dict(required=False, type="str"), + virtual_ips=dict(required=False, type="list", elements="str"), + interfaces=dict( + required=False, + type="list", + elements="dict", + options=dict( + node=dict(required=False, type="str"), + interface=dict(required=False, type="str"), + ), + ), + ) + ) + + parameter_map = { + "name": "name", + "description": "description", + "gateway_cidr": "gatewayCidr", + "virtual_ips": "virtualIps", + } + + self.module = AnsibleModule( + argument_spec=self.argument_spec, + 
required_if=[("state", "present", ["name", "gateway_cidr", "virtual_ips", "interfaces"])], + required_one_of=[("name", "ha_group_id")], + ) + + self.na_helper = NetAppModule() + + # set up state variables + self.parameters = self.na_helper.set_parameters(self.module.params) + # Calling generic SG rest_api class + self.rest_api = SGRestAPI(self.module) + # Checking for the parameters passed and create new parameters list + self.data = {} + + if self.parameters["state"] == "present": + for k in parameter_map.keys(): + if self.parameters.get(k) is not None: + self.data[parameter_map[k]] = self.parameters[k] + + if self.parameters.get("interfaces") is not None: + self.data["interfaces"] = self.build_node_interface_list() + + def build_node_interface_list(self): + node_interfaces = [] + + api = "api/v3/grid/node-health" + nodes, error = self.rest_api.get(api) + + if error: + self.module.fail_json(msg=error) + + for node_interface in self.parameters["interfaces"]: + node_dict = {} + node = next((item for item in nodes["data"] if item["name"] == node_interface["node"]), None) + if node is not None: + node_dict["nodeId"] = node["id"] + node_dict["interface"] = node_interface["interface"] + node_interfaces.append(node_dict) + else: + self.module.fail_json(msg="Node '%s' is invalid" % node_interface["node"]) + + return node_interfaces + + def get_ha_group_id(self): + # Check if HA Group exists + # Return HA Group info if found, or None + api = "api/v3/private/ha-groups" + response, error = self.rest_api.get(api) + + if error: + self.module.fail_json(msg=error) + + return next((item["id"] for item in response.get("data") if item["name"] == self.parameters["name"]), None) + + def get_ha_group(self, ha_group_id): + api = "api/v3/private/ha-groups/%s" % ha_group_id + response, error = self.rest_api.get(api) + + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def create_ha_group(self): + api = "api/v3/private/ha-groups" + response, error = self.rest_api.post(api, self.data) + + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def delete_ha_group(self, ha_group_id): + api = "api/v3/private/ha-groups/%s" % ha_group_id + dummy, error = self.rest_api.delete(api, self.data) + + if error: + self.module.fail_json(msg=error) + + def update_ha_group(self, ha_group_id): + api = "api/v3/private/ha-groups/%s" % ha_group_id + response, error = self.rest_api.put(api, self.data) + + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def apply(self): + """ + Perform pre-checks, call functions and exit + """ + + ha_group = None + + if self.parameters.get("ha_group_id"): + ha_group = self.get_ha_group(self.parameters["ha_group_id"]) + else: + ha_group_id = self.get_ha_group_id() + if ha_group_id: + ha_group = self.get_ha_group(ha_group_id) + + cd_action = self.na_helper.get_cd_action(ha_group, self.parameters) + + if cd_action is None and self.parameters["state"] == "present": + # let's see if we need to update parameters + modify = self.na_helper.get_modified_attributes(ha_group, self.data) + + result_message = "" + resp_data = {} + + if self.na_helper.changed and not self.module.check_mode: + if cd_action == "delete": + self.delete_ha_group(ha_group["id"]) + result_message = "HA Group deleted" + elif cd_action == "create": + resp_data = self.create_ha_group() + result_message = "HA Group created" + elif modify: + resp_data = self.update_ha_group(ha_group["id"]) + result_message = "HA Group updated" + + 
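+        # Re-running the task with an unchanged configuration should be a no-op: cd_action
+        # resolves to None, get_modified_attributes() finds no differences, and the call
+        # below reports changed=False with an empty message. For example, applying the
+        # "create HA Group" task from EXAMPLES a second time leaves the grid untouched.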
self.module.exit_json(changed=self.na_helper.changed, msg=result_message, resp=resp_data) + + +def main(): + """ + Main function + """ + na_sg_grid_ha_group = SgGridHaGroup() + na_sg_grid_ha_group.apply() + + +if __name__ == "__main__": + main() diff --git a/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_identity_federation.py b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_identity_federation.py new file mode 100644 index 000000000..729cf4545 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_identity_federation.py @@ -0,0 +1,335 @@ +#!/usr/bin/python + +# (c) 2021, NetApp Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +"""NetApp StorageGRID - Manage Grid Identity Federation""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + + +DOCUMENTATION = """ +module: na_sg_grid_identity_federation +short_description: NetApp StorageGRID manage Grid identity federation. +extends_documentation_fragment: + - netapp.storagegrid.netapp.sg +version_added: '21.6.0' +author: NetApp Ansible Team (@joshedmonds) <ng-ansibleteam@netapp.com> +description: +- Configure Grid Identity Federation within NetApp StorageGRID. +- If module is run with I(check_mode), a connectivity test will be performed using the supplied values without changing the configuration. +- This module is idempotent if I(password) is not specified. +options: + state: + description: + - Whether identity federation should be enabled or not. + type: str + choices: ['present', 'absent'] + default: present + username: + description: + - The username to bind to the LDAP server. + type: str + password: + description: + - The password associated with the username. + type: str + hostname: + description: + - The hostname or IP address of the LDAP server. + type: str + port: + description: + - The port used to connect to the LDAP server. Typically 389 for LDAP, or 636 for LDAPS. + type: int + base_group_dn: + description: + - The Distinguished Name of the LDAP subtree to search for groups. + type: str + base_user_dn: + description: + - The Distinguished Name of the LDAP subtree to search for users. + type: str + ldap_service_type: + description: + - The type of LDAP server. + choices: ['Active Directory', 'OpenLDAP', 'Other'] + type: str + type: + description: + - The type of identity source. + - Default is C(ldap). + type: str + default: ldap + ldap_user_id_attribute: + description: + - The LDAP attribute which contains the unique user name of a user. + - Should be configured if I(ldap_service_type=Other). + type: str + ldap_user_uuid_attribute: + description: + - The LDAP attribute which contains the permanent unique identity of a user. + - Should be configured if I(ldap_service_type=Other). + type: str + ldap_group_id_attribute: + description: + - The LDAP attribute which contains the group for a user. + - Should be configured if I(ldap_service_type=Other). + type: str + ldap_group_uuid_attribute: + description: + - The LDAP attribute which contains the group's permanent unique identity. + - Should be configured if I(ldap_service_type=Other). + type: str + tls: + description: + - Whether Transport Layer Security is used to connect to the LDAP server. + choices: ['STARTTLS', 'LDAPS', 'Disabled'] + type: str + default: STARTTLS + ca_cert: + description: + - Custom certificate used to connect to the LDAP server. 
+ - If a custom certificate is not supplied, the operating system CA certificate will be used. + type: str +""" + +EXAMPLES = """ + - name: test identity federation configuration + netapp.storagegrid.na_sg_grid_identity_federation: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: present + ldap_service_type: "Active Directory" + hostname: "ad.example.com" + port: 389 + username: "binduser" + password: "bindpass" + base_group_dn: "DC=example,DC=com" + base_user_dn: "DC=example,DC=com" + tls: "Disabled" + check_mode: yes + + - name: configure identity federation with AD and TLS + netapp.storagegrid.na_sg_grid_identity_federation: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: present + ldap_service_type: "Active Directory" + hostname: "ad.example.com" + port: 636 + username: "binduser" + password: "bindpass" + base_group_dn: "DC=example,DC=com" + base_user_dn: "DC=example,DC=com" + tls: "LDAPS" + ca_cert: | + -----BEGIN CERTIFICATE----- + MIIC+jCCAeICCQDmn9Gow08LTzANBgkqhkiG9w0BAQsFADA/..swCQYDVQQGEwJV + bXBsZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB..JFzNIXQEGnsgjV + JGU4giuvOLOZ8Q3gyuUbkSUQDjmjpMR8PliwJ6iW2Ity89Dv..dl1TaIYI/ansyZ + Uxk4YXeN6kUkrDtNxCg1McALzXVAfxMTtj2SFlLxne4Z6rX2..UyftQrfM13F1vY + gK8dBPz+l+X/Uozo/xNm7gxe68p9le9/pcULst1CQn5/sPqq..kgWcSvlKUItu82 + lq3B2169rovdIaNdcvaQjMPhrDGo5rvLfMN35U3Hgbz41PL5..x2BcUE6/0ab5T4 + qKBxKa3t9twj+zpUqOzyL0PFfCE+SK5fEXAS1ow4eAcLN+eB..gR/PuvGAyIPCtE + 1+X4GrECAwEAATANBgkqhkiG9w0BAQsFAAOCAQEAFpO+04Ra..FMJPH6dBmzfb7l + k04BWTvSlur6HiQdXY+oFQMJZzyI7MQ8v9HBIzS0ZAzYWLp4..VZhHmRxnrWyxVs + u783V5YfQH2L4QnBDoiDefgxyfDs2PcoF5C+X9CGXmPqzst2..y/6tdOVJzdiA== + -----END CERTIFICATE----- +""" + +RETURN = """ +resp: + description: Returns information about the StorageGRID management identity source configuration. 
+ returned: success + type: dict + sample: { + "id": "00000000-0000-0000-0000-000000000000", + "disable": false, + "hostname": "10.1.2.3", + "port": 389, + "username": "MYDOMAIN\\\\Administrator", + "password": "********", + "baseGroupDn": "DC=example,DC=com", + "baseUserDn": "DC=example,DC=com", + "ldapServiceType": "Active Directory", + "type": "ldap", + "disableTLS": false, + "enableLDAPS": false, + "caCert": "-----BEGIN CERTIFICATE----- abcdefghijkl123456780ABCDEFGHIJKL 123456/7890ABCDEFabcdefghijklABCD -----END CERTIFICATE-----\n" + } +""" + +import json +import re + +import ansible_collections.netapp.storagegrid.plugins.module_utils.netapp as netapp_utils +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp_module import NetAppModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp import SGRestAPI + + +class SgGridIdentityFederation: + """ + Configure and modify StorageGRID Grid Identity Federation + """ + + def __init__(self): + """ + Parse arguments, setup state variables, + check parameters and ensure request module is installed + """ + self.argument_spec = netapp_utils.na_storagegrid_host_argument_spec() + self.argument_spec.update( + dict( + state=dict(required=False, type="str", choices=["present", "absent"], default="present"), + username=dict(required=False, type="str"), + password=dict(required=False, type="str", no_log=True), + hostname=dict(required=False, type="str"), + port=dict(required=False, type="int"), + base_group_dn=dict(required=False, type="str"), + base_user_dn=dict(required=False, type="str"), + ldap_service_type=dict(required=False, type="str", choices=["OpenLDAP", "Active Directory", "Other"]), + type=dict(required=False, type="str", default="ldap"), + ldap_user_id_attribute=dict(required=False, type="str"), + ldap_user_uuid_attribute=dict(required=False, type="str"), + ldap_group_id_attribute=dict(required=False, type="str"), + ldap_group_uuid_attribute=dict(required=False, type="str"), + tls=dict(required=False, type="str", choices=["STARTTLS", "LDAPS", "Disabled"], default="STARTTLS"), + ca_cert=dict(required=False, type="str"), + ), + ) + + parameter_map = { + "username": "username", + "password": "password", + "hostname": "hostname", + "port": "port", + "base_group_dn": "baseGroupDn", + "base_user_dn": "baseUserDn", + "ldap_service_type": "ldapServiceType", + "ldap_user_id_attribute": "ldapUserIdAttribute", + "ldap_user_uuid_attribute": "ldapUserUUIDAttribute", + "ldap_group_id_attribute": "ldapGroupIdAttribute", + "ldap_group_uuid_attribute": "ldapGroupUUIDAttribute", + "ca_cert": "caCert", + } + self.module = AnsibleModule(argument_spec=self.argument_spec, supports_check_mode=True,) + + self.na_helper = NetAppModule() + + # set up state variables + self.parameters = self.na_helper.set_parameters(self.module.params) + # Calling generic SG rest_api class + self.rest_api = SGRestAPI(self.module) + # Checking for the parameters passed and create new parameters list + self.data = {} + + if self.parameters["state"] == "present": + self.data["disable"] = False + + for k in parameter_map.keys(): + if self.parameters.get(k) is not None: + self.data[parameter_map[k]] = self.parameters[k] + + if self.parameters.get("tls") == "STARTTLS": + self.data["disableTLS"] = False + self.data["enableLDAPS"] = False + elif self.parameters.get("tls") == "LDAPS": + self.data["disableTLS"] = False + self.data["enableLDAPS"] = True + else: + self.data["disableTLS"] = True + 
self.data["enableLDAPS"] = False + + def get_grid_identity_source(self): + api = "api/v3/grid/identity-source" + response, error = self.rest_api.get(api) + + if error: + self.module.fail_json(msg=error) + else: + return response["data"] + return None + + def update_identity_federation(self, test=False): + api = "api/v3/grid/identity-source" + + params = {} + + if test: + params["test"] = True + + response, error = self.rest_api.put(api, self.data, params=params) + if error: + self.module.fail_json(msg=error, payload=self.data) + + if response is not None: + return response["data"] + else: + return None + + def apply(self): + """ + Perform pre-checks, call functions and exit + """ + grid_identity_source = self.get_grid_identity_source() + + cd_action = self.na_helper.get_cd_action(grid_identity_source, self.parameters) + + if cd_action is None and self.parameters["state"] == "present": + # let's see if we need to update parameters + update = False + + for k in (i for i in self.data.keys() if i != "password"): + if self.data[k] != grid_identity_source.get(k): + update = True + break + + # if a password has been specified we need to update it + if self.data.get("password") and self.parameters["state"] == "present": + update = True + self.module.warn("Password attribute has been specified. Task is not idempotent.") + + if update: + self.na_helper.changed = True + + if cd_action == "delete": + # if identity federation is already in a disable state + if grid_identity_source.get("disable"): + self.na_helper.changed = False + + result_message = "" + resp_data = grid_identity_source + + if self.na_helper.changed and not self.module.check_mode: + if cd_action == "delete": + self.data = dict(disable=True) + resp_data = self.update_identity_federation() + result_message = "Grid identity federation disabled" + else: + resp_data = self.update_identity_federation() + result_message = "Grid identity federation updated" + + if self.module.check_mode: + self.update_identity_federation(test=True) + # if no error, connection test successful + self.module.exit_json(changed=self.na_helper.changed, msg="Connection test successful") + + self.module.exit_json(changed=self.na_helper.changed, msg=result_message, resp=resp_data) + + +def main(): + """ + Main function + """ + na_sg_grid_identity_federation = SgGridIdentityFederation() + na_sg_grid_identity_federation.apply() + + +if __name__ == "__main__": + main() diff --git a/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_info.py b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_info.py new file mode 100644 index 000000000..b14f88a22 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_info.py @@ -0,0 +1,405 @@ +#!/usr/bin/python + +# (c) 2020, NetApp, Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +""" NetApp StorageGRID Grid Info using REST APIs """ + + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +ANSIBLE_METADATA = {'metadata_version': '1.1', + 'status': ['preview'], + 'supported_by': 'community'} + +DOCUMENTATION = """ +module: na_sg_grid_info +author: NetApp Ansible Team (@jasonl4) <ng-ansibleteam@netapp.com> +extends_documentation_fragment: + - netapp.storagegrid.netapp.sg +short_description: NetApp StorageGRID Grid information gatherer. +description: + - This module allows you to gather various information about StorageGRID Grid configuration. 
+version_added: 20.11.0 + +options: + gather_subset: + type: list + elements: str + description: + - When supplied, this argument will restrict the information collected to a given subset. + - Either the info name or the REST API can be given. + - Possible values for this argument include + - C(grid_accounts_info) or C(grid/accounts) + - C(grid_alarms_info) or C(grid/alarms) + - C(grid_audit_info) or C(grid/audit) + - C(grid_compliance_global_info) or C(grid/compliance-global) + - C(grid_config_info) or C(grid/config) + - C(grid_config_management_info) or C(grid/config/management) + - C(grid_config_product_version_info) or C(grid/config/product-version) + - C(grid_deactivated_features_info) or C(grid/deactivated-features) + - C(grid_dns_servers_info) or C(grid/dns-servers) + - C(grid_domain_names_info) or C(grid/domain-names) + - C(grid_ec_profiles_info) or C(grid/ec-profiles) + - C(grid_expansion_info) or C(grid/expansion) + - C(grid_expansion_nodes_info) or C(grid/expansion/nodes) + - C(grid_expansion_sites_info) or C(grid/expansion/sites) + - C(grid_grid_networks_info) or C(grid/grid-networks) + - C(grid_groups_info) or C(grid/groups) + - C(grid_health_info) or C(grid/health) + - C(grid_health_topology_info) or C(grid/health/topology) + - C(grid_identity_source_info) or C(grid/identity-source) + - C(grid_ilm_criteria_info) or C(grid/ilm-criteria) + - C(grid_ilm_policies_info) or C(grid/ilm-policies) + - C(grid_ilm_rules_info) or C(grid/ilm-rules) + - C(grid_license_info) or C(grid/license) + - C(grid_management_certificate_info) or C(grid/management-certificate) + - C(grid_ntp_servers_info) or C(grid/ntp-servers) + - C(grid_recovery_available_nodes_info) or C(grid/recovery/available-nodes) + - C(grid_recovery_info) or C(grid/recovery) + - C(grid_regions_info) or C(grid/regions) + - C(grid_schemes_info) or C(grid/schemes) + - C(grid_snmp_info) or C(grid/snmp) + - C(grid_storage_api_certificate_info) or C(grid/storage-api-certificate) + - C(grid_untrusted_client_network_info) or C(grid/untrusted-client-network) + - C(grid_users_info) or C(grid/users) + - C(grid_users_root_info) or C(grid/users/root) + - C(versions_info) or C(versions) + - Can specify a list of values to include a larger subset. + default: all + parameters: + description: + - Allows for any rest option to be passed in. + type: dict +""" + +EXAMPLES = """ +- name: Gather StorageGRID Grid info + netapp.storagegrid.na_sg_grid_info: + api_url: "https://1.2.3.4/" + auth_token: "storagegrid-auth-token" + validate_certs: false + register: sg_grid_info + +- name: Gather StorageGRID Grid info for grid/accounts and grid/config subsets + netapp.storagegrid.na_sg_grid_info: + api_url: "https://1.2.3.4/" + auth_token: "storagegrid-auth-token" + validate_certs: false + gather_subset: + - grid_accounts_info + - grid/config + register: sg_grid_info + +- name: Gather StorageGRID Grid info for all subsets + netapp.storagegrid.na_sg_grid_info: + api_url: "https://1.2.3.4/" + auth_token: "storagegrid-auth-token" + validate_certs: false + gather_subset: + - all + register: sg_grid_info + +- name: Gather StorageGRID Grid info for grid/accounts and grid/users subsets, limit to 5 results for each subset + netapp.storagegrid.na_sg_grid_info: + api_url: "https://1.2.3.4/" + auth_token: "storagegrid-auth-token" + validate_certs: false + gather_subset: + - grid/accounts + - grid/users + parameters: + limit: 5 + register: sg_grid_info +""" + +RETURN = """ +sg_info: + description: Returns various information about the StorageGRID Grid configuration. 
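+  # Illustrative read-back, reusing the 'sg_grid_info' register name from the examples
+  # above; each gathered subset is keyed by its REST path:
+  #   - name: Show the grid accounts subset
+  #     ansible.builtin.debug:
+  #       var: sg_grid_info.sg_info['grid/accounts']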
+ returned: always + type: dict + sample: { + "grid/accounts": {...}, + "grid/alarms": {...}, + "grid/audit": {...}, + "grid/compliance-global": {...}, + "grid/config": {...}, + "grid/config/management": {...}, + "grid/config/product-version": {...}, + "grid/deactivated-features": {...}, + "grid/dns-servers": {...}, + "grid/domain-names": {...}, + "grid/ec-profiles": {...}, + "grid/expansion": {...}, + "grid/expansion/nodes": {...}, + "grid/expansion/sites": {...}, + "grid/networks": {...}, + "grid/groups": {...}, + "grid/health": {...}, + "grid/health/topology": {...}, + "grid/identity-source": {...}, + "grid/ilm-criteria": {...}, + "grid/ilm-policies": {...}, + "grid/ilm-rules": {...}, + "grid/license": {...}, + "grid/management-certificate": {...}, + "grid/ntp-servers": {...}, + "grid/recovery/available-nodes": {...}, + "grid/recovery": {...}, + "grid/regions": {...}, + "grid/schemes": {...}, + "grid/snmp": {...}, + "grid/storage-api-certificate": {...}, + "grid/untrusted-client-network": {...}, + "grid/users": {...}, + "grid/users/root": {...}, + "grid/versions": {...} + } +""" + +from ansible.module_utils.basic import AnsibleModule +import ansible_collections.netapp.storagegrid.plugins.module_utils.netapp as netapp_utils +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp_module import NetAppModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp import SGRestAPI + + +class NetAppSgGatherInfo(object): + """ Class with gather info methods """ + + def __init__(self): + """ + Parse arguments, setup variables, check parameters and ensure + request module is installed. + """ + self.argument_spec = netapp_utils.na_storagegrid_host_argument_spec() + self.argument_spec.update(dict( + gather_subset=dict(default=['all'], type='list', elements='str', required=False), + parameters=dict(type='dict', required=False) + )) + + self.module = AnsibleModule( + argument_spec=self.argument_spec, + supports_check_mode=True + ) + + # set up variables + self.na_helper = NetAppModule() + self.parameters = self.na_helper.set_parameters(self.module.params) + self.rest_api = SGRestAPI(self.module) + + def get_subset_info(self, gather_subset_info): + """ + Gather StorageGRID information for the given subset using REST APIs + Input for REST APIs call : (api, data) + return gathered_sg_info + """ + + api = gather_subset_info['api_call'] + data = {} + # allow for passing in any additional rest api parameters + if self.parameters.get('parameters'): + for each in self.parameters['parameters']: + data[each] = self.parameters['parameters'][each] + + gathered_sg_info, error = self.rest_api.get(api, data) + + if error: + self.module.fail_json(msg=error) + else: + return gathered_sg_info + + return None + + def convert_subsets(self): + """ Convert an info to the REST API """ + info_to_rest_mapping = { + 'grid_accounts_info': 'grid/accounts', + 'grid_alarms_info': 'grid/alarms', + 'grid_audit_info': 'grid/audit', + 'grid_compliance_global_info': 'grid/compliance-global', + 'grid_config_info': 'grid/config', + 'grid_config_management_info': 'grid/config/management', + 'grid_config_product_version_info': 'grid/config/product-version', + 'grid_deactivated_features_info': 'grid/deactivated-features', + 'grid_dns_servers_info': 'grid/dns-servers', + 'grid_domain_names_info': 'grid/domain-names', + 'grid_ec_profiles_info': 'grid/ec-profiles', + 'grid_expansion_info': 'grid/expansion', + 'grid_expansion_nodes_info': 'grid/expansion/nodes', + 'grid_expansion_sites_info': 
'grid/expansion/sites', + 'grid_grid_networks_info': 'grid/grid-networks', + 'grid_groups_info': 'grid/groups', + 'grid_health_info': 'grid/health', + 'grid_health_topology_info': 'grid/health/topology', + 'grid_identity_source_info': 'grid/identity-source', + 'grid_ilm_criteria_info': 'grid/ilm-criteria', + 'grid_ilm_policies_info': 'grid/ilm-policies', + 'grid_ilm_rules_info': 'grid/ilm-rules', + 'grid_license_info': 'grid/license', + 'grid_management_certificate_info': 'grid/management-certificate', + 'grid_ntp_servers_info': 'grid/ntp-servers', + 'grid_recovery_available_nodes_info': 'grid/recovery/available-nodes', + 'grid_recovery_info': 'grid/recovery', + 'grid_regions_info': 'grid/regions', + 'grid_schemes_info': 'grid/schemes', + 'grid_snmp_info': 'grid/snmp', + 'grid_storage_api_certificate_info': 'grid/storage-api-certificate', + 'grid_untrusted_client_network_info': 'grid/untrusted-client-network', + 'grid_users_info': 'grid/users', + 'grid_users_root_info': 'grid/users/root', + 'versions_info': 'versions', + } + # Add rest API names as there info version, also make sure we don't add a duplicate + subsets = [] + for subset in self.parameters['gather_subset']: + if subset in info_to_rest_mapping: + if info_to_rest_mapping[subset] not in subsets: + subsets.append(info_to_rest_mapping[subset]) + else: + if subset not in subsets: + subsets.append(subset) + return subsets + + def apply(self): + """ Perform pre-checks, call functions and exit """ + + result_message = dict() + + # Defining gather_subset and appropriate api_call + get_sg_subset_info = { + 'grid/accounts': { + 'api_call': 'api/v3/grid/accounts', + }, + 'grid/alarms': { + 'api_call': 'api/v3/grid/alarms', + }, + 'grid/audit': { + 'api_call': 'api/v3/grid/audit', + }, + 'grid/compliance-global': { + 'api_call': 'api/v3/grid/compliance-global', + }, + 'grid/config': { + 'api_call': 'api/v3/grid/config', + }, + 'grid/config/management': { + 'api_call': 'api/v3/grid/config/management', + }, + 'grid/config/product-version': { + 'api_call': 'api/v3/grid/config/product-version', + }, + 'grid/deactivated-features': { + 'api_call': 'api/v3/grid/deactivated-features', + }, + 'grid/dns-servers': { + 'api_call': 'api/v3/grid/dns-servers', + }, + 'grid/domain-names': { + 'api_call': 'api/v3/grid/domain-names', + }, + 'grid/ec-profiles': { + 'api_call': 'api/v3/grid/ec-profiles', + }, + 'grid/expansion': { + 'api_call': 'api/v3/grid/expansion', + }, + 'grid/expansion/nodes': { + 'api_call': 'api/v3/grid/expansion/nodes', + }, + 'grid/expansion/sites': { + 'api_call': 'api/v3/grid/expansion/sites', + }, + 'grid/grid-networks': { + 'api_call': 'api/v3/grid/grid-networks', + }, + 'grid/groups': { + 'api_call': 'api/v3/grid/groups', + }, + 'grid/health': { + 'api_call': 'api/v3/grid/health', + }, + 'grid/health/topology': { + 'api_call': 'api/v3/grid/health/topology', + }, + 'grid/identity-source': { + 'api_call': 'api/v3/grid/identity-source', + }, + 'grid/ilm-criteria': { + 'api_call': 'api/v3/grid/ilm-criteria', + }, + 'grid/ilm-policies': { + 'api_call': 'api/v3/grid/ilm-policies', + }, + 'grid/ilm-rules': { + 'api_call': 'api/v3/grid/ilm-rules', + }, + 'grid/license': { + 'api_call': 'api/v3/grid/license', + }, + 'grid/management-certificate': { + 'api_call': 'api/v3/grid/management-certificate', + }, + 'grid/ntp-servers': { + 'api_call': 'api/v3/grid/ntp-servers', + }, + 'grid/recovery/available-nodes': { + 'api_call': 'api/v3/grid/recovery/available-nodes', + }, + 'grid/recovery': { + 'api_call': 'api/v3/grid/recovery', + }, + 
'grid/regions': { + 'api_call': 'api/v3/grid/regions', + }, + 'grid/schemes': { + 'api_call': 'api/v3/grid/schemes', + }, + 'grid/snmp': { + 'api_call': 'api/v3/grid/snmp', + }, + 'grid/storage-api-certificate': { + 'api_call': 'api/v3/grid/storage-api-certificate', + }, + 'grid/untrusted-client-network': { + 'api_call': 'api/v3/grid/untrusted-client-network', + }, + 'grid/users': { + 'api_call': 'api/v3/grid/users', + }, + 'grid/users/root': { + 'api_call': 'api/v3/grid/users/root', + }, + 'versions': { + 'api_call': 'api/v3/versions', + }, + } + + if 'all' in self.parameters['gather_subset']: + # If all in subset list, get the information of all subsets + self.parameters['gather_subset'] = sorted(get_sg_subset_info.keys()) + + converted_subsets = self.convert_subsets() + + for subset in converted_subsets: + try: + # Verify whether the supported subset passed + specified_subset = get_sg_subset_info[subset] + except KeyError: + self.module.fail_json(msg="Specified subset %s not found, supported subsets are %s" % + (subset, list(get_sg_subset_info.keys()))) + + result_message[subset] = self.get_subset_info(specified_subset) + + self.module.exit_json(changed='False', sg_info=result_message) + + +def main(): + """ Main function """ + obj = NetAppSgGatherInfo() + obj.apply() + + +if __name__ == '__main__': + main() diff --git a/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_ntp.py b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_ntp.py new file mode 100644 index 000000000..0c22ba2c1 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_ntp.py @@ -0,0 +1,173 @@ +#!/usr/bin/python + +# (c) 2020, NetApp Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +"""NetApp StorageGRID - Manage Grid NTP Servers""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + + +ANSIBLE_METADATA = { + "metadata_version": "1.1", + "status": ["preview"], + "supported_by": "community", +} + + +DOCUMENTATION = """ +module: na_sg_grid_ntp +short_description: NetApp StorageGRID manage external NTP servers for the grid. +extends_documentation_fragment: + - netapp.storagegrid.netapp.sg +version_added: '20.6.0' +author: NetApp Ansible Team (@jkandati) <ng-sg-ansibleteam@netapp.com> +description: +- Update NTP server on NetApp StorageGRID. +options: + state: + description: + - Whether the specified user should exist or not. + type: str + choices: ['present'] + default: present + ntp_servers: + description: + - List of comma separated NTP server address. + type: list + elements: str + required: true + passphrase: + description: + - passphrase for GRID. + type: str + required: true +""" + +EXAMPLES = """ + - name: update NTP servers + netapp.storagegrid.na_sg_grid_ntp: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: present + passphrase: "{{ grid_pass }}" + ntp_servers: "x.x.x.x,xx.x.xx.x" +""" + +RETURN = """ +resp: + description: Returns information about the configured NTP servers. 
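+  # Note (a sketch, not required syntax): because ntp_servers is a list option, a YAML
+  # list is also accepted, so the comma separated string in the example above is
+  # equivalent to
+  #   ntp_servers:
+  #     - x.x.x.x
+  #     - xx.x.xx.x
+  # and the resulting server list is returned under C(resp), as sampled below.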
+ returned: success + type: list + elements: str + sample: ["10.0.0.1", "10.0.0.2", "10.0.0.3", "10.0.0.4"] +""" + +import ansible_collections.netapp.storagegrid.plugins.module_utils.netapp as netapp_utils +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp_module import NetAppModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp import SGRestAPI + + +class SgGridNtp(object): + """ + Create, modify and delete NTP entries for StorageGRID + """ + + def __init__(self): + """ + Parse arguments, setup state variables, + check parameters and ensure request module is installed + """ + self.argument_spec = netapp_utils.na_storagegrid_host_argument_spec() + self.argument_spec.update( + dict( + state=dict(required=False, type="str", choices=["present"], default="present"), + ntp_servers=dict(required=True, type="list", elements="str"), + passphrase=dict(required=True, type="str", no_log=True), + ) + ) + + self.module = AnsibleModule( + argument_spec=self.argument_spec, + # required_if=[("state", "present", ["state", "name", "protocol"])], + supports_check_mode=True, + ) + + self.na_helper = NetAppModule() + + # set up state variables + self.parameters = self.na_helper.set_parameters(self.module.params) + # Calling generic SG rest_api class + self.rest_api = SGRestAPI(self.module) + # Checking for the parameters passed and create new parameters list + self.data = self.parameters["ntp_servers"] + self.passphrase = self.parameters["passphrase"] + self.ntp_input = {"passphrase": self.passphrase, "servers": self.data} + + def get_grid_ntp(self): + # Check if tenant account exists + # Return tenant account info if found, or None + api = "api/v3/grid/ntp-servers" + + response, error = self.rest_api.get(api) + + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def update_grid_ntp(self): + api = "api/v3/grid/ntp-servers/update" + + response, error = self.rest_api.post(api, self.ntp_input) + + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def apply(self): + """ + Perform pre-checks, call functions and exit + """ + grid_ntp = self.get_grid_ntp() + + cd_action = self.na_helper.get_cd_action(grid_ntp, self.parameters["ntp_servers"]) + + if cd_action is None and self.parameters["state"] == "present": + # let's see if we need to update parameters + update = False + + ntp_diff = [i for i in self.data + grid_ntp if i not in self.data or i not in grid_ntp] + if ntp_diff: + update = True + + if update: + self.na_helper.changed = True + + result_message = "" + resp_data = grid_ntp + if self.na_helper.changed: + if self.module.check_mode: + pass + else: + resp_data = self.update_grid_ntp() + result_message = "Grid NTP updated" + + self.module.exit_json(changed=self.na_helper.changed, msg=result_message, resp=resp_data) + + +def main(): + """ + Main function + """ + na_sg_grid_ntp = SgGridNtp() + na_sg_grid_ntp.apply() + + +if __name__ == "__main__": + main() diff --git a/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_regions.py b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_regions.py new file mode 100644 index 000000000..58179cf03 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_regions.py @@ -0,0 +1,163 @@ +#!/usr/bin/python + +# (c) 2020, NetApp Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +"""NetApp StorageGRID - Manage Grid 
Regions""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + + +ANSIBLE_METADATA = { + "metadata_version": "1.1", + "status": ["preview"], + "supported_by": "community", +} + + +DOCUMENTATION = """ +module: na_sg_grid_regions +short_description: NetApp StorageGRID manage Regions for the grid. +extends_documentation_fragment: + - netapp.storagegrid.netapp.sg +version_added: '20.6.0' +author: NetApp Ansible Team (@joshedmonds) <ng-ansibleteam@netapp.com> +description: +- Create, Update, Delete Users within a NetApp StorageGRID tenant. +options: + state: + description: + - Whether the specified user should exist or not. + type: str + choices: ['present'] + default: present + regions: + description: + - List of regions + required: true + type: list + elements: str +""" + +EXAMPLES = """ + - name: update Regions + netapp.storagegrid.na_sg_grid_regions: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: present + regions: "us-east-1" +""" + +RETURN = """ +resp: + description: Returns information about the configured regions. + returned: success + type: list + elements: str + sample: ["us-east-1", "us-central-1"] +""" + +import ansible_collections.netapp.storagegrid.plugins.module_utils.netapp as netapp_utils +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp_module import NetAppModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp import SGRestAPI + + +class SgGridRegions(object): + """ + Create, modify and delete Regions for StorageGRID + """ + + def __init__(self): + """ + Parse arguments, setup state variables, + check parameters and ensure request module is installed + """ + self.argument_spec = netapp_utils.na_storagegrid_host_argument_spec() + self.argument_spec.update( + dict( + state=dict(required=False, type="str", choices=["present"], default="present"), + regions=dict(required=True, type="list", elements="str"), + ) + ) + + self.module = AnsibleModule( + argument_spec=self.argument_spec, + # required_if=[("state", "present", ["state", "name", "protocol"])], + supports_check_mode=True, + ) + + self.na_helper = NetAppModule() + + # set up state variables + self.parameters = self.na_helper.set_parameters(self.module.params) + # Calling generic SG rest_api class + self.rest_api = SGRestAPI(self.module) + # Checking for the parameters passed and create new parameters list + self.data = self.parameters["regions"] + + def get_grid_regions(self): + # Check if tenant account exists + # Return tenant account info if found, or None + api = "api/v3/grid/regions" + + response, error = self.rest_api.get(api) + + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def update_grid_regions(self): + api = "api/v3/grid/regions" + + response, error = self.rest_api.put(api, self.data) + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def apply(self): + """ + Perform pre-checks, call functions and exit + """ + grid_regions = self.get_grid_regions() + + cd_action = self.na_helper.get_cd_action(grid_regions, self.parameters["regions"]) + + if cd_action is None and self.parameters["state"] == "present": + # let's see if we need to update parameters + update = False + + regions_diff = [i for i in self.data + grid_regions if i not in self.data or i not in grid_regions] + if regions_diff: + update = True + + if update: + self.na_helper.changed = 
True + + result_message = "" + resp_data = grid_regions + if self.na_helper.changed: + if self.module.check_mode: + pass + else: + resp_data = self.update_grid_regions() + result_message = "Grid Regions updated" + + self.module.exit_json(changed=self.na_helper.changed, msg=result_message, resp=resp_data) + + +def main(): + """ + Main function + """ + na_sg_grid_regions = SgGridRegions() + na_sg_grid_regions.apply() + + +if __name__ == "__main__": + main() diff --git a/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_traffic_classes.py b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_traffic_classes.py new file mode 100644 index 000000000..9901a3e00 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_traffic_classes.py @@ -0,0 +1,375 @@ +#!/usr/bin/python + +# (c) 2022, NetApp Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +"""NetApp StorageGRID - Manage Traffic Classification Policies""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +DOCUMENTATION = """ +module: na_sg_grid_traffic_classes +short_description: Manage Traffic Classification Policy configuration on StorageGRID. +extends_documentation_fragment: + - netapp.storagegrid.netapp.sg +version_added: '21.10.0' +author: NetApp Ansible Team (@joshedmonds) <ng-ansibleteam@netapp.com> +description: +- Create, Update, Delete Traffic Classification Policies on NetApp StorageGRID. +options: + state: + description: + - Whether the specified Traffic Classification Policy should exist. + type: str + choices: ['present', 'absent'] + default: present + name: + description: + - Name of the Traffic Classification Policy. + type: str + policy_id: + description: + - Traffic Classification Policy ID. + - May be used for modify or delete operation. + type: str + description: + description: + - Description of the Traffic Classification Policy. + type: str + matchers: + description: + - A set of parameters to match. + - The traffic class will match requests where any of these matchers match. + type: list + elements: dict + suboptions: + type: + description: + - The attribute of the request to match. + - C(bucket) - The S3 bucket (or Swift container) being accessed. + - C(bucket-regex) - A regular expression to evaluate against the S3 bucket (or Swift container) being accessed. + - C(cidr) - Matches if the client request source IP is in the specified IPv4 CIDR (RFC4632). + - C(tenant) - Matches if the S3 bucket (or Swift container) is owned by the tenant account with this ID. + choices: ['bucket', 'bucket-regex', 'cidr', 'endpoint', 'tenant'] + type: str + required: true + inverse: + description: + - If I(true), entities that match the value are excluded. + type: bool + default: false + members: + description: + - A list of members to match on. + type: list + elements: str + required: true + limits: + description: + - Optional limits to impose on client requests matched by this traffic class. + - Only one of each limit type can be specified. + type: list + elements: dict + suboptions: + type: + description: + - The type of limit to apply. + - C(aggregateBandwidthIn) - The maximum combined upload bandwidth in bytes/second of all concurrent requests that match this policy. + - C(aggregateBandwidthOut) - The maximum combined download bandwidth in bytes/second of all concurrent requests that match this policy. 
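+        # (All bandwidth limits are expressed in bytes/second, so the example value of
+        #  100000000 used further down corresponds to roughly 100 MB/s.)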
+ - C(concurrentReadRequests) - The maximum number of download requests that can be in progress at the same time. + - C(concurrentWriteRequests) - The maximum number of upload requests that can be in progress at the same time. + - C(readRequestRate) - The maximum number of download requests that can be started each second. + - C(writeRequestRate) - The maximum number of download requests that can be started each second. + - C(perRequestBandwidthIn) - The maximum upload bandwidth in bytes/second allowed for each request that matches this policy. + - C(perRequestBandwidthOut) - The maximum download bandwidth in bytes/second allowed for each request that matches this policy. + choices: [ + 'aggregateBandwidthIn', + 'aggregateBandwidthOut', + 'concurrentReadRequests', + 'concurrentWriteRequests', + 'readRequestRate', + 'writeRequestRate', + 'perRequestBandwidthIn', + 'perRequestBandwidthOut' + ] + type: str + required: true + value: + description: + - The limit to apply. + - Limit values are type specific. + type: int + required: true +""" + +EXAMPLES = """ + - name: create Traffic Classification Policy with bandwidth limit on buckets + netapp.storagegrid.na_sg_grid_traffic_classes: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: present + name: Traffic-Policy1 + matchers: + - type: bucket + members: bucket1,anotherbucket + limits: + - type: aggregateBandwidthOut + value: 100000000 + + - name: create Traffic Classification Policy with bandwidth limits except for specific tenant account + netapp.storagegrid.na_sg_grid_traffic_classes: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: present + name: Fabricpool-Policy + description: "Limit all to 500MB/s except FabricPool tenant" + matchers: + - type: tenant + inverse: True + members: 12345678901234567890 + limits: + - type: aggregateBandwidthIn + value: 50000000 + - type: aggregateBandwidthOut + value: 50000000 + + - name: rename Traffic Classification Policy + netapp.storagegrid.na_sg_grid_traffic_classes: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: present + policy_id: 00000000-0000-0000-0000-000000000000 + name: Traffic-Policy1-New-Name + matchers: + - type: bucket + members: bucket1,anotherbucket + limits: + - type: aggregateBandwidthOut + value: 100000000 + + - name: delete Traffic Classification Policy + netapp.storagegrid.na_sg_grid_traffic_classes: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: absent + name: Traffic-Policy1 +""" + +RETURN = """ +resp: + description: Returns information about the StorageGRID Traffic Classification Policy. 
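+  # Sketch of a follow-up task (the register name 'tc_result' is hypothetical and
+  # connection options are omitted): the returned id can drive a later change or
+  # deletion via the policy_id option,
+  #   - netapp.storagegrid.na_sg_grid_traffic_classes:
+  #       policy_id: "{{ tc_result.resp.id }}"
+  #       state: absent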
+ returned: success + type: dict + sample: { + "id": "6b2946e6-7fed-40d0-9262-8e922580aba7", + "name": "Traffic-Policy1", + "description": "Traffic Classification Policy 1", + "matchers": [ + { + "type": "cidr", + "inverse": False, + "members": [ + "192.168.50.0/24" + ] + }, + { + "type": "bucket", + "inverse": False, + "members": [ + "mybucket1", + "mybucket2" + ] + }, + ], + "limits": [ + { + "type": "aggregateBandwidthOut", + "value": 100000000 + } + ], + } +""" + +import json + +import ansible_collections.netapp.storagegrid.plugins.module_utils.netapp as netapp_utils +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp_module import NetAppModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp import SGRestAPI + + +class SgGridTrafficClasses: + """ + Create, modify and delete Traffic Classification Policies for StorageGRID + """ + + def __init__(self): + """ + Parse arguments, setup state variables, + check parameters and ensure request module is installed + """ + self.argument_spec = netapp_utils.na_storagegrid_host_argument_spec() + self.argument_spec.update( + dict( + state=dict(required=False, type="str", choices=["present", "absent"], default="present"), + name=dict(required=False, type="str"), + policy_id=dict(required=False, type="str"), + description=dict(required=False, type="str"), + matchers=dict( + required=False, + type="list", + elements="dict", + options=dict( + type=dict( + required=True, + type="str", + choices=["bucket", "bucket-regex", "cidr", "endpoint", "tenant"], + ), + inverse=dict(required=False, type="bool", default="false"), + members=dict(required=True, type="list", elements="str"), + ), + ), + limits=dict( + required=False, + type="list", + elements="dict", + options=dict( + type=dict( + required=True, + type="str", + choices=[ + "aggregateBandwidthIn", + "aggregateBandwidthOut", + "concurrentReadRequests", + "concurrentWriteRequests", + "readRequestRate", + "writeRequestRate", + "perRequestBandwidthIn", + "perRequestBandwidthOut", + ], + ), + value=dict(required=True, type="int"), + ), + ), + ) + ) + + self.module = AnsibleModule( + argument_spec=self.argument_spec, + required_if=[("state", "present", ["name"])], + required_one_of=[("name", "policy_id")], + ) + + self.na_helper = NetAppModule() + + # set up state variables + self.parameters = self.na_helper.set_parameters(self.module.params) + # Calling generic SG rest_api class + self.rest_api = SGRestAPI(self.module) + # Checking for the parameters passed and create new parameters list + self.data = {} + + if self.parameters["state"] == "present": + for k in ["name", "description", "matchers", "limits"]: + if self.parameters.get(k) is not None: + self.data[k] = self.parameters[k] + + def get_traffic_class_policy_id(self): + # Check if Traffic Classification Policy exists + # Return policy ID if found, or None + api = "api/v3/grid/traffic-classes/policies" + response, error = self.rest_api.get(api) + + if error: + self.module.fail_json(msg=error) + + return next((item["id"] for item in response.get("data") if item["name"] == self.parameters["name"]), None) + + def get_traffic_class_policy(self, policy_id): + api = "api/v3/grid/traffic-classes/policies/%s" % policy_id + response, error = self.rest_api.get(api) + + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def create_traffic_class_policy(self): + api = "api/v3/grid/traffic-classes/policies" + # 
self.module.fail_json(msg=self.data) + response, error = self.rest_api.post(api, self.data) + + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def delete_traffic_class_policy(self, policy_id): + api = "api/v3/grid/traffic-classes/policies/%s" % policy_id + dummy, error = self.rest_api.delete(api, self.data) + + if error: + self.module.fail_json(msg=error) + + def update_traffic_class_policy(self, policy_id): + api = "api/v3/grid/traffic-classes/policies/%s" % policy_id + response, error = self.rest_api.put(api, self.data) + + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def apply(self): + """ + Perform pre-checks, call functions and exit + """ + + traffic_class_policy = None + + if self.parameters.get("policy_id"): + traffic_class_policy = self.get_traffic_class_policy(self.parameters["policy_id"]) + else: + policy_id = self.get_traffic_class_policy_id() + if policy_id: + traffic_class_policy = self.get_traffic_class_policy(policy_id) + + cd_action = self.na_helper.get_cd_action(traffic_class_policy, self.parameters) + + if cd_action is None and self.parameters["state"] == "present": + # let's see if we need to update parameters + modify = self.na_helper.get_modified_attributes(traffic_class_policy, self.data) + + result_message = "" + resp_data = {} + + if self.na_helper.changed and not self.module.check_mode: + if cd_action == "delete": + self.delete_traffic_class_policy(traffic_class_policy["id"]) + result_message = "Traffic Classification Policy deleted" + elif cd_action == "create": + resp_data = self.create_traffic_class_policy() + result_message = "Traffic Classification Policy created" + elif modify: + resp_data = self.update_traffic_class_policy(traffic_class_policy["id"]) + result_message = "Traffic Classification Policy updated" + + self.module.exit_json(changed=self.na_helper.changed, msg=result_message, resp=resp_data) + + +def main(): + """ + Main function + """ + na_sg_grid_traffic_classes = SgGridTrafficClasses() + na_sg_grid_traffic_classes.apply() + + +if __name__ == "__main__": + main() diff --git a/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_user.py b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_user.py new file mode 100644 index 000000000..521d4f566 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_grid_user.py @@ -0,0 +1,316 @@ +#!/usr/bin/python + +# (c) 2020, NetApp Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +"""NetApp StorageGRID - Manage Grid-administration Users""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + + +ANSIBLE_METADATA = { + "metadata_version": "1.1", + "status": ["preview"], + "supported_by": "community", +} + + +DOCUMENTATION = """ +module: na_sg_grid_user +short_description: NetApp StorageGRID manage users. +extends_documentation_fragment: + - netapp.storagegrid.netapp.sg +version_added: '20.6.0' +author: NetApp Ansible Team (@joshedmonds) <ng-ansibleteam@netapp.com> +description: +- Create, Update, Delete Administrative Users within NetApp StorageGRID. +options: + state: + description: + - Whether the specified user should exist or not. + type: str + choices: ['present', 'absent'] + default: present + full_name: + description: + - Full Name of the user. + - Required for create operation + type: str + unique_name: + description: + - Unique Name for the user. 
Must begin with C(user/) or C(federated-user/) + - Required for create, modify or delete operation. + type: str + required: true + member_of: + description: + - List of C(unique_groups) that the user is a member of. + type: list + elements: str + password: + description: + - Set a password for a local user. Does not apply to federated users. + - Requires root privilege. + required: false + type: str + update_password: + description: + - Choose when to update the password. + - When set to C(always), the password will always be updated. + - When set to C(on_create), the password will only be set upon a new user creation. + default: on_create + choices: + - on_create + - always + type: str + disable: + description: + - Disable the user from signing in. Does not apply to federated users. + type: bool +""" + +EXAMPLES = """ + - name: create a user + netapp.storagegrid.na_sg_grid_user: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: present + full_name: ansibleuser100 + unique_name: user/ansibleuser100 + member_of: "group/ansiblegroup100" + disable: false + +""" + +RETURN = """ +resp: + description: Returns information about the StorageGRID Grid user. + returned: always + type: dict + sample: { + "fullName": "Example User", + "memberOf": ["00000000-0000-0000-0000-000000000000"], + "disable": false, + "uniqueName": "user/Example", + "accountId": "0", + "id": "00000000-0000-0000-0000-000000000000", + "federated": false, + "userURN": "urn:sgws:identity::0:user/Example" + } +""" + +import json +import re + + +import ansible_collections.netapp.storagegrid.plugins.module_utils.netapp as netapp_utils +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp_module import NetAppModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp import SGRestAPI + + +class SgGridUser(object): + """ + Create, modify and delete user within a StorageGRID Tenant Account + """ + + def __init__(self): + """ + Parse arguments, setup state variables, + check parameters and ensure request module is installed + """ + self.argument_spec = netapp_utils.na_storagegrid_host_argument_spec() + self.argument_spec.update( + dict( + state=dict(required=False, type="str", choices=["present", "absent"], default="present"), + full_name=dict(required=False, type="str"), + unique_name=dict(required=True, type="str"), + member_of=dict(required=False, type="list", elements="str"), + disable=dict(required=False, type="bool"), + password=dict(required=False, type="str", no_log=True), + update_password=dict(default="on_create", choices=["on_create", "always"]), + ) + ) + + self.module = AnsibleModule( + argument_spec=self.argument_spec, + required_if=[("state", "present", ["full_name", "unique_name"])], + supports_check_mode=True, + ) + + self.na_helper = NetAppModule() + + # set up state variables + self.parameters = self.na_helper.set_parameters(self.module.params) + # Calling generic SG rest_api class + self.rest_api = SGRestAPI(self.module) + # Checking for the parameters passed and create new parameters list + self.data = {} + self.data["memberOf"] = [] + if self.parameters.get("full_name"): + self.data["fullName"] = self.parameters["full_name"] + if self.parameters.get("unique_name"): + self.data["uniqueName"] = self.parameters["unique_name"] + + if self.parameters.get("disable") is not None: + self.data["disable"] = self.parameters["disable"] + + re_local_user = 
re.compile("^user/") + re_fed_user = re.compile("^federated-user/") + + if ( + re_local_user.match(self.parameters["unique_name"]) is None + and re_fed_user.match(self.parameters["unique_name"]) is None + ): + self.module.fail_json(msg="unique_name must begin with 'user/' or 'federated-user/'") + + self.pw_change = {} + if self.parameters.get("password") is not None: + if re_fed_user.match(self.parameters["unique_name"]): + self.module.fail_json(msg="password cannot be set for a federated user") + self.pw_change["password"] = self.parameters["password"] + + def get_grid_groups(self): + # Get list of admin groups + # Retrun mapping of uniqueName to ids if found, or None + api = "api/v3/grid/groups?limit=350" + + response, error = self.rest_api.get(api) + + if error: + self.module.fail_json(msg=error) + + if response["data"]: + name_to_id_map = dict(zip([i["uniqueName"] for i in response["data"]], [j["id"] for j in response["data"]])) + return name_to_id_map + + return None + + def get_grid_user(self, unique_name): + # Use the unique name to check if the user exists + api = "api/v3/grid/users/%s" % unique_name + response, error = self.rest_api.get(api) + + if error: + if response["code"] != 404: + self.module.fail_json(msg=error["text"]) + else: + return response["data"] + return None + + def create_grid_user(self): + api = "api/v3/grid/users" + + response, error = self.rest_api.post(api, self.data) + + if error: + self.module.fail_json(msg=error["text"]) + + return response["data"] + + def delete_grid_user(self, user_id): + api = "api/v3/grid/users/" + user_id + + self.data = None + response, error = self.rest_api.delete(api, self.data) + if error: + self.module.fail_json(msg=error) + + def update_grid_user(self, user_id): + api = "api/v3/grid/users/" + user_id + + response, error = self.rest_api.put(api, self.data) + if error: + self.module.fail_json(msg=error["text"]) + + return response["data"] + + def set_grid_user_password(self, unique_name): + api = "api/v3/grid/users/%s/change-password" % unique_name + response, error = self.rest_api.post(api, self.pw_change) + + if error: + self.module.fail_json(msg=error["text"]) + + def apply(self): + """ + Perform pre-checks, call functions and exit + """ + grid_user = self.get_grid_user(self.parameters["unique_name"]) + + if self.parameters.get("member_of"): + grid_groups = self.get_grid_groups() + try: + self.data["memberOf"] = [grid_groups[x] for x in self.parameters["member_of"]] + except KeyError as e: + self.module.fail_json(msg="Invalid unique_group supplied: '%s' not found" % e.args[0]) + + cd_action = self.na_helper.get_cd_action(grid_user, self.parameters) + + if cd_action is None and self.parameters["state"] == "present": + # let's see if we need to update parameters + update = False + + if grid_user["memberOf"] is None: + member_of_diff = [] + else: + member_of_diff = [ + i + for i in self.data["memberOf"] + grid_user["memberOf"] + if i not in self.data["memberOf"] or i not in grid_user["memberOf"] + ] + if member_of_diff: + update = True + + if self.parameters.get("disable") is not None and self.parameters["disable"] != grid_user.get("disable"): + update = True + + if update: + self.na_helper.changed = True + result_message = "" + resp_data = grid_user + if self.na_helper.changed: + if self.module.check_mode: + pass + else: + if cd_action == "delete": + self.delete_grid_user(grid_user["id"]) + result_message = "Grid User deleted" + + elif cd_action == "create": + resp_data = self.create_grid_user() + result_message = "Grid User 
created" + + else: + resp_data = self.update_grid_user(grid_user["id"]) + result_message = "Grid User updated" + + # If a password has been set + if self.pw_change: + if self.module.check_mode: + pass + else: + # Only update the password if update_password is always, or a create activity has occurred + if cd_action == "create" or self.parameters["update_password"] == "always": + self.set_grid_user_password(self.parameters["unique_name"]) + self.na_helper.changed = True + + results = [result_message, "Grid User password updated"] + result_message = "; ".join(filter(None, results)) + + self.module.exit_json(changed=self.na_helper.changed, msg=result_message, resp=resp_data) + + +def main(): + """ + Main function + """ + na_sg_grid_user = SgGridUser() + na_sg_grid_user.apply() + + +if __name__ == "__main__": + main() diff --git a/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_org_container.py b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_org_container.py new file mode 100644 index 000000000..da9663184 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_org_container.py @@ -0,0 +1,352 @@ +#!/usr/bin/python + +# (c) 2020, NetApp Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +"""NetApp StorageGRID - Manage Buckets""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + + +ANSIBLE_METADATA = { + "metadata_version": "1.1", + "status": ["preview"], + "supported_by": "community", +} + + +DOCUMENTATION = """ +module: na_sg_org_container +short_description: Manage buckets on StorageGRID. +extends_documentation_fragment: + - netapp.storagegrid.netapp.sg +version_added: '20.6.0' +author: NetApp Ansible Team (@joshedmonds) <ng-ansibleteam@netapp.com> +description: +- Create S3 buckets on NetApp StorageGRID. +options: + state: + description: + - Whether the specified bucket should exist or not. + type: str + choices: ['present', 'absent'] + default: present + name: + description: + - Name of the bucket. + required: true + type: str + region: + description: + - Set a region for the bucket. + type: str + compliance: + description: + - Configure compliance settings for an S3 bucket. + - Cannot be specified along with I(s3_object_lock_enabled). + type: dict + suboptions: + auto_delete: + description: + - If enabled, objects will be deleted automatically when its retention period expires, unless the bucket is under a legal hold. + type: bool + legal_hold: + description: + - If enabled, objects in this bucket cannot be deleted, even if their retention period has expired. + type: bool + retention_period_minutes: + description: + - specify the length of the retention period for objects added to this bucket, in minutes. + type: int + s3_object_lock_enabled: + description: + - Enable S3 Object Lock on the bucket. + - S3 Object Lock requires StorageGRID 11.5 or greater. + type: bool + version_added: '21.9.0' + bucket_versioning_enabled: + description: + - Enable versioning on the bucket. + - This API requires StorageGRID 11.6 or greater. 
+ type: bool + version_added: '21.11.0' +""" + +EXAMPLES = """ + - name: create a s3 bucket + netapp.storagegrid.na_sg_org_container: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: present + name: ansiblebucket1 + + - name: delete a s3 bucket + netapp.storagegrid.na_sg_org_container: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: absent + name: ansiblebucket1 + + - name: create a s3 bucket with Object Lock + netapp.storagegrid.na_sg_org_container: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: present + name: objectlock-bucket1 + s3_object_lock_enabled: true + + - name: create a s3 bucket with versioning enabled + netapp.storagegrid.na_sg_org_container: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: present + name: ansiblebucket1 + bucket_versioning_enabled: true +""" + +RETURN = """ +resp: + description: Returns information about the StorageGRID bucket. + returned: always + type: dict + sample: { + "name": "example-bucket", + "creationTime": "2021-01-01T00:00:00.000Z", + "region": "us-east-1", + "compliance": { + "autoDelete": false, + "legalHold": false, + "retentionPeriodMinutes": 2629800 + }, + "s3ObjectLock": { + "enabled": false + } + } +""" + +import json + +import ansible_collections.netapp.storagegrid.plugins.module_utils.netapp as netapp_utils +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp_module import NetAppModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp import SGRestAPI + + +class SgOrgContainer(object): + """ + Create, modify and delete StorageGRID Tenant Account + """ + + def __init__(self): + """ + Parse arguments, setup state variables, + check parameters and ensure request module is installed + """ + self.argument_spec = netapp_utils.na_storagegrid_host_argument_spec() + self.argument_spec.update( + dict( + state=dict(required=False, type="str", choices=["present", "absent"], default="present"), + name=dict(required=True, type="str"), + region=dict(required=False, type="str"), + compliance=dict( + required=False, + type="dict", + options=dict( + auto_delete=dict(required=False, type="bool"), + legal_hold=dict(required=False, type="bool"), + retention_period_minutes=dict(required=False, type="int"), + ), + ), + s3_object_lock_enabled=dict(required=False, type="bool"), + bucket_versioning_enabled=dict(required=False, type="bool"), + ) + ) + parameter_map = { + "auto_delete": "autoDelete", + "legal_hold": "legalHold", + "retention_period_minutes": "retentionPeriodMinutes", + } + self.module = AnsibleModule( + argument_spec=self.argument_spec, + mutually_exclusive=[("compliance", "s3_object_lock_enabled")], + supports_check_mode=True, + ) + + self.na_helper = NetAppModule() + + # set up state variables + self.parameters = self.na_helper.set_parameters(self.module.params) + # Calling generic SG rest_api class + self.rest_api = SGRestAPI(self.module) + # Get API version + self.rest_api.get_sg_product_version(api_root="org") + + # Checking for the parameters passed and create new parameters list + + self.data_versioning = {} + self.data_versioning["versioningSuspended"] = True + + self.data = {} + self.data["name"] = self.parameters["name"] + self.data["region"] = 
self.parameters.get("region") + if self.parameters.get("compliance"): + self.data["compliance"] = dict( + (parameter_map[k], v) for (k, v) in self.parameters["compliance"].items() if v is not None + ) + + if self.parameters.get("s3_object_lock_enabled") is not None: + self.rest_api.fail_if_not_sg_minimum_version("S3 Object Lock", 11, 5) + self.data["s3ObjectLock"] = dict(enabled=self.parameters["s3_object_lock_enabled"]) + + if self.parameters.get("bucket_versioning_enabled") is not None: + self.rest_api.fail_if_not_sg_minimum_version("Bucket versioning configuration", 11, 6) + self.data_versioning["versioningEnabled"] = self.parameters["bucket_versioning_enabled"] + if self.data_versioning["versioningEnabled"]: + self.data_versioning["versioningSuspended"] = False + + def get_org_container(self): + # Check if bucket/container exists + # Return info if found, or None + + params = {"include": "compliance,region"} + response, error = self.rest_api.get("api/v3/org/containers", params=params) + + if error: + self.module.fail_json(msg=error) + + for container in response["data"]: + if container["name"] == self.parameters["name"]: + return container + + return None + + def create_org_container(self): + api = "api/v3/org/containers" + + response, error = self.rest_api.post(api, self.data) + + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def get_org_container_versioning(self): + api = "api/v3/org/containers/%s/versioning" % self.parameters["name"] + response, error = self.rest_api.get(api) + + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def update_org_container_versioning(self): + api = "api/v3/org/containers/%s/versioning" % self.parameters["name"] + + response, error = self.rest_api.put(api, self.data_versioning) + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def fail_if_global_object_lock_disabled(self): + api = "api/v3/org/compliance-global" + + response, error = self.rest_api.get(api) + if error: + self.module.fail_json(msg=error) + + if not response["data"]["complianceEnabled"]: + self.module.fail_json(msg="Error: Global S3 Object Lock setting is not enabled.") + + def update_org_container_compliance(self): + api = "api/v3/org/containers/%s/compliance" % self.parameters["name"] + + response, error = self.rest_api.put(api, self.data["compliance"]) + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def delete_org_container(self): + api = "api/v3/org/containers/%s" % self.parameters["name"] + + response, error = self.rest_api.delete(api, None) + if error: + self.module.fail_json(msg=error["text"]) + + def apply(self): + """ + Perform pre-checks, call functions and exit + """ + versioning_config = None + update_versioning = False + + org_container = self.get_org_container() + + if org_container and self.parameters.get("bucket_versioning_enabled") is not None: + versioning_config = self.get_org_container_versioning() + + cd_action = self.na_helper.get_cd_action(org_container, self.parameters) + + if cd_action is None and self.parameters["state"] == "present": + # let's see if we need to update parameters + update_compliance = False + + if self.parameters.get("compliance") and org_container.get("compliance") != self.data["compliance"]: + update_compliance = True + self.na_helper.changed = True + + if ( + versioning_config + and versioning_config["versioningEnabled"] != self.data_versioning["versioningEnabled"] + ): + update_versioning = True + self.na_helper.changed = True + 
+ result_message = "" + resp_data = org_container + if self.na_helper.changed: + if self.module.check_mode: + pass + else: + if cd_action == "delete": + self.delete_org_container() + resp_data = None + result_message = "Org Container deleted" + + elif cd_action == "create": + if self.parameters.get("s3_object_lock_enabled"): # if it is set and true + self.fail_if_global_object_lock_disabled() + + resp_data = self.create_org_container() + + if self.parameters.get("bucket_versioning_enabled") is not None: + self.update_org_container_versioning() + result_message = "Org Container created" + + else: + if update_compliance: + resp_data = self.update_org_container_compliance() + if update_versioning: + self.update_org_container_versioning() + result_message = "Org Container updated" + + self.module.exit_json(changed=self.na_helper.changed, msg=result_message, resp=resp_data) + + +def main(): + """ + Main function + """ + na_sg_org_container = SgOrgContainer() + na_sg_org_container.apply() + + +if __name__ == "__main__": + main() diff --git a/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_org_group.py b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_org_group.py new file mode 100644 index 000000000..d13a7559a --- /dev/null +++ b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_org_group.py @@ -0,0 +1,301 @@ +#!/usr/bin/python + +# (c) 2020, NetApp Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +"""NetApp StorageGRID - Manage tenant Groups""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + + +ANSIBLE_METADATA = { + "metadata_version": "1.1", + "status": ["preview"], + "supported_by": "community", +} + + +DOCUMENTATION = """ +module: na_sg_org_group +short_description: NetApp StorageGRID manage groups within a tenancy. +extends_documentation_fragment: + - netapp.storagegrid.netapp.sg +version_added: '20.6.0' +author: NetApp Ansible Team (@joshedmonds) <ng-ansibleteam@netapp.com> +description: +- Create, Update, Delete Groups within NetApp StorageGRID tenant. +options: + state: + description: + - Whether the specified group should exist or not. + type: str + choices: ['present', 'absent'] + default: present + unique_name: + description: + - Unique Name for the group. Must begin with C(group/) or C(federated-group/). + - Required for create, modify or delete operation. + type: str + required: true + display_name: + description: + - Name of the group. + - Required for create operation. + type: str + management_policy: + description: + - Management access controls granted to the group within the tenancy. + type: dict + suboptions: + manage_all_containers: + description: + - Allows users to manage the settings for all S3 buckets in the tenant account, regardless of S3 bucket or group policies. + type: bool + manage_endpoints: + description: + - Allows users to use the Tenant Manager or the Tenant Management API to create or edit endpoints. + - Endpoints are used as the destination for StorageGRID platform services. + type: bool + manage_own_s3_credentials: + description: + - Allows users to create and remove their own S3 access keys. + - Users who do not have this permission do not see the S3 > My Credentials menu option. + type: bool + root_access: + description: + - Provides full access to the Tenant Manager and the Tenant Management API. + type: bool + s3_policy: + description: + - StorageGRID S3 Group Policy. 
+ default: "" + type: json +""" + +EXAMPLES = """ + - name: create a group + netapp.storagegrid.na_sg_org_group: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: present + display_name: ansiblegroup1 + unique_name: group/ansiblegroup1 + management_policy: + manage_all_containers: true + manage_endpoints: true + manage_own_s3_credentials: false + root_access: false + s3_policy: {"Statement":[{"Effect":"Deny","Action":"s3:*","Resource":"arn:aws:s3:::*"}]} +""" + +RETURN = """ +resp: + description: Returns information about the StorageGRID tenant group attributes. + returned: success + type: dict + sample: { + "displayName": "Example Group", + "policies": { + "management": { + "manageAllContainers": true, + "manageEndpoints": true, + "manageOwnS3Credentials": true, + "rootAccess": true + }, + "s3": {...}, + "swift": {...} + }, + "uniqueName": "group/examplegroup", + "accountId": "12345678901234567890", + "id": "00000000-0000-0000-0000-000000000000", + "federated": false, + "groupURN": "urn:sgws:identity::12345678901234567890:group/examplegroup" + } +""" + +import json +import re + +import ansible_collections.netapp.storagegrid.plugins.module_utils.netapp as netapp_utils +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp_module import NetAppModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp import SGRestAPI + + +class SgOrgGroup(object): + """ + Create, modify and delete StorageGRID Tenant Account + """ + + def __init__(self): + """ + Parse arguments, setup state variables, + check parameters and ensure request module is installed + """ + self.argument_spec = netapp_utils.na_storagegrid_host_argument_spec() + self.argument_spec.update( + dict( + state=dict(required=False, type="str", choices=["present", "absent"], default="present"), + display_name=dict(required=False, type="str"), + unique_name=dict(required=True, type="str"), + management_policy=dict( + required=False, + type="dict", + options=dict( + manage_all_containers=dict(required=False, type="bool"), + manage_endpoints=dict(required=False, type="bool"), + manage_own_s3_credentials=dict(required=False, type="bool"), + root_access=dict(required=False, type="bool"), + ), + ), + s3_policy=dict(required=False, type="json"), + ) + ) + parameter_map = { + "manage_all_containers": "manageAllContainers", + "manage_endpoints": "manageEndpoints", + "manage_own_s3_credentials": "manageOwnS3Credentials", + "root_access": "rootAccess", + } + self.module = AnsibleModule( + argument_spec=self.argument_spec, + # required_if=[("state", "present", ["display_name"])], + supports_check_mode=True, + ) + + self.na_helper = NetAppModule() + + # set up state variables + self.parameters = self.na_helper.set_parameters(self.module.params) + # Calling generic SG rest_api class + self.rest_api = SGRestAPI(self.module) + # Checking for the parameters passed and create new parameters list + self.data = {} + self.data["displayName"] = self.parameters.get("display_name") + self.data["uniqueName"] = self.parameters["unique_name"] + # Only add the parameter if value is True, as JSON response does not include non-true objects + self.data["policies"] = {} + + if self.parameters.get("management_policy"): + self.data["policies"] = { + "management": dict( + (parameter_map[k], v) for (k, v) in self.parameters["management_policy"].items() if v + ) + } + if not self.data["policies"].get("management"): + 
self.data["policies"]["management"] = None + + if self.parameters.get("s3_policy"): + try: + self.data["policies"]["s3"] = json.loads(self.parameters["s3_policy"]) + except ValueError: + self.module.fail_json(msg="Failed to decode s3_policy. Invalid JSON.") + + self.re_local_group = re.compile("^group/") + self.re_fed_group = re.compile("^federated-group/") + + if ( + self.re_local_group.match(self.parameters["unique_name"]) is None + and self.re_fed_group.match(self.parameters["unique_name"]) is None + ): + self.module.fail_json(msg="unique_name must begin with 'group/' or 'federated-group/'") + + def get_org_group(self, unique_name): + # Use the unique name to check if the group exists + api = "api/v3/org/groups/%s" % unique_name + response, error = self.rest_api.get(api) + + if error: + if response["code"] != 404: + self.module.fail_json(msg=error) + else: + return response["data"] + return None + + def create_org_group(self): + api = "api/v3/org/groups" + + response, error = self.rest_api.post(api, self.data) + + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def delete_org_group(self, group_id): + api = "api/v3/org/groups/" + group_id + + self.data = None + response, error = self.rest_api.delete(api, self.data) + if error: + self.module.fail_json(msg=error) + + def update_org_group(self, group_id): + api = "api/v3/org/groups/" + group_id + + response, error = self.rest_api.put(api, self.data) + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def apply(self): + """ + Perform pre-checks, call functions and exit + """ + org_group = self.get_org_group(self.parameters["unique_name"]) + + cd_action = self.na_helper.get_cd_action(org_group, self.parameters) + + if cd_action is None and self.parameters["state"] == "present": + # let's see if we need to update parameters + update = False + + if self.parameters.get("management_policy"): + if org_group.get("policies") is None or org_group.get("policies", {}).get("management") != self.data["policies"]["management"]: + update = True + if self.parameters.get("s3_policy"): + if org_group.get("policies") is None or org_group.get("policies", {}).get("s3") != self.data["policies"]["s3"]: + update = True + + if update: + self.na_helper.changed = True + result_message = "" + resp_data = org_group + if self.na_helper.changed: + if self.module.check_mode: + pass + else: + if cd_action == "delete": + self.delete_org_group(org_group["id"]) + result_message = "Org Group deleted" + + elif cd_action == "create": + resp_data = self.create_org_group() + result_message = "Org Group created" + + else: + # for a federated group, the displayName parameter needs to be specified + # and must match the existing displayName + if self.re_fed_group.match(self.parameters["unique_name"]): + self.data["displayName"] = org_group["displayName"] + + resp_data = self.update_org_group(org_group["id"]) + result_message = "Org Group updated" + + self.module.exit_json(changed=self.na_helper.changed, msg=result_message, resp=resp_data) + + +def main(): + """ + Main function + """ + na_sg_org_group = SgOrgGroup() + na_sg_org_group.apply() + + +if __name__ == "__main__": + main() diff --git a/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_org_identity_federation.py b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_org_identity_federation.py new file mode 100644 index 000000000..4b6811cd6 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_org_identity_federation.py @@ -0,0 
+1,335 @@ +#!/usr/bin/python + +# (c) 2021, NetApp Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +"""NetApp StorageGRID - Manage Tenant Identity Federation""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + + +DOCUMENTATION = """ +module: na_sg_org_identity_federation +short_description: NetApp StorageGRID manage Tenant identity federation. +extends_documentation_fragment: + - netapp.storagegrid.netapp.sg +version_added: '21.6.0' +author: NetApp Ansible Team (@joshedmonds) <ng-ansibleteam@netapp.com> +description: +- Configure Tenant Identity Federation within NetApp StorageGRID. +- If module is run with C(check_mode), a connectivity test will be performed using the supplied values without changing the configuration. +- This module is idempotent if I(password) is not specified. +options: + state: + description: + - Whether identity federation should be enabled or not. + type: str + choices: ['present', 'absent'] + default: present + username: + description: + - The username to bind to the LDAP server. + type: str + password: + description: + - The password associated with the username. + type: str + hostname: + description: + - The hostname or IP address of the LDAP server. + type: str + port: + description: + - The port used to connect to the LDAP server. Typically 389 for LDAP, or 636 for LDAPS. + type: int + base_group_dn: + description: + - The Distinguished Name of the LDAP subtree to search for groups. + type: str + base_user_dn: + description: + - The Distinguished Name of the LDAP subtree to search for users. + type: str + ldap_service_type: + description: + - The type of LDAP server. + choices: ['Active Directory', 'OpenLDAP', 'Other'] + type: str + type: + description: + - The type of identity source. + - Default is 'ldap'. + type: str + default: ldap + ldap_user_id_attribute: + description: + - The LDAP attribute which contains the unique user name of a user. + - Should be configured if I(ldap_service_type=Other). + type: str + ldap_user_uuid_attribute: + description: + - The LDAP attribute which contains the permanent unique identity of a user. + - Should be configured if I(ldap_service_type=Other). + type: str + ldap_group_id_attribute: + description: + - The LDAP attribute which contains the group for a user. + - Should be configured if I(ldap_service_type=Other). + type: str + ldap_group_uuid_attribute: + description: + - The LDAP attribute which contains the group's permanent unique identity. + - Should be configured if I(ldap_service_type=Other). + type: str + tls: + description: + - Whether Transport Layer Security is used to connect to the LDAP server. + choices: ['STARTTLS', 'LDAPS', 'Disabled'] + type: str + default: STARTTLS + ca_cert: + description: + - Custom certificate used to connect to the LDAP server. + - If a custom certificate is not supplied, the operating system CA certificate will be used. 
+ type: str +""" + +EXAMPLES = """ + - name: test identity federation configuration + netapp.storagegrid.na_sg_org_identity_federation: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: present + ldap_service_type: "Active Directory" + hostname: "ad.example.com" + port: 389 + username: "binduser" + password: "bindpass" + base_group_dn: "DC=example,DC=com" + base_user_dn: "DC=example,DC=com" + tls: "Disabled" + check_mode: yes + + - name: configure identity federation with AD and TLS + netapp.storagegrid.na_sg_org_identity_federation: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: present + ldap_service_type: "Active Directory" + hostname: "ad.example.com" + port: 636, + username: "binduser" + password: "bindpass" + base_group_dn: "DC=example,DC=com" + base_user_dn: "DC=example,DC=com" + tls: "LDAPS" + ca_cert: | + -----BEGIN CERTIFICATE----- + MIIC+jCCAeICCQDmn9Gow08LTzANBgkqhkiG9w0BAQsFADA/..swCQYDVQQGEwJV + bXBsZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB..JFzNIXQEGnsgjV + JGU4giuvOLOZ8Q3gyuUbkSUQDjmjpMR8PliwJ6iW2Ity89Dv..dl1TaIYI/ansyZ + Uxk4YXeN6kUkrDtNxCg1McALzXVAfxMTtj2SFlLxne4Z6rX2..UyftQrfM13F1vY + gK8dBPz+l+X/Uozo/xNm7gxe68p9le9/pcULst1CQn5/sPqq..kgWcSvlKUItu82 + lq3B2169rovdIaNdcvaQjMPhrDGo5rvLfMN35U3Hgbz41PL5..x2BcUE6/0ab5T4 + qKBxKa3t9twj+zpUqOzyL0PFfCE+SK5fEXAS1ow4eAcLN+eB..gR/PuvGAyIPCtE + 1+X4GrECAwEAATANBgkqhkiG9w0BAQsFAAOCAQEAFpO+04Ra..FMJPH6dBmzfb7l + k04BWTvSlur6HiQdXY+oFQMJZzyI7MQ8v9HBIzS0ZAzYWLp4..VZhHmRxnrWyxVs + u783V5YfQH2L4QnBDoiDefgxyfDs2PcoF5C+X9CGXmPqzst2..y/6tdOVJzdiA== + -----END CERTIFICATE----- +""" + +RETURN = """ +resp: + description: Returns information about the StorageGRID tenant account identity source configuration. 
+ returned: success + type: dict + sample: { + "id": "00000000-0000-0000-0000-000000000000", + "disable": false, + "hostname": "10.1.2.3", + "port": 389, + "username": "MYDOMAIN\\\\Administrator", + "password": "********", + "baseGroupDn": "DC=example,DC=com", + "baseUserDn": "DC=example,DC=com", + "ldapServiceType": "Active Directory", + "type": "ldap", + "disableTLS": false, + "enableLDAPS": false, + "caCert": "-----BEGIN CERTIFICATE----- abcdefghijkl123456780ABCDEFGHIJKL 123456/7890ABCDEFabcdefghijklABCD -----END CERTIFICATE-----\n" + } +""" + +import json +import re + +import ansible_collections.netapp.storagegrid.plugins.module_utils.netapp as netapp_utils +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp_module import NetAppModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp import SGRestAPI + + +class SgOrgIdentityFederation: + """ + Configure and modify StorageGRID Tenant Identity Federation + """ + + def __init__(self): + """ + Parse arguments, setup state variables, + check parameters and ensure request module is installed + """ + self.argument_spec = netapp_utils.na_storagegrid_host_argument_spec() + self.argument_spec.update( + dict( + state=dict(required=False, type="str", choices=["present", "absent"], default="present"), + username=dict(required=False, type="str"), + password=dict(required=False, type="str", no_log=True), + hostname=dict(required=False, type="str"), + port=dict(required=False, type="int"), + base_group_dn=dict(required=False, type="str"), + base_user_dn=dict(required=False, type="str"), + ldap_service_type=dict(required=False, type="str", choices=["OpenLDAP", "Active Directory", "Other"]), + type=dict(required=False, type="str", default="ldap"), + ldap_user_id_attribute=dict(required=False, type="str"), + ldap_user_uuid_attribute=dict(required=False, type="str"), + ldap_group_id_attribute=dict(required=False, type="str"), + ldap_group_uuid_attribute=dict(required=False, type="str"), + tls=dict(required=False, type="str", choices=["STARTTLS", "LDAPS", "Disabled"], default="STARTTLS"), + ca_cert=dict(required=False, type="str"), + ), + ) + + parameter_map = { + "username": "username", + "password": "password", + "hostname": "hostname", + "port": "port", + "base_group_dn": "baseGroupDn", + "base_user_dn": "baseUserDn", + "ldap_service_type": "ldapServiceType", + "ldap_user_id_attribute": "ldapUserIdAttribute", + "ldap_user_uuid_attribute": "ldapUserUUIDAttribute", + "ldap_group_id_attribute": "ldapGroupIdAttribute", + "ldap_group_uuid_attribute": "ldapGroupUUIDAttribute", + "ca_cert": "caCert", + } + self.module = AnsibleModule(argument_spec=self.argument_spec, supports_check_mode=True,) + + self.na_helper = NetAppModule() + + # set up state variables + self.parameters = self.na_helper.set_parameters(self.module.params) + # Calling generic SG rest_api class + self.rest_api = SGRestAPI(self.module) + # Checking for the parameters passed and create new parameters list + self.data = {} + + if self.parameters["state"] == "present": + self.data["disable"] = False + + for k in parameter_map.keys(): + if self.parameters.get(k) is not None: + self.data[parameter_map[k]] = self.parameters[k] + + if self.parameters.get("tls") == "STARTTLS": + self.data["disableTLS"] = False + self.data["enableLDAPS"] = False + elif self.parameters.get("tls") == "LDAPS": + self.data["disableTLS"] = False + self.data["enableLDAPS"] = True + else: + self.data["disableTLS"] = True + 
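            # Summary of the tls -> API flag mapping built in this branch chain:
            #   STARTTLS (default) -> disableTLS=False, enableLDAPS=False
            #   LDAPS              -> disableTLS=False, enableLDAPS=True
            #   Disabled           -> disableTLS=True,  enableLDAPS=False (plain LDAP)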
self.data["enableLDAPS"] = False + + def get_org_identity_source(self): + api = "api/v3/org/identity-source" + response, error = self.rest_api.get(api) + + if error: + self.module.fail_json(msg=error) + else: + return response["data"] + return None + + def update_identity_federation(self, test=False): + api = "api/v3/org/identity-source" + + params = {} + + if test: + params["test"] = True + + response, error = self.rest_api.put(api, self.data, params=params) + if error: + self.module.fail_json(msg=error, payload=self.data) + + if response is not None: + return response["data"] + else: + return None + + def apply(self): + """ + Perform pre-checks, call functions and exit + """ + org_identity_source = self.get_org_identity_source() + + cd_action = self.na_helper.get_cd_action(org_identity_source, self.parameters) + + if cd_action is None and self.parameters["state"] == "present": + # let's see if we need to update parameters + update = False + + for k in (i for i in self.data.keys() if i != "password"): + if self.data[k] != org_identity_source.get(k): + update = True + break + + # if a password has been specified we need to update it + if self.data.get("password") and self.parameters["state"] == "present": + update = True + self.module.warn("Password attribute has been specified. Task is not idempotent.") + + if update: + self.na_helper.changed = True + + if cd_action == "delete": + # if identity federation is already in a disable state + if org_identity_source.get("disable"): + self.na_helper.changed = False + + result_message = "" + resp_data = org_identity_source + + if self.na_helper.changed and not self.module.check_mode: + if cd_action == "delete": + self.data = dict(disable=True) + resp_data = self.update_identity_federation() + result_message = "Tenant identity federation disabled" + else: + resp_data = self.update_identity_federation() + result_message = "Tenant identity federation updated" + + if self.module.check_mode: + self.update_identity_federation(test=True) + # if no error, connection test successful + self.module.exit_json(changed=self.na_helper.changed, msg="Connection test successful") + + self.module.exit_json(changed=self.na_helper.changed, msg=result_message, resp=resp_data) + + +def main(): + """ + Main function + """ + na_sg_org_identity_federation = SgOrgIdentityFederation() + na_sg_org_identity_federation.apply() + + +if __name__ == "__main__": + main() diff --git a/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_org_info.py b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_org_info.py new file mode 100644 index 000000000..b2d3c4e48 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_org_info.py @@ -0,0 +1,279 @@ +#!/usr/bin/python + +# (c) 2020, NetApp, Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +""" NetApp StorageGRID Org Info using REST APIs """ + + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +ANSIBLE_METADATA = {'metadata_version': '1.1', + 'status': ['preview'], + 'supported_by': 'community'} + +DOCUMENTATION = """ +module: na_sg_org_info +author: NetApp Ansible Team (@jasonl4) <ng-ansibleteam@netapp.com> +extends_documentation_fragment: + - netapp.storagegrid.netapp.sg +short_description: NetApp StorageGRID Org information gatherer. +description: + - This module allows you to gather various information about StorageGRID Org configuration. 
+version_added: 20.11.0 + +options: + gather_subset: + type: list + elements: str + description: + - When supplied, this argument will restrict the information collected to a given subset. + - Either the info name or the Rest API can be given. + - Possible values for this argument include + - C(org_compliance_global_info) or C(org/compliance-global) + - C(org_config_info) or C(org/config) + - C(org_config_product_version_info) or C(org/config/product-version) + - C(org_containers_info) or C(org/containers) + - C(org_deactivated_features_info) or C(org/deactivated-features) + - C(org_endpoints_info) or C(org/endpoints) + - C(org_groups_info) or C(org/groups) + - C(org_identity_source_info) or C(org/identity-source) + - C(org_regions_info) or C(org/regions) + - C(org_users_current_user_s3_access_keys_info) or C(org/users/current-user/s3-access-keys) + - C(org_usage_info) or C(org/usage) + - C(org_users_info) or C(org/users) + - C(org_users_root_info) or C(org/users/root) + - C(versions_info) or C(versions) + - Can specify a list of values to include a larger subset. + default: "all" + parameters: + description: + - Allows for any rest option to be passed in. + type: dict +""" + +EXAMPLES = """ +- name: Gather StorageGRID Org info + netapp.storagegrid.na_sg_org_info: + api_url: "https://1.2.3.4/" + auth_token: "storagegrid-auth-token" + validate_certs: false + register: sg_org_info + +- name: Gather StorageGRID Org info for org/containers and org/config subsets + netapp.storagegrid.na_sg_org_info: + api_url: "https://1.2.3.4/" + auth_token: "storagegrid-auth-token" + validate_certs: false + gather_subset: + - org_containers_info + - org/config + register: sg_org_info + +- name: Gather StorageGRID Org info for all subsets + netapp.storagegrid.na_sg_org_info: + api_url: "https://1.2.3.4/" + auth_token: "storagegrid-auth-token" + validate_certs: false + gather_subset: + - all + register: sg_org_info + +- name: Gather StorageGRID Org info for org/containers and org/users subsets, limit to 5 results for each subset + netapp.storagegrid.na_sg_org_info: + api_url: "https://1.2.3.4/" + auth_token: "storagegrid-auth-token" + validate_certs: false + gather_subset: + - org/containers + - org/users + parameters: + limit: 5 + register: sg_org_info +""" + +RETURN = """ +sg_info: + description: Returns various information about the StorageGRID Grid configuration. + returned: always + type: dict + sample: { + "org/compliance-global": {...}, + "org/config": {...}, + "org/config/product-version": {...}, + "org/containers": {...}, + "org/deactivated-features": {...}, + "org/endpoints": {...}, + "org/groups": {...}, + "org/identity-source": {...}, + "org/regions": {...}, + "org/users/current-user/s3-access-keys": {...}, + "org/usage": {...}, + "org/users": {...}, + "org/users/root": {...}, + "org/versions": {...} + } +""" + +from ansible.module_utils.basic import AnsibleModule +import ansible_collections.netapp.storagegrid.plugins.module_utils.netapp as netapp_utils +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp_module import NetAppModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp import SGRestAPI + + +class NetAppSgGatherInfo(object): + """ Class with gather info methods """ + + def __init__(self): + """ + Parse arguments, setup variables, check parameters and ensure + request module is installed. 
+ """ + self.argument_spec = netapp_utils.na_storagegrid_host_argument_spec() + self.argument_spec.update(dict( + gather_subset=dict(default=['all'], type='list', elements='str', required=False), + parameters=dict(type='dict', required=False) + )) + + self.module = AnsibleModule( + argument_spec=self.argument_spec, + supports_check_mode=True + ) + + # set up variables + self.na_helper = NetAppModule() + self.parameters = self.na_helper.set_parameters(self.module.params) + self.rest_api = SGRestAPI(self.module) + + def get_subset_info(self, gather_subset_info): + """ + Gather StorageGRID information for the given subset using REST APIs + Input for REST APIs call : (api, data) + return gathered_sg_info + """ + + api = gather_subset_info['api_call'] + data = {} + # allow for passing in any additional rest api parameters + if self.parameters.get('parameters'): + for each in self.parameters['parameters']: + data[each] = self.parameters['parameters'][each] + + gathered_sg_info, error = self.rest_api.get(api, data) + + if error: + self.module.fail_json(msg=error) + else: + return gathered_sg_info + + return None + + def convert_subsets(self): + """ Convert an info to the REST API """ + info_to_rest_mapping = { + 'org_compliance_global_info': 'org/compliance-global', + 'org_config_info': 'org/config', + 'org_config_product_version_info': 'org/config/product-version', + 'org_containers_info': 'org/containers', + 'org_deactivated_features_info': 'org/deactivated-features', + 'org_endpoints_info': 'org/endpoints', + 'org_groups_info': 'org/groups', + 'org_identity_source_info': 'org/identity-source', + 'org_regions_info': 'org/regions', + 'org_users_current_user_s3_access_keys_info': 'org/users/current-user/s3-access-keys', + 'org_usage_info': 'org/usage', + 'org_users_info': 'org/users', + 'org_users_root_info': 'org/users/root', + 'versions_info': 'versions' + } + # Add rest API names as there info version, also make sure we don't add a duplicate + subsets = [] + for subset in self.parameters['gather_subset']: + if subset in info_to_rest_mapping: + if info_to_rest_mapping[subset] not in subsets: + subsets.append(info_to_rest_mapping[subset]) + else: + if subset not in subsets: + subsets.append(subset) + return subsets + + def apply(self): + """ Perform pre-checks, call functions and exit """ + + result_message = dict() + + # Defining gather_subset and appropriate api_call + get_sg_subset_info = { + 'org/compliance-global': { + 'api_call': 'api/v3/org/compliance-global', + }, + 'org/config': { + 'api_call': 'api/v3/org/config', + }, + 'org/config/product-version': { + 'api_call': 'api/v3/org/config/product-version', + }, + 'org/containers': { + 'api_call': 'api/v3/org/containers', + }, + 'org/deactivated-features': { + 'api_call': 'api/v3/org/deactivated-features', + }, + 'org/endpoints': { + 'api_call': 'api/v3/org/endpoints', + }, + 'org/groups': { + 'api_call': 'api/v3/org/groups', + }, + 'org/identity-source': { + 'api_call': 'api/v3/org/identity-source', + }, + 'org/regions': { + 'api_call': 'api/v3/org/regions', + }, + 'org/users/current-user/s3-access-keys': { + 'api_call': 'api/v3/org/users/current-user/s3-access-keys', + }, + 'org/usage': { + 'api_call': 'api/v3/org/usage', + }, + 'org/users': { + 'api_call': 'api/v3/org/users', + }, + 'org/users/root': { + 'api_call': 'api/v3/org/users/root', + }, + 'versions': { + 'api_call': 'api/v3/versions', + }, + } + + if 'all' in self.parameters['gather_subset']: + # If all in subset list, get the information of all subsets + 
self.parameters['gather_subset'] = sorted(get_sg_subset_info.keys()) + + converted_subsets = self.convert_subsets() + + for subset in converted_subsets: + try: + # Verify whether the supported subset passed + specified_subset = get_sg_subset_info[subset] + except KeyError: + self.module.fail_json(msg="Specified subset %s not found, supported subsets are %s" % + (subset, list(get_sg_subset_info.keys()))) + + result_message[subset] = self.get_subset_info(specified_subset) + + self.module.exit_json(changed='False', sg_info=result_message) + + +def main(): + """ Main function """ + obj = NetAppSgGatherInfo() + obj.apply() + + +if __name__ == '__main__': + main() diff --git a/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_org_user.py b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_org_user.py new file mode 100644 index 000000000..455ffa345 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_org_user.py @@ -0,0 +1,335 @@ +#!/usr/bin/python + +# (c) 2020, NetApp Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +"""NetApp StorageGRID - Manage Tenant Users""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + + +ANSIBLE_METADATA = { + "metadata_version": "1.1", + "status": ["preview"], + "supported_by": "community", +} + + +DOCUMENTATION = """ +module: na_sg_org_user +short_description: NetApp StorageGRID manage users within a tenancy. +extends_documentation_fragment: + - netapp.storagegrid.netapp.sg +version_added: '20.6.0' +author: NetApp Ansible Team (@joshedmonds) <ng-ansibleteam@netapp.com> +description: +- Create, Update, Delete Users within a NetApp StorageGRID tenant. +options: + state: + description: + - Whether the specified user should exist or not. + type: str + choices: ['present', 'absent'] + default: present + full_name: + description: + - Full Name of the user. + - Required for create operation + type: str + unique_name: + description: + - Unique Name for the user. Must begin with C(user/) or C(federated-user/). + - Required for create, modify or delete operation. + type: str + required: true + member_of: + description: + - List of unique_groups that the user is a member of. + type: list + elements: str + password: + description: + - Set a password for a local user. Does not apply to federated users. + - Requires root privilege. + required: false + type: str + update_password: + description: + - Choose when to update the password. + - When set to C(always), the password will always be updated. + - When set to C(on_create), the password will only be set upon a new user creation. + default: on_create + choices: + - on_create + - always + type: str + disable: + description: + - Disable the user from signing in. Does not apply to federated users. + type: bool +""" + +EXAMPLES = """ + - name: create a tenant user + netapp.storagegrid.na_sg_org_user: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: present + full_name: ansibleuser1 + unique_name: user/ansibleuser1 + member_of: "group/ansiblegroup1" + disable: false + +""" + +RETURN = """ +resp: + description: Returns information about the StorageGRID tenant user. 
+ returned: always + type: dict + sample: { + "fullName": "Example User", + "memberOf": ["00000000-0000-0000-0000-000000000000"], + "disable": false, + "uniqueName": "user/Example", + "accountId": "0", + "id": "00000000-0000-0000-0000-000000000000", + "federated": false, + "userURN": "urn:sgws:identity::0:user/Example" + } +""" + +import json +import re + + +import ansible_collections.netapp.storagegrid.plugins.module_utils.netapp as netapp_utils +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp_module import ( + NetAppModule, +) +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp import SGRestAPI + + +class SgOrgUser(object): + """ + Create, modify and delete user within a StorageGRID Tenant Account + """ + + def __init__(self): + """ + Parse arguments, setup state variables, + check parameters and ensure request module is installed + """ + self.argument_spec = netapp_utils.na_storagegrid_host_argument_spec() + self.argument_spec.update( + dict( + state=dict(required=False, type="str", choices=["present", "absent"], default="present"), + full_name=dict(required=False, type="str"), + unique_name=dict(required=True, type="str"), + member_of=dict(required=False, type="list", elements="str"), + disable=dict(required=False, type="bool"), + password=dict(required=False, type="str", no_log=True), + update_password=dict( + default="on_create", choices=["on_create", "always"] + ), + ) + ) + + self.module = AnsibleModule( + argument_spec=self.argument_spec, + required_if=[("state", "present", ["full_name", "unique_name"])], + supports_check_mode=True, + ) + + self.na_helper = NetAppModule() + + # set up state variables + self.parameters = self.na_helper.set_parameters(self.module.params) + # Calling generic SG rest_api class + self.rest_api = SGRestAPI(self.module) + # Checking for the parameters passed and create new parameters list + self.data = {} + self.data["memberOf"] = [] + if self.parameters.get("full_name"): + self.data["fullName"] = self.parameters["full_name"] + if self.parameters.get("unique_name"): + self.data["uniqueName"] = self.parameters["unique_name"] + + if self.parameters.get("disable") is not None: + self.data["disable"] = self.parameters["disable"] + + re_local_user = re.compile("^user/") + re_fed_user = re.compile("^federated-user/") + + if ( + re_local_user.match(self.parameters["unique_name"]) is None + and re_fed_user.match(self.parameters["unique_name"]) is None + ): + self.module.fail_json( + msg="unique_name must begin with 'user/' or 'federated-user/'" + ) + + self.pw_change = {} + if self.parameters.get("password") is not None: + if re_fed_user.match(self.parameters["unique_name"]): + self.module.fail_json(msg="password cannot be set for a federated user") + self.pw_change["password"] = self.parameters["password"] + + def get_org_groups(self): + # Get list of groups + # Retrun mapping of uniqueName to ids if found, or None + api = "api/v3/org/groups?limit=350" + response, error = self.rest_api.get(api) + + if error: + self.module.fail_json(msg=error) + + if response["data"]: + name_to_id_map = dict( + zip( + [i["uniqueName"] for i in response["data"]], + [j["id"] for j in response["data"]], + ) + ) + return name_to_id_map + + return None + + def get_org_user(self, unique_name): + # Use the unique name to check if the user exists + api = "api/v3/org/users/%s" % unique_name + response, error = self.rest_api.get(api) + + if error: + if response["code"] != 404: + 
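                # Anything other than a 404 is treated as a real failure; a 404
                # simply means the user does not exist yet, so fall through and
                # return None so apply() can handle it as a create.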
self.module.fail_json(msg=error) + else: + return response["data"] + return None + + def create_org_user(self): + api = "api/v3/org/users" + + response, error = self.rest_api.post(api, self.data) + + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def delete_org_user(self, user_id): + api = "api/v3/org/users/" + user_id + + self.data = None + response, error = self.rest_api.delete(api, self.data) + if error: + self.module.fail_json(msg=error) + + def update_org_user(self, user_id): + api = "api/v3/org/users/" + user_id + + response, error = self.rest_api.put(api, self.data) + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def set_org_user_password(self, unique_name): + api = "api/v3/org/users/%s/change-password" % unique_name + response, error = self.rest_api.post(api, self.pw_change) + + if error: + self.module.fail_json(msg=error["text"]) + + def apply(self): + """ + Perform pre-checks, call functions and exit + """ + org_user = self.get_org_user(self.parameters["unique_name"]) + + if self.parameters.get("member_of"): + org_groups = self.get_org_groups() + try: + self.data["memberOf"] = [ + org_groups[x] for x in self.parameters["member_of"] + ] + except KeyError as e: + self.module.fail_json( + msg="Invalid unique_group supplied: '%s' not found" % e.args[0] + ) + + cd_action = self.na_helper.get_cd_action(org_user, self.parameters) + + if cd_action is None and self.parameters["state"] == "present": + # let's see if we need to update parameters + update = False + + if org_user["memberOf"] is None: + member_of_diff = [] + else: + member_of_diff = [ + i + for i in self.data["memberOf"] + org_user["memberOf"] + if i not in self.data["memberOf"] or i not in org_user["memberOf"] + ] + if member_of_diff: + update = True + + if self.parameters.get("disable") is not None and self.parameters[ + "disable" + ] != org_user.get("disable"): + update = True + + if update: + self.na_helper.changed = True + + result_message = "" + resp_data = org_user + if self.na_helper.changed: + if self.module.check_mode: + pass + else: + if cd_action == "delete": + self.delete_org_user(org_user["id"]) + result_message = "Org User deleted" + + elif cd_action == "create": + resp_data = self.create_org_user() + result_message = "Org User created" + + else: + resp_data = self.update_org_user(org_user["id"]) + result_message = "Org User updated" + + # If a password has been set + if self.pw_change: + if self.module.check_mode: + pass + else: + # Only update the password if update_password is always, or a create activity has occurred + if cd_action == "create" or self.parameters["update_password"] == "always": + self.set_org_user_password(self.parameters["unique_name"]) + self.na_helper.changed = True + + results = [result_message, "Org User password updated"] + result_message = "; ".join(filter(None, results)) + + self.module.exit_json( + changed=self.na_helper.changed, msg=result_message, resp=resp_data + ) + + +def main(): + """ + Main function + """ + na_sg_org_user = SgOrgUser() + na_sg_org_user.apply() + + +if __name__ == "__main__": + main() diff --git a/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_org_user_s3_key.py b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_org_user_s3_key.py new file mode 100644 index 000000000..0de396eb7 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/plugins/modules/na_sg_org_user_s3_key.py @@ -0,0 +1,210 @@ +#!/usr/bin/python + +# (c) 2020, NetApp Inc +# GNU General Public License v3.0+ 
(see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +"""NetApp StorageGRID - Manage User S3 keys""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + + +ANSIBLE_METADATA = { + "metadata_version": "1.1", + "status": ["preview"], + "supported_by": "community", +} + + +DOCUMENTATION = """ +module: na_sg_org_user_s3_key +short_description: Creates NetApp StorageGRID User S3 keys. +extends_documentation_fragment: + - netapp.storagegrid.netapp.sg +version_added: '20.6.0' +author: NetApp Ansible Team (@joshedmonds) <ng-ansibleteam@netapp.com> +description: +- Create, Delete Users S3 keys on NetApp StorageGRID. +options: + state: + description: + - Whether the specified account should exist or not. + type: str + choices: ['present', 'absent'] + default: present + unique_user_name: + description: + - Unique user name owning the S3 Key. + required: true + type: str + expires: + description: + - Date-Time string for the key to expire. + type: str + access_key: + description: + - Access Key or S3 credential pair identifier. + - Required for delete operation. + type: str +""" + +EXAMPLES = """ + - name: create a s3 key + netapp.storagegrid.na_sg_org_user_s3_key: + api_url: "https://<storagegrid-endpoint-url>" + auth_token: "storagegrid-auth-token" + validate_certs: false + state: present + unique_user_name: user/ansibleuser1 +""" + +RETURN = """ +resp: + description: Returns information about an S3 access key for the user. + returned: always + type: dict + sample: { + "id": "abcABC_01234-0123456789abcABCabc0123456789==", + "accountId": 12345678901234567000, + "displayName": "****************AB12", + "userURN": "urn:sgws:identity::12345678901234567000:root", + "userUUID": "00000000-0000-0000-0000-000000000000", + "expires": "2020-09-04T00:00:00.000Z" + } +""" + +import json + +import ansible_collections.netapp.storagegrid.plugins.module_utils.netapp as netapp_utils +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp_module import NetAppModule +from ansible_collections.netapp.storagegrid.plugins.module_utils.netapp import SGRestAPI + + +class SgOrgUserS3Key(object): + """ + Create, modify and delete StorageGRID Tenant Account + """ + + def __init__(self): + """ + Parse arguments, setup state variables, + check parameters and ensure request module is installed + """ + self.argument_spec = netapp_utils.na_storagegrid_host_argument_spec() + self.argument_spec.update( + dict( + state=dict(required=False, type="str", choices=["present", "absent"], default="present"), + unique_user_name=dict(required=True, type="str"), + expires=dict(required=False, type="str"), + access_key=dict(required=False, type="str", no_log=False), + ) + ) + + self.module = AnsibleModule( + argument_spec=self.argument_spec, + required_if=[("state", "absent", ["access_key"])], + supports_check_mode=False, + ) + + self.na_helper = NetAppModule() + + # set up state variables + self.parameters = self.na_helper.set_parameters(self.module.params) + # Calling generic SG rest_api class + self.rest_api = SGRestAPI(self.module) + # Checking for the parameters passed and create new parameters list + self.data = {} + self.data["expires"] = self.parameters.get("expires") + + def get_org_user_id(self, unique_name): + # Use the unique name to check if the user exists + api = "api/v3/org/users/%s" % unique_name + response, error = self.rest_api.get(api) + + if error: + if response["code"] != 404: + 
self.module.fail_json(msg=error) + else: + return response["data"]["id"] + return None + + def get_org_user_s3_key(self, user_id, access_key): + # Use the unique name to check if the user exists + api = "api/v3/org/users/current-user/s3-access-keys/%s" % access_key + + if user_id: + api = "api/v3/org/users/%s/s3-access-keys/%s" % (user_id, access_key,) + + response, error = self.rest_api.get(api) + + if error: + self.module.fail_json(msg=error) + else: + return response["data"] + return None + + def create_org_user_s3_key(self, user_id): + api = "api/v3/org/users/current-user/s3-access-keys" + + if user_id: + api = "api/v3/org/users/%s/s3-access-keys" % user_id + + response, error = self.rest_api.post(api, self.data) + + if error: + self.module.fail_json(msg=error) + + return response["data"] + + def delete_org_user_s3_key(self, user_id, access_key): + api = "api/v3/org/users/current-user/s3-access-keys" + + if user_id: + api = "api/v3/org/users/%s/s3-access-keys/%s" % (user_id, access_key,) + + self.data = None + response, error = self.rest_api.delete(api, self.data) + if error: + self.module.fail_json(msg=error) + + def apply(self): + """ + Perform pre-checks, call functions and exit + """ + result_message = "" + resp_data = {} + user_id = None + + if self.parameters.get("unique_user_name"): + user_id = self.get_org_user_id(self.parameters["unique_user_name"]) + + if self.parameters["state"] == "present": + org_user_s3_key = None + if self.parameters.get("access_key"): + org_user_s3_key = self.get_org_user_s3_key(user_id, self.parameters["access_key"]) + resp_data = org_user_s3_key + + if not org_user_s3_key: # create + resp_data = self.create_org_user_s3_key(user_id) + self.na_helper.changed = True + + if self.parameters["state"] == "absent": + self.delete_org_user_s3_key(user_id, self.parameters["access_key"]) + self.na_helper.changed = True + result_message = "Org User S3 key deleted" + + self.module.exit_json(changed=self.na_helper.changed, msg=result_message, resp=resp_data) + + +def main(): + """ + Main function + """ + na_sg_org_user_s3_key = SgOrgUserS3Key() + na_sg_org_user_s3_key.apply() + + +if __name__ == "__main__": + main() diff --git a/ansible_collections/netapp/storagegrid/requirements.txt b/ansible_collections/netapp/storagegrid/requirements.txt new file mode 100644 index 000000000..f2293605c --- /dev/null +++ b/ansible_collections/netapp/storagegrid/requirements.txt @@ -0,0 +1 @@ +requests diff --git a/ansible_collections/netapp/storagegrid/tests/unit/compat/__init__.py b/ansible_collections/netapp/storagegrid/tests/unit/compat/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/ansible_collections/netapp/storagegrid/tests/unit/compat/__init__.py diff --git a/ansible_collections/netapp/storagegrid/tests/unit/compat/builtins.py b/ansible_collections/netapp/storagegrid/tests/unit/compat/builtins.py new file mode 100644 index 000000000..bfc8adfbe --- /dev/null +++ b/ansible_collections/netapp/storagegrid/tests/unit/compat/builtins.py @@ -0,0 +1,34 @@ +# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com> +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. + +# Make coding more python3-ish +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +# +# Compat for python2.7 +# + +# One unittest needs to import builtins via __import__() so we need to have +# the string that represents it +try: + import __builtin__ +except ImportError: + BUILTINS = "builtins" +else: + BUILTINS = "__builtin__" diff --git a/ansible_collections/netapp/storagegrid/tests/unit/compat/mock.py b/ansible_collections/netapp/storagegrid/tests/unit/compat/mock.py new file mode 100644 index 000000000..ce13d07cb --- /dev/null +++ b/ansible_collections/netapp/storagegrid/tests/unit/compat/mock.py @@ -0,0 +1,125 @@ +# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com> +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. + +# Make coding more python3-ish +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +""" +Compat module for Python3.x's unittest.mock module +""" +import sys + +# Python 2.7 + +# Note: Could use the pypi mock library on python3.x as well as python2.x. It +# is the same as the python3 stdlib mock library + +try: + # Allow wildcard import because we really do want to import all of mock's + # symbols into this compat shim + # pylint: disable=wildcard-import,unused-wildcard-import + from unittest.mock import * +except ImportError: + # Python 2 + # pylint: disable=wildcard-import,unused-wildcard-import + try: + from mock import * + except ImportError: + print("You need the mock library installed on python2.x to run tests") + + +# Prior to 3.4.4, mock_open cannot handle binary read_data +if sys.version_info >= (3,) and sys.version_info < (3, 4, 4): + file_spec = None + + def _iterate_read_data(read_data): + # Helper for mock_open: + # Retrieve lines from read_data via a generator so that separate calls to + # readline, read, and readlines are properly interleaved + sep = b"\n" if isinstance(read_data, bytes) else "\n" + data_as_list = [l + sep for l in read_data.split(sep)] + + if data_as_list[-1] == sep: + # If the last line ended in a newline, the list comprehension will have an + # extra entry that's just a newline. Remove this. 
+ data_as_list = data_as_list[:-1] + else: + # If there wasn't an extra newline by itself, then the file being + # emulated doesn't have a newline to end the last line remove the + # newline that our naive format() added + data_as_list[-1] = data_as_list[-1][:-1] + + for line in data_as_list: + yield line + + def mock_open(mock=None, read_data=""): + """ + A helper function to create a mock to replace the use of `open`. It works + for `open` called directly or used as a context manager. + The `mock` argument is the mock object to configure. If `None` (the + default) then a `MagicMock` will be created for you, with the API limited + to methods or attributes available on standard file handles. + `read_data` is a string for the `read` methoddline`, and `readlines` of the + file handle to return. This is an empty string by default. + """ + + def _readlines_side_effect(*args, **kwargs): + if handle.readlines.return_value is not None: + return handle.readlines.return_value + return list(_data) + + def _read_side_effect(*args, **kwargs): + if handle.read.return_value is not None: + return handle.read.return_value + return type(read_data)().join(_data) + + def _readline_side_effect(): + if handle.readline.return_value is not None: + while True: + yield handle.readline.return_value + for line in _data: + yield line + + global file_spec + if file_spec is None: + import _io + + file_spec = list( + set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO))) + ) + + if mock is None: + mock = MagicMock(name="open", spec=open) + + handle = MagicMock(spec=file_spec) + handle.__enter__.return_value = handle + + _data = _iterate_read_data(read_data) + + handle.write.return_value = None + handle.read.return_value = None + handle.readline.return_value = None + handle.readlines.return_value = None + + handle.read.side_effect = _read_side_effect + handle.readline.side_effect = _readline_side_effect() + handle.readlines.side_effect = _readlines_side_effect + + mock.return_value = handle + return mock diff --git a/ansible_collections/netapp/storagegrid/tests/unit/compat/unittest.py b/ansible_collections/netapp/storagegrid/tests/unit/compat/unittest.py new file mode 100644 index 000000000..73a20cf8c --- /dev/null +++ b/ansible_collections/netapp/storagegrid/tests/unit/compat/unittest.py @@ -0,0 +1,44 @@ +# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com> +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. 
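# ---------------------------------------------------------------------------
# Illustrative aside, not part of either compat shim: typical usage of a
# mock_open helper such as the one defined in compat/mock.py above. On
# Python 3 the equivalent helper ships as unittest.mock.mock_open, which is
# what this sketch uses; the file path and contents are made-up examples.
import unittest.mock as _mock


def read_first_line(path):
    # Tiny function under test: return the first line of a text file.
    with open(path) as handle:
        return handle.readline()


def demo_mock_open():
    fake = _mock.mock_open(read_data="line1\nline2\n")
    with _mock.patch("builtins.open", fake):
        assert read_first_line("/tmp/does-not-exist") == "line1\n"
    fake.assert_called_once_with("/tmp/does-not-exist")


demo_mock_open()
# ---------------------------------------------------------------------------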
+ +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +''' +Compat module for Python2.7's unittest module +''' + +import sys + +import pytest + +# Allow wildcard import because we really do want to import all of +# unittests's symbols into this compat shim +# pylint: disable=wildcard-import,unused-wildcard-import +if sys.version_info < (2, 7): + try: + # Need unittest2 on python2.6 + from unittest2 import * + except ImportError: + print('You need unittest2 installed on python2.6.x to run tests') + + class TestCase: + """ skip everything """ + pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as unittest2 may not be available') +else: + from unittest import * diff --git a/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_account.py b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_account.py new file mode 100644 index 000000000..e96697381 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_account.py @@ -0,0 +1,380 @@ +# (c) 2020, NetApp, Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +""" unit tests NetApp StorageGRID Tenant Ansible module: na_sg_grid_account""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +import json +import pytest +import sys +try: + from requests import Response +except ImportError: + if sys.version_info < (2, 7): + pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available') + else: + raise + +from ansible_collections.netapp.storagegrid.tests.unit.compat import unittest +from ansible_collections.netapp.storagegrid.tests.unit.compat.mock import ( + patch, + Mock, +) +from ansible.module_utils import basic +from ansible.module_utils._text import to_bytes +from ansible_collections.netapp.storagegrid.plugins.modules.na_sg_grid_account import ( + SgGridAccount as grid_account_module, +) + +# REST API canned responses when mocking send_request +SRR = { + # common responses + "empty_good": ({"data": []}, None), + "end_of_sequence": (None, "Unexpected call to send_request"), + "generic_error": (None, "Expected error"), + "delete_good": ({"code": 204}, None), + "pw_change_good": ({"code": 204}, None), + "grid_accounts": ( + { + "data": [ + { + "name": "TestTenantAccount", + "capabilities": ["management", "s3"], + "policy": { + "useAccountIdentitySource": True, + "allowPlatformServices": False, + "quotaObjectBytes": None, + }, + "id": "12345678901234567890", + } + ] + }, + None, + ), + "grid_account_record": ( + { + "data": { + "name": "TestTenantAccount", + "capabilities": ["management", "s3"], + "policy": { + "useAccountIdentitySource": True, + "allowPlatformServices": False, + "quotaObjectBytes": None, + }, + "id": "12345678901234567890", + } + }, + None, + ), + "grid_account_record_with_quota": ( + { + "data": { + "name": "TestTenantAccount", + "capabilities": ["management", "s3"], + "policy": { + "useAccountIdentitySource": True, + "allowPlatformServices": False, + "quotaObjectBytes": 10737418240, + }, + "id": "12345678901234567890", + } + }, + None, + ), + "grid_account_record_update_quota": ( + { + "data": { + "name": "TestTenantAccount", + "capabilities": ["management", "s3"], + "policy": { + "useAccountIdentitySource": True, + "allowPlatformServices": False, + "quotaObjectBytes": 21474836480, + }, + "id": "12345678901234567890", + } + }, + None, + ), 
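    # Each SRR entry above is a (response, error) tuple; the tests below feed
    # them to SGRestAPI.send_request via mock side_effect so the module under
    # test sees deterministic, canned API responses in a fixed order.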
+} + + +def set_module_args(args): + """prepare arguments so that they will be picked up during module creation""" + args = json.dumps({"ANSIBLE_MODULE_ARGS": args}) + basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access + + +class AnsibleExitJson(Exception): + """Exception class to be raised by module.exit_json and caught by the test case""" + + pass + + +class AnsibleFailJson(Exception): + """Exception class to be raised by module.fail_json and caught by the test case""" + + pass + + +def exit_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over exit_json; package return data into an exception""" + if "changed" not in kwargs: + kwargs["changed"] = False + raise AnsibleExitJson(kwargs) + + +def fail_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over fail_json; package return data into an exception""" + kwargs["failed"] = True + raise AnsibleFailJson(kwargs) + + +class TestMyModule(unittest.TestCase): + """ a group of related Unit Tests """ + + def setUp(self): + self.mock_module_helper = patch.multiple( + basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json + ) + self.mock_module_helper.start() + self.addCleanup(self.mock_module_helper.stop) + + def set_default_args_fail_check(self): + return dict( + { + "name": "TestTenantAccount", + "protocol": "s3", + "management": True, + "use_own_identity_source": True, + "allow_platform_services": False, + "password": "abc123", + "quota_size": 0, + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_default_args_pass_check(self): + return dict( + { + "state": "present", + "name": "TestTenantAccount", + "protocol": "s3", + "management": True, + "use_own_identity_source": True, + "allow_platform_services": False, + "password": "abc123", + "quota_size": 0, + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_create_na_sg_grid_account(self): + return dict( + { + "state": "present", + "name": "TestTenantAccount", + "protocol": "s3", + "management": True, + "use_own_identity_source": True, + "allow_platform_services": False, + "password": "abc123", + "quota_size": 0, + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_delete_na_sg_grid_account(self): + return dict( + { + "state": "absent", + "name": "TestTenantAccount", + "protocol": "s3", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def test_module_fail_when_required_args_missing(self): + """ required arguments are reported as errors """ + with pytest.raises(AnsibleFailJson) as exc: + set_module_args(self.set_default_args_fail_check()) + grid_account_module() + print( + "Info: test_module_fail_when_required_args_missing: %s" + % exc.value.args[0]["msg"] + ) + + def test_module_fail_when_required_args_present(self): + """ required arguments are reported as errors """ + with pytest.raises(AnsibleExitJson) as exc: + set_module_args(self.set_default_args_pass_check()) + grid_account_module() + exit_json(changed=True, msg="Induced arguments check") + print( + "Info: test_module_fail_when_required_args_present: %s" + % exc.value.args[0]["msg"] + ) + assert exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def 
test_create_na_sg_grid_account_pass(self, mock_request): + set_module_args(self.set_args_create_na_sg_grid_account()) + my_obj = grid_account_module() + mock_request.side_effect = [ + SRR["empty_good"], # get + SRR["grid_accounts"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_create_na_sg_tenant_account_pass: %s" + % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_idempotent_create_na_sg_grid_account_pass(self, mock_request): + set_module_args(self.set_args_create_na_sg_grid_account()) + my_obj = grid_account_module() + mock_request.side_effect = [ + SRR["grid_accounts"], # get id + SRR["grid_account_record"], # get account + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_idempotent_create_na_sg_tenant_account_pass: %s" + % repr(exc.value.args[0]) + ) + assert not exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_update_na_sg_grid_account_pass(self, mock_request): + args = self.set_args_create_na_sg_grid_account() + args["quota_size"] = 10 + set_module_args(args) + my_obj = grid_account_module() + mock_request.side_effect = [ + SRR["grid_accounts"], # get + SRR["grid_account_record"], # get + SRR["grid_account_record_with_quota"], # put + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_update_na_sg_tenant_account_pass: %s" + % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_update_na_sg_grid_account_quota_pass(self, mock_request): + args = self.set_args_create_na_sg_grid_account() + args["quota_size"] = 20480 + args["quota_size_unit"] = "mb" + set_module_args(args) + my_obj = grid_account_module() + mock_request.side_effect = [ + SRR["grid_accounts"], # get + SRR["grid_account_record_with_quota"], # get + SRR["grid_account_record_update_quota"], # put + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_create_na_sg_tenant_account_pass: %s" + % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] + + # update Tenant Account and set pass + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_update_na_sg_grid_account_and_set_password_pass(self, mock_request): + args = self.set_args_create_na_sg_grid_account() + args["quota_size"] = 20480 + args["quota_size_unit"] = "mb" + args["update_password"] = "always" + + set_module_args(args) + my_obj = grid_account_module() + mock_request.side_effect = [ + SRR["grid_accounts"], # get + SRR["grid_account_record_with_quota"], # get + SRR["grid_account_record_update_quota"], # put + SRR["pw_change_good"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_update_na_sg_grid_account_and_set_password_pass: %s" + % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] + + # set pass only + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_set_na_sg_grid_account_root_password_pass(self, 
mock_request): + args = self.set_args_create_na_sg_grid_account() + args["update_password"] = "always" + + set_module_args(args) + my_obj = grid_account_module() + mock_request.side_effect = [ + SRR["grid_accounts"], # get id + SRR["grid_account_record"], # get account + SRR["pw_change_good"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_set_na_sg_grid_account_root_password_pass: %s" % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_delete_na_sg_grid_account_pass(self, mock_request): + set_module_args(self.set_args_delete_na_sg_grid_account()) + my_obj = grid_account_module() + mock_request.side_effect = [ + SRR["grid_accounts"], # get + SRR["grid_account_record"], # get + SRR["delete_good"], # delete + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_create_na_sg_tenant_account_pass: %s" + % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] diff --git a/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_certificate.py b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_certificate.py new file mode 100644 index 000000000..74974abff --- /dev/null +++ b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_certificate.py @@ -0,0 +1,342 @@ +# (c) 2020, NetApp, Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +""" unit tests NetApp StorageGRID Grid Certificate Ansible module: na_sg_grid_certificate""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +import json +import pytest +import sys +try: + from requests import Response +except ImportError: + if sys.version_info < (2, 7): + pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available') + else: + raise + +from ansible_collections.netapp.storagegrid.tests.unit.compat import unittest +from ansible_collections.netapp.storagegrid.tests.unit.compat.mock import ( + patch, + Mock, +) +from ansible.module_utils import basic +from ansible.module_utils._text import to_bytes +from ansible_collections.netapp.storagegrid.plugins.modules.na_sg_grid_certificate import ( + SgGridCertificate as grid_certificate_module, +) + +# REST API canned responses when mocking send_request +SRR = { + # common responses + "empty_good": ({"data": []}, None), + "not_found": ({"status": "error", "code": 404, "data": {}}, {"key": "error.404"},), + "end_of_sequence": (None, "Unexpected call to send_request"), + "generic_error": (None, "Expected error"), + "delete_good": (None, None), + "update_good": (None, None), + "cert_unset": ({"data": {"serverCertificateEncoded": None, "caBundleEncoded": None}}, None), + "storage_api_cert": ( + { + "data": { + "serverCertificateEncoded": ( + "-----BEGIN CERTIFICATE-----\n" + "MIICyDCCAbACCQCgFntI3q7iADANBgkqhkiG9w0BAQsFADAmMQswCQYDVQQGEwJV\n" + "UzEXMBUGA1UEAwwOczMuZXhhbXBsZS5jb20wHhcNMjEwNDI5MDQ1NTM1WhcNMjIw\n" + "NDI5MDQ1NTM1WjAmMQswCQYDVQQGEwJVUzEXMBUGA1UEAwwOczMuZXhhbXBsZS5j\n" + "b20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQD0LMcJUdWmTtxi7U7B\n" + "yldDRfyCD9W+QJ1Ygm7E9iFwvkThUCV5q+DIcgSfogoSKaQuHaImLXMZn36QC22n\n" + "+Ah2EGrQiggyny3wDzuWf5/Qg7ogqQRqiespBFLlV4RGCREHK0y5uq8mzpIaQ8l8\n" + 
"STa7nLS7BIc6rD15BJaNWZpDVHIzhljlnhfnqwio/ZfP++lAjk4/j8pPGPEEI5Fe\n" + "WxhOtQjr7xTHeJxKHp2VKiLEvFxniL3qk4uJ3k5fJ7IqALUEPWH92brFp2IkObUA\n" + "EGsZYB4KFV7asBVhGuspYNzUQ6NqWbEUmtTjKEXcb1TA8RK+Pc2TotOrQ2E7Z+rU\n" + "gl2fAgMBAAEwDQYJKoZIhvcNAQELBQADggEBAD5PW1WI7GCfxLQjaitnXpD1MR2O\n" + "6b5csymPYwRejMsSswd8egjs+vO2pbF9TptLjqGliE9XUoI+mWpuMzzd75F0jcjq\n" + "1DhlINgAmjUJEAg0RAqce0Kn8xQF+SofMtkOH+nZm3Q9nbTJKr1H5m2TnCq3v5TH\n" + "Qo0ASf0LLGgrwUtT0IghdSttYLS89dJprZ6c5wK7qeBzxfdHxxjiaSnvByL2Ryn5\n" + "cec9lptYKoRY42hWvkQv9Wkr3DDoyNA3xPdZJr0Hpf8/mSPnt9r/AR8E32xi0SXp\n" + "hOMTDgMicbK82ycxz0yW88gm6yhrChlJrWaEsVGod3FU+lbMAnagYZ/Vwp8=\n" + "-----END CERTIFICATE-----\n" + ), + "caBundleEncoded": None, + } + }, + None, + ), + "storage_api_cert_update": ( + { + "data": { + "serverCertificateEncoded": ( + "-----BEGIN CERTIFICATE-----\n" + "MIICzjCCAbYCCQDZVi1OT89SAjANBgkqhkiG9w0BAQsFADApMQswCQYDVQQGEwJV\n" + "UzEaMBgGA1UEAwwRczMubmV3ZXhhbXBsZS5jb20wHhcNMjEwNDI5MDQ1NzIxWhcN\n" + "MjIwNDI5MDQ1NzIxWjApMQswCQYDVQQGEwJVUzEaMBgGA1UEAwwRczMubmV3ZXhh\n" + "bXBsZS5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCmg37q2sjZ\n" + "k+HsXtai3PSMtGUiqij04JtG9ahMqIejuxy5sDCWnigh//NjdK+wPYc2VfYd6KFA\n" + "Uk9rP84M7sqdqGzIzmyEu7INyCnlbxcXlST6UZDsZnVU7Gk2GvUzk2OoO5N+G0oI\n" + "Lfc/3eKTx9j9BguOaWUy+ni+Te8j6EwK6HolGRBjLYqf1SYFBzaoVpy7pmzaFZ4R\n" + "10jFSxHbotIZ+kR8pPE5jGkP8OjOfrpbhEgmffpeq2MSCMRuhRtRiVp4ULwkMTRN\n" + "tFj89mu1gl9T3lYM/LO1SmBv3il0mNmrTL+99UJ4s2eL0zr/uHAVYJcVqFgWP7X8\n" + "WnOk+d86b0TXAgMBAAEwDQYJKoZIhvcNAQELBQADggEBAFmGV3IOuNYeM3LQxls+\n" + "/CNHznvIqvoiJOWq0S7LFy1eO7PVzCl3l/fDKjGMt2lGXeU89YKdFVPqsainNEFT\n" + "cNEWlezVut+/CWQpBXujyBqPLkYbzyGsakMImDb+MrSkBO5MCjlt38vppm5a97fB\n" + "9o/wM31e+N6gJLiHWs0XB9TK6bY9CvcutcGUOH/oxH1TEBgrJ3SoS7/HmZJSaCQA\n" + "hjZappzuEpGVXT8YDlb67PzUoE2rDWjdSFRXCk/0U6VR0xNgnN1WtfHaypU71DrB\n" + "zxbDaOIZoDp5G4OgjkFxoCoSWLant+LsqEwclIbCFgEvJPE8855UThelTHmIfivP\n" + "veI=\n-----END CERTIFICATE-----\n" + ), + "caBundleEncoded": None, + } + }, + None, + ), +} + + +def set_module_args(args): + """prepare arguments so that they will be picked up during module creation""" + args = json.dumps({"ANSIBLE_MODULE_ARGS": args}) + basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access + + +class AnsibleExitJson(Exception): + """Exception class to be raised by module.exit_json and caught by the test case""" + + pass + + +class AnsibleFailJson(Exception): + """Exception class to be raised by module.fail_json and caught by the test case""" + + pass + + +def exit_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over exit_json; package return data into an exception""" + if "changed" not in kwargs: + kwargs["changed"] = False + raise AnsibleExitJson(kwargs) + + +def fail_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over fail_json; package return data into an exception""" + kwargs["failed"] = True + raise AnsibleFailJson(kwargs) + + +class TestMyModule(unittest.TestCase): + """ a group of related Unit Tests """ + + def setUp(self): + self.mock_module_helper = patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json) + self.mock_module_helper.start() + self.addCleanup(self.mock_module_helper.stop) + + def set_default_args_fail_check(self): + return dict( + { + "server_certificate": ( + "-----BEGIN CERTIFICATE-----\n" + "MIICyDCCAbACCQCgFntI3q7iADANBgkqhkiG9w0BAQsFADAmMQswCQYDVQQGEwJV\n" + "UzEXMBUGA1UEAwwOczMuZXhhbXBsZS5jb20wHhcNMjEwNDI5MDQ1NTM1WhcNMjIw\n" + "-----END CERTIFICATE-----\n" + ), + 
"private_key": ( + "-----BEGIN PRIVATE KEY-----\n" + "MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQD0LMcJUdWmTtxi\n" + "7U7ByldDRfyCD9W+QJ1Ygm7E9iFwvkThUCV5q+DIcgSfogoSKaQuHaImLXMZn36Q\n" + "C22n+Ah2EGrQiggyny3wDzuWf5/Qg7ogqQRqiespBFLlV4RGCREHK0y5uq8mzpIa\n" + "-----END PRIVATE KEY-----\n" + ), + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_default_args_pass_check(self): + return dict( + { + "state": "present", + "type": "storage-api", + "server_certificate": ( + "-----BEGIN CERTIFICATE-----\n" + "MIICyDCCAbACCQCgFntI3q7iADANBgkqhkiG9w0BAQsFADAmMQswCQYDVQQGEwJV\n" + "UzEXMBUGA1UEAwwOczMuZXhhbXBsZS5jb20wHhcNMjEwNDI5MDQ1NTM1WhcNMjIw\n" + "-----END CERTIFICATE-----\n" + ), + "private_key": ( + "-----BEGIN PRIVATE KEY-----\n" + "MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQD0LMcJUdWmTtxi\n" + "7U7ByldDRfyCD9W+QJ1Ygm7E9iFwvkThUCV5q+DIcgSfogoSKaQuHaImLXMZn36Q\n" + "C22n+Ah2EGrQiggyny3wDzuWf5/Qg7ogqQRqiespBFLlV4RGCREHK0y5uq8mzpIa\n" + "-----END PRIVATE KEY-----\n" + ), + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_set_na_sg_grid_storage_api_certificate(self): + return dict( + { + "state": "present", + "type": "storage-api", + "server_certificate": ( + "-----BEGIN CERTIFICATE-----\n" + "MIICyDCCAbACCQCgFntI3q7iADANBgkqhkiG9w0BAQsFADAmMQswCQYDVQQGEwJV\n" + "UzEXMBUGA1UEAwwOczMuZXhhbXBsZS5jb20wHhcNMjEwNDI5MDQ1NTM1WhcNMjIw\n" + "NDI5MDQ1NTM1WjAmMQswCQYDVQQGEwJVUzEXMBUGA1UEAwwOczMuZXhhbXBsZS5j\n" + "b20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQD0LMcJUdWmTtxi7U7B\n" + "yldDRfyCD9W+QJ1Ygm7E9iFwvkThUCV5q+DIcgSfogoSKaQuHaImLXMZn36QC22n\n" + "+Ah2EGrQiggyny3wDzuWf5/Qg7ogqQRqiespBFLlV4RGCREHK0y5uq8mzpIaQ8l8\n" + "STa7nLS7BIc6rD15BJaNWZpDVHIzhljlnhfnqwio/ZfP++lAjk4/j8pPGPEEI5Fe\n" + "WxhOtQjr7xTHeJxKHp2VKiLEvFxniL3qk4uJ3k5fJ7IqALUEPWH92brFp2IkObUA\n" + "EGsZYB4KFV7asBVhGuspYNzUQ6NqWbEUmtTjKEXcb1TA8RK+Pc2TotOrQ2E7Z+rU\n" + "gl2fAgMBAAEwDQYJKoZIhvcNAQELBQADggEBAD5PW1WI7GCfxLQjaitnXpD1MR2O\n" + "6b5csymPYwRejMsSswd8egjs+vO2pbF9TptLjqGliE9XUoI+mWpuMzzd75F0jcjq\n" + "1DhlINgAmjUJEAg0RAqce0Kn8xQF+SofMtkOH+nZm3Q9nbTJKr1H5m2TnCq3v5TH\n" + "Qo0ASf0LLGgrwUtT0IghdSttYLS89dJprZ6c5wK7qeBzxfdHxxjiaSnvByL2Ryn5\n" + "cec9lptYKoRY42hWvkQv9Wkr3DDoyNA3xPdZJr0Hpf8/mSPnt9r/AR8E32xi0SXp\n" + "hOMTDgMicbK82ycxz0yW88gm6yhrChlJrWaEsVGod3FU+lbMAnagYZ/Vwp8=\n" + "-----END CERTIFICATE-----\n" + ), + "private_key": ( + "-----BEGIN PRIVATE KEY-----\n" + "MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQD0LMcJUdWmTtxi\n" + "7U7ByldDRfyCD9W+QJ1Ygm7E9iFwvkThUCV5q+DIcgSfogoSKaQuHaImLXMZn36Q\n" + "C22n+Ah2EGrQiggyny3wDzuWf5/Qg7ogqQRqiespBFLlV4RGCREHK0y5uq8mzpIa\n" + "Q8l8STa7nLS7BIc6rD15BJaNWZpDVHIzhljlnhfnqwio/ZfP++lAjk4/j8pPGPEE\n" + "I5FeWxhOtQjr7xTHeJxKHp2VKiLEvFxniL3qk4uJ3k5fJ7IqALUEPWH92brFp2Ik\n" + "ObUAEGsZYB4KFV7asBVhGuspYNzUQ6NqWbEUmtTjKEXcb1TA8RK+Pc2TotOrQ2E7\n" + "Z+rUgl2fAgMBAAECggEAAwSSqTDTvSx4WNiqAocnsPMqfckIUUOnLjLef5yzKRuQ\n" + "6l/9NpXDP3b5S6fLDBJrrw46tNIW/BgWjl01y7+rCxqE13L9SvLgtHjbua52ITOf\n" + "l0u/fDmcKHOfOqpsPhlaloYYeqsuAwLGl4CC+wBEpuj26uDRcw4x7E78NV8IIxDf\n" + "8kUNPQXI9ox6P3isXrFkMncDfKLWOYJ5fF5zCoVZai/SS8z3FhGjAXlMkay48RX4\n" + "4vuP7TNLZ2O2pAk2aVs54tQyBn9MOxIzOg3/ZFLiKZR4pY6H5sm+bT263TdvN+A4\n" + "C8kwML5HnsCjVkTzJ/3dYc9SeUOuqvJI332GCQ9YcQKBgQD8Ev2qhS61kZ3WGO6G\n" + "DRkZ6tDyt5vCuzWQ8uAAXcAerFDWN6XtDPfXq2UVcWnoCQOUpnjslCb/NJgCetLh\n" + "mOPeJGRWyMly+YuYb4/rnbwSbUs28PO4D9B/f5YQBnBjGDLL/i2+wnXg3WZTVogf\n" + 
"WfdKziOHGSxmWd6JinI+4UkpiwKBgQD3+krkFORTsUAlTgeIy8+QzXSuclwNygcX\n" + "HAe0F96hSYHBC7+1n7nzC1lwcbkU3jLIt3A90Uwew4nr5GCu4sSVwDeWrqP2I9WH\n" + "4w0zeaFPC1QKfKGBtsIf/89pDz/7iGlcKWlEg+56VVIJn7qC2lO8qbeUCoglsSwC\n" + "vr2Qld5WvQKBgQCHM2xpHHv8GPlOTxsIPVg8RW0C8iYSITVO5GXu7FnSWdwVuc0+\n" + "QtlgDObvxF/oe4U3Ir7zLVdpRH1Pvy8Cn22AxYYn4hPiniQYg6Xu2zB3tbVE56Hh\n" + "FGJhMD59o+Z90AnWziMdENIG5NkwU9Y48pknvz7hBEiDMSqiHObAATerlwKBgQCP\n" + "5LhCY3Ees3MCcqXilkmqv93eQFP0WHAG0+gQc+1m7+2QJI4pCTdwtfw/SG5akpkr\n" + "aW6DIIkoLNVCgbIsqT/jmbdoA4z3DlIg2PrXDNQytuMcdreNOoyo3trvHr9E6SIi\n" + "LZF9BYWDjTDejsY+mgwPJPh2uinInWdpbF85oA11jQKBgQCc6U2fSwpPQowOaat/\n" + "pY5bDCKxhfwrKk3Ecye5HfhbBZ0pu6Oneiq6cNhQC0X69iFn6ogTFx5qqyMQrWH0\n" + "L+kQRkyYFLnebCzUA8364lieRzc3cN+xQEn+jX8z7eDZ8JsvVnKdc6lTjPTwN1Fj\n" + "FZtaH2L1IEiA8ZZapMb/MNNozg==\n" + "-----END PRIVATE KEY-----\n" + ), + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_delete_na_sg_grid_storage_api_certificate(self): + return dict( + { + "state": "absent", + "type": "storage-api", + "server_certificate": ( + "-----BEGIN CERTIFICATE-----\n" + "MIICyDCCAbACCQCgFntI3q7iADANBgkqhkiG9w0BAQsFADAmMQswCQYDVQQGEwJV\n" + "UzEXMBUGA1UEAwwOczMuZXhhbXBsZS5jb20wHhcNMjEwNDI5MDQ1NTM1WhcNMjIw\n" + "-----END CERTIFICATE-----\n" + ), + "private_key": ( + "-----BEGIN PRIVATE KEY-----\n" + "MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQD0LMcJUdWmTtxi\n" + "7U7ByldDRfyCD9W+QJ1Ygm7E9iFwvkThUCV5q+DIcgSfogoSKaQuHaImLXMZn36Q\n" + "C22n+Ah2EGrQiggyny3wDzuWf5/Qg7ogqQRqiespBFLlV4RGCREHK0y5uq8mzpIa\n" + "-----END PRIVATE KEY-----\n" + ), + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def test_module_fail_when_required_args_missing(self): + """ required arguments are reported as errors """ + with pytest.raises(AnsibleFailJson) as exc: + set_module_args(self.set_default_args_fail_check()) + grid_certificate_module() + print("Info: test_module_fail_when_required_args_missing: %s" % exc.value.args[0]["msg"]) + + def test_module_pass_when_required_args_present(self): + """ required arguments are reported as errors """ + with pytest.raises(AnsibleExitJson) as exc: + set_module_args(self.set_default_args_pass_check()) + grid_certificate_module() + exit_json(changed=True, msg="Induced arguments check") + print("Info: test_module_pass_when_required_args_present: %s" % exc.value.args[0]["msg"]) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_set_na_sg_grid_storage_api_certificate_pass(self, mock_request): + set_module_args(self.set_args_set_na_sg_grid_storage_api_certificate()) + my_obj = grid_certificate_module() + mock_request.side_effect = [ + SRR["cert_unset"], # get + SRR["update_good"], # post + SRR["storage_api_cert"], # get + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_set_na_sg_grid_storage_api_certificate_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_idempotent_set_na_sg_grid_storage_api_certificate_pass(self, mock_request): + set_module_args(self.set_args_set_na_sg_grid_storage_api_certificate()) + my_obj = grid_certificate_module() + mock_request.side_effect = [ + SRR["storage_api_cert"], # get + 
SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_idempotent_set_na_sg_grid_storage_api_certificate_pass: %s" % repr(exc.value.args[0])) + assert not exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_update_na_sg_grid_storage_api_certificate_pass(self, mock_request): + args = self.set_args_set_na_sg_grid_storage_api_certificate() + args["server_certificate"] = "" + args["private_key"] = "" + + set_module_args(args) + my_obj = grid_certificate_module() + mock_request.side_effect = [ + SRR["storage_api_cert"], # get + SRR["update_good"], # put + SRR["storage_api_cert_update"], # get + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_update_na_sg_grid_storage_api_certificate_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_delete_na_sg_storage_api_certificate_pass(self, mock_request): + set_module_args(self.set_args_delete_na_sg_grid_storage_api_certificate()) + my_obj = grid_certificate_module() + mock_request.side_effect = [ + SRR["storage_api_cert"], # get + SRR["delete_good"], # delete + SRR["cert_unset"], # get + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_delete_na_sg_storage_api_certificate_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] diff --git a/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_client_certificate.py b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_client_certificate.py new file mode 100644 index 000000000..d21f9da9c --- /dev/null +++ b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_client_certificate.py @@ -0,0 +1,347 @@ +# (c) 2022, NetApp, Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +""" unit tests NetApp StorageGRID Grid HA Group Ansible module: na_sg_grid_client_certificate""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +import json +import pytest +import sys + +try: + from requests import Response +except ImportError: + if sys.version_info < (2, 7): + pytestmark = pytest.mark.skip("Skipping Unit Tests on 2.6 as requests is not available") + else: + raise + +from ansible_collections.netapp.storagegrid.tests.unit.compat import unittest +from ansible_collections.netapp.storagegrid.tests.unit.compat.mock import ( + patch, + Mock, +) +from ansible.module_utils import basic +from ansible.module_utils._text import to_bytes +from ansible_collections.netapp.storagegrid.plugins.modules.na_sg_grid_client_certificate import ( + SgGridClientCertificate as grid_client_certificate_module, +) + +# REST API canned responses when mocking send_request +SRR = { + # common responses + "empty_good": ({"data": []}, None), + "not_found": ( + {"status": "error", "code": 404, "data": {}}, + {"key": "error.404"}, + ), + "end_of_sequence": (None, "Unexpected call to send_request"), + "generic_error": (None, "Expected error"), + "delete_good": (None, None), + "update_good": (None, None), + "version_114": ({"data": {"productVersion": "11.4.0-20200721.1338.d3969b3"}}, None), + "version_116": ({"data": {"productVersion": 
"11.6.0-20211120.0301.850531e"}}, None), + "client_cert_record": ( + { + "data": { + "id": "841ee2c7-3144-4c3c-8709-335462c5b05d", + "displayName": "testcert1", + "publicKey": ( + "-----BEGIN CERTIFICATE-----\n" + "MIIEOzCCAyOgAwIBAgIIFuVL2ktGT0MwDQYJKoZIhvcNAQELBQAwbzELMAkGA1UE\n" + "BhMCVVMxCzAJBgNVBAgMAkNBMRIwEAYDVQQHDAlTdW5ueXZhbGUxFDASBgNVBAoM\n" + "-----END CERTIFICATE-----\n" + ), + "allowPrometheus": True, + "expiryDate": "2024-01-01T00:00:00.000Z", + } + }, + None, + ), + "client_cert_record_updated": ( + { + "data": { + "id": "841ee2c7-3144-4c3c-8709-335462c5b05d", + "displayName": "testcert1", + "publicKey": ( + "-----BEGIN CERTIFICATE-----\n" + "MIICrDCCAZSgAwIBAgIUM3IQEKIypqPrXmoA/KmELXfFAz8wDQYJKoZIhvcNAQEL\n" + "BQAwADAeFw0yMjA5MDUyMzI3MTVaFw0yNDA5MDQyMzI3MTVaMAAwggEiMA0GCSqG\n" + "-----END CERTIFICATE-----\n" + ), + "allowPrometheus": True, + "expiryDate": "2024-01-01T00:00:00.000Z", + } + }, + None, + ), + "client_cert_record_rename": ( + { + "data": { + "id": "841ee2c7-3144-4c3c-8709-335462c5b05d", + "displayName": "testcert1-rename", + "publicKey": ( + "-----BEGIN CERTIFICATE-----\n" + "MIIEOzCCAyOgAwIBAgIIFuVL2ktGT0MwDQYJKoZIhvcNAQELBQAwbzELMAkGA1UE\n" + "BhMCVVMxCzAJBgNVBAgMAkNBMRIwEAYDVQQHDAlTdW5ueXZhbGUxFDASBgNVBAoM\n" + "-----END CERTIFICATE-----\n" + ), + "allowPrometheus": True, + "expiryDate": "2024-01-01T00:00:00.000Z", + } + }, + None, + ), + "client_certificates": ( + { + "data": [ + { + "id": "841ee2c7-3144-4c3c-8709-335462c5b05d", + "displayName": "testcert1", + "publicKey": ( + "-----BEGIN CERTIFICATE-----\n" + "MIIEOzCCAyOgAwIBAgIIFuVL2ktGT0MwDQYJKoZIhvcNAQELBQAwbzELMAkGA1UE\n" + "BhMCVVMxCzAJBgNVBAgMAkNBMRIwEAYDVQQHDAlTdW5ueXZhbGUxFDASBgNVBAoM\n" + "-----END CERTIFICATE-----\n" + ), + "allowPrometheus": True, + "expiryDate": "2024-01-01T00:00:00.000Z", + }, + { + "id": "869e1792-5505-42f1-a1fc-57a04e56f644", + "displayName": "testcert2", + "publicKey": ( + "-----BEGIN CERTIFICATE-----\n" + "MIIC9DCCAdygAwIBAgIUD7y+AyrSqRjQdYVflLJ9aTIJu3wwDQYJKoZIhvcNAQEL\n" + "BQAwFTETMBEGA1UEAwwKUHJvbWV0aGV1czAeFw0yMjA4MjQxMjQxNDhaFw0yNDA4\n" + "-----END CERTIFICATE-----\n" + ), + "allowPrometheus": True, + "expiryDate": "2024-01-01T00:00:00.000Z", + }, + ] + }, + None, + ), +} + + +def set_module_args(args): + """prepare arguments so that they will be picked up during module creation""" + args = json.dumps({"ANSIBLE_MODULE_ARGS": args}) + basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access + + +class AnsibleExitJson(Exception): + """Exception class to be raised by module.exit_json and caught by the test case""" + + pass + + +class AnsibleFailJson(Exception): + """Exception class to be raised by module.fail_json and caught by the test case""" + + pass + + +def exit_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over exit_json; package return data into an exception""" + if "changed" not in kwargs: + kwargs["changed"] = False + raise AnsibleExitJson(kwargs) + + +def fail_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over fail_json; package return data into an exception""" + kwargs["failed"] = True + raise AnsibleFailJson(kwargs) + + +class TestMyModule(unittest.TestCase): + """a group of related Unit Tests""" + + def setUp(self): + self.mock_module_helper = patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json) + self.mock_module_helper.start() + self.addCleanup(self.mock_module_helper.stop) + + def set_default_args_fail_check(self): + return dict( + { + 
"allow_prometheus": True, + "public_key": ( + "-----BEGIN CERTIFICATE-----\n" + "MIIEOzCCAyOgAwIBAgIIFuVL2ktGT0MwDQYJKoZIhvcNAQELBQAwbzELMAkGA1UE\n" + "BhMCVVMxCzAJBgNVBAgMAkNBMRIwEAYDVQQHDAlTdW5ueXZhbGUxFDASBgNVBAoM\n" + "-----END CERTIFICATE-----\n" + ), + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_default_args_pass_check(self): + return dict( + { + "state": "present", + "display_name": "testcert1", + "allow_prometheus": True, + "public_key": ( + "-----BEGIN CERTIFICATE-----\n" + "MIIEOzCCAyOgAwIBAgIIFuVL2ktGT0MwDQYJKoZIhvcNAQELBQAwbzELMAkGA1UE\n" + "BhMCVVMxCzAJBgNVBAgMAkNBMRIwEAYDVQQHDAlTdW5ueXZhbGUxFDASBgNVBAoM\n" + "-----END CERTIFICATE-----\n" + ), + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_create_na_sg_grid_client_certificate(self): + return dict( + { + "state": "present", + "display_name": "testcert1", + "allow_prometheus": True, + "public_key": ( + "-----BEGIN CERTIFICATE-----\n" + "MIIEOzCCAyOgAwIBAgIIFuVL2ktGT0MwDQYJKoZIhvcNAQELBQAwbzELMAkGA1UE\n" + "BhMCVVMxCzAJBgNVBAgMAkNBMRIwEAYDVQQHDAlTdW5ueXZhbGUxFDASBgNVBAoM\n" + "-----END CERTIFICATE-----\n" + ), + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_delete_na_sg_grid_client_certificate(self): + return dict( + { + "state": "absent", + "display_name": "testcert1", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_module_fail_when_required_args_missing(self, mock_request): + """required arguments are reported as errors""" + with pytest.raises(AnsibleFailJson) as exc: + set_module_args(self.set_default_args_fail_check()) + grid_client_certificate_module() + print("Info: test_module_fail_when_required_args_missing: %s" % exc.value.args[0]["msg"]) + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_module_pass_when_required_args_present(self, mock_request): + """required arguments are reported as errors""" + with pytest.raises(AnsibleExitJson) as exc: + set_module_args(self.set_default_args_pass_check()) + grid_client_certificate_module() + exit_json(changed=True, msg="Induced arguments check") + print("Info: test_module_pass_when_required_args_present: %s" % exc.value.args[0]["msg"]) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_create_na_sg_grid_client_certificate_pass(self, mock_request): + set_module_args(self.set_args_create_na_sg_grid_client_certificate()) + mock_request.side_effect = [ + SRR["empty_good"], # get + SRR["client_cert_record"], # post + SRR["end_of_sequence"], + ] + my_obj = grid_client_certificate_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_create_na_sg_grid_client_certificate_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_idempotent_create_na_sg_grid_client_certificate_pass(self, mock_request): + args = self.set_args_create_na_sg_grid_client_certificate() + set_module_args(args) + mock_request.side_effect = [ + 
SRR["client_certificates"], # get + SRR["client_cert_record"], # get + SRR["end_of_sequence"], + ] + my_obj = grid_client_certificate_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_idempotent_create_na_sg_grid_client_certificate_pass: %s" % repr(exc.value.args[0])) + assert not exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_update_na_sg_grid_client_certificate_pass(self, mock_request): + args = self.set_args_create_na_sg_grid_client_certificate() + args["public_key"] = ( + "-----BEGIN CERTIFICATE-----\n" + "MIICrDCCAZSgAwIBAgIUM3IQEKIypqPrXmoA/KmELXfFAz8wDQYJKoZIhvcNAQEL\n" + "BQAwADAeFw0yMjA5MDUyMzI3MTVaFw0yNDA5MDQyMzI3MTVaMAAwggEiMA0GCSqG\n" + "-----END CERTIFICATE-----\n", + ) + set_module_args(args) + mock_request.side_effect = [ + SRR["client_certificates"], # get + SRR["client_cert_record"], # get + SRR["client_cert_record_updated"], # put + SRR["end_of_sequence"], + ] + my_obj = grid_client_certificate_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_update_na_sg_grid_client_certificate_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_rename_na_sg_grid_client_certificate_pass(self, mock_request): + args = self.set_args_create_na_sg_grid_client_certificate() + args["certificate_id"] = "841ee2c7-3144-4c3c-8709-335462c5b05d" + args["display_name"] = "testcert1-rename" + set_module_args(args) + mock_request.side_effect = [ + SRR["client_cert_record"], # get + SRR["client_cert_record_rename"], # put + SRR["end_of_sequence"], + ] + my_obj = grid_client_certificate_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_rename_na_sg_grid_client_certificate_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_delete_na_sg_grid_client_certificate_pass(self, mock_request): + args = self.set_args_delete_na_sg_grid_client_certificate() + set_module_args(args) + mock_request.side_effect = [ + SRR["client_certificates"], # get + SRR["client_cert_record"], # get + SRR["delete_good"], # delete + SRR["end_of_sequence"], + ] + my_obj = grid_client_certificate_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_delete_na_sg_grid_client_certificate_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_update_na_sg_grid_client_certificate_bad_certificate_id_fail(self, mock_request): + args = self.set_args_create_na_sg_grid_client_certificate() + args["certificate_id"] = "ffffffff-ffff-aaaa-aaaa-000000000000" + args["display_name"] = "Bad ID" + set_module_args(args) + mock_request.side_effect = [ + SRR["not_found"], # get + ] + with pytest.raises(AnsibleFailJson) as exc: + my_obj = grid_client_certificate_module() + my_obj.apply() + print("Info: test_update_na_sg_grid_client_certificate_bad_certificate_id_fail: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["failed"] diff --git a/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_dns.py 
b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_dns.py new file mode 100644 index 000000000..42abde9c8 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_dns.py @@ -0,0 +1,241 @@ +# (c) 2020, NetApp, Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +""" unit tests NetApp StorageGRID DNS Ansible module: na_sg_grid_dns""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +import json +import pytest +import sys +try: + from requests import Response +except ImportError: + if sys.version_info < (2, 7): + pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available') + else: + raise + +from ansible_collections.netapp.storagegrid.tests.unit.compat import unittest +from ansible_collections.netapp.storagegrid.tests.unit.compat.mock import ( + patch, + Mock, +) +from ansible.module_utils import basic +from ansible.module_utils._text import to_bytes +from ansible_collections.netapp.storagegrid.plugins.modules.na_sg_grid_dns import ( + SgGridDns as grid_dns_module, +) + +# REST API canned responses when mocking send_request +SRR = { + # common responses + "empty_good": ({"data": []}, None), + "not_found": ( + {"status": "error", "code": 404, "data": {}}, + {"key": "error.404"}, + ), + "end_of_sequence": (None, "Unexpected call to send_request"), + "generic_error": (None, "Expected error"), + "delete_good": ({"code": 204}, None), + "no_dns_servers": ({"data": []}, None,), + "dns_servers": ({"data": ["10.11.12.5", "10.11.12.6"]}, None,), + "add_dns_servers": ( + {"data": ["10.11.12.5", "10.11.12.6", "10.11.12.7"]}, + None, + ), + "remove_dns_servers": ({"data": ["10.11.12.5"]}, None,), +} + + +def set_module_args(args): + """prepare arguments so that they will be picked up during module creation""" + args = json.dumps({"ANSIBLE_MODULE_ARGS": args}) + basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access + + +class AnsibleExitJson(Exception): + """Exception class to be raised by module.exit_json and caught by the test case""" + + pass + + +class AnsibleFailJson(Exception): + """Exception class to be raised by module.fail_json and caught by the test case""" + + pass + + +def exit_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over exit_json; package return data into an exception""" + if "changed" not in kwargs: + kwargs["changed"] = False + raise AnsibleExitJson(kwargs) + + +def fail_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over fail_json; package return data into an exception""" + kwargs["failed"] = True + raise AnsibleFailJson(kwargs) + + +class TestMyModule(unittest.TestCase): + """ a group of related Unit Tests """ + + def setUp(self): + self.mock_module_helper = patch.multiple( + basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json + ) + self.mock_module_helper.start() + self.addCleanup(self.mock_module_helper.stop) + + def set_default_args_fail_check(self): + return dict( + { + "dns_servers": "10.11.12.8", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_default_args_pass_check(self): + return dict( + { + "state": "present", + "dns_servers": "10.11.12.8", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_set_na_sg_grid_dns_servers(self): + return dict( + { + 
"state": "present", + "dns_servers": "10.11.12.5,10.11.12.6", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_add_na_sg_grid_dns_server(self): + return dict( + { + "state": "present", + "dns_servers": "10.11.12.5,10.11.12.6,10.11.12.7", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_remove_na_sg_grid_dns_server(self): + return dict( + { + "state": "present", + "dns_servers": "10.11.12.5", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def test_module_fail_when_required_args_missing(self): + """ required arguments are reported as errors """ + with pytest.raises(AnsibleFailJson) as exc: + set_module_args(self.set_default_args_fail_check()) + grid_dns_module() + print( + "Info: test_module_fail_when_required_args_missing: %s" + % exc.value.args[0]["msg"] + ) + + def test_module_fail_when_required_args_present(self): + """ required arguments are reported as errors """ + with pytest.raises(AnsibleExitJson) as exc: + set_module_args(self.set_default_args_pass_check()) + grid_dns_module() + exit_json(changed=True, msg="Induced arguments check") + print( + "Info: test_module_fail_when_required_args_present: %s" + % exc.value.args[0]["msg"] + ) + assert exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_set_na_sg_grid_dns_servers_pass(self, mock_request): + set_module_args(self.set_args_set_na_sg_grid_dns_servers()) + my_obj = grid_dns_module() + mock_request.side_effect = [ + SRR["no_dns_servers"], # get + SRR["dns_servers"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_set_na_sg_grid_dns_servers_pass: %s" + % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_idempotent_set_na_sg_grid_dns_servers_pass(self, mock_request): + set_module_args(self.set_args_set_na_sg_grid_dns_servers()) + my_obj = grid_dns_module() + mock_request.side_effect = [ + SRR["dns_servers"], # get + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_idempotent_set_na_sg_grid_dns_servers_pass: %s" + % repr(exc.value.args[0]) + ) + assert not exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_add_na_sg_grid_dns_servers_pass(self, mock_request): + set_module_args(self.set_args_add_na_sg_grid_dns_server()) + my_obj = grid_dns_module() + mock_request.side_effect = [ + SRR["dns_servers"], # get + SRR["add_dns_servers"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_add_na_sg_grid_dns_servers_pass: %s" + % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_remove_na_sg_grid_dns_servers_pass(self, mock_request): + set_module_args(self.set_args_remove_na_sg_grid_dns_server()) + my_obj = grid_dns_module() + mock_request.side_effect = [ + SRR["dns_servers"], # get + SRR["remove_dns_servers"], # post + 
SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_remove_na_sg_grid_dns_servers_pass: %s" + % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] diff --git a/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_gateway.py b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_gateway.py new file mode 100644 index 000000000..0a5a7e386 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_gateway.py @@ -0,0 +1,693 @@ +# (c) 2020, NetApp, Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +""" unit tests NetApp StorageGRID Grid Load Balancer Endpoint Ansible module: na_sg_grid_gateway""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +import json +import pytest +import sys + +# try: +# from requests import Response +# except ImportError: +# if sys.version_info < (2, 7): +# pytestmark = pytest.mark.skip("Skipping Unit Tests on 2.6 as requests is not available") +# else: +# raise + +from ansible_collections.netapp.storagegrid.tests.unit.compat import unittest +from ansible_collections.netapp.storagegrid.tests.unit.compat.mock import ( + patch, + Mock, +) +from ansible.module_utils import basic +from ansible.module_utils._text import to_bytes +from ansible_collections.netapp.storagegrid.plugins.modules.na_sg_grid_gateway import ( + SgGridGateway as grid_gateway_module, +) + +# REST API canned responses when mocking send_request +SRR = { + # common responses + "empty_good": ({"data": []}, None), + "not_found": ( + {"status": "error", "code": 404, "data": {}}, + {"key": "error.404"}, + ), + "end_of_sequence": (None, "Unexpected call to send_request"), + "generic_error": (None, "Expected error"), + "delete_good": (None, None), + "update_good": (None, None), + "version_114": ({"data": {"productVersion": "11.4.0-20200721.1338.d3969b3"}}, None), + "version_116": ({"data": {"productVersion": "11.6.0-20211120.0301.850531e"}}, None), + "gateway_record": ( + { + "data": { + "id": "e777d415-057f-4d37-9b0c-6d132d872ea0", + "displayName": "ansibletest-secure", + "enableIPv4": True, + "enableIPv6": True, + "port": 10443, + "secure": True, + "accountId": "0", + } + }, + None, + ), + "gateway_record_ha_group_binding": ( + { + "data": { + "id": "e777d415-057f-4d37-9b0c-6d132d872ea0", + "displayName": "ansibletest-secure", + "enableIPv4": True, + "enableIPv6": True, + "port": 10443, + "secure": True, + "accountId": "0", + "pinTargets": {"haGroups": ["c08e6dca-038d-4a05-9499-6fbd1e6a4c3e"], "nodeInterfaces": []}, + } + }, + None, + ), + "gateway_record_node_interface_binding": ( + { + "data": { + "id": "e777d415-057f-4d37-9b0c-6d132d872ea0", + "displayName": "ansibletest-secure", + "enableIPv4": True, + "enableIPv6": True, + "port": 10443, + "secure": True, + "accountId": "0", + "pinTargets": { + "haGroups": [], + "nodeInterfaces": [ + {"interface": "eth2", "nodeId": "0b1866ed-d6e7-41b4-815f-bf867348b76b"}, + {"interface": "eth2", "nodeId": "970ad050-b68b-4aae-a94d-aef73f3095c4"}, + ], + }, + } + }, + None, + ), + "gateway_record_rename": ( + { + "data": { + "id": "e777d415-057f-4d37-9b0c-6d132d872ea0", + "displayName": "ansibletest-rename", + "enableIPv4": True, + "enableIPv6": True, + "port": 10443, + "secure": True, + "accountId": "0", + "pinTargets": {"haGroups": ["c08e6dca-038d-4a05-9499-6fbd1e6a4c3e"], "nodeInterfaces": []}, + } + }, 
+ None, + ), + "ha_groups": ( + { + "data": [ + { + "id": "c08e6dca-038d-4a05-9499-6fbd1e6a4c3e", + "name": "site1_primary", + "description": "test ha group", + "virtualIps": ["10.193.174.117"], + "interfaces": [ + { + "nodeId": "0b1866ed-d6e7-41b4-815f-bf867348b76b", + "nodeName": "SITE1-ADM1", + "interface": "eth2", + "preferredMaster": True, + }, + { + "nodeId": "970ad050-b68b-4aae-a94d-aef73f3095c4", + "nodeName": "SITE2-ADM1", + "interface": "eth2", + }, + ], + "gatewayCidr": "192.168.14.1/24", + }, + { + "id": "da9ac524-9a16-4be0-9d6e-ec9b22218e75", + "name": "site1_gw", + "description": "another test ha group", + "virtualIps": ["10.193.204.200"], + "interfaces": [ + { + "nodeId": "7bb5bf05-a04c-4344-8abd-08c5c4048666", + "nodeName": "SITE1-GW1", + "interface": "eth0", + "preferredMaster": True, + }, + ], + "gatewayCidr": "192.168.14.1/24", + } + ] + }, + None, + ), + "node_health": ( + { + "data": [ + { + "id": "0b1866ed-d6e7-41b4-815f-bf867348b76b", + "isPrimaryAdmin": True, + "name": "SITE1-ADM1", + "siteId": "ae56d06d-bd83-46bd-adce-77146b1d94bd", + "siteName": "SITE1", + "severity": "normal", + "state": "connected", + "type": "adminNode", + }, + { + "id": "970ad050-b68b-4aae-a94d-aef73f3095c4", + "isPrimaryAdmin": False, + "name": "SITE2-ADM1", + "siteId": "7c24002e-5157-43e9-83e5-02db9b265b02", + "siteName": "SITE2", + "severity": "normal", + "state": "connected", + "type": "adminNode", + }, + ] + }, + None, + ), + "present_gateways": ( + { + "data": [ + { + "id": "e777d415-057f-4d37-9b0c-6d132d872ea0", + "displayName": "ansibletest-secure", + "enableIPv4": True, + "enableIPv6": True, + "port": 10443, + "secure": True, + "accountId": "0", + } + ] + }, + None, + ), + "present_gateways_with_binding": ( + { + "data": [ + { + "id": "e777d415-057f-4d37-9b0c-6d132d872ea0", + "displayName": "ansibletest-secure", + "enableIPv4": True, + "enableIPv6": True, + "port": 10443, + "secure": True, + "accountId": "0", + "pinTargets": {"haGroups": [], "nodeInterfaces": []}, + } + ] + }, + None, + ), + "server_config": ( + { + "data": { + "defaultServiceType": "s3", + "certSource": "plaintext", + "plaintextCertData": { + "serverCertificateEncoded": ( + "-----BEGIN CERTIFICATE-----\n" + "MIICyDCCAbACCQCgFntI3q7iADANBgkqhkiG9w0BAQsFADAmMQswCQYDVQQGEwJV\n" + "UzEXMBUGA1UEAwwOczMuZXhhbXBsZS5jb20wHhcNMjEwNDI5MDQ1NTM1WhcNMjIw\n" + "NDI5MDQ1NTM1WjAmMQswCQYDVQQGEwJVUzEXMBUGA1UEAwwOczMuZXhhbXBsZS5j\n" + "b20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQD0LMcJUdWmTtxi7U7B\n" + "yldDRfyCD9W+QJ1Ygm7E9iFwvkThUCV5q+DIcgSfogoSKaQuHaImLXMZn36QC22n\n" + "+Ah2EGrQiggyny3wDzuWf5/Qg7ogqQRqiespBFLlV4RGCREHK0y5uq8mzpIaQ8l8\n" + "STa7nLS7BIc6rD15BJaNWZpDVHIzhljlnhfnqwio/ZfP++lAjk4/j8pPGPEEI5Fe\n" + "WxhOtQjr7xTHeJxKHp2VKiLEvFxniL3qk4uJ3k5fJ7IqALUEPWH92brFp2IkObUA\n" + "EGsZYB4KFV7asBVhGuspYNzUQ6NqWbEUmtTjKEXcb1TA8RK+Pc2TotOrQ2E7Z+rU\n" + "gl2fAgMBAAEwDQYJKoZIhvcNAQELBQADggEBAD5PW1WI7GCfxLQjaitnXpD1MR2O\n" + "6b5csymPYwRejMsSswd8egjs+vO2pbF9TptLjqGliE9XUoI+mWpuMzzd75F0jcjq\n" + "1DhlINgAmjUJEAg0RAqce0Kn8xQF+SofMtkOH+nZm3Q9nbTJKr1H5m2TnCq3v5TH\n" + "Qo0ASf0LLGgrwUtT0IghdSttYLS89dJprZ6c5wK7qeBzxfdHxxjiaSnvByL2Ryn5\n" + "cec9lptYKoRY42hWvkQv9Wkr3DDoyNA3xPdZJr0Hpf8/mSPnt9r/AR8E32xi0SXp\n" + "hOMTDgMicbK82ycxz0yW88gm6yhrChlJrWaEsVGod3FU+lbMAnagYZ/Vwp8=\n" + "-----END CERTIFICATE-----\n" + ), + "caBundleEncoded": None, + "metadata": { + "serverCertificateDetails": { + "subject": "/CN=test", + "issuer": "/CN=test", + "serialNumber": "32:6F:20:EB:0E:90:60:7E:07:8F:6E:CC:02:2D:7C:37:3D:AB:42:7E", + "notBefore": 
"2021-09-27T12:39:17.000Z", + "notAfter": "2023-09-27T12:39:17.000Z", + "fingerPrints": { + "SHA-1": "A4:F9:74:BE:E8:A2:46:C2:E1:23:DE:8F:A8:1B:F1:C4:91:51:C5:56", + "SHA-256": "7B:65:7F:CD:35:8F:33:1C:C8:2D:F0:C1:9F:58:2F:2B:3B:78:44:95:4E:23:8C:1B:2B:91:6C:94:B0:71:64:E8", + }, + "subjectAltNames": ["DNS:*.test.com"], + } + }, + }, + } + }, + None, + ), + "server_config_cert_update": ( + { + "data": { + "defaultServiceType": "s3", + "certSource": "plaintext", + "plaintextCertData": { + "serverCertificateEncoded": ( + "-----BEGIN CERTIFICATE-----\n" + "MIICzjCCAbYCCQDZVi1OT89SAjANBgkqhkiG9w0BAQsFADApMQswCQYDVQQGEwJV\n" + "UzEaMBgGA1UEAwwRczMubmV3ZXhhbXBsZS5jb20wHhcNMjEwNDI5MDQ1NzIxWhcN\n" + "MjIwNDI5MDQ1NzIxWjApMQswCQYDVQQGEwJVUzEaMBgGA1UEAwwRczMubmV3ZXhh\n" + "bXBsZS5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCmg37q2sjZ\n" + "k+HsXtai3PSMtGUiqij04JtG9ahMqIejuxy5sDCWnigh//NjdK+wPYc2VfYd6KFA\n" + "Uk9rP84M7sqdqGzIzmyEu7INyCnlbxcXlST6UZDsZnVU7Gk2GvUzk2OoO5N+G0oI\n" + "Lfc/3eKTx9j9BguOaWUy+ni+Te8j6EwK6HolGRBjLYqf1SYFBzaoVpy7pmzaFZ4R\n" + "10jFSxHbotIZ+kR8pPE5jGkP8OjOfrpbhEgmffpeq2MSCMRuhRtRiVp4ULwkMTRN\n" + "tFj89mu1gl9T3lYM/LO1SmBv3il0mNmrTL+99UJ4s2eL0zr/uHAVYJcVqFgWP7X8\n" + "WnOk+d86b0TXAgMBAAEwDQYJKoZIhvcNAQELBQADggEBAFmGV3IOuNYeM3LQxls+\n" + "/CNHznvIqvoiJOWq0S7LFy1eO7PVzCl3l/fDKjGMt2lGXeU89YKdFVPqsainNEFT\n" + "cNEWlezVut+/CWQpBXujyBqPLkYbzyGsakMImDb+MrSkBO5MCjlt38vppm5a97fB\n" + "9o/wM31e+N6gJLiHWs0XB9TK6bY9CvcutcGUOH/oxH1TEBgrJ3SoS7/HmZJSaCQA\n" + "hjZappzuEpGVXT8YDlb67PzUoE2rDWjdSFRXCk/0U6VR0xNgnN1WtfHaypU71DrB\n" + "zxbDaOIZoDp5G4OgjkFxoCoSWLant+LsqEwclIbCFgEvJPE8855UThelTHmIfivP\n" + "veI=\n-----END CERTIFICATE-----\n" + ), + "caBundleEncoded": None, + "metadata": { + "serverCertificateDetails": { + "subject": "/CN=test", + "issuer": "/CN=test", + "serialNumber": "32:6F:20:EB:0E:90:60:7E:07:8F:6E:CC:02:2D:7C:37:3D:AB:42:7E", + "notBefore": "2021-09-27T12:39:17.000Z", + "notAfter": "2023-09-27T12:39:17.000Z", + "fingerPrints": { + "SHA-1": "F2:C2:6F:A8:45:DA:86:09:91:F5:04:B0:25:43:B7:FC:FA:C1:43:F8", + "SHA-256": "99:3E:21:1A:03:25:69:C8:0A:D5:FE:E3:FB:6E:51:03:BD:A7:0E:88:6B:53:06:04:92:3B:34:17:68:43:F7:2F", + }, + "subjectAltNames": ["DNS:*.test.com"], + } + }, + }, + } + }, + None, + ), +} + + +def set_module_args(args): + """prepare arguments so that they will be picked up during module creation""" + args = json.dumps({"ANSIBLE_MODULE_ARGS": args}) + basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access + + +class AnsibleExitJson(Exception): + """Exception class to be raised by module.exit_json and caught by the test case""" + + pass + + +class AnsibleFailJson(Exception): + """Exception class to be raised by module.fail_json and caught by the test case""" + + pass + + +def exit_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over exit_json; package return data into an exception""" + if "changed" not in kwargs: + kwargs["changed"] = False + raise AnsibleExitJson(kwargs) + + +def fail_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over fail_json; package return data into an exception""" + kwargs["failed"] = True + raise AnsibleFailJson(kwargs) + + +class TestMyModule(unittest.TestCase): + """a group of related Unit Tests""" + + def setUp(self): + self.mock_module_helper = patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json) + self.mock_module_helper.start() + self.addCleanup(self.mock_module_helper.stop) + + def set_default_args_fail_check(self): + return dict( + { + 
"display_name": "ansibletest-secure", + "default_service_type": "s3", + "server_certificate": ( + "-----BEGIN CERTIFICATE-----\n" + "MIICyDCCAbACCQCgFntI3q7iADANBgkqhkiG9w0BAQsFADAmMQswCQYDVQQGEwJV\n" + "UzEXMBUGA1UEAwwOczMuZXhhbXBsZS5jb20wHhcNMjEwNDI5MDQ1NTM1WhcNMjIw\n" + "-----END CERTIFICATE-----\n" + ), + "private_key": ( + "-----BEGIN PRIVATE KEY-----\n" + "MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQD0LMcJUdWmTtxi\n" + "7U7ByldDRfyCD9W+QJ1Ygm7E9iFwvkThUCV5q+DIcgSfogoSKaQuHaImLXMZn36Q\n" + "C22n+Ah2EGrQiggyny3wDzuWf5/Qg7ogqQRqiespBFLlV4RGCREHK0y5uq8mzpIa\n" + "-----END PRIVATE KEY-----\n" + ), + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_default_args_pass_check(self): + return dict( + { + "state": "present", + "display_name": "ansibletest-secure", + "default_service_type": "s3", + "port": 10443, + "server_certificate": ( + "-----BEGIN CERTIFICATE-----\n" + "MIICyDCCAbACCQCgFntI3q7iADANBgkqhkiG9w0BAQsFADAmMQswCQYDVQQGEwJV\n" + "UzEXMBUGA1UEAwwOczMuZXhhbXBsZS5jb20wHhcNMjEwNDI5MDQ1NTM1WhcNMjIw\n" + "-----END CERTIFICATE-----\n" + ), + "private_key": ( + "-----BEGIN PRIVATE KEY-----\n" + "MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQD0LMcJUdWmTtxi\n" + "7U7ByldDRfyCD9W+QJ1Ygm7E9iFwvkThUCV5q+DIcgSfogoSKaQuHaImLXMZn36Q\n" + "C22n+Ah2EGrQiggyny3wDzuWf5/Qg7ogqQRqiespBFLlV4RGCREHK0y5uq8mzpIa\n" + "-----END PRIVATE KEY-----\n" + ), + "api_url": "https://gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_create_na_sg_grid_gateway_port(self): + return dict( + { + "state": "present", + "display_name": "ansibletest-secure", + "default_service_type": "s3", + "port": 10443, + "server_certificate": ( + "-----BEGIN CERTIFICATE-----\n" + "MIICyDCCAbACCQCgFntI3q7iADANBgkqhkiG9w0BAQsFADAmMQswCQYDVQQGEwJV\n" + "UzEXMBUGA1UEAwwOczMuZXhhbXBsZS5jb20wHhcNMjEwNDI5MDQ1NTM1WhcNMjIw\n" + "NDI5MDQ1NTM1WjAmMQswCQYDVQQGEwJVUzEXMBUGA1UEAwwOczMuZXhhbXBsZS5j\n" + "b20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQD0LMcJUdWmTtxi7U7B\n" + "yldDRfyCD9W+QJ1Ygm7E9iFwvkThUCV5q+DIcgSfogoSKaQuHaImLXMZn36QC22n\n" + "+Ah2EGrQiggyny3wDzuWf5/Qg7ogqQRqiespBFLlV4RGCREHK0y5uq8mzpIaQ8l8\n" + "STa7nLS7BIc6rD15BJaNWZpDVHIzhljlnhfnqwio/ZfP++lAjk4/j8pPGPEEI5Fe\n" + "WxhOtQjr7xTHeJxKHp2VKiLEvFxniL3qk4uJ3k5fJ7IqALUEPWH92brFp2IkObUA\n" + "EGsZYB4KFV7asBVhGuspYNzUQ6NqWbEUmtTjKEXcb1TA8RK+Pc2TotOrQ2E7Z+rU\n" + "gl2fAgMBAAEwDQYJKoZIhvcNAQELBQADggEBAD5PW1WI7GCfxLQjaitnXpD1MR2O\n" + "6b5csymPYwRejMsSswd8egjs+vO2pbF9TptLjqGliE9XUoI+mWpuMzzd75F0jcjq\n" + "1DhlINgAmjUJEAg0RAqce0Kn8xQF+SofMtkOH+nZm3Q9nbTJKr1H5m2TnCq3v5TH\n" + "Qo0ASf0LLGgrwUtT0IghdSttYLS89dJprZ6c5wK7qeBzxfdHxxjiaSnvByL2Ryn5\n" + "cec9lptYKoRY42hWvkQv9Wkr3DDoyNA3xPdZJr0Hpf8/mSPnt9r/AR8E32xi0SXp\n" + "hOMTDgMicbK82ycxz0yW88gm6yhrChlJrWaEsVGod3FU+lbMAnagYZ/Vwp8=\n" + "-----END CERTIFICATE-----\n" + ), + "private_key": ( + "-----BEGIN PRIVATE KEY-----\n" + "MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQD0LMcJUdWmTtxi\n" + "7U7ByldDRfyCD9W+QJ1Ygm7E9iFwvkThUCV5q+DIcgSfogoSKaQuHaImLXMZn36Q\n" + "C22n+Ah2EGrQiggyny3wDzuWf5/Qg7ogqQRqiespBFLlV4RGCREHK0y5uq8mzpIa\n" + "Q8l8STa7nLS7BIc6rD15BJaNWZpDVHIzhljlnhfnqwio/ZfP++lAjk4/j8pPGPEE\n" + "I5FeWxhOtQjr7xTHeJxKHp2VKiLEvFxniL3qk4uJ3k5fJ7IqALUEPWH92brFp2Ik\n" + "ObUAEGsZYB4KFV7asBVhGuspYNzUQ6NqWbEUmtTjKEXcb1TA8RK+Pc2TotOrQ2E7\n" + "Z+rUgl2fAgMBAAECggEAAwSSqTDTvSx4WNiqAocnsPMqfckIUUOnLjLef5yzKRuQ\n" + "6l/9NpXDP3b5S6fLDBJrrw46tNIW/BgWjl01y7+rCxqE13L9SvLgtHjbua52ITOf\n" + 
"l0u/fDmcKHOfOqpsPhlaloYYeqsuAwLGl4CC+wBEpuj26uDRcw4x7E78NV8IIxDf\n" + "8kUNPQXI9ox6P3isXrFkMncDfKLWOYJ5fF5zCoVZai/SS8z3FhGjAXlMkay48RX4\n" + "4vuP7TNLZ2O2pAk2aVs54tQyBn9MOxIzOg3/ZFLiKZR4pY6H5sm+bT263TdvN+A4\n" + "C8kwML5HnsCjVkTzJ/3dYc9SeUOuqvJI332GCQ9YcQKBgQD8Ev2qhS61kZ3WGO6G\n" + "DRkZ6tDyt5vCuzWQ8uAAXcAerFDWN6XtDPfXq2UVcWnoCQOUpnjslCb/NJgCetLh\n" + "mOPeJGRWyMly+YuYb4/rnbwSbUs28PO4D9B/f5YQBnBjGDLL/i2+wnXg3WZTVogf\n" + "WfdKziOHGSxmWd6JinI+4UkpiwKBgQD3+krkFORTsUAlTgeIy8+QzXSuclwNygcX\n" + "HAe0F96hSYHBC7+1n7nzC1lwcbkU3jLIt3A90Uwew4nr5GCu4sSVwDeWrqP2I9WH\n" + "4w0zeaFPC1QKfKGBtsIf/89pDz/7iGlcKWlEg+56VVIJn7qC2lO8qbeUCoglsSwC\n" + "vr2Qld5WvQKBgQCHM2xpHHv8GPlOTxsIPVg8RW0C8iYSITVO5GXu7FnSWdwVuc0+\n" + "QtlgDObvxF/oe4U3Ir7zLVdpRH1Pvy8Cn22AxYYn4hPiniQYg6Xu2zB3tbVE56Hh\n" + "FGJhMD59o+Z90AnWziMdENIG5NkwU9Y48pknvz7hBEiDMSqiHObAATerlwKBgQCP\n" + "5LhCY3Ees3MCcqXilkmqv93eQFP0WHAG0+gQc+1m7+2QJI4pCTdwtfw/SG5akpkr\n" + "aW6DIIkoLNVCgbIsqT/jmbdoA4z3DlIg2PrXDNQytuMcdreNOoyo3trvHr9E6SIi\n" + "LZF9BYWDjTDejsY+mgwPJPh2uinInWdpbF85oA11jQKBgQCc6U2fSwpPQowOaat/\n" + "pY5bDCKxhfwrKk3Ecye5HfhbBZ0pu6Oneiq6cNhQC0X69iFn6ogTFx5qqyMQrWH0\n" + "L+kQRkyYFLnebCzUA8364lieRzc3cN+xQEn+jX8z7eDZ8JsvVnKdc6lTjPTwN1Fj\n" + "FZtaH2L1IEiA8ZZapMb/MNNozg==\n" + "-----END PRIVATE KEY-----\n" + ), + "api_url": "https://gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_delete_na_sg_grid_gateway_port(self): + return dict( + { + "state": "absent", + "display_name": "ansibletest-secure", + "default_service_type": "s3", + "port": 10443, + "server_certificate": ( + "-----BEGIN CERTIFICATE-----\n" + "MIICyDCCAbACCQCgFntI3q7iADANBgkqhkiG9w0BAQsFADAmMQswCQYDVQQGEwJV\n" + "UzEXMBUGA1UEAwwOczMuZXhhbXBsZS5jb20wHhcNMjEwNDI5MDQ1NTM1WhcNMjIw\n" + "-----END CERTIFICATE-----\n" + ), + "private_key": ( + "-----BEGIN PRIVATE KEY-----\n" + "MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQD0LMcJUdWmTtxi\n" + "7U7ByldDRfyCD9W+QJ1Ygm7E9iFwvkThUCV5q+DIcgSfogoSKaQuHaImLXMZn36Q\n" + "C22n+Ah2EGrQiggyny3wDzuWf5/Qg7ogqQRqiespBFLlV4RGCREHK0y5uq8mzpIa\n" + "-----END PRIVATE KEY-----\n" + ), + "api_url": "https://gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_module_fail_when_required_args_missing(self, mock_request): + """required arguments are reported as errors""" + mock_request.side_effect = [ + SRR["version_114"], + ] + with pytest.raises(AnsibleFailJson) as exc: + set_module_args(self.set_default_args_fail_check()) + grid_gateway_module() + print("Info: test_module_fail_when_required_args_missing: %s" % exc.value.args[0]["msg"]) + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_module_pass_when_required_args_present(self, mock_request): + """required arguments are reported as errors""" + mock_request.side_effect = [ + SRR["version_114"], + ] + with pytest.raises(AnsibleExitJson) as exc: + set_module_args(self.set_default_args_pass_check()) + grid_gateway_module() + exit_json(changed=True, msg="Induced arguments check") + print("Info: test_module_pass_when_required_args_present: %s" % exc.value.args[0]["msg"]) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_create_na_sg_grid_gateway_port_pass(self, mock_request): + 
set_module_args(self.set_args_create_na_sg_grid_gateway_port()) + mock_request.side_effect = [ + SRR["version_114"], # get + SRR["empty_good"], # get + SRR["gateway_record"], # post + SRR["server_config"], # post + SRR["end_of_sequence"], + ] + my_obj = grid_gateway_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_create_na_sg_grid_gateway_port_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_idempotent_create_na_sg_grid_gateway_port_pass(self, mock_request): + args = self.set_args_create_na_sg_grid_gateway_port() + del args["private_key"] + set_module_args(args) + mock_request.side_effect = [ + SRR["version_114"], # get + SRR["present_gateways"], # get + SRR["server_config"], # get + SRR["end_of_sequence"], + ] + my_obj = grid_gateway_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_idempotent_create_na_sg_grid_gateway_port_pass: %s" % repr(exc.value.args[0])) + assert not exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_update_na_sg_grid_gateway_certificate_pass(self, mock_request): + args = self.set_args_create_na_sg_grid_gateway_port() + args["server_certificate"] = "-----BEGIN CERTIFICATE-----\nABCDEFGABCD\n-----END CERTIFICATE-----\n" + args["private_key"] = "-----BEGIN PRIVATE KEY-----\nABCDEFGABCD\n-----END PRIVATE KEY-----\n" + + set_module_args(args) + mock_request.side_effect = [ + SRR["version_114"], # get + SRR["present_gateways"], # get + SRR["server_config"], # get + SRR["server_config_cert_update"], # put + SRR["end_of_sequence"], + ] + my_obj = grid_gateway_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_update_na_sg_grid_gateway_certificate_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_delete_na_sg_grid_gateway_port_pass(self, mock_request): + set_module_args(self.set_args_delete_na_sg_grid_gateway_port()) + mock_request.side_effect = [ + SRR["version_114"], # get + SRR["present_gateways"], # get + SRR["server_config"], # get + SRR["delete_good"], # delete + SRR["end_of_sequence"], + ] + my_obj = grid_gateway_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_delete_na_sg_grid_gateway_port_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_module_fail_minimum_version_not_met(self, mock_request): + args = self.set_args_create_na_sg_grid_gateway_port() + args["binding_mode"] = "ha-groups" + set_module_args(args) + mock_request.side_effect = [ + SRR["version_114"], # get + ] + with pytest.raises(AnsibleFailJson) as exc: + grid_gateway_module() + print("Info: test_module_fail_minimum_version_not_met: %s" % exc.value.args[0]["msg"]) + + # test create with ha groups + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_create_na_sg_grid_gateway_port_with_ha_group_binding_pass(self, mock_request): + args = self.set_args_create_na_sg_grid_gateway_port() + args["binding_mode"] = "ha-groups" + args["ha_groups"] = ["site1_primary", 
"da9ac524-9a16-4be0-9d6e-ec9b22218e75"] + set_module_args(args) + mock_request.side_effect = [ + SRR["version_116"], # get + SRR["ha_groups"], # get + SRR["empty_good"], # get + SRR["gateway_record_ha_group_binding"], # post + SRR["server_config"], # post + SRR["end_of_sequence"], + ] + my_obj = grid_gateway_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_create_na_sg_grid_gateway_port_with_ha_group_binding_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + # test create with bad ha group ID + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_create_na_sg_grid_gateway_port_with_bad_ha_group_binding_fail(self, mock_request): + mock_request.side_effect = [ + SRR["version_116"], # get + SRR["ha_groups"], # get + ] + with pytest.raises(AnsibleFailJson) as exc: + args = self.set_args_create_na_sg_grid_gateway_port() + args["binding_mode"] = "ha-groups" + args["ha_groups"] = ["fffac524-9a16-4be0-9d6e-ec9b22218e75"] + set_module_args(args) + grid_gateway_module() + print("Info: test_create_na_sg_grid_gateway_port_with_bad_ha_group_binding_fail: %s" % repr(exc.value.args[0])) + + # test create with node interfaces + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_create_na_sg_grid_gateway_port_with_node_interface_binding_pass(self, mock_request): + args = self.set_args_create_na_sg_grid_gateway_port() + args["binding_mode"] = "node-interfaces" + args["node_interfaces"] = [ + {"node": "SITE1-ADM1", "interface": "eth2"}, + {"node": "SITE2-ADM1", "interface": "eth2"}, + ] + set_module_args(args) + mock_request.side_effect = [ + SRR["version_116"], # get + SRR["node_health"], # get + SRR["empty_good"], # get + SRR["gateway_record_node_interface_binding"], # post + SRR["server_config"], # post + SRR["end_of_sequence"], + ] + my_obj = grid_gateway_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_create_na_sg_grid_gateway_port_with_node_interface_binding_pass: %s" % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] + + # test change from global to ha groups + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_update_na_sg_grid_gateway_binding_to_ha_groups_pass(self, mock_request): + args = self.set_args_create_na_sg_grid_gateway_port() + args["binding_mode"] = "ha-groups" + args["ha_groups"] = "site1_primary" + args["server_certificate"] = "-----BEGIN CERTIFICATE-----\nABCDEFGABCD\n-----END CERTIFICATE-----\n" + args["private_key"] = "-----BEGIN PRIVATE KEY-----\nABCDEFGABCD\n-----END PRIVATE KEY-----\n" + set_module_args(args) + mock_request.side_effect = [ + SRR["version_116"], # get + SRR["ha_groups"], # get + SRR["present_gateways_with_binding"], # get + SRR["server_config"], # get + SRR["gateway_record_ha_group_binding"], # put + SRR["server_config_cert_update"], # put + SRR["end_of_sequence"], + ] + my_obj = grid_gateway_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_update_na_sg_grid_gateway_binding_to_ha_groups_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + # test rename by supplying gateway_id + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_update_na_sg_grid_gateway_rename_pass(self, mock_request): + args = 
self.set_args_create_na_sg_grid_gateway_port() + args["gateway_id"] = "e777d415-057f-4d37-9b0c-6d132d872ea0" + args["binding_mode"] = "ha-groups" + args["ha_groups"] = "site1_primary" + set_module_args(args) + mock_request.side_effect = [ + SRR["version_116"], # get + SRR["ha_groups"], # get + SRR["gateway_record_ha_group_binding"], # get + SRR["server_config"], # get + SRR["gateway_record_rename"], # put + SRR["end_of_sequence"], + ] + my_obj = grid_gateway_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_update_na_sg_grid_gateway_rename_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] diff --git a/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_group.py b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_group.py new file mode 100644 index 000000000..fd9fdf15c --- /dev/null +++ b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_group.py @@ -0,0 +1,317 @@ +# (c) 2020, NetApp, Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +""" unit tests NetApp StorageGRID Grid Group Ansible module: na_sg_grid_group""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +import json +import pytest +import sys +try: + from requests import Response +except ImportError: + if sys.version_info < (2, 7): + pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available') + else: + raise + +from ansible_collections.netapp.storagegrid.tests.unit.compat import unittest +from ansible_collections.netapp.storagegrid.tests.unit.compat.mock import ( + patch, + Mock, +) +from ansible.module_utils import basic +from ansible.module_utils._text import to_bytes +from ansible_collections.netapp.storagegrid.plugins.modules.na_sg_grid_group import ( + SgGridGroup as grid_group_module, +) + +# REST API canned responses when mocking send_request +SRR = { + # common responses + "empty_good": ({"data": []}, None), + "not_found": ( + {"status": "error", "code": 404, "data": {}}, + {"key": "error.404"}, + ), + "end_of_sequence": (None, "Unexpected call to send_request"), + "generic_error": (None, "Expected error"), + "delete_good": ({"code": 204}, None), + "grid_groups": ( + { + "data": [ + { + "displayName": "TestGridGroup", + "uniqueName": "group/testgridgroup", + "policies": { + "management": { + "tenantAccounts": True, + "metricsQuery": True, + "maintenance": True, + }, + }, + "id": "00000000-0000-0000-0000-000000000000", + "federated": False, + "groupURN": "urn:sgws:identity::12345678901234567890:group/testgridgroup", + } + ] + }, + None, + ), + "grid_group_record": ( + { + "data": { + "displayName": "TestGridGroup", + "uniqueName": "group/testgridgroup", + "policies": { + "management": { + "tenantAccounts": True, + "metricsQuery": True, + "maintenance": True, + }, + }, + "id": "00000000-0000-0000-0000-000000000000", + "federated": False, + "groupURN": "urn:sgws:identity::12345678901234567890:group/testgridgroup", + } + }, + None, + ), + "grid_group_record_update": ( + { + "data": { + "displayName": "TestGridGroup", + "uniqueName": "group/testgridgroup", + "policies": { + "management": { + "tenantAccounts": True, + "metricsQuery": False, + "maintenance": True, + "ilm": True, + }, + }, + "id": "00000000-0000-0000-0000-000000000000", + "federated": False, + "groupURN": "urn:sgws:identity::12345678901234567890:group/testgridgroup", + } + }, + None, + 
), +} + + +def set_module_args(args): + """prepare arguments so that they will be picked up during module creation""" + args = json.dumps({"ANSIBLE_MODULE_ARGS": args}) + basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access + + +class AnsibleExitJson(Exception): + """Exception class to be raised by module.exit_json and caught by the test case""" + + pass + + +class AnsibleFailJson(Exception): + """Exception class to be raised by module.fail_json and caught by the test case""" + + pass + + +def exit_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over exit_json; package return data into an exception""" + if "changed" not in kwargs: + kwargs["changed"] = False + raise AnsibleExitJson(kwargs) + + +def fail_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over fail_json; package return data into an exception""" + kwargs["failed"] = True + raise AnsibleFailJson(kwargs) + + +class TestMyModule(unittest.TestCase): + """ a group of related Unit Tests """ + + def setUp(self): + self.mock_module_helper = patch.multiple( + basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json + ) + self.mock_module_helper.start() + self.addCleanup(self.mock_module_helper.stop) + + def set_default_args_fail_check(self): + return dict( + { + "display_name": "TestGroup", + "management_policy": { + "maintenance": True, + "ilm": True, + "root_access": False, + }, + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_default_args_pass_check(self): + return dict( + { + "state": "present", + "display_name": "TestGroup", + "unique_name": "group/testgroup", + "management_policy": { + "maintenance": True, + "ilm": True, + "root_access": False, + }, + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_create_na_sg_grid_group(self): + return dict( + { + "state": "present", + "display_name": "TestGridGroup", + "unique_name": "group/testgridgroup", + "management_policy": { + "tenant_accounts": True, + "metrics_query": True, + "maintenance": True, + }, + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_delete_na_sg_grid_group(self): + return dict( + { + "state": "absent", + "unique_name": "group/testgridgroup", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def test_module_fail_when_required_args_missing(self): + """ required arguments are reported as errors """ + with pytest.raises(AnsibleFailJson) as exc: + set_module_args(self.set_default_args_fail_check()) + grid_group_module() + print( + "Info: test_module_fail_when_required_args_missing: %s" + % exc.value.args[0]["msg"] + ) + + def test_module_fail_when_required_args_present(self): + """ required arguments are reported as errors """ + with pytest.raises(AnsibleExitJson) as exc: + set_module_args(self.set_default_args_pass_check()) + grid_group_module() + exit_json(changed=True, msg="Induced arguments check") + print( + "Info: test_module_fail_when_required_args_present: %s" + % exc.value.args[0]["msg"] + ) + assert exc.value.args[0]["changed"] + + def test_module_fail_with_bad_unique_name(self): + """ error returned if unique_name doesn't start with group or federated_group """ + with pytest.raises(AnsibleFailJson) as exc: + args = self.set_default_args_pass_check() + args["unique_name"] = 
"noprefixgroup" + set_module_args(args) + grid_group_module() + print( + "Info: test_module_fail_with_bad_unique_name: %s" + % exc.value.args[0]["msg"] + ) + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_create_na_sg_grid_group_pass(self, mock_request): + set_module_args(self.set_args_create_na_sg_grid_group()) + my_obj = grid_group_module() + mock_request.side_effect = [ + SRR["not_found"], # get + SRR["grid_group_record"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_create_na_sg_grid_group_pass: %s" + % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_idempotent_create_na_sg_grid_group_pass(self, mock_request): + set_module_args(self.set_args_create_na_sg_grid_group()) + my_obj = grid_group_module() + mock_request.side_effect = [ + SRR["grid_group_record"], # get + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_idempotent_create_na_sg_grid_group_pass: %s" + % repr(exc.value.args[0]) + ) + assert not exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_update_na_sg_grid_group_pass(self, mock_request): + args = self.set_args_create_na_sg_grid_group() + args["management_policy"]["tenant_accounts"] = True + args["management_policy"]["metrics_query"] = False + args["management_policy"]["ilm"] = False + + set_module_args(args) + my_obj = grid_group_module() + mock_request.side_effect = [ + SRR["grid_group_record"], # get + SRR["grid_group_record_update"], # put + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_update_na_sg_grid_group_pass: %s" + % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_delete_na_sg_grid_group_pass(self, mock_request): + set_module_args(self.set_args_delete_na_sg_grid_group()) + my_obj = grid_group_module() + mock_request.side_effect = [ + SRR["grid_group_record"], # get + SRR["delete_good"], # delete + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_delete_na_sg_grid_group_pass: %s" + % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] diff --git a/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_ha_group.py b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_ha_group.py new file mode 100644 index 000000000..fbc8fd0ce --- /dev/null +++ b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_ha_group.py @@ -0,0 +1,408 @@ +# (c) 2022, NetApp, Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +""" unit tests NetApp StorageGRID Grid HA Group Ansible module: na_sg_grid_ha_group""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +import json +import pytest +import sys + +try: + from requests import Response +except ImportError: + if sys.version_info < (2, 7): + pytestmark = pytest.mark.skip("Skipping Unit Tests on 2.6 as requests is not available") + else: + raise 
+ +from ansible_collections.netapp.storagegrid.tests.unit.compat import unittest +from ansible_collections.netapp.storagegrid.tests.unit.compat.mock import ( + patch, + Mock, +) +from ansible.module_utils import basic +from ansible.module_utils._text import to_bytes +from ansible_collections.netapp.storagegrid.plugins.modules.na_sg_grid_ha_group import ( + SgGridHaGroup as grid_ha_group_module, +) + +# REST API canned responses when mocking send_request +SRR = { + # common responses + "empty_good": ({"data": []}, None), + "not_found": ( + {"status": "error", "code": 404, "data": {}}, + {"key": "error.404"}, + ), + "end_of_sequence": (None, "Unexpected call to send_request"), + "generic_error": (None, "Expected error"), + "delete_good": (None, None), + "update_good": (None, None), + "version_114": ({"data": {"productVersion": "11.4.0-20200721.1338.d3969b3"}}, None), + "version_116": ({"data": {"productVersion": "11.6.0-20211120.0301.850531e"}}, None), + "ha_group_record": ( + { + "data": { + "id": "fbe724da-c941-439b-bb61-a536f6211ca9", + "name": "ansible-ha-group", + "description": None, + "virtualIps": ["192.168.50.5"], + "interfaces": [ + {"nodeId": "0b1866ed-d6e7-41b4-815f-bf867348b76b", "interface": "ens256"}, + {"nodeId": "7bb5bf05-a04c-4344-8abd-08c5c4048666", "interface": "ens256"}, + ], + "gatewayCidr": "192.168.50.1/24", + } + }, + None, + ), + "ha_group_record_twovip": ( + { + "data": { + "id": "fbe724da-c941-439b-bb61-a536f6211ca9", + "name": "ansible-ha-group", + "description": "2 VIP HA Group", + "virtualIps": ["192.168.50.5", "192.168.50.6"], + "interfaces": [ + {"nodeId": "0b1866ed-d6e7-41b4-815f-bf867348b76b", "interface": "ens256"}, + {"nodeId": "7bb5bf05-a04c-4344-8abd-08c5c4048666", "interface": "ens256"}, + ], + "gatewayCidr": "192.168.50.1/24", + } + }, + None, + ), + "ha_group_record_rename": ( + { + "data": { + "id": "fbe724da-c941-439b-bb61-a536f6211ca9", + "name": "ansible-ha-group-rename", + "description": None, + "virtualIps": ["192.168.50.5"], + "interfaces": [ + {"nodeId": "0b1866ed-d6e7-41b4-815f-bf867348b76b", "interface": "ens256"}, + {"nodeId": "7bb5bf05-a04c-4344-8abd-08c5c4048666", "interface": "ens256"}, + ], + "gatewayCidr": "192.168.50.1/24", + } + }, + None, + ), + "ha_groups": ( + { + "data": [ + { + "id": "c08e6dca-038d-4a05-9499-6fbd1e6a4c3e", + "name": "site1_primary", + "description": "test ha group", + "virtualIps": ["10.193.174.117"], + "interfaces": [ + { + "nodeId": "0b1866ed-d6e7-41b4-815f-bf867348b76b", + "nodeName": "SITE1-ADM1", + "interface": "eth2", + "preferredMaster": True, + }, + { + "nodeId": "970ad050-b68b-4aae-a94d-aef73f3095c4", + "nodeName": "SITE2-ADM1", + "interface": "eth2", + }, + ], + "gatewayCidr": "192.168.14.1/24", + }, + { + "id": "fbe724da-c941-439b-bb61-a536f6211ca9", + "name": "ansible-ha-group", + "description": None, + "virtualIps": ["192.168.50.5"], + "interfaces": [ + {"nodeId": "0b1866ed-d6e7-41b4-815f-bf867348b76b", "interface": "ens256"}, + {"nodeId": "7bb5bf05-a04c-4344-8abd-08c5c4048666", "interface": "ens256"}, + ], + "gatewayCidr": "192.168.50.1/24", + }, + ] + }, + None, + ), + "node_health": ( + { + "data": [ + { + "id": "0b1866ed-d6e7-41b4-815f-bf867348b76b", + "isPrimaryAdmin": True, + "name": "SITE1-ADM1", + "siteId": "ae56d06d-bd83-46bd-adce-77146b1d94bd", + "siteName": "SITE1", + "severity": "normal", + "state": "connected", + "type": "adminNode", + }, + { + "id": "7bb5bf05-a04c-4344-8abd-08c5c4048666", + "isPrimaryAdmin": None, + "name": "SITE1-G1", + "siteId": 
"ae56d06d-bd83-46bd-adce-77146b1d94bd", + "siteName": "SITE1", + "severity": "normal", + "state": "connected", + "type": "apiGatewayNode", + }, + { + "id": "970ad050-b68b-4aae-a94d-aef73f3095c4", + "isPrimaryAdmin": False, + "name": "SITE2-ADM1", + "siteId": "7c24002e-5157-43e9-83e5-02db9b265b02", + "siteName": "SITE2", + "severity": "normal", + "state": "connected", + "type": "adminNode", + }, + ] + }, + None, + ), +} + + +def set_module_args(args): + """prepare arguments so that they will be picked up during module creation""" + args = json.dumps({"ANSIBLE_MODULE_ARGS": args}) + basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access + + +class AnsibleExitJson(Exception): + """Exception class to be raised by module.exit_json and caught by the test case""" + + pass + + +class AnsibleFailJson(Exception): + """Exception class to be raised by module.fail_json and caught by the test case""" + + pass + + +def exit_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over exit_json; package return data into an exception""" + if "changed" not in kwargs: + kwargs["changed"] = False + raise AnsibleExitJson(kwargs) + + +def fail_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over fail_json; package return data into an exception""" + kwargs["failed"] = True + raise AnsibleFailJson(kwargs) + + +class TestMyModule(unittest.TestCase): + """a group of related Unit Tests""" + + def setUp(self): + self.mock_module_helper = patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json) + self.mock_module_helper.start() + self.addCleanup(self.mock_module_helper.stop) + + def set_default_args_fail_check(self): + return dict( + { + "gateway_cidr": "192.168.50.1/24", + "virtual_ips": "192.168.50.5", + "interfaces": [ + {"node": "SITE1-ADM1", "interface": "ens256"}, + {"node": "SITE1-G1", "interface": "ens256"}, + ], + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_default_args_pass_check(self): + return dict( + { + "state": "present", + "name": "ansible-test-ha-group", + "gateway_cidr": "192.168.50.1/24", + "virtual_ips": "192.168.50.5", + "interfaces": [ + {"node": "SITE1-ADM1", "interface": "ens256"}, + {"node": "SITE1-G1", "interface": "ens256"}, + ], + "api_url": "https://gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_create_na_sg_grid_ha_group(self): + return dict( + { + "state": "present", + "name": "ansible-ha-group", + "gateway_cidr": "192.168.50.1/24", + "virtual_ips": "192.168.50.5", + "interfaces": [ + {"node": "SITE1-ADM1", "interface": "ens256"}, + {"node": "SITE1-G1", "interface": "ens256"}, + ], + "api_url": "https://gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_delete_na_sg_grid_ha_group(self): + return dict( + { + "state": "absent", + "name": "ansible-ha-group", + "gateway_cidr": "192.168.50.1/24", + "virtual_ips": "192.168.50.5", + "interfaces": [ + {"node": "SITE1-ADM1", "interface": "ens256"}, + {"node": "SITE1-G1", "interface": "ens256"}, + ], + "api_url": "https://gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_module_fail_when_required_args_missing(self, mock_request): + """required arguments are reported as 
errors""" + with pytest.raises(AnsibleFailJson) as exc: + set_module_args(self.set_default_args_fail_check()) + grid_ha_group_module() + print("Info: test_module_fail_when_required_args_missing: %s" % exc.value.args[0]["msg"]) + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_module_pass_when_required_args_present(self, mock_request): + """required arguments are reported as errors""" + mock_request.side_effect = [ + SRR["node_health"], # get + ] + with pytest.raises(AnsibleExitJson) as exc: + set_module_args(self.set_default_args_pass_check()) + grid_ha_group_module() + exit_json(changed=True, msg="Induced arguments check") + print("Info: test_module_pass_when_required_args_present: %s" % exc.value.args[0]["msg"]) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_create_na_sg_grid_ha_group_pass(self, mock_request): + set_module_args(self.set_args_create_na_sg_grid_ha_group()) + mock_request.side_effect = [ + SRR["node_health"], # get + SRR["empty_good"], # get + SRR["ha_group_record"], # post + SRR["end_of_sequence"], + ] + my_obj = grid_ha_group_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_create_na_sg_grid_ha_group_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_idempotent_create_na_sg_grid_ha_group_pass(self, mock_request): + args = self.set_args_create_na_sg_grid_ha_group() + set_module_args(args) + mock_request.side_effect = [ + SRR["node_health"], # get + SRR["ha_groups"], # get + SRR["ha_group_record"], # get + SRR["end_of_sequence"], + ] + my_obj = grid_ha_group_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_idempotent_create_na_sg_grid_ha_group_pass: %s" % repr(exc.value.args[0])) + assert not exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_update_na_sg_grid_ha_group_pass(self, mock_request): + args = self.set_args_create_na_sg_grid_ha_group() + args["description"] = "2 VIP HA Group" + args["virtual_ips"] = ["192.168.50.5", "192.168.50.6"] + set_module_args(args) + mock_request.side_effect = [ + SRR["node_health"], # get + SRR["ha_groups"], # get + SRR["ha_group_record"], # get + SRR["ha_group_record_twovip"], # post + SRR["end_of_sequence"], + ] + my_obj = grid_ha_group_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_update_na_sg_grid_ha_group_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_rename_na_sg_grid_ha_group_pass(self, mock_request): + args = self.set_args_create_na_sg_grid_ha_group() + args["ha_group_id"] = "fbe724da-c941-439b-bb61-a536f6211ca9" + args["name"] = "ansible-ha-group-rename" + set_module_args(args) + mock_request.side_effect = [ + SRR["node_health"], # get + SRR["ha_group_record"], # get + SRR["ha_group_record_rename"], # post + SRR["end_of_sequence"], + ] + my_obj = grid_ha_group_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_rename_na_sg_grid_ha_group_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + 
@patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_delete_na_sg_grid_ha_group_pass(self, mock_request): + args = self.set_args_delete_na_sg_grid_ha_group() + set_module_args(args) + mock_request.side_effect = [ + SRR["ha_groups"], # get + SRR["ha_group_record"], # get + SRR["delete_good"], # delete + SRR["end_of_sequence"], + ] + my_obj = grid_ha_group_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_delete_na_sg_grid_ha_group_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_create_na_sg_grid_ha_group_bad_node_fail(self, mock_request): + args = self.set_args_create_na_sg_grid_ha_group() + args["interfaces"] = [{"node": "FakeNode", "interface": "eth0"}] + set_module_args(args) + mock_request.side_effect = [ + SRR["node_health"], # get + ] + with pytest.raises(AnsibleFailJson) as exc: + grid_ha_group_module() + print("Info: test_create_na_sg_grid_ha_group_bad_node_fail: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["failed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_update_na_sg_grid_ha_group_bad_ha_group_id_fail(self, mock_request): + args = self.set_args_create_na_sg_grid_ha_group() + args["ha_group_id"] = "ffffffff-ffff-aaaa-aaaa-000000000000" + args["virtual_ips"] = "192.168.50.10" + set_module_args(args) + mock_request.side_effect = [ + SRR["node_health"], # get + SRR["not_found"], # get + ] + with pytest.raises(AnsibleFailJson) as exc: + my_obj = grid_ha_group_module() + my_obj.apply() + print("Info: test_create_na_sg_grid_ha_group_bad_node_fail: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["failed"] diff --git a/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_identity_federation.py b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_identity_federation.py new file mode 100644 index 000000000..058fc609e --- /dev/null +++ b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_identity_federation.py @@ -0,0 +1,354 @@ +# (c) 2021, NetApp, Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +""" unit tests NetApp StorageGRID Grid Identity Federation Ansible module: na_sg_grid_identity_federation""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +import json +import pytest +import sys +try: + from requests import Response +except ImportError: + if sys.version_info < (2, 7): + pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available') + else: + raise + +from ansible_collections.netapp.storagegrid.tests.unit.compat import unittest +from ansible_collections.netapp.storagegrid.tests.unit.compat.mock import ( + patch, + Mock, +) +from ansible.module_utils import basic +from ansible.module_utils._text import to_bytes +from ansible_collections.netapp.storagegrid.plugins.modules.na_sg_grid_identity_federation import ( + SgGridIdentityFederation as grid_identity_federation_module, +) + +# REST API canned responses when mocking send_request +SRR = { + # common responses + "end_of_sequence": (None, "Unexpected call to send_request"), + "generic_error": (None, "Expected error"), + "check_mode_good": (None, None), + "identity_federation_unset": ( + { + 
"data": { + "id": "09876543-abcd-4321-abcd-0987654321ab", + "disable": True, + "type": "", + "ldapServiceType": "", + "hostname": "", + "port": 0, + "username": "", + "password": None, + "baseGroupDn": "", + "baseUserDn": "", + "disableTLS": False, + "enableLDAPS": False, + "caCert": "", + } + }, + None, + ), + "identity_federation": ( + { + "data": { + "id": "09876543-abcd-4321-abcd-0987654321ab", + "disable": False, + "type": "ldap", + "ldapServiceType": "Active Directory", + "hostname": "ad.example.com", + "port": 389, + "username": "binduser", + "password": "********", + "baseGroupDn": "DC=example,DC=com", + "baseUserDn": "DC=example,DC=com", + "disableTLS": True, + "enableLDAPS": False, + "caCert": "", + } + }, + None, + ), + "identity_federation_tls": ( + { + "data": { + "id": "09876543-abcd-4321-abcd-0987654321ab", + "disable": False, + "type": "ldap", + "ldapServiceType": "Active Directory", + "hostname": "ad.example.com", + "port": 636, + "username": "binduser", + "password": "********", + "baseGroupDn": "DC=example,DC=com", + "baseUserDn": "DC=example,DC=com", + "disableTLS": False, + "enableLDAPS": True, + "caCert": ( + "-----BEGIN CERTIFICATE-----\n" + "MIIF+DCCBOCgAwIBAgITRwAAAAIg5KzMrJo+kQAAAAAAAjANBgkqhkiG9w0BAQUF\n" + "ADBlMRIwEAYKCZImiZPyLGQBGRYCYXUxFjAUBgoJkiaJk/IsZAEZFgZuZXRhcHAx\n" + "FjAUBgoJkiaJk/IsZAEZFgZhdXNuZ3MxHzAdBgNVBAMTFmF1c25ncy1NRUxOR1NE\n" + "QzAxLUNBLTEwHhcNMjEwMjExMDkzMTIwWhcNMjMwMjExMDk0MTIwWjAAMIIBIjAN\n" + "BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAt2xPi4FS4Uc37KrDLEXXUoc4lhhT\n" + "uQmMnLc0PYZCIpzYOaosFIeGqco3woiC7wSZJ2whKE4RDcxxgE+azuGiSWVjIxIL\n" + "AimmcDhFid/T3KRN5jmkjBzUKuPBYzZBFih8iU9056rqgN7eMKQYjRwPeV0+AeiB\n" + "irw46OgkwVQu3shEUtXxZPP2Mb6Md23+4vSmcElUcW28Opt2q/M5fs7DNomG3eaG\n" + "-----END CERTIFICATE-----\n" + ), + } + }, + None, + ), + "identity_federation_disable": ( + { + "data": { + "id": "09876543-abcd-4321-abcd-0987654321ab", + "disable": True, + "type": "ldap", + "ldapServiceType": "Active Directory", + "hostname": "ad.example.com", + "port": 389, + "username": "binduser", + "password": "********", + "baseGroupDn": "DC=example,DC=com", + "baseUserDn": "DC=example,DC=com", + "disableTLS": True, + "enableLDAPS": False, + "caCert": "", + } + }, + None, + ), +} + + +def set_module_args(args): + """prepare arguments so that they will be picked up during module creation""" + args = json.dumps({"ANSIBLE_MODULE_ARGS": args}) + basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access + + +class AnsibleExitJson(Exception): + """Exception class to be raised by module.exit_json and caught by the test case""" + + pass + + +class AnsibleFailJson(Exception): + """Exception class to be raised by module.fail_json and caught by the test case""" + + pass + + +def exit_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over exit_json; package return data into an exception""" + if "changed" not in kwargs: + kwargs["changed"] = False + raise AnsibleExitJson(kwargs) + + +def fail_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over fail_json; package return data into an exception""" + kwargs["failed"] = True + raise AnsibleFailJson(kwargs) + + +class TestMyModule(unittest.TestCase): + """ a group of related Unit Tests """ + + def setUp(self): + self.mock_module_helper = patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json) + self.mock_module_helper.start() + self.addCleanup(self.mock_module_helper.stop) + + def set_default_args_fail_check(self): + return dict( + { 
+ "ldap_service_type": "Active Directory", + "hostname": "ad.example.com", + "port": 389, + "username": "binduser", + "password": "bindpass", + "base_group_dn": "DC=example,DC=com", + "base_user_dn": "DC=example,DC=com", + "tls": "Disabled", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_default_args_pass_check(self): + return dict( + { + "ldap_service_type": "Active Directory", + "hostname": "ad.example.com", + "port": 389, + "username": "binduser", + "password": "bindpass", + "base_group_dn": "DC=example,DC=com", + "base_user_dn": "DC=example,DC=com", + "tls": "Disabled", + "state": "present", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_set_na_sg_grid_identity_federation(self): + return dict( + { + "ldap_service_type": "Active Directory", + "hostname": "ad.example.com", + "port": 389, + "username": "binduser", + "password": "bindpass", + "base_group_dn": "DC=example,DC=com", + "base_user_dn": "DC=example,DC=com", + "tls": "Disabled", + "state": "present", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_set_na_sg_grid_identity_federation_tls(self): + return dict( + { + "ldap_service_type": "Active Directory", + "hostname": "ad.example.com", + "port": 636, + "username": "binduser", + "password": "bindpass", + "base_group_dn": "DC=example,DC=com", + "base_user_dn": "DC=example,DC=com", + "tls": "LDAPS", + "ca_cert": ( + "-----BEGIN CERTIFICATE-----\n" + "MIIF+DCCBOCgAwIBAgITRwAAAAIg5KzMrJo+kQAAAAAAAjANBgkqhkiG9w0BAQUF\n" + "ADBlMRIwEAYKCZImiZPyLGQBGRYCYXUxFjAUBgoJkiaJk/IsZAEZFgZuZXRhcHAx\n" + "FjAUBgoJkiaJk/IsZAEZFgZhdXNuZ3MxHzAdBgNVBAMTFmF1c25ncy1NRUxOR1NE\n" + "QzAxLUNBLTEwHhcNMjEwMjExMDkzMTIwWhcNMjMwMjExMDk0MTIwWjAAMIIBIjAN\n" + "BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAt2xPi4FS4Uc37KrDLEXXUoc4lhhT\n" + "uQmMnLc0PYZCIpzYOaosFIeGqco3woiC7wSZJ2whKE4RDcxxgE+azuGiSWVjIxIL\n" + "AimmcDhFid/T3KRN5jmkjBzUKuPBYzZBFih8iU9056rqgN7eMKQYjRwPeV0+AeiB\n" + "irw46OgkwVQu3shEUtXxZPP2Mb6Md23+4vSmcElUcW28Opt2q/M5fs7DNomG3eaG\n" + "-----END CERTIFICATE-----\n" + ), + "state": "present", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_remove_na_sg_grid_identity_federation(self): + return dict( + { + "ldap_service_type": "Active Directory", + "hostname": "ad.example.com", + "state": "absent", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def test_module_fail_when_required_args_missing(self): + """ required arguments are reported as errors """ + with pytest.raises(AnsibleFailJson) as exc: + set_module_args(self.set_default_args_fail_check()) + grid_identity_federation_module() + print("Info: test_module_fail_when_required_args_missing: %s" % exc.value.args[0]["msg"]) + + def test_module_fail_when_required_args_present(self): + """ required arguments are reported as errors """ + with pytest.raises(AnsibleExitJson) as exc: + set_module_args(self.set_default_args_pass_check()) + grid_identity_federation_module() + exit_json(changed=True, msg="Induced arguments check") + print("Info: test_module_fail_when_required_args_present: %s" % exc.value.args[0]["msg"]) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def 
test_set_na_sg_grid_identity_federation_pass(self, mock_request): + set_module_args(self.set_args_set_na_sg_grid_identity_federation()) + my_obj = grid_identity_federation_module() + mock_request.side_effect = [ + SRR["identity_federation_unset"], # get + SRR["identity_federation"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_set_na_sg_grid_identity_federation_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_idempotent_set_na_sg_grid_identity_federation_pass(self, mock_request): + args = self.set_args_set_na_sg_grid_identity_federation() + # remove password + del args["password"] + set_module_args(args) + my_obj = grid_identity_federation_module() + mock_request.side_effect = [ + SRR["identity_federation"], # get + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_idempotent_set_na_sg_grid_identity_federation_pass: %s" % repr(exc.value.args[0])) + assert not exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_set_na_sg_grid_identity_federation_tls_pass(self, mock_request): + set_module_args(self.set_args_set_na_sg_grid_identity_federation_tls()) + my_obj = grid_identity_federation_module() + mock_request.side_effect = [ + SRR["identity_federation_unset"], # get + SRR["identity_federation_tls"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_set_na_sg_grid_identity_federation_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_remove_na_sg_grid_identity_federation_pass(self, mock_request): + set_module_args(self.set_args_remove_na_sg_grid_identity_federation()) + my_obj = grid_identity_federation_module() + mock_request.side_effect = [ + SRR["identity_federation"], # get + SRR["identity_federation_disable"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_remove_na_sg_grid_identity_federation_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + # test check mode + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_check_mode_na_sg_grid_identity_federation_pass(self, mock_request): + set_module_args(self.set_args_set_na_sg_grid_identity_federation()) + my_obj = grid_identity_federation_module() + my_obj.module.check_mode = True + mock_request.side_effect = [ + SRR["identity_federation_unset"], # get + SRR["check_mode_good"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_check_mode_na_sg_grid_identity_federation_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + assert exc.value.args[0]["msg"] == "Connection test successful" diff --git a/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_info.py b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_info.py new file mode 100644 index 000000000..2de26109b --- /dev/null +++ 
b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_info.py @@ -0,0 +1,362 @@ +# (c) 2020, NetApp, Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +''' Unit Tests NetApp StorageGRID Grid Ansible module: na_sg_grid_info ''' + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type +import json +import pytest + +from ansible.module_utils import basic +from ansible.module_utils._text import to_bytes +from ansible_collections.netapp.storagegrid.tests.unit.compat import unittest +from ansible_collections.netapp.storagegrid.tests.unit.compat.mock import patch + +from ansible_collections.netapp.storagegrid.plugins.modules.na_sg_grid_info \ + import NetAppSgGatherInfo as sg_grid_info_module + +# REST API canned responses when mocking send_request +SRR = { + # common responses + 'empty_good': ({'data': []}, None), + 'end_of_sequence': (None, 'Unexpected call to send_request'), + 'generic_error': (None, 'Expected error'), + 'grid_accounts': ( + { + 'data': [ + { + 'name': 'TestTenantAccount1', + 'capabilities': ['management', 's3'], + 'policy': { + 'useAccountIdentitySource': True, + 'allowPlatformServices': False, + 'quotaObjectBytes': None, + }, + 'id': '12345678901234567891', + }, + { + 'name': 'TestTenantAccount2', + 'capabilities': ['management', 's3'], + 'policy': { + 'useAccountIdentitySource': True, + 'allowPlatformServices': False, + 'quotaObjectBytes': None, + }, + 'id': '12345678901234567892', + }, + { + 'name': 'TestTenantAccount3', + 'capabilities': ['management', 's3'], + 'policy': { + 'useAccountIdentitySource': True, + 'allowPlatformServices': False, + 'quotaObjectBytes': None, + }, + 'id': '12345678901234567893', + }, + ] + }, + None, + ), + 'grid_alarms': ({'data': []}, None), + 'grid_audit': ({'data': {}}, None), + 'grid_compliance_global': ({'data': {}}, None), + 'grid_config': ({'data': {}}, None), + 'grid_config_management': ({'data': {}}, None), + 'grid_config_product_version': ({'data': {}}, None), + 'grid_deactivated_features': ({'data': {}}, None), + 'grid_dns_servers': ({'data': []}, None), + 'grid_domain_names': ({'data': []}, None), + 'grid_ec_profiles': ({'data': []}, None), + 'grid_expansion': ({'data': {}}, None), + 'grid_expansion_nodes': ({'data': []}, None), + 'grid_expansion_sites': ({'data': []}, None), + 'grid_grid_networks': ({'data': []}, None), + 'grid_groups': ({'data': []}, None), + 'grid_health': ({'data': {}}, None), + 'grid_health_topology': ({'data': {}}, None), + 'grid_identity_source': ({'data': {}}, None), + 'grid_ilm_criteria': ({'data': []}, None), + 'grid_ilm_policies': ({'data': []}, None), + 'grid_ilm_rules': ({'data': []}, None), + 'grid_license': ({'data': []}, None), + 'grid_management_certificate': ({'data': {}}, None), + 'grid_ntp_servers': ({'data': []}, None), + 'grid_recovery': ({'data': {}}, None), + 'grid_recovery_available_nodes': ({'data': []}, None), + 'grid_regions': ({'data': []}, None), + 'grid_schemes': ({'data': []}, None), + 'grid_snmp': ({'data': {}}, None), + 'grid_storage_api_certificate': ({'data': {}}, None), + 'grid_untrusted_client_network': ({'data': {}}, None), + 'grid_users': ( + { + 'data': [ + { + 'accountId': '0', + 'disable': False, + 'federated': False, + 'fullName': 'Root', + 'id': '00000000-0000-0000-0000-000000000000', + 'memberOf': None, + 'uniqueName': 'root', + 'userURN': 'urn:sgws:identity::0:root' + }, + ] + }, + None + ), + 'grid_users_root': ( + { + 'data': { + 'accountId': '0', + 
'disable': False, + 'federated': False, + 'fullName': 'Root', + 'id': '00000000-0000-0000-0000-000000000000', + 'memberOf': None, + 'uniqueName': 'root', + 'userURN': 'urn:sgws:identity::0:root' + }, + }, + None + ), + 'versions': ({'data': [2, 3]}, None), +} + + +def set_module_args(args): + ''' Prepare arguments so that they will be picked up during module creation ''' + args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) + basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access + + +class AnsibleExitJson(Exception): + ''' Exception class to be raised by module.exit_json and caught by the test case ''' + pass + + +class AnsibleFailJson(Exception): + ''' Exception class to be raised by module.fail_json and caught by the test case ''' + pass + + +def exit_json(*args, **kwargs): # pylint: disable=unused-argument + ''' Function to patch over exit_json; package return data into an exception ''' + if 'changed' not in kwargs: + kwargs['changed'] = False + raise AnsibleExitJson(kwargs) + + +def fail_json(*args, **kwargs): # pylint: disable=unused-argument + ''' Function to patch over fail_json; package return data into an exception ''' + kwargs['failed'] = True + raise AnsibleFailJson(kwargs) + + +class TestMyModule(unittest.TestCase): + ''' A group of related Unit Tests ''' + + def setUp(self): + self.mock_module_helper = patch.multiple(basic.AnsibleModule, + exit_json=exit_json, + fail_json=fail_json) + self.mock_module_helper.start() + self.addCleanup(self.mock_module_helper.stop) + + def set_default_args_fail_check(self): + return dict( + { + 'api_url': 'sgmi.example.com', + } + ) + + def set_default_args_pass_check(self): + return dict( + { + 'api_url': 'sgmi.example.com', + 'auth_token': '01234567-5678-9abc-78de-9fgabc123def', + } + ) + + def set_default_optional_args_pass_check(self): + return dict( + { + 'api_url': 'sgmi.example.com', + 'auth_token': '01234567-5678-9abc-78de-9fgabc123def', + 'validate_certs': False, + 'gather_subset': ['all'], + 'parameters': {'limit': 5}, + } + ) + + def set_args_run_sg_gather_facts_for_all_info(self): + return dict({ + 'api_url': 'sgmi.example.com', + 'auth_token': '01234567-5678-9abc-78de-9fgabc123def', + 'validate_certs': False, + }) + + def set_args_run_sg_gather_facts_for_grid_accounts_info(self): + return dict({ + 'api_url': 'sgmi.example.com', + 'auth_token': '01234567-5678-9abc-78de-9fgabc123def', + 'validate_certs': False, + 'gather_subset': ['grid_accounts_info'], + }) + + def set_args_run_sg_gather_facts_for_grid_accounts_and_grid_users_root_info(self): + return dict({ + 'api_url': 'sgmi.example.com', + 'auth_token': '01234567-5678-9abc-78de-9fgabc123def', + 'validate_certs': False, + 'gather_subset': ['grid_accounts_info', 'grid/users/root'], + }) + + def test_module_fail_when_required_args_missing(self): + ''' required arguments are reported as errors ''' + with pytest.raises(AnsibleFailJson) as exc: + set_module_args(self.set_default_args_fail_check()) + sg_grid_info_module() + print( + 'Info: test_module_fail_when_required_args_missing: %s' + % exc.value.args[0]['msg'] + ) + + def test_module_pass_when_required_args_present(self): + ''' required arguments are reported as errors ''' + with pytest.raises(AnsibleExitJson) as exc: + set_module_args(self.set_default_args_pass_check()) + sg_grid_info_module() + exit_json(changed=True, msg='Induced arguments check') + print( + 'Info: test_module_pass_when_required_args_present: %s' + % exc.value.args[0]['msg'] + ) + assert exc.value.args[0]['changed'] + + def 
test_module_pass_when_optional_args_present(self): + ''' Optional arguments are reported as pass ''' + with pytest.raises(AnsibleExitJson) as exc: + set_module_args(self.set_default_optional_args_pass_check()) + sg_grid_info_module() + exit_json(changed=True, msg='Induced arguments check') + print( + 'Info: test_module_pass_when_optional_args_present: %s' + % exc.value.args[0]['msg'] + ) + assert exc.value.args[0]['changed'] + + @patch('ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request') + def test_run_sg_gather_facts_for_all_info_pass(self, mock_request): + set_module_args(self.set_args_run_sg_gather_facts_for_all_info()) + my_obj = sg_grid_info_module() + gather_subset = [ + 'grid/accounts', + 'grid/alarms', + 'grid/audit', + 'grid/compliance-global', + 'grid/config', + 'grid/config/management', + 'grid/config/product-version', + 'grid/deactivated-features', + 'grid/dns-servers', + 'grid/domain-names', + 'grid/ec-profiles', + 'grid/expansion', + 'grid/expansion/nodes', + 'grid/expansion/sites', + 'grid/grid-networks', + 'grid/groups', + 'grid/health', + 'grid/health/topology', + 'grid/identity-source', + 'grid/ilm-criteria', + 'grid/ilm-policies', + 'grid/ilm-rules', + 'grid/license', + 'grid/management-certificate', + 'grid/ntp-servers', + 'grid/recovery/available-nodes', + 'grid/recovery', + 'grid/regions', + 'grid/schemes', + 'grid/snmp', + 'grid/storage-api-certificate', + 'grid/untrusted-client-network', + 'grid/users', + 'grid/users/root', + 'versions', + ] + mock_request.side_effect = [ + SRR['grid_accounts'], + SRR['grid_alarms'], + SRR['grid_audit'], + SRR['grid_compliance_global'], + SRR['grid_config'], + SRR['grid_config_management'], + SRR['grid_config_product_version'], + SRR['grid_deactivated_features'], + SRR['grid_dns_servers'], + SRR['grid_domain_names'], + SRR['grid_ec_profiles'], + SRR['grid_expansion'], + SRR['grid_expansion_nodes'], + SRR['grid_expansion_sites'], + SRR['grid_grid_networks'], + SRR['grid_groups'], + SRR['grid_health'], + SRR['grid_health_topology'], + SRR['grid_identity_source'], + SRR['grid_ilm_criteria'], + SRR['grid_ilm_policies'], + SRR['grid_ilm_rules'], + SRR['grid_license'], + SRR['grid_management_certificate'], + SRR['grid_ntp_servers'], + SRR['grid_recovery_available_nodes'], + SRR['grid_recovery'], + SRR['grid_regions'], + SRR['grid_schemes'], + SRR['grid_snmp'], + SRR['grid_storage_api_certificate'], + SRR['grid_untrusted_client_network'], + SRR['grid_users'], + SRR['grid_users_root'], + SRR['versions'], + SRR['end_of_sequence'], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print('Info: test_run_sg_gather_facts_for_all_info_pass: %s' % repr(exc.value.args)) + assert set(exc.value.args[0]['sg_info']) == set(gather_subset) + + @patch('ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request') + def test_run_sg_gather_facts_for_grid_accounts_info_pass(self, mock_request): + set_module_args(self.set_args_run_sg_gather_facts_for_grid_accounts_info()) + my_obj = sg_grid_info_module() + gather_subset = ['grid/accounts'] + mock_request.side_effect = [ + SRR['grid_accounts'], + SRR['end_of_sequence'], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print('Info: test_run_sg_gather_facts_for_grid_accounts_info_pass: %s' % repr(exc.value.args)) + assert set(exc.value.args[0]['sg_info']) == set(gather_subset) + + @patch('ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request') + def 
test_run_sg_gather_facts_for_grid_accounts_and_grid_users_root_info_pass(self, mock_request): + set_module_args(self.set_args_run_sg_gather_facts_for_grid_accounts_and_grid_users_root_info()) + my_obj = sg_grid_info_module() + gather_subset = ['grid/accounts', 'grid/users/root'] + mock_request.side_effect = [ + SRR['grid_accounts'], + SRR['grid_users_root'], + SRR['end_of_sequence'], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print('Info: test_run_sg_gather_facts_for_grid_accounts_and_grid_users_root_info_pass: %s' % repr(exc.value.args)) + assert set(exc.value.args[0]['sg_info']) == set(gather_subset) diff --git a/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_ntp.py b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_ntp.py new file mode 100644 index 000000000..eed83d49b --- /dev/null +++ b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_ntp.py @@ -0,0 +1,257 @@ +# (c) 2020, NetApp, Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +""" unit tests NetApp StorageGRID NTP Ansible module: na_sg_grid_ntp""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +import json +import pytest +import sys +try: + from requests import Response +except ImportError: + if sys.version_info < (2, 7): + pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available') + else: + raise + +from ansible_collections.netapp.storagegrid.tests.unit.compat import unittest +from ansible_collections.netapp.storagegrid.tests.unit.compat.mock import ( + patch, + Mock, +) +from ansible.module_utils import basic +from ansible.module_utils._text import to_bytes +from ansible_collections.netapp.storagegrid.plugins.modules.na_sg_grid_ntp import ( + SgGridNtp as grid_ntp_module, +) + +# REST API canned responses when mocking send_request +SRR = { + # common responses + "empty_good": ({"data": []}, None), + "not_found": ( + {"status": "error", "code": 404, "data": {}}, + {"key": "error.404"}, + ), + "end_of_sequence": (None, "Unexpected call to send_request"), + "generic_error": (None, "Expected error"), + "delete_good": ({"code": 204}, None), + "ntp_servers": ({"data": ["123.12.3.123", "123.1.23.123"]}, None,), + "update_ntp_servers": ({"data": ["123.12.3.123", "12.3.12.3"]}, None,), + "add_ntp_servers": ( + {"data": ["123.12.3.123", "123.1.23.123", "12.3.12.3"]}, + None, + ), + "remove_ntp_servers": ({"data": ["123.12.3.123"]}, None,), +} + + +def set_module_args(args): + """prepare arguments so that they will be picked up during module creation""" + args = json.dumps({"ANSIBLE_MODULE_ARGS": args}) + basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access + + +class AnsibleExitJson(Exception): + """Exception class to be raised by module.exit_json and caught by the test case""" + + pass + + +class AnsibleFailJson(Exception): + """Exception class to be raised by module.fail_json and caught by the test case""" + + pass + + +def exit_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over exit_json; package return data into an exception""" + if "changed" not in kwargs: + kwargs["changed"] = False + raise AnsibleExitJson(kwargs) + + +def fail_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over fail_json; package return data into an exception""" + kwargs["failed"] = True + raise AnsibleFailJson(kwargs) + + +class 
TestMyModule(unittest.TestCase): + """ a group of related Unit Tests """ + + def setUp(self): + self.mock_module_helper = patch.multiple( + basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json + ) + self.mock_module_helper.start() + self.addCleanup(self.mock_module_helper.stop) + + def set_default_args_fail_check(self): + return dict( + { + "ntp_servers": "123.12.3.123,123.1.23.123", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_default_args_pass_check(self): + return dict( + { + "state": "present", + "passphrase": "secretstring", + "ntp_servers": "123.12.3.123,123.1.23.123", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_set_na_sg_grid_ntp_servers(self): + return dict( + { + "state": "present", + "passphrase": "secretstring", + "ntp_servers": "123.12.3.123,12.3.12.3", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_add_na_sg_grid_ntp_servers(self): + return dict( + { + "state": "present", + "passphrase": "secretstring", + "ntp_servers": "123.12.3.123,123.1.23.123,12.3.12.3", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_remove_na_sg_grid_ntp_server(self): + return dict( + { + "state": "present", + "passphrase": "secretstring", + "ntp_servers": "123.12.3.123", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def test_module_fail_when_required_args_missing(self): + """ required arguments are reported as errors """ + with pytest.raises(AnsibleFailJson) as exc: + set_module_args(self.set_default_args_fail_check()) + grid_ntp_module() + print( + "Info: test_module_fail_when_required_args_missing: %s" + % exc.value.args[0]["msg"] + ) + + def test_module_pass_when_required_args_present(self): + """ required arguments are present, module exits successfully """ + with pytest.raises(AnsibleExitJson) as exc: + set_module_args(self.set_default_args_pass_check()) + grid_ntp_module() + exit_json(changed=True, msg="Induced arguments check") + print( + "Info: test_module_pass_when_required_args_present: %s" + % exc.value.args[0]["msg"] + ) + assert exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_set_na_sg_grid_ntp_servers_pass(self, mock_request): + set_module_args(self.set_args_set_na_sg_grid_ntp_servers()) + my_obj = grid_ntp_module() + mock_request.side_effect = [ + SRR["ntp_servers"], # get + SRR["update_ntp_servers"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_set_na_sg_grid_ntp_servers_pass: %s" + % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_idempotent_set_na_sg_grid_ntp_servers_pass(self, mock_request): + set_module_args(self.set_args_set_na_sg_grid_ntp_servers()) + my_obj = grid_ntp_module() + 
mock_request.side_effect = [ + SRR["update_ntp_servers"], # get + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_idempotent_set_na_sg_grid_ntp_servers_pass: %s" + % repr(exc.value.args[0]) + ) + assert not exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_add_na_sg_grid_ntp_servers_pass(self, mock_request): + set_module_args(self.set_args_add_na_sg_grid_ntp_servers()) + my_obj = grid_ntp_module() + mock_request.side_effect = [ + SRR["ntp_servers"], # get + SRR["add_ntp_servers"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_add_na_sg_grid_ntp_servers_pass: %s" + % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_remove_na_sg_grid_ntp_servers_pass(self, mock_request): + set_module_args(self.set_args_remove_na_sg_grid_ntp_server()) + my_obj = grid_ntp_module() + mock_request.side_effect = [ + SRR["ntp_servers"], # get + SRR["remove_ntp_servers"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_remove_na_sg_grid_ntp_servers_pass: %s" + % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] diff --git a/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_regions.py b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_regions.py new file mode 100644 index 000000000..585ba3f45 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_regions.py @@ -0,0 +1,206 @@ +# (c) 2020, NetApp, Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +""" unit tests NetApp StorageGRID Regions Ansible module: na_sg_grid_regions""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +import json +import pytest +import sys +try: + from requests import Response +except ImportError: + if sys.version_info < (2, 7): + pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available') + else: + raise + +from ansible_collections.netapp.storagegrid.tests.unit.compat import unittest +from ansible_collections.netapp.storagegrid.tests.unit.compat.mock import ( + patch, + Mock, +) +from ansible.module_utils import basic +from ansible.module_utils._text import to_bytes +from ansible_collections.netapp.storagegrid.plugins.modules.na_sg_grid_regions import ( + SgGridRegions as grid_regions_module, +) + +# REST API canned responses when mocking send_request +SRR = { + # common responses + "empty_good": ({"data": []}, None), + "not_found": ( + {"status": "error", "code": 404, "data": {}}, + {"key": "error.404"}, + ), + "end_of_sequence": (None, "Unexpected call to send_request"), + "generic_error": (None, "Expected error"), + "delete_good": ({"code": 204}, None), + "default_regions": ({"data": ["us-east-1"]}, None,), + "regions": ({"data": ["us-east-1", "us-west-1"]}, None,), +} + + +def set_module_args(args): + """prepare arguments so that they will be picked up during module creation""" + args = json.dumps({"ANSIBLE_MODULE_ARGS": args}) + basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access + + +class AnsibleExitJson(Exception): + 
"""Exception class to be raised by module.exit_json and caught by the test case""" + + pass + + +class AnsibleFailJson(Exception): + """Exception class to be raised by module.fail_json and caught by the test case""" + + pass + + +def exit_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over exit_json; package return data into an exception""" + if "changed" not in kwargs: + kwargs["changed"] = False + raise AnsibleExitJson(kwargs) + + +def fail_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over fail_json; package return data into an exception""" + kwargs["failed"] = True + raise AnsibleFailJson(kwargs) + + +class TestMyModule(unittest.TestCase): + """ a group of related Unit Tests """ + + def setUp(self): + self.mock_module_helper = patch.multiple( + basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json + ) + self.mock_module_helper.start() + self.addCleanup(self.mock_module_helper.stop) + + def set_default_args_fail_check(self): + return dict( + { + "regions": "us-east-1,us-west-1", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_default_args_pass_check(self): + return dict( + { + "state": "present", + "regions": "us-east-1,us-west-1", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_set_na_sg_grid_regions(self): + return dict( + { + "state": "present", + "regions": "us-east-1,us-west-1", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_remove_na_sg_grid_regions(self): + return dict( + { + "state": "present", + "regions": "us-east-1", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def test_module_fail_when_required_args_missing(self): + """ required arguments are reported as errors """ + with pytest.raises(AnsibleFailJson) as exc: + set_module_args(self.set_default_args_fail_check()) + grid_regions_module() + print( + "Info: test_module_fail_when_required_args_missing: %s" + % exc.value.args[0]["msg"] + ) + + def test_module_fail_when_required_args_present(self): + """ required arguments are reported as errors """ + with pytest.raises(AnsibleExitJson) as exc: + set_module_args(self.set_default_args_pass_check()) + grid_regions_module() + exit_json(changed=True, msg="Induced arguments check") + print( + "Info: test_module_fail_when_required_args_present: %s" + % exc.value.args[0]["msg"] + ) + assert exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_set_na_sg_grid_regions_pass(self, mock_request): + set_module_args(self.set_args_set_na_sg_grid_regions()) + my_obj = grid_regions_module() + mock_request.side_effect = [ + SRR["default_regions"], # get + SRR["regions"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_set_na_sg_grid_regions_pass: %s" + % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_idempotent_set_na_sg_grid_regions_pass(self, mock_request): + set_module_args(self.set_args_set_na_sg_grid_regions()) + my_obj = grid_regions_module() + mock_request.side_effect = [ + SRR["regions"], # get + 
SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_idempotent_set_na_sg_grid_regions_pass: %s" + % repr(exc.value.args[0]) + ) + assert not exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_remove_na_sg_grid_regions_pass(self, mock_request): + set_module_args(self.set_args_remove_na_sg_grid_regions()) + my_obj = grid_regions_module() + mock_request.side_effect = [ + SRR["regions"], # get + SRR["default_regions"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_remove_na_sg_grid_regions_pass: %s" + % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] diff --git a/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_traffic_classes.py b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_traffic_classes.py new file mode 100644 index 000000000..42fce0e3b --- /dev/null +++ b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_traffic_classes.py @@ -0,0 +1,355 @@ +# (c) 2022, NetApp, Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +""" unit tests NetApp StorageGRID Grid HA Group Ansible module: na_sg_grid_traffic_classes""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +import json +import pytest +import sys + +try: + from requests import Response +except ImportError: + if sys.version_info < (2, 7): + pytestmark = pytest.mark.skip("Skipping Unit Tests on 2.6 as requests is not available") + else: + raise + +from ansible_collections.netapp.storagegrid.tests.unit.compat import unittest +from ansible_collections.netapp.storagegrid.tests.unit.compat.mock import ( + patch, + Mock, +) +from ansible.module_utils import basic +from ansible.module_utils._text import to_bytes +from ansible_collections.netapp.storagegrid.plugins.modules.na_sg_grid_traffic_classes import ( + SgGridTrafficClasses as grid_traffic_classes_module, +) + +# REST API canned responses when mocking send_request +SRR = { + # common responses + "empty_good": ({"data": []}, None), + "not_found": ( + {"status": "error", "code": 404, "data": {}}, + {"key": "error.404"}, + ), + "end_of_sequence": (None, "Unexpected call to send_request"), + "generic_error": (None, "Expected error"), + "delete_good": (None, None), + "update_good": (None, None), + "version_114": ({"data": {"productVersion": "11.4.0-20200721.1338.d3969b3"}}, None), + "version_116": ({"data": {"productVersion": "11.6.0-20211120.0301.850531e"}}, None), + "traffic_class_record": ( + { + "data": { + "id": "6b2946e6-7fed-40d0-9262-8e922580aba7", + "name": "ansible-test-traffic-class", + "description": "Ansible Test", + "matchers": [ + {"type": "cidr", "inverse": False, "members": ["192.168.50.0/24"]}, + {"type": "bucket", "inverse": False, "members": ["ansible-test1", "ansible-test2"]}, + ], + "limits": [], + } + }, + None, + ), + "traffic_class_record_updated": ( + { + "data": { + "id": "6b2946e6-7fed-40d0-9262-8e922580aba7", + "name": "ansible-test-traffic-class", + "description": "Ansible Test", + "matchers": [ + {"type": "cidr", "inverse": False, "members": ["192.168.50.0/24"]}, + {"type": "bucket", "inverse": False, "members": ["ansible-test1", "ansible-test2"]}, + ], + "limits": [{"type": "aggregateBandwidthIn", "value": 888888}], + } + }, + 
None, + ), + "traffic_class_record_rename": ( + { + "data": { + "id": "6b2946e6-7fed-40d0-9262-8e922580aba7", + "name": "ansible-test-traffic-class-rename", + "description": "Ansible Test", + "matchers": [ + {"type": "cidr", "inverse": False, "members": ["192.168.50.0/24"]}, + {"type": "bucket", "inverse": False, "members": ["ansible-test1", "ansible-test2"]}, + ], + "limits": [], + } + }, + None, + ), + "traffic_classes": ( + { + "data": [ + { + "id": "6b2946e6-7fed-40d0-9262-8e922580aba7", + "name": "ansible-test-traffic-class", + "description": "Ansible Test", + }, + { + "id": "531e6be1-e9b1-4010-bb79-03437c7c13d2", + "name": "policy-test1", + "description": "First test policy", + }, + ] + }, + None, + ), + "node_health": ( + { + "data": [ + { + "id": "0b1866ed-d6e7-41b4-815f-bf867348b76b", + "isPrimaryAdmin": True, + "name": "SITE1-ADM1", + "siteId": "ae56d06d-bd83-46bd-adce-77146b1d94bd", + "siteName": "SITE1", + "severity": "normal", + "state": "connected", + "type": "adminNode", + }, + { + "id": "7bb5bf05-a04c-4344-8abd-08c5c4048666", + "isPrimaryAdmin": None, + "name": "SITE1-G1", + "siteId": "ae56d06d-bd83-46bd-adce-77146b1d94bd", + "siteName": "SITE1", + "severity": "normal", + "state": "connected", + "type": "apiGatewayNode", + }, + { + "id": "970ad050-b68b-4aae-a94d-aef73f3095c4", + "isPrimaryAdmin": False, + "name": "SITE2-ADM1", + "siteId": "7c24002e-5157-43e9-83e5-02db9b265b02", + "siteName": "SITE2", + "severity": "normal", + "state": "connected", + "type": "adminNode", + }, + ] + }, + None, + ), +} + + +def set_module_args(args): + """prepare arguments so that they will be picked up during module creation""" + args = json.dumps({"ANSIBLE_MODULE_ARGS": args}) + basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access + + +class AnsibleExitJson(Exception): + """Exception class to be raised by module.exit_json and caught by the test case""" + + pass + + +class AnsibleFailJson(Exception): + """Exception class to be raised by module.fail_json and caught by the test case""" + + pass + + +def exit_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over exit_json; package return data into an exception""" + if "changed" not in kwargs: + kwargs["changed"] = False + raise AnsibleExitJson(kwargs) + + +def fail_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over fail_json; package return data into an exception""" + kwargs["failed"] = True + raise AnsibleFailJson(kwargs) + + +class TestMyModule(unittest.TestCase): + """a group of related Unit Tests""" + + def setUp(self): + self.mock_module_helper = patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json) + self.mock_module_helper.start() + self.addCleanup(self.mock_module_helper.stop) + + def set_default_args_fail_check(self): + return dict( + { + "matchers": [ + {"type": "bucket", "members": ["ansible-test1", "ansible-test2"]}, + {"type": "cidr", "members": ["192.168.50.0/24"]}, + ], + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_default_args_pass_check(self): + return dict( + { + "state": "present", + "name": "ansible-test-traffic-class", + "matchers": [ + {"type": "bucket", "members": ["ansible-test1", "ansible-test2"]}, + {"type": "cidr", "members": ["192.168.50.0/24"]}, + ], + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_create_na_sg_grid_traffic_class(self): + return dict( + { + "state": 
"present", + "name": "ansible-test-traffic-class", + "description": "Ansible Test", + "matchers": [ + {"type": "bucket", "members": ["ansible-test1", "ansible-test2"]}, + {"type": "cidr", "members": ["192.168.50.0/24"]}, + ], + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_delete_na_sg_grid_traffic_class(self): + return dict( + { + "state": "absent", + "name": "ansible-test-traffic-class", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_module_fail_when_required_args_missing(self, mock_request): + """required arguments are reported as errors""" + with pytest.raises(AnsibleFailJson) as exc: + set_module_args(self.set_default_args_fail_check()) + grid_traffic_classes_module() + print("Info: test_module_fail_when_required_args_missing: %s" % exc.value.args[0]["msg"]) + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_module_pass_when_required_args_present(self, mock_request): + """required arguments are reported as errors""" + mock_request.side_effect = [ + SRR["node_health"], # get + ] + with pytest.raises(AnsibleExitJson) as exc: + set_module_args(self.set_default_args_pass_check()) + grid_traffic_classes_module() + exit_json(changed=True, msg="Induced arguments check") + print("Info: test_module_pass_when_required_args_present: %s" % exc.value.args[0]["msg"]) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_create_na_sg_grid_traffic_class_pass(self, mock_request): + set_module_args(self.set_args_create_na_sg_grid_traffic_class()) + mock_request.side_effect = [ + SRR["empty_good"], # get + SRR["traffic_class_record"], # post + SRR["end_of_sequence"], + ] + my_obj = grid_traffic_classes_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_create_na_sg_grid_traffic_class_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_idempotent_create_na_sg_grid_traffic_class_pass(self, mock_request): + args = self.set_args_create_na_sg_grid_traffic_class() + set_module_args(args) + mock_request.side_effect = [ + SRR["traffic_classes"], # get + SRR["traffic_class_record"], # get + SRR["end_of_sequence"], + ] + my_obj = grid_traffic_classes_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_idempotent_create_na_sg_grid_traffic_class_pass: %s" % repr(exc.value.args[0])) + assert not exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_update_na_sg_grid_traffic_class_pass(self, mock_request): + args = self.set_args_create_na_sg_grid_traffic_class() + args["description"] = "Ansible Test with Limit" + args["limits"] = [{"type": "aggregateBandwidthIn", "value": 888888}] + set_module_args(args) + mock_request.side_effect = [ + SRR["traffic_classes"], # get + SRR["traffic_class_record"], # get + SRR["traffic_class_record_updated"], # put + SRR["end_of_sequence"], + ] + my_obj = grid_traffic_classes_module() + with pytest.raises(AnsibleExitJson) as exc: + 
my_obj.apply() + print("Info: test_update_na_sg_grid_traffic_class_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_rename_na_sg_grid_traffic_class_pass(self, mock_request): + args = self.set_args_create_na_sg_grid_traffic_class() + args["policy_id"] = "6b2946e6-7fed-40d0-9262-8e922580aba7" + args["name"] = "ansible-test-traffic-class-rename" + set_module_args(args) + mock_request.side_effect = [ + SRR["traffic_class_record"], # get + SRR["traffic_class_record_rename"], # put + SRR["end_of_sequence"], + ] + my_obj = grid_traffic_classes_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_rename_na_sg_grid_traffic_class_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_delete_na_sg_grid_traffic_class_pass(self, mock_request): + args = self.set_args_delete_na_sg_grid_traffic_class() + set_module_args(args) + mock_request.side_effect = [ + SRR["traffic_classes"], # get + SRR["traffic_class_record"], # get + SRR["delete_good"], # delete + SRR["end_of_sequence"], + ] + my_obj = grid_traffic_classes_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_delete_na_sg_grid_traffic_class_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_update_na_sg_grid_traffic_class_bad_policy_id_fail(self, mock_request): + args = self.set_args_create_na_sg_grid_traffic_class() + args["policy_id"] = "ffffffff-ffff-aaaa-aaaa-000000000000" + args["description"] = "Bad ID" + set_module_args(args) + mock_request.side_effect = [ + SRR["not_found"], # get + ] + with pytest.raises(AnsibleFailJson) as exc: + my_obj = grid_traffic_classes_module() + my_obj.apply() + print("Info: test_update_na_sg_grid_traffic_class_bad_policy_id_fail: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["failed"] diff --git a/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_user.py b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_user.py new file mode 100644 index 000000000..c8ec38c09 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_grid_user.py @@ -0,0 +1,476 @@ +# (c) 2020, NetApp, Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +""" unit tests NetApp StorageGRID Grid User Ansible module: na_sg_grid_user""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +import json +import pytest +import sys +try: + from requests import Response +except ImportError: + if sys.version_info < (2, 7): + pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available') + else: + raise + +from ansible_collections.netapp.storagegrid.tests.unit.compat import unittest +from ansible_collections.netapp.storagegrid.tests.unit.compat.mock import ( + patch, + Mock, +) +from ansible.module_utils import basic +from ansible.module_utils._text import to_bytes +from ansible_collections.netapp.storagegrid.plugins.modules.na_sg_grid_user import ( + SgGridUser as grid_user_module, +) + +# REST API canned responses when mocking send_request +SRR = { 
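+    # each SRR entry below is a (response_payload, error) tuple; the tests feed them to the mocked send_request in order via side_effect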
+ # common responses + "empty_good": ({"data": []}, None), + "not_found": ({"status": "error", "code": 404, "data": {}}, {"key": "error.404"},), + "end_of_sequence": (None, "Unexpected call to send_request"), + "generic_error": (None, "Expected error"), + "delete_good": ({"code": 204}, None), + "pw_change_good": ({"code": 204}, None), + "grid_groups": ( + { + "data": [ + { + "displayName": "TestGridGroup1", + "uniqueName": "group/testgridgroup1", + "accountId": "12345678901234567890", + "id": "12345678-abcd-1234-abcd-1234567890ab", + "federated": False, + "groupURN": "urn:sgws:identity::12345678901234567890:group/testgridgroup1", + }, + { + "displayName": "TestGridGroup2", + "uniqueName": "group/testgridgroup2", + "accountId": "12345678901234567890", + "id": "87654321-abcd-1234-cdef-1234567890ab", + "federated": False, + "groupURN": "urn:sgws:identity::12345678901234567890:group/testgridgroup2", + }, + ] + }, + None, + ), + "grid_users": ( + { + "data": [ + { + "id": "09876543-abcd-4321-abcd-0987654321ab", + "accountId": "12345678901234567890", + "fullName": "testgriduser", + "uniqueName": "user/ansible-sg-adm-user1", + "userURN": "urn:sgws:identity::12345678901234567890:user/testgriduser", + "federated": False, + "memberOf": ["12345678-abcd-1234-abcd-1234567890ab"], + "disable": False, + } + ] + }, + None, + ), + "grid_user_record_no_group": ( + { + "data": { + "id": "09876543-abcd-4321-abcd-0987654321ab", + "accountId": "12345678901234567890", + "fullName": "testgriduser", + "uniqueName": "user/ansible-sg-adm-user1", + "userURN": "urn:sgws:identity::12345678901234567890:user/testgriduser", + "federated": False, + "disable": False, + } + }, + None, + ), + "grid_user_record": ( + { + "data": { + "id": "09876543-abcd-4321-abcd-0987654321ab", + "accountId": "12345678901234567890", + "fullName": "testgriduser", + "uniqueName": "user/ansible-sg-adm-user1", + "userURN": "urn:sgws:identity::12345678901234567890:user/testgriduser", + "federated": False, + "memberOf": ["12345678-abcd-1234-abcd-1234567890ab"], + "disable": False, + } + }, + None, + ), + "grid_user_record_update": ( + { + "data": { + "id": "09876543-abcd-4321-abcd-0987654321ab", + "accountId": "12345678901234567890", + "fullName": "testgriduser", + "uniqueName": "user/ansible-sg-adm-user1", + "userURN": "urn:sgws:identity::12345678901234567890:user/testgriduser", + "federated": False, + "memberOf": [ + "12345678-abcd-1234-abcd-1234567890ab", + "87654321-abcd-1234-cdef-1234567890ab", + ], + "disable": False, + } + }, + None, + ), +} + + +def set_module_args(args): + """prepare arguments so that they will be picked up during module creation""" + args = json.dumps({"ANSIBLE_MODULE_ARGS": args}) + basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access + + +class AnsibleExitJson(Exception): + """Exception class to be raised by module.exit_json and caught by the test case""" + + pass + + +class AnsibleFailJson(Exception): + """Exception class to be raised by module.fail_json and caught by the test case""" + + pass + + +def exit_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over exit_json; package return data into an exception""" + if "changed" not in kwargs: + kwargs["changed"] = False + raise AnsibleExitJson(kwargs) + + +def fail_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over fail_json; package return data into an exception""" + kwargs["failed"] = True + raise AnsibleFailJson(kwargs) + + +class TestMyModule(unittest.TestCase): + """ a group of 
related Unit Tests """ + + def setUp(self): + self.mock_module_helper = patch.multiple( + basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json + ) + self.mock_module_helper.start() + self.addCleanup(self.mock_module_helper.stop) + + def set_default_args_fail_check(self): + return dict( + { + "full_name": "TestUser", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_default_args_pass_check(self): + return dict( + { + "state": "present", + "full_name": "TestUser", + "unique_name": "user/testuser", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_create_na_sg_grid_user_no_group(self): + return dict( + { + "state": "present", + "full_name": "TestUser", + "unique_name": "user/testuser", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_create_na_sg_grid_user(self): + return dict( + { + "state": "present", + "full_name": "TestUser", + "unique_name": "user/testuser", + "member_of": ["group/testgridgroup1"], + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_delete_na_sg_grid_user(self): + return dict( + { + "state": "absent", + "unique_name": "user/testuser", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def test_module_fail_when_required_args_missing(self): + """ required arguments are reported as errors """ + with pytest.raises(AnsibleFailJson) as exc: + set_module_args(self.set_default_args_fail_check()) + grid_user_module() + print( + "Info: test_module_fail_when_required_args_missing: %s" + % exc.value.args[0]["msg"] + ) + + def test_module_fail_when_required_args_present(self): + """ required arguments are reported as errors """ + with pytest.raises(AnsibleExitJson) as exc: + set_module_args(self.set_default_args_pass_check()) + grid_user_module() + exit_json(changed=True, msg="Induced arguments check") + print( + "Info: test_module_fail_when_required_args_present: %s" + % exc.value.args[0]["msg"] + ) + assert exc.value.args[0]["changed"] + + def test_module_fail_with_bad_unique_name(self): + """ error returned if unique_name doesn't start with user or federated_user """ + with pytest.raises(AnsibleFailJson) as exc: + args = self.set_default_args_pass_check() + args["unique_name"] = "noprefixuser" + set_module_args(args) + grid_user_module() + print( + "Info: test_module_fail_with_bad_unique_name: %s" % exc.value.args[0]["msg"] + ) + + def set_args_create_na_sg_grid_user_with_password(self): + return dict( + { + "state": "present", + "full_name": "TestUser", + "unique_name": "user/testuser", + "member_of": ["group/testgridgroup1"], + "password": "netapp123", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_create_na_sg_grid_user_no_group_pass(self, mock_request): + set_module_args(self.set_args_create_na_sg_grid_user_no_group()) + my_obj = grid_user_module() + mock_request.side_effect = [ + SRR["not_found"], # get + SRR["grid_user_record_no_group"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + 
"Info: test_create_na_sg_grid_user_no_group_pass: %s" + % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_create_na_sg_grid_user_pass(self, mock_request): + set_module_args(self.set_args_create_na_sg_grid_user()) + my_obj = grid_user_module() + mock_request.side_effect = [ + SRR["not_found"], # get + SRR["grid_groups"], # get + SRR["grid_user_record"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_create_na_sg_grid_user_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_idempotent_create_na_sg_grid_user_pass(self, mock_request): + set_module_args(self.set_args_create_na_sg_grid_user()) + my_obj = grid_user_module() + mock_request.side_effect = [ + SRR["grid_user_record"], # get + SRR["grid_groups"], # get + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_idempotent_create_na_sg_grid_user_pass: %s" + % repr(exc.value.args[0]) + ) + assert not exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_update_na_sg_grid_user_pass(self, mock_request): + args = self.set_args_create_na_sg_grid_user() + args["member_of"] = ["group/testgridgroup1", "group/testgridgroup2"] + + set_module_args(args) + my_obj = grid_user_module() + mock_request.side_effect = [ + SRR["grid_user_record"], # get + SRR["grid_groups"], # get + SRR["grid_user_record_update"], # put + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_update_na_sg_grid_user_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_delete_na_sg_grid_user_pass(self, mock_request): + set_module_args(self.set_args_delete_na_sg_grid_user()) + my_obj = grid_user_module() + mock_request.side_effect = [ + SRR["grid_user_record"], # get + SRR["grid_groups"], # get + SRR["delete_good"], # delete + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_delete_na_sg_grid_user_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + # create user and set pass + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_create_na_sg_grid_user_and_set_password_pass(self, mock_request): + set_module_args(self.set_args_create_na_sg_grid_user_with_password()) + my_obj = grid_user_module() + mock_request.side_effect = [ + SRR["not_found"], # get + SRR["grid_groups"], # get + SRR["grid_user_record"], # post + SRR["pw_change_good"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_create_na_sg_grid_user_and_set_password_pass: %s" + % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] + + # Idempotent user with password defined + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_idempotent_create_na_sg_grid_user_and_set_password_pass( + self, mock_request + ): + 
set_module_args(self.set_args_create_na_sg_grid_user_with_password()) + my_obj = grid_user_module() + mock_request.side_effect = [ + SRR["grid_user_record"], # get + SRR["grid_groups"], # get + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_idempotent_create_na_sg_grid_user_and_set_password_pass: %s" + % repr(exc.value.args[0]) + ) + assert not exc.value.args[0]["changed"] + + # update user and set pass + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_update_na_sg_grid_user_and_set_password_pass(self, mock_request): + args = self.set_args_create_na_sg_grid_user_with_password() + args["member_of"] = ["group/testgridgroup1", "group/testgridgroup2"] + args["update_password"] = "always" + + set_module_args(args) + my_obj = grid_user_module() + mock_request.side_effect = [ + SRR["grid_user_record"], # get + SRR["grid_groups"], # get + SRR["grid_user_record_update"], # put + SRR["pw_change_good"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_update_na_sg_grid_user_and_set_password_pass: %s" + % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] + + # set pass only + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_set_na_sg_grid_user_password_pass(self, mock_request): + args = self.set_args_create_na_sg_grid_user_with_password() + args["update_password"] = "always" + + set_module_args(args) + my_obj = grid_user_module() + mock_request.side_effect = [ + SRR["grid_user_record"], # get + SRR["grid_groups"], # get + SRR["pw_change_good"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_set_na_sg_grid_user_password_pass: %s" % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] + + # attempt to set password on federated user + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_fail_set_federated_user_password(self, mock_request): + with pytest.raises(AnsibleFailJson) as exc: + args = self.set_args_create_na_sg_grid_user_with_password() + args["unique_name"] = "federated-user/abc123" + args["update_password"] = "always" + set_module_args(args) + grid_user_module() + print( + "Info: test_fail_set_federated_user_password: %s" % repr(exc.value.args[0]) + ) diff --git a/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_org_container.py b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_org_container.py new file mode 100644 index 000000000..21c49a556 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_org_container.py @@ -0,0 +1,348 @@ +# (c) 2020, NetApp, Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +""" unit tests NetApp StorageGRID Org Container Ansible module: na_sg_org_container""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +import json +import pytest +import sys + +try: + from requests import Response +except ImportError: + if sys.version_info < (2, 7): + pytestmark = pytest.mark.skip("Skipping Unit Tests on 2.6 as requests is not available") + else: + raise + +from ansible_collections.netapp.storagegrid.tests.unit.compat import unittest +from 
ansible_collections.netapp.storagegrid.tests.unit.compat.mock import ( + patch, + Mock, +) +from ansible.module_utils import basic +from ansible.module_utils._text import to_bytes +from ansible_collections.netapp.storagegrid.plugins.modules.na_sg_org_container import ( + SgOrgContainer as org_container_module, +) + +# REST API canned responses when mocking send_request +SRR = { + # common responses + "empty_good": ({"data": []}, None), + "not_found": ( + {"status": "error", "code": 404, "data": {}}, + {"key": "error.404"}, + ), + "end_of_sequence": (None, "Unexpected call to send_request"), + "generic_error": (None, "Expected error"), + "delete_good": (None, None), + "version_114": ({"data": {"productVersion": "11.4.0-20200721.1338.d3969b3"}}, None), + "version_116": ({"data": {"productVersion": "11.6.0-20211120.0301.850531e"}}, None), + "global_compliance_disabled": ( + { + "data": { + "complianceEnabled": False, + } + }, + None, + ), + "global_compliance_enabled": ( + { + "data": { + "complianceEnabled": True, + } + }, + None, + ), + "org_containers": ( + {"data": [{"name": "testbucket", "creationTime": "2020-02-04T12:43:50.777Z", "region": "us-east-1"}]}, + None, + ), + "org_container_record": ( + {"data": {"name": "testbucket", "creationTime": "2020-02-04T12:43:50.777Z", "region": "us-east-1"}}, + None, + ), + "org_container_objectlock_record": ( + { + "data": { + "name": "testbucket", + "creationTime": "2020-02-04T12:43:50.777Z", + "region": "us-east-1", + "s3ObjectLock": {"enabled": True}, + } + }, + None, + ), + "org_container_record_update": ( + { + "data": { + "name": "testbucket", + "creationTime": "2020-02-04T12:43:50.777Z", + "region": "us-east-1", + "compliance": {"autoDelete": False, "legalHold": False}, + } + }, + None, + ), + "org_container_versioning_disabled": ({"data": {"versioningEnabled": False, "versioningSuspended": False}}, None), + "org_container_versioning_enabled": ({"data": {"versioningEnabled": True, "versioningSuspended": False}}, None), + "org_container_versioning_suspended": ({"data": {"versioningEnabled": False, "versioningSuspended": True}}, None), +} + + +def set_module_args(args): + """prepare arguments so that they will be picked up during module creation""" + args = json.dumps({"ANSIBLE_MODULE_ARGS": args}) + basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access + + +class AnsibleExitJson(Exception): + """Exception class to be raised by module.exit_json and caught by the test case""" + + pass + + +class AnsibleFailJson(Exception): + """Exception class to be raised by module.fail_json and caught by the test case""" + + pass + + +def exit_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over exit_json; package return data into an exception""" + if "changed" not in kwargs: + kwargs["changed"] = False + raise AnsibleExitJson(kwargs) + + +def fail_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over fail_json; package return data into an exception""" + kwargs["failed"] = True + raise AnsibleFailJson(kwargs) + + +class TestMyModule(unittest.TestCase): + """a group of related Unit Tests""" + + def setUp(self): + self.mock_module_helper = patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json) + self.mock_module_helper.start() + self.addCleanup(self.mock_module_helper.stop) + + def set_default_args_fail_check(self): + return dict( + {"name": "testbucket", "auth_token": "01234567-5678-9abc-78de-9fgabc123def", "validate_certs": False} + ) + + def 
set_default_args_pass_check(self): + return dict( + { + "state": "present", + "name": "testbucket", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_create_na_sg_org_container(self): + return dict( + { + "state": "present", + "name": "testbucket", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_delete_na_sg_org_container(self): + return dict( + { + "state": "absent", + "name": "testbucket", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_module_fail_when_required_args_missing(self, mock_request): + """required arguments are reported as errors""" + mock_request.side_effect = [ + SRR["version_114"], + ] + with pytest.raises(AnsibleFailJson) as exc: + set_module_args(self.set_default_args_fail_check()) + org_container_module() + print("Info: test_module_fail_when_required_args_missing: %s" % exc.value.args[0]["msg"]) + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_module_fail_when_required_args_present(self, mock_request): + """required arguments are reported as errors""" + mock_request.side_effect = [ + SRR["version_114"], + ] + with pytest.raises(AnsibleExitJson) as exc: + set_module_args(self.set_default_args_pass_check()) + org_container_module() + exit_json(changed=True, msg="Induced arguments check") + print("Info: test_module_fail_when_required_args_present: %s" % exc.value.args[0]["msg"]) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_create_na_sg_org_container_pass(self, mock_request): + set_module_args(self.set_args_create_na_sg_org_container()) + mock_request.side_effect = [ + SRR["version_114"], + SRR["empty_good"], # get + SRR["org_container_record"], # post + SRR["end_of_sequence"], + ] + my_obj = org_container_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_create_na_sg_org_container_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_idempotent_create_na_sg_org_container_pass(self, mock_request): + set_module_args(self.set_args_create_na_sg_org_container()) + mock_request.side_effect = [ + SRR["version_114"], + SRR["org_containers"], # get + SRR["end_of_sequence"], + ] + my_obj = org_container_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_idempotent_create_na_sg_org_container_pass: %s" % repr(exc.value.args[0])) + assert not exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_update_na_sg_org_container_pass(self, mock_request): + args = self.set_args_create_na_sg_org_container() + args["compliance"] = {"auto_delete": False, "legal_hold": False} + set_module_args(args) + mock_request.side_effect = [ + SRR["version_114"], + SRR["org_containers"], # get + SRR["org_container_record_update"], # put + SRR["end_of_sequence"], + ] + my_obj = org_container_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + 
print("Info: test_update_na_sg_org_container_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_delete_na_sg_org_container_pass(self, mock_request): + set_module_args(self.set_args_delete_na_sg_org_container()) + mock_request.side_effect = [ + SRR["version_114"], + SRR["org_containers"], # get + SRR["delete_good"], # delete + SRR["end_of_sequence"], + ] + my_obj = org_container_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_delete_na_sg_org_container_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_module_fail_minimum_version_not_met_object_lock(self, mock_request): + args = self.set_args_create_na_sg_org_container() + args["s3_object_lock_enabled"] = True + set_module_args(args) + mock_request.side_effect = [ + SRR["version_114"], # get + ] + with pytest.raises(AnsibleFailJson) as exc: + org_container_module() + print("Info: test_module_fail_minimum_version_not_met_object_lock: %s" % exc.value.args[0]["msg"]) + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_create_na_sg_org_container_objectlock_global_compliance_fail(self, mock_request): + args = self.set_args_create_na_sg_org_container() + args["s3_object_lock_enabled"] = True + set_module_args(args) + mock_request.side_effect = [ + SRR["version_116"], + SRR["empty_good"], # get + SRR["global_compliance_disabled"], # get + ] + my_obj = org_container_module() + with pytest.raises(AnsibleFailJson) as exc: + my_obj.apply() + print("Info: test_create_na_sg_org_container_objectlock_global_compliance_fail: %s" % repr(exc.value.args[0])) + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_create_na_sg_org_container_objectlock_pass(self, mock_request): + set_module_args(self.set_args_create_na_sg_org_container()) + mock_request.side_effect = [ + SRR["version_116"], + SRR["empty_good"], # get + SRR["global_compliance_enabled"], # get + SRR["org_container_objectlock_record"], # post + SRR["end_of_sequence"], + ] + my_obj = org_container_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_create_na_sg_org_container_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_module_fail_minimum_version_not_met_versioning(self, mock_request): + args = self.set_args_create_na_sg_org_container() + args["bucket_versioning_enabled"] = True + set_module_args(args) + mock_request.side_effect = [ + SRR["version_114"], # get + ] + with pytest.raises(AnsibleFailJson) as exc: + org_container_module() + print("Info: test_module_fail_minimum_version_not_met_versioning: %s" % exc.value.args[0]["msg"]) + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_create_na_sg_org_container_with_versioning_pass(self, mock_request): + args = self.set_args_create_na_sg_org_container() + args["bucket_versioning_enabled"] = True + set_module_args(args) + mock_request.side_effect = [ + SRR["version_116"], + SRR["empty_good"], # get + SRR["org_container_record"], # post + 
SRR["org_container_versioning_enabled"], # post + SRR["end_of_sequence"], + ] + my_obj = org_container_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_create_na_sg_org_container_with_versioning_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_update_na_sg_org_container_enable_versioning_pass(self, mock_request): + args = self.set_args_create_na_sg_org_container() + args["bucket_versioning_enabled"] = True + set_module_args(args) + mock_request.side_effect = [ + SRR["version_116"], + SRR["org_containers"], # get + SRR["org_container_versioning_disabled"], # get + SRR["org_container_versioning_enabled"], # put + SRR["end_of_sequence"], + ] + my_obj = org_container_module() + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_update_na_sg_org_container_enable_versioning_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] diff --git a/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_org_group.py b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_org_group.py new file mode 100644 index 000000000..c229130c2 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_org_group.py @@ -0,0 +1,403 @@ +# (c) 2020, NetApp, Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +""" unit tests NetApp StorageGRID Org Group Ansible module: na_sg_org_group""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +import json +import pytest +import sys +try: + from requests import Response +except ImportError: + if sys.version_info < (2, 7): + pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available') + else: + raise + +from ansible_collections.netapp.storagegrid.tests.unit.compat import unittest +from ansible_collections.netapp.storagegrid.tests.unit.compat.mock import ( + patch, + Mock, +) +from ansible.module_utils import basic +from ansible.module_utils._text import to_bytes +from ansible_collections.netapp.storagegrid.plugins.modules.na_sg_org_group import ( + SgOrgGroup as org_group_module, +) + +# REST API canned responses when mocking send_request +SRR = { + # common responses + "empty_good": ({"data": []}, None), + "not_found": ( + {"status": "error", "code": 404, "data": {}}, + {"key": "error.404"}, + ), + "end_of_sequence": (None, "Unexpected call to send_request"), + "generic_error": (None, "Expected error"), + "delete_good": ({"code": 204}, None), + "org_groups": ( + { + "data": [ + { + "displayName": "TestOrgGroup", + "uniqueName": "group/testorggroup", + "policies": { + "management": { + "manageAllContainers": True, + "manageEndpoints": True, + "manageOwnS3Credentials": True, + }, + "s3": { + "Statement": [ + { + "Effect": "Allow", + "Action": "s3:*", + "Resource": "arn:aws:s3:::*", + } + ] + }, + }, + "accountId": "12345678901234567890", + "id": "00000000-0000-0000-0000-000000000000", + "federated": False, + "groupURN": "urn:sgws:identity::12345678901234567890:group/testorggroup", + } + ] + }, + None, + ), + "org_group_record": ( + { + "data": { + "displayName": "TestOrgGroup", + "uniqueName": "group/testorggroup", + "policies": { + "management": { + "manageAllContainers": True, + "manageEndpoints": True, + "manageOwnS3Credentials": True, + }, + "s3": { + 
"Statement": [ + { + "Effect": "Allow", + "Action": "s3:*", + "Resource": "arn:aws:s3:::*", + } + ] + }, + }, + "accountId": "12345678901234567890", + "id": "00000000-0000-0000-0000-000000000000", + "federated": False, + "groupURN": "urn:sgws:identity::12345678901234567890:group/testorggroup", + } + }, + None, + ), + "org_group_record_update": ( + { + "data": { + "displayName": "TestOrgGroup", + "uniqueName": "group/testorggroup", + "policies": { + "management": { + "manageAllContainers": True, + "manageEndpoints": True, + "manageOwnS3Credentials": True, + # "rootAccess": False, + }, + "s3": { + "Statement": [ + { + "Effect": "Allow", + "Action": "s3:*", + "Resource": "arn:aws:s3:::mybucket/*", + } + ] + }, + }, + "accountId": "12345678901234567890", + "id": "00000000-0000-0000-0000-000000000000", + "federated": False, + "groupURN": "urn:sgws:identity::12345678901234567890:group/testorggroup", + } + }, + None, + ), +} + + +def set_module_args(args): + """prepare arguments so that they will be picked up during module creation""" + args = json.dumps({"ANSIBLE_MODULE_ARGS": args}) + basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access + + +class AnsibleExitJson(Exception): + """Exception class to be raised by module.exit_json and caught by the test case""" + + pass + + +class AnsibleFailJson(Exception): + """Exception class to be raised by module.fail_json and caught by the test case""" + + pass + + +def exit_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over exit_json; package return data into an exception""" + if "changed" not in kwargs: + kwargs["changed"] = False + raise AnsibleExitJson(kwargs) + + +def fail_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over fail_json; package return data into an exception""" + kwargs["failed"] = True + raise AnsibleFailJson(kwargs) + + +class TestMyModule(unittest.TestCase): + """ a group of related Unit Tests """ + + def setUp(self): + self.mock_module_helper = patch.multiple( + basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json + ) + self.mock_module_helper.start() + self.addCleanup(self.mock_module_helper.stop) + + def set_default_args_fail_check(self): + return dict( + { + "display_name": "TestGroup", + "management_policy": { + "manage_all_containers": True, + "manage_endpoints": True, + "manage_own_s3_credentials": True, + "root_access": False, + }, + "s3_policy": { + "Statement": [ + { + "Effect": "Allow", + "Action": "s3:*", + "Resource": "arn:aws:s3:::*", + } + ] + }, + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_default_args_pass_check(self): + return dict( + { + "state": "present", + "display_name": "TestGroup", + "unique_name": "group/testgroup", + "management_policy": { + "manage_all_containers": True, + "manage_endpoints": True, + "manage_own_s3_credentials": True, + "root_access": False, + }, + "s3_policy": { + "Statement": [ + { + "Effect": "Allow", + "Action": "s3:*", + "Resource": "arn:aws:s3:::*", + } + ] + }, + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_create_na_sg_org_group(self): + return dict( + { + "state": "present", + "display_name": "TestOrgGroup", + "unique_name": "group/testorggroup", + "management_policy": { + "manage_all_containers": True, + "manage_endpoints": True, + "manage_own_s3_credentials": True, + "root_access": False, + }, + "s3_policy": { + "Statement": [ + { + 
"Effect": "Allow", + "Action": "s3:*", + "Resource": "arn:aws:s3:::*", + } + ] + }, + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_delete_na_sg_org_group(self): + return dict( + { + "state": "absent", + # "display_name": "TestOrgGroup", + "unique_name": "group/testorggroup", + # "management_policy": { + # "manage_all_containers": True, + # "manage_endpoints": True, + # "manage_own_s3_credentials": True, + # "root_access": False, + # }, + # "s3_policy": { + # "Statement": [ + # { + # "Effect": "Allow", + # "Action": "s3:*", + # "Resource": "arn:aws:s3:::*", + # } + # ] + # }, + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def test_module_fail_when_required_args_missing(self): + """ required arguments are reported as errors """ + with pytest.raises(AnsibleFailJson) as exc: + set_module_args(self.set_default_args_fail_check()) + org_group_module() + print( + "Info: test_module_fail_when_required_args_missing: %s" + % exc.value.args[0]["msg"] + ) + + def test_module_fail_when_required_args_present(self): + """ required arguments are reported as errors """ + with pytest.raises(AnsibleExitJson) as exc: + set_module_args(self.set_default_args_pass_check()) + org_group_module() + exit_json(changed=True, msg="Induced arguments check") + print( + "Info: test_module_fail_when_required_args_present: %s" + % exc.value.args[0]["msg"] + ) + assert exc.value.args[0]["changed"] + + def test_module_fail_with_bad_unique_name(self): + """ error returned if unique_name doesn't start with group or federated_group """ + with pytest.raises(AnsibleFailJson) as exc: + args = self.set_default_args_pass_check() + args["unique_name"] = "noprefixgroup" + set_module_args(args) + org_group_module() + print( + "Info: test_module_fail_with_bad_unique_name: %s" + % exc.value.args[0]["msg"] + ) + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_create_na_sg_org_group_pass(self, mock_request): + set_module_args(self.set_args_create_na_sg_org_group()) + my_obj = org_group_module() + mock_request.side_effect = [ + SRR["not_found"], # get + SRR["org_group_record"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_create_na_sg_org_group_pass: %s" + % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_idempotent_create_na_sg_org_group_pass(self, mock_request): + set_module_args(self.set_args_create_na_sg_org_group()) + my_obj = org_group_module() + mock_request.side_effect = [ + SRR["org_group_record"], # get + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_idempotent_create_na_sg_org_group_pass: %s" + % repr(exc.value.args[0]) + ) + assert not exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_update_na_sg_org_group_pass(self, mock_request): + args = self.set_args_create_na_sg_org_group() + args["s3_policy"] = ( + { + "Statement": [ + { + "Effect": "Allow", + "Action": "s3:*", + "Resource": "arn:aws:s3:::mybucket/*", + } + ] + }, + ) + + args["management_policy"]["manage_endpoints"] = False + + set_module_args(args) + 
my_obj = org_group_module() + mock_request.side_effect = [ + SRR["org_group_record"], # get + SRR["org_group_record_update"], # put + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_update_na_sg_org_group_pass: %s" + % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_delete_na_sg_org_group_pass(self, mock_request): + set_module_args(self.set_args_delete_na_sg_org_group()) + my_obj = org_group_module() + mock_request.side_effect = [ + SRR["org_group_record"], # get + SRR["delete_good"], # delete + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_delete_na_sg_org_group_pass: %s" + % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] diff --git a/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_org_identity_federation.py b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_org_identity_federation.py new file mode 100644 index 000000000..b02259005 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_org_identity_federation.py @@ -0,0 +1,354 @@ +# (c) 2021, NetApp, Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +""" unit tests NetApp StorageGRID Tenant Identity Federation Ansible module: na_sg_org_identity_federation""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +import json +import pytest +import sys +try: + from requests import Response +except ImportError: + if sys.version_info < (2, 7): + pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available') + else: + raise + +from ansible_collections.netapp.storagegrid.tests.unit.compat import unittest +from ansible_collections.netapp.storagegrid.tests.unit.compat.mock import ( + patch, + Mock, +) +from ansible.module_utils import basic +from ansible.module_utils._text import to_bytes +from ansible_collections.netapp.storagegrid.plugins.modules.na_sg_org_identity_federation import ( + SgOrgIdentityFederation as org_identity_federation_module, +) + +# REST API canned responses when mocking send_request +SRR = { + # common responses + "end_of_sequence": (None, "Unexpected call to send_request"), + "generic_error": (None, "Expected error"), + "check_mode_good": (None, None), + "identity_federation_unset": ( + { + "data": { + "id": "09876543-abcd-4321-abcd-0987654321ab", + "disable": True, + "type": "", + "ldapServiceType": "", + "hostname": "", + "port": 0, + "username": "", + "password": None, + "baseGroupDn": "", + "baseUserDn": "", + "disableTLS": False, + "enableLDAPS": False, + "caCert": "", + } + }, + None, + ), + "identity_federation": ( + { + "data": { + "id": "09876543-abcd-4321-abcd-0987654321ab", + "disable": False, + "type": "ldap", + "ldapServiceType": "Active Directory", + "hostname": "ad.example.com", + "port": 389, + "username": "binduser", + "password": "********", + "baseGroupDn": "DC=example,DC=com", + "baseUserDn": "DC=example,DC=com", + "disableTLS": True, + "enableLDAPS": False, + "caCert": "", + } + }, + None, + ), + "identity_federation_tls": ( + { + "data": { + "id": "09876543-abcd-4321-abcd-0987654321ab", + "disable": False, + "type": "ldap", + "ldapServiceType": "Active Directory", + "hostname": "ad.example.com", + "port": 
636, + "username": "binduser", + "password": "********", + "baseGroupDn": "DC=example,DC=com", + "baseUserDn": "DC=example,DC=com", + "disableTLS": False, + "enableLDAPS": True, + "caCert": ( + "-----BEGIN CERTIFICATE-----\n" + "MIIF+DCCBOCgAwIBAgITRwAAAAIg5KzMrJo+kQAAAAAAAjANBgkqhkiG9w0BAQUF\n" + "ADBlMRIwEAYKCZImiZPyLGQBGRYCYXUxFjAUBgoJkiaJk/IsZAEZFgZuZXRhcHAx\n" + "FjAUBgoJkiaJk/IsZAEZFgZhdXNuZ3MxHzAdBgNVBAMTFmF1c25ncy1NRUxOR1NE\n" + "QzAxLUNBLTEwHhcNMjEwMjExMDkzMTIwWhcNMjMwMjExMDk0MTIwWjAAMIIBIjAN\n" + "BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAt2xPi4FS4Uc37KrDLEXXUoc4lhhT\n" + "uQmMnLc0PYZCIpzYOaosFIeGqco3woiC7wSZJ2whKE4RDcxxgE+azuGiSWVjIxIL\n" + "AimmcDhFid/T3KRN5jmkjBzUKuPBYzZBFih8iU9056rqgN7eMKQYjRwPeV0+AeiB\n" + "irw46OgkwVQu3shEUtXxZPP2Mb6Md23+4vSmcElUcW28Opt2q/M5fs7DNomG3eaG\n" + "-----END CERTIFICATE-----\n" + ), + } + }, + None, + ), + "identity_federation_disable": ( + { + "data": { + "id": "09876543-abcd-4321-abcd-0987654321ab", + "disable": True, + "type": "ldap", + "ldapServiceType": "Active Directory", + "hostname": "ad.example.com", + "port": 389, + "username": "binduser", + "password": "********", + "baseGroupDn": "DC=example,DC=com", + "baseUserDn": "DC=example,DC=com", + "disableTLS": True, + "enableLDAPS": False, + "caCert": "", + } + }, + None, + ), +} + + +def set_module_args(args): + """prepare arguments so that they will be picked up during module creation""" + args = json.dumps({"ANSIBLE_MODULE_ARGS": args}) + basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access + + +class AnsibleExitJson(Exception): + """Exception class to be raised by module.exit_json and caught by the test case""" + + pass + + +class AnsibleFailJson(Exception): + """Exception class to be raised by module.fail_json and caught by the test case""" + + pass + + +def exit_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over exit_json; package return data into an exception""" + if "changed" not in kwargs: + kwargs["changed"] = False + raise AnsibleExitJson(kwargs) + + +def fail_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over fail_json; package return data into an exception""" + kwargs["failed"] = True + raise AnsibleFailJson(kwargs) + + +class TestMyModule(unittest.TestCase): + """ a group of related Unit Tests """ + + def setUp(self): + self.mock_module_helper = patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json) + self.mock_module_helper.start() + self.addCleanup(self.mock_module_helper.stop) + + def set_default_args_fail_check(self): + return dict( + { + "ldap_service_type": "Active Directory", + "hostname": "ad.example.com", + "port": 389, + "username": "binduser", + "password": "bindpass", + "base_group_dn": "DC=example,DC=com", + "base_user_dn": "DC=example,DC=com", + "tls": "Disabled", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_default_args_pass_check(self): + return dict( + { + "ldap_service_type": "Active Directory", + "hostname": "ad.example.com", + "port": 389, + "username": "binduser", + "password": "bindpass", + "base_group_dn": "DC=example,DC=com", + "base_user_dn": "DC=example,DC=com", + "tls": "Disabled", + "state": "present", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_set_na_sg_org_identity_federation(self): + return dict( + { + "ldap_service_type": "Active Directory", + "hostname": "ad.example.com", + "port": 389, + 
"username": "binduser", + "password": "bindpass", + "base_group_dn": "DC=example,DC=com", + "base_user_dn": "DC=example,DC=com", + "tls": "Disabled", + "state": "present", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_set_na_sg_org_identity_federation_tls(self): + return dict( + { + "ldap_service_type": "Active Directory", + "hostname": "ad.example.com", + "port": 636, + "username": "binduser", + "password": "bindpass", + "base_group_dn": "DC=example,DC=com", + "base_user_dn": "DC=example,DC=com", + "tls": "LDAPS", + "ca_cert": ( + "-----BEGIN CERTIFICATE-----\n" + "MIIF+DCCBOCgAwIBAgITRwAAAAIg5KzMrJo+kQAAAAAAAjANBgkqhkiG9w0BAQUF\n" + "ADBlMRIwEAYKCZImiZPyLGQBGRYCYXUxFjAUBgoJkiaJk/IsZAEZFgZuZXRhcHAx\n" + "FjAUBgoJkiaJk/IsZAEZFgZhdXNuZ3MxHzAdBgNVBAMTFmF1c25ncy1NRUxOR1NE\n" + "QzAxLUNBLTEwHhcNMjEwMjExMDkzMTIwWhcNMjMwMjExMDk0MTIwWjAAMIIBIjAN\n" + "BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAt2xPi4FS4Uc37KrDLEXXUoc4lhhT\n" + "uQmMnLc0PYZCIpzYOaosFIeGqco3woiC7wSZJ2whKE4RDcxxgE+azuGiSWVjIxIL\n" + "AimmcDhFid/T3KRN5jmkjBzUKuPBYzZBFih8iU9056rqgN7eMKQYjRwPeV0+AeiB\n" + "irw46OgkwVQu3shEUtXxZPP2Mb6Md23+4vSmcElUcW28Opt2q/M5fs7DNomG3eaG\n" + "-----END CERTIFICATE-----\n" + ), + "state": "present", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_remove_na_sg_org_identity_federation(self): + return dict( + { + "ldap_service_type": "Active Directory", + "hostname": "ad.example.com", + "state": "absent", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def test_module_fail_when_required_args_missing(self): + """ required arguments are reported as errors """ + with pytest.raises(AnsibleFailJson) as exc: + set_module_args(self.set_default_args_fail_check()) + org_identity_federation_module() + print("Info: test_module_fail_when_required_args_missing: %s" % exc.value.args[0]["msg"]) + + def test_module_fail_when_required_args_present(self): + """ required arguments are reported as errors """ + with pytest.raises(AnsibleExitJson) as exc: + set_module_args(self.set_default_args_pass_check()) + org_identity_federation_module() + exit_json(changed=True, msg="Induced arguments check") + print("Info: test_module_fail_when_required_args_present: %s" % exc.value.args[0]["msg"]) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_set_na_sg_org_identity_federation_pass(self, mock_request): + set_module_args(self.set_args_set_na_sg_org_identity_federation()) + my_obj = org_identity_federation_module() + mock_request.side_effect = [ + SRR["identity_federation_unset"], # get + SRR["identity_federation"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_set_na_sg_org_identity_federation_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_idempotent_set_na_sg_org_identity_federation_pass(self, mock_request): + args = self.set_args_set_na_sg_org_identity_federation() + # remove password + del args["password"] + set_module_args(args) + my_obj = org_identity_federation_module() + mock_request.side_effect = [ + SRR["identity_federation"], # get + SRR["end_of_sequence"], + 
] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_idempotent_set_na_sg_org_identity_federation_pass: %s" % repr(exc.value.args[0])) + assert not exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_set_na_sg_org_identity_federation_tls_pass(self, mock_request): + set_module_args(self.set_args_set_na_sg_org_identity_federation_tls()) + my_obj = org_identity_federation_module() + mock_request.side_effect = [ + SRR["identity_federation_unset"], # get + SRR["identity_federation_tls"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_set_na_sg_org_identity_federation_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_remove_na_sg_org_identity_federation_pass(self, mock_request): + set_module_args(self.set_args_remove_na_sg_org_identity_federation()) + my_obj = org_identity_federation_module() + mock_request.side_effect = [ + SRR["identity_federation"], # get + SRR["identity_federation_disable"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_remove_na_sg_org_identity_federation_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + # test check mode + + @patch("ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request") + def test_check_mode_na_sg_org_identity_federation_pass(self, mock_request): + set_module_args(self.set_args_set_na_sg_org_identity_federation()) + my_obj = org_identity_federation_module() + my_obj.module.check_mode = True + mock_request.side_effect = [ + SRR["identity_federation_unset"], # get + SRR["check_mode_good"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_check_mode_na_sg_org_identity_federation_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + assert exc.value.args[0]["msg"] == "Connection test successful" diff --git a/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_org_info.py b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_org_info.py new file mode 100644 index 000000000..e24c7cd46 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_org_info.py @@ -0,0 +1,263 @@ +# (c) 2020, NetApp, Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +''' Unit Tests NetApp StorageGRID Org Ansible module: na_sg_org_info ''' + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type +import json +import pytest + +from ansible.module_utils import basic +from ansible.module_utils._text import to_bytes +from ansible_collections.netapp.storagegrid.tests.unit.compat import unittest +from ansible_collections.netapp.storagegrid.tests.unit.compat.mock import patch + +from ansible_collections.netapp.storagegrid.plugins.modules.na_sg_org_info \ + import NetAppSgGatherInfo as sg_org_info_module + +# REST API canned responses when mocking send_request +SRR = { + # common responses + 'empty_good': ({'data': []}, None), + 'end_of_sequence': (None, 'Unexpected call to send_request'), + 'generic_error': (None, 'Expected error'), + 'org_compliance_global': 
({'data': {}}, None), + 'org_config': ({'data': {}}, None), + 'org_config_product_version': ({'data': {}}, None), + 'org_containers': ({'data': {}}, None), + 'org_deactivated_features': ({'data': {}}, None), + 'org_endpoints': ({'data': []}, None), + 'org_groups': ({'data': []}, None), + 'org_identity_source': ({'data': {}}, None), + 'org_regions': ({'data': []}, None), + 'org_users_current_user_s3_access_keys': ({'data': []}, None), + 'org_usage': ({'data': {}}, None), + 'org_users': ( + { + 'data': [ + { + 'accountId': '99846664116007910793', + 'disable': False, + 'federated': False, + 'fullName': 'Root', + 'id': '00000000-0000-0000-0000-000000000000', + 'memberOf': None, + 'uniqueName': 'root', + 'userURN': 'urn:sgws:identity::99846664116007910793:root' + }, + ] + }, + None + ), + 'org_users_root': ( + { + 'data': { + 'accountId': '99846664116007910793', + 'disable': False, + 'federated': False, + 'fullName': 'Root', + 'id': '00000000-0000-0000-0000-000000000000', + 'memberOf': None, + 'uniqueName': 'root', + 'userURN': 'urn:sgws:identity::99846664116007910793:root' + }, + }, + None + ), + 'versions': ({'data': [2, 3]}, None), +} + + +def set_module_args(args): + ''' Prepare arguments so that they will be picked up during module creation ''' + args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) + basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access + + +class AnsibleExitJson(Exception): + ''' Exception class to be raised by module.exit_json and caught by the test case ''' + pass + + +class AnsibleFailJson(Exception): + ''' Exception class to be raised by module.fail_json and caught by the test case ''' + pass + + +def exit_json(*args, **kwargs): # pylint: disable=unused-argument + ''' Function to patch over exit_json; package return data into an exception ''' + if 'changed' not in kwargs: + kwargs['changed'] = False + raise AnsibleExitJson(kwargs) + + +def fail_json(*args, **kwargs): # pylint: disable=unused-argument + ''' Function to patch over fail_json; package return data into an exception ''' + kwargs['failed'] = True + raise AnsibleFailJson(kwargs) + + +class TestMyModule(unittest.TestCase): + ''' A group of related Unit Tests ''' + + def setUp(self): + self.mock_module_helper = patch.multiple(basic.AnsibleModule, + exit_json=exit_json, + fail_json=fail_json) + self.mock_module_helper.start() + self.addCleanup(self.mock_module_helper.stop) + + def set_default_args_fail_check(self): + return dict( + { + 'api_url': 'sgmi.example.com', + } + ) + + def set_default_args_pass_check(self): + return dict( + { + 'api_url': 'sgmi.example.com', + 'auth_token': '01234567-5678-9abc-78de-9fgabc123def', + } + ) + + def set_default_optional_args_pass_check(self): + return dict( + { + 'api_url': 'sgmi.example.com', + 'auth_token': '01234567-5678-9abc-78de-9fgabc123def', + 'validate_certs': False, + 'gather_subset': ['all'], + 'parameters': {'limit': 5}, + } + ) + + def set_args_run_sg_gather_facts_for_all_info(self): + return dict({ + 'api_url': 'sgmi.example.com', + 'auth_token': '01234567-5678-9abc-78de-9fgabc123def', + 'validate_certs': False, + }) + + def set_args_run_sg_gather_facts_for_org_users_info(self): + return dict({ + 'api_url': 'sgmi.example.com', + 'auth_token': '01234567-5678-9abc-78de-9fgabc123def', + 'validate_certs': False, + 'gather_subset': ['org_users_info'], + }) + + def set_args_run_sg_gather_facts_for_org_users_and_org_users_root_info(self): + return dict({ + 'api_url': 'sgmi.example.com', + 'auth_token': '01234567-5678-9abc-78de-9fgabc123def', + 
'validate_certs': False, + 'gather_subset': ['org_users_info', 'org/users/root'], + }) + + def test_module_fail_when_required_args_missing(self): + ''' required arguments are reported as errors ''' + with pytest.raises(AnsibleFailJson) as exc: + set_module_args(self.set_default_args_fail_check()) + sg_org_info_module() + print( + 'Info: test_module_fail_when_required_args_missing: %s' + % exc.value.args[0]['msg'] + ) + + def test_module_pass_when_required_args_present(self): + ''' required arguments are reported as errors ''' + with pytest.raises(AnsibleExitJson) as exc: + set_module_args(self.set_default_args_pass_check()) + sg_org_info_module() + exit_json(changed=True, msg='Induced arguments check') + print( + 'Info: test_module_pass_when_required_args_present: %s' + % exc.value.args[0]['msg'] + ) + assert exc.value.args[0]['changed'] + + def test_module_pass_when_optional_args_present(self): + ''' Optional arguments are reported as pass ''' + with pytest.raises(AnsibleExitJson) as exc: + set_module_args(self.set_default_optional_args_pass_check()) + sg_org_info_module() + exit_json(changed=True, msg='Induced arguments check') + print( + 'Info: test_module_pass_when_optional_args_present: %s' + % exc.value.args[0]['msg'] + ) + assert exc.value.args[0]['changed'] + + @patch('ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request') + def test_run_sg_gather_facts_for_all_info_pass(self, mock_request): + set_module_args(self.set_args_run_sg_gather_facts_for_all_info()) + my_obj = sg_org_info_module() + gather_subset = [ + 'org/compliance-global', + 'org/config', + 'org/config/product-version', + 'org/containers', + 'org/deactivated-features', + 'org/endpoints', + 'org/groups', + 'org/identity-source', + 'org/regions', + 'org/users/current-user/s3-access-keys', + 'org/usage', + 'org/users', + 'org/users/root', + 'versions', + ] + mock_request.side_effect = [ + SRR['org_compliance_global'], + SRR['org_config'], + SRR['org_config_product_version'], + SRR['org_containers'], + SRR['org_deactivated_features'], + SRR['org_endpoints'], + SRR['org_groups'], + SRR['org_identity_source'], + SRR['org_regions'], + SRR['org_users_current_user_s3_access_keys'], + SRR['org_usage'], + SRR['org_users'], + SRR['org_users_root'], + SRR['versions'], + SRR['end_of_sequence'], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print('Info: test_run_sg_gather_facts_for_all_info_pass: %s' % repr(exc.value.args)) + assert set(exc.value.args[0]['sg_info']) == set(gather_subset) + + @patch('ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request') + def test_run_sg_gather_facts_for_org_users_info_pass(self, mock_request): + set_module_args(self.set_args_run_sg_gather_facts_for_org_users_info()) + my_obj = sg_org_info_module() + gather_subset = ['org/users'] + mock_request.side_effect = [ + SRR['org_users'], + SRR['end_of_sequence'], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print('Info: test_run_sg_gather_facts_for_org_users_info_pass: %s' % repr(exc.value.args)) + assert set(exc.value.args[0]['sg_info']) == set(gather_subset) + + @patch('ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request') + def test_run_sg_gather_facts_for_org_users_and_org_users_root_info_pass(self, mock_request): + set_module_args(self.set_args_run_sg_gather_facts_for_org_users_and_org_users_root_info()) + my_obj = sg_org_info_module() + gather_subset = ['org/users', 'org/users/root'] + 
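+        # side_effect hands back one canned SRR tuple per mocked send_request call,
+        # in order; the trailing 'end_of_sequence' entry flags any unexpected extra call.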
mock_request.side_effect = [ + SRR['org_users'], + SRR['org_users_root'], + SRR['end_of_sequence'], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print('Info: test_run_sg_gather_facts_for_org_users_and_org_users_root_info_pass: %s' % repr(exc.value.args)) + assert set(exc.value.args[0]['sg_info']) == set(gather_subset) diff --git a/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_org_user.py b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_org_user.py new file mode 100644 index 000000000..8fcec6734 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_org_user.py @@ -0,0 +1,476 @@ +# (c) 2020, NetApp, Inc +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +""" unit tests NetApp StorageGRID Org Group Ansible module: na_sg_org_user""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +import json +import pytest +import sys +try: + from requests import Response +except ImportError: + if sys.version_info < (2, 7): + pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available') + else: + raise + +from ansible_collections.netapp.storagegrid.tests.unit.compat import unittest +from ansible_collections.netapp.storagegrid.tests.unit.compat.mock import ( + patch, + Mock, +) +from ansible.module_utils import basic +from ansible.module_utils._text import to_bytes +from ansible_collections.netapp.storagegrid.plugins.modules.na_sg_org_user import ( + SgOrgUser as org_user_module, +) + +# REST API canned responses when mocking send_request +SRR = { + # common responses + "empty_good": ({"data": []}, None), + "not_found": ({"status": "error", "code": 404, "data": {}}, {"key": "error.404"},), + "end_of_sequence": (None, "Unexpected call to send_request"), + "generic_error": (None, "Expected error"), + "delete_good": ({"code": 204}, None), + "pw_change_good": ({"code": 204}, None), + "org_groups": ( + { + "data": [ + { + "displayName": "TestOrgGroup1", + "uniqueName": "group/testorggroup1", + "accountId": "12345678901234567890", + "id": "12345678-abcd-1234-abcd-1234567890ab", + "federated": False, + "groupURN": "urn:sgws:identity::12345678901234567890:group/testorggroup1", + }, + { + "displayName": "TestOrgGroup2", + "uniqueName": "group/testorggroup2", + "accountId": "12345678901234567890", + "id": "87654321-abcd-1234-cdef-1234567890ab", + "federated": False, + "groupURN": "urn:sgws:identity::12345678901234567890:group/testorggroup2", + }, + ] + }, + None, + ), + "org_users": ( + { + "data": [ + { + "id": "09876543-abcd-4321-abcd-0987654321ab", + "accountId": "12345678901234567890", + "fullName": "testorguser", + "uniqueName": "user/ansible-sg-demo-user1", + "userURN": "urn:sgws:identity::12345678901234567890:user/testorguser", + "federated": False, + "memberOf": ["12345678-abcd-1234-abcd-1234567890ab"], + "disable": False, + } + ] + }, + None, + ), + "org_user_record_no_group": ( + { + "data": { + "id": "09876543-abcd-4321-abcd-0987654321ab", + "accountId": "12345678901234567890", + "fullName": "testorguser", + "uniqueName": "user/ansible-sg-demo-user1", + "userURN": "urn:sgws:identity::12345678901234567890:user/testorguser", + "federated": False, + "disable": False, + } + }, + None, + ), + "org_user_record": ( + { + "data": { + "id": "09876543-abcd-4321-abcd-0987654321ab", + "accountId": "12345678901234567890", + "fullName": "testorguser", + "uniqueName": 
"user/ansible-sg-demo-user1", + "userURN": "urn:sgws:identity::12345678901234567890:user/testorguser", + "federated": False, + "memberOf": ["12345678-abcd-1234-abcd-1234567890ab"], + "disable": False, + } + }, + None, + ), + "org_user_record_update": ( + { + "data": { + "id": "09876543-abcd-4321-abcd-0987654321ab", + "accountId": "12345678901234567890", + "fullName": "testorguser", + "uniqueName": "user/ansible-sg-demo-user1", + "userURN": "urn:sgws:identity::12345678901234567890:user/testorguser", + "federated": False, + "memberOf": [ + "12345678-abcd-1234-abcd-1234567890ab", + "87654321-abcd-1234-cdef-1234567890ab", + ], + "disable": False, + } + }, + None, + ), +} + + +def set_module_args(args): + """prepare arguments so that they will be picked up during module creation""" + args = json.dumps({"ANSIBLE_MODULE_ARGS": args}) + basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access + + +class AnsibleExitJson(Exception): + """Exception class to be raised by module.exit_json and caught by the test case""" + + pass + + +class AnsibleFailJson(Exception): + """Exception class to be raised by module.fail_json and caught by the test case""" + + pass + + +def exit_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over exit_json; package return data into an exception""" + if "changed" not in kwargs: + kwargs["changed"] = False + raise AnsibleExitJson(kwargs) + + +def fail_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over fail_json; package return data into an exception""" + kwargs["failed"] = True + raise AnsibleFailJson(kwargs) + + +class TestMyModule(unittest.TestCase): + """ a group of related Unit Tests """ + + def setUp(self): + self.mock_module_helper = patch.multiple( + basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json + ) + self.mock_module_helper.start() + self.addCleanup(self.mock_module_helper.stop) + + def set_default_args_fail_check(self): + return dict( + { + "full_name": "TestUser", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_default_args_pass_check(self): + return dict( + { + "state": "present", + "full_name": "TestUser", + "unique_name": "user/testuser", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_create_na_sg_org_user_no_group(self): + return dict( + { + "state": "present", + "full_name": "TestUser", + "unique_name": "user/testuser", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_create_na_sg_org_user(self): + return dict( + { + "state": "present", + "full_name": "TestUser", + "unique_name": "user/testuser", + "member_of": ["group/testorggroup1"], + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_delete_na_sg_org_user(self): + return dict( + { + "state": "absent", + # "full_name": "TestUser", + "unique_name": "user/testuser", + # "member_of": ["group/testorggroup1"], + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def test_module_fail_when_required_args_missing(self): + """ required arguments are reported as errors """ + with pytest.raises(AnsibleFailJson) as exc: + set_module_args(self.set_default_args_fail_check()) + org_user_module() + print( + 
"Info: test_module_fail_when_required_args_missing: %s" + % exc.value.args[0]["msg"] + ) + + def test_module_fail_when_required_args_present(self): + """ required arguments are reported as errors """ + with pytest.raises(AnsibleExitJson) as exc: + set_module_args(self.set_default_args_pass_check()) + org_user_module() + exit_json(changed=True, msg="Induced arguments check") + print( + "Info: test_module_fail_when_required_args_present: %s" + % exc.value.args[0]["msg"] + ) + assert exc.value.args[0]["changed"] + + def test_module_fail_with_bad_unique_name(self): + """ error returned if unique_name doesn't start with user or federated_user """ + with pytest.raises(AnsibleFailJson) as exc: + args = self.set_default_args_pass_check() + args["unique_name"] = "noprefixuser" + set_module_args(args) + org_user_module() + print( + "Info: test_module_fail_with_bad_unique_name: %s" % exc.value.args[0]["msg"] + ) + + def set_args_create_na_sg_org_user_with_password(self): + return dict( + { + "state": "present", + "full_name": "TestUser", + "unique_name": "user/testuser", + "member_of": ["group/testorggroup1"], + "password": "netapp123", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_create_na_sg_org_user_no_group_pass(self, mock_request): + set_module_args(self.set_args_create_na_sg_org_user_no_group()) + my_obj = org_user_module() + mock_request.side_effect = [ + SRR["not_found"], # get + SRR["org_user_record_no_group"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_create_na_sg_org_user_no_group_pass: %s" + % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_create_na_sg_org_user_pass(self, mock_request): + set_module_args(self.set_args_create_na_sg_org_user()) + my_obj = org_user_module() + mock_request.side_effect = [ + SRR["not_found"], # get + SRR["org_groups"], # get + SRR["org_user_record"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_create_na_sg_org_user_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_idempotent_create_na_sg_org_user_pass(self, mock_request): + set_module_args(self.set_args_create_na_sg_org_user()) + my_obj = org_user_module() + mock_request.side_effect = [ + SRR["org_user_record"], # get + SRR["org_groups"], # get + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_idempotent_create_na_sg_org_user_pass: %s" + % repr(exc.value.args[0]) + ) + assert not exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_update_na_sg_org_user_pass(self, mock_request): + args = self.set_args_create_na_sg_org_user() + args["member_of"] = ["group/testorggroup1", "group/testorggroup2"] + + set_module_args(args) + my_obj = org_user_module() + mock_request.side_effect = [ + SRR["org_user_record"], # get + SRR["org_groups"], # get + SRR["org_user_record_update"], # put + SRR["end_of_sequence"], + ] + with 
pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_update_na_sg_org_user_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_delete_na_sg_org_user_pass(self, mock_request): + set_module_args(self.set_args_delete_na_sg_org_user()) + my_obj = org_user_module() + mock_request.side_effect = [ + SRR["org_user_record"], # get + SRR["org_groups"], # get + SRR["delete_good"], # delete + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print("Info: test_delete_na_sg_org_user_pass: %s" % repr(exc.value.args[0])) + assert exc.value.args[0]["changed"] + + # create user and set pass + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_create_na_sg_org_user_and_set_password_pass(self, mock_request): + set_module_args(self.set_args_create_na_sg_org_user_with_password()) + my_obj = org_user_module() + mock_request.side_effect = [ + SRR["not_found"], # get + SRR["org_groups"], # get + SRR["org_user_record"], # post + SRR["pw_change_good"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_create_na_sg_org_user_and_set_password_pass: %s" + % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] + + # Idempotent user with password defined + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_idempotent_create_na_sg_org_user_and_set_password_pass(self, mock_request): + set_module_args(self.set_args_create_na_sg_org_user_with_password()) + my_obj = org_user_module() + mock_request.side_effect = [ + SRR["org_user_record"], # get + SRR["org_groups"], # get + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_idempotent_create_na_sg_org_user_and_set_password_pass: %s" + % repr(exc.value.args[0]) + ) + assert not exc.value.args[0]["changed"] + + # update user and set pass + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_update_na_sg_org_user_and_set_password_pass(self, mock_request): + args = self.set_args_create_na_sg_org_user_with_password() + args["member_of"] = ["group/testorggroup1", "group/testorggroup2"] + args["update_password"] = "always" + + set_module_args(args) + my_obj = org_user_module() + mock_request.side_effect = [ + SRR["org_user_record"], # get + SRR["org_groups"], # get + SRR["org_user_record_update"], # put + SRR["pw_change_good"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_update_na_sg_org_user_and_set_password_pass: %s" + % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] + + # set pass only + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_set_na_sg_org_user_password_pass(self, mock_request): + args = self.set_args_create_na_sg_org_user_with_password() + args["update_password"] = "always" + + set_module_args(args) + my_obj = org_user_module() + mock_request.side_effect = [ + SRR["org_user_record"], # get + SRR["org_groups"], # get + SRR["pw_change_good"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: 
test_set_na_sg_org_user_password_pass: %s" % repr(exc.value.args[0])
+        )
+        assert exc.value.args[0]["changed"]
+
+    # attempt to set password on federated user
+
+    @patch(
+        "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request"
+    )
+    def test_fail_set_federated_user_password(self, mock_request):
+        with pytest.raises(AnsibleFailJson) as exc:
+            args = self.set_args_create_na_sg_org_user_with_password()
+            args["unique_name"] = "federated-user/abc123"
+            args["update_password"] = "always"
+            set_module_args(args)
+            org_user_module()
+        print(
+            "Info: test_fail_set_federated_user_password: %s" % repr(exc.value.args[0])
+        )
diff --git a/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_org_user_s3_key.py b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_org_user_s3_key.py
new file mode 100644
index 000000000..53696bdbf
--- /dev/null
+++ b/ansible_collections/netapp/storagegrid/tests/unit/plugins/modules/test_na_sg_org_user_s3_key.py
@@ -0,0 +1,238 @@
+# (c) 2020, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+""" unit tests NetApp StorageGRID Org User S3 Key Ansible module: na_sg_org_user_s3_key"""
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+import json
+import pytest
+import sys
+try:
+    from requests import Response
+except ImportError:
+    if sys.version_info < (2, 7):
+        pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+    else:
+        raise
+
+from ansible_collections.netapp.storagegrid.tests.unit.compat import unittest
+from ansible_collections.netapp.storagegrid.tests.unit.compat.mock import (
+    patch,
+    Mock,
+)
+from ansible.module_utils import basic
+from ansible.module_utils._text import to_bytes
+from ansible_collections.netapp.storagegrid.plugins.modules.na_sg_org_user_s3_key import (
+    SgOrgUserS3Key as org_s3_key_module,
+)
+
+# REST API canned responses when mocking send_request
+SRR = {
+    # common responses
+    "empty_good": ({"data": []}, None),
+    "not_found": (
+        {"status": "error", "code": 404, "data": {}},
+        {"key": "error.404"},
+    ),
+    "end_of_sequence": (None, "Unexpected call to send_request"),
+    "generic_error": (None, "Expected error"),
+    "delete_good": ({"code": 204}, None),
+    "org_user_record": (
+        {
+            "data": {
+                "id": "09876543-abcd-4321-abcd-0987654321ab",
+                "accountId": "12345678901234567890",
+                "fullName": "testorguser",
+                "uniqueName": "user/testorguser",
+                "userURN": "urn:sgws:identity::12345678901234567890:user/testorguser",
+                "federated": False,
+                "memberOf": ["12345678-abcd-1234-abcd-1234567890ab"],
+                "disable": False,
+            }
+        },
+        None,
+    ),
+    "org_s3_key": (
+        {
+            "data": {
+                "id": "abcABC_01234-0123456789abcABCabc0123456789==",
+                "accountId": 12345678901234567000,
+                "displayName": "****************AB12",
+                "userURN": "urn:sgws:identity::12345678901234567890:root",
+                "userUUID": "09876543-abcd-4321-abcd-0987654321ab",
+                "expires": "2020-09-04T00:00:00.000Z",
+                "accessKey": "ABCDEFabcd1234567890",
+                "secretAccessKey": "abcABC+123456789012345678901234567890123",
+            }
+        },
+        None,
+    ),
+}
+
+
+def set_module_args(args):
+    """prepare arguments so that they will be picked up during module creation"""
+    args = json.dumps({"ANSIBLE_MODULE_ARGS": args})
+    basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access
+
+
+class AnsibleExitJson(Exception):
+    """Exception class to be raised by module.exit_json and caught by the test 
case""" + + pass + + +class AnsibleFailJson(Exception): + """Exception class to be raised by module.fail_json and caught by the test case""" + + pass + + +def exit_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over exit_json; package return data into an exception""" + if "changed" not in kwargs: + kwargs["changed"] = False + raise AnsibleExitJson(kwargs) + + +def fail_json(*args, **kwargs): # pylint: disable=unused-argument + """function to patch over fail_json; package return data into an exception""" + kwargs["failed"] = True + raise AnsibleFailJson(kwargs) + + +class TestMyModule(unittest.TestCase): + """ a group of related Unit Tests """ + + def setUp(self): + self.mock_module_helper = patch.multiple( + basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json + ) + self.mock_module_helper.start() + self.addCleanup(self.mock_module_helper.stop) + + def set_default_args_fail_check(self): + return dict( + { + "unique_user_name": "user/testorguser", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_default_args_pass_check(self): + return dict( + { + "state": "present", + "unique_user_name": "user/testorguser", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_create_na_sg_org_user_s3_keys(self): + return dict( + { + "state": "present", + "unique_user_name": "user/testorguser", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def set_args_delete_na_sg_org_user_s3_keys(self): + return dict( + { + "state": "absent", + "unique_user_name": "user/testorguser", + "access_key": "ABCDEFabcd1234567890", + "api_url": "gmi.example.com", + "auth_token": "01234567-5678-9abc-78de-9fgabc123def", + "validate_certs": False, + } + ) + + def test_module_fail_when_required_args_missing(self): + """ required arguments are reported as errors """ + with pytest.raises(AnsibleFailJson) as exc: + set_module_args(self.set_default_args_fail_check()) + org_s3_key_module() + print( + "Info: test_module_fail_when_required_args_missing: %s" + % exc.value.args[0]["msg"] + ) + + def test_module_fail_when_required_args_present(self): + """ required arguments are reported as errors """ + with pytest.raises(AnsibleExitJson) as exc: + set_module_args(self.set_default_args_pass_check()) + org_s3_key_module() + exit_json(changed=True, msg="Induced arguments check") + print( + "Info: test_module_fail_when_required_args_present: %s" + % exc.value.args[0]["msg"] + ) + assert exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_create_na_sg_org_user_s3_key_pass(self, mock_request): + set_module_args(self.set_args_create_na_sg_org_user_s3_keys()) + my_obj = org_s3_key_module() + mock_request.side_effect = [ + SRR["org_user_record"], # get + SRR["org_s3_key"], # post + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_create_na_sg_org_user_s3_key_pass: %s" + % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_idempotent_create_na_sg_org_user_s3_key_pass(self, mock_request): + args = self.set_args_create_na_sg_org_user_s3_keys() + args["access_key"] = "ABCDEFabcd1234567890" + 
set_module_args(args) + my_obj = org_s3_key_module() + mock_request.side_effect = [ + SRR["org_user_record"], # get + SRR["org_s3_key"], # get + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_idempotent_create_na_sg_org_user_s3_key_pass: %s" + % repr(exc.value.args[0]) + ) + assert not exc.value.args[0]["changed"] + + @patch( + "ansible_collections.netapp.storagegrid.plugins.module_utils.netapp.SGRestAPI.send_request" + ) + def test_delete_na_sg_org_user_s3_keys_pass(self, mock_request): + set_module_args(self.set_args_delete_na_sg_org_user_s3_keys()) + my_obj = org_s3_key_module() + mock_request.side_effect = [ + SRR["org_s3_key"], # get + SRR["delete_good"], # delete + SRR["end_of_sequence"], + ] + with pytest.raises(AnsibleExitJson) as exc: + my_obj.apply() + print( + "Info: test_delete_na_sg_org_user_s3_keys_pass: %s" + % repr(exc.value.args[0]) + ) + assert exc.value.args[0]["changed"] diff --git a/ansible_collections/netapp/storagegrid/tests/unit/requirements.txt b/ansible_collections/netapp/storagegrid/tests/unit/requirements.txt new file mode 100644 index 000000000..b754473a9 --- /dev/null +++ b/ansible_collections/netapp/storagegrid/tests/unit/requirements.txt @@ -0,0 +1 @@ +requests ; python_version >= '2.7' |
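Note: every test module above follows the same canned-response pattern: each SRR entry is a (payload, error) tuple, and patching SGRestAPI.send_request with a side_effect list replays those tuples one mocked call at a time, with end_of_sequence guarding against extra calls. A minimal standalone sketch of that idea, using only unittest.mock (the request arguments below are illustrative only and are not taken from the collection):

    from unittest.mock import Mock

    SRR = {
        "record": ({"data": {"id": "09876543-abcd-4321-abcd-0987654321ab"}}, None),
        "end_of_sequence": (None, "Unexpected call to send_request"),
    }

    # One canned (payload, error) tuple is returned per call, in order.
    send_request = Mock(side_effect=[SRR["record"], SRR["end_of_sequence"]])

    data, error = send_request("GET", "api/v3/org/users")  # first call -> canned record
    assert error is None and "id" in data["data"]

    data, error = send_request("GET", "api/v3/org/users")  # second call -> sentinel error
    assert error == "Unexpected call to send_request"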