path: root/lib/ansible/modules/assemble.py
blob: 2b443ce85546be28ab3857520c6ad19e77de165d
#!/usr/bin/python
# -*- coding: utf-8 -*-

# Copyright: (c) 2012, Stephen Fromm <sfromm@gmail.com>
# Copyright: (c) 2016, Toshio Kuratomi <tkuratomi@ansible.com>
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type


DOCUMENTATION = r'''
---
module: assemble
short_description: Assemble configuration files from fragments
description:
- Assembles a configuration file from fragments.
- Often a particular program takes a single configuration file and does not support a
  C(conf.d) style structure where it is easy to build up the configuration
  from multiple sources. C(assemble) will take a directory of files that can be
  local or have already been transferred to the system, and concatenate them
  together to produce a destination file.
- Files are assembled in string sorting order.
- Puppet calls this idea I(fragments).
version_added: '0.5'
options:
  src:
    description:
    - An already existing directory full of source files.
    type: path
    required: true
  dest:
    description:
    - A file to create using the concatenation of all of the source files.
    type: path
    required: true
  backup:
    description:
    - Create a backup file (if C(true)), including the timestamp information so
      you can get the original file back if you somehow clobbered it
      incorrectly.
    type: bool
    default: no
  delimiter:
    description:
    - A delimiter to separate the file contents.
    type: str
    version_added: '1.4'
  remote_src:
    description:
    - If C(false), it will search for C(src) on the originating/controller machine.
    - If C(true), it will search for C(src) on the remote/target machine.
    type: bool
    default: yes
    version_added: '1.4'
  regexp:
    description:
    - Assemble files only if C(regexp) matches the filename, as in the fragment-filtering example below.
    - If not set, all files are assembled.
    - Every C(\) (backslash) must be escaped as C(\\) to comply with YAML syntax.
    - Uses L(Python regular expressions,https://docs.python.org/3/library/re.html).
    type: str
  ignore_hidden:
    description:
    - A boolean that controls whether files that start with a C(.) are included.
    type: bool
    default: no
    version_added: '2.0'
  validate:
    description:
    - The validation command to run before copying into place.
    - The path to the file to validate is passed in via '%s' which must be present as in the sshd example below.
    - The command is passed securely so shell features like expansion and pipes won't work.
    type: str
    version_added: '2.0'
attributes:
    action:
      support: full
    async:
      support: none
    bypass_host_loop:
      support: none
    check_mode:
      support: none
    diff_mode:
      support: full
    platform:
      platforms: posix
    safe_file_operations:
      support: full
    vault:
      support: full
      version_added: '2.2'
seealso:
- module: ansible.builtin.copy
- module: ansible.builtin.template
- module: ansible.windows.win_copy
author:
- Stephen Fromm (@sfromm)
extends_documentation_fragment:
    - action_common_attributes
    - action_common_attributes.flow
    - action_common_attributes.files
    - decrypt
    - files
'''

EXAMPLES = r'''
- name: Assemble from fragments from a directory
  ansible.builtin.assemble:
    src: /etc/someapp/fragments
    dest: /etc/someapp/someapp.conf

- name: Insert the provided delimiter between fragments
  ansible.builtin.assemble:
    src: /etc/someapp/fragments
    dest: /etc/someapp/someapp.conf
    delimiter: '### START FRAGMENT ###'
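
# Illustrative task: the relative src path below is hypothetical; with remote_src
# disabled the fragments are looked up on the controller, and backup keeps a
# timestamped copy of the previous destination file.
- name: Assemble fragments kept on the controller and back up the existing destination
  ansible.builtin.assemble:
    src: fragments/someapp
    dest: /etc/someapp/someapp.conf
    remote_src: false
    backup: true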

- name: Assemble a new "sshd_config" file into place, after passing validation with sshd
  ansible.builtin.assemble:
    src: /etc/ssh/conf.d/
    dest: /etc/ssh/sshd_config
    validate: /usr/sbin/sshd -t -f %s
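
# Illustrative task: the fragment naming pattern is hypothetical; only files whose
# names match regexp are concatenated, and dotfiles are skipped because of ignore_hidden.
- name: Assemble only the numbered fragment files, skipping hidden files
  ansible.builtin.assemble:
    src: /etc/someapp/fragments
    dest: /etc/someapp/someapp.conf
    regexp: '^[0-9]+-.*'
    ignore_hidden: true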
'''

RETURN = r'''#'''

import codecs
import os
import re
import tempfile

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import b, indexbytes
from ansible.module_utils._text import to_native


def assemble_from_fragments(src_path, delimiter=None, compiled_regexp=None, ignore_hidden=False, tmpdir=None):
    ''' assemble a file from a directory of fragments '''
    tmpfd, temp_path = tempfile.mkstemp(dir=tmpdir)
    tmp = os.fdopen(tmpfd, 'wb')
    delimit_me = False   # set True once a fragment has been written, so a delimiter goes before the next one
    add_newline = False  # set True when the previous fragment did not end with a newline

    for f in sorted(os.listdir(src_path)):
        if compiled_regexp and not compiled_regexp.search(f):
            continue
        fragment = os.path.join(src_path, f)
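        # skip anything that is not a regular file and, when ignore_hidden is set, dotfiles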
        if not os.path.isfile(fragment) or (ignore_hidden and os.path.basename(fragment).startswith('.')):
            continue
        with open(fragment, 'rb') as fragment_fh:
            fragment_content = fragment_fh.read()

        # always put a newline between fragments if the previous fragment didn't end with a newline.
        if add_newline:
            tmp.write(b('\n'))

        # delimiters should only appear between fragments
        if delimit_me:
            if delimiter:
                # un-escape anything like newlines
                delimiter = codecs.escape_decode(delimiter)[0]
                tmp.write(delimiter)
                # always make sure there's a newline after the
                # delimiter, so lines don't run together

                # byte indexing differs on Python 2 and 3,
                # use indexbytes for compat
                # chr(10) == '\n'
                if indexbytes(delimiter, -1) != 10:
                    tmp.write(b('\n'))

        tmp.write(fragment_content)
        delimit_me = True
        if fragment_content.endswith(b('\n')):
            add_newline = False
        else:
            add_newline = True

    tmp.close()
    return temp_path


def cleanup(path, result=None):
    # cleanup just in case
    if os.path.exists(path):
        try:
            os.remove(path)
        except (IOError, OSError) as e:
            # don't error on possible race conditions, but keep warning
            if result is not None:
                result['warnings'] = ['Unable to remove temp file (%s): %s' % (path, to_native(e))]


def main():

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            src=dict(type='path', required=True),
            delimiter=dict(type='str'),
            dest=dict(type='path', required=True),
            backup=dict(type='bool', default=False),
            remote_src=dict(type='bool', default=True),
            regexp=dict(type='str'),
            ignore_hidden=dict(type='bool', default=False),
            validate=dict(type='str'),
        ),
        add_file_common_args=True,
    )

    changed = False
    path_hash = None
    dest_hash = None
    src = module.params['src']
    dest = module.params['dest']
    backup = module.params['backup']
    delimiter = module.params['delimiter']
    regexp = module.params['regexp']
    compiled_regexp = None
    ignore_hidden = module.params['ignore_hidden']
    validate = module.params.get('validate', None)

    result = dict(src=src, dest=dest)
    if not os.path.exists(src):
        module.fail_json(msg="Source (%s) does not exist" % src)

    if not os.path.isdir(src):
        module.fail_json(msg="Source (%s) is not a directory" % src)

    if regexp is not None:
        try:
            compiled_regexp = re.compile(regexp)
        except re.error as e:
            module.fail_json(msg="Invalid Regexp (%s) in \"%s\"" % (to_native(e), regexp))

    if validate and "%s" not in validate:
        module.fail_json(msg="validate must contain %%s: %s" % validate)

    path = assemble_from_fragments(src, delimiter, compiled_regexp, ignore_hidden, module.tmpdir)
    path_hash = module.sha1(path)
    result['checksum'] = path_hash

    # Backwards compat.  This won't return data if FIPS mode is active
    try:
        pathmd5 = module.md5(path)
    except ValueError:
        pathmd5 = None
    result['md5sum'] = pathmd5

    if os.path.exists(dest):
        dest_hash = module.sha1(dest)

    # Only move the assembled file into place when its contents differ from the current dest
    if path_hash != dest_hash:
        if validate:
            (rc, out, err) = module.run_command(validate % path)
            result['validation'] = dict(rc=rc, stdout=out, stderr=err)
            if rc != 0:
                cleanup(path)
                module.fail_json(msg="failed to validate: rc:%s error:%s" % (rc, err))
        if backup and dest_hash is not None:
            result['backup_file'] = module.backup_local(dest)

        module.atomic_move(path, dest, unsafe_writes=module.params['unsafe_writes'])
        changed = True

    cleanup(path, result)

    # handle file permissions
    file_args = module.load_file_common_arguments(module.params)
    result['changed'] = module.set_fs_attributes_if_different(file_args, changed)

    # Mission complete
    result['msg'] = "OK"
    module.exit_json(**result)


if __name__ == '__main__':
    main()