diff --git a/.ansible-lint b/.ansible-lint index f68da384..e750c579 100644 --- a/.ansible-lint +++ b/.ansible-lint @@ -4,3 +4,6 @@ skip_list: exclude_paths: - .forgejo/ + - "**/*.sops.yaml" + - ansible_collections/ + - galaxy_roles/ diff --git a/.forgejo/workflows/lint.yaml b/.forgejo/workflows/lint.yaml index 1002532f..5113e9fb 100644 --- a/.forgejo/workflows/lint.yaml +++ b/.forgejo/workflows/lint.yaml @@ -10,7 +10,7 @@ jobs: name: Ansible Lint runs-on: docker steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Install pip run: | apt update @@ -24,7 +24,7 @@ jobs: # work in our environmnet. # Rather manually setup python (pip) before instead. - name: Run ansible-lint - uses: https://github.com/ansible/ansible-lint@v24.10.0 + uses: https://github.com/ansible/ansible-lint@v26.1.1 with: setup_python: "false" requirements_file: "requirements.yml" diff --git a/.gitignore b/.gitignore index e69de29b..424bd262 100644 --- a/.gitignore +++ b/.gitignore @@ -0,0 +1 @@ +.ansible/ diff --git a/.sops.yaml b/.sops.yaml new file mode 100644 index 00000000..60da9eb9 --- /dev/null +++ b/.sops.yaml @@ -0,0 +1,203 @@ +keys: + admins: + gpg: &admin_gpg_keys + - &admin_gpg_djerun EF643F59E008414882232C78FFA8331EEB7D6B70 + - &admin_gpg_stb F155144FC925A1BEA1F8A2C59A2A4CD59BFDC5EC + - &admin_gpg_jtbx 18DFCE01456DAB52EA38A6584EDC64F35FA1D6A5 + - &admin_gpg_yuri 87AB00D45D37C9E9167B5A5A333448678B60E505 + - &admin_gpg_june 057870A2C72CD82566A3EC983695F4FCBCAE4912 + - &admin_gpg_haegar F38C9D4228FC6F674E322D9C3326D914EB9B8F55 + - &admin_gpg_dario 5DA93D5C9D7320E1BD3522C79C78172B3551C9FD + - &admin_gpg_echtnurich 8996B62CBD159DCADD3B6DC08BB33A8ABCF7BC4A + - &admin_gpg_c6ristian B71138A6A8964A3C3B8899857B4F70C356765BAB + - &admin_gpg_lilly D2E9C0807BF681F5E164DAFC5EE1B61CD90954CD + - &admin_gpg_langoor 878FEA3CB6A6F6E7CD80ECBE28506E3585F9F533 + hosts: + chaosknoten: + age: &host_chaosknoten_age_keys + - &host_netbox_ansible_pull_age_key age1ss82zwqkj438re78355p886r89csqrrfmkfp8lrrf8v23nza492qza4ey3 + - &host_cloud_ansible_pull_age_key age1gdfhx5hy829uqkw4nwjwlpvl7zqvljguzsnjv0dpwz5q5u7dtf6s90wndt + - &host_eh22_wiki_ansible_pull_age_key age13nm6hfz66ce4wpn89fye05mag3l3h04etvz6wj7szm3vzrdlfupqhrp3fa + - &host_grafana_ansible_pull_age_key age1jtusr294t8mzar2qy857v6s329ret9s353y4kuulxwnlyy4dvpjsvyl67m + - &host_onlyoffice_ansible_pull_age_key age1a27euccw8j23wec76ls8vmzp7mntfcn4v8tkyegmg8alzfhk3suqwm6vgv + - &host_pretalx_ansible_pull_age_key age133wy6sxhgx3kkwxecra6xf9ey2uhnvtjpgwawwfmpvz0jpd0s5dqe385u3 + - &host_sunders_ansible_pull_age_key age1na0nh9ndnr9cxpnlvstrxskr4fxf4spnkw48ufl7m43f98y40y7shhnvgd + - &host_wiki_ansible_pull_age_key age1sqs05anv4acculyap35e6vehdxw3g6ycwnvh6hsuv8u33re984zsnqfvqv + - &host_renovate_ansible_pull_age_key age18qam683rva3ee3wgue7r0ey4ws4jttz4a4dpe3q8kq8lmrp97ezq2cns8d + - &host_ccchoir_ansible_pull_age_key age19rg2cuj9smv8nzxmr03azfqe69edhep53dep6kvh83paf08zv58sntm0fg + - &host_tickets_ansible_pull_age_key age16znyzvquuy8467gg27mdwdt8k6kcu3fjrvfm6gnl4nmqp8tuvqaspqgcet + - &host_keycloak_ansible_pull_age_key age1azkgwrcwqhc6flj7gturptpl2uvay6pd94cam4t6yuk2n4wlnsqsj38hca + - &host_lists_ansible_pull_age_key age17x20h3m6wgfhereusc224u95ac8aj68fzlkkj5ptvs9c5vlz3usqdu7crq + - &host_mumble_ansible_pull_age_key age1wnympe3x8ce8hk87cymmt6wvccs4aes5rhhs44hq0s529v5z4g5sfyphwx + - &host_pad_ansible_pull_age_key age172pk7lyc6p4ewy0f2h6pau5d5sz6z8cq66hm4u4tpzx3an496a2sljx7x5 + - &host_public_reverse_proxy_ansible_pull_age_key 
age1p7pxgq5kwcpdkhkh3qq4pvnltrdk4gwf60hdhv8ka0mdxmgnjepqyleyen + - &host_zammad_ansible_pull_age_key age1sv7uhpnk9d3u3je9zzvlux0kd83f627aclpamnz2h3ksg599838qjgrvqs + - &host_ntfy_ansible_pull_age_key age1dkecypmfuj0tcm2cz8vnvq5drpu2ddhgnfkzxvscs7m4e79gpseqyhr9pg + - &host_spaceapiccc_ansible_pull_age_key age1mdtnk78aeqnwqadjqje5pfha04wu92d3ecchyqajjmy434kwq98qksq2wa + - &host_acmedns_ansible_pull_age_key age16pxqxdj25xz6w200sf8duc62vyk0xkhzc7y63nyhg29sm077vp8qy4sywv + external: + age: &host_external_age_keys + - &host_status_ansible_pull_age_key age1yl9ts8k6ceymaxjs72r5puetes5mtuzxuger7qgme9qkagfrm9hqzxx9qr +creation_rules: + ## group vars + - path_regex: inventories/chaosknoten/group_vars/all.* + key_groups: + - pgp: + *admin_gpg_keys + age: + *host_chaosknoten_age_keys + - path_regex: inventories/external/group_vars/all.* + key_groups: + - pgp: + *admin_gpg_keys + age: + *host_external_age_keys + - path_regex: inventories/z9/group_vars/all.* + key_groups: + - pgp: + *admin_gpg_keys + ## host vars + # chaosknoten hosts + - path_regex: inventories/chaosknoten/host_vars/acmedns.* + key_groups: + - pgp: + *admin_gpg_keys + age: + - *host_acmedns_ansible_pull_age_key + - path_regex: inventories/chaosknoten/host_vars/cloud.* + key_groups: + - pgp: + *admin_gpg_keys + age: + - *host_cloud_ansible_pull_age_key + - path_regex: inventories/chaosknoten/host_vars/keycloak.* + key_groups: + - pgp: + *admin_gpg_keys + age: + - *host_keycloak_ansible_pull_age_key + - path_regex: inventories/chaosknoten/host_vars/grafana.* + key_groups: + - pgp: + *admin_gpg_keys + age: + - *host_grafana_ansible_pull_age_key + - path_regex: inventories/chaosknoten/host_vars/pad.* + key_groups: + - pgp: + *admin_gpg_keys + age: + - *host_pad_ansible_pull_age_key + - path_regex: inventories/chaosknoten/host_vars/ccchoir.* + key_groups: + - pgp: + *admin_gpg_keys + age: + - *host_ccchoir_ansible_pull_age_key + - path_regex: inventories/chaosknoten/host_vars/pretalx.* + key_groups: + - pgp: + *admin_gpg_keys + age: + - *host_pretalx_ansible_pull_age_key + - path_regex: inventories/chaosknoten/host_vars/netbox.* + key_groups: + - pgp: + *admin_gpg_keys + age: + - *host_netbox_ansible_pull_age_key + - path_regex: inventories/chaosknoten/host_vars/tickets.* + key_groups: + - pgp: + *admin_gpg_keys + age: + - *host_tickets_ansible_pull_age_key + - path_regex: inventories/chaosknoten/host_vars/onlyoffice.* + key_groups: + - pgp: + *admin_gpg_keys + age: + - *host_onlyoffice_ansible_pull_age_key + - path_regex: inventories/chaosknoten/host_vars/zammad.* + key_groups: + - pgp: + *admin_gpg_keys + age: + - *host_zammad_ansible_pull_age_key + - path_regex: inventories/chaosknoten/host_vars/ntfy.* + key_groups: + - pgp: + *admin_gpg_keys + age: + - *host_ntfy_ansible_pull_age_key + - path_regex: inventories/chaosknoten/host_vars/eh22-wiki.* + key_groups: + - pgp: + *admin_gpg_keys + age: + - *host_eh22_wiki_ansible_pull_age_key + - path_regex: inventories/chaosknoten/host_vars/sunders.* + key_groups: + - pgp: + *admin_gpg_keys + age: + - *host_sunders_ansible_pull_age_key + - path_regex: inventories/chaosknoten/host_vars/wiki.* + key_groups: + - pgp: + *admin_gpg_keys + age: + - *host_wiki_ansible_pull_age_key + - path_regex: inventories/chaosknoten/host_vars/renovate.* + key_groups: + - pgp: + *admin_gpg_keys + age: + - *host_renovate_ansible_pull_age_key + - path_regex: inventories/chaosknoten/host_vars/lists.* + key_groups: + - pgp: + *admin_gpg_keys + age: + - *host_lists_ansible_pull_age_key + - path_regex: 
inventories/chaosknoten/host_vars/mumble.* + key_groups: + - pgp: + *admin_gpg_keys + age: + - *host_mumble_ansible_pull_age_key + - path_regex: inventories/chaosknoten/host_vars/public-reverse-proxy.* + key_groups: + - pgp: + *admin_gpg_keys + age: + - *host_public_reverse_proxy_ansible_pull_age_key + - path_regex: inventories/chaosknoten/host_vars/spaceapiccc.* + key_groups: + - pgp: + *admin_gpg_keys + age: + - *host_spaceapiccc_ansible_pull_age_key + # external hosts + - path_regex: inventories/external/host_vars/status.* + key_groups: + - pgp: + *admin_gpg_keys + age: + - *host_status_ansible_pull_age_key + # z9 hosts + - path_regex: inventories/z9/host_vars/dooris.* + key_groups: + - pgp: + *admin_gpg_keys + - path_regex: inventories/z9/host_vars/yate.* + key_groups: + - pgp: + *admin_gpg_keys + # general + - key_groups: + - pgp: + *admin_gpg_keys +stores: + yaml: + indent: 2 diff --git a/README.md b/README.md index 6906a7f3..dff670ab 100644 --- a/README.md +++ b/README.md @@ -7,20 +7,27 @@ Folgende Geräte und Server werden duch dieses Ansible Repository verwaltet: Host-spezifische Konfigurationsdateien liegen unter `resources/` und werden für jeweils über eine `host_vars`-Datei im Inventory geladen. -## Galaxy-Collections und -Rollen installieren +## Galaxy-Collections und -Rollen -Für einige Aspekte verwenden wir Rollen aus Ansible Galaxy. Die müssen zunächst installiert werden: +Für einige Aspekte verwenden wir Collections und Rollen aus Ansible Galaxy. Diese werden in [`ansible_collections`](./ansible_collections/) bzw. [`galaxy-roles`](./galaxy-roles/) hier im Repo vorgehalten. +Um unsere gevendorte Version zu aktualisieren, kann man folgendes machen: ```bash ansible-galaxy install -r requirements.yml +ansible-galaxy role install -r requirements.yml ``` ## Secrets -Grundsätzlich sollten Secrets vermieden werden. (Also z.B.: Nutze SSH Keys statt Passwort.) +Generally try to avoid secrets (e.g. use SSH keys instead of passwords). -Da Secrets aber durchaus doch gebraucht werden, werden diese dann in diesem Repo direkt aus dem [password-store](https://git.hamburg.ccc.de/CCCHH/password-store) (meist aus einem Sub-Eintrag des `noc/` Ordners) geladen. -Dies geschieht mit Hilfe des `community.general.passwordstore` lookup Plugins. +Because secrets are nonetheless needed sometimes, we use [SOPS](https://github.com/getsops/sops) to securely store secrets in this repository. +SOPS encrypts secrets according to "creation rules" which are defined in the `.sops.yaml`. +Generally all secrets get encrypted for all GPG-keys of all members of the infrastructure team. +Ansible then has access to the secrets with the help of the [`community.sops.sops` vars plugin](https://docs.ansible.com/ansible/latest/collections/community/sops/docsite/guide.html#working-with-encrypted-variables), which is configured in this repository. +A local Ansible run then uses the locally available GPG-key to decrypt the secrets. + +For a tutorial on how to set up secrets using SOPS for a new host, see [Setting Up Secrets Using SOPS for a New Host](./docs/setting_up_secrets_using_sops_for_a_new_host.md). 
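To make the SOPS setup above concrete, here is a minimal sketch of the workflow (the `netbox` paths are only illustrative; the playbook and inventory file names are assumptions, not taken from this repository):

```bash
# Create or edit an encrypted host_vars file. SOPS picks the matching
# creation rule from .sops.yaml (here: inventories/chaosknoten/host_vars/netbox.*)
# and encrypts the file for all admin GPG keys plus the host's ansible-pull age key.
sops inventories/chaosknoten/host_vars/netbox.sops.yaml

# On a local run, the community.sops.sops vars plugin enabled in ansible.cfg
# decrypts *.sops.yaml host_vars/group_vars transparently, using the locally
# available GPG key.
ansible-playbook -i inventories/chaosknoten/hosts.yaml playbooks/netbox.yaml
```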
 ## Playbook nur für einzelne Hosts ausführen
diff --git a/ansible.cfg b/ansible.cfg
index ca065480..805406f0 100644
--- a/ansible.cfg
+++ b/ansible.cfg
@@ -1,6 +1,6 @@
 [defaults]
 inventory = ./inventories/z9/hosts.yaml
 pipelining = True
-
-[passwordstore_lookup]
-backend = pass
+vars_plugins_enabled = host_group_vars,community.sops.sops
+collections_path = ./
+roles_path = ./galaxy-roles
diff --git a/ansible_collections/community/docker/.ansible-lint b/ansible_collections/community/docker/.ansible-lint
new file mode 100644
index 00000000..bd650004
--- /dev/null
+++ b/ansible_collections/community/docker/.ansible-lint
@@ -0,0 +1,30 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+skip_list:
+  # Ignore rules that make no sense:
+  - galaxy[tags]
+  - galaxy[version-incorrect]
+  - meta-runtime[unsupported-version]
+  - no-changed-when
+  - sanity[cannot-ignore] # some of the rules you cannot ignore actually MUST be ignored, like yamllint:unparsable-with-libyaml
+  - yaml # we're using yamllint ourselves
+  - run-once[task] # wtf???
+
+  # To be checked and maybe fixed:
+  - ignore-errors
+  - key-order[task]
+  - name[casing]
+  - name[missing]
+  - name[play]
+  - name[template]
+  - no-free-form
+  - no-handler
+  - risky-file-permissions
+  - risky-shell-pipe
+  - var-naming[no-reserved]
+  - var-naming[no-role-prefix]
+  - var-naming[pattern]
+  - var-naming[read-only]
diff --git a/ansible_collections/community/docker/.azure-pipelines/README.md b/ansible_collections/community/docker/.azure-pipelines/README.md
new file mode 100644
index 00000000..9e8ad741
--- /dev/null
+++ b/ansible_collections/community/docker/.azure-pipelines/README.md
@@ -0,0 +1,9 @@
+
+
+## Azure Pipelines Configuration
+
+Please see the [Documentation](https://github.com/ansible/community/wiki/Testing:-Azure-Pipelines) for more information.
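The `ansible_collections/community/docker/` files that follow are part of the vendored Galaxy content mentioned in the README. As a rough sketch, refreshing that vendored content with the paths configured in `ansible.cfg` looks like this (run from the repository root; the layout comments are inferred from the config, not verified against the repository):

```bash
# collections_path = ./  ->  collections are installed into ./ansible_collections/<namespace>/<name>/
ansible-galaxy install -r requirements.yml

# roles_path = ./galaxy-roles  ->  roles are installed into ./galaxy-roles/<role_name>/
ansible-galaxy role install -r requirements.yml
```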
diff --git a/ansible_collections/community/docker/.azure-pipelines/azure-pipelines.yml b/ansible_collections/community/docker/.azure-pipelines/azure-pipelines.yml new file mode 100644 index 00000000..1919fe7d --- /dev/null +++ b/ansible_collections/community/docker/.azure-pipelines/azure-pipelines.yml @@ -0,0 +1,280 @@ +--- +# Copyright (c) Ansible Project +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +trigger: + batch: true + branches: + include: + - main + - stable-* + +pr: + autoCancel: true + branches: + include: + - main + - stable-* + +schedules: + - cron: 0 9 * * * + displayName: Nightly + always: true + branches: + include: + - main + - cron: 0 12 * * 0 + displayName: Weekly (old stable branches) + always: true + branches: + include: + - stable-4 + +variables: + - name: checkoutPath + value: ansible_collections/community/docker + - name: coverageBranches + value: main + - name: entryPoint + value: tests/utils/shippable/shippable.sh + - name: fetchDepth + value: 0 + +resources: + containers: + - container: default + image: quay.io/ansible/azure-pipelines-test-container:7.0.0 + +pool: Standard + +stages: + +### Sanity & units + - stage: Ansible_devel + displayName: Sanity & Units devel + dependsOn: [] + jobs: + - template: templates/matrix.yml + parameters: + targets: + - name: Sanity + test: 'devel/sanity/1' + - name: Units + test: 'devel/units/1' + - stage: Ansible_2_20 + displayName: Sanity & Units 2.20 + dependsOn: [] + jobs: + - template: templates/matrix.yml + parameters: + targets: + - name: Sanity + test: '2.20/sanity/1' + - name: Units + test: '2.20/units/1' + - stage: Ansible_2_19 + displayName: Sanity & Units 2.19 + dependsOn: [] + jobs: + - template: templates/matrix.yml + parameters: + targets: + - name: Sanity + test: '2.19/sanity/1' + - name: Units + test: '2.19/units/1' + - stage: Ansible_2_18 + displayName: Sanity & Units 2.18 + dependsOn: [] + jobs: + - template: templates/matrix.yml + parameters: + targets: + - name: Sanity + test: '2.18/sanity/1' + - name: Units + test: '2.18/units/1' + +### Docker + - stage: Docker_devel + displayName: Docker devel + dependsOn: [] + jobs: + - template: templates/matrix.yml + parameters: + testFormat: devel/linux/{0} + targets: + - name: Fedora 42 + test: fedora42 + - name: Ubuntu 22.04 + test: ubuntu2204 + - name: Ubuntu 24.04 + test: ubuntu2404 + - name: Alpine 3.22 + test: alpine322 + groups: + - 4 + - 5 + - stage: Docker_2_20 + displayName: Docker 2.20 + dependsOn: [] + jobs: + - template: templates/matrix.yml + parameters: + testFormat: 2.20/linux/{0} + targets: + - name: Fedora 42 + test: fedora42 + - name: Alpine 3.22 + test: alpine322 + groups: + - 4 + - 5 + - stage: Docker_2_19 + displayName: Docker 2.19 + dependsOn: [] + jobs: + - template: templates/matrix.yml + parameters: + testFormat: 2.19/linux/{0} + targets: + - name: Fedora 41 + test: fedora41 + - name: Alpine 3.21 + test: alpine321 + groups: + - 4 + - 5 + - stage: Docker_2_18 + displayName: Docker 2.18 + dependsOn: [] + jobs: + - template: templates/matrix.yml + parameters: + testFormat: 2.18/linux/{0} + targets: + - name: Fedora 40 + test: fedora40 + - name: Ubuntu 22.04 + test: ubuntu2204 + - name: Alpine 3.20 + test: alpine320 + groups: + - 4 + - 5 + +### Community Docker + - stage: Docker_community_devel + displayName: Docker (community images) devel + dependsOn: [] + jobs: + - template: templates/matrix.yml + parameters: + testFormat: 
devel/linux-community/{0} + targets: + - name: Debian 11 Bullseye + test: debian-bullseye/3.9 + - name: Debian 12 Bookworm + test: debian-bookworm/3.11 + - name: Debian 13 Trixie + test: debian-13-trixie/3.13 + - name: ArchLinux + test: archlinux/3.13 + groups: + - 4 + - 5 + +### Remote + - stage: Remote_devel + displayName: Remote devel + dependsOn: [] + jobs: + - template: templates/matrix.yml + parameters: + testFormat: devel/{0} + targets: + - name: RHEL 10.0 + test: rhel/10.0 + - name: RHEL 9.6 with Docker SDK, urllib3, requests from sources + test: rhel/9.6-dev-latest + # For some reason, Ubuntu 24.04 is *extremely* slower than RHEL 9.6 + # - name: Ubuntu 24.04 + # test: ubuntu/24.04 + groups: + - 1 + - 2 + - 3 + - 4 + - 5 + - stage: Remote_2_20 + displayName: Remote 2.20 + dependsOn: [] + jobs: + - template: templates/matrix.yml + parameters: + testFormat: 2.20/{0} + targets: + - name: RHEL 9.6 + test: rhel/9.6 + groups: + - 1 + - 2 + - 3 + - 4 + - 5 + - stage: Remote_2_19 + displayName: Remote 2.19 + dependsOn: [] + jobs: + - template: templates/matrix.yml + parameters: + testFormat: 2.19/{0} + targets: + - name: RHEL 9.5 + test: rhel/9.5 + - name: Ubuntu 22.04 + test: ubuntu/22.04 + groups: + - 1 + - 2 + - 3 + - 4 + - 5 + - stage: Remote_2_18 + displayName: Remote 2.18 + dependsOn: [] + jobs: + - template: templates/matrix.yml + parameters: + testFormat: 2.18/{0} + targets: + - name: RHEL 9.4 + test: rhel/9.4 + groups: + - 1 + - 2 + - 3 + - 4 + - 5 + + ## Finally + + - stage: Summary + condition: succeededOrFailed() + dependsOn: + - Ansible_devel + - Ansible_2_20 + - Ansible_2_19 + - Ansible_2_18 + - Remote_devel + - Remote_2_20 + - Remote_2_19 + - Remote_2_18 + - Docker_devel + - Docker_2_20 + - Docker_2_19 + - Docker_2_18 + - Docker_community_devel + jobs: + - template: templates/coverage.yml diff --git a/ansible_collections/community/docker/.azure-pipelines/scripts/aggregate-coverage.sh b/ansible_collections/community/docker/.azure-pipelines/scripts/aggregate-coverage.sh new file mode 100755 index 00000000..0ccef353 --- /dev/null +++ b/ansible_collections/community/docker/.azure-pipelines/scripts/aggregate-coverage.sh @@ -0,0 +1,28 @@ +#!/usr/bin/env bash +# Aggregate code coverage results for later processing. + +# Copyright (c) Ansible Project +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +set -o pipefail -eu + +agent_temp_directory="$1" + +PATH="${PWD}/bin:${PATH}" + +mkdir "${agent_temp_directory}/coverage/" + +if [[ "$(ansible --version)" =~ \ 2\.9\. ]]; then + exit +fi + +options=(--venv --venv-system-site-packages --color -v) + +ansible-test coverage combine --group-by command --export "${agent_temp_directory}/coverage/" "${options[@]}" + +if ansible-test coverage analyze targets generate --help >/dev/null 2>&1; then + # Only analyze coverage if the installed version of ansible-test supports it. + # Doing so allows this script to work unmodified for multiple Ansible versions. 
+ ansible-test coverage analyze targets generate "${agent_temp_directory}/coverage/coverage-analyze-targets.json" "${options[@]}" +fi diff --git a/ansible_collections/community/docker/.azure-pipelines/scripts/combine-coverage.py b/ansible_collections/community/docker/.azure-pipelines/scripts/combine-coverage.py new file mode 100755 index 00000000..3b2fd993 --- /dev/null +++ b/ansible_collections/community/docker/.azure-pipelines/scripts/combine-coverage.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python +# Copyright (c) Ansible Project +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +""" +Combine coverage data from multiple jobs, keeping the data only from the most recent attempt from each job. +Coverage artifacts must be named using the format: "Coverage $(System.JobAttempt) {StableUniqueNameForEachJob}" +The recommended coverage artifact name format is: Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName) +Keep in mind that Azure Pipelines does not enforce unique job display names (only names). +It is up to pipeline authors to avoid name collisions when deviating from the recommended format. +""" + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os +import re +import shutil +import sys + + +def main(): + """Main program entry point.""" + source_directory = sys.argv[1] + + if '/ansible_collections/' in os.getcwd(): + output_path = "tests/output" + else: + output_path = "test/results" + + destination_directory = os.path.join(output_path, 'coverage') + + if not os.path.exists(destination_directory): + os.makedirs(destination_directory) + + jobs = {} + count = 0 + + for name in os.listdir(source_directory): + match = re.search('^Coverage (?P[0-9]+) (?P