Vendor Galaxy Roles and Collections

This commit is contained in:
Stefan Bethke 2026-02-06 22:07:16 +01:00
commit 2aed20393f
3553 changed files with 387444 additions and 2 deletions

View file

@ -0,0 +1,18 @@
---
profile: production
exclude_paths:
- .cache/
- .github/
- examples/
parseable: true
verbosity: 1
use_default_rules: true
skip_list:
- '204' # Allow string length greater than 160 chars
- 'no-changed-when' # False positives for running command shells
- 'yaml' # Disable YAML linting since it's done by yamllint
- 'empty-string-compare' # Allow compare to empty string

View file

@ -0,0 +1,83 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
- name: almalinux-8
image: dokken/almalinux-8
pre_build_image: true
privileged: true
cgroup_parent: docker.slice
command: /lib/systemd/systemd
- name: almalinux-9
image: dokken/almalinux-9
pre_build_image: true
privileged: true
cgroup_parent: docker.slice
command: /lib/systemd/systemd
- name: centos-7
image: dokken/centos-7
pre_build_image: true
privileged: true
cgroup_parent: docker.slice
command: /usr/lib/systemd/systemd
- name: centos-stream-8
image: dokken/centos-stream-8
pre_build_image: true
privileged: true
cgroup_parent: docker.slice
command: /lib/systemd/systemd
- name: centos-stream-9
image: dokken/centos-stream-9
pre_build_image: true
privileged: true
cgroup_parent: docker.slice
command: /lib/systemd/systemd
- name: debian-10
image: dokken/debian-10
pre_build_image: true
privileged: true
cgroup_parent: docker.slice
command: /lib/systemd/systemd
- name: debian-11
image: dokken/debian-11
pre_build_image: true
privileged: true
cgroup_parent: docker.slice
command: /lib/systemd/systemd
- name: fedora-36
image: dokken/fedora-36
pre_build_image: true
privileged: true
cgroup_parent: docker.slice
command: /lib/systemd/systemd
- name: fedora-37
image: dokken/fedora-37
pre_build_image: true
privileged: true
cgroup_parent: docker.slice
command: /lib/systemd/systemd
- name: ubuntu-18.04
image: dokken/ubuntu-18.04
pre_build_image: true
privileged: true
cgroup_parent: docker.slice
command: /lib/systemd/systemd
- name: ubuntu-20.04
image: dokken/ubuntu-20.04
pre_build_image: true
privileged: true
cgroup_parent: docker.slice
command: /lib/systemd/systemd
- name: ubuntu-22.04
image: dokken/ubuntu-22.04
pre_build_image: true
privileged: true
cgroup_parent: docker.slice
command: /lib/systemd/systemd
provisioner:
env:
ANSIBLE_INJECT_FACT_VARS: "false"
verifier:
name: testinfra

View file

@ -0,0 +1,42 @@
# This file is the top-most EditorConfig file
root = true
# All Files
[*]
charset = utf-8
indent_style = space
indent_size = 2
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
# JSON Files
[*.{json,json5,webmanifest}]
indent_size = 2
# YAML Files
[*.{yml,yaml}]
indent_size = 2
# Markdown Files
[*.{md,mdx}]
indent_size = 2
trim_trailing_whitespace = false
# Web Files
[*.{htm,html,js,jsm,ts,tsx,cjs,cts,ctsx,mjs,mts,mtsx,css,sass,scss,less,pcss,svg,vue}]
indent_size = 2
# Bash Files
[*.sh]
indent_size = 2
end_of_line = lf
# Makefiles
[{Makefile,**.mk}]
indent_style = tab
# Python Files
[*.py]
indent_style = space
indent_size = 4

View file

@ -0,0 +1,11 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
version: 2
updates:
- package-ecosystem: "pip" # See documentation for possible values
directory: "/" # Location of package manifests
schedule:
interval: "weekly"

View file

@ -0,0 +1,45 @@
---
name: Alloy Molecule
on:
push:
branches:
- main
pull_request:
branches:
- main
defaults:
run:
working-directory: roles/alloy
jobs:
molecule:
name: Molecule
runs-on: ubuntu-latest
strategy:
matrix:
distro:
- rockylinux9
- ubuntu2204
- debian12
- opensuseleap15
steps:
- name: Check out the codebase.
uses: actions/checkout@v4
- name: Set up Python 3.
uses: actions/setup-python@v5
with:
python-version: '3.x'
- name: Install test dependencies.
run: pip3 install ansible molecule molecule-plugins[docker] docker
- name: Run Molecule tests.
run: molecule test
env:
PY_COLORS: '1'
ANSIBLE_FORCE_COLOR: '1'
MOLECULE_DISTRO: ${{ matrix.distro }}

View file

@ -0,0 +1,69 @@
---
name: Full Integration Test
# yamllint disable-line rule:truthy
on:
schedule:
- cron: '0 0 * * 0'
workflow_dispatch:
env:
NAMESPACE: grafana
COLLECTION_NAME: grafana
jobs:
integration:
runs-on: ubuntu-20.04
name: ${{ matrix.ansible }}-py${{ matrix.python }}
strategy:
fail-fast: true
max-parallel: 1
matrix:
ansible:
- stable-2.11
- stable-2.12
- stable-2.13
- devel
python:
- '2.7'
- '3.5'
- '3.6'
- '3.7'
- '3.8'
- '3.9'
- '3.10'
exclude:
- ansible: stable-2.11
python: '3.10'
steps:
- name: Check out code
uses: actions/checkout@v2
with:
path: ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}
- name: create integration_config
working-directory: ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}/tests/integration
run: |
cat <<EOF > integration_config.yml
stack_name: ${{ secrets.ANSIBLE_TEST_STACK_NAME }}
org_name: ${{ secrets.ANSIBLE_TEST_ORG_NAME }}
grafana_cloud_api_key: ${{ secrets.ANSIBLE_TEST_CLOUD_API_KEY }}
grafana_api_key: ${{ secrets.ANSIBLE_TEST_GRAFANA_API_KEY }}
test_stack_name: ${{ secrets.ANSIBLE_TEST_CI_STACK }}
EOF
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: '3.10'
- name: Install ansible-base (${{ matrix.ansible }})
run: pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible }}.tar.gz --disable-pip-version-check
- name: Test Modules
run: ansible-test integration -v alert_contact_point alert_notification_policy cloud_api_key cloud_plugin cloud_stack dashboard datasource folder --color --retry-on-error --continue-on-error --diff --python ${{ matrix.python }} --coverage --docker
working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}
- name: Cooling Period
run: sleep 3m

View file

@ -0,0 +1,63 @@
---
name: Lint
# yamllint disable-line rule:truthy
on:
push:
branches: ["main"]
pull_request:
branches: ["main"]
jobs:
lint:
name: Perform Linting
runs-on: ubuntu-latest
steps:
- name: Install shellcheck
run: |
wget -c https://github.com/koalaman/shellcheck/releases/download/v0.9.0/shellcheck-v0.9.0.linux.x86_64.tar.xz -O shellcheck.tar.xz && \
tar -xvf shellcheck.tar.xz && \
sudo mv ./shellcheck-v0.9.0/shellcheck /usr/bin/shellcheck && \
rm -rf shellcheck-v0.9.0
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 18
cache: npm
- uses: actions/setup-python@v4
with:
python-version: '3.10'
- name: Install pipenv
run: |
python -m pip install --upgrade pipenv wheel
- name: Install dependencies
run: make install
- name: Shell Linting
run: make ci-lint-shell
if: success() || failure()
# - name: Markdown Linting
# run: make ci-lint-markdown
# if: success() || failure()
# - name: Text Linting
# run: make ci-lint-text
# if: success() || failure()
- name: Yaml Linting
run: make ci-lint-yaml
if: success() || failure()
- name: Editorconfig Linting
run: make ci-lint-editorconfig
if: success() || failure()
- name: Ansible Linting
run: make ci-lint-ansible
if: success() || failure()

View file

@ -0,0 +1,44 @@
---
name: Loki Molecule
on:
push:
branches:
- main
pull_request:
branches:
- main
defaults:
run:
working-directory: roles/loki
jobs:
molecule:
name: Molecule
runs-on: ubuntu-latest
strategy:
matrix:
distro:
- rockylinux9
- ubuntu2204
- debian12
steps:
- name: Check out the codebase.
uses: actions/checkout@v4
- name: Set up Python 3.
uses: actions/setup-python@v5
with:
python-version: '3.x'
- name: Install test dependencies.
run: pip3 install ansible molecule molecule-plugins[docker] docker
- name: Run Molecule tests.
run: molecule test
env:
PY_COLORS: '1'
ANSIBLE_FORCE_COLOR: '1'
MOLECULE_DISTRO: ${{ matrix.distro }}

View file

@ -0,0 +1,51 @@
---
name: Mimir Molecule
on:
push:
branches:
- main
pull_request:
branches:
- main
defaults:
run:
working-directory: roles/mimir
jobs:
molecule:
name: Molecule
runs-on: ubuntu-latest
strategy:
matrix:
distro:
- rockylinux9
- rockylinux8
- ubuntu2204
- debian12
steps:
- name: Check out the codebase.
uses: actions/checkout@v4
- name: Set up Python 3.
uses: actions/setup-python@v5
with:
python-version: '3.x'
- name: Install test dependencies.
run: pip3 install ansible-core==2.16 'molecule-plugins[docker]' pytest-testinfra jmespath selinux passlib
- name: create docker network
run: docker network create molecule
- name: Start s3 backend
run: docker run -d -p 9000:9000 -p 9001:9001 --name minio-mimir --network molecule -e "MINIO_ROOT_USER=testtest" -e "MINIO_ROOT_PASSWORD=testtest" -e "MINIO_DEFAULT_BUCKETS=mimir" bitnamilegacy/minio:latest
- name: Run Molecule tests.
run: molecule --debug test
env:
PY_COLORS: '1'
ANSIBLE_FORCE_COLOR: '1'
MOLECULE_DISTRO: ${{ matrix.distro }}

View file

@ -0,0 +1,98 @@
---
name: Modules Test
# yamllint disable-line rule:truthy
on:
push:
branches:
- "main"
pull_request:
schedule:
- cron: '0 6 * * *'
env:
NAMESPACE: grafana
COLLECTION_NAME: grafana
jobs:
sanity:
name: Sanity (Ⓐ${{ matrix.ansible }})
strategy:
matrix:
ansible:
- stable-2.12
- stable-2.13
- stable-2.14
- devel
runs-on: ubuntu-20.04
steps:
- name: Check out code
uses: actions/checkout@v3
with:
path: ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}
- name: Set up Python
uses: actions/setup-python@v3
with:
python-version: '3.10'
- name: Install ansible-base (${{ matrix.ansible }})
run: pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible }}.tar.gz --disable-pip-version-check
- name: Run sanity tests
run: ansible-test sanity -v --docker --color --coverage
working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}
integration:
runs-on: ubuntu-20.04
name: Integration (Ⓐ${{ matrix.ansible }}-py${{ matrix.python }})
strategy:
fail-fast: true
max-parallel: 1
matrix:
ansible:
- stable-2.13
python:
- '3.10'
steps:
- name: Check out code
uses: actions/checkout@v2
with:
path: ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}
- name: create integration_config
working-directory: ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}/tests/integration
run: |
cat <<EOF > integration_config.yml
org_name: ${{ secrets.ANSIBLE_TEST_ORG_NAME }}
grafana_cloud_api_key: ${{ secrets.ANSIBLE_TEST_CLOUD_API_KEY }}
grafana_api_key: ${{ secrets.ANSIBLE_TEST_GRAFANA_API_KEY }}
grafana_url: ${{ secrets.ANSIBLE_GRAFANA_URL }}
test_stack_name: ${{ secrets.ANSIBLE_TEST_CI_STACK }}
EOF
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python }}
- name: Install ansible-base (${{ matrix.ansible }})
run: pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible }}.tar.gz --disable-pip-version-check
- name: Install Requests
run: pip install requests
- name: Create Test Stack
run: ansible-test integration -v create_cloud_stack --color --retry-on-error --continue-on-error --diff --python ${{ matrix.python }} --coverage --docker
working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}
- name: Test Modules
run: ansible-test integration -v alert_contact_point alert_notification_policy cloud_api_key cloud_plugin dashboard datasource folder --color --retry-on-error --continue-on-error --diff --python ${{ matrix.python }} --coverage --docker
working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}
- name: Delete Test Stack
if: success() || failure()
run: ansible-test integration -v delete_cloud_stack --color --retry-on-error --continue-on-error --diff --python ${{ matrix.python }} --coverage --docker
working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}

View file

@ -0,0 +1,44 @@
---
name: OpenTelemetry Collector Molecule
on:
push:
branches:
- main
pull_request:
branches:
- main
defaults:
run:
working-directory: roles/opentelemetry_collector
jobs:
molecule:
name: Molecule
runs-on: ubuntu-latest
strategy:
matrix:
scenario:
- default
- default-check-first
- latest
- non-contrib
steps:
- name: Check out the codebase.
uses: actions/checkout@v4
- name: Set up Python 3.
uses: actions/setup-python@v5
with:
python-version: '3.x'
- name: Install test dependencies.
run: pip3 install ansible molecule molecule-plugins[docker] docker pytest-testinfra
- name: Run Molecule tests.
run: molecule test -s ${{ matrix.scenario }}
env:
PY_COLORS: '1'
ANSIBLE_FORCE_COLOR: '1'

View file

@ -0,0 +1,44 @@
---
name: Promtail Molecule
on:
push:
branches:
- main
pull_request:
branches:
- main
defaults:
run:
working-directory: roles/promtail
jobs:
molecule:
name: Molecule
runs-on: ubuntu-latest
strategy:
matrix:
distro:
- rockylinux9
- ubuntu2204
- debian12
steps:
- name: Check out the codebase.
uses: actions/checkout@v4
- name: Set up Python 3.
uses: actions/setup-python@v5
with:
python-version: '3.x'
- name: Install test dependencies.
run: pip3 install ansible molecule molecule-plugins[docker] docker
- name: Run Molecule tests.
run: molecule test
env:
PY_COLORS: '1'
ANSIBLE_FORCE_COLOR: '1'
MOLECULE_DISTRO: ${{ matrix.distro }}

View file

@ -0,0 +1,159 @@
---
name: GitHub Release
# yamllint disable-line rule:truthy
on:
workflow_dispatch:
inputs:
version:
description: 'Version number to release'
required: true
env:
NAMESPACE: grafana
COLLECTION_NAME: grafana
jobs:
sanity:
name: Sanity (Ⓐ${{ matrix.ansible }})
strategy:
matrix:
ansible:
- stable-2.12
- stable-2.13
- stable-2.14
- devel
runs-on: ubuntu-20.04
steps:
- name: Check out code
uses: actions/checkout@v3
with:
path: ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}
- name: Set up Python
uses: actions/setup-python@v3
with:
python-version: '3.10'
- name: Install ansible-base (${{ matrix.ansible }})
run: pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible }}.tar.gz --disable-pip-version-check
- name: Run sanity tests
run: ansible-test sanity -v --docker --color --coverage
working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}
integration:
runs-on: ubuntu-20.04
name: Integration (Ⓐ${{ matrix.ansible }}-py${{ matrix.python }})
strategy:
fail-fast: true
max-parallel: 1
matrix:
ansible:
- stable-2.13
python:
- '3.10'
steps:
- name: Check out code
uses: actions/checkout@v2
with:
path: ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}
- name: create integration_config
working-directory: ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}/tests/integration
run: |
cat <<EOF > integration_config.yml
org_name: ${{ secrets.ANSIBLE_TEST_ORG_NAME }}
grafana_cloud_api_key: ${{ secrets.ANSIBLE_TEST_CLOUD_API_KEY }}
grafana_api_key: ${{ secrets.ANSIBLE_TEST_GRAFANA_API_KEY }}
grafana_url: ${{ secrets.ANSIBLE_GRAFANA_URL }}
test_stack_name: ${{ secrets.ANSIBLE_TEST_CI_STACK }}
EOF
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python }}
- name: Install ansible-base (${{ matrix.ansible }})
run: pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible }}.tar.gz --disable-pip-version-check
- name: Install Requests
run: pip install requests
- name: Create Test Stack
run: ansible-test integration -v create_cloud_stack --color --retry-on-error --continue-on-error --diff --python ${{ matrix.python }} --coverage --docker
working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}
- name: Test Modules
run: ansible-test integration -v alert_contact_point alert_notification_policy cloud_api_key cloud_plugin dashboard datasource folder --color --retry-on-error --continue-on-error --diff --python ${{ matrix.python }} --coverage --docker
working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}
- name: Delete Test Stack
if: success() || failure()
run: ansible-test integration -v delete_cloud_stack --color --retry-on-error --continue-on-error --diff --python ${{ matrix.python }} --coverage --docker
working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}
publish:
name: Publish to Galaxy
runs-on: ubuntu-latest
needs: [sanity, integration]
timeout-minutes: 15
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: '3.10'
- name: Install Ansible
run: pip install ansible-core
- name: Install PyYaml
run: pip install pyyaml
- name: Validate version matches
run: |
VERSION=$(python -c "import yaml; print(yaml.safe_load(open('galaxy.yml'))['version'])")
if [ "$VERSION" != "${{ github.event.inputs.version }}" ]; then
echo "Error: Input version (${{ github.event.inputs.version }}) doesn't match galaxy.yml version ($VERSION)"
exit 1
fi
- name: Build collection
id: build
run: |
ansible-galaxy collection build
echo "archive_path=$(ls *.tar.gz)" >> $GITHUB_OUTPUT
- name: Publish collection to Galaxy
env:
ANSIBLE_GALAXY_API_KEY: ${{ secrets.ANSIBLE_GALAXY_API_KEY }}
run: ansible-galaxy collection publish --api-key ${{ secrets.ANSIBLE_GALAXY_API_KEY }} ${{ steps.build.outputs.archive_path }}
release:
name: Create GitHub Release
runs-on: ubuntu-latest
needs: [publish]
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: '3.10'
- name: Install PyYaml
run: pip install pyyaml
- name: Validate version is published to Galaxy
run: curl --head -s -f -o /dev/null https://galaxy.ansible.com/download/grafana-grafana-${{ github.event.inputs.version }}.tar.gz
- name: Release
uses: softprops/action-gh-release@v1
with:
tag_name: ${{ github.event.inputs.version }}

View file

@ -0,0 +1,90 @@
---
name: Roles Test
# yamllint disable-line rule:truthy
on:
push:
branches:
- "main"
pull_request:
schedule:
- cron: '0 6 * * *'
env:
NAMESPACE: grafana
COLLECTION_NAME: grafana
jobs:
sanity:
name: Sanity (Ⓐ${{ matrix.ansible }})
strategy:
matrix:
ansible:
- stable-2.12
- stable-2.13
- stable-2.14
- devel
runs-on: ubuntu-20.04
steps:
- name: Check out code
uses: actions/checkout@v3
with:
path: ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}
- name: Set up Python
uses: actions/setup-python@v3
with:
python-version: '3.10'
- name: Install ansible-base (${{ matrix.ansible }})
run: pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible }}.tar.gz --disable-pip-version-check
- name: Run sanity tests
run: ansible-test sanity -v --docker --color --coverage
working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}
integration:
runs-on: ubuntu-20.04
name: Integration (Ⓐ${{ matrix.ansible }}-py${{ matrix.python }})
strategy:
fail-fast: true
max-parallel: 1
matrix:
ansible:
- stable-2.13
python:
- '3.10'
steps:
- name: Check out code
uses: actions/checkout@v2
with:
path: ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}
- name: create integration_config
working-directory: ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}/tests/integration
run: |
cat <<EOF > integration_config.yml
stack_name: ${{ secrets.ANSIBLE_TEST_STACK_NAME }}
org_name: ${{ secrets.ANSIBLE_TEST_ORG_NAME }}
grafana_cloud_api_key: ${{ secrets.ANSIBLE_TEST_CLOUD_API_KEY }}
grafana_api_key: ${{ secrets.ANSIBLE_TEST_GRAFANA_API_KEY }}
grafana_url: ${{ secrets.ANSIBLE_GRAFANA_URL }}
test_stack_name: ${{ secrets.ANSIBLE_TEST_CI_STACK }}
EOF
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python }}
- name: Install ansible-base (${{ matrix.ansible }})
run: pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible }}.tar.gz --disable-pip-version-check
- name: Install Requests
run: pip install requests
- name: Test Roles
run: ansible-test integration -v molecule-grafana-alternative molecule-grafana-default --color --retry-on-error --continue-on-error --diff --python ${{ matrix.python }} --coverage --docker
working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}

View file

@ -0,0 +1,237 @@
# Project
# ---------------------------------------------------
*.pyc
.vscode
.idea
hosts
*.log
yala
# MacOS
# ---------------------------------------------------
# General
.DS_Store
.AppleDouble
.LSOverride
# Icon must end with two \r
Icon
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
# Windows
# ---------------------------------------------------
# Windows thumbnail cache files
Thumbs.db
Thumbs.db:encryptable
ehthumbs.db
ehthumbs_vista.db
# Dump file
*.stackdump
# Folder config file
[Dd]esktop.ini
# Recycle Bin used on file shares
$RECYCLE.BIN/
# Windows Installer files
*.cab
*.msi
*.msix
*.msm
*.msp
# Windows shortcuts
*.lnk
# MS Office
# ---------------------------------------------------
*.tmp
# Word temporary
~$*.doc*
# Word Auto Backup File
Backup of *.doc*
# Excel temporary
~$*.xls*
# Excel Backup File
*.xlk
# PowerPoint temporary
~$*.ppt*
# Visio autosave temporary files
*.~vsd*
# VS Code
# ---------------------------------------------------
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
*.code-workspace
# Local History for Visual Studio Code
.history/
*.app
.snapshots/*
# NodeJS
# ---------------------------------------------------
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
.cache
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*

View file

@ -0,0 +1,261 @@
---
# markdownlint YAML configuration listing all properties; based on the upstream
# example defaults, with some values customized for this project (e.g. line length)
# Default state for all rules
default: true
# Path to configuration file to extend
extends: null
# MD001/heading-increment/header-increment - Heading levels should only increment by one level at a time
MD001: true
# MD002/first-heading-h1/first-header-h1 - First heading should be a top-level heading
MD002:
# Heading level
level: 1
# MD003/heading-style/header-style - Heading style
MD003:
# Heading style
style: "consistent"
# MD004/ul-style - Unordered list style
MD004:
# List style
style: "consistent"
# MD005/list-indent - Inconsistent indentation for list items at the same level
MD005: true
# MD006/ul-start-left - Consider starting bulleted lists at the beginning of the line
MD006: true
# MD007/ul-indent - Unordered list indentation
MD007:
# Spaces for indent
indent: 2
# Whether to indent the first level of the list
start_indented: false
# Spaces for first level indent (when start_indented is set)
start_indent: 2
# MD009/no-trailing-spaces - Trailing spaces
MD009:
# Spaces for line break
br_spaces: 2
# Allow spaces for empty lines in list items
list_item_empty_lines: false
# Include unnecessary breaks
strict: false
# MD010/no-hard-tabs - Hard tabs
MD010:
# Include code blocks
code_blocks: true
# Fenced code languages to ignore
ignore_code_languages: []
# Number of spaces for each hard tab
spaces_per_tab: 2
# MD011/no-reversed-links - Reversed link syntax
MD011: true
# MD012/no-multiple-blanks - Multiple consecutive blank lines
MD012:
# Consecutive blank lines
maximum: 1
# MD013/line-length - Line length
MD013:
# Number of characters
line_length: 150
# Number of characters for headings
heading_line_length: 80
# Number of characters for code blocks
code_block_line_length: 100
# Include code blocks
code_blocks: true
# Include tables
tables: false
# Include headings
headings: true
# Include headings (deprecated alias for "headings")
headers: true
# Strict length checking
strict: false
# Stern length checking
stern: false
# MD014/commands-show-output - Dollar signs used before commands without showing output
MD014: false
# MD018/no-missing-space-atx - No space after hash on atx style heading
MD018: true
# MD019/no-multiple-space-atx - Multiple spaces after hash on atx style heading
MD019: true
# MD020/no-missing-space-closed-atx - No space inside hashes on closed atx style heading
MD020: true
# MD021/no-multiple-space-closed-atx - Multiple spaces inside hashes on closed atx style heading
MD021: true
# MD022/blanks-around-headings/blanks-around-headers - Headings should be surrounded by blank lines
MD022:
# Blank lines above heading
lines_above: 1
# Blank lines below heading
lines_below: 1
# MD023/heading-start-left/header-start-left - Headings must start at the beginning of the line
MD023: true
# MD024/no-duplicate-heading/no-duplicate-header - Multiple headings with the same content
MD024: false
# MD025/single-title/single-h1 - Multiple top-level headings in the same document
MD025:
# Heading level
level: 1
# RegExp for matching title in front matter
front_matter_title: "^\\s*title\\s*[:=]"
# MD026/no-trailing-punctuation - Trailing punctuation in heading
MD026:
# Punctuation characters not allowed at end of headings
punctuation: ".,;:!。,;:!"
# MD027/no-multiple-space-blockquote - Multiple spaces after blockquote symbol
MD027: true
# MD028/no-blanks-blockquote - Blank line inside blockquote
MD028: true
# MD029/ol-prefix - Ordered list item prefix
MD029:
# List style
style: "one_or_ordered"
# MD030/list-marker-space - Spaces after list markers
MD030:
# Spaces for single-line unordered list items
ul_single: 3
# Spaces for single-line ordered list items
ol_single: 2
# Spaces for multi-line unordered list items
ul_multi: 3
# Spaces for multi-line ordered list items
ol_multi: 2
# MD031/blanks-around-fences - Fenced code blocks should be surrounded by blank lines
MD031:
# Include list items
list_items: true
# MD032/blanks-around-lists - Lists should be surrounded by blank lines
MD032: true
# MD033/no-inline-html - Inline HTML
MD033:
# Allowed elements
allowed_elements:
- div
- br
- hr
# MD034/no-bare-urls - Bare URL used
MD034: true
# MD035/hr-style - Horizontal rule style
MD035:
# Horizontal rule style
style: "consistent"
# MD036/no-emphasis-as-heading/no-emphasis-as-header - Emphasis used instead of a heading
MD036: false
# MD037/no-space-in-emphasis - Spaces inside emphasis markers
MD037: true
# MD038/no-space-in-code - Spaces inside code span elements
MD038: true
# MD039/no-space-in-links - Spaces inside link text
MD039: true
# MD040/fenced-code-language - Fenced code blocks should have a language specified
MD040:
# List of languages
allowed_languages: []
# Require language only
language_only: false
# MD041/first-line-heading/first-line-h1 - First line in a file should be a top-level heading
MD041:
# Heading level
level: 1
# RegExp for matching title in front matter
front_matter_title: "^\\s*title\\s*[:=]"
# MD042/no-empty-links - No empty links
MD042: true
# MD043/required-headings/required-headers - Required heading structure
MD043:
# List of headings
headings:
- "*"
# List of headings (deprecated alias for "headings")
headers: []
# Match case of headings
match_case: false
# MD044/proper-names - Proper names should have the correct capitalization
MD044:
# List of proper names
names: []
# Include code blocks
code_blocks: true
# Include HTML elements
html_elements: true
# MD045/no-alt-text - Images should have alternate text (alt text)
MD045: true
# MD046/code-block-style - Code block style
MD046:
# Block style
style: "consistent"
# MD047/single-trailing-newline - Files should end with a single newline character
MD047: true
# MD048/code-fence-style - Code fence style
MD048:
# Code fence style
style: "consistent"
# MD049/emphasis-style - Emphasis style should be consistent
MD049:
# Emphasis style should be consistent
style: "consistent"
# MD050/strong-style - Strong style should be consistent
MD050:
# Strong style should be consistent
style: "consistent"
# MD051/link-fragments - Link fragments should be valid
MD051: false
# MD052/reference-links-images - Reference links and images should use a label that is defined
MD052: true
# MD053/link-image-reference-definitions - Link and image reference definitions should be needed
MD053:
# Ignored definitions
ignored_definitions: [
"//"
]

View file

@ -0,0 +1,7 @@
# Allow opening any 'source'd file, even if not specified as input
external-sources=true
# some files are sourced which can make some areas appear unreachable
disable=SC2317
disable=SC2250
disable=SC2312

View file

@ -0,0 +1,187 @@
{
"rules": {
"common-misspellings": true,
"no-todo": true,
"terminology": {
"defaultTerms": false,
"terms": [
"Grafana",
["GrafanaLabs", "Grafana Labs"],
["GrafanaCloud", "Grafana Cloud"],
"Mimir",
"Loki",
"Phlare",
"Tempo",
"Faro",
"Raintank",
"Prometheus",
"PromQL",
["(E|e)xamplars", "$1xemplars"],
["(D|d)atasource", "$1ata source"],
"CData",
"Google",
"Amazon",
"RedHat",
"Azure",
"Airbnb",
"Android",
"AppleScript",
"AppVeyor",
"AVA",
"BrowserStack",
"Browsersync",
"Codecov",
"CodePen",
"CodeSandbox",
"DefinitelyTyped",
"EditorConfig",
"ESLint",
"GitHub",
"GraphQL",
"iOS",
"JavaScript",
"JetBrains",
"jQuery",
"LinkedIn",
"Lodash",
"MacBook",
"Markdown",
"OpenType",
"PayPal",
"PhpStorm",
"RubyMine",
"Sass",
"SemVer",
"TypeScript",
"UglifyJS",
"Wasm",
"WebAssembly",
"WebStorm",
"WordPress",
"YouTube",
["Common[ .]js", "CommonJS"],
["JSDocs?", "JSDoc"],
["Nodejs", "Node.js"],
["React[ .]js", "React"],
["SauceLabs", "Sauce Labs"],
["StackOverflow", "Stack Overflow"],
["styled ?components", "styled-components"],
["HTTP[ /]2(?:\\.0)?", "HTTP/2"],
["OS X", "macOS"],
["Mac ?OS", "macOS"],
["a npm", "an npm"],
"ECMAScript",
["ES2015", "ES6"],
["ES7", "ES2016"],
"3D",
["3-D", "3D"],
"Ajax",
"API",
["API[']?s", "APIs"],
"CSS",
"GIF",
" HTML ",
"HTTPS",
"IoT",
"I/O",
["I-O", "I/O"],
"JPEG",
"MIME",
"OK",
"PaaS",
" PDF ",
"PNG",
"SaaS",
"URL",
["URL[']?s", "URLs"],
["an URL", "a URL"],
["wi[- ]?fi", "Wi-Fi"],
"McKenzie",
"McConnell",
[" id", " ID"],
["id[']?s", "IDs"],
["backwards compatible", "backward compatible"],
["build system(s?)", "build tool$1"],
["CLI tool(s?)", "command-line tool$1"],
["he or she", "they"],
["he/she", "they"],
["\\(s\\)he", "they"],
["repo\\b", "repository"],
["smartphone(s?)", "mobile phone$1"],
["web[- ]?site(s?)", "site$1"],
["auto[- ]complete", "autocomplete"],
["auto[- ]format", "autoformat"],
["auto[- ]fix", "autofix"],
["auto[- ]fixing", "autofixing"],
["back[- ]end(\\w*)", "backend$1"],
["bug[- ]fix(es?)", "bugfix$1"],
["change[- ]log(s?)", "changelog$1"],
["check[- ]box(es?)", "checkbox$1"],
["code[- ]base(es?)", "codebase$1"],
["co[- ]locate(d?)", "colocate$1"],
["end[- ]point(s?)", "endpoint$1"],
["e[- ]mail(s?)", "email$1"],
["file[- ]name(s?)", "filename$1"],
["front[- ]end(\\w*)", "frontend$1"],
["hack[- ]a[- ]thon(s?)", "hackathon$1"],
["host[- ]name(s?)", "hostname$1"],
["hot[- ]key(s?)", "hotkey$1"],
["life[- ]cycle", "lifecycle"],
["life[- ]stream(s?)", "lifestream$1"],
["lock[- ]file(s?)", "lockfile$1"],
["mark-up", "markup"],
["meta[- ]data", "metadata"],
["micro[- ]service(s?)", "microservice$1"],
["name[- ]space(s?)", "namespace$1"],
["pre[- ]condition(s?)", "precondition$1"],
["pre[- ]defined", "predefined"],
["pre[- ]release(s?)", "prerelease$1"],
["re[- ]write", "rewrite"],
["run[- ]time", "runtime"],
["screen[- ]shot(s?)", "screenshot$1"],
["screen[- ]?snap(s?)", "screenshot$1"],
["sub[- ]class((?:es|ing)?)", "subclass$1"],
["sub[- ]tree(s?)", "subtree$1"],
["time[- ]stamp(s?)", "timestamp$1"],
["touch[- ]screen(s?)", "touchscreen$1"],
["user[- ]name(s?)", "username$1"],
["walk[- ]through", "walkthrough"],
["white[- ]space", "whitespace"],
["wild[- ]card(s?)", "wildcard$1"],
["css-?in-?js", "CSS in JS"],
["code-?review(s?)", "code review$1"],
["code-?splitting", "code splitting"],
["end-?user(s?)", "end user$1"],
["file-?type(s?)", "file type$1"],
["micro-?frontend(s?)", "micro frontend$1"],
["open-?source(ed?)", "open source$1"],
["regexp?(s?)", "regular expression$1"],
["style-?guide(s?)", "style guide$1"],
["tree-?shaking", "tree shaking"],
["source-?map(s?)", "source map$1"],
["style-?sheet(s?)", "style sheet$1"],
["user-?base", "user base"],
["web-?page(s?)", "web page$1"],
["built ?in", "built-in"],
["client ?side", "client-side"],
["command ?line", "command-line"],
["end ?to ?end", "end-to-end"],
["error ?prone", "error-prone"],
["higher ?order", "higher-order"],
["key[/ ]?value", "key-value"],
["server ?side", "server-side"],
["two ?steps?", "two-step"],
["2 ?steps?", "two-step"],
["(\\w+[^.?!]\\)? )base64", "$1base64"],
["(\\w+[^.?!]\\)? )internet", "$1internet"],
["(\\w+[^.?!]\\)? )stylelint", "$1stylelint"],
["(\\w+[^.?!]\\)? )webpack", "$1webpack"],
["(\\w+[^.?!]\\)? )npm", "$1npm"],
["environemnt(s?)", "environment$1"],
["pacakge(s?)", "package$1"],
["tilda", "tilde"],
["falsey", "falsy"]
]
}
}
}

View file

@ -0,0 +1,15 @@
---
yaml-files:
- "*.yaml"
- "*.yml"
- ".yamllint"
ignore:
- node_modules
extends: default
rules:
line-length:
max: 150
level: warning

View file

@ -0,0 +1,695 @@
=============================
Grafana.Grafana Release Notes
=============================
.. contents:: Topics
v6.0.6
======
Major Changes
-------------
- Restore default listen address and port in Mimir by @56quarters in https://github.com/grafana/grafana-ansible-collection/pull/456
- fix broken Grafana apt repository addition by @kleini in https://github.com/grafana/grafana-ansible-collection/pull/454
v6.0.5
======
Major Changes
-------------
- Fallback to empty dict in case grafana_ini is undefined by @root-expert in https://github.com/grafana/grafana-ansible-collection/pull/403
- Fix Mimir config file validation task by @Windos in https://github.com/grafana/grafana-ansible-collection/pull/428
- Fixes issue by @digiserg in https://github.com/grafana/grafana-ansible-collection/pull/421
- Import custom dashboards only when directory exists by @mahendrapaipuri in https://github.com/grafana/grafana-ansible-collection/pull/430
- Updated YUM repo urls from `packages.grafana.com` to `rpm.grafana.com` by @DejfCold in https://github.com/grafana/grafana-ansible-collection/pull/414
- Use credentials from grafana_ini when importing dashboards by @root-expert in https://github.com/grafana/grafana-ansible-collection/pull/402
- do not skip scrape latest github version even in check_mode by @cmehat in https://github.com/grafana/grafana-ansible-collection/pull/408
- fix datasource documentation by @jeremad in https://github.com/grafana/grafana-ansible-collection/pull/437
- fix mimir_download_url_deb & mimir_download_url_rpm by @germebl in https://github.com/grafana/grafana-ansible-collection/pull/400
- update catalog info by @Duologic in https://github.com/grafana/grafana-ansible-collection/pull/434
- use deb822 for newer debian versions by @Lukas-Heindl in https://github.com/grafana/grafana-ansible-collection/pull/440
v6.0.4
======
Major Changes
-------------
- Add SUSE support to Alloy role by @pozsa in https://github.com/grafana/grafana-ansible-collection/pull/423
- Fixes to foldersFromFilesStructure option by @root-expert in https://github.com/grafana/grafana-ansible-collection/pull/351
- Migrate RedHat install to ansible.builtin.package by @r65535 in https://github.com/grafana/grafana-ansible-collection/pull/431
- add macOS support to alloy role by @l50 in https://github.com/grafana/grafana-ansible-collection/pull/418
- replace None with [] for safe length checks by @voidquark in https://github.com/grafana/grafana-ansible-collection/pull/426
v6.0.3
======
Major Changes
-------------
- Bump ansible-lint from 24.9.2 to 25.6.1 by @dependabot[bot] in https://github.com/grafana/grafana-ansible-collection/pull/391
- Bump brace-expansion from 1.1.11 to 1.1.12 in the npm_and_yarn group across 1 directory by @dependabot[bot] in https://github.com/grafana/grafana-ansible-collection/pull/396
- Changes for issue
- Update Mimir README.md by @Gufderald in https://github.com/grafana/grafana-ansible-collection/pull/397
- declare collection dependencies by @ishanjainn in https://github.com/grafana/grafana-ansible-collection/pull/390
- declare collection dependencies by @kleini in https://github.com/grafana/grafana-ansible-collection/pull/392
- ensure IP assert returns boolean result by @aardbol in https://github.com/grafana/grafana-ansible-collection/pull/398
- improve mimir/alloy examples playbook by @smCloudInTheSky in https://github.com/grafana/grafana-ansible-collection/pull/369
- store APT key with .asc extension by @derhuerst in https://github.com/grafana/grafana-ansible-collection/pull/394
v6.0.2
======
Major Changes
-------------
- Add delete protection by @KucicM in https://github.com/grafana/grafana-ansible-collection/pull/381
- Don't override defaults by @56quarters in https://github.com/grafana/grafana-ansible-collection/pull/382
- Don't use a proxy when doing Alloy readiness check by @benoitc-croesus in https://github.com/grafana/grafana-ansible-collection/pull/375
- Fix Mimir URL verify task by @parcimonic in https://github.com/grafana/grafana-ansible-collection/pull/358
- Fix some regression introduced by v6 by @voidquark in https://github.com/grafana/grafana-ansible-collection/pull/376
- Update when statement to test for dashboard files found by @hal58th in https://github.com/grafana/grafana-ansible-collection/pull/363
- Use become false in find task by @santilococo in https://github.com/grafana/grafana-ansible-collection/pull/368
- alloy_readiness_check_use_https by @piotr-g in https://github.com/grafana/grafana-ansible-collection/pull/359
- declare collection dependencies by @kleini in https://github.com/grafana/grafana-ansible-collection/pull/386
- ensure alerting provisioning directory exists by @derhuerst in https://github.com/grafana/grafana-ansible-collection/pull/364
- mark configuration deployment task with `no_log` by @kkantonop in https://github.com/grafana/grafana-ansible-collection/pull/380
- properly validate config by @pieterlexis-tomtom in https://github.com/grafana/grafana-ansible-collection/pull/354
- template ingester and querier section by @Gufderald in https://github.com/grafana/grafana-ansible-collection/pull/371
- use ansible_facts instead of variables by @kleini in https://github.com/grafana/grafana-ansible-collection/pull/365
v6.0.1
======
Minor Changes
-------------
- Remove Node modules from Ansible Collection build
v6.0.0
======
Major Changes
-------------
- Add foldersFromFilesStructure option by @root-expert in https://github.com/grafana/grafana-ansible-collection/pull/326
- Add tempo role by @CSTDev in https://github.com/grafana/grafana-ansible-collection/pull/323
- Do not log grafana.ini contents when setting facts by @root-expert in https://github.com/grafana/grafana-ansible-collection/pull/325
- Fix loki_operational_config section not getting rendered in config.yml by @olegkaspersky in https://github.com/grafana/grafana-ansible-collection/pull/330
- Fix sectionless items edge case by @santilococo in https://github.com/grafana/grafana-ansible-collection/pull/303
- Fix tags Inherit default vars by @MJurayev in https://github.com/grafana/grafana-ansible-collection/pull/341
- Fix the markdown code fences for install command by @benmatselby in https://github.com/grafana/grafana-ansible-collection/pull/306
- Grafana fix facts in main.yml by @voidquark in https://github.com/grafana/grafana-ansible-collection/pull/315
- Make dashboard imports more flexible by @torfbolt in https://github.com/grafana/grafana-ansible-collection/pull/308
- Make systemd create /var/lib/otel-collector by @pieterlexis-tomtom in https://github.com/grafana/grafana-ansible-collection/pull/336
- Validate config by @pieterlexis-tomtom in https://github.com/grafana/grafana-ansible-collection/pull/327
- add catalog-info file for internal dev catalog by @theSuess in https://github.com/grafana/grafana-ansible-collection/pull/317
- add publish step to GitHub Actions workflow for Ansible Galaxy by @thelooter in https://github.com/grafana/grafana-ansible-collection/pull/340
- add user module to create/update/delete grafana users by @mvalois in https://github.com/grafana/grafana-ansible-collection/pull/178
- force temporary directory even in check mode for dashboards.yml by @cmehat in https://github.com/grafana/grafana-ansible-collection/pull/339
- integrate sles legacy init-script support by @floerica in https://github.com/grafana/grafana-ansible-collection/pull/184
- management of the config.river with the conversion of the config.yaml by @lbrule in https://github.com/grafana/grafana-ansible-collection/pull/149
- use ansible_facts instead of ansible_* variables by @kleini in https://github.com/grafana/grafana-ansible-collection/pull/296
v5.7.0
======
Major Changes
-------------
- Ability to set custom directory path for \*.alloy config files by @voidquark in https://github.com/grafana/grafana-ansible-collection/pull/294
- Add tests and support version latest by @pieterlexis-tomtom in https://github.com/grafana/grafana-ansible-collection/pull/299
- Fix 'dict object' has no attribute 'path' when running with --check by @JMLX42 in https://github.com/grafana/grafana-ansible-collection/pull/283
- Update grafana template by @santilococo in https://github.com/grafana/grafana-ansible-collection/pull/300
- add loki bloom support by @voidquark in https://github.com/grafana/grafana-ansible-collection/pull/298
- grafana.ini yaml syntax by @intermittentnrg in https://github.com/grafana/grafana-ansible-collection/pull/232
v5.6.0
======
Major Changes
-------------
- Adding "distributor" section support to mimir config file by @HamzaKhait in https://github.com/grafana/grafana-ansible-collection/pull/247
- Allow alloy_user_groups variable again by @pjezek in https://github.com/grafana/grafana-ansible-collection/pull/276
- Alloy Role Improvements by @voidquark in https://github.com/grafana/grafana-ansible-collection/pull/281
- Bump ansible-lint from 24.6.0 to 24.9.2 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/270
- Bump pylint from 3.2.5 to 3.3.1 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/273
- Ensure check-mode works for otel collector by @pieterlexis-tomtom in https://github.com/grafana/grafana-ansible-collection/pull/264
- Fix message argument of dashboard task by @Nemental in https://github.com/grafana/grafana-ansible-collection/pull/256
- Update Alloy variables to use the `grafana_alloy_` namespace so they are unique by @Aethylred in https://github.com/grafana/grafana-ansible-collection/pull/209
- Update README.md by @aioue in https://github.com/grafana/grafana-ansible-collection/pull/272
- Update README.md by @aioue in https://github.com/grafana/grafana-ansible-collection/pull/275
- Update main.yml by @aioue in https://github.com/grafana/grafana-ansible-collection/pull/274
- add grafana_plugins_ops to defaults and docs by @weakcamel in https://github.com/grafana/grafana-ansible-collection/pull/251
- add option to populate google_analytics_4_id value by @copolycube in https://github.com/grafana/grafana-ansible-collection/pull/249
- fix ansible-lint warnings on Forbidden implicit octal value "0640" by @copolycube in https://github.com/grafana/grafana-ansible-collection/pull/279
v5.5.1
======
Bugfixes
--------
- Add check_mode: false to Loki "Scrape GitHub" Task by @winsmith in https://github.com/grafana/grafana-ansible-collection/pull/262
v5.5.0
======
Major Changes
-------------
- add support for extra args by @harryfinbow in https://github.com/grafana/grafana-ansible-collection/pull/259
- mimir molecule should use ansible core 2.16 by @GVengelen in https://github.com/grafana/grafana-ansible-collection/pull/254
v5.4.1
======
Major Changes
-------------
- Updated promtail arch map for aarch64 matching by @gianmarco-mameli in https://github.com/grafana/grafana-ansible-collection/pull/257
v5.4.0
======
Major Changes
-------------
- Use a variable to control uninstall behavior instead of tags by @dobbi84 in https://github.com/grafana/grafana-ansible-collection/pull/253
v5.3.0
======
Major Changes
-------------
- Add a config check before restarting mimir by @panfantastic in https://github.com/grafana/grafana-ansible-collection/pull/198
- Add support for configuring feature_toggles in grafana role by @LexVar in https://github.com/grafana/grafana-ansible-collection/pull/173
- Backport post-setup healthcheck from agent to alloy by @v-zhuravlev in https://github.com/grafana/grafana-ansible-collection/pull/213
- Bump ansible-lint from 24.2.3 to 24.5.0 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/207
- Bump ansible-lint from 24.5.0 to 24.6.0 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/216
- Bump braces from 3.0.2 to 3.0.3 in the npm_and_yarn group across 1 directory by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/218
- Bump pylint from 3.1.0 to 3.1.1 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/200
- Bump pylint from 3.1.1 to 3.2.2 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/208
- Bump pylint from 3.2.2 to 3.2.3 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/217
- Bump pylint from 3.2.3 to 3.2.5 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/234
- Change from config.river to config.alloy by @cardasac in https://github.com/grafana/grafana-ansible-collection/pull/225
- Fix Grafana Configuration for Unified and Legacy Alerting Based on Version by @voidquark in https://github.com/grafana/grafana-ansible-collection/pull/215
- Fix env file location by @v-zhuravlev in https://github.com/grafana/grafana-ansible-collection/pull/211
- Support adding alloy user to extra groups by @v-zhuravlev in https://github.com/grafana/grafana-ansible-collection/pull/212
- Updated result.json['message'] to result.json()['message'] by @CPreun in https://github.com/grafana/grafana-ansible-collection/pull/223
- readme styling & language improvements by @tigattack in https://github.com/grafana/grafana-ansible-collection/pull/214
v5.2.0
======
Major Changes
-------------
- Add a new config part to configure KeyCloak based auth by @he0s in https://github.com/grafana/grafana-ansible-collection/pull/191
- Add promtail role by @voidquark in https://github.com/grafana/grafana-ansible-collection/pull/197
- Bump ansible-lint from 24.2.2 to 24.2.3 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/195
v5.1.0
======
Major Changes
-------------
- Uninstall Step for Loki and Mimir by @voidquark in https://github.com/grafana/grafana-ansible-collection/pull/193
v5.0.0
======
Major Changes
-------------
- Add Grafana Loki role by @voidquark in https://github.com/grafana/grafana-ansible-collection/pull/188
- Add Grafana Mimir role by @GVengelen in https://github.com/grafana/grafana-ansible-collection/pull/183
v4.0.0
======
Major Changes
-------------
- Add an Ansible role for Grafana Alloy by @ishanjainn in https://github.com/grafana/grafana-ansible-collection/pull/169
Minor Changes
-------------
- Apply correct uid + gid for imported dashboards by @hypery2k in https://github.com/grafana/grafana-ansible-collection/pull/167
- Bump ansible-lint from 24.2.0 to 24.2.1 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/164
- Bump ansible-lint from 24.2.0 to 24.2.1 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/168
- Bump black from 24.1.1 to 24.3.0 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/165
- Clarify grafana-server configuration in README by @VGerris in https://github.com/grafana/grafana-ansible-collection/pull/177
- Update description to match module by @brmurphy in https://github.com/grafana/grafana-ansible-collection/pull/179
v3.0.0
======
Major Changes
-------------
- Add an Ansible role for OpenTelemetry Collector by @ishanjainn in https://github.com/grafana/grafana-ansible-collection/pull/138
Minor Changes
-------------
- Bump pylint from 3.0.3 to 3.1.0 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/158
- Bump pylint from 3.0.3 to 3.1.0 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/161
- Bump the pip group across 1 directories with 1 update by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/156
- Bump yamllint from 1.33.0 to 1.35.1 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/155
- Bump yamllint from 1.33.0 to 1.35.1 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/159
- ExecStartPre and EnvironmentFile settings to system unit file by @fabiiw05 in https://github.com/grafana/grafana-ansible-collection/pull/157
- datasources url parameter fix by @dergudzon in https://github.com/grafana/grafana-ansible-collection/pull/162
v2.2.5
======
Release Summary
---------------
Grafana and Agent Role bug fixes and security updates
Minor Changes
-------------
- Add 'run_once' to download&unzip tasks by @v-zhuravlev in https://github.com/grafana/grafana-ansible-collection/pull/136
- Adding `oauth_allow_insecure_email_lookup` to fix oauth user sync error by @hypery2k in https://github.com/grafana/grafana-ansible-collection/pull/132
- Bump ansible-core from 2.15.4 to 2.15.8 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/137
- Bump ansible-lint from 6.13.1 to 6.14.3 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/139
- Bump ansible-lint from 6.14.3 to 6.22.2 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/142
- Bump ansible-lint from 6.22.2 to 24.2.0 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/150
- Bump jinja2 from 3.1.2 to 3.1.3 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/129
- Bump pylint from 2.16.2 to 3.0.3 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/141
- Bump yamllint from 1.29.0 to 1.33.0 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/140
- Bump yamllint from 1.29.0 to 1.33.0 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/143
- Bump yamllint from 1.33.0 to 1.34.0 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/151
- Change handler to systemd by @v-zhuravlev in https://github.com/grafana/grafana-ansible-collection/pull/135
- Fix links in grafana_agent/defaults/main.yaml by @PabloCastellano in https://github.com/grafana/grafana-ansible-collection/pull/134
- Topic/grafana agent idempotency by @ohdearaugustin in https://github.com/grafana/grafana-ansible-collection/pull/147
v2.2.4
======
Release Summary
---------------
Grafana and Agent Role bug fixes and security updates
Minor Changes
-------------
- Bump cryptography from 41.0.4 to 41.0.6 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/126
- Drop curl check by @v-zhuravlev in https://github.com/grafana/grafana-ansible-collection/pull/120
- Fix check mode for grafana role by @Boschung-Mecatronic-AG-Infrastructure in https://github.com/grafana/grafana-ansible-collection/pull/125
- Fix check mode in Grafana Agent by @AmandaCameron in https://github.com/grafana/grafana-ansible-collection/pull/124
- Update tags in README by @ishanjainn in https://github.com/grafana/grafana-ansible-collection/pull/121
v2.2.3
======
Release Summary
---------------
Remove dependency on local-fs.target from Grafana Agent role
Minor Changes
-------------
- Remove dependency on local-fs.target from Grafana Agent role
v2.2.2
======
Release Summary
---------------
Grafana Role bug fixes and security updates
Minor Changes
-------------
- Bump cryptography from 41.0.3 to 41.0.4
- Create missing notification directory in Grafana Role
- Remove check_mode from create local directory task in Grafana Role
v2.2.1
======
Release Summary
---------------
Allow alert resource provisioning in Grafana Role
Minor Changes
-------------
- Allow alert resource provisioning in Grafana Role
v2.2.0
======
Release Summary
---------------
Grafana Agent Role Updates
Minor Changes
-------------
- Use 'ansible_system' env variable to detect os type in Grafana Agent Role
- Change Grafana Agent WAL and Positions Directory in Grafana Agent Role
v2.1.9
======
Release Summary
---------------
Security Updates and Grafana Agent Version failure fixes
Minor Changes
-------------
- Add check for Curl and failure step if Agent Version is not retrieved
- Bump cryptography from 39.0.2 to 41.0.3
- Bump semver from 5.7.1 to 5.7.2
- Bump word-wrap from 1.2.3 to 1.2.5
- Create local dashboard directory in check mode
- Update CI Testing
- Update Cloud Stack Module failures
v2.1.8
======
Release Summary
---------------
Fix grafana dashboard import in Grafana Role
Minor Changes
-------------
- Fix grafana dashboard import in Grafana Role
v2.1.7
======
Release Summary
---------------
YAML Fixes
Minor Changes
-------------
- YAML Fixes
v2.1.6
======
Release Summary
---------------
Grafana and Grafana Agent role updates
Minor Changes
-------------
- Add overrides.conf with CAP_NET_BIND_SERVICE for grafana-server unit
- Fix Grafana Dashboard Import for Grafana Role
- Make grafana_agent Idempotent
- Provisioning errors in YAML
- Use new standard to configure Grafana APT source for Grafana Role
v2.1.5
======
Release Summary
---------------
Update Grafana Agent Download variable and ZIP file
Minor Changes
-------------
- Add Grafana Agent Version and CPU Arch to Downloaded ZIP in Grafana Agent Role
- Move _grafana_agent_base_download_url from /vars to /defaults in Grafana Agent Role
v2.1.4
======
Release Summary
---------------
Update Datasource Tests and minor fixes
Minor Changes
-------------
- Datasource test updates and minor fixes
v2.1.3
======
Release Summary
---------------
Update modules to fix failing Sanity Tests
Minor Changes
-------------
- indentation and Lint fixes to modules
v2.1.2
======
Release Summary
---------------
Idempotency Updates and minor api_url fixes
Minor Changes
-------------
- Fix Deleting datasources
- Fix alert_notification_policy failing on fresh instance
- Making Deleting folders idempotent
- Remove trailing slash automatically from grafana_url
v2.1.1
======
Release Summary
---------------
Update Download tasks in Grafana Agent Role
Minor Changes
-------------
- Update Download tasks in Grafana Agent Role
v2.1.0
======
Release Summary
---------------
Add Grafana Server role and plugins support for on-prem Grafana
Major Changes
-------------
- Addition of Grafana Server role by @gardar
- Configurable agent user groups by @NormanJS
- Grafana Plugins support on-prem Grafana installation by @ishanjainn
- Updated Service for flow mode by @bentonam
Minor Changes
-------------
- Ability to configure date format in grafana server role by @RomainMou
- Avoid using shell for fetching latest version in Grafana Agent Role by @gardar
- Fix for invalid yaml with datasources list enclosed in quotes by @elkozmon
- Remove agent installation custom check by @VLZZZ
- Remove explicit user creation check by @v-zhuravlev
v2.0.0
======
Release Summary
---------------
Updated Grafana Agent Role
Major Changes
-------------
- Added Lint support
- Configs for server, metrics, logs, traces, and integrations
- Installation of the latest version
- Local installations when internet connection is not allowed
- Only download binary to controller once instead of hosts
- Skip install if the agent is already installed and the version is the same as the requested version
- Support for Grafana Agent Flow
- Validation of variables
v1.1.1
======
Release Summary
---------------
Updated return description and value for grafana.grafana.folder module
Minor Changes
-------------
- Updated the return message in grafana.grafana.folder module
v1.1.0
======
Release Summary
---------------
Added Role to deploy Grafana Agent on linux hosts
Major Changes
-------------
- Added Role for Grafana Agent
v1.0.5
======
Release Summary
---------------
Add Note to modules which don't support Idempotency
Minor Changes
-------------
- Added Note to datasource and dashboard module about not supporting Idempotency
v1.0.4
======
Release Summary
---------------
Bug fixes and idempotency fixes for modules
Major Changes
-------------
- All modules except dashboard and datasource modules now support idempotency
Minor Changes
-------------
- All modules use `missing_required_lib` to compose the message for module.fail_json() when a required library is missing from the host
Bugfixes
--------
- Fixed cases where cloud_stack and alert_contact_point modules do not return a tuple when nothing in loop matches
v1.0.3
======
Minor Changes
-------------
- Add a fail method to modules source code if `requests` library is not present
- Fixed markup for arg option in Documentation
- Updated Documentation with `notes` to specify if the check_mode feature is supported by modules
- removed `supports_check_mode=True` from source code of modules
v1.0.2
======
Release Summary
---------------
Documentation updates with updated description for modules
v1.0.1
======
Release Summary
---------------
Documentation updates with updated examples
v1.0.0
======
Release Summary
---------------
CI and testing improvements
v0.0.7
======
Release Summary
---------------
Documentation update for return values in `grafana.grafana.dashboard`
v0.0.6
======
Minor Changes
-------------
- Idempotency updates to cloud_api_key and datasource modules
v0.0.5
======
Release Summary
---------------
Documentation update and code cleanup
v0.0.4
======
Bugfixes
--------
- Fix an issue with `cloud_stack` idempotency
v0.0.3
======
Release Summary
---------------
Documentation update and code cleanup
v0.0.2
======
Release Summary
---------------
Updated input parameters description for all modules
v0.0.1
======
Release Summary
---------------
It's a release! First version to publish to Ansible Galaxy

View file

@ -0,0 +1,8 @@
./ @ishanjainn
/roles/grafana @gardar @ishanjainn
/roles/grafana_agent @ishanjainn @v-zhuravlev @gardar
/roles/alloy @ishanjainn @v-zhuravlev @gardar @voidquark
/roles/opentelemetry_collector @ishanjainn
/roles/loki @voidquark @ishanjainn
/roles/mimir @GVengelen @gardar @ishanjainn
/roles/promtail @voidquark @ishanjainn

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,674 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<https://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<https://www.gnu.org/licenses/why-not-lgpl.html>.

View file

@ -0,0 +1,40 @@
{
"collection_info": {
"namespace": "grafana",
"name": "grafana",
"version": "6.0.6",
"authors": [
"Grafana Labs <grafana.com>",
"Ishan Jain <ishan.jain@grafana.com>",
"Gerard van Engelen <g.vanengelen@codepeople.nl>"
],
"readme": "README.md",
"tags": [
"grafana",
"observability",
"monitoring"
],
"description": "Ansible collection to manage Grafana resources",
"license": [
"GPL-3.0-or-later"
],
"license_file": null,
"dependencies": {
"community.general": ">=8.2.0",
"community.grafana": ">=1.5.4",
"ansible.posix": ">=1.5.4"
},
"repository": "https://github.com/grafana/grafana-ansible-collection",
"documentation": "https://docs.ansible.com/ansible/latest/collections/grafana/grafana/index.html",
"homepage": null,
"issues": "https://github.com/grafana/grafana-ansible-collection/issues"
},
"file_manifest_file": {
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "80df907b5a9fb9307fe01b40e266d87f9c9f55c389a6692ef6677f5e08a5b5f4",
"format": 1
},
"format": 1
}

View file

@ -0,0 +1,86 @@
# Lint by default; each lint target delegates to a helper script in ./tools.
.DEFAULT_GOAL:= lint
# Prefer project-local node binaries over globally installed ones.
PATH := ./node_modules/.bin:$(PATH)
SHELL := /bin/bash
# Extra goals given on the command line, available to recipes as arguments.
args = $(filter-out $@, $(MAKECMDGOALS))
.PHONY: all setup install clean reinstall build compile pdfs lint lint-sh lint-shell lint-md lint-markdown lint-txt lint-text pdf lint-yaml lint-yml lint-editorconfig lint-ec ci-lint ci-lint-shell ci-lint-markdown ci-lint-text ci-lint-yaml ci-lint-editorconfig lint-ansible ci-lint-ansible
default: all
all: install
####################################################################
# Installation / Setup #
####################################################################
# One-time environment bootstrap (see tools/setup.sh for what it does).
setup:
@./tools/setup.sh
# Install JavaScript (yarn) and Python (pipenv) dependencies.
install:
yarn install
pipenv install
# remove the build and log folders
clean:
rm -rf build node_modules
# reinstall the node_modules and start with a fresh node build
reinstall: clean install
####################################################################
# Linting #
####################################################################
lint: lint-shell lint-markdown lint-text lint-yaml lint-editorconfig lint-ansible
# Note: "|| true" is appended so that a local "make lint" can be run and all
# linting is performed (every linter executes), regardless of each exit code.
# Shell Linting
lint-sh lint-shell:
@./tools/lint-shell.sh || true
# Markdown Linting
lint-md lint-markdown:
@./tools/lint-markdown.sh || true
# Text Linting
lint-txt lint-text:
@./tools/lint-text.sh || true
# Yaml Linting
lint-yml lint-yaml:
@./tools/lint-yaml.sh || true
# Editorconfig Linting
lint-ec lint-editorconfig:
@./tools/lint-editorconfig.sh || true
# Ansible Linting
lint-ansible:
@./tools/lint-ansible.sh || true
####################################################################
# CI #
####################################################################
# CI variants omit "|| true" so a linter failure fails the pipeline.
ci-lint: ci-lint-shell ci-lint-markdown ci-lint-text ci-lint-yaml ci-lint-editorconfig ci-lint-ansible
# Shell Linting
ci-lint-shell:
@./tools/lint-shell.sh
# Markdown Linting
ci-lint-markdown:
@./tools/lint-markdown.sh
# Text Linting
ci-lint-text:
@./tools/lint-text.sh
# Yaml Linting
ci-lint-yaml:
@./tools/lint-yaml.sh
# Editorconfig Linting
ci-lint-editorconfig:
@./tools/lint-editorconfig.sh
# Ansible Linting
ci-lint-ansible:
@./tools/lint-ansible.sh

View file

@ -0,0 +1,14 @@
[[source]]
url = "https://pypi.org/simple"
verify_ssl = true
name = "pypi"
[packages]
yamllint = "==1.35.1"
ansible-lint = ">=6.13.1,<26.0.0"
pylint = ">=2.16.2,<4.0.0"
[dev-packages]
[requires]
python_version = "3.10"

View file

@ -0,0 +1,909 @@
{
"_meta": {
"hash": {
"sha256": "d803b04fc4ca6ceec39a226c518fc618049f8996dc9d14d09f42ab1b3471e085"
},
"pipfile-spec": 6,
"requires": {
"python_version": "3.10"
},
"sources": [
{
"name": "pypi",
"url": "https://pypi.org/simple",
"verify_ssl": true
}
]
},
"default": {
"ansible-compat": {
"hashes": [
"sha256:c2b4bfeca6383b2047b2e1dea473cec4f1f9f2dd59beef71d6c47f632eaf97c9",
"sha256:cced722001bd7b617d418e54017e308c8b27ef3815f377843c00e020fa07165e"
],
"markers": "python_version >= '3.10'",
"version": "==25.6.0"
},
"ansible-core": {
"hashes": [
"sha256:24fb30783fcd3e800b839b15a396a1f9d622c007bc358e98f2992156ace52671",
"sha256:cb74f3a148b77fa0c89a284e48e7515d13fda10ad8c789eb92274c72f017a9a0"
],
"markers": "python_version >= '3.10'",
"version": "==2.17.12"
},
"ansible-lint": {
"hashes": [
"sha256:69fe294a3cc30d8819b5a30625a7e25225f48558cadb83ad3d4dec597c1b8c2c",
"sha256:6a1dd2b7a9f3f202c9e92a6c80296ff33ca863348c3acf978f80fb0d4536dce4"
],
"index": "pypi",
"markers": "python_version >= '3.10'",
"version": "==25.6.1"
},
"astroid": {
"hashes": [
"sha256:5eba185467253501b62a9f113c263524b4f5d55e1b30456370eed4cdbd6438fd",
"sha256:e73d0b62dd680a7c07cb2cd0ce3c22570b044dd01bd994bc3a2dd16c6cbba162"
],
"markers": "python_full_version >= '3.9.0'",
"version": "==3.3.4"
},
"attrs": {
"hashes": [
"sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3",
"sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"
],
"markers": "python_version >= '3.8'",
"version": "==25.3.0"
},
"black": {
"hashes": [
"sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171",
"sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7",
"sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da",
"sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2",
"sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc",
"sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666",
"sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f",
"sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b",
"sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32",
"sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f",
"sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717",
"sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299",
"sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0",
"sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18",
"sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0",
"sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3",
"sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355",
"sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096",
"sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e",
"sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9",
"sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba",
"sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f"
],
"markers": "python_version >= '3.9'",
"version": "==25.1.0"
},
"bracex": {
"hashes": [
"sha256:0b0049264e7340b3ec782b5cb99beb325f36c3782a32e36e876452fd49a09952",
"sha256:98f1347cd77e22ee8d967a30ad4e310b233f7754dbf31ff3fceb76145ba47dc7"
],
"markers": "python_version >= '3.9'",
"version": "==2.6"
},
"cffi": {
"hashes": [
"sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8",
"sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2",
"sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1",
"sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15",
"sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36",
"sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824",
"sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8",
"sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36",
"sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17",
"sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf",
"sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc",
"sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3",
"sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed",
"sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702",
"sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1",
"sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8",
"sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903",
"sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6",
"sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d",
"sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b",
"sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e",
"sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be",
"sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c",
"sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683",
"sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9",
"sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c",
"sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8",
"sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1",
"sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4",
"sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655",
"sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67",
"sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595",
"sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0",
"sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65",
"sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41",
"sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6",
"sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401",
"sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6",
"sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3",
"sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16",
"sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93",
"sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e",
"sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4",
"sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964",
"sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c",
"sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576",
"sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0",
"sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3",
"sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662",
"sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3",
"sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff",
"sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5",
"sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd",
"sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f",
"sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5",
"sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14",
"sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d",
"sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9",
"sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7",
"sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382",
"sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a",
"sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e",
"sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a",
"sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4",
"sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99",
"sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87",
"sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"
],
"markers": "python_version >= '3.8'",
"version": "==1.17.1"
},
"click": {
"hashes": [
"sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202",
"sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b"
],
"markers": "python_version >= '3.10'",
"version": "==8.2.1"
},
"cryptography": {
"hashes": [
"sha256:0339a692de47084969500ee455e42c58e449461e0ec845a34a6a9b9bf7df7fb8",
"sha256:03dbff8411206713185b8cebe31bc5c0eb544799a50c09035733716b386e61a4",
"sha256:06509dc70dd71fa56eaa138336244e2fbaf2ac164fc9b5e66828fccfd2b680d6",
"sha256:0cf13c77d710131d33e63626bd55ae7c0efb701ebdc2b3a7952b9b23a0412862",
"sha256:23b9c3ea30c3ed4db59e7b9619272e94891f8a3a5591d0b656a7582631ccf750",
"sha256:25eb4d4d3e54595dc8adebc6bbd5623588991d86591a78c2548ffb64797341e2",
"sha256:2882338b2a6e0bd337052e8b9007ced85c637da19ef9ecaf437744495c8c2999",
"sha256:3530382a43a0e524bc931f187fc69ef4c42828cf7d7f592f7f249f602b5a4ab0",
"sha256:425a9a6ac2823ee6e46a76a21a4e8342d8fa5c01e08b823c1f19a8b74f096069",
"sha256:46cf7088bf91bdc9b26f9c55636492c1cce3e7aaf8041bbf0243f5e5325cfb2d",
"sha256:4828190fb6c4bcb6ebc6331f01fe66ae838bb3bd58e753b59d4b22eb444b996c",
"sha256:49fe9155ab32721b9122975e168a6760d8ce4cffe423bcd7ca269ba41b5dfac1",
"sha256:4ca0f52170e821bc8da6fc0cc565b7bb8ff8d90d36b5e9fdd68e8a86bdf72036",
"sha256:51dfbd4d26172d31150d84c19bbe06c68ea4b7f11bbc7b3a5e146b367c311349",
"sha256:5f31e6b0a5a253f6aa49be67279be4a7e5a4ef259a9f33c69f7d1b1191939872",
"sha256:627ba1bc94f6adf0b0a2e35d87020285ead22d9f648c7e75bb64f367375f3b22",
"sha256:680806cf63baa0039b920f4976f5f31b10e772de42f16310a6839d9f21a26b0d",
"sha256:6a3511ae33f09094185d111160fd192c67aa0a2a8d19b54d36e4c78f651dc5ad",
"sha256:6a5bf57554e80f75a7db3d4b1dacaa2764611ae166ab42ea9a72bcdb5d577637",
"sha256:6b613164cb8425e2f8db5849ffb84892e523bf6d26deb8f9bb76ae86181fa12b",
"sha256:7405ade85c83c37682c8fe65554759800a4a8c54b2d96e0f8ad114d31b808d57",
"sha256:7aad98a25ed8ac917fdd8a9c1e706e5a0956e06c498be1f713b61734333a4507",
"sha256:7bedbe4cc930fa4b100fc845ea1ea5788fcd7ae9562e669989c11618ae8d76ee",
"sha256:7ef2dde4fa9408475038fc9aadfc1fb2676b174e68356359632e980c661ec8f6",
"sha256:817ee05c6c9f7a69a16200f0c90ab26d23a87701e2a284bd15156783e46dbcc8",
"sha256:944e9ccf67a9594137f942d5b52c8d238b1b4e46c7a0c2891b7ae6e01e7c80a4",
"sha256:964bcc28d867e0f5491a564b7debb3ffdd8717928d315d12e0d7defa9e43b723",
"sha256:96d4819e25bf3b685199b304a0029ce4a3caf98947ce8a066c9137cc78ad2c58",
"sha256:a77c6fb8d76e9c9f99f2f3437c1a4ac287b34eaf40997cfab1e9bd2be175ac39",
"sha256:b0a97c927497e3bc36b33987abb99bf17a9a175a19af38a892dc4bbb844d7ee2",
"sha256:b97737a3ffbea79eebb062eb0d67d72307195035332501722a9ca86bab9e3ab2",
"sha256:bbc505d1dc469ac12a0a064214879eac6294038d6b24ae9f71faae1448a9608d",
"sha256:c22fe01e53dc65edd1945a2e6f0015e887f84ced233acecb64b4daadb32f5c97",
"sha256:ce1678a2ccbe696cf3af15a75bb72ee008d7ff183c9228592ede9db467e64f1b",
"sha256:e00a6c10a5c53979d6242f123c0a97cff9f3abed7f064fc412c36dc521b5f257",
"sha256:eaa3e28ea2235b33220b949c5a0d6cf79baa80eab2eb5607ca8ab7525331b9ff",
"sha256:f3fe7a5ae34d5a414957cc7f457e2b92076e72938423ac64d215722f6cf49a9e"
],
"markers": "python_version >= '3.7' and python_full_version not in '3.9.0, 3.9.1'",
"version": "==45.0.4"
},
"dill": {
"hashes": [
"sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a",
"sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"
],
"markers": "python_version < '3.11'",
"version": "==0.3.9"
},
"filelock": {
"hashes": [
"sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2",
"sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de"
],
"markers": "python_version >= '3.9'",
"version": "==3.18.0"
},
"importlib-metadata": {
"hashes": [
"sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000",
"sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"
],
"markers": "python_version >= '3.9'",
"version": "==8.7.0"
},
"isort": {
"hashes": [
"sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109",
"sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"
],
"markers": "python_full_version >= '3.8.0'",
"version": "==5.13.2"
},
"jinja2": {
"hashes": [
"sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d",
"sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"
],
"markers": "python_version >= '3.7'",
"version": "==3.1.6"
},
"jsonschema": {
"hashes": [
"sha256:0b4e8069eb12aedfa881333004bccaec24ecef5a8a6a4b6df142b2cc9599d196",
"sha256:a462455f19f5faf404a7902952b6f0e3ce868f3ee09a359b05eca6673bd8412d"
],
"markers": "python_version >= '3.9'",
"version": "==4.24.0"
},
"jsonschema-specifications": {
"hashes": [
"sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af",
"sha256:630159c9f4dbea161a6a2205c3011cc4f18ff381b189fff48bb39b9bf26ae608"
],
"markers": "python_version >= '3.9'",
"version": "==2025.4.1"
},
"lazy-object-proxy": {
"hashes": [
"sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382",
"sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82",
"sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9",
"sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494",
"sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46",
"sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30",
"sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63",
"sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4",
"sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae",
"sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be",
"sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701",
"sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd",
"sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006",
"sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a",
"sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586",
"sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8",
"sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821",
"sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07",
"sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b",
"sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171",
"sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b",
"sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2",
"sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7",
"sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4",
"sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8",
"sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e",
"sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f",
"sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda",
"sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4",
"sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e",
"sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671",
"sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11",
"sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455",
"sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734",
"sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb",
"sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59"
],
"markers": "python_version >= '3.7'",
"version": "==1.9.0"
},
"markdown-it-py": {
"hashes": [
"sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1",
"sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"
],
"markers": "python_version >= '3.8'",
"version": "==3.0.0"
},
"markupsafe": {
"hashes": [
"sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4",
"sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30",
"sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0",
"sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9",
"sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396",
"sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13",
"sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028",
"sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca",
"sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557",
"sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832",
"sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0",
"sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b",
"sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579",
"sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a",
"sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c",
"sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff",
"sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c",
"sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22",
"sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094",
"sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb",
"sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e",
"sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5",
"sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a",
"sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d",
"sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a",
"sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b",
"sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8",
"sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225",
"sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c",
"sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144",
"sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f",
"sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87",
"sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d",
"sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93",
"sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf",
"sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158",
"sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84",
"sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb",
"sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48",
"sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171",
"sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c",
"sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6",
"sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd",
"sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d",
"sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1",
"sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d",
"sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca",
"sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a",
"sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29",
"sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe",
"sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798",
"sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c",
"sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8",
"sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f",
"sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f",
"sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a",
"sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178",
"sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0",
"sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79",
"sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430",
"sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"
],
"markers": "python_version >= '3.9'",
"version": "==3.0.2"
},
"mccabe": {
"hashes": [
"sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325",
"sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"
],
"markers": "python_version >= '3.6'",
"version": "==0.7.0"
},
"mdurl": {
"hashes": [
"sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8",
"sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"
],
"markers": "python_version >= '3.7'",
"version": "==0.1.2"
},
"mypy-extensions": {
"hashes": [
"sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505",
"sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"
],
"markers": "python_version >= '3.8'",
"version": "==1.1.0"
},
"packaging": {
"hashes": [
"sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484",
"sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"
],
"markers": "python_version >= '3.8'",
"version": "==25.0"
},
"pathspec": {
"hashes": [
"sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08",
"sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"
],
"markers": "python_version >= '3.8'",
"version": "==0.12.1"
},
"platformdirs": {
"hashes": [
"sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc",
"sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4"
],
"markers": "python_version >= '3.9'",
"version": "==4.3.8"
},
"pycparser": {
"hashes": [
"sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6",
"sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"
],
"markers": "python_version >= '3.8'",
"version": "==2.22"
},
"pygments": {
"hashes": [
"sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199",
"sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"
],
"markers": "python_version >= '3.8'",
"version": "==2.18.0"
},
"pylint": {
"hashes": [
"sha256:2f846a466dd023513240bc140ad2dd73bfc080a5d85a710afdb728c420a5a2b9",
"sha256:9f3dcc87b1203e612b78d91a896407787e708b3f189b5fa0b307712d49ff0c6e"
],
"index": "pypi",
"markers": "python_full_version >= '3.9.0'",
"version": "==3.3.1"
},
"pyyaml": {
"hashes": [
"sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff",
"sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48",
"sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086",
"sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e",
"sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133",
"sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5",
"sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484",
"sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee",
"sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5",
"sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68",
"sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a",
"sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf",
"sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99",
"sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8",
"sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85",
"sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19",
"sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc",
"sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a",
"sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1",
"sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317",
"sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c",
"sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631",
"sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d",
"sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652",
"sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5",
"sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e",
"sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b",
"sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8",
"sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476",
"sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706",
"sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563",
"sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237",
"sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b",
"sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083",
"sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180",
"sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425",
"sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e",
"sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f",
"sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725",
"sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183",
"sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab",
"sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774",
"sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725",
"sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e",
"sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5",
"sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d",
"sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290",
"sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44",
"sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed",
"sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4",
"sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba",
"sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12",
"sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"
],
"markers": "python_version >= '3.8'",
"version": "==6.0.2"
},
"referencing": {
"hashes": [
"sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa",
"sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"
],
"markers": "python_version >= '3.9'",
"version": "==0.36.2"
},
"resolvelib": {
"hashes": [
"sha256:04ce76cbd63fded2078ce224785da6ecd42b9564b1390793f64ddecbe997b309",
"sha256:d2da45d1a8dfee81bdd591647783e340ef3bcb104b54c383f70d422ef5cc7dbf"
],
"version": "==1.0.1"
},
"rich": {
"hashes": [
"sha256:1760a3c0848469b97b558fc61c85233e3dafb69c7a071b4d60c38099d3cd4c06",
"sha256:8260cda28e3db6bf04d2d1ef4dbc03ba80a824c88b0e7668a0f23126a424844a"
],
"markers": "python_full_version >= '3.7.0'",
"version": "==13.8.1"
},
"rpds-py": {
"hashes": [
"sha256:0317177b1e8691ab5879f4f33f4b6dc55ad3b344399e23df2e499de7b10a548d",
"sha256:036ded36bedb727beeabc16dc1dad7cb154b3fa444e936a03b67a86dc6a5066e",
"sha256:048893e902132fd6548a2e661fb38bf4896a89eea95ac5816cf443524a85556f",
"sha256:0701942049095741a8aeb298a31b203e735d1c61f4423511d2b1a41dcd8a16da",
"sha256:083a9513a33e0b92cf6e7a6366036c6bb43ea595332c1ab5c8ae329e4bcc0a9c",
"sha256:09eab132f41bf792c7a0ea1578e55df3f3e7f61888e340779b06050a9a3f16e9",
"sha256:0e6a327af8ebf6baba1c10fadd04964c1965d375d318f4435d5f3f9651550f4a",
"sha256:0eb90e94f43e5085623932b68840b6f379f26db7b5c2e6bcef3179bd83c9330f",
"sha256:114a07e85f32b125404f28f2ed0ba431685151c037a26032b213c882f26eb908",
"sha256:115874ae5e2fdcfc16b2aedc95b5eef4aebe91b28e7e21951eda8a5dc0d3461b",
"sha256:140f61d9bed7839446bdd44852e30195c8e520f81329b4201ceead4d64eb3a9f",
"sha256:1521031351865e0181bc585147624d66b3b00a84109b57fcb7a779c3ec3772cd",
"sha256:1c0c434a53714358532d13539272db75a5ed9df75a4a090a753ac7173ec14e11",
"sha256:1d1fadd539298e70cac2f2cb36f5b8a65f742b9b9f1014dd4ea1f7785e2470bf",
"sha256:1de336a4b164c9188cb23f3703adb74a7623ab32d20090d0e9bf499a2203ad65",
"sha256:1ee3e26eb83d39b886d2cb6e06ea701bba82ef30a0de044d34626ede51ec98b0",
"sha256:245550f5a1ac98504147cba96ffec8fabc22b610742e9150138e5d60774686d7",
"sha256:2a40046a529cc15cef88ac5ab589f83f739e2d332cb4d7399072242400ed68c9",
"sha256:2c2cd1a4b0c2b8c5e31ffff50d09f39906fe351389ba143c195566056c13a7ea",
"sha256:2cb9e5b5e26fc02c8a4345048cd9998c2aca7c2712bd1b36da0c72ee969a3523",
"sha256:33358883a4490287e67a2c391dfaea4d9359860281db3292b6886bf0be3d8692",
"sha256:35634369325906bcd01577da4c19e3b9541a15e99f31e91a02d010816b49bfda",
"sha256:35a8d1a24b5936b35c5003313bc177403d8bdef0f8b24f28b1c4a255f94ea992",
"sha256:3af5b4cc10fa41e5bc64e5c198a1b2d2864337f8fcbb9a67e747e34002ce812b",
"sha256:3bcce0edc1488906c2d4c75c94c70a0417e83920dd4c88fec1078c94843a6ce9",
"sha256:3c5b317ecbd8226887994852e85de562f7177add602514d4ac40f87de3ae45a8",
"sha256:3c6564c0947a7f52e4792983f8e6cf9bac140438ebf81f527a21d944f2fd0a40",
"sha256:3ebd879ab996537fc510a2be58c59915b5dd63bccb06d1ef514fee787e05984a",
"sha256:3f0b1798cae2bbbc9b9db44ee068c556d4737911ad53a4e5093d09d04b3bbc24",
"sha256:401ca1c4a20cc0510d3435d89c069fe0a9ae2ee6495135ac46bdd49ec0495763",
"sha256:454601988aab2c6e8fd49e7634c65476b2b919647626208e376afcd22019eeb8",
"sha256:4593c4eae9b27d22df41cde518b4b9e4464d139e4322e2127daa9b5b981b76be",
"sha256:45e484db65e5380804afbec784522de84fa95e6bb92ef1bd3325d33d13efaebd",
"sha256:48d64155d02127c249695abb87d39f0faf410733428d499867606be138161d65",
"sha256:4fbb0dbba559959fcb5d0735a0f87cdbca9e95dac87982e9b95c0f8f7ad10255",
"sha256:4fd52d3455a0aa997734f3835cbc4c9f32571345143960e7d7ebfe7b5fbfa3b2",
"sha256:50f2c501a89c9a5f4e454b126193c5495b9fb441a75b298c60591d8a2eb92e1b",
"sha256:58f77c60956501a4a627749a6dcb78dac522f249dd96b5c9f1c6af29bfacfb66",
"sha256:5a3ddb74b0985c4387719fc536faced33cadf2172769540c62e2a94b7b9be1c4",
"sha256:5c4a128527fe415d73cf1f70a9a688d06130d5810be69f3b553bf7b45e8acf79",
"sha256:5d473be2b13600b93a5675d78f59e63b51b1ba2d0476893415dfbb5477e65b31",
"sha256:5d9e40f32745db28c1ef7aad23f6fc458dc1e29945bd6781060f0d15628b8ddf",
"sha256:5f048bbf18b1f9120685c6d6bb70cc1a52c8cc11bdd04e643d28d3be0baf666d",
"sha256:605ffe7769e24b1800b4d024d24034405d9404f0bc2f55b6db3362cd34145a6f",
"sha256:6099263f526efff9cf3883dfef505518730f7a7a93049b1d90d42e50a22b4793",
"sha256:659d87430a8c8c704d52d094f5ba6fa72ef13b4d385b7e542a08fc240cb4a559",
"sha256:666fa7b1bd0a3810a7f18f6d3a25ccd8866291fbbc3c9b912b917a6715874bb9",
"sha256:68f6f060f0bbdfb0245267da014d3a6da9be127fe3e8cc4a68c6f833f8a23bb1",
"sha256:6d273f136e912aa101a9274c3145dcbddbe4bac560e77e6d5b3c9f6e0ed06d34",
"sha256:6d50841c425d16faf3206ddbba44c21aa3310a0cebc3c1cdfc3e3f4f9f6f5728",
"sha256:771c16060ff4e79584dc48902a91ba79fd93eade3aa3a12d6d2a4aadaf7d542b",
"sha256:785ffacd0ee61c3e60bdfde93baa6d7c10d86f15655bd706c89da08068dc5038",
"sha256:796ad874c89127c91970652a4ee8b00d56368b7e00d3477f4415fe78164c8000",
"sha256:79dc317a5f1c51fd9c6a0c4f48209c6b8526d0524a6904fc1076476e79b00f98",
"sha256:7c9409b47ba0650544b0bb3c188243b83654dfe55dcc173a86832314e1a6a35d",
"sha256:7d779b325cc8238227c47fbc53964c8cc9a941d5dbae87aa007a1f08f2f77b23",
"sha256:816568614ecb22b18a010c7a12559c19f6fe993526af88e95a76d5a60b8b75fb",
"sha256:8378fa4a940f3fb509c081e06cb7f7f2adae8cf46ef258b0e0ed7519facd573e",
"sha256:85608eb70a659bf4c1142b2781083d4b7c0c4e2c90eff11856a9754e965b2540",
"sha256:85fc223d9c76cabe5d0bff82214459189720dc135db45f9f66aa7cffbf9ff6c1",
"sha256:88ec04afe0c59fa64e2f6ea0dd9657e04fc83e38de90f6de201954b4d4eb59bd",
"sha256:8960b6dac09b62dac26e75d7e2c4a22efb835d827a7278c34f72b2b84fa160e3",
"sha256:89706d0683c73a26f76a5315d893c051324d771196ae8b13e6ffa1ffaf5e574f",
"sha256:89c24300cd4a8e4a51e55c31a8ff3918e6651b241ee8876a42cc2b2a078533ba",
"sha256:8c742af695f7525e559c16f1562cf2323db0e3f0fbdcabdf6865b095256b2d40",
"sha256:8dbd586bfa270c1103ece2109314dd423df1fa3d9719928b5d09e4840cec0d72",
"sha256:8eb8c84ecea987a2523e057c0d950bcb3f789696c0499290b8d7b3107a719d78",
"sha256:921954d7fbf3fccc7de8f717799304b14b6d9a45bbeec5a8d7408ccbf531faf5",
"sha256:9a46c2fb2545e21181445515960006e85d22025bd2fe6db23e76daec6eb689fe",
"sha256:9c006f3aadeda131b438c3092124bd196b66312f0caa5823ef09585a669cf449",
"sha256:9ceca1cf097ed77e1a51f1dbc8d174d10cb5931c188a4505ff9f3e119dfe519b",
"sha256:9e5fc7484fa7dce57e25063b0ec9638ff02a908304f861d81ea49273e43838c1",
"sha256:9f2f48ab00181600ee266a095fe815134eb456163f7d6699f525dee471f312cf",
"sha256:9fca84a15333e925dd59ce01da0ffe2ffe0d6e5d29a9eeba2148916d1824948c",
"sha256:a49e1d7a4978ed554f095430b89ecc23f42014a50ac385eb0c4d163ce213c325",
"sha256:a58d1ed49a94d4183483a3ce0af22f20318d4a1434acee255d683ad90bf78129",
"sha256:a61d0b2c7c9a0ae45732a77844917b427ff16ad5464b4d4f5e4adb955f582890",
"sha256:a714bf6e5e81b0e570d01f56e0c89c6375101b8463999ead3a93a5d2a4af91fa",
"sha256:a7b74e92a3b212390bdce1d93da9f6488c3878c1d434c5e751cbc202c5e09500",
"sha256:a8bd2f19e312ce3e1d2c635618e8a8d8132892bb746a7cf74780a489f0f6cdcb",
"sha256:b0be9965f93c222fb9b4cc254235b3b2b215796c03ef5ee64f995b1b69af0762",
"sha256:b24bf3cd93d5b6ecfbedec73b15f143596c88ee249fa98cefa9a9dc9d92c6f28",
"sha256:b5ffe453cde61f73fea9430223c81d29e2fbf412a6073951102146c84e19e34c",
"sha256:bc120d1132cff853ff617754196d0ac0ae63befe7c8498bd67731ba368abe451",
"sha256:bd035756830c712b64725a76327ce80e82ed12ebab361d3a1cdc0f51ea21acb0",
"sha256:bffcf57826d77a4151962bf1701374e0fc87f536e56ec46f1abdd6a903354042",
"sha256:c2013ee878c76269c7b557a9a9c042335d732e89d482606990b70a839635feb7",
"sha256:c4feb9211d15d9160bc85fa72fed46432cdc143eb9cf6d5ca377335a921ac37b",
"sha256:c8980cde3bb8575e7c956a530f2c217c1d6aac453474bf3ea0f9c89868b531b6",
"sha256:c98f126c4fc697b84c423e387337d5b07e4a61e9feac494362a59fd7a2d9ed80",
"sha256:ccc6f3ddef93243538be76f8e47045b4aad7a66a212cd3a0f23e34469473d36b",
"sha256:ccfa689b9246c48947d31dd9d8b16d89a0ecc8e0e26ea5253068efb6c542b76e",
"sha256:cda776f1967cb304816173b30994faaf2fd5bcb37e73118a47964a02c348e1bc",
"sha256:ce4c8e485a3c59593f1a6f683cf0ea5ab1c1dc94d11eea5619e4fb5228b40fbd",
"sha256:d3c10228d6cf6fe2b63d2e7985e94f6916fa46940df46b70449e9ff9297bd3d1",
"sha256:d4ca54b9cf9d80b4016a67a0193ebe0bcf29f6b0a96f09db942087e294d3d4c2",
"sha256:d4cb2b3ddc16710548801c6fcc0cfcdeeff9dafbc983f77265877793f2660309",
"sha256:d50e4864498a9ab639d6d8854b25e80642bd362ff104312d9770b05d66e5fb13",
"sha256:d74ec9bc0e2feb81d3f16946b005748119c0f52a153f6db6a29e8cd68636f295",
"sha256:d8222acdb51a22929c3b2ddb236b69c59c72af4019d2cba961e2f9add9b6e634",
"sha256:db58483f71c5db67d643857404da360dce3573031586034b7d59f245144cc192",
"sha256:dc3c1ff0abc91444cd20ec643d0f805df9a3661fcacf9c95000329f3ddf268a4",
"sha256:dd326a81afe332ede08eb39ab75b301d5676802cdffd3a8f287a5f0b694dc3f5",
"sha256:dec21e02e6cc932538b5203d3a8bd6aa1480c98c4914cb88eea064ecdbc6396a",
"sha256:e1dafef8df605fdb46edcc0bf1573dea0d6d7b01ba87f85cd04dc855b2b4479e",
"sha256:e2f6a2347d3440ae789505693a02836383426249d5293541cd712e07e7aecf54",
"sha256:e37caa8cdb3b7cf24786451a0bdb853f6347b8b92005eeb64225ae1db54d1c2b",
"sha256:e43a005671a9ed5a650f3bc39e4dbccd6d4326b24fb5ea8be5f3a43a6f576c72",
"sha256:e5e2f7280d8d0d3ef06f3ec1b4fd598d386cc6f0721e54f09109a8132182fbfe",
"sha256:e87798852ae0b37c88babb7f7bbbb3e3fecc562a1c340195b44c7e24d403e380",
"sha256:ee86d81551ec68a5c25373c5643d343150cc54672b5e9a0cafc93c1870a53954",
"sha256:f251bf23deb8332823aef1da169d5d89fa84c89f67bdfb566c49dea1fccfd50d",
"sha256:f3d86373ff19ca0441ebeb696ef64cb58b8b5cbacffcda5a0ec2f3911732a194",
"sha256:f4ad628b5174d5315761b67f212774a32f5bad5e61396d38108bd801c0a8f5d9",
"sha256:f70316f760174ca04492b5ab01be631a8ae30cadab1d1081035136ba12738cfa",
"sha256:f73ce1512e04fbe2bc97836e89830d6b4314c171587a99688082d090f934d20a",
"sha256:ff7c23ba0a88cb7b104281a99476cccadf29de2a0ef5ce864959a52675b1ca83"
],
"markers": "python_version >= '3.9'",
"version": "==0.25.1"
},
"ruamel.yaml": {
"hashes": [
"sha256:710ff198bb53da66718c7db27eec4fbcc9aa6ca7204e4c1df2f282b6fe5eb6b2",
"sha256:7227b76aaec364df15936730efbf7d72b30c0b79b1d578bbb8e3dcb2d81f52b7"
],
"markers": "python_version >= '3.8'",
"version": "==0.18.14"
},
"ruamel.yaml.clib": {
"hashes": [
"sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b",
"sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4",
"sha256:0b7e75b4965e1d4690e93021adfcecccbca7d61c7bddd8e22406ef2ff20d74ef",
"sha256:11f891336688faf5156a36293a9c362bdc7c88f03a8a027c2c1d8e0bcde998e5",
"sha256:1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3",
"sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632",
"sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6",
"sha256:2c59aa6170b990d8d2719323e628aaf36f3bfbc1c26279c0eeeb24d05d2d11c7",
"sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680",
"sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf",
"sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da",
"sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6",
"sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a",
"sha256:4f6f3eac23941b32afccc23081e1f50612bdbe4e982012ef4f5797986828cd01",
"sha256:5a0e060aace4c24dcaf71023bbd7d42674e3b230f7e7b97317baf1e953e5b519",
"sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6",
"sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f",
"sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd",
"sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2",
"sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52",
"sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd",
"sha256:943f32bc9dedb3abff9879edc134901df92cfce2c3d5c9348f172f62eb2d771d",
"sha256:95c3829bb364fdb8e0332c9931ecf57d9be3519241323c5274bd82f709cebc0c",
"sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6",
"sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb",
"sha256:a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a",
"sha256:a606ef75a60ecf3d924613892cc603b154178ee25abb3055db5062da811fd969",
"sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28",
"sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d",
"sha256:bb43a269eb827806502c7c8efb7ae7e9e9d0573257a46e8e952f4d4caba4f31e",
"sha256:bc5f1e1c28e966d61d2519f2a3d451ba989f9ea0f2307de7bc45baa526de9e45",
"sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4",
"sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12",
"sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31",
"sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642",
"sha256:d84318609196d6bd6da0edfa25cedfbabd8dbde5140a0a23af29ad4b8f91fb1e",
"sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285",
"sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed",
"sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1",
"sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7",
"sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3",
"sha256:e7e3736715fbf53e9be2a79eb4db68e4ed857017344d697e8b9749444ae57475",
"sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5",
"sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76",
"sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987",
"sha256:fd5415dded15c3822597455bc02bcd66e81ef8b7a48cb71a33628fc9fdde39df"
],
"markers": "python_version >= '3.9'",
"version": "==0.2.12"
},
"setuptools": {
"hashes": [
"sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05",
"sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"
],
"markers": "python_version >= '3.8'",
"version": "==69.0.3"
},
"subprocess-tee": {
"hashes": [
"sha256:21942e976715af4a19a526918adb03a8a27a8edab959f2d075b777e3d78f532d",
"sha256:91b2b4da3aae9a7088d84acaf2ea0abee3f4fd9c0d2eae69a9b9122a71476590"
],
"markers": "python_version >= '3.8'",
"version": "==0.4.2"
},
"tomli": {
"hashes": [
"sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6",
"sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd",
"sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c",
"sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b",
"sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8",
"sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6",
"sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77",
"sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff",
"sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea",
"sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192",
"sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249",
"sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee",
"sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4",
"sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98",
"sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8",
"sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4",
"sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281",
"sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744",
"sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69",
"sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13",
"sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140",
"sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e",
"sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e",
"sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc",
"sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff",
"sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec",
"sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2",
"sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222",
"sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106",
"sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272",
"sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a",
"sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"
],
"markers": "python_version >= '3.8'",
"version": "==2.2.1"
},
"tomlkit": {
"hashes": [
"sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde",
"sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"
],
"markers": "python_version >= '3.8'",
"version": "==0.13.2"
},
"typing-extensions": {
"hashes": [
"sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4",
"sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af"
],
"markers": "python_version >= '3.9'",
"version": "==4.14.0"
},
"wcmatch": {
"hashes": [
"sha256:5848ace7dbb0476e5e55ab63c6bbd529745089343427caa5537f230cc01beb8a",
"sha256:f11f94208c8c8484a16f4f48638a85d771d9513f4ab3f37595978801cb9465af"
],
"markers": "python_version >= '3.9'",
"version": "==10.1"
},
"wrapt": {
"hashes": [
"sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0",
"sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420",
"sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a",
"sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c",
"sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079",
"sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923",
"sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f",
"sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1",
"sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8",
"sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86",
"sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0",
"sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364",
"sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e",
"sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c",
"sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e",
"sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c",
"sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727",
"sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff",
"sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e",
"sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29",
"sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7",
"sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72",
"sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475",
"sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a",
"sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317",
"sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2",
"sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd",
"sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640",
"sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98",
"sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248",
"sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e",
"sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d",
"sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec",
"sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1",
"sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e",
"sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9",
"sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92",
"sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb",
"sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094",
"sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46",
"sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29",
"sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd",
"sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705",
"sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8",
"sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975",
"sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb",
"sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e",
"sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b",
"sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418",
"sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019",
"sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1",
"sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba",
"sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6",
"sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2",
"sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3",
"sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7",
"sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752",
"sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416",
"sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f",
"sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1",
"sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc",
"sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145",
"sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee",
"sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a",
"sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7",
"sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b",
"sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653",
"sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0",
"sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90",
"sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29",
"sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6",
"sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034",
"sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09",
"sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559",
"sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639"
],
"markers": "python_version < '3.11'",
"version": "==1.15.0"
},
"yamllint": {
"hashes": [
"sha256:2e16e504bb129ff515b37823b472750b36b6de07963bd74b307341ef5ad8bdc3",
"sha256:7a003809f88324fd2c877734f2d575ee7881dd9043360657cc8049c809eba6cd"
],
"index": "pypi",
"markers": "python_version >= '3.8'",
"version": "==1.35.1"
},
"zipp": {
"hashes": [
"sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e",
"sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166"
],
"markers": "python_version >= '3.9'",
"version": "==3.23.0"
}
},
"develop": {}
}

View file

@ -0,0 +1,159 @@
# Ansible Collection for Grafana
[![Grafana](https://img.shields.io/badge/grafana-%23F46800.svg?&logo=grafana&logoColor=white)](https://grafana.com)
[![Ansible Collection](https://img.shields.io/badge/grafana.grafana-orange)](https://galaxy.ansible.com/ui/repo/published/grafana/grafana/)
[![GitHub tag](https://img.shields.io/github/tag/grafana/grafana-ansible-collection.svg)](https://github.com/grafana/grafana-ansible-collection/tags)
[![GitHub Last Commit](https://img.shields.io/github/last-commit/grafana/grafana-ansible-collection)](https://github.com/grafana/grafana-ansible-collection/tags)
[![GitHub Contributors](https://img.shields.io/github/contributors/grafana/grafana-ansible-collection)](https://github.com/grafana/grafana-ansible-collection/tags)
This collection (`grafana.grafana`) contains modules and roles to assist in automating the management of resources in **Grafana**, **Grafana Agent**, **OpenTelemetry Collector**, **Loki**, **Mimir**, **Alloy**, and **Promtail** with Ansible.
- [Ansible collection Documentation](https://docs.ansible.com/ansible/latest/collections/grafana/grafana/)
- [Grafana](https://grafana.com)
- [Grafana Cloud](https://grafana.com/products/cloud/)
## Ansible version compatibility
The collection is tested and supported with: `ansible >= 2.9`
## Installing the collection
Before using the Grafana collection, you need to install it using the below command:
```shell
ansible-galaxy collection install grafana.grafana
```
You can also include it in a `requirements.yml` file and install it via `ansible-galaxy collection install -r requirements.yml`, using the format:
```yaml
---
collections:
- name: grafana.grafana
```
A specific version of the collection can be installed by using the version keyword in the `requirements.yml` file:
```yaml
---
collections:
- name: grafana.grafana
version: 1.0.0
```
## Roles included in the collection
This collection includes the following roles to help set up and manage Grafana, Grafana Agent, Alloy, OpenTelemetry Collector, Loki, Mimir and Promtail:
- **Grafana**: Installs and configures Grafana on your target hosts.
- **Grafana Agent**: Deploys and configures Grafana Agent, allowing for efficient metrics, logs, and trace data shipping to Grafana Cloud or other endpoints.
- **Alloy**: The replacement for Grafana Agent and Promtail. Alloy can be used to collect traces, metrics, and logs.
- **OpenTelemetry Collector**: Sets up and configures the OpenTelemetry Collector, enabling advanced observability features through data collection and transmission.
- **Loki**: Deploy and manage Loki, the log aggregation system.
- **Mimir**: Deploy and manage Mimir, the scalable long-term storage for Prometheus.
- **Promtail**: Deploy and manage Promtail, the agent which ships the contents of local logs to a private Grafana Loki.
## Using this collection
You can call modules by their Fully Qualified Collection Namespace (FQCN), such as `grafana.grafana.cloud_stack`:
```yaml
- name: Using grafana collection
hosts: localhost
tasks:
- name: Create a Grafana Cloud stack
grafana.grafana.cloud_stack:
name: mystack
stack_slug: mystack
org_slug: myorg
cloud_api_key: "{{ cloud_api_key }}"
region: eu
state: present
```
or you can add full namespace and collection name in the `collections` element in your playbook
```yaml
- name: Using grafana collection
hosts: localhost
  collections:
- grafana.grafana
tasks:
- name: Create a Grafana Cloud stack
cloud_stack:
name: mystack
stack_slug: mystack
org_slug: myorg
cloud_api_key: "{{ cloud_api_key }}"
region: eu
state: present
```
## Contributing
We are accepting GitHub pull requests and issues. There are many ways in which you can participate in the project, for example:
- Submit bugs and feature requests, and help us verify them
- Submit and review source code changes in GitHub pull requests
- Add new modules for more Grafana resources
## Testing and Development
If you want to develop new content for this collection or improve what is already
here, the easiest way to work on the collection is to clone it into one of the configured
[`COLLECTIONS_PATHS`](https://docs.ansible.com/ansible/latest/reference_appendices/config.html#collections-paths),
and work on it there.
### Testing with `ansible-test`
We use `ansible-test` for sanity.
## Commands
| Command | Description |
| :--- | :----------- |
| `make setup` | Checks to see if necessary tools are installed |
| `make install` | Installs project dependencies |
| `make lint` | Performs all linting commands |
| `make lint-sh` / `make lint-shell` | Performs shell script linting |
| `make lint-md` / `make lint-markdown` | Performs Markdown linting |
| `make lint-txt` / `make lint-text` | Performs text linting |
| `make lint-yml` / `make lint-yaml` | Performs YAML linting |
| `make lint-ec` / `make lint-editorconfig` | Performs EditorConfig Checks |
| `make lint-ansible` | Performs Ansible linting |
| `make clean` | Removes the `./node_modules` and `./build` directories |
| `make reinstall` | Shortcut to `make clean` and `make install` |
## Releasing, Versioning and Deprecation
This collection follows [Semantic Versioning](https://semver.org/). More details on versioning can be found [in the Ansible docs](https://docs.ansible.com/ansible/latest/dev_guide/developing_collections.html#collection-versions).
We plan to regularly release new minor or bugfix versions once new features or bugfixes have been implemented.
Releasing the current major version on GitHub happens from the `main` branch by the
[GitHub Release Workflow](https://github.com/grafana/grafana-ansible-collection/blob/main/.github/workflows/release.yml).
Before the [GitHub Release Workflow](https://github.com/grafana/grafana-ansible-collection/blob/main/.github/workflows/release.yml)
is run, contributors should publish the new version to Ansible Galaxy manually.
To generate changelogs for a new release, refer to [Generating Changelogs](https://docs.ansible.com/ansible/latest/dev_guide/developing_collections_changelogs.html#generating-changelogs) or run `antsibull-changelog generate` and `antsibull-changelog lint-changelog-yaml changelogs/changelog.yaml` to validate the YAML file.
To generate the tarball to be uploaded to Ansible Galaxy, refer to [Building collection tarball](https://docs.ansible.com/ansible/latest/dev_guide/developing_collections_distributing.html#building-your-collection-tarball)
## Code of Conduct
This collection follows the Ansible project's [Code of Conduct](https://docs.ansible.com/ansible/devel/community/code_of_conduct.html).
Please read and familiarize yourself with this document.
## More information
- [Maintainer guidelines](https://docs.ansible.com/ansible/devel/community/maintainers.html)
- Subscribe to the [news-for-maintainers](https://github.com/ansible-collections/news-for-maintainers) repository and track announcements there.
- [Ansible Collection overview](https://github.com/ansible-collections/overview)
- [Ansible User guide](https://docs.ansible.com/ansible/latest/user_guide/index.html)
- [Ansible Developer guide](https://docs.ansible.com/ansible/latest/dev_guide/index.html)
- [Ansible Collection Developer Guide](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html)
- [Ansible Community code of conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html)
## License
GPL-3.0-or-later

View file

@ -0,0 +1,2 @@
[defaults]
collections_paths = ./

View file

@ -0,0 +1,19 @@
apiVersion: backstage.io/v1alpha1
kind: Component
metadata:
name: grafana-ansible-collection
title: grafana-ansible-collection
description: |
This collection (grafana.grafana) contains modules and roles to assist in automating the management of resources in Grafana, Grafana Agent, OpenTelemetry Collector, Loki, Mimir, Alloy, and Promtail with Ansible.
tags:
- gitops
links:
- title: "Internal Slack Channel #ansible-collection"
url: https://grafanalabs.enterprise.slack.com/archives/C04T7GX8C69
annotations:
backstage.io/techdocs-ref: dir:.
github.com/project-slug: grafana/grafana-ansible-collection
spec:
type: tool
owner: group:default/devex
lifecycle: production

View file

@ -0,0 +1,59 @@
objects:
role: {}
plugins:
become: {}
cache: {}
callback: {}
cliconf: {}
connection: {}
filter: {}
httpapi: {}
inventory: {}
lookup: {}
module:
alert_contact_point:
description: Manage Alerting Contact points in Grafana
name: alert_contact_point
namespace: ''
version_added: 0.0.1
alert_notification_policy:
description: Manage Alerting Policies points in Grafana
name: alert_notification_policy
namespace: ''
version_added: 0.0.1
cloud_api_key:
description: Manage Grafana Cloud API keys
name: cloud_api_key
namespace: ''
version_added: 0.0.1
cloud_plugin:
description: Manage Grafana Cloud Plugins
name: cloud_plugin
namespace: ''
version_added: 0.0.1
cloud_stack:
description: Manage Grafana Cloud stack
name: cloud_stack
namespace: ''
version_added: 0.0.1
dashboard:
description: Manage Dashboards in Grafana
name: dashboard
namespace: ''
version_added: 0.0.1
datasource:
description: Manage Data sources in Grafana
name: datasource
namespace: ''
version_added: 0.0.1
folder:
description: Manage Folders in Grafana
name: folder
namespace: ''
version_added: 0.0.1
netconf: {}
shell: {}
strategy: {}
test: {}
vars: {}
version: 6.2.0

View file

@ -0,0 +1,442 @@
ancestor: null
releases:
0.0.1:
changes:
release_summary: It's a release! First version to publish to Ansible Galaxy
release_date: '2022-08-09'
0.0.2:
changes:
release_summary: Updated input parameters description for all modules
release_date: '2022-08-10'
0.0.3:
changes:
release_summary: Documentation update and code cleanup
release_date: '2022-08-10'
0.0.4:
changes:
bugfixes:
- Fix an issue with `cloud_stack` idempotency
release_date: '2022-08-10'
0.0.5:
changes:
release_summary: Documentation update and code cleanup
release_date: '2022-08-10'
0.0.6:
changes:
minor_changes:
- Idempotency updates to cloud_api_key and datasource modules
release_date: '2022-08-10'
0.0.7:
changes:
release_summary: Documentation update for return values in `grafana.grafana.dashboard`
release_date: '2022-08-11'
1.0.0:
changes:
release_summary: CI and testing improvements
release_date: '2022-08-16'
1.0.1:
changes:
release_summary: Documentation updates with updated examples
release_date: '2022-08-23'
1.0.2:
changes:
release_summary: Documentation updates with updated description for modules
release_date: '2022-08-30'
1.0.3:
changes:
minor_changes:
- Add a fail method to modules source code if `requests` library is not present
- Fixed markup for arg option in Documentation
- Updated Documentation with `notes` to specify if the check_mode feature is
supported by modules
- removed `supports_check_mode=True` from source code of modules
release_date: '2022-10-20'
1.0.4:
changes:
bugfixes:
- Fixed cases where cloud_stack and alert_contact_point modules do not return
a tuple when nothing in loop matches
major_changes:
- All modules except dashboard and datasource modules now support idempotency
minor_changes:
      - All modules use `missing_required_lib` to compose the message for module.fail_json()
        when required library is missing from host
release_summary: Bug fixes and idempotency fixes for modules
release_date: '2022-11-01'
1.0.5:
changes:
minor_changes:
- Added Note to datasource and dashboard module about not supporting Idempotency
release_summary: Add Note to modules which don't support Idempotency
release_date: '2022-11-10'
1.1.0:
changes:
major_changes:
- Added Role for Grafana Agent
release_summary: Added Role to deploy Grafana Agent on linux hosts
release_date: '2022-11-22'
1.1.1:
changes:
minor_changes:
- Updated the return message in grafana.grafana.folder module
release_summary: Updated return description and value for grafana.grafana.folder
module
release_date: '2023-02-08'
2.0.0:
changes:
major_changes:
- Added Lint support
- Configs for server, metrics, logs, traces, and integrations
- Installation of the latest version
- Local installations when internet connection is not allowed
- Only download binary to controller once instead of hosts
- Skip install if the agent is already installed and the version is the same
as the requested version
- Support for Grafana Agent Flow
- Validation of variables
release_summary: Updated Grafana Agent Role
release_date: '2023-03-27'
2.1.0:
changes:
major_changes:
- Addition of Grafana Server role by @gardar
- Configurable agent user groups by @NormanJS
- Grafana Plugins support on-prem Grafana installation by @ishanjainn
- Updated Service for flow mode by @bentonam
minor_changes:
- Ability to configure date format in grafana server role by @RomainMou
- Avoid using shell for fetching latest version in Grafana Agent Role by @gardar
- Fix for invalid yaml with datasources list enclosed in quotes by @elkozmon
- Remove agent installation custom check by @VLZZZ
- Remove explicit user creation check by @v-zhuravlev
release_summary: Add Grafana Server role and plugins support on-prem Grafana
release_date: '2023-06-26'
2.1.1:
changes:
minor_changes:
- Update Download tasks in Grafana Agent Role
release_summary: Update Download tasks in Grafana Agent Role
release_date: '2023-06-26'
2.1.2:
changes:
minor_changes:
- Fix Deleting datasources
- Fix alert_notification_policy failing on fresh instance
- Making Deleting folders idempotent
- Remove trailing slash automatically from grafana_url
release_summary: Idempotency Updates and minor api_url fixes
release_date: '2023-06-27'
2.1.3:
changes:
minor_changes:
- indentation and Lint fixes to modules
release_summary: Update modules to fix failing Sanity Tests
release_date: '2023-06-27'
2.1.4:
changes:
minor_changes:
- Datasource test updates and minor fixes
release_summary: Update Datasource Tests and minor fixes
release_date: '2023-06-27'
2.1.5:
changes:
minor_changes:
- Add Grafana Agent Version and CPU Arch to Downloaded ZIP in Grafana Agent
Role
- Move _grafana_agent_base_download_url from /vars to /defaults in Grafana Agent
Role
      release_summary: Update Grafana Agent Download variable and ZIP file
release_date: '2023-08-10'
2.1.6:
changes:
minor_changes:
- Add overrides.conf with CAP_NET_BIND_SERVICE for grafana-server unit
- Fix Grafana Dashboard Import for Grafana Role
- Make grafana_agent Idempotent
- Provisioning errors in YAML
- Use new standard to configure Grafana APT source for Grafana Role
release_summary: Grafana and Grafana Agent role updates
release_date: '2023-09-11'
2.1.7:
changes:
minor_changes:
- YAML Fixes
release_summary: YAML Fixes
release_date: '2023-09-11'
2.1.8:
changes:
minor_changes:
- Fix grafana dashboard import in Grafana Role
release_summary: Fix grafana dashboard import in Grafana Role
release_date: '2023-09-12'
2.1.9:
changes:
minor_changes:
- Add check for Curl and failure step if Agent Version is not retrieved
- Bump cryptography from 39.0.2 to 41.0.3
- Bump semver from 5.7.1 to 5.7.2
- Bump word-wrap from 1.2.3 to 1.2.5
- Create local dashboard directory in check mode
- Update CI Testing
- Update Cloud Stack Module failures
release_summary: Security Updates and Grafana Agent Version failure fixes
release_date: '2023-09-19'
2.2.0:
changes:
minor_changes:
      - Use 'ansible_system' env variable to detect os type in Grafana Agent Role
      - Change Grafana Agent Wal and Positions Directory in Grafana Agent Role
release_summary: Grafana Agent Role Updates
release_date: '2023-09-20'
2.2.1:
changes:
minor_changes:
- Allow alert resource provisioning in Grafana Role
release_summary: Allow alert resource provisioning in Grafana Role
release_date: '2023-09-27'
2.2.2:
changes:
minor_changes:
- Bump cryptography from 41.0.3 to 41.0.4
- Create missing notification directory in Grafana Role
- Remove check_mode from create local directory task in Grafana Role
release_summary: Grafana Role bug fixes and security updates
release_date: '2023-09-29'
2.2.3:
changes:
minor_changes:
- Remove dependency on local-fs.target from Grafana Agent role
release_summary: Remove dependency on local-fs.target from Grafana Agent role
release_date: '2023-10-05'
2.2.4:
changes:
minor_changes:
- Bump cryptography from 41.0.4 to 41.0.6 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/126
- Drop curl check by @v-zhuravlev in https://github.com/grafana/grafana-ansible-collection/pull/120
- Fix check mode for grafana role by @Boschung-Mecatronic-AG-Infrastructure
in https://github.com/grafana/grafana-ansible-collection/pull/125
- Fix check mode in Grafana Agent by @AmandaCameron in https://github.com/grafana/grafana-ansible-collection/pull/124
- Update tags in README by @ishanjainn in https://github.com/grafana/grafana-ansible-collection/pull/121
release_summary: Grafana and Agent Role bug fixes and security updates
release_date: '2023-12-08'
2.2.5:
changes:
minor_changes:
- Add 'run_once' to download&unzip tasks by @v-zhuravlev in https://github.com/grafana/grafana-ansible-collection/pull/136
- Adding `oauth_allow_insecure_email_lookup` to fix oauth user sync error by
@hypery2k in https://github.com/grafana/grafana-ansible-collection/pull/132
- Bump ansible-core from 2.15.4 to 2.15.8 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/137
- Bump ansible-lint from 6.13.1 to 6.14.3 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/139
- Bump ansible-lint from 6.14.3 to 6.22.2 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/142
- Bump ansible-lint from 6.22.2 to 24.2.0 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/150
- Bump jinja2 from 3.1.2 to 3.1.3 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/129
- Bump pylint from 2.16.2 to 3.0.3 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/141
- Bump yamllint from 1.29.0 to 1.33.0 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/140
- Bump yamllint from 1.29.0 to 1.33.0 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/143
- Bump yamllint from 1.33.0 to 1.34.0 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/151
- Change handler to systemd by @v-zhuravlev in https://github.com/grafana/grafana-ansible-collection/pull/135
- Fix links in grafana_agent/defaults/main.yaml by @PabloCastellano in https://github.com/grafana/grafana-ansible-collection/pull/134
- Topic/grafana agent idempotency by @ohdearaugustin in https://github.com/grafana/grafana-ansible-collection/pull/147
release_summary: Grafana and Agent Role bug fixes and security updates
release_date: '2024-02-13'
3.0.0:
changes:
major_changes:
- Add an Ansible role for OpenTelemetry Collector by @ishanjainn in https://github.com/grafana/grafana-ansible-collection/pull/138
minor_changes:
- Bump pylint from 3.0.3 to 3.1.0 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/158
- Bump pylint from 3.0.3 to 3.1.0 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/161
- Bump the pip group across 1 directories with 1 update by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/156
- Bump yamllint from 1.33.0 to 1.35.1 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/155
- Bump yamllint from 1.33.0 to 1.35.1 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/159
- ExecStartPre and EnvironmentFile settings to system unit file by @fabiiw05
in https://github.com/grafana/grafana-ansible-collection/pull/157
- datasources url parameter fix by @dergudzon in https://github.com/grafana/grafana-ansible-collection/pull/162
release_date: '2024-03-12'
4.0.0:
changes:
major_changes:
- Add an Ansible role for Grafana Alloy by @ishanjainn in https://github.com/grafana/grafana-ansible-collection/pull/169
minor_changes:
- Bump ansible-lint from 24.2.0 to 24.2.1 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/164
- Update description to match module by @brmurphy in https://github.com/grafana/grafana-ansible-collection/pull/179
- Clarify grafana-server configuration in README by @VGerris in https://github.com/grafana/grafana-ansible-collection/pull/177
- Bump ansible-lint from 24.2.0 to 24.2.1 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/168
- Bump black from 24.1.1 to 24.3.0 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/165
- Apply correct uid + gid for imported dashboards by @hypery2k in https://github.com/grafana/grafana-ansible-collection/pull/167
release_date: '2024-04-10'
5.0.0:
changes:
major_changes:
- Add Grafana Mimir role by @GVengelen in https://github.com/grafana/grafana-ansible-collection/pull/183
- Add Grafana Loki role by @voidquark in https://github.com/grafana/grafana-ansible-collection/pull/188
release_date: '2024-04-29'
5.1.0:
changes:
major_changes:
- Uninstall Step for Loki and Mimir by @voidquark in https://github.com/grafana/grafana-ansible-collection/pull/193
release_date: '2024-05-07'
5.2.0:
changes:
major_changes:
- Bump ansible-lint from 24.2.2 to 24.2.3 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/195
- Add promtail role by @voidquark in https://github.com/grafana/grafana-ansible-collection/pull/197
- Add a new config part to configure KeyCloak based auth by @he0s in https://github.com/grafana/grafana-ansible-collection/pull/191
release_date: '2024-05-13'
5.3.0:
changes:
major_changes:
- Add support for configuring feature_toggles in grafana role by @LexVar in https://github.com/grafana/grafana-ansible-collection/pull/173
- Bump pylint from 3.1.0 to 3.1.1 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/200
- Add a config check before restarting mimir by @panfantastic in https://github.com/grafana/grafana-ansible-collection/pull/198
- Bump pylint from 3.1.1 to 3.2.2 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/208
- Bump ansible-lint from 24.2.3 to 24.5.0 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/207
- Fix env file location by @v-zhuravlev in https://github.com/grafana/grafana-ansible-collection/pull/211
- Support adding alloy user to extra groups by @v-zhuravlev in https://github.com/grafana/grafana-ansible-collection/pull/212
- Backport post-setup healthcheck from agent to alloy by @v-zhuravlev in https://github.com/grafana/grafana-ansible-collection/pull/213
- readme styling & language improvements by @tigattack in https://github.com/grafana/grafana-ansible-collection/pull/214
- Bump ansible-lint from 24.5.0 to 24.6.0 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/216
- Bump pylint from 3.2.2 to 3.2.3 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/217
- Bump braces from 3.0.2 to 3.0.3 in the npm_and_yarn group across 1 directory by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/218
- Change from config.river to config.alloy by @cardasac in https://github.com/grafana/grafana-ansible-collection/pull/225
- Updated result.json['message'] to result.json()['message'] by @CPreun in https://github.com/grafana/grafana-ansible-collection/pull/223
- Bump pylint from 3.2.3 to 3.2.5 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/234
- Fix Grafana Configuration for Unified and Legacy Alerting Based on Version by @voidquark in https://github.com/grafana/grafana-ansible-collection/pull/215
release_date: '2024-07-12'
5.4.0:
changes:
major_changes:
- Use a variable to control uninstall behavior instead of tags by @dobbi84 in https://github.com/grafana/grafana-ansible-collection/pull/253
release_date: '2024-08-09'
5.4.1:
changes:
major_changes:
- Updated promtail arch map for aarch64 matching by @gianmarco-mameli in https://github.com/grafana/grafana-ansible-collection/pull/257
release_date: '2024-08-13'
5.5.0:
changes:
major_changes:
- mimir molecule should use ansible core 2.16 by @GVengelen in https://github.com/grafana/grafana-ansible-collection/pull/254
- add support for extra args by @harryfinbow in https://github.com/grafana/grafana-ansible-collection/pull/259
release_date: '2024-08-16'
5.5.1:
changes:
bugfixes:
- 'Add check_mode: false to Loki "Scrape GitHub" Task by @winsmith in https://github.com/grafana/grafana-ansible-collection/pull/262'
release_date: '2024-09-13'
5.6.0:
changes:
major_changes:
- Update Alloy variables to use the `grafana_alloy_` namespace so they are unique by @Aethylred in https://github.com/grafana/grafana-ansible-collection/pull/209
- Allow alloy_user_groups variable again by @pjezek in https://github.com/grafana/grafana-ansible-collection/pull/276
- Update README.md by @aioue in https://github.com/grafana/grafana-ansible-collection/pull/275
- Update main.yml by @aioue in https://github.com/grafana/grafana-ansible-collection/pull/274
- Update README.md by @aioue in https://github.com/grafana/grafana-ansible-collection/pull/272
- Ensure check-mode works for otel collector by @pieterlexis-tomtom in https://github.com/grafana/grafana-ansible-collection/pull/264
- Bump pylint from 3.2.5 to 3.3.1 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/273
- Bump ansible-lint from 24.6.0 to 24.9.2 by @dependabot in https://github.com/grafana/grafana-ansible-collection/pull/270
- Alloy Role Improvements by @voidquark in https://github.com/grafana/grafana-ansible-collection/pull/281
- Fix message argument of dashboard task by @Nemental in https://github.com/grafana/grafana-ansible-collection/pull/256
- add grafana_plugins_ops to defaults and docs by @weakcamel in https://github.com/grafana/grafana-ansible-collection/pull/251
- fix ansible-lint warnings on Forbidden implicit octal value "0640" by @copolycube in https://github.com/grafana/grafana-ansible-collection/pull/279
- add option to populate google_analytics_4_id value by @copolycube in https://github.com/grafana/grafana-ansible-collection/pull/249
- Adding "distributor" section support to mimir config file by @HamzaKhait in https://github.com/grafana/grafana-ansible-collection/pull/247
release_date: '2024-10-21'
5.7.0:
changes:
major_changes:
- Fix 'dict object' has no attribute 'path' when running with --check by @JMLX42 in https://github.com/grafana/grafana-ansible-collection/pull/283
- Ability to set custom directory path for \*.alloy config files by @voidquark in https://github.com/grafana/grafana-ansible-collection/pull/294
- grafana.ini yaml syntax by @intermittentnrg in https://github.com/grafana/grafana-ansible-collection/pull/232
- Update grafana template by @santilococo in https://github.com/grafana/grafana-ansible-collection/pull/300
- Add tests and support version latest by @pieterlexis-tomtom in https://github.com/grafana/grafana-ansible-collection/pull/299
- add loki bloom support by @voidquark in https://github.com/grafana/grafana-ansible-collection/pull/298
release_date: '2024-12-05'
6.0.0:
changes:
major_changes:
- use ansible_facts instead of ansible_* variables by @kleini in https://github.com/grafana/grafana-ansible-collection/pull/296
- Fix the markdown code fences for install command by @benmatselby in https://github.com/grafana/grafana-ansible-collection/pull/306
- Grafana fix facts in main.yml by @voidquark in https://github.com/grafana/grafana-ansible-collection/pull/315
- add catalog-info file for internal dev catalog by @theSuess in https://github.com/grafana/grafana-ansible-collection/pull/317
- Fix sectionless items edge case by @santilococo in https://github.com/grafana/grafana-ansible-collection/pull/303
- Fix loki_operational_config section not getting rendered in config.yml by @olegkaspersky in https://github.com/grafana/grafana-ansible-collection/pull/330
- Fix tags Inherit default vars by @MJurayev in https://github.com/grafana/grafana-ansible-collection/pull/341
- add publish step to GitHub Actions workflow for Ansible Galaxy by @thelooter in https://github.com/grafana/grafana-ansible-collection/pull/340
- force temporary directory even in check mode for dashboards.yml by @cmehat in https://github.com/grafana/grafana-ansible-collection/pull/339
- Make systemd create /var/lib/otel-collector by @pieterlexis-tomtom in https://github.com/grafana/grafana-ansible-collection/pull/336
- Validate config by @pieterlexis-tomtom in https://github.com/grafana/grafana-ansible-collection/pull/327
- Add foldersFromFilesStructure option by @root-expert in https://github.com/grafana/grafana-ansible-collection/pull/326
- Do not log grafana.ini contents when setting facts by @root-expert in https://github.com/grafana/grafana-ansible-collection/pull/325
- Add tempo role by @CSTDev in https://github.com/grafana/grafana-ansible-collection/pull/323
- Make dashboard imports more flexible by @torfbolt in https://github.com/grafana/grafana-ansible-collection/pull/308
- integrate sles legacy init-script support by @floerica in https://github.com/grafana/grafana-ansible-collection/pull/184
- add user module to create/update/delete grafana users by @mvalois in https://github.com/grafana/grafana-ansible-collection/pull/178
- management of the config.river with the conversion of the config.yaml by @lbrule in https://github.com/grafana/grafana-ansible-collection/pull/149
release_date: '2025-04-24'
6.0.1:
changes:
minor_changes:
- Remove Node modules from Ansible Collection build
release_date: '2025-05-06'
6.0.2:
changes:
major_changes:
- Update when statement to test for dashboard files found by @hal58th in https://github.com/grafana/grafana-ansible-collection/pull/363
- Fix Mimir URL verify task by @parcimonic in https://github.com/grafana/grafana-ansible-collection/pull/358
- properly validate config by @pieterlexis-tomtom in https://github.com/grafana/grafana-ansible-collection/pull/354
- alloy_readiness_check_use_https by @piotr-g in https://github.com/grafana/grafana-ansible-collection/pull/359
- use ansible_facts instead of variables by @kleini in https://github.com/grafana/grafana-ansible-collection/pull/365
- Use become false in find task by @santilococo in https://github.com/grafana/grafana-ansible-collection/pull/368
- Don't use a proxy when doing Alloy readiness check by @benoitc-croesus in https://github.com/grafana/grafana-ansible-collection/pull/375
- mark configuration deployment task with `no_log` by @kkantonop in https://github.com/grafana/grafana-ansible-collection/pull/380
- Don't override defaults by @56quarters in https://github.com/grafana/grafana-ansible-collection/pull/382
- Add delete protection by @KucicM in https://github.com/grafana/grafana-ansible-collection/pull/381
- declare collection dependencies by @kleini in https://github.com/grafana/grafana-ansible-collection/pull/386
- Fix some regression introduced by v6 by @voidquark in https://github.com/grafana/grafana-ansible-collection/pull/376
- template ingester and querier section by @Gufderald in https://github.com/grafana/grafana-ansible-collection/pull/371
- ensure alerting provisioning directory exists by @derhuerst in https://github.com/grafana/grafana-ansible-collection/pull/364
release_date: '2025-06-23'
6.0.3:
changes:
major_changes:
- declare collection dependencies by @ishanjainn in https://github.com/grafana/grafana-ansible-collection/pull/390
- improve mimir/alloy examples playbook by @smCloudInTheSky in https://github.com/grafana/grafana-ansible-collection/pull/369
      - Changes for issue #383. Added alloy_github_api_url variable. by @ILikePhysics in https://github.com/grafana/grafana-ansible-collection/pull/393
- store APT key with .asc extension by @derhuerst in https://github.com/grafana/grafana-ansible-collection/pull/394
- declare collection dependencies by @kleini in https://github.com/grafana/grafana-ansible-collection/pull/392
- Bump ansible-lint from 24.9.2 to 25.6.1 by @dependabot[bot] in https://github.com/grafana/grafana-ansible-collection/pull/391
- Bump brace-expansion from 1.1.11 to 1.1.12 in the npm_and_yarn group across 1 directory by @dependabot[bot] in https://github.com/grafana/grafana-ansible-collection/pull/396
- ensure IP assert returns boolean result by @aardbol in https://github.com/grafana/grafana-ansible-collection/pull/398
- Update Mimir README.md by @Gufderald in https://github.com/grafana/grafana-ansible-collection/pull/397
release_date: '2025-07-29'
6.0.4:
changes:
major_changes:
- Fixes to foldersFromFilesStructure option by @root-expert in https://github.com/grafana/grafana-ansible-collection/pull/351
- add macOS support to alloy role by @l50 in https://github.com/grafana/grafana-ansible-collection/pull/418
- replace None with [] for safe length checks by @voidquark in https://github.com/grafana/grafana-ansible-collection/pull/426
- Add SUSE support to Alloy role by @pozsa in https://github.com/grafana/grafana-ansible-collection/pull/423
- Migrate RedHat install to ansible.builtin.package by @r65535 in https://github.com/grafana/grafana-ansible-collection/pull/431
release_date: '2025-09-27'
6.0.5:
changes:
major_changes:
- Fixes issue by @digiserg in https://github.com/grafana/grafana-ansible-collection/pull/421
- use deb822 for newer debian versions by @Lukas-Heindl in https://github.com/grafana/grafana-ansible-collection/pull/440
- fix datasource documentation by @jeremad in https://github.com/grafana/grafana-ansible-collection/pull/437
- update catalog info by @Duologic in https://github.com/grafana/grafana-ansible-collection/pull/434
- Fix Mimir config file validation task by @Windos in https://github.com/grafana/grafana-ansible-collection/pull/428
- Import custom dashboards only when directory exists by @mahendrapaipuri in https://github.com/grafana/grafana-ansible-collection/pull/430
- fix mimir_download_url_deb & mimir_download_url_rpm by @germebl in https://github.com/grafana/grafana-ansible-collection/pull/400
- Use credentials from grafana_ini when importing dashboards by @root-expert in https://github.com/grafana/grafana-ansible-collection/pull/402
- Fallback to empty dict in case grafana_ini is undefined by @root-expert in https://github.com/grafana/grafana-ansible-collection/pull/403
- do not skip scrape latest github version even in check_mode by @cmehat in https://github.com/grafana/grafana-ansible-collection/pull/408
- Updated YUM repo urls from `packages.grafana.com` to `rpm.grafana.com` by @DejfCold in https://github.com/grafana/grafana-ansible-collection/pull/414
release_date: '2025-10-11'
6.0.6:
changes:
major_changes:
- Restore default listen address and port in Mimir by @56quarters in https://github.com/grafana/grafana-ansible-collection/pull/456
- fix broken Grafana apt repository addition by @kleini in https://github.com/grafana/grafana-ansible-collection/pull/454
release_date: '2025-10-22'

View file

@ -0,0 +1,33 @@
---
changelog_filename_template: ../CHANGELOG.rst
changelog_filename_version_depth: 0
changes_file: changelog.yaml
changes_format: combined
ignore_other_fragment_extensions: true
keep_fragments: false
mention_ancestor: true
new_plugins_after_name: removed_features
notesdir: fragments
prelude_section_name: release_summary
prelude_section_title: Release Summary
sanitize_changelog: true
sections:
- - major_changes
- Major Changes
- - minor_changes
- Minor Changes
- - breaking_changes
- Breaking Changes / Porting Guide
- - deprecated_features
- Deprecated Features
- - removed_features
- Removed Features (previously deprecated)
- - security_fixes
- Security Fixes
- - bugfixes
- Bugfixes
- - known_issues
- Known Issues
title: Grafana.Grafana
trivial_section_name: trivial
use_fqcn: true

View file

@ -0,0 +1,10 @@
---
- hosts: all
become: true
# pre_tasks happen before roles are executed / applied
pre_tasks: []
# roles are ran after pre_tasks
roles:
- grafana_agent
# tasks are ran after roles
tasks: []

View file

@ -0,0 +1,12 @@
- hosts: all
tasks:
- name: Install Grafana Agent
ansible.builtin.include_role:
name: grafana.grafana.grafana_agent
vars:
grafana_agent_mode: flow
# Change config file on the host to .river
grafana_agent_config_filename: config.river
# Remove default flags
grafana_agent_flags_extra:
server.http.listen-addr: '0.0.0.0:12345'

View file

@ -0,0 +1,95 @@
---
- hosts: all
become: true
vars:
grafana_agent_metrics_config:
global:
external_labels:
datacenter: primary
cluster: my-cluster
instance: "{{ ansible_host }}"
remote_write:
- url: https://prometheus-<your region>.grafana.net/api/prom/push
basic_auth:
username: "1234567" # your username / instanceID
password: "..." # your grafana.com token
configs:
- name: local
scrape_configs:
            # scrape an application on the localhost
- job_name: my-app
metrics_path: /metrics
static_configs:
- targets:
- localhost:8080
relabel_configs: []
metric_relabel_configs: []
grafana_agent_logs_config:
global:
clients:
- url: https://logs-<your region>.grafana.net/loki/api/v1/push
basic_auth:
username: "1234567" # your username / instanceID
password: "..." # your grafana.com token
configs:
- name: local
positions:
filename: /tmp/positions.yaml
target_config:
sync_period: 10s
scrape_configs:
# scrape all of the log files in /var/log on the localhost
- job_name: log-files
static_configs:
- targets:
- localhost
labels:
job: var-logs
instance: "{{ ansible_host }}"
__path__: /var/log/*.log
# scrape all of the journal logs on localhost
- job_name: systemd-journal
journal:
max_age: 12h
labels:
job: systemd-journal
relabel_configs:
- source_labels:
- __journal__systemd_unit
target_label: systemd_unit
- source_labels:
- __journal__hostname
target_label: hostname
- source_labels:
- __journal_syslog_identifier
target_label: syslog_identifier
- source_labels:
- __journal__pid
target_label: pid
- source_labels:
- __journal__uid
target_label: uid
- source_labels:
- __journal__transport
target_label: transport
grafana_agent_integrations_config:
scrape_integrations: true
# get metrics about the agent
agent:
enabled: true
relabel_configs: []
metric_relabel_configs: []
# get node exporter metrics
node_exporter:
enabled: true
relabel_configs: []
metric_relabel_configs: []
# pre_tasks happen before roles are executed / applied
pre_tasks: []
# roles are ran after pre_tasks
roles:
- grafana_agent
# tasks are ran after roles
tasks: []

View file

@ -0,0 +1,17 @@
---
- name: Deploy alloy
hosts: all
become: true
roles:
- role: grafana.grafana.alloy
vars:
alloy_config: |
prometheus.scrape "default" {
targets = [{"__address__" = "127.0.0.1:12345"}]
forward_to = [prometheus.remote_write.prom.receiver]
}
prometheus.remote_write "prom" {
endpoint {
url = "http://mimir:9009/api/v1/push"
}
}

View file

@ -0,0 +1,32 @@
# Ansible configuration used by the example playbooks in this directory.
[defaults]
# (string) Sets the macro for the 'ansible_managed' variable available for :ref:`ansible_collections.ansible.builtin.template_module` and :ref:`ansible_collections.ansible.windows.win_template_module`. This is only relevant for those two modules.
ansible_managed="Ansible managed file. Be wary of possible overwrites."
# (boolean) Toggle to control the showing of deprecation warnings
deprecation_warnings=False
# (boolean) Set this to "False" if you want to avoid host key checking by the underlying tools Ansible uses to connect to the host
host_key_checking=False
# (pathlist) Comma separated list of Ansible inventory sources
inventory=hosts
# (pathspec) Colon separated paths in which Ansible will search for Modules.
library=../plugins/modules
# (path) File to which Ansible will log on the controller. When empty logging is disabled.
log_path=./ansible.log
# (pathspec) Colon separated paths in which Ansible will search for Roles.
roles_path=../roles

# SSH-transport tuning for the playbook runs.
[ssh_connection]
# ssh arguments to use
# Leaving off ControlPersist will result in poor performance, so use
# paramiko on older platforms rather than removing it
ssh_args = -o ControlMaster=auto -o ControlPersist=60s
# if True, make ansible use scp if the connection type is ssh
# (default is sftp)
# NOTE(review): scp_if_ssh is deprecated in newer ansible-core releases — TODO confirm against the targeted version.
scp_if_ssh = True

View file

@ -0,0 +1,6 @@
---
# Deploy Loki with the role's default configuration (no variable overrides).
- name: Deploy Loki using the default configuration
  hosts: all
  become: true
  roles:
    - role: grafana.grafana.loki

View file

@ -0,0 +1,70 @@
---
# Deploy a single-node Loki that keeps its TSDB index, chunks, WAL and rules
# on the local filesystem, with one year (8760h) of retention and an example
# ruler alert on failed SSH logins.
- name: Deploy Loki using the local filesystem
  hosts: all
  become: true
  roles:
    - role: grafana.grafana.loki
      vars:
        loki_querier:
          max_concurrent: 16
          engine:
            max_look_back_period: 8760h
        # Local-filesystem object store plus TSDB shipper directories.
        loki_storage_config:
          tsdb_shipper:
            active_index_directory: "{{ loki_working_path }}/tsdb-index"
            cache_location: "{{ loki_working_path }}/tsdb-cache"
          filesystem:
            directory: "{{ loki_working_path }}/chunks"
        # Single-replica ingester with an in-memory ring and WAL enabled.
        loki_ingester:
          wal:
            enabled: true
            dir: "{{ loki_working_path }}/wal"
          lifecycler:
            address: 127.0.0.1
            ring:
              kvstore:
                store: inmemory
              replication_factor: 1
            final_sleep: 0s
          chunk_idle_period: 1h
          max_chunk_age: 2h
          chunk_target_size: 1048576
          query_store_max_look_back_period: 8760h
        loki_limits_config:
          split_queries_by_interval: 0
          reject_old_samples: true
          reject_old_samples_max_age: 168h
          max_query_length: 0
          max_query_series: 50000
          retention_period: 8760h
          allow_structured_metadata: false
          max_query_lookback: 8760h
        # Compactor enforces the retention policy configured above.
        loki_compactor:
          working_directory: "{{ loki_working_path }}/compactor"
          compaction_interval: 10m
          retention_enabled: true
          retention_delete_delay: 2h
          retention_delete_worker_count: 150
          delete_request_store: filesystem
        loki_common:
          path_prefix: "{{ loki_working_path }}"
          storage:
            filesystem:
              rules_directory: "{{ loki_working_path }}/rules"
          replication_factor: 1
          ring:
            instance_addr: 127.0.0.1
            kvstore:
              store: inmemory
        # Example ruler alert: fire when sshd logs more than 6 failed/invalid
        # login attempts within 15 minutes.
        loki_ruler_alerts:
          - name: Logs.sshd
            rules:
              - alert: SshLoginFailed
                expr: |
                  count_over_time({job=~"secure"} |="sshd[" |~": Failed|: Invalid|: Connection closed by authenticating user" | __error__="" [15m]) > 6
                for: 0m
                labels:
                  severity: critical
                annotations:
                  summary: "{% raw %}SSH authentication failure (instance {{ $labels.instance }}).{% endraw %}"
                  description: "{% raw %}Increase of SSH authentication failures in last 15 minutes\\n VALUE = {{ $value }}{% endraw %}"

View file

@ -0,0 +1,32 @@
---
# Three-node Mimir cluster backed by a MinIO (S3-compatible) blob store.
- name: Install mimir
  hosts: [mimir-1, mimir-2, mimir-3]
  become: true
  tasks:
    - name: Install mimir
      ansible.builtin.include_role:
        name: grafana.grafana.mimir
      vars:
        # Run against minio blob store backed, see readme for local setup or mimir docs for Azure, AWS, etc.
        mimir_storage:
          storage:
            backend: s3
            s3:
              endpoint: localhost:9000
              access_key_id: testtest
              secret_access_key: testtest
              insecure: true
              bucket_name: mimir
        # Blocks storage requires a prefix when using a common object storage bucket.
        mimir_blocks_storage:
          storage_prefix: blocks
          tsdb:
            dir: "{{ mimir_working_path }}/ingester"
        # Use memberlist, a gossip-based protocol, to enable the 3 Mimir replicas to communicate
        mimir_memberlist:
          join_members:
            - mimir-1:7946
            - mimir-2:7946
            - mimir-3:7946

View file

@ -0,0 +1,34 @@
---
# Single-node Mimir backed by an external S3 bucket; credentials come from
# vaulted variables.
- name: Install mimir
  hosts: monitoring-node
  become: true
  tasks:
    - name: Install mimir
      ansible.builtin.include_role:
        name: grafana.grafana.mimir
      vars:
        mimir_storage:
          storage:
            backend: s3
            s3:
              endpoint: "{{ s3_endpoint }}"
              access_key_id: "{{ vault_s3_access }}"
              secret_access_key: "{{ vault_s3_secret }}"
              bucket_name: your-mimir-bucket
        # Blocks storage requires a prefix when using a common object storage bucket.
        mimir_blocks_storage:
          storage_prefix: blocks
          tsdb:
            dir: "{{ mimir_working_path }}/ingester"
        mimir_limits:
          # set metrics retention to 30d
          compactor_blocks_retention_period: 30d
          max_label_names_per_series: 100
        # this setting is required to prevent mimir from attempting
        # to make quorum
        mimir_ingester:
          ring:
            replication_factor: 1

View file

@ -0,0 +1,193 @@
# Scaling OpenTelemetry Collector Deployments Using Grafana Ansible Collection
This guide is focused on scaling the OpenTelemetry Collector deployment across various Linux hosts by leveraging Ansible, to function both as gateways and agents within your observability architecture. Utilizing the OpenTelemetry Collector in this dual capacity enables a robust collection and forwarding of metrics, traces, and logs to analysis and visualization platforms, such as Grafana Cloud.
Here, we outline a strategy for deploying and managing the OpenTelemetry Collector's scalable instances throughout your infrastructure with Ansible, enhancing your overall monitoring strategy and data visualization capabilities in Grafana Cloud.
## Before You Begin
To follow this guide, ensure you have:
- Linux hosts.
- SSH access to each of these Linux hosts.
- Account permissions to install and configure the OpenTelemetry Collector on these hosts.
## Install the Grafana Ansible collection
The [OpenTelemetry Collector role](https://github.com/grafana/grafana-ansible-collection/tree/main/roles/opentelemetry_collector) is available in the Grafana Ansible collection.
To install the Grafana Ansible collection, run this command:
```
ansible-galaxy collection install grafana.grafana
```
## Create an Ansible inventory file
Next, you will set up your hosts and create an inventory file.
1. Create your hosts and add public SSH keys to them.
This example uses eight Linux hosts: two Ubuntu hosts, two CentOS hosts, two Fedora hosts, and two Debian hosts.
1. Create an Ansible inventory file.
The Ansible inventory, which resides in a file named `inventory`, looks similar to this:
```
146.190.208.216 # hostname = ubuntu-01
146.190.208.190 # hostname = ubuntu-02
137.184.155.128 # hostname = centos-01
146.190.216.129 # hostname = centos-02
198.199.82.174 # hostname = debian-01
198.199.77.93 # hostname = debian-02
143.198.182.156 # hostname = fedora-01
143.244.174.246 # hostname = fedora-02
```
> **Note**: If you are copying the above file, remove the comments (#).
1. Create an `ansible.cfg` file within the same directory as `inventory`, with the following values:
```
[defaults]
inventory = inventory # Path to the inventory file
private_key_file = ~/.ssh/id_rsa # Path to my private SSH Key
remote_user=root
```
## Use the OpenTelemetry Collector Ansible Role
Next, you'll define an Ansible playbook to apply your chosen or created OpenTelemetry Collector role across your hosts.
Create a file named `deploy-opentelemetry.yml` in the same directory as your `ansible.cfg` and `inventory`.
```yaml
- name: Install OpenTelemetry Collector
hosts: all
become: true
vars:
grafana_cloud_api_key: <Your Grafana.com API Key> # Example - eyJrIjoiYjI3NjI5MGQxZTcyOTIxYTc0MDgzMGVhNDhlODNhYzA5OTk2Y2U5YiIsIm4iOiJhbnNpYmxldGVzdCIsImlkIjo2NTI5
metrics_username: <prometheus-username> # Example - 825019
logs_username: <loki-username> # Example - 411478
prometheus_url: <prometheus-push-url> # Example - https://prometheus-us-central1.grafana.net/api/prom/push
loki_url: <loki-push-url> # Example - https://logs-prod-017.grafana.net/loki/api/v1/push
tempo_url: <tempo-push-url> # Example - tempo-prod-04-prod-us-east-0.grafana.net:443
traces_username: <tempo-username> # Example - 411478
tasks:
- name: Install OpenTelemetry Collector
ansible.builtin.include_role:
name: grafana.grafana.opentelemetry_collector
vars:
otel_collector_extensions:
basicauth/grafana_cloud_tempo:
# https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/extension/basicauthextension
client_auth:
username: "{{ traces_username }}"
password: "{{ grafana_cloud_api_key }}"
basicauth/grafana_cloud_prometheus:
client_auth:
username: "{{ metrics_username }}"
password: "{{ grafana_cloud_api_key }}"
basicauth/grafana_cloud_loki:
client_auth:
username: "{{ logs_username }}"
password: "{{ grafana_cloud_api_key }}"
otel_collector_receivers:
otlp:
# https://github.com/open-telemetry/opentelemetry-collector/tree/main/receiver/otlpreceiver
protocols:
grpc:
http:
hostmetrics:
# Optional. Host Metrics Receiver added as an example of Infra Monitoring capabilities of the OpenTelemetry Collector
# https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/receiver/hostmetricsreceiver
scrapers:
load:
memory:
otel_collector_processors:
batch:
# https://github.com/open-telemetry/opentelemetry-collector/tree/main/processor/batchprocessor
resourcedetection:
# Enriches telemetry data with resource information from the host
# https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/processor/resourcedetectionprocessor
detectors: ["env", "system"]
override: false
transform/add_resource_attributes_as_metric_attributes:
# https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/processor/transformprocessor
error_mode: ignore
metric_statements:
- context: datapoint
statements:
- set(attributes["deployment.environment"], resource.attributes["deployment.environment"])
- set(attributes["service.version"], resource.attributes["service.version"])
otel_collector_exporters:
otlp/grafana_cloud_traces:
# https://github.com/open-telemetry/opentelemetry-collector/tree/main/exporter/otlpexporter
endpoint: "{{ tempo_url }}"
auth:
authenticator: basicauth/grafana_cloud_tempo
loki/grafana_cloud_logs:
# https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/exporter/lokiexporter
endpoint: "{{ loki_url }}"
auth:
authenticator: basicauth/grafana_cloud_loki
prometheusremotewrite/grafana_cloud_metrics:
# https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/exporter/prometheusremotewriteexporter
endpoint: "{{ prometheus_url }}"
add_metric_suffixes: false
auth:
authenticator: basicauth/grafana_cloud_prometheus
otel_collector_service:
extensions: [basicauth/grafana_cloud_tempo, basicauth/grafana_cloud_prometheus, basicauth/grafana_cloud_loki]
pipelines:
traces:
receivers: [otlp]
processors: [resourcedetection, batch]
exporters: [otlp/grafana_cloud_traces]
metrics:
receivers: [otlp, hostmetrics]
processors: [resourcedetection, transform/add_resource_attributes_as_metric_attributes, batch]
exporters: [prometheusremotewrite/grafana_cloud_metrics]
logs:
receivers: [otlp]
processors: [resourcedetection, batch]
exporters: [loki/grafana_cloud_logs]
```
> **Note:** You'll need to adjust the configuration to match the specific telemetry data you intend to collect and where you plan to forward it. The configuration snippet above is a basic example designed for traces, logs and metrics collection via OTLP and forwarding to Grafana Cloud.
## Running the Ansible Playbook
Deploy the OpenTelemetry Collector across your hosts by executing:
```sh
ansible-playbook deploy-opentelemetry.yml
```
## Verifying Data Ingestion into Grafana Cloud
Once you've deployed the OpenTelemetry Collector and configured it to forward data to Grafana Cloud, you can verify the ingestion:
- Log into your Grafana Cloud instance.
- Navigate to the **Explore** section.
- Select your Grafana Cloud Prometheus data source from the dropdown menu.
- Execute a query to confirm the reception of metrics, e.g., `{instance="ubuntu-01"}` for a specific host's metrics.
## Visualizing Metrics and Logs in Grafana
With data successfully ingested into Grafana Cloud, you can create custom dashboards to visualize the metrics, logs and traces received from your OpenTelemetry Collector. Utilize Grafana's powerful query builder and visualization tools to derive insights from your data effectively.
- Consider creating dashboards that offer a comprehensive overview of your infrastructure's health and performance.
- Utilize Grafana's alerting features to proactively manage and respond to issues identified through the OpenTelemetry data.
This guide simplifies the deployment of the OpenTelemetry Collector across multiple Linux hosts using Ansible and illustrates how to visualize collected telemetry data in Grafana Cloud. Tailor the Ansible roles, OpenTelemetry Collector configurations, and Grafana dashboards to suit your specific monitoring and observability requirements.

View file

@ -0,0 +1,184 @@
# Monitoring multiple Linux hosts with Grafana Agent Role
Monitoring with Grafana Agents across multiple Linux hosts can be difficult.
To make it easier, you can use the Grafana Agent role with the Grafana Ansible collection.
This guide shows how to use the `grafana_agent` Ansible role to deploy and manage Grafana Agents across multiple Linux hosts so you can monitor them in Grafana.
## Before you begin
Before you begin, you should have:
- Linux hosts
- SSH access to the Linux hosts
- Account permissions sufficient to install and use Grafana Agent on the Linux hosts
## Install the Grafana Ansible collection
The [Grafana Agent role](https://github.com/grafana/grafana-ansible-collection/tree/main/roles/grafana_agent) is available in the Grafana Ansible collection as of the 1.1.0 release.
To install the Grafana Ansible collection, run this command:
```
ansible-galaxy collection install grafana.grafana:2.0.0
```
## Create an Ansible inventory file
Next, you will set up your hosts and create an inventory file.
1. Create your hosts and add public SSH keys to them.
This example uses eight Linux hosts: two Ubuntu hosts, two CentOS hosts, two Fedora hosts, and two Debian hosts.
1. Create an Ansible inventory file.
The Ansible inventory, which resides in a file named `inventory`, looks similar to this:
```
146.190.208.216 # hostname = ubuntu-01
146.190.208.190 # hostname = ubuntu-02
137.184.155.128 # hostname = centos-01
146.190.216.129 # hostname = centos-02
198.199.82.174 # hostname = debian-01
198.199.77.93 # hostname = debian-02
143.198.182.156 # hostname = fedora-01
143.244.174.246 # hostname = fedora-02
```
> **Note**: If you are copying the above file, remove the comments (#).
1. Create an `ansible.cfg` file within the same directory as `inventory`, with the following values:
```
[defaults]
inventory = inventory # Path to the inventory file
private_key_file = ~/.ssh/id_rsa # Path to my private SSH Key
remote_user=root
```
## Use the Grafana Agent Ansible role
Next you will create an Ansible playbook that calls the `grafana_agent` role from the `grafana.grafana` Ansible collection.
To use the Grafana Agent Ansible role:
1. Create a file named `deploy-agent.yml` in the same directory as `ansible.cfg` and `inventory` and add the configuration below.
```yaml
- name: Install Grafana Agent
hosts: all
become: true
vars:
grafana_cloud_api_key: <Your Grafana.com API Key> # Example - eyJrIjoiYjI3NjI5MGQxZTcyOTIxYTc0MDgzMGVhNDhlODNhYzA5OTk2Y2U5YiIsIm4iOiJhbnNpYmxldGVzdCIsImlkIjo2NTI5
metrics_username: <prometheus-username> # Example - 825019
logs_username: <loki-username> # Example - 411478
prometheus_url: <prometheus-push-url> # Example - https://prometheus-us-central1.grafana.net/api/prom/push
loki_url: <loki-push-url> # Example - https://logs-prod-017.grafana.net/loki/api/v1/push
tasks:
- name: Install Grafana Agent
ansible.builtin.include_role:
name: grafana.grafana.grafana_agent
vars:
grafana_agent_metrics_config:
configs:
- name: integrations
remote_write:
- basic_auth:
password: "{{ grafana_cloud_api_key }}"
username: "{{ metrics_username }}"
url: "{{ prometheus_url }}"
global:
scrape_interval: 60s
wal_directory: /tmp/grafana-agent-wal
grafana_agent_logs_config:
configs:
- name: default
clients:
- basic_auth:
password: "{{ grafana_cloud_api_key }}"
username: "{{ logs_username }}"
url: "{{ loki_url }}"
positions:
filename: /tmp/positions.yaml
target_config:
sync_period: 10s
scrape_configs:
- job_name: varlogs
static_configs:
- targets: [localhost]
labels:
instance: ${HOSTNAME:-default}
job: varlogs
__path__: /var/log/*log
grafana_agent_integrations_config:
node_exporter:
enabled: true
instance: ${HOSTNAME:-default}
prometheus_remote_write:
- basic_auth:
password: "{{ grafana_cloud_api_key }}"
username: "{{ metrics_username }}"
url: "{{ prometheus_url }}"
```
The playbook calls the `grafana_agent` role from the `grafana.grafana` Ansible collection.
The Agent configuration in this playbook sends metrics and logs from the Linux hosts to your Prometheus and Loki data sources.
Refer to the [Grafana Ansible documentation](https://github.com/grafana/grafana-ansible-collection/tree/main/roles/grafana_agent#role-variables) to understand the other variables you can pass to the `grafana_agent` role.
When deploying the Agent across multiple instances for monitoring them, it is essential that the Agent is able to auto-detect the hostname for ease in monitoring.
Notice that the label `instance` has been set to the value `${HOSTNAME:-default}`, which is substituted by the value of the HOSTNAME environment variable in the Linux host.
To read more about the variable substitution, refer to the Grafana Agent [node_exporter_config](https://grafana.com/docs/agent/latest/configuration/integrations/node-exporter-config/) documentation.
1. To run the playbook, run this command:
```
ansible-playbook deploy-agent.yml
```
> **Note:** You can place the `deploy-agent.yml`, `ansible.cfg` and `inventory` files in different directories based on your needs.
## Check that logs and metrics are being ingested into Prometheus and Loki
Logs and metrics will soon be available in Grafana if your Prometheus and Loki data sources are added.
To test this, use the Explore feature.
Click the Explore icon (compass icon) in the vertical navigation bar.
### Check logs
To check logs:
1. Use the dropdown menu at the top of the page to select your Loki logs data source.
1. In the log browser, run the query `{instance="centos-01"}` where centos-01 is the hostname of one of the Linux hosts.
If you see log lines (shown in the example below), logs are being received.
![Logs](https://grafana.com/static/assets/img/blog/ansible-to-manage-agent1.png)
If no log lines appear, logs are not being collected.
### Check metrics
To check metrics:
1. Use the dropdown menu at the top of the page to select your Prometheus data source.
1. Run the query `{instance="centos-01"}` where centos-01 is the hostname of one of the Linux hosts.
If you see a metrics graph and table (shown in the example below), metrics are being received.
![Metrics](https://grafana.com/static/assets/img/blog/ansible-to-manage-agent2.png)
If no metrics appear, metrics are not being collected.
### View dashboards
Now that you have logs and metrics in Grafana, you can use dashboards to view them.
Here's an example of one of the prebuilt dashboards included with the Linux integration in Grafana Cloud:
![Dashboard](https://grafana.com/static/assets/img/blog/ansible-to-manage-agent3.png)
Using the **Instance** dropdown in the dashboard, you can select from the hostnames where you deployed Grafana Agent and start monitoring them.

View file

@ -0,0 +1,43 @@
---
# Standalone OpenTelemetry Collector: receive OTLP over gRPC/HTTP, batch, and
# forward traces, metrics and logs to a downstream OTLP endpoint.
- name: Install OpenTelemetry Collector
  hosts: all
  become: true
  tasks:
    - name: Install OpenTelemetry Collector
      ansible.builtin.include_role:
        name: grafana.grafana.opentelemetry_collector
      vars:
        otel_collector_receivers:
          otlp:
            protocols:
              grpc:
                endpoint: 0.0.0.0:4317
              http:
                endpoint: 0.0.0.0:4318
        otel_collector_processors:
          # Bare key: the batch processor is enabled with its defaults.
          batch:
        otel_collector_exporters:
          otlp:
            endpoint: otelcol:4317
        otel_collector_extensions:
          health_check:
          pprof:
          zpages:
        otel_collector_service:
          extensions: [health_check, pprof, zpages]
          pipelines:
            traces:
              receivers: [otlp]
              processors: [batch]
              exporters: [otlp]
            metrics:
              receivers: [otlp]
              processors: [batch]
              exporters: [otlp]
            logs:
              receivers: [otlp]
              processors: [batch]
              exporters: [otlp]

View file

@ -0,0 +1,24 @@
---
# Deploy Promtail to tail local log files (system messages and nginx) and push
# them to a Loki instance running on the same host.
- name: Deploy Promtail to ship logs to the local Loki instance
  hosts: all
  become: true
  roles:
    - role: grafana.grafana.promtail
      vars:
        promtail_clients:
          - url: http://localhost:3100/loki/api/v1/push
        promtail_scrape_configs:
          - job_name: system
            static_configs:
              # System log stream, labelled with the host's FQDN.
              - targets:
                  - localhost
                labels:
                  job: messages
                  instance: "{{ ansible_facts['fqdn'] }}"
                  __path__: /var/log/messages
              # Nginx access/error logs from the default log directory.
              - targets:
                  - localhost
                labels:
                  job: nginx
                  instance: "{{ ansible_facts['fqdn'] }}"
                  __path__: /var/log/nginx/*.log

View file

@ -0,0 +1,2 @@
---
# ansible-core versions this collection is tested against.
requires_ansible: ">=2.12.0,<3.0.0"

View file

@ -0,0 +1,26 @@
{
"name": "grafana-ansible-collection",
"version": "2.1.4",
"description": "",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"repository": {
"type": "git",
"url": "git+https://github.com/grafana/grafana-ansible-collection.git"
},
"author": "",
"license": "ISC",
"bugs": {
"url": "https://github.com/grafana/grafana-ansible-collection/issues"
},
"homepage": "https://github.com/grafana/grafana-ansible-collection#readme",
"dependencies": {
"editorconfig-checker": "^5.0.1",
"markdownlint-cli2": "^0.6.0",
"textlint": "^12.5.1",
"textlint-rule-common-misspellings": "^1.0.1",
"textlint-rule-no-todo": "^2.0.1",
"textlint-rule-terminology": "^3.0.4"
}
}

View file

@ -0,0 +1,257 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2021, Ishan Jain (@ishanjainn)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
DOCUMENTATION = '''
---
module: alert_contact_point
author:
- Ishan Jain (@ishanjainn)
version_added: "0.0.1"
short_description: Manage Alerting Contact points in Grafana
description:
- Create, Update and delete Contact points using Ansible.
requirements: [ "requests >= 1.0.0" ]
notes:
- Does not support C(check_mode).
options:
name:
description:
- Sets the name of the contact point.
type: str
required: true
uid:
description:
- Sets the UID of the Contact point.
type: str
required: true
type:
description:
- Sets Contact point type.
type: str
required: true
settings:
description:
- Sets Contact point settings.
type: dict
required: true
disableResolveMessage:
description:
- When set to C(true), Disables the resolve message [OK] that is sent when alerting state returns to C(false).
type: bool
default: false
grafana_api_key:
description:
- Grafana API Key used to authenticate with Grafana.
type: str
required : true
grafana_url:
description:
- URL of the Grafana instance.
type: str
required: true
state:
description:
- State for the Grafana Alert Contact Point.
choices: [ present, absent ]
type: str
default: present
'''
EXAMPLES = '''
- name: Create/Update Alerting contact point
grafana.grafana.alert_contact_point:
name: ops-email
uid: opsemail
type: email
settings:
addresses: "ops@mydomain.com,devs@mydomain.com"
grafana_url: "{{ grafana_url }}"
grafana_api_key: "{{ grafana_api_key }}"
state: present
- name: Delete Alerting contact point
grafana.grafana.alert_contact_point:
name: ops-email
uid: opsemail
type: email
settings:
addresses: "ops@mydomain.com,devs@mydomain.com"
grafana_url: "{{ grafana_url }}"
grafana_api_key: "{{ grafana_api_key }}"
state: absent
'''
RETURN = r'''
output:
  description: Dict object containing Contact point information.
returned: On success
type: dict
contains:
disableResolveMessage:
description: When set to True, Disables the resolve message [OK] that is sent when alerting state returns to false.
returned: state is present and on success
type: bool
sample: false
name:
description: The name for the contact point.
returned: state is present and on success
type: str
sample: ops-email
settings:
description: Contains contact point settings.
returned: state is present and on success
type: dict
sample: {
addresses: "ops@mydomain.com,devs@mydomain.com"
}
uid:
description: The UID for the contact point.
returned: state is present and on success
type: str
sample: opsemail
type:
description: The type of contact point.
returned: state is present and on success
type: str
sample: email
'''
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
try:
import requests
HAS_REQUESTS = True
except ImportError:
HAS_REQUESTS = False
__metaclass__ = type
def present_alert_contact_point(module):
    """Create or update a Grafana alerting contact point.

    Returns a ``(is_error, has_changed, result)`` tuple consumed by ``main()``:
    ``result`` is the contact point dict on success, or a dict with ``status``
    and ``response`` keys on API failure.
    """
    body = {
        'Name': module.params['name'],
        'UID': module.params['uid'],
        'type': module.params['type'],
        'settings': module.params['settings'],
        'DisableResolveMessage': module.params['disableResolveMessage']
    }
    # Normalise the base URL so path concatenation below stays well-formed.
    if module.params['grafana_url'][-1] == '/':
        module.params['grafana_url'] = module.params['grafana_url'][:-1]
    api_url = module.params['grafana_url'] + '/api/v1/provisioning/contact-points'
    headers = {
        'Authorization': 'Bearer ' + module.params['grafana_api_key'],
        'User-Agent': 'grafana-ansible-collection',
    }
    result = requests.post(api_url, json=body, headers=headers)
    if result.status_code == 202:
        # Contact point was created.
        return False, True, result.json()
    if result.status_code == 500:
        # Grafana answers 500 when the UID already exists; compare the existing
        # contact point against the requested configuration.
        result = requests.get(api_url, headers=headers)
        for contact_point in result.json():
            if contact_point['uid'] != module.params['uid']:
                continue
            if (contact_point['name'] == module.params['name'] and contact_point['type'] == module.params['type'] and contact_point['settings']
                    and contact_point['settings'] == module.params['settings']
                    and contact_point['disableResolveMessage'] == module.params['disableResolveMessage']):
                # Already in the desired state — report no change.
                return False, False, contact_point
        # Configuration differs (or the listing lookup missed): update in place.
        result = requests.put(api_url + '/' + module.params['uid'], json=body, headers=headers)
        if result.status_code != 202:
            return True, False, {"status": result.status_code, 'response': result.json()['message']}
        # Re-read so the caller gets the contact point as Grafana stored it.
        result = requests.get(api_url, headers=headers)
        for contact_point in result.json():
            if contact_point['uid'] == module.params['uid']:
                return False, True, contact_point
        # Bug fix: previously this path fell off the loop and implicitly
        # returned None, which main() then failed to unpack.
        return True, False, {"status": result.status_code, 'response': 'updated contact point not found on re-read'}
    # Any other status (e.g. 400/401) is a straight API error.
    return True, False, {"status": result.status_code, 'response': result.json()['message']}
def absent_alert_contact_point(module):
    """Delete the Grafana contact point identified by ``uid``.

    Returns a ``(is_error, has_changed, result)`` tuple for ``main()``.
    Deleting a UID that does not exist is reported as an error.
    """
    # Strip a trailing slash so URL concatenation stays valid.
    if module.params['grafana_url'][-1] == '/':
        module.params['grafana_url'] = module.params['grafana_url'][:-1]
    api_url = module.params['grafana_url'] + '/api/v1/provisioning/contact-points'
    headers = {
        'Authorization': 'Bearer ' + module.params['grafana_api_key'],
        'User-Agent': 'grafana-ansible-collection',
    }
    # Check the current listing for the requested UID before deleting.
    listing = requests.get(api_url, headers=headers)
    exists = any(cp['uid'] == module.params['uid'] for cp in listing.json())
    if not exists:
        return True, False, "Alert Contact point does not exist"
    response = requests.delete(api_url + '/' + module.params['uid'], headers=headers)
    if response.status_code == 202:
        return False, True, response.json()
    return True, False, {"status": response.status_code, 'response': response.json()['message']}
def main():
    """Ansible module entry point: validate arguments and dispatch on state."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(type='str', required=True),
            uid=dict(type='str', required=True),
            type=dict(type='str', required=True),
            settings=dict(type='dict', required=True),
            disableResolveMessage=dict(type='bool', required=False, default=False),
            grafana_url=dict(type='str', required=True),
            grafana_api_key=dict(type='str', required=True, no_log=True),
            state=dict(type='str', required=False, default='present', choices=['present', 'absent'])
        )
    )
    # Fail early with Ansible's standard message when requests is missing.
    if not HAS_REQUESTS:
        module.fail_json(msg=missing_required_lib('requests'))
    # state is constrained by `choices`, so the lookup cannot miss.
    handlers = {
        "present": present_alert_contact_point,
        "absent": absent_alert_contact_point,
    }
    is_error, has_changed, result = handlers[module.params['state']](module)
    if is_error:
        module.fail_json(msg=result)
    else:
        module.exit_json(changed=has_changed, output=result)


if __name__ == '__main__':
    main()

View file

@ -0,0 +1,242 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2021, Ishan Jain (@ishanjainn)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
DOCUMENTATION = '''
---
module: alert_notification_policy
author:
- Ishan Jain (@ishanjainn)
version_added: "0.0.1"
short_description: Manage Alerting Policies points in Grafana
description:
- Set the notification policy tree using Ansible.
requirements: [ "requests >= 1.0.0" ]
notes:
- Does not support C(check_mode).
options:
Continue:
description:
- Continue matching subsequent sibling nodes if set to C(true).
type: bool
default: false
groupByStr:
description:
- List of string.
- Group alerts when you receive a notification based on labels. If empty it will be inherited from the parent policy.
type: list
default: []
elements: str
muteTimeIntervals:
description:
- List of string.
      - Sets the mute timing for the notification policy.
type: list
default: []
elements: str
rootPolicyReceiver:
description:
- Sets the name of the contact point to be set as the default receiver.
type: str
default: grafana-default-email
routes:
description:
- List of objects
- Sets the Route that contains definitions of how to handle alerts.
type: list
required: true
elements: dict
groupInterval:
description:
- Sets the wait time to send a batch of new alerts for that group after the first notification was sent. Inherited from the parent policy if empty.
type: str
default: 5m
groupWait:
description:
- Sets the wait time until the initial notification is sent for a new group created by an incoming alert. Inherited from the parent policy if empty.
type: str
default: 30s
objectMatchers:
description:
- Matchers is a slice of Matchers that is sortable, implements Stringer, and provides a Matches method to match a LabelSet.
type: list
default: []
elements: dict
repeatInterval:
description:
- Sets the waiting time to resend an alert after they have successfully been sent.
type: str
default: 4h
grafana_url:
description:
- URL of the Grafana instance.
type: str
required: true
grafana_api_key:
description:
- Grafana API Key used to authenticate with Grafana.
type: str
required : true
'''
EXAMPLES = '''
- name: Set Notification policy tree
grafana.grafana.alert_notification_policy:
grafana_url: "{{ grafana_url }}"
grafana_api_key: "{{ grafana_api_key }}"
routes: [
{
receiver: myReceiver,
object_matchers: [["env", "=", "Production"]],
}
]
- name: Set nested Notification policies
grafana.grafana.alert_notification_policy:
routes: [
{
receiver: myReceiver,
object_matchers: [["env", "=", "Production"],["team", "=", "ops"]],
routes: [
{
receiver: myReceiver2,
object_matchers: [["region", "=", "eu"]],
}
]
},
{
receiver: myReceiver3,
object_matchers: [["env", "=", "Staging"]]
}
]
grafana_url: "{{ grafana_url }}"
grafana_api_key: "{{ grafana_api_key }}"
'''
RETURN = r'''
output:
description: Dict object containing Notification tree information.
returned: On success
type: dict
contains:
group_interval:
description: The waiting time to send a batch of new alerts for that group after the first notification was sent. This is of the parent policy.
returned: on success
type: str
sample: "5m"
group_wait:
description: The waiting time until the initial notification is sent for a new group created by an incoming alert. This is of the parent policy.
returned: on success
type: str
sample: "30s"
receiver:
description: The name of the default contact point.
returned: state is present and on success
type: str
sample: "grafana-default-email"
repeat_interval:
description: The waiting time to resend an alert after they have successfully been sent. This is of the parent policy
returned: on success
type: str
sample: "4h"
routes:
description: The entire notification tree returned as a list.
returned: on success
type: list
sample: [
{
"object_matchers": [
[
"env",
"=",
"Production"
]
],
"receiver": "grafana-default-email"
}
]
'''
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
try:
import requests
HAS_REQUESTS = True
except ImportError:
HAS_REQUESTS = False
__metaclass__ = type
def alert_notification_policy(module):
    # Reconcile the Grafana notification policy tree with the requested
    # configuration. Returns a tuple (is_error, has_changed, result) consumed
    # by main().
    body = {'routes': module.params['routes'], 'Continue': module.params['Continue'],
            'groupByStr': module.params['groupByStr'], 'muteTimeIntervals': module.params['muteTimeIntervals'],
            'receiver': module.params['rootPolicyReceiver'], 'group_interval': module.params['groupInterval'],
            'group_wait': module.params['groupWait'], 'object_matchers': module.params['objectMatchers'],
            'repeat_interval': module.params['repeatInterval']}
    # Normalise the base URL; handles one or more trailing slashes instead of
    # only a single one.
    module.params['grafana_url'] = module.params['grafana_url'].rstrip('/')
    api_url = module.params['grafana_url'] + '/api/v1/provisioning/policies'
    headers = {
        'Authorization': 'Bearer ' + module.params['grafana_api_key'],
        'User-Agent': 'grafana-ansible-collection',
    }
    result = requests.get(api_url, headers=headers)
    # Parse the response body once and reuse it; the original re-parsed it on
    # every comparison.
    policy = result.json()
    if 'routes' not in policy:
        # No policy tree is provisioned yet: create it.
        result = requests.put(api_url, json=body, headers=headers)
        if result.status_code == 202:
            return False, True, result.json()
        return True, False, {"status": result.status_code, 'response': result.json()['message']}
    elif (policy['receiver'] == module.params['rootPolicyReceiver'] and policy['routes'] == module.params['routes']
            and policy['group_wait'] == module.params['groupWait'] and policy['group_interval'] == module.params['groupInterval']
            and policy['repeat_interval'] == module.params['repeatInterval']):
        # Existing tree already matches the requested configuration.
        # NOTE(review): Continue/groupByStr/muteTimeIntervals/objectMatchers
        # are not compared here, so changes limited to those fields are
        # treated as "no change" — confirm whether that is intended.
        return False, False, policy
    else:
        # Existing tree differs: replace it with the requested configuration.
        result = requests.put(api_url, json=body, headers=headers)
        if result.status_code == 202:
            return False, True, result.json()
        return True, False, {"status": result.status_code, 'response': result.json()['message']}
def main():
    # Entry point: declare the module interface, then delegate the work to
    # alert_notification_policy().
    module_args = {
        'Continue': {'type': 'bool', 'required': False, 'default': False},
        'groupByStr': {'type': 'list', 'required': False, 'default': [], 'elements': 'str'},
        'muteTimeIntervals': {'type': 'list', 'required': False, 'default': [], 'elements': 'str'},
        'rootPolicyReceiver': {'type': 'str', 'required': False, 'default': 'grafana-default-email'},
        'routes': {'type': 'list', 'required': True, 'elements': 'dict'},
        'groupInterval': {'type': 'str', 'required': False, 'default': '5m'},
        'groupWait': {'type': 'str', 'required': False, 'default': '30s'},
        'repeatInterval': {'type': 'str', 'required': False, 'default': '4h'},
        'objectMatchers': {'type': 'list', 'required': False, 'default': [], 'elements': 'dict'},
        'grafana_url': {'type': 'str', 'required': True},
        'grafana_api_key': {'type': 'str', 'required': True, 'no_log': True},
    }
    module = AnsibleModule(argument_spec=module_args)
    # 'requests' is an optional dependency: fail with the standard message.
    if not HAS_REQUESTS:
        module.fail_json(msg=missing_required_lib('requests'))
    is_error, has_changed, result = alert_notification_policy(module)
    if is_error:
        module.fail_json(msg='Status code is ' + str(result['status']), output=result['response'])
    else:
        module.exit_json(changed=has_changed, output=result)
if __name__ == '__main__':
main()

View file

@ -0,0 +1,152 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2021, Ishan Jain (@ishanjainn)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
DOCUMENTATION = '''
---
module: cloud_api_key
author:
- Ishan Jain (@ishanjainn)
version_added: "0.0.1"
short_description: Manage Grafana Cloud API keys
description:
- Create and delete Grafana Cloud API keys using Ansible.
requirements: [ "requests >= 1.0.0" ]
notes:
- Does not support C(check_mode).
options:
name:
description:
- Sets the name of the Grafana Cloud API key.
type: str
required: true
role:
description:
- Sets the role to be associated with the Cloud API key.
type: str
required: true
choices: [Admin, Viewer, Editor, MetricsPublisher]
org_slug:
description:
- Name of the Grafana Cloud organization in which Cloud API key will be created.
type: str
required: true
existing_cloud_api_key:
description:
- Cloud API Key to authenticate with Grafana Cloud.
type: str
        required: true
fail_if_already_created:
description:
- If set to C(true), the task will fail if the API key with same name already exists in the Organization.
type: bool
        default: true
state:
description:
- State for the Grafana Cloud API Key.
type: str
default: present
choices: [ present, absent ]
'''
EXAMPLES = '''
- name: Create Grafana Cloud API key
grafana.grafana.cloud_api_key:
name: key_name
role: Admin
org_slug: "{{ org_slug }}"
existing_cloud_api_key: "{{ grafana_cloud_api_key }}"
fail_if_already_created: False
state: present
- name: Delete Grafana Cloud API key
grafana.grafana.cloud_api_key:
name: key_name
org_slug: "{{ org_slug }}"
existing_cloud_api_key: "{{ grafana_cloud_api_key }}"
state: absent
'''
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
try:
import requests
HAS_REQUESTS = True
except ImportError:
HAS_REQUESTS = False
__metaclass__ = type
def present_cloud_api_key(module):
    # Create a Grafana Cloud API key inside the given organisation.
    # Returns (is_error, has_changed, result).
    params = module.params
    headers = {
        "Authorization": 'Bearer ' + params['existing_cloud_api_key'],
        'User-Agent': 'grafana-ansible-collection',
    }
    api_url = 'https://grafana.com/api/orgs/' + params['org_slug'] + '/api-keys'
    payload = {
        'name': params['name'],
        'role': params['role']
    }
    result = requests.post(api_url, json=payload, headers=headers)
    if result.status_code == 200:
        # Key created successfully.
        return False, True, result.json()
    if result.status_code == 409:
        # Key already exists; whether that is an error is caller-configurable.
        return params['fail_if_already_created'], False, "A Cloud API key with the same name already exists"
    return True, False, {"status": result.status_code, 'response': result.json()['message']}
def absent_cloud_api_key(module):
    # Delete the named Grafana Cloud API key from the organisation.
    # Returns (is_error, has_changed, result).
    params = module.params
    headers = {
        "Authorization": 'Bearer ' + params['existing_cloud_api_key'],
        'User-Agent': 'grafana-ansible-collection',
    }
    api_url = 'https://grafana.com/api/orgs/' + params['org_slug'] + '/api-keys/' + params['name']
    result = requests.delete(api_url, headers=headers)
    if result.status_code != 200:
        return True, False, {"status": result.status_code, 'response': result.json()['message']}
    return False, True, "Cloud API key is deleted"
def main():
    # Entry point: declare the module interface and dispatch on 'state'.
    module_args = dict(
        name=dict(type='str', required=True),
        role=dict(type='str', required=True, choices=['Admin', 'Viewer', 'Editor', 'MetricsPublisher']),
        org_slug=dict(type='str', required=True),
        existing_cloud_api_key=dict(type='str', required=True, no_log=True),
        # Fix: the default must be a real boolean, not the string 'True',
        # so the option is a proper bool without relying on type coercion.
        fail_if_already_created=dict(type='bool', required=False, default=True),
        state=dict(type='str', required=False, default='present', choices=['present', 'absent'])
    )
    # Map the requested state to its handler function.
    choice_map = {
        "present": present_cloud_api_key,
        "absent": absent_cloud_api_key,
    }
    module = AnsibleModule(
        argument_spec=module_args
    )
    # 'requests' is an optional dependency: fail with the standard message.
    if not HAS_REQUESTS:
        module.fail_json(msg=missing_required_lib('requests'))
    is_error, has_changed, result = choice_map.get(
        module.params['state'])(module)
    if not is_error:
        module.exit_json(changed=has_changed, output=result)
    else:
        module.fail_json(msg=result)

View file

@ -0,0 +1,189 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2021, Ishan Jain (@ishanjainn)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
DOCUMENTATION = '''
---
module: cloud_plugin
author:
- Ishan Jain (@ishanjainn)
version_added: "0.0.1"
short_description: Manage Grafana Cloud Plugins
description:
- Create, Update and delete Grafana Cloud plugins using Ansible.
requirements: [ "requests >= 1.0.0" ]
notes:
- Does not support C(check_mode).
options:
name:
description:
- Name of the plugin, e.g. grafana-github-datasource.
type: str
required: true
version:
description:
- Version of the plugin to install.
type: str
default: latest
stack_slug:
description:
- Name of the Grafana Cloud stack to which the plugin will be added.
type: str
required: true
cloud_api_key:
description:
- Cloud API Key to authenticate with Grafana Cloud.
type: str
        required: true
state:
description:
- State for the Grafana Cloud Plugin.
type: str
default: present
choices: [ present, absent ]
'''
EXAMPLES = '''
- name: Create/Update a plugin
grafana.grafana.cloud_plugin:
name: grafana-github-datasource
version: 1.0.14
stack_slug: "{{ stack_slug }}"
cloud_api_key: "{{ grafana_cloud_api_key }}"
state: present
- name: Delete a Grafana Cloud stack
grafana.grafana.cloud_plugin:
name: grafana-github-datasource
stack_slug: "{{ stack_slug }}"
cloud_api_key: "{{ grafana_cloud_api_key }}"
state: absent
'''
RETURN = r'''
current_version:
description: Current version of the plugin.
returned: On success
type: str
sample: "1.0.14"
latest_version:
description: Latest version available for the plugin.
returned: On success
type: str
sample: "1.0.15"
pluginId:
description: Id for the Plugin.
returned: On success
type: int
sample: 663
pluginName:
description: Name of the plugin.
returned: On success
type: str
sample: "GitHub"
pluginSlug:
description: Slug for the Plugin.
returned: On success
type: str
sample: "grafana-github-datasource"
'''
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
try:
import requests
HAS_REQUESTS = True
except ImportError:
HAS_REQUESTS = False
__metaclass__ = type
def present_cloud_plugin(module):
    # Install a plugin on a Grafana Cloud stack, or update its version when it
    # is already installed. Returns (is_error, has_changed, result).
    body = {
        'plugin': module.params['name'],
        'version': module.params['version']
    }
    api_url = 'https://grafana.com/api/instances/' + module.params['stack_slug'] + '/plugins'
    headers = {
        # Fix: this module's option is 'cloud_api_key' (see main()); the
        # previous 'grafana_api_key' key does not exist in module.params and
        # raised a KeyError before any request was made.
        'Authorization': 'Bearer ' + module.params['cloud_api_key'],
        'User-Agent': 'grafana-ansible-collection',
    }
    result = requests.post(api_url, json=body, headers=headers)
    if result.status_code == 200:
        return False, True, result.json()
    elif result.status_code == 409:
        # Plugin already installed: check whether the installed version
        # matches the requested one.
        plugin_url = api_url + '/' + module.params['name']
        result = requests.get(plugin_url, headers=headers)
        installed = result.json()
        if installed['pluginSlug'] == module.params['name'] and installed['version'] == module.params['version']:
            # Already at the requested version: no change.
            return False, False, installed
        # Version differs: update the plugin in place.
        result = requests.post(plugin_url, json={'version': module.params['version']},
                               headers=headers)
        return False, True, result.json()
    else:
        return True, False, {"status": result.status_code, 'response': result.json()['message']}
def absent_cloud_plugin(module):
    # Remove a plugin from a Grafana Cloud stack.
    # Returns (is_error, has_changed, result).
    params = module.params
    api_url = 'https://grafana.com/api/instances/' + params['stack_slug'] + '/plugins/' + params['name']
    headers = {
        "Authorization": 'Bearer ' + params['cloud_api_key'],
        'User-Agent': 'grafana-ansible-collection',
    }
    result = requests.delete(api_url, headers=headers)
    if result.status_code != 200:
        return True, False, {"status": result.status_code, 'response': result.json()['message']}
    return False, True, result.json()
def main():
    # Entry point: declare the module interface and dispatch on 'state'.
    module_args = {
        'name': {'type': 'str', 'required': True},
        'version': {'type': 'str', 'required': False, 'default': 'latest'},
        'stack_slug': {'type': 'str', 'required': True},
        'cloud_api_key': {'type': 'str', 'required': True, 'no_log': True},
        'state': {'type': 'str', 'required': False, 'default': 'present', 'choices': ['present', 'absent']},
    }
    module = AnsibleModule(argument_spec=module_args)
    # 'requests' is an optional dependency: fail with the standard message.
    if not HAS_REQUESTS:
        module.fail_json(msg=missing_required_lib('requests'))
    # Map the requested state to its handler function.
    handlers = {
        "present": present_cloud_plugin,
        "absent": absent_cloud_plugin,
    }
    is_error, has_changed, result = handlers.get(module.params['state'])(module)
    if is_error:
        module.fail_json(msg=result)
    else:
        module.exit_json(changed=has_changed,
                         pluginId=result['pluginId'],
                         pluginName=result['pluginName'],
                         pluginSlug=result['pluginSlug'],
                         current_version=result['version'],
                         latest_version=result['latestVersion'])

View file

@ -0,0 +1,259 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2021, Ishan Jain (@ishanjainn)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
DOCUMENTATION = '''
---
module: cloud_stack
author:
- Ishan Jain (@ishanjainn)
version_added: "0.0.1"
short_description: Manage Grafana Cloud stack
description:
- Create and delete Grafana Cloud stacks using Ansible.
requirements: [ "requests >= 1.0.0" ]
notes:
- Does not support C(check_mode).
options:
name:
description:
- Sets the name of stack. Conventionally matches the URL of the instance. For example, C(stackslug.grafana.net).
type: str
required: true
stack_slug:
description:
- Sets the subdomain of the Grafana instance. For example, if slug is B(stackslug), the instance URL will be C(https://stackslug.grafana.net).
type: str
required: true
cloud_api_key:
description:
- Cloud API Key to authenticate with Grafana Cloud.
type: str
        required: true
region:
description:
- Sets the region for the Grafana Cloud stack.
type: str
default: us
choices: [ us, us-azure, eu, au, eu-azure, prod-ap-southeast-0, prod-gb-south-0, prod-eu-west-3]
url:
description:
- If you use a custom domain for the instance, you can provide it here. If not provided, Will be set to C(https://stackslug.grafana.net).
type: str
org_slug:
description:
- Name of the organization under which Cloud stack is created.
type: str
required: true
delete_protection:
description:
- Enables or disables deletion protection for the Cloud stack.
- When set to true, the stack cannot be deleted unless this flag is explicitly disabled.
type: bool
default: true
required: false
state:
description:
- State for the Grafana Cloud stack.
type: str
default: present
choices: [ present, absent ]
'''
EXAMPLES = '''
- name: Create a Grafana Cloud stack
grafana.grafana.cloud_stack:
name: stack_name
stack_slug: stack_name
cloud_api_key: "{{ grafana_cloud_api_key }}"
region: eu
url: https://grafana.company_name.com
org_slug: org_name
delete_protection: true
state: present
- name: Delete a Grafana Cloud stack
grafana.grafana.cloud_stack:
name: stack_name
    stack_slug: stack_name
cloud_api_key: "{{ grafana_cloud_api_key }}"
org_slug: org_name
state: absent
'''
RETURN = r'''
alertmanager_name:
description: Name of the alertmanager instance.
returned: always
type: str
sample: "stackname-alerts"
alertmanager_url:
description: URL of the alertmanager instance.
returned: always
type: str
sample: "https://alertmanager-eu-west-0.grafana.net"
cluster_slug:
description: Slug for the cluster where the Grafana stack is deployed.
returned: always
type: str
sample: "prod-eu-west-0"
id:
description: ID of the Grafana Cloud stack.
returned: always
type: int
sample: 458182
loki_url:
description: URl for the Loki instance.
returned: always
type: str
sample: "https://logs-prod-eu-west-0.grafana.net"
orgID:
description: ID of the Grafana Cloud organization.
returned: always
type: int
sample: 652992
prometheus_url:
description: URl for the Prometheus instance.
returned: always
type: str
sample: "https://prometheus-prod-01-eu-west-0.grafana.net"
tempo_url:
description: URl for the Tempo instance.
returned: always
type: str
sample: "https://tempo-eu-west-0.grafana.net"
url:
description: URL of the Grafana Cloud stack.
returned: always
type: str
sample: "https://stackname.grafana.net"
delete_protection:
description:
- Enables or disables deletion protection for the Cloud stack.
- When set to true, the stack cannot be deleted unless this flag is explicitly disabled.
returned: always
type: bool
sample: true
'''
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
try:
import requests
HAS_REQUESTS = True
except ImportError:
HAS_REQUESTS = False
__metaclass__ = type
def present_cloud_stack(module):
    # Create a Grafana Cloud stack, or reconcile deleteProtection on an
    # existing stack with the same slug. Returns (is_error, has_changed, result).
    if not module.params['url']:
        module.params['url'] = 'https://' + module.params['stack_slug'] + '.grafana.net'
    delete_protection = module.params['delete_protection']
    if delete_protection is None:
        # Fix: module.params.get('delete_protection', True) never fell back to
        # True because the key always exists (with value None when the option
        # is omitted), so the API received deleteProtection: null. Apply the
        # documented default explicitly.
        delete_protection = True
    body = {
        'name': module.params['name'],
        'slug': module.params['stack_slug'],
        'region': module.params['region'],
        'url': module.params['url'],
        'deleteProtection': delete_protection,
    }
    api_url = 'https://grafana.com/api/instances'
    headers = {
        "Authorization": 'Bearer ' + module.params['cloud_api_key'],
        'User-Agent': 'grafana-ansible-collection',
    }
    result = requests.post(api_url, json=body, headers=headers)
    if result.status_code == 200:
        return False, True, result.json()
    # Parse the error body once instead of calling result.json() repeatedly.
    message = result.json().get('message')
    if result.status_code in [409, 403] and message in ["That URL has already been taken, please try an alternate URL", "Hosted instance limit reached"]:
        if message == "That URL has already been taken, please try an alternate URL":
            # The slug may already belong to this org: look it up.
            api_url = 'https://grafana.com/api/orgs/' + module.params['org_slug'] + '/instances'
            result = requests.get(api_url, headers=headers)
            stack_found = False
            stackInfo = {}
            for stack in result.json()['items']:
                if stack['slug'] == module.params['stack_slug']:
                    stack_found = True
                    stackInfo = stack
            if not stack_found:
                return True, False, "Stack is not found under your org"
            if body['deleteProtection'] == stackInfo['deleteProtection']:
                # Existing stack already matches: no change.
                return False, False, stackInfo
            # Only deleteProtection differs: update it in place.
            api_url = f'https://grafana.com/api/instances/{stackInfo["id"]}'
            result = requests.post(api_url, json={'deleteProtection': body['deleteProtection']}, headers=headers)
            if result.status_code != 200:
                return True, False, {"status": result.status_code, 'response': result.json()['message']}
            return False, True, result.json()
        # message == "Hosted instance limit reached"
        return True, False, "You have reached Maximum number of Cloud Stacks in your Org."
    return True, False, {"status": result.status_code, 'response': result.json()['message']}
def absent_cloud_stack(module):
    # Delete a Grafana Cloud stack identified by its slug.
    # Returns (is_error, has_changed, result).
    headers = {
        "Authorization": 'Bearer ' + module.params['cloud_api_key'],
        'User-Agent': 'grafana-ansible-collection',
    }
    api_url = 'https://grafana.com/api/instances/' + module.params['stack_slug']
    result = requests.delete(api_url, headers=headers)
    if result.status_code != 200:
        return True, False, {"status": result.status_code, 'response': result.json()['message']}
    return False, True, result.json()
def main():
    # Entry point: declare the module interface and dispatch on 'state'.
    module_args = dict(
        name=dict(type='str', required=True),
        stack_slug=dict(type='str', required=True),
        cloud_api_key=dict(type='str', required=True, no_log=True),
        region=dict(type='str', required=False, default='us',
                    choices=['us', 'us-azure', 'eu', 'au', 'eu-azure', 'prod-ap-southeast-0', 'prod-gb-south-0',
                             'prod-eu-west-3']),
        url=dict(type='str', required=False),
        org_slug=dict(type='str', required=True),
        state=dict(type='str', required=False, default='present', choices=['present', 'absent']),
        # Fix: the type must be the string 'bool', not the Python builtin
        # bool — Ansible would call bool(value), turning any non-empty string
        # (including "false") into True. Default matches the documented
        # 'default: true'.
        delete_protection=dict(type='bool', required=False, default=True),
    )
    # Map the requested state to its handler function.
    choice_map = {
        "present": present_cloud_stack,
        "absent": absent_cloud_stack,
    }
    module = AnsibleModule(
        argument_spec=module_args
    )
    # 'requests' is an optional dependency: fail with the standard message.
    if not HAS_REQUESTS:
        module.fail_json(msg=missing_required_lib('requests'))
    is_error, has_changed, result = choice_map.get(
        module.params['state'])(module)
    if not is_error:
        module.exit_json(changed=has_changed,
                         alertmanager_name=result['amInstanceName'],
                         url=result['url'], id=result['id'],
                         cluster_slug=result['clusterName'],
                         orgID=result['orgId'],
                         loki_url=result['hlInstanceUrl'],
                         prometheus_url=result['hmInstancePromUrl'],
                         tempo_url=result['htInstanceUrl'],
                         alertmanager_url=result['amInstanceUrl'],
                         delete_protection=result['deleteProtection'])
    else:
        module.fail_json(msg=result)

View file

@ -0,0 +1,190 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2021, Ishan Jain (@ishanjainn)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
DOCUMENTATION = '''
---
module: dashboard
author:
- Ishan Jain (@ishanjainn)
version_added: "0.0.1"
short_description: Manage Dashboards in Grafana
description:
- Create, Update and delete Dashboards using Ansible.
requirements: [ "requests >= 1.0.0" ]
notes:
- Does not support C(check_mode).
- Does not support C(Idempotency).
options:
dashboard:
description:
- JSON source code for dashboard.
type: dict
required: true
grafana_url:
description:
- URL of the Grafana instance.
type: str
required: true
grafana_api_key:
description:
- Grafana API Key to authenticate with Grafana Cloud.
type: str
        required: true
state:
description:
- State for the Grafana Dashboard.
choices: [ present, absent ]
default: present
type: str
'''
EXAMPLES = '''
- name: Create/Update a dashboard
grafana.grafana.dashboard:
dashboard: "{{ lookup('ansible.builtin.file', 'dashboard.json') }}"
grafana_url: "{{ grafana_url }}"
grafana_api_key: "{{ grafana_api_key }}"
state: present
- name: Delete dashboard
grafana.grafana.dashboard:
dashboard: "{{ lookup('ansible.builtin.file', 'dashboard.json') }}"
grafana_url: "{{ grafana_url }}"
grafana_api_key: "{{ grafana_api_key }}"
state: absent
'''
RETURN = r'''
output:
description: Dict object containing folder information.
returned: On success
type: dict
contains:
id:
description: The ID for the dashboard.
returned: on success
type: int
sample: 17
slug:
description: The slug for the dashboard.
returned: state is present and on success
type: str
sample: ansible-integration-test
status:
description: The status of the dashboard.
returned: state is present and on success
type: str
sample: success
uid:
description: The UID for the dashboard.
returned: state is present and on success
type: str
sample: "test1234"
url:
description: The endpoint for the dashboard.
returned: state is present and on success
type: str
sample: "/d/test1234/ansible-integration-test"
version:
description: The version of the dashboard.
returned: state is present and on success
type: int
sample: 2
message:
description: The message returned after the operation on the dashboard.
returned: state is absent and on success
type: str
sample: "Dashboard Ansible Integration Test deleted"
title:
description: The name of the dashboard.
returned: state is absent and on success
type: str
sample: "Ansible Integration Test"
'''
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
try:
import requests
HAS_REQUESTS = True
except ImportError:
HAS_REQUESTS = False
__metaclass__ = type
def present_dashboard(module):
    # Create or update a dashboard from its JSON source.
    # Returns (is_error, has_changed, result).
    if module.params['grafana_url'][-1] == '/':
        module.params['grafana_url'] = module.params['grafana_url'][:-1]
    headers = {
        "Authorization": 'Bearer ' + module.params['grafana_api_key'],
        'User-Agent': 'grafana-ansible-collection',
    }
    api_url = module.params['grafana_url'] + '/api/dashboards/db'
    result = requests.post(api_url, json=module.params['dashboard'], headers=headers)
    if result.status_code != 200:
        return True, False, {"status": result.status_code, 'response': result.json()['message']}
    return False, True, result.json()
def absent_dashboard(module):
    # Delete a dashboard identified by the 'uid' field of its JSON source.
    # Returns (is_error, has_changed, result).
    if module.params['grafana_url'][-1] == '/':
        module.params['grafana_url'] = module.params['grafana_url'][:-1]
    if 'uid' not in module.params['dashboard']['dashboard']:
        # Fix: removed the duplicated word "the" in the error message.
        return True, False, "UID is not defined in the Dashboard configuration"
    # Fix: removed the duplicated 'api_url =' in the assignment.
    api_url = module.params['grafana_url'] + '/api/dashboards/uid/' + module.params['dashboard']['dashboard']['uid']
    result = requests.delete(api_url, headers={
        "Authorization": 'Bearer ' + module.params['grafana_api_key'],
        'User-Agent': 'grafana-ansible-collection',
    })
    if result.status_code == 200:
        return False, True, result.json()
    else:
        return True, False, {"status": result.status_code, 'response': result.json()['message']}
def main():
    # Entry point: declare the module interface and dispatch on 'state'.
    module_args = {
        'dashboard': {'type': 'dict', 'required': True},
        'grafana_url': {'type': 'str', 'required': True},
        'grafana_api_key': {'type': 'str', 'required': True, 'no_log': True},
        'state': {'type': 'str', 'required': False, 'default': 'present', 'choices': ['present', 'absent']},
    }
    module = AnsibleModule(argument_spec=module_args)
    # 'requests' is an optional dependency: fail with the standard message.
    if not HAS_REQUESTS:
        module.fail_json(msg=missing_required_lib('requests'))
    # Map the requested state to its handler function.
    handlers = {
        "present": present_dashboard,
        "absent": absent_dashboard,
    }
    is_error, has_changed, result = handlers.get(module.params['state'])(module)
    if is_error:
        module.fail_json(msg=result)
    else:
        module.exit_json(changed=has_changed, output=result)

View file

@ -0,0 +1,209 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2021, Ishan Jain (@ishanjainn)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
DOCUMENTATION = '''
---
module: datasource
author:
- Ishan Jain (@ishanjainn)
version_added: "0.0.1"
short_description: Manage Data sources in Grafana
description:
- Create, Update and delete Data sources using Ansible.
requirements: [ "requests >= 1.0.0" ]
notes:
- Does not support C(check_mode).
- Does not support C(Idempotency).
options:
dataSource:
description:
- JSON source code for the Data source.
type: dict
required: true
grafana_url:
description:
- URL of the Grafana instance.
type: str
required: true
grafana_api_key:
description:
- Grafana API Key to authenticate with Grafana Cloud.
type: str
        required: true
state:
description:
- State for the Grafana Datasource.
choices: [ present, absent ]
default: present
type: str
'''
EXAMPLES = '''
- name: Create/Update Data sources
grafana.grafana.datasource:
dataSource:
name: Prometheus
type: prometheus
access: proxy
url: http://localhost:9090
jsonData:
httpMethod: POST
manageAlerts: true
prometheusType: Prometheus
cacheLevel: High
grafana_url: "{{ grafana_url }}"
grafana_api_key: "{{ grafana_api_key }}"
state: present
- name: Delete Data sources
grafana.grafana.datasource:
dataSource: "{{ lookup('ansible.builtin.file', 'datasource.json') | to_yaml }}"
grafana_url: "{{ grafana_url }}"
grafana_api_key: "{{ grafana_api_key }}"
state: absent
'''
RETURN = r'''
output:
description: Dict object containing Data source information.
returned: On success
type: dict
contains:
datasource:
description: The response body content for the data source configuration.
returned: state is present and on success
type: dict
sample: {
"access": "proxy",
"basicAuth": false,
"basicAuthUser": "",
"database": "db-name",
"id": 20,
"isDefault": false,
"jsonData": {},
"name": "ansible-integration",
"orgId": 1,
"readOnly": false,
"secureJsonFields": {
"password": true
},
"type": "influxdb",
"typeLogoUrl": "",
"uid": "ansibletest",
"url": "https://grafana.github.com/grafana-ansible-collection",
"user": "user",
"version": 1,
"withCredentials": false
}
id:
description: The ID assigned to the data source.
returned: on success
type: int
sample: 20
name:
description: The name of the data source defined in the JSON source code.
returned: state is present and on success
type: str
sample: "ansible-integration"
message:
description: The message returned after the operation on the Data source.
returned: on success
type: str
sample: "Datasource added"
'''
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
try:
import requests
HAS_REQUESTS = True
except ImportError:
HAS_REQUESTS = False
__metaclass__ = type
def present_datasource(module):
    # Create a data source; on a 409 name conflict, look up its id and update
    # it in place. Returns (is_error, has_changed, result).
    if module.params['grafana_url'][-1] == '/':
        module.params['grafana_url'] = module.params['grafana_url'][:-1]
    base_url = module.params['grafana_url']
    headers = {
        "Authorization": 'Bearer ' + module.params['grafana_api_key'],
        'User-Agent': 'grafana-ansible-collection',
    }
    result = requests.post(base_url + '/api/datasources', json=module.params['dataSource'], headers=headers)
    if result.status_code == 200:
        return False, True, result.json()
    if result.status_code == 409:
        # Name already taken: resolve the numeric id and update via PUT.
        lookup = requests.get(base_url + '/api/datasources/id/' + module.params['dataSource']['name'],
                              headers=headers)
        update_url = base_url + '/api/datasources/' + str(lookup.json()['id'])
        result = requests.put(update_url, json=module.params['dataSource'], headers=headers)
        if result.status_code == 200:
            return False, True, result.json()
        return True, False, {"status": result.status_code, 'response': result.json()['message']}
    return True, False, {"status": result.status_code, 'response': result.json()['message']}
def absent_datasource(module):
    # Delete a data source identified by its name.
    # Returns (is_error, has_changed, result).
    if module.params['grafana_url'][-1] == '/':
        module.params['grafana_url'] = module.params['grafana_url'][:-1]
    headers = {
        "Authorization": 'Bearer ' + module.params['grafana_api_key'],
        'User-Agent': 'grafana-ansible-collection',
    }
    api_url = module.params['grafana_url'] + '/api/datasources/name/' + module.params['dataSource']['name']
    result = requests.delete(api_url, headers=headers)
    # Both branches report the status and API message payload.
    outcome = {"status": result.status_code, 'response': result.json()['message']}
    if result.status_code == 200:
        return False, True, outcome
    return True, False, outcome
def main():
    # Entry point: declare the module interface and dispatch on 'state'.
    module_args = {
        'dataSource': {'type': 'dict', 'required': True},
        'grafana_url': {'type': 'str', 'required': True},
        'grafana_api_key': {'type': 'str', 'required': True, 'no_log': True},
        'state': {'type': 'str', 'required': False, 'default': 'present', 'choices': ['present', 'absent']},
    }
    module = AnsibleModule(argument_spec=module_args)
    # 'requests' is an optional dependency: fail with the standard message.
    if not HAS_REQUESTS:
        module.fail_json(msg=missing_required_lib('requests'))
    # Map the requested state to its handler function.
    handlers = {
        "present": present_datasource,
        "absent": absent_datasource,
    }
    is_error, has_changed, result = handlers.get(module.params['state'])(module)
    if is_error:
        module.fail_json(msg=result)
    else:
        module.exit_json(changed=has_changed, output=result)

View file

@ -0,0 +1,280 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2021, Ishan Jain (@ishanjainn)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
DOCUMENTATION = '''
---
module: folder
author:
- Ishan Jain (@ishanjainn)
version_added: "0.0.1"
short_description: Manage Folders in Grafana
description:
- Create, Update and delete Folders via Ansible.
requirements: [ "requests >= 1.0.0" ]
notes:
- Does not support C(check_mode).
options:
title:
description:
- Sets the title of the folder.
type: str
required: true
uid:
description:
- Sets the UID for your folder.
type: str
required: true
overwrite:
description:
            - Set to C(false) if you don't want to overwrite existing folder with newer version.
type: bool
required: false
default: true
grafana_api_key:
description:
- Grafana API Key to authenticate with Grafana.
type: str
        required: true
grafana_url:
description:
- URL of the Grafana instance.
type: str
required: true
state:
description:
- State for the Grafana Folder.
choices: [ present, absent ]
default: present
type: str
'''
EXAMPLES = '''
- name: Create/Update a Folder in Grafana
grafana.grafana.folder:
title: folder_name
uid: folder_name
overwrite: true
grafana_url: "{{ grafana_url }}"
grafana_api_key: "{{ grafana_api_key }}"
state: present
- name: Delete a Folder in Grafana
grafana.grafana.folder:
uid: folder_name
grafana_url: "{{ grafana_url }}"
grafana_api_key: "{{ grafana_api_key }}"
state: absent
'''
RETURN = r'''
output:
description: Dict object containing folder information.
returned: On success
type: dict
contains:
canAdmin:
description: Boolean value specifying if current user can admin in folder.
returned: state is present and on success
type: bool
sample: true
canDelete:
description: Boolean value specifying if current user can delete the folder.
returned: state is present and on success
type: bool
sample: true
canEdit:
description: Boolean value specifying if current user can edit in folder.
returned: state is present and on success
type: bool
sample: true
canSave:
description: Boolean value specifying if current user can save in folder.
returned: state is present and on success
type: bool
sample: true
created:
description: The date when folder was created.
returned: state is present and on success
type: str
sample: "2022-10-20T09:31:53Z"
createdBy:
description: The name of the user who created the folder.
returned: state is present and on success
type: str
sample: "Anonymous"
hasAcl:
description: Boolean value specifying if folder has acl.
returned: state is present and on success
type: bool
sample: true
id:
description: The ID for the folder.
returned: state is present and on success
type: int
sample: 18
title:
description: The name of the folder.
returned: on success
type: str
sample: foldername
uid:
description: The UID for the folder.
returned: state is present and on success
type: str
sample: foldername
updated:
description: The date when the folder was last updated.
returned: state is present and on success
type: str
sample: "2022-10-20T09:31:53Z"
updatedBy:
description: The name of the user who last updated the folder.
returned: state is present and on success
type: str
sample: "Anonymous"
url:
description: The URl for the folder.
returned: state is present and on success
type: str
sample: "/dashboards/f/foldername/foldername"
version:
description: The version of the folder.
returned: state is present and on success
type: int
sample: 1
message:
description: The message returned after the operation on the folder.
returned: state is absent and on success
type: str
sample: "Folder has been succesfuly deleted"
'''
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
try:
import requests
HAS_REQUESTS = True
except ImportError:
HAS_REQUESTS = False
__metaclass__ = type
def present_folder(module):
    """Ensure a Grafana folder with the given uid and title exists.

    Returns a (is_error, has_changed, result) tuple consumed by main().
    Creates the folder via POST /api/folders; on HTTP 412 (a folder with
    this uid already exists) it compares the existing folder and, when the
    title differs, updates it via PUT /api/folders/<uid>.
    """
    # Normalise the base URL so we never build '//api/...' URLs.  rstrip
    # also copes with an empty string or multiple trailing slashes, where
    # the original [-1] index check would crash or miss.
    module.params['grafana_url'] = module.params['grafana_url'].rstrip('/')

    headers = {
        "Authorization": 'Bearer ' + module.params['grafana_api_key'],
        'User-Agent': 'grafana-ansible-collection',
    }
    body = {
        'uid': module.params['uid'],
        'title': module.params['title'],
    }
    api_url = module.params['grafana_url'] + '/api/folders'
    result = requests.post(api_url, json=body, headers=headers)

    if result.status_code == 200:
        # Folder freshly created.
        return False, True, result.json()

    if result.status_code == 412:
        # Precondition failed: a folder with this uid already exists.
        # Fetch all folders to check whether it already matches the
        # desired state.
        existing = requests.get(api_url, headers=headers)
        for folder in existing.json():
            if folder['uid'] == module.params['uid'] and folder['title'] == module.params['title']:
                # Already in the desired state -> no change.
                return False, False, folder

        # Same uid but different title: overwrite via the update endpoint.
        body['overwrite'] = module.params['overwrite']
        update_url = module.params['grafana_url'] + '/api/folders/' + module.params['uid']
        result = requests.put(update_url, json=body, headers=headers)
        if result.status_code == 200:
            return False, True, result.json()
        # .get() avoids a KeyError when Grafana returns no 'message' field.
        return True, False, {"status": result.status_code, 'response': result.json().get('message')}

    return True, False, {"status": result.status_code, 'response': result.json().get('message')}
def absent_folder(module):
    """Ensure the Grafana folder with the given uid and title is absent.

    Returns a (is_error, has_changed, result) tuple consumed by main().
    """
    # Normalise the base URL (also handles empty/multi-slash URLs, where
    # the original [-1] index check would crash or miss).
    module.params['grafana_url'] = module.params['grafana_url'].rstrip('/')

    headers = {
        "Authorization": 'Bearer ' + module.params['grafana_api_key'],
        'User-Agent': 'grafana-ansible-collection',
    }
    api_url = module.params['grafana_url'] + '/api/folders'
    result = requests.get(api_url, headers=headers)

    # The folder is only considered present when both uid and title match,
    # mirroring the matching rule used by present_folder().
    folder_exists = any(
        folder['uid'] == module.params['uid'] and folder['title'] == module.params['title']
        for folder in result.json()
    )
    if not folder_exists:
        # Nothing to delete: report success WITHOUT a change.  The
        # original code incorrectly reported changed=True here.
        return False, False, {"status": 200, 'response': "Folder does not exist"}

    delete_url = module.params['grafana_url'] + '/api/folders/' + module.params['uid']
    result = requests.delete(delete_url, headers=headers)
    if result.status_code == 200:
        # Typo fix in the user-facing message ("succesfuly").
        return False, True, {"status": result.status_code, 'response': "Folder has been successfully deleted"}
    return True, False, {"status": result.status_code, 'response': "Error deleting folder"}
def main():
    """Entry point: parse module arguments and dispatch to the handler
    matching the requested state."""
    argument_spec = {
        'title': {'type': 'str', 'required': True},
        'uid': {'type': 'str', 'required': True},
        'overwrite': {'type': 'bool', 'required': False, 'default': True},
        'grafana_url': {'type': 'str', 'required': True},
        'grafana_api_key': {'type': 'str', 'required': True, 'no_log': True},
        'state': {'type': 'str', 'required': False, 'default': 'present',
                  'choices': ['present', 'absent']},
    }
    module = AnsibleModule(argument_spec=argument_spec)

    # 'requests' is imported lazily at module load; fail cleanly if absent.
    if not HAS_REQUESTS:
        module.fail_json(msg=missing_required_lib('requests'))

    # Map each supported state to its handler function.
    handlers = {
        'present': present_folder,
        'absent': absent_folder,
    }
    is_error, has_changed, result = handlers[module.params['state']](module)

    if is_error:
        module.fail_json(msg=result)
    else:
        module.exit_json(changed=has_changed, output=result)

View file

@ -0,0 +1,284 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2024, téïcée (www.teicee.com)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
# DOCUMENTATION must list every option and choice the argument spec declares:
# the module supports state=update_password and an 'orgid' option, both of
# which were missing here (ansible-doc/sanity checks compare the two).
DOCUMENTATION = '''
---
module: user
author:
  - Mathieu Valois, téïcée
version_added: "0.0.1"
short_description: Manage Users in Grafana
description:
  - Create, Update and delete Users using Ansible.
requirements: [ "requests >= 1.0.0" ]
notes:
  - Does not support C(check_mode).
  - Does not support C(Idempotency).
options:
  grafana_url:
    description:
      - URL of the Grafana instance.
    type: str
    required: true
  admin_name:
    description:
      - Grafana admin username
    type: str
    required: true
  admin_password:
    description:
      - Grafana admin password
    type: str
    required: true
  login:
    description:
      - Login of the user
    type: str
    required: true
  password:
    description:
      - Password of the user. Should be provided if state=present or state=update_password
    type: str
    required: false
  name:
    description:
      - Name of the user.
    type: str
    required: false
  email:
    description:
      - Email address of the user.
    type: str
    required: false
  orgid:
    description:
      - Organization ID sent as C(OrgId) when the user is created.
    type: int
    required: false
  state:
    description:
      - State for the Grafana User.
    choices: [ present, absent, update_password ]
    default: present
    type: str
'''
# EXAMPLES shown by ansible-doc; the email value was missing its closing
# quote, making the example invalid YAML.
EXAMPLES = '''
- name: Create/Update a user
  grafana.grafana.user:
    login: "grafana_user"
    password: "{{ lookup('ansible.builtin.password') }}"
    email: "grafana_user@localhost.local"
    name: "grafana user"
    grafana_url: "{{ grafana_url }}"
    admin_name: "admin"
    admin_password: "admin"
    state: present

- name: Delete user
  grafana.grafana.user:
    login: "grafana_user"
    grafana_url: "{{ grafana_url }}"
    admin_name: "admin"
    admin_password: "admin"
    state: absent
'''
# RETURN documents the structure emitted through exit_json(output=...);
# rewritten with proper YAML nesting so ansible-doc can parse it.
RETURN = r'''
output:
  description: Dict object containing user information and message.
  returned: On success
  type: dict
  contains:
    id:
      description: The ID for the user.
      returned: on success
      type: int
      sample: 17
    email:
      description: The email for the user.
      returned: on success
      type: str
      sample: grafana_user@localhost.local
    name:
      description: The name for the user.
      returned: on success
      type: str
      sample: grafana user
    login:
      description: The login for the user.
      returned: on success
      type: str
      sample: grafana_user
'''
try:
import requests
HAS_REQUESTS = True
except ImportError:
HAS_REQUESTS = False
__metaclass__ = type
def _get_user(grafana_url, admin_name, admin_password, login, email=None):
    """Look up a Grafana user by login, falling back to the email address.

    Returns the decoded JSON body of the lookup response, or None when
    neither the login nor the (optional) email matches a user (HTTP 404).
    """
    lookup_base = grafana_url + '/api/users/lookup?loginOrEmail='
    credentials = requests.auth.HTTPBasicAuth(admin_name, admin_password)

    # First try the login.
    response = requests.get(lookup_base + login, auth=credentials)
    # No user with that login: retry with the email address when given.
    if response.status_code == 404 and email is not None:
        response = requests.get(lookup_base + email, auth=credentials)

    if response.status_code == 404:
        return None
    return response.json()
def _set_user_password(grafana_url, admin_name, admin_password, user_id, password):
    """Set the password of the existing user identified by user_id.

    admin_name should be a user having the users.password:write permission.
    Returns the raw requests.Response object.
    """
    endpoint = f"{grafana_url}/api/admin/users/{user_id}/password"
    credentials = requests.auth.HTTPBasicAuth(admin_name, admin_password)
    return requests.put(endpoint, json={'password': password}, auth=credentials)
def present_user(module):
    """Create the user when missing, otherwise update the existing user.

    Returns a (is_error, has_changed, result) tuple consumed by main().
    NOTE: always reports changed=True on success; the module documents
    that it does not support idempotency.
    """
    # Normalise the base URL (also handles empty/multi-slash URLs, where
    # the original [-1] index check would crash or miss).
    module.params['grafana_url'] = module.params['grafana_url'].rstrip('/')

    auth = requests.auth.HTTPBasicAuth(
        module.params['admin_name'], module.params['admin_password'])
    body = {
        'login': module.params['login'],
        'password': module.params['password'],
        'email': module.params['email'],
        'name': module.params['name'],
        'OrgId': module.params['orgid'],
    }

    user = _get_user(module.params['grafana_url'], module.params['admin_name'],
                     module.params['admin_password'], module.params['login'], module.params['email'])
    if user is None:
        # No such user yet: create it through the admin API.
        api_url = module.params['grafana_url'] + '/api/admin/users'
        result = requests.post(api_url, json=body, auth=auth)
    else:
        # User exists: update its attributes in place.
        api_url = f"{module.params['grafana_url']}/api/users/{user['id']}"
        result = requests.put(api_url, json=body, auth=auth)

    if result.status_code == 200:
        return False, True, result.json()
    # .get() avoids a KeyError when Grafana returns no 'message' field.
    return True, False, {"status": result.status_code, 'response': result.json().get('message')}
def absent_user(module):
    """Delete the user when it exists; no-op when it does not.

    Returns a (is_error, has_changed, result) tuple consumed by main().
    """
    # Normalise the base URL (also handles empty/multi-slash URLs, where
    # the original [-1] index check would crash or miss).
    module.params['grafana_url'] = module.params['grafana_url'].rstrip('/')

    user = _get_user(module.params['grafana_url'], module.params['admin_name'],
                     module.params['admin_password'], module.params['login'], module.params['email'])
    if user is None:
        # Nothing to delete: success without a change.
        return False, False, "User does not exist"

    api_url = f"{module.params['grafana_url']}/api/admin/users/{user['id']}"
    result = requests.delete(api_url, auth=requests.auth.HTTPBasicAuth(
        module.params['admin_name'], module.params['admin_password']))
    if result.status_code == 200:
        return False, True, result.json()
    # .get() avoids a KeyError when Grafana returns no 'message' field.
    return True, False, {"status": result.status_code, 'response': result.json().get('message')}
def password_user(module):
    """Set the user's password, skipping when it is already in effect.

    First authenticates the lookup with the NEW password itself: when that
    succeeds the password is already set and nothing is changed.  Otherwise
    the password is updated through the admin API.
    Returns a (is_error, has_changed, result) tuple consumed by main().
    """
    # Normalise the base URL (also handles empty/multi-slash URLs, where
    # the original [-1] index check would crash or miss).
    module.params['grafana_url'] = module.params['grafana_url'].rstrip('/')

    # Probe with the new credentials to check whether the password has
    # already been changed.  _get_user() returns None on a 404, so guard
    # against that before testing for 'id' — the original code crashed
    # here with "TypeError: argument of type 'NoneType' is not iterable"
    # when the user did not exist.
    user = _get_user(module.params['grafana_url'], module.params['login'],
                     module.params['password'], module.params['login'], module.params['email'])
    if user is not None and 'id' in user:
        # Auth is OK, the password does not need to be changed.
        return False, False, {'message': 'Password has already been changed', 'user': user}

    # From here, we begin the password change procedure.
    user = _get_user(module.params['grafana_url'], module.params['admin_name'],
                     module.params['admin_password'], module.params['login'], module.params['email'])
    if user is None:
        return True, False, "User does not exist"
    if 'id' not in user:
        # Lookup answered with an error payload instead of a user object.
        return True, False, user

    result = _set_user_password(module.params['grafana_url'], module.params['admin_name'],
                                module.params['admin_password'], user['id'], module.params['password'])
    if result.status_code == 200:
        return False, True, result.json()
    return True, False, result.json()
def main():
    """Entry point: parse module arguments and dispatch to the handler
    matching the requested state."""
    # The Grafana admin API is only accessible with basic auth, not an API
    # token, so the admin name and password must be provided.
    module_args = dict(
        admin_name=dict(type='str', required=True),
        admin_password=dict(type='str', required=True, no_log=True),
        login=dict(type='str', required=True),
        password=dict(type='str', required=False, no_log=True),
        email=dict(type='str', required=False),
        name=dict(type='str', required=False),
        orgid=dict(type='int', required=False),
        grafana_url=dict(type='str', required=True),
        state=dict(type='str', required=False, default='present',
                   choices=['present', 'absent', 'update_password'])
    )
    choice_map = {
        "present": present_user,
        "absent": absent_user,
        "update_password": password_user,
    }
    module = AnsibleModule(
        argument_spec=module_args
    )
    if not HAS_REQUESTS:
        module.fail_json(msg=missing_required_lib('requests'))
    # AnsibleModule fills every declared option into module.params (missing
    # ones become None), so the original "'password' not in module.params"
    # check could never trigger.  Test the value instead.
    if module.params['state'] in ('present', 'update_password') and module.params['password'] is None:
        module.fail_json(
            msg="Want to create or update user but password is missing")
    is_error, has_changed, result = choice_map.get(
        module.params['state'])(module)
    if not is_error:
        module.exit_json(changed=has_changed, output=result)
    else:
        module.fail_json(msg=result)

View file

@ -0,0 +1,3 @@
yamllint==1.35.1
ansible-lint>=6.13.1, <25.0.0
pylint>=2.16.2,<4.0.0

View file

@ -0,0 +1,8 @@
---
collections:
- name: https://github.com/ansible-collections/community.general.git
type: git
- name: https://github.com/ansible-collections/community.grafana.git
type: git
- name: https://github.com/ansible-collections/ansible.posix.git
type: git

View file

@ -0,0 +1,71 @@
# Ansible role - Alloy
[![License](https://img.shields.io/github/license/grafana/grafana-ansible-collection)](LICENSE)
This Ansible role installs and configures [Alloy](https://grafana.com/docs/alloy/latest/), which can be used to collect traces, metrics, and logs.
This role is tailored for operating systems such as **RedHat**, **Rocky Linux**, **AlmaLinux**, **Ubuntu**, **Debian**, **macOS**, and **openSUSE**.
## Table of Contents
- [Requirements](#requirements)
- [Role Variables](#role-variables)
- [Playbook](#playbook)
## Requirements
- Ansible 2.13+
- `ansible.utils` collection is required. Additionally, you must install the `netaddr` Python library on the host where you are running Ansible (not on the target remote host) if it is not already present.
- `community.general` collection is required for macOS support
- **macOS**: Homebrew must be installed
## Role Variables
| Variable Name | Description | Default Value |
|-----------------------|----------------------------------------------------------------------|---------------------------------------------------------------------|
| `alloy_version` | The version of Alloy to download and deploy. Supported standard version "1.4.2" format or "latest". | `latest` |
| `alloy_uninstall` | If set to `true` will perform an uninstall instead of a deployment. | `false` |
| `alloy_expose_port` | By default, this is set to false. It supports only simple firewalld configurations. If set to true, a firewalld rule is added to expose the TCP alloy port. The Port is automatically extracted from the environment variable `alloy_env_file_vars` in CUSTOM_ARGS when --server.http.listen-addr=0.0.0.0:12345 is defined. If set to false, configuration is skipped. If the firewalld.service is not active, all firewalld tasks are skipped. | `false` |
| `alloy_user_groups` | Appends the alloy user to specific groups. | `[]` |
| `alloy_github_api_url` | The default Github API URL to check for the latest version available. | `"https://api.github.com/repos/grafana/alloy/releases/latest"` |
| `alloy_download_url_rpm` | The default download URL for the Alloy rpm package from GitHub. | `"https://github.com/grafana/alloy/releases/download/v{{ alloy_version }}/alloy-{{ alloy_version }}-1.{{ __alloy_arch }}.rpm"` |
| `alloy_download_url_deb` | The default download URL for the Alloy deb package from GitHub. | `"https://github.com/grafana/alloy/releases/download/v{{ alloy_version }}/alloy-{{ alloy_version }}-1.{{ __alloy_arch }}.deb"` |
| `alloy_readiness_check_use_https` | This boolean variable determines whether the readiness check for the Alloy server should use HTTPS or HTTP when validating the `/-/ready` endpoint. This variable does not enable TLS on the Alloy server itself. | `false` |
| `alloy_readiness_check_use_proxy` | This boolean variable determines whether the readiness check for the Alloy server should use a proxy when validating the `/-/ready` endpoint. If false, it will not use a proxy, even if one is defined in an environment variable on the target hosts. | `true` |
| `alloy_env_file_vars` | You can use environment variables to control the run-time behavior of Grafana Alloy. | `{}` |
| `alloy_systemd_override` | Systemd unit drop-in file used to override or extend the default configuration of a systemd unit. | `{}` |
| `alloy_config` | This is the configuration that sets up Alloy. Refer to the [configuration blocks](https://grafana.com/docs/alloy/latest/reference/config-blocks/) and [components](https://grafana.com/docs/alloy/latest/reference/components/) documentation for more details. Since the purpose of using Alloy varies, no default configuration is provided. ⚠️ **You must provide either `alloy_config` for single config or set `alloy_env_file_vars.CONFIG_FILE` for multi-config setup**. Note that if you use `alloy_env_file_vars.CONFIG_FILE`, the content of `alloy_config` will not be templated. It is expected that you manage the multi-config content using pre_tasks or with your own role. | `{}` |
## Dependencies
No Dependencies
## Playbook
```yaml
- name: Manage alloy service
hosts: all
become: true
vars:
# alloy_config: |
# Your Config Content
roles:
- role: grafana.grafana.alloy
```
- Playbook execution example
```shell
# Deploy Alloy
ansible-playbook function_alloy_play.yml
# Uninstall Alloy
ansible-playbook function_alloy_play.yml -e "alloy_uninstall=true"
```
## License
See [LICENSE](https://github.com/grafana/grafana-ansible-collection/blob/main/LICENSE)
## Author Information
- [Ishan Jain](https://github.com/ishanjainn)
- [VoidQuark](https://github.com/voidquark)

View file

@ -0,0 +1,36 @@
---
# defaults file for alloy
alloy_version: "latest"
alloy_uninstall: false
alloy_expose_port: false
alloy_github_api_url: "https://api.github.com/repos/grafana/alloy/releases/latest"
alloy_download_url_rpm: "https://github.com/grafana/alloy/releases/download/v{{ alloy_version }}/alloy-{{ alloy_version }}-1.{{ __alloy_arch }}.rpm"
alloy_download_url_deb: "https://github.com/grafana/alloy/releases/download/v{{ alloy_version }}/alloy-{{ alloy_version }}-1.{{ __alloy_arch }}.deb"
alloy_readiness_check_use_https: false
alloy_readiness_check_use_proxy: true
alloy_user_groups: []
# alloy_user_groups:
# - "systemd-journal"
alloy_env_file_vars: {}
# alloy_env_file_vars:
# CONFIG_FILE: "/custom/path"
# CUSTOM_ARGS: "--server.http.listen-addr=0.0.0.0:12345 --stability.level=public-preview --feature.community-components.enabled=true"
alloy_systemd_override: {}
# alloy_systemd_override: |
# [Service]
# User=root
alloy_config: {}
# alloy_config: |
# prometheus.scrape "default" {
# targets = [{"__address__" = "127.0.0.1:12345"}]
# forward_to = [prometheus.remote_write.prom.receiver]
# }
# prometheus.remote_write "prom" {
# endpoint {
# url = "http://mimir:9009/api/v1/push"
# }
# }

View file

@ -0,0 +1,17 @@
---
# handlers file for alloy
- name: Restart alloy
listen: "restart alloy"
ansible.builtin.systemd:
daemon_reload: true
name: alloy.service
state: restarted
enabled: true
when: not ansible_check_mode
- name: Restart alloy macos
listen: "restart alloy macos"
ansible.builtin.command: "brew services restart {{ __alloy_brew_package }}"
when:
- not ansible_check_mode
- ansible_facts['os_family'] == 'Darwin'

View file

@ -0,0 +1,34 @@
---
galaxy_info:
role_name: alloy
author: Ishan Jain, voidquark
description: Role to Install and Configure Grafana Alloy
license: "GPL-3.0-or-later"
min_ansible_version: "2.13"
platforms:
- name: EL
versions:
- "8"
- "9"
- name: Fedora
versions:
- all
- name: Debian
versions:
- all
- name: Ubuntu
versions:
- all
- name: macOS
versions:
- all
- name: opensuse
versions:
- all
galaxy_tags:
- alloy
- grafana
- observability
- monitoring
- opentelemetry
- telemetry

View file

@ -0,0 +1,17 @@
---
- name: Converge
hosts: all
vars:
alloy_version: "1.4.2"
alloy_config: |
prometheus.scrape "default" {
targets = [{"__address__" = "127.0.0.1:12345"}]
forward_to = [prometheus.remote_write.prom.receiver]
}
prometheus.remote_write "prom" {
endpoint {
url = "http://mimir:9009/api/v1/push"
}
}
roles:
- role: grafana.grafana.alloy

View file

@ -0,0 +1,20 @@
---
dependency:
name: galaxy
options:
ignore-errors: true
driver:
name: docker
platforms:
- name: instance
image: "geerlingguy/docker-${MOLECULE_DISTRO:-rockylinux8}-ansible:latest"
command: ${MOLECULE_DOCKER_COMMAND:-""}
volumes:
- /sys/fs/cgroup:/sys/fs/cgroup:rw
cgroupns_mode: host
privileged: true
pre_build_image: true
provisioner:
name: ansible
playbooks:
converge: converge.yml

View file

@ -0,0 +1,176 @@
---
- name: Obtain the latest version from the GitHub repo
when: alloy_version == "latest"
block:
- name: Scrape Github API endpoint to obtain latest Alloy version
ansible.builtin.uri:
url: "{{ alloy_github_api_url }}"
method: GET
body_format: json
become: false
delegate_to: localhost
run_once: true
check_mode: false
register: __github_latest_version
- name: Latest available Alloy version
ansible.builtin.set_fact:
alloy_version: "{{ __github_latest_version.json.tag_name | regex_replace('^v?(\\d+\\.\\d+\\.\\d+)$', '\\1') }}"
- name: Verify current deployed version
when: ansible_facts['os_family'] in ['RedHat', 'Debian', 'Suse']
block:
- name: Check if Alloy binary is present
ansible.builtin.stat:
path: "/usr/bin/alloy"
register: __already_deployed
- name: Obtain current deployed Alloy version
ansible.builtin.command:
cmd: "/usr/bin/alloy --version"
changed_when: false
register: __current_deployed_version
when: __already_deployed.stat.exists | bool
- name: Verify current deployed version on macOS
when: ansible_facts['os_family'] == 'Darwin'
block:
- name: Check if Alloy is installed via Homebrew
ansible.builtin.command: brew list --versions {{ __alloy_brew_package }}
register: __brew_alloy_version
failed_when: false
changed_when: false
- name: Extract current Alloy version on macOS
ansible.builtin.set_fact:
__current_deployed_version:
stdout: "{{ __brew_alloy_version.stdout }}"
when: __brew_alloy_version.rc == 0
- name: Include RedHat/Rocky setup
ansible.builtin.include_tasks:
file: setup-RedHat.yml
when: ansible_facts['os_family'] in ['RedHat', 'Rocky']
- name: Include Debian/Ubuntu setup
ansible.builtin.include_tasks:
file: setup-Debian.yml
when: ansible_facts['os_family'] == 'Debian'
- name: Include macOS/Darwin setup
ansible.builtin.include_tasks:
file: setup-Darwin.yml
when: ansible_facts['os_family'] == 'Darwin'
- name: Include SUSE setup
ansible.builtin.include_tasks:
file: setup-Suse.yml
when: ansible_facts['os_family'] == 'Suse'
- name: Alloy systemd override
when:
- alloy_systemd_override | length > 0
- ansible_facts['os_family'] in ['RedHat', 'Debian', 'Suse']
block:
- name: Ensure that Alloy systemd override path exist
ansible.builtin.file:
path: "/etc/systemd/system/alloy.service.d"
state: directory
owner: "root"
group: "root"
mode: "0750"
notify: restart alloy
- name: Template Alloy systemd override.conf - /etc/systemd/system/alloy.service.d/override.conf
ansible.builtin.template:
src: "override.conf.j2"
dest: "/etc/systemd/system/alloy.service.d/override.conf"
owner: "root"
group: "root"
mode: "0644"
notify: restart alloy
- name: Template Alloy env file - {{ __alloy_env_file }}
ansible.builtin.template:
src: "alloy.j2"
dest: "{{ __alloy_env_file }}"
owner: "root"
group: "root"
mode: "0644"
notify: restart alloy
when: ansible_facts['os_family'] in ['RedHat', 'Debian', 'Suse']
- name: Template Alloy config - /etc/alloy/config.alloy
ansible.builtin.template:
src: "config.alloy.j2"
dest: "/etc/alloy/config.alloy"
owner: "root"
group: "root"
mode: "0644"
when:
- alloy_config | length > 0
- alloy_env_file_vars.CONFIG_FILE is not defined
- ansible_facts['os_family'] in ['RedHat', 'Debian', 'Suse']
notify: restart alloy
# Task name fixed: the managed file is /etc/alloy/config.alloy (as in the
# path below and the template task above), not /etc/alloy/alloy.config.
- name: Ensure that /etc/alloy/config.alloy is absent when a custom configuration file/dir is specified in alloy_env_file_vars.CONFIG_FILE
  ansible.builtin.file:
    path: "/etc/alloy/config.alloy"
    state: absent
  when:
    - alloy_config | length < 1 or alloy_env_file_vars.CONFIG_FILE is defined
    - ansible_facts['os_family'] in ['RedHat', 'Debian', 'Suse']
- name: Add the Alloy system user to additional group
ansible.builtin.user:
name: "alloy"
groups: "{{ item }}"
system: true
append: true
create_home: false
state: present
loop: "{{ alloy_user_groups }}"
when:
- alloy_user_groups | length > 0
- ansible_facts['os_family'] in ['RedHat', 'Debian', 'Suse']
- name: Get firewalld state
ansible.builtin.systemd:
name: "firewalld"
register: __firewalld_service_state
when: ansible_facts['os_family'] in ['RedHat', 'Debian', 'Suse']
- name: Enable firewalld rule to expose Alloy tcp port {{ __alloy_server_http_listen_port }}
ansible.posix.firewalld:
immediate: true
permanent: true
port: "{{ __alloy_server_http_listen_port }}/tcp"
state: enabled
when:
- ansible_facts['os_family'] in ['RedHat', 'Debian', 'Suse']
- __firewalld_service_state.status.ActiveState == "active"
- alloy_expose_port | bool
- name: Flush handlers after deployment
ansible.builtin.meta: flush_handlers
- name: Ensure that Alloy is started (Linux)
ansible.builtin.systemd:
name: alloy.service
state: started
when:
- not ansible_check_mode
- ansible_facts['os_family'] in ['RedHat', 'Debian', 'Suse']
- name: Verify that Alloy URL is responding
ansible.builtin.uri:
url: "{{ alloy_readiness_check_use_https | ansible.builtin.ternary('https', 'http') }}://{{ __alloy_server_http_listen_address }}:{{ __alloy_server_http_listen_port }}/-/ready"
method: GET
use_proxy: "{{ alloy_readiness_check_use_proxy | bool }}"
register: __alloy_verify_url_status_code
retries: 5
delay: 8
until: __alloy_verify_url_status_code.status == 200
when:
- not ansible_check_mode
- ansible_facts['os_family'] in ['RedHat', 'Debian', 'Suse']

View file

@ -0,0 +1,19 @@
---
# tasks file for alloy
- name: Include OS specific variables
ansible.builtin.include_vars:
file: "{{ ansible_facts['os_family'] }}.yml"
- name: Preflight
ansible.builtin.include_tasks:
file: "preflight.yml"
- name: Deploy Alloy service
ansible.builtin.include_tasks:
file: "deploy.yml"
when: not alloy_uninstall
- name: Uninstall Alloy service
ansible.builtin.include_tasks:
file: "uninstall.yml"
when: alloy_uninstall

View file

@ -0,0 +1,26 @@
---
- name: Fail when alloy_config or alloy_env_file_vars.CONFIG_FILE is not defined
ansible.builtin.fail:
msg: Variable alloy_config or alloy_env_file_vars.CONFIG_FILE is required!
when:
- alloy_config | length < 1
- alloy_env_file_vars.CONFIG_FILE is not defined
- not alloy_uninstall
- name: Extract IP address and PORT from alloy_env_file_vars
when: alloy_env_file_vars.CUSTOM_ARGS is defined and alloy_env_file_vars.CUSTOM_ARGS | length > 0
block:
- name: Search for server.http.listen-addr string
ansible.builtin.set_fact:
__alloy_server_http_listen_addr_regex: "{{ alloy_env_file_vars.CUSTOM_ARGS | regex_search('--server.http.listen-addr=([\\d\\.]+):(\\d+)', '\\1', '\\2') or [] }}"
- name: Extract IP address and port
ansible.builtin.set_fact:
__alloy_server_http_listen_address: "{{ __alloy_server_http_listen_addr_regex[0] }}"
__alloy_server_http_listen_port: "{{ __alloy_server_http_listen_addr_regex[1] }}"
when: __alloy_server_http_listen_addr_regex | length > 0
# The ansible.utils.ipaddr filter returns False (not "") for an invalid
# address, so the original test `(addr | ipaddr) != ""` was always true
# and the assertion could never fail.  Assert truthiness instead.
- name: Assert that extracted IP address is valid
  ansible.builtin.assert:
    that:
      - __alloy_server_http_listen_address | ansible.utils.ipaddr
    fail_msg: "Extracted listen address '{{ __alloy_server_http_listen_address }}' is not a valid IP address"
  when: __alloy_server_http_listen_addr_regex | length > 0

View file

@ -0,0 +1,96 @@
---
- name: Check if Homebrew is installed
ansible.builtin.command: which brew
register: __brew_check
changed_when: false
failed_when: false
- name: Fail if Homebrew is not installed
ansible.builtin.fail:
msg: "Homebrew is required but not installed"
when: __brew_check.rc != 0
- name: Get Homebrew prefix
ansible.builtin.command: brew --prefix
register: __brew_prefix
changed_when: false
- name: Set Alloy config directory path
ansible.builtin.set_fact:
__alloy_config_path: "{{ __alloy_config_path_default }}"
- name: Add Grafana tap to Homebrew
community.general.homebrew_tap:
name: "{{ __alloy_brew_tap }}"
state: present
- name: Install Alloy via Homebrew
community.general.homebrew:
name: "{{ __alloy_brew_package }}"
state: present
update_homebrew: true
- name: Ensure Alloy config directory exists
ansible.builtin.file:
path: "{{ __alloy_config_path }}"
state: directory
owner: "{{ ansible_user_id }}"
group: "{{ ansible_user_gid }}"
mode: '0755'
- name: Template Alloy config
ansible.builtin.template:
src: "config.alloy.j2"
dest: "{{ __alloy_config_path }}/config.alloy"
owner: "{{ ansible_user_id }}"
group: "{{ ansible_user_gid }}"
mode: '0644'
backup: true
when: alloy_config | length > 0
notify: restart alloy macos
- name: Check if Alloy service is loaded
ansible.builtin.command: brew services list
register: __brew_services
changed_when: false
- name: Stop Alloy service if it exists (to clean up any issues)
ansible.builtin.command: "brew services stop {{ __alloy_brew_package }}"
register: __stop_result
failed_when: false
changed_when: "'Successfully stopped' in __stop_result.stdout"
when: "'alloy' in __brew_services.stdout"
- name: Start Alloy service
ansible.builtin.command: "brew services start {{ __alloy_brew_package }}"
when:
- "'alloy' not in __brew_services.stdout or 'started' not in __brew_services.stdout"
register: __service_start
failed_when: __service_start.rc != 0
- name: Restart Alloy service if already running
ansible.builtin.command: "brew services restart {{ __alloy_brew_package }}"
when:
- "'alloy' in __brew_services.stdout and 'started' in __brew_services.stdout"
register: __service_restart
failed_when: __service_restart.rc != 0
- name: Check final service status
ansible.builtin.command: brew services list
register: __final_brew_services
changed_when: false
- name: Verify Alloy installation
ansible.builtin.command: alloy --version
register: __alloy_version_output
changed_when: false
failed_when: false
- name: Display Alloy version
ansible.builtin.debug:
msg: "Alloy version: {{ __alloy_version_output.stdout }}"
when: __alloy_version_output.rc == 0
- name: Display service status
ansible.builtin.debug:
msg: "Alloy service status: {{ __final_brew_services.stdout_lines | select('match', '.*alloy.*') | list }}"

View file

@ -0,0 +1,7 @@
---
- name: APT - Install Alloy
ansible.builtin.apt:
deb: "{{ alloy_download_url_deb }}"
state: present
notify: restart alloy
when: __current_deployed_version.stdout is not defined or alloy_version not in __current_deployed_version.stdout

View file

@ -0,0 +1,8 @@
---
- name: DNF - Install Alloy from remote URL
ansible.builtin.package:
name: "{{ alloy_download_url_rpm }}"
state: present
disable_gpg_check: true
notify: restart alloy
when: __current_deployed_version.stdout is not defined or alloy_version not in __current_deployed_version.stdout

View file

@ -0,0 +1,8 @@
---
- name: Zypper - Install Alloy from remote URL
community.general.zypper:
name: "{{ alloy_download_url_rpm }}"
state: present
disable_gpg_check: true
notify: restart alloy
when: __current_deployed_version.stdout is not defined or alloy_version not in __current_deployed_version.stdout

View file

@ -0,0 +1,75 @@
---
- name: Stop Alloy service
ansible.builtin.systemd: # noqa ignore-errors
name: alloy
state: stopped
ignore_errors: true
- name: Stop Alloy service on macOS
ansible.builtin.command: "brew services stop {{ __alloy_brew_package }}"
when: ansible_facts['os_family'] == 'Darwin'
failed_when: false
- name: Uninstall Alloy rpm package
ansible.builtin.package:
name: "alloy"
state: absent
autoremove: true
when: ansible_facts['os_family'] in ['RedHat', 'Rocky']
- name: Uninstall Alloy deb package
ansible.builtin.apt:
name: "alloy"
state: absent
purge: true
when: ansible_facts['os_family'] == 'Debian'
- name: Uninstall Alloy via Homebrew
community.general.homebrew:
name: "{{ __alloy_brew_package }}"
state: absent
when: ansible_facts['os_family'] == 'Darwin'
- name: Uninstall Alloy rpm package (SUSE)
community.general.zypper:
name: "alloy"
state: absent
when: ansible_facts['os_family'] == 'Suse'
- name: Ensure that Alloy firewalld rule is not present - tcp port {{ __alloy_server_http_listen_port }}
ansible.posix.firewalld: # noqa ignore-errors
immediate: true
permanent: true
port: "{{ __alloy_server_http_listen_port }}/tcp"
state: disabled
ignore_errors: true
- name: Remove Alloy directories
ansible.builtin.file:
path: "{{ remove_me }}"
state: absent
loop:
- "/etc/alloy"
- "/etc/systemd/system/alloy.service.d"
- "/var/lib/alloy"
- "/etc/sysconfig/alloy"
- "/etc/default/alloy"
loop_control:
loop_var: remove_me
- name: Remove Alloy config directory on macOS
ansible.builtin.file:
path: "{{ __alloy_config_path_default }}"
state: absent
when: ansible_facts['os_family'] == 'Darwin'
- name: Remove the Alloy system user
ansible.builtin.user:
name: "alloy"
force: true
state: absent
- name: Remove Alloy system group
ansible.builtin.group:
name: "alloy"
state: absent

View file

@ -0,0 +1,10 @@
# Ansible Managed
{# Fall back to the packaged default config path unless the user supplies
   their own CONFIG_FILE via alloy_env_file_vars. #}
{% if alloy_env_file_vars.CONFIG_FILE is not defined or alloy_env_file_vars.CONFIG_FILE | length < 1 %}
CONFIG_FILE="/etc/alloy/config.alloy"
{% endif %}
RESTART_ON_UPGRADE=true
{% for key, value in alloy_env_file_vars.items() %}
{{ key }}="{{ value }}"
{% endfor %}

View file

@ -0,0 +1,3 @@
// Ansible Managed
{{ alloy_config }}

View file

@ -0,0 +1,3 @@
# Ansible Managed
{{ alloy_systemd_override }}

View file

@ -0,0 +1,5 @@
---
# macOS/Darwin specific variables
__alloy_brew_tap: "grafana/grafana"
__alloy_brew_package: "grafana/grafana/alloy"
__alloy_config_path_default: "{{ __brew_prefix.stdout | default('/opt/homebrew') }}"

View file

@ -0,0 +1,2 @@
---
__alloy_env_file: "/etc/default/alloy"

View file

@ -0,0 +1,2 @@
---
__alloy_env_file: "/etc/sysconfig/alloy"

View file

@ -0,0 +1,2 @@
---
__alloy_env_file: "/etc/sysconfig/alloy"

View file

@ -0,0 +1,9 @@
---
__alloy_server_http_listen_address: 127.0.0.1
__alloy_server_http_listen_port: 12345
__alloy_arch_map:
x86_64: 'amd64'
armv6l: 'arm'
armv7l: 'arm'
aarch64: 'arm64'
__alloy_arch: "{{ __alloy_arch_map[ansible_facts['architecture']] | default(ansible_facts['architecture']) }}"

View file

@ -0,0 +1,132 @@
<p><img src="https://grafana.com/blog/assets/img/blog/timeshift/grafana_release_icon.png" alt="grafana logo" title="grafana" align="right" height="60" /></p>
# Ansible Role: grafana.grafana.grafana
[![License](https://img.shields.io/badge/license-MIT%20License-brightgreen.svg)](https://opensource.org/licenses/MIT)
Provision and manage [Grafana](https://github.com/grafana/grafana) - platform for analytics and monitoring
## Requirements
- Ansible >= 2.9 (It might work on previous versions, but we cannot guarantee it)
- libselinux-python on deployer host (only when deployer machine has SELinux)
- Grafana >= 5.1 (for older Grafana versions use this role in version 0.10.1 or earlier)
- jmespath on deployer machine. If you are using Ansible from a Python virtualenv, install *jmespath* to the same virtualenv via pip.
## Role Variables
All variables which can be overridden are stored in [defaults/main.yml](defaults/main.yml) file as well as in table below.
| Name | Default Value | Description |
| -------------- | ------------- | -----------------------------------|
| `grafana_use_provisioning` | true | Use Grafana provisioning capability when possible (**grafana_version=latest will assume >= 5.0**). |
| `grafana_provisioning_synced` | false | Ensure no previously provisioned dashboards are kept if not referenced anymore. |
| `grafana_version` | latest | Grafana package version |
| `grafana_manage_repo` | true | Manage package repository (or don't) |
| `grafana_yum_repo` | https://rpm.grafana.com | Yum repository URL |
| `grafana_yum_key` | https://rpm.grafana.com/gpg.key | Yum repository gpg key |
| `grafana_rhsm_subscription` | | rhsm subscription name (redhat subscription-manager) |
| `grafana_rhsm_repo` | | rhsm repository name (redhat subscription-manager) |
| `grafana_apt_release_channel` | stable | Apt release channel (stable or beta) |
| `grafana_apt_arch` | {{ 'arm64' if ansible_facts['architecture'] == 'aarch64' else 'amd64' }} | Apt architecture |
| `grafana_apt_repo` | deb [arch={{ grafana_apt_arch }} signed-by=/usr/share/keyrings/grafana.asc] https://apt.grafana.com/ {{ grafana_apt_release_channel }} main | Apt repository string |
| `grafana_apt_key` | https://apt.grafana.com/gpg.key | Apt repository gpg key |
| `grafana_ini.instance_name` | {{ ansible_facts['fqdn'] \| default(ansible_host) \| default(inventory_hostname) }} | Grafana instance name |
| `grafana_ini.paths.logs` | /var/log/grafana | Path to logs directory |
| `grafana_ini.paths.data` | /var/lib/grafana | Path to database directory |
| `grafana_ini.server.http_addr` | 0.0.0.0 | Address on which Grafana listens |
| `grafana_ini.server.http_port` | 3000 | port on which Grafana listens |
| `grafana_cap_net_bind_service` | false | Enables the use of ports below 1024 without root privileges by leveraging the 'capabilities' of the linux kernel. read: http://man7.org/linux/man-pages/man7/capabilities.7.html |
| `grafana_ini.server.root_url` | "http://{{ grafana_ini.server.http_addr }}:{{ grafana_ini.server.http_port }}" | Full URL used to access Grafana from a web browser |
| `grafana_api_url` | "{{ grafana_url }}" | URL used for API calls in provisioning if different from public URL. See [this issue](https://github.com/cloudalchemy/ansible-grafana/issues/70). |
| `grafana_ini.server.domain` | "{{ ansible_facts['fqdn'] \| default(ansible_host) \| default('localhost') }}" | setting is only used as a part of the `root_url` option. Useful when using GitHub or Google OAuth |
| `grafana_ini.server` | { protocol: http, enforce_domain: false, socket: "", cert_key: "", cert_file: "", enable_gzip: false, static_root_path: public, router_logging: false } | [server](http://docs.grafana.org/installation/configuration/#server) configuration section |
| `grafana_ini.security` | { admin_user: admin, admin_password: "" } | [security](http://docs.grafana.org/installation/configuration/#security) configuration section |
| `grafana_ini.database` | { type: sqlite3 } | [database](http://docs.grafana.org/installation/configuration/#database) configuration section |
| `grafana_ini.users` | { allow_sign_up: false, auto_assign_org_role: Viewer, default_theme: dark } | [users](http://docs.grafana.org/installation/configuration/#users) configuration section |
| `grafana_ini.auth` | {} | [authorization](http://docs.grafana.org/installation/configuration/#auth) configuration section |
| `grafana_ldap` | {} | [ldap](http://docs.grafana.org/installation/ldap/) configuration section. group_mappings are expanded, see defaults for example |
| `grafana_dashboards` | [] | List of dashboards which should be imported |
| `grafana_dashboards_dir` | "dashboards" | Path to a local directory containing dashboards files in `json` format |
| `grafana_datasources` | [] | List of datasources which should be configured |
| `grafana_environment` | {} | Optional Environment param for Grafana installation, useful ie for setting http_proxy |
| `grafana_plugins` | [] | List of Grafana plugins which should be installed |
| `grafana_alert_notifications` | [] | List of alert notification channels to be created, updated, or deleted |
Data source example:
```yaml
grafana_datasources:
- name: prometheus
type: prometheus
access: proxy
url: 'http://{{ prometheus_web_listen_address }}'
basicAuth: false
```
Dashboard example:
```yaml
grafana_dashboards:
- dashboard_id: 111
revision_id: 1
datasource: prometheus
```
Alert notification channel example:
**NOTE**: setting the variable `grafana_alert_notifications` will only come into
effect when `grafana_use_provisioning` is `true`. That means the new
provisioning system using config files, which is available starting from Grafana
v5.0, needs to be in use.
```yaml
grafana_alert_notifications:
notifiers:
- name: Channel 1
type: email
uid: channel1
is_default: false
send_reminder: false
settings:
addresses: "example@example.com"
autoResolve: true
delete_notifiers:
- name: Channel 2
uid: channel2
```
## Supported CPU Architectures
Historically packages were taken from different channels according to CPU architecture. Specifically, armv6/armv7 and aarch64/arm64 packages were via [unofficial packages distributed by fg2it](https://github.com/fg2it/grafana-on-raspberry). Now that Grafana publishes official ARM builds, all packages are taken from the official [Debian/Ubuntu](http://docs.grafana.org/installation/debian/#installing-on-debian-ubuntu) or [RPM](http://docs.grafana.org/installation/rpm/) packages.
## Example
### Playbook
Fill in the admin password field with your choice, the Grafana web page won't ask to change it at the first login.
```yaml
- hosts: all
roles:
- role: grafana.grafana.grafana
vars:
grafana_ini:
security:
admin_user: admin
admin_password: enter_your_secure_password
```
## Local Testing
The preferred way of locally testing the role is to use Docker and [molecule](https://github.com/ansible-community/molecule). You will have to install Docker on your system.
For more information about molecule go to their [docs](http://molecule.readthedocs.io/en/latest/).
## License
This project is licensed under MIT License. See [LICENSE](/LICENSE) for more details.
## Credits
This role was migrated from [cloudalchemy.grafana](https://github.com/cloudalchemy/ansible-grafana).

View file

@ -0,0 +1,211 @@
---
grafana_version: latest
grafana_manage_repo: true
grafana_yum_repo: "https://rpm.grafana.com"
grafana_yum_key: "https://rpm.grafana.com/gpg.key"
grafana_rhsm_subscription: ""
grafana_rhsm_repo: ""
grafana_apt_release_channel: stable
grafana_apt_arch: "{{ 'arm64' if ansible_facts['architecture'] == 'aarch64' else 'amd64' }}"
grafana_apt_repo_uri: "https://apt.grafana.com/"
grafana_apt_repo: "deb [arch={{ grafana_apt_arch }} signed-by=/usr/share/keyrings/grafana.asc] {{ grafana_apt_repo_uri }} {{ grafana_apt_release_channel }} main"
grafana_apt_key: "https://apt.grafana.com/gpg.key"
grafana_apt_name: "grafana"
# Should we use the provisioning capability when possible (provisioning require grafana >= 5.0)
grafana_use_provisioning: true
# Should the provisioning be kept synced. If true, previous provisioned objects will be removed if not referenced anymore.
grafana_provisioning_synced: false
# Should we provision dashboards by following the files structure. This sets the foldersFromFilesStructure option
grafana_provisioning_dashboards_from_file_structure: false
# To enable the use of ports below 1024 for unprivileged processes linux needs to set CAP_NET_BIND_SERVICE.
# This has some security implications, and should be a conscious choice.
# Get informed by reading: http://man7.org/linux/man-pages/man7/capabilities.7.html
grafana_cap_net_bind_service: false
grafana_ini_default:
instance_name: "{{ ansible_facts['fqdn'] | default(ansible_host) | default(inventory_hostname) }}"
paths:
logs: "/var/log/grafana"
data: "/var/lib/grafana"
server:
http_addr: "0.0.0.0"
http_port: 3000
# External Grafana address. Variable maps to "root_url" in grafana server section
    # root_url: "http://{{ grafana_ini.server.http_addr }}:{{ grafana_ini.server.http_port }}"
domain: "{{ ansible_facts['fqdn'] | default(ansible_host) | default('localhost') }}"
# Additional options for grafana "server" section
# This section WILL omit options for: http_addr, http_port, domain, and root_url, as those settings are set by variables listed before
protocol: http
enforce_domain: false
socket: ""
cert_key: ""
cert_file: ""
enable_gzip: false
static_root_path: public
router_logging: false
serve_from_sub_path: false
# Variables correspond to ones in grafana.ini configuration file
# Security
security:
admin_user: admin
admin_password: ""
# secret_key: ""
# login_remember_days: 7
# cookie_username: grafana_user
# cookie_remember_name: grafana_remember
# disable_gravatar: true
# data_source_proxy_whitelist:
# Database setup
database:
type: sqlite3
# host: 127.0.0.1:3306
# name: grafana
# user: root
# password: ""
# url: ""
# ssl_mode: disable
# path: grafana.db
# max_idle_conn: 2
# max_open_conn: ""
# log_queries: ""
# User management and registration
users:
allow_sign_up: false
# allow_org_create: true
# auto_assign_org: true
auto_assign_org_role: Viewer
# login_hint: "email or username"
default_theme: dark
# external_manage_link_url: ""
# external_manage_link_name: ""
# external_manage_info: ""
# grafana authentication mechanisms
auth: {}
# disable_login_form: false
# oauth_auto_login: false
# disable_signout_menu: false
# signout_redirect_url: ""
# anonymous:
# org_name: "Main Organization"
# org_role: Viewer
# ldap:
# config_file: "/etc/grafana/ldap.toml"
# allow_sign_up: false
# basic:
# enabled: true
grafana_api_url: "{{ grafana_ini.server.root_url }}"
grafana_ldap: {}
# verbose_logging: false
# servers:
# host: 127.0.0.1
# port: 389 # 636 for SSL
# use_ssl: false
# start_tls: false
# ssl_skip_verify: false
# root_ca_cert: /path/to/certificate.crt
# bind_dn: "cn=admin,dc=grafana,dc=org"
# bind_password: grafana
# search_filter: "(cn=%s)" # "(sAMAccountName=%s)" on AD
# search_base_dns:
# - "dc=grafana,dc=org"
# group_search_filter: "(&(objectClass=posixGroup)(memberUid=%s))"
# group_search_base_dns:
# - "ou=groups,dc=grafana,dc=org"
# attributes:
# name: givenName
# surname: sn
# username: sAMAccountName
# member_of: memberOf
# email: mail
# group_mappings:
# - name: Main Org.
# id: 1
# groups:
# - group_dn: "cn=admins,ou=groups,dc=grafana,dc=org"
# org_role: Admin
# - group_dn: "cn=editors,ou=groups,dc=grafana,dc=org"
# org_role: Editor
# - group_dn: "*"
# org_role: Viewer
# - name: Alternative Org
# id: 2
# groups:
# - group_dn: "cn=alternative_admins,ou=groups,dc=grafana,dc=org"
# org_role: Admin
#######
# Plugins to install from https://grafana.com/plugins
grafana_plugins: []
# - raintank-worldping-app
# Dashboards from https://grafana.com/dashboards
grafana_dashboards: []
# - dashboard_id: '4271'
# revision_id: '3'
# datasource: 'Prometheus'
# - dashboard_id: '1860'
# revision_id: '4'
# datasource: 'Prometheus'
# - dashboard_id: '358'
# revision_id: '1'
# datasource: 'Prometheus'
grafana_dashboards_dir: "dashboards"
# Alert notification channels to configure
grafana_alert_notifications: []
# - name: "Email Alert"
# type: "email"
# uid: channel1
# is_default: true
# settings:
# addresses: "example@example.com"
# Alert resources channels to configure
grafana_alert_resources: {}
# Datasources to configure
grafana_datasources: []
# - name: "Prometheus"
# type: "prometheus"
# access: "proxy"
# url: "http://prometheus.mydomain"
# basicAuth: true
# basicAuthUser: "admin"
# basicAuthPassword: "password"
# isDefault: true
# jsonData:
# tlsAuth: false
# tlsAuthWithCACert: false
# tlsSkipVerify: true
# API keys to configure
grafana_api_keys: []
# - name: "admin"
# role: "Admin"
# - name: "viewer"
# role: "Viewer"
# - name: "editor"
# role: "Editor"
# The location where the keys should be stored.
grafana_api_keys_dir: "{{ lookup('env', 'HOME') }}/grafana/keys"
grafana_environment: {}

View file

@ -0,0 +1,48 @@
---
- name: "Restart grafana"
ansible.builtin.service:
name: grafana-server
state: restarted
become: true
listen: "restart_grafana"
tags:
- grafana_run
- name: "Set privileges on provisioned dashboards"
ansible.builtin.file:
path: "{{ grafana_ini.paths.data }}/dashboards"
recurse: true
owner: "grafana"
group: "grafana"
mode: "u=rwX,g=rX,o=rX"
become: true
listen: "provisioned dashboards changed"
- name: "Set privileges on provisioned dashboards directory"
ansible.builtin.file:
path: "{{ grafana_ini.paths.data }}/dashboards"
state: "directory"
recurse: false
mode: "0755"
become: true
listen: "provisioned dashboards changed"
# Collect every subdirectory under the provisioned dashboards tree so the
# follow-up handler can normalize their permissions.
- name: "Find dashboards subdirectories"
  ansible.builtin.find:
    paths: "{{ grafana_ini.paths.data }}/dashboards"
    # Canonical boolean instead of YAML 1.1 "yes" (ansible-lint/yamllint
    # "truthy" rule, enforced by this repo's production lint profile).
    recurse: true
    file_type: directory
  register: __dashboards_subdirs
  become: true
  listen: "provisioned dashboards changed"
- name: "Set privileges on provisioned dashboards sub-directories"
ansible.builtin.file:
path: "{{ item }}"
state: "directory"
recurse: false
mode: "0755"
with_items:
- "{{ __dashboards_subdirs.files | map(attribute='path') | list }}"
become: true
listen: "provisioned dashboards changed"

View file

@ -0,0 +1,31 @@
---
# Ansible Galaxy metadata for the grafana.grafana.grafana role.
galaxy_info:
  author: "Grafana"
  description: "Grafana - platform for analytics and monitoring"
  # NOTE(review): the role's README badge and License section advertise the
  # MIT license, while this metadata declares GPL-3.0-or-later — confirm
  # which one is authoritative and make them agree.
  license: "GPL-3.0-or-later"
  min_ansible_version: "2.9"
  # Platforms/versions the role is advertised to support on Galaxy.
  platforms:
    - name: Ubuntu
      versions:
        - bionic
        - xenial
    - name: Debian
      versions:
        - stretch
        - buster
    - name: EL
      versions:
        - "7"
        - "8"
    - name: Fedora
      versions:
        - "30"
        - "31"
  galaxy_tags:
    - grafana
    - dashboard
    - alerts
    - alerting
    - presentation
    - monitoring
    - metrics

View file

@ -0,0 +1,107 @@
---
- name: "Run role"
hosts: all
any_errors_fatal: true
roles:
- grafana.grafana.grafana
vars:
grafana_version: 6.2.5
grafana_ini:
security:
admin_user: admin
admin_password: "password"
server:
http_addr: "127.0.0.1"
auth:
login_maximum_inactive_lifetime_days: 42
disable_login_form: false
oauth_auto_login: false
disable_signout_menu: false
signout_redirect_url: ""
anonymous:
org_name: "Main Organization"
org_role: Viewer
ldap:
config_file: "/etc/grafana/ldap.toml"
allow_sign_up: false
basic:
enabled: true
log:
mode: syslog
level: warn
grafana_ldap:
verbose_logging: false
servers:
host: 127.0.0.1
port: 389
use_ssl: false
start_tls: false
ssl_skip_verify: false
root_ca_cert: /path/to/certificate.crt
bind_dn: "cn=admin,dc=grafana,dc=org"
bind_password: grafana
search_filter: "(cn=%s)"
search_base_dns:
- "dc=grafana,dc=org"
group_search_filter: "(&(objectClass=posixGroup)(memberUid=%s))"
group_search_base_dns:
- "ou=groups,dc=grafana,dc=org"
attributes:
name: givenName
surname: sn
username: sAMAccountName
member_of: memberOf
email: mail
group_mappings:
- name: "Main Organization"
id: 1
groups:
- group_dn: "cn=admins,ou=groups,dc=grafana,dc=org"
org_role: Admin
- group_dn: "cn=editors,ou=groups,dc=grafana,dc=org"
org_role: Editor
- group_dn: "*"
org_role: Viewer
- name: "Alternative Org"
id: 2
groups:
- group_dn: "cn=alternative_admins,ou=groups,dc=grafana,dc=org"
org_role: Admin
grafana_api_keys:
- name: "admin"
role: "Admin"
- name: "viewer"
role: "Viewer"
- name: "editor"
role: "Editor"
grafana_api_keys_dir: "/tmp/grafana/keys"
grafana_plugins:
- raintank-worldping-app
grafana_alert_notifications:
notifiers:
- name: "Email Alert"
type: "email"
uid: notifier1
is_default: true
settings:
addresses: "example@example.com"
grafana_dashboards:
- dashboard_id: '1860'
revision_id: '4'
datasource: 'Prometheus'
- dashboard_id: '358'
revision_id: '1'
datasource: 'Prometheus'
grafana_datasources:
- name: "Prometheus"
type: "prometheus"
access: "proxy"
url: "http://prometheus.mydomain"
basicAuth: true
basicAuthUser: "admin"
basicAuthPassword: "password"
isDefault: true
jsonData:
tlsAuth: false
tlsAuthWithCACert: false
tlsSkipVerify: true

View file

@ -0,0 +1,52 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
def test_directories(host):
    """Verify that the grafana directories and config files were created."""
    expected_directories = (
        "/etc/grafana",
        "/var/log/grafana",
        "/var/lib/grafana",
        "/var/lib/grafana/dashboards",
        "/var/lib/grafana/plugins",
        "/var/lib/grafana/plugins/raintank-worldping-app",
    )
    expected_files = (
        "/etc/grafana/grafana.ini",
        "/etc/grafana/ldap.toml",
    )
    for path in expected_directories:
        entry = host.file(path)
        assert entry.exists
        assert entry.is_directory
    for path in expected_files:
        entry = host.file(path)
        assert entry.exists
        assert entry.is_file
def test_service(host):
s = host.service("grafana-server")
# assert s.is_enabled
assert s.is_running
def test_packages(host):
p = host.package("grafana")
assert p.is_installed
assert p.version == "6.2.5"
def test_socket(host):
assert host.socket("tcp://127.0.0.1:3000").is_listening
def test_custom_auth_option(host):
f = host.file("/etc/grafana/grafana.ini")
assert f.contains("login_maximum_inactive_lifetime_days = 42")

View file

@ -0,0 +1,11 @@
---
- name: "Run role"
hosts: all
any_errors_fatal: true
roles:
- grafana.grafana.grafana
vars:
grafana_ini:
security:
admin_user: admin
admin_password: password

View file

@ -0,0 +1,50 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
def test_directories(host):
dirs = [
"/etc/grafana",
"/var/log/grafana",
"/var/lib/grafana",
"/var/lib/grafana/dashboards",
"/var/lib/grafana/plugins"
]
files = [
"/etc/grafana/grafana.ini"
]
for directory in dirs:
d = host.file(directory)
assert d.is_directory
assert d.exists
for file in files:
f = host.file(file)
assert f.exists
assert f.is_file
def test_service(host):
s = host.service("grafana-server")
# assert s.is_enabled
assert s.is_running
def test_packages(host):
p = host.package("grafana")
assert p.is_installed
def test_socket(host):
assert host.socket("tcp://0.0.0.0:3000").is_listening
def test_yum_repo(host):
if host.system_info.distribution in ['centos', 'redhat', 'fedora']:
f = host.file("/etc/yum.repos.d/grafana.repo")
assert f.exists

View file

@ -0,0 +1,43 @@
---
# Create Grafana API keys through the HTTP API and persist each newly
# created key to a per-host file on the controller so other automation can
# consume it.
- name: "Ensure grafana key directory exists"
  ansible.builtin.file:
    path: "{{ grafana_api_keys_dir }}/{{ inventory_hostname }}"
    state: directory
    mode: "0755"
  become: false
  delegate_to: localhost
- name: "Check api key list"
  ansible.builtin.uri:
    url: "{{ grafana_api_url }}/api/auth/keys"
    user: "{{ grafana_ini.security.admin_user }}"
    password: "{{ grafana_ini.security.admin_password }}"
    force_basic_auth: true
    return_content: true
  register: __existing_api_keys
  # Hide credentials from logs, except in CI where the output aids debugging.
  no_log: "{{ 'false' if lookup('env', 'CI') else 'true' }}"
- name: "Create grafana api keys"
  ansible.builtin.uri:
    url: "{{ grafana_api_url }}/api/auth/keys"
    user: "{{ grafana_ini.security.admin_user }}"
    password: "{{ grafana_ini.security.admin_password }}"
    force_basic_auth: true
    method: POST
    body_format: json
    body: "{{ item | to_json }}"
  loop: "{{ grafana_api_keys }}"
  register: __new_api_keys
  no_log: "{{ 'false' if lookup('env', 'CI') else 'true' }}"
  # Idempotency: only POST keys whose name is not already on the server.
  when: "((__existing_api_keys['json'] | selectattr('name', 'equalto', item['name'])) | list) | length == 0"
- name: "Create api keys file to allow the keys to be seen and used by other automation"
  ansible.builtin.copy:
    dest: "{{ grafana_api_keys_dir }}/{{ inventory_hostname }}/{{ item['item']['name'] }}.key"
    content: "{{ item['json']['key'] }}"
    backup: false
    mode: "0644"
  loop: "{{ __new_api_keys['results'] }}"
  become: false
  delegate_to: localhost
  # Results skipped by the previous task's `when` have no 'json' payload.
  when: "item['json'] is defined"

View file

@ -0,0 +1,107 @@
---
- name: "Ensure grafana directories exist"
ansible.builtin.file:
path: "{{ item.path }}"
state: "directory"
owner: "{{ item.owner | default('root') }}"
group: "{{ item.group | default('grafana') }}"
mode: "{{ item.mode | default('0755') }}"
loop:
- path: "/etc/grafana"
- path: "/etc/grafana/datasources"
- path: "/etc/grafana/provisioning"
- path: "/etc/grafana/provisioning/datasources"
- path: "/etc/grafana/provisioning/dashboards"
- path: "/etc/grafana/provisioning/notifiers"
- path: "/etc/grafana/provisioning/notification"
- path: "/etc/grafana/provisioning/plugins"
- path: "/etc/grafana/provisioning/alerting"
- path: "{{ grafana_ini.paths.logs }}"
owner: grafana
- path: "{{ grafana_ini.paths.data }}"
owner: grafana
- path: "{{ grafana_ini.paths.data }}/dashboards"
owner: grafana
- path: "{{ grafana_ini.paths.data }}/plugins"
owner: grafana
- name: "Create grafana main configuration file"
ansible.builtin.template:
src: "grafana.ini.j2"
dest: "/etc/grafana/grafana.ini"
owner: "root"
group: "grafana"
mode: "0640"
no_log: "{{ 'false' if lookup('env', 'CI') else 'true' }}"
notify: restart_grafana
- name: "Create grafana LDAP configuration file"
ansible.builtin.template:
src: "ldap.toml.j2"
dest: "{{ grafana_ini.auth.ldap.config_file | default('/etc/grafana/ldap.toml') }}"
owner: "root"
group: "grafana"
mode: "0640"
no_log: "{{ 'false' if lookup('env', 'CI') else 'true' }}"
notify: restart_grafana
when:
- "'ldap' in grafana_ini.auth"
- "'enabled' not in grafana_ini.auth.ldap or grafana_ini.auth.ldap.enabled"
- name: "Enable grafana socket"
when:
- "grafana_ini.server.protocol is defined and grafana_ini.server.protocol == 'socket'"
- "grafana_ini.server.socket | dirname != '/var/run'"
block:
- name: "Create grafana socket directory"
ansible.builtin.file:
path: "{{ grafana_ini.server.socket | dirname }}"
state: "directory"
mode: "0775"
owner: "grafana"
group: "grafana"
- name: "Ensure grafana socket directory created on startup"
ansible.builtin.template:
src: "tmpfiles.j2"
dest: "/etc/tmpfiles.d/grafana.conf"
owner: "root"
group: "root"
mode: "0644"
- name: "Enable grafana to ports lower than port 1024"
community.general.capabilities:
path: /usr/sbin/grafana-server
capability: CAP_NET_BIND_SERVICE+ep
state: present
when:
- "grafana_ini.server.http_port | int <= 1024"
- "grafana_cap_net_bind_service"
- name: Create a directory for overrides.conf unit file if it does not exist
ansible.builtin.file:
path: /etc/systemd/system/grafana-server.service.d
state: directory
mode: '0755'
when:
- "grafana_ini.server.http_port | int <= 1024"
- "grafana_cap_net_bind_service"
- name: "Enable grafana to ports lower than port 1024 in systemd unitfile"
ansible.builtin.blockinfile:
path: /etc/systemd/system/grafana-server.service.d/overrides.conf
create: true
block: |
[Service]
AmbientCapabilities=CAP_NET_BIND_SERVICE
CapabilityBoundingSet=CAP_NET_BIND_SERVICE
when:
- "grafana_ini.server.http_port | int <= 1024"
- "grafana_cap_net_bind_service"
- name: "Enable and start Grafana systemd unit"
ansible.builtin.systemd:
name: "grafana-server"
enabled: true
state: started
daemon_reload: true

View file

@ -0,0 +1,245 @@
---
- name: "Create local grafana dashboard directory"
become: false
delegate_to: localhost
run_once: true
ansible.builtin.tempfile:
state: directory
register: __tmp_dashboards
changed_when: false
check_mode: false
- name: "Download grafana.net dashboards"
become: false
delegate_to: localhost
when:
- not ansible_check_mode
- "grafana_dashboards | length > 0"
block:
- name: "Get latest revision id"
ansible.builtin.uri:
url: "https://grafana.com/api/dashboards/{{ item.dashboard_id }}"
method: GET
return_content: yes
register: __dashboard_info
loop: "{{ grafana_dashboards }}"
when: item.dashboard_url is not defined
- name: "Extract revision_id if not defined in grafana_dashboards"
ansible.builtin.set_fact:
__dashboards_with_revision: >-
{{
__dashboard_info.results | map(attribute='json.revision')
| map('default', 1) | map('community.general.dict_kv', 'revision_id')
| zip(grafana_dashboards) | map('combine')
}}
- name: "Download grafana dashboard from grafana.net to local directory"
ansible.builtin.get_url:
url: "{{ item.dashboard_url if item.dashboard_url is defined else
('https://grafana.com/api/dashboards/' ~ item.dashboard_id ~
'/revisions/' ~ item.revision_id | default(1) ~ '/download') }}"
dest: "{{ __tmp_dashboards.path }}/{{ item.dashboard_id ~ '.json'
if item.dashboard_id is defined else item.dashboard_url | basename}}"
mode: "0644"
register: __download_dashboards
until: "__download_dashboards is succeeded"
retries: 5
delay: 2
changed_when: false
loop: "{{ __dashboards_with_revision }}"
# As noted in [1] an exported dashboard replaces the exporter's datasource
# name with a representative name, something like 'DS_GRAPHITE'. The name
# is different for each datasource plugin, but always begins with 'DS_'.
# In the rest of the data, the same name is used, but captured in braces,
# for example: '${DS_GRAPHITE}'.
#
# [1] http://docs.grafana.org/reference/export_import/#import-sharing-with-grafana-2-x-or-3-0
#
# The data structure looks (massively abbreviated) something like:
#
# "name": "DS_GRAPHITE",
# "datasource": "${DS_GRAPHITE}",
#
# If we import the downloaded dashboard verbatim, it will not automatically
# be connected to the data source like we want it. The Grafana UI expects
# us to do the final connection by hand, which we do not want to do.
# So, in the below task we ensure that we replace instances of this string
# with the data source name we want.
# To make sure that we're not being too greedy with the regex replacement
# of the data source to use for each dashboard that's uploaded, we make the
# regex match very specific by using the following:
#
# 1. Literal boundaries for " on either side of the match.
# 2. Non-capturing optional group matches for the ${} bits which may, or
# or may not, be there..
# 3. A case-sensitive literal match for DS .
# 4. A one-or-more case-sensitive match for the part that follows the
# underscore, with only A-Z, 0-9 and - or _ allowed.
#
# This regex can be tested and understood better by looking at the
# matches and non-matches in https://regex101.com/r/f4Gkvg/6
- name: "Set the correct data source name in the dashboard"
ansible.builtin.replace:
dest: "{{ item.dest }}"
regexp: '"(?:\${)?DS_[A-Z0-9_-]+(?:})?"'
replace: '"{{ item.item.datasource }}"'
changed_when: false
loop: "{{ __download_dashboards.results }}"
loop_control:
label: "{{ item.item }}"
- name: "Import grafana dashboards via api"
community.grafana.grafana_dashboard:
grafana_url: "{{ grafana_api_url }}"
grafana_user: "{{ grafana_ini.security.admin_user }}"
grafana_password: "{{ grafana_ini.security.admin_password }}"
path: "{{ item }}"
commit_message: "Updated by ansible role {{ ansible_role_name }}"
state: present
overwrite: true
no_log: "{{ 'false' if lookup('env', 'CI') else 'true' }}"
with_fileglob:
- "{{ __tmp_dashboards.path }}/*"
- "{{ grafana_dashboards_dir }}/*.json"
when: "not grafana_use_provisioning"
- name: "Import grafana dashboards through provisioning"
when: grafana_use_provisioning
block:
- name: "Create/Update dashboards file (provisioning)"
ansible.builtin.copy:
dest: "/etc/grafana/provisioning/dashboards/ansible.yml"
content: |
apiVersion: 1
providers:
- name: 'default'
orgId: 1
folder: ''
type: file
options:
path: "{{ grafana_ini.paths.data }}/dashboards"
foldersFromFilesStructure: {{ grafana_provisioning_dashboards_from_file_structure | bool | to_nice_yaml }}
backup: false
owner: root
group: grafana
mode: "0640"
become: true
notify: restart_grafana
- name: "Register previously copied dashboards"
ansible.builtin.find:
paths: "{{ grafana_ini.paths.data }}/dashboards"
hidden: true
recurse: true
patterns:
- "*.json"
register: __dashboards_present
when: grafana_provisioning_synced | bool
- name: "Register previously created folders"
ansible.builtin.find:
paths: "{{ grafana_ini.paths.data }}/dashboards/"
recurse: yes
file_type: directory
register: __dashboards_dir_present
become: true
when: grafana_provisioning_synced
- name: "Import grafana.net dashboards"
ansible.builtin.copy:
src: "{{ item }}"
dest: "{{ grafana_ini.paths.data }}/dashboards/{{ item | basename }}"
owner: root
group: grafana
mode: "0644"
with_fileglob:
- "{{ __tmp_dashboards.path }}/*"
become: true
register: __dashboards_copied
notify: "provisioned dashboards changed"
when: not ansible_check_mode
- name: "Verify if custom grafana dashboards dir exist"
ansible.builtin.stat:
path: "{{ grafana_dashboards_dir }}"
delegate_to: localhost
become: false
register: __grafana_custom_dashboards_dir
- name: "Import custom grafana dashboards"
when: __grafana_custom_dashboards_dir.stat.exists
block:
- name: "Find which directories to create"
ansible.builtin.find:
paths: "{{ grafana_dashboards_dir }}/"
recurse: yes
file_type: directory
register: __dashboards_subdirs
delegate_to: localhost
become: false
- name: "Create dashboard folders"
ansible.builtin.file:
path: "{{ grafana_ini.paths.data }}/dashboards/{{ item }}"
state: "directory"
recurse: false
owner: "grafana"
group: "grafana"
mode: "0755"
loop: "{{ __dashboards_subdirs.files | map(attribute='path') | sort | regex_replace(grafana_dashboards_dir + '/*', '') }}"
become: true
register: __dashboards_dir_created
when: not ansible_check_mode
        # NOTE(review): "__found_dashboards" is never registered anywhere in
        # this file — the only find in this block registers
        # "__dashboards_subdirs" (directories). With the default([]) fallback
        # this loop is empty, so custom dashboard files are silently never
        # copied. It looks like a find task registering the dashboard *files*
        # is missing — confirm against the role's history.
        - name: "Copy dashboard files"
          ansible.builtin.copy:
            src: "{{ item.path }}"
            dest: '{{ grafana_ini.paths.data }}/dashboards/{{ item.path | regex_replace(grafana_dashboards_dir + "/*", "") }}'
            owner: root
            group: grafana
            mode: "0644"
          loop: "{{ __found_dashboards.files | default([]) }}"
          become: true
          register: __dashboards_copied_custom
          notify: "provisioned dashboards changed"
          when: not ansible_check_mode
- name: "Register present and copied folders list"
ansible.builtin.set_fact:
__folders_present_list: "{{ __dashboards_dir_present.files | default([]) | map(attribute='path') | list }}"
__folders_copied_list: "{{ __dashboards_dir_created.results | default([]) | map(attribute='path') | list }}"
when: not ansible_check_mode
- name: "Register present and copied dashboards list"
ansible.builtin.set_fact:
__dashboards_present_list: "{{ __dashboards_present.files | default([]) | map(attribute='path') | list }}"
__dashboards_copied_list: "{{
(
(__dashboards_copied.results | default([]) | map(attribute='dest') | list) +
(__dashboards_copied_custom.results | default([]) | map(attribute='dest') | list)
) | list
}}"
when: not ansible_check_mode
- name: "Remove dashboards not present on deployer machine (synchronize)"
ansible.builtin.file:
path: "{{ item }}"
state: absent
loop: "{{ __dashboards_present_list | difference(__dashboards_copied_list) }}"
become: true
when:
- __dashboards_present_list is defined and __dashboards_present_list
- __dashboards_copied_list is defined and __dashboards_copied_list
- grafana_provisioning_synced | bool
- not ansible_check_mode
- name: "Remove folders not present on deployer machine (synchronize)"
ansible.builtin.file:
path: "{{ item }}"
state: absent
loop: "{{ __folders_present_list | difference(__folders_copied_list) }}"
become: true
when: grafana_provisioning_synced and not ansible_check_mode

View file

@ -0,0 +1,40 @@
---
# Manage datasources through the Grafana HTTP API (non-provisioning mode).
# Each item mirrors Grafana's datasource JSON model (camelCase keys), mapped
# onto the community.grafana module's snake_case options; keys left unset in
# the item are omitted so the module applies its own defaults.
- name: "Ensure datasources exist (via API)"
  community.grafana.grafana_datasource:
    # Authenticate as the admin account configured in grafana_ini.
    grafana_url: "{{ grafana_api_url }}"
    grafana_user: "{{ grafana_ini.security.admin_user }}"
    grafana_password: "{{ grafana_ini.security.admin_password }}"
    name: "{{ item.name }}"
    ds_url: "{{ item.url }}"
    ds_type: "{{ item.type }}"
    access: "{{ item.access | default(omit) }}"
    is_default: "{{ item.isDefault | default(omit) }}"
    basic_auth_user: "{{ item.basicAuthUser | default(omit) }}"
    basic_auth_password: "{{ item.basicAuthPassword | default(omit) }}"
    database: "{{ item.database | default(omit) }}"
    user: "{{ item.user | default(omit) }}"
    password: "{{ item.password | default(omit) }}"
    # CloudWatch-specific options.
    aws_auth_type: "{{ item.aws_auth_type | default(omit) }}"
    aws_default_region: "{{ item.aws_default_region | default(omit) }}"
    aws_access_key: "{{ item.aws_access_key | default(omit) }}"
    aws_secret_key: "{{ item.aws_secret_key | default(omit) }}"
    aws_credentials_profile: "{{ item.aws_credentials_profile | default(omit) }}"
    aws_custom_metrics_namespaces: "{{ item.aws_custom_metrics_namespaces | default(omit) }}"
  loop: "{{ grafana_datasources }}"
  # Provisioning mode writes files instead — handled by the next task.
  when: "not grafana_use_provisioning"
# Provisioning mode: render all datasources into Grafana's file-based
# provisioning directory. Grafana reloads this file on restart (hence the
# handler notification). deleteDatasources stays empty — removals must be
# handled manually.
- name: "Create/Update datasources file (provisioning)"
  ansible.builtin.copy:
    dest: "/etc/grafana/provisioning/datasources/ansible.yml"
    content: |
      apiVersion: 1
      deleteDatasources: []
      datasources:
      {{ grafana_datasources | to_nice_yaml }}
    backup: false
    owner: root
    # Group-readable only: datasource definitions may carry credentials.
    group: grafana
    # Quoted: a bare 0640 is parsed as the YAML 1.1 octal integer 416
    # (ansible-lint risky-octal).
    mode: "0640"
  notify: restart_grafana
  become: true
  when: "grafana_use_provisioning"

View file

@ -0,0 +1,133 @@
---
# NOTE(review): grafana-data is presumably a distro-packaged grafana split
# that conflicts with the upstream package installed below — confirm.
- name: "Remove conflicting grafana packages"
  ansible.builtin.package:
    name: grafana-data
    state: absent
# Install OS-specific prerequisites; _grafana_dependencies comes from the
# per-distro vars files loaded in main.yml and may be absent or empty.
- name: "Install dependencies"
  ansible.builtin.package:
    name: "{{ _grafana_dependencies }}"
    state: present
    update_cache: true
  when:
    - _grafana_dependencies is defined
    - _grafana_dependencies | length > 0
# Configure the Grafana repository on zypper-based systems (SUSE), only when
# this role is asked to manage the repository itself.
- name: "Prepare zypper"
  when:
    - "ansible_facts['pkg_mgr'] == 'zypper'"
    - "(grafana_manage_repo)"
  environment: "{{ grafana_environment }}"
  block:
    # Capitalized for consistency with every other task name in this file.
    - name: "Import Grafana RPM key"
      ansible.builtin.rpm_key:
        state: present
        key: "{{ grafana_yum_key }}"
    - name: "Add Grafana zypper repository"
      community.general.zypper_repository:
        name: grafana
        description: grafana
        repo: "{{ grafana_yum_repo }}"
        enabled: true
        # No space before the colon (yamllint `colons`). When a GPG key is
        # configured, keep signature checking on; otherwise omit and let the
        # module default apply.
        disable_gpg_check: "{{ false if (grafana_yum_key) else omit }}"
        runrefresh: true
      when: "(not grafana_rhsm_repo)"
# Configure the Grafana repository on yum/dnf systems, unless the packages
# come from a Red Hat subscription repo (grafana_rhsm_repo) instead.
- name: "Prepare yum/dnf"
  when:
    - "ansible_facts['pkg_mgr'] in ['yum', 'dnf']"
    - "(grafana_manage_repo)"
  environment: "{{ grafana_environment }}"
  block:
    - name: "Add Grafana yum/dnf repository"
      ansible.builtin.yum_repository:
        name: grafana
        description: grafana
        baseurl: "{{ grafana_yum_repo }}"
        enabled: true
        # GPG verification is only enabled when a key URL was provided;
        # otherwise the options are omitted so module defaults apply.
        gpgkey: "{{ grafana_yum_key | default(omit) }}"
        repo_gpgcheck: "{{ true if (grafana_yum_key) else omit }}"
        gpgcheck: "{{ true if (grafana_yum_key) else omit }}"
      when: "(not grafana_rhsm_repo)"
# Attach the Red Hat subscription that provides the Grafana repo, but only
# when it is not already consumed: check consumed pools, look up an
# available pool id, attach it, then enable the repository.
- name: "Attach RHSM subscription"
  when: "(grafana_rhsm_subscription)"
  block:
    # Read-only query (changed_when: false): lists pool ids already consumed
    # that match the configured subscription.
    - name: "Check if Grafana RHSM subscription is enabled"
      ansible.builtin.command:
        cmd: "subscription-manager list --consumed --matches={{ grafana_rhsm_subscription | quote }} --pool-only"
      register: __subscription_manager_consumed
      changed_when: false
      # Redundant with the block-level condition; kept as-is.
      when: (grafana_rhsm_subscription)
    # Only look for an available pool when nothing matching is consumed yet.
    - name: "Find RHSM repo subscription pool id"
      ansible.builtin.command:
        cmd: "subscription-manager list --available --matches={{ grafana_rhsm_subscription | quote }} --pool-only"
      register: __subscription_manager_available
      changed_when: false
      when:
        - "(grafana_rhsm_subscription)"
        - "__subscription_manager_consumed.stdout | length <= 0"
    # Attach the discovered pool; change/failure are detected by matching the
    # tool's stdout, since subscription-manager's exit codes are not reliable
    # signals here.
    - name: "Attach RHSM subscription"
      ansible.builtin.command:
        cmd: "subscription-manager attach --pool={{ __subscription_manager_available.stdout }}"
      register: __subscription_manager_attach
      changed_when: "__subscription_manager_attach.stdout is search('Successfully attached a subscription')"
      failed_when: "__subscription_manager_attach.stdout is search('could not be found')"
      when:
        - "(grafana_rhsm_subscription)"
        - "__subscription_manager_consumed.stdout | default() | length <= 0"
        - "__subscription_manager_available.stdout | default() | length > 0"
# Enable the RHSM-provided repository (independent of the attach block, which
# is gated on grafana_rhsm_subscription rather than grafana_rhsm_repo).
- name: "Enable RHSM repository"
  community.general.rhsm_repository:
    name: "{{ grafana_rhsm_repo }}"
    state: enabled
  when: (grafana_rhsm_repo)
# Configure the Grafana repository on apt systems. Modern releases
# (Debian >= 12, Ubuntu >= 24) get a deb822 .sources entry; older ones get a
# classic one-line sources.list entry with a keyring file.
- name: "Prepare apt"
  when:
    - "ansible_facts['pkg_mgr'] == 'apt'"
    - "(grafana_manage_repo)"
  environment: "{{ grafana_environment }}"
  block:
    - name: "Use deb822 format?"
      ansible.builtin.set_fact:
        __use_deb822: "{{ (ansible_facts['distribution'] == 'Debian' and (ansible_facts['distribution_major_version'] | int) >= 12) or
                          (ansible_facts['distribution'] == 'Ubuntu' and (ansible_facts['distribution_major_version'] | int) >= 24) }}"
    # Legacy path: fetch the signing key into the shared keyring directory.
    - name: "Import Grafana apt gpg key"
      ansible.builtin.get_url:
        url: "{{ grafana_apt_key }}"
        dest: /usr/share/keyrings/grafana.asc
        mode: "0644"
      when: not __use_deb822
    # Legacy path: apt_repository refreshes the cache itself.
    - name: "Add Grafana apt repository"
      when: not __use_deb822
      ansible.builtin.apt_repository:
        repo: "{{ grafana_apt_repo }}"
        update_cache: true
    # deb822 path: the module fetches/installs the key from signed_by.
    - name: "Add Grafana apt repository"
      when: __use_deb822
      ansible.builtin.deb822_repository:
        name: "{{ grafana_apt_name }}"
        types: deb
        uris: "{{ grafana_apt_repo_uri }}"
        suites: "{{ grafana_apt_release_channel }}"
        components:
          - main
        architectures:
          - "{{ grafana_apt_arch }}"
        signed_by: "{{ grafana_apt_key }}"
      register: __update_cache
    # deb822_repository does not refresh the cache, so do it explicitly when
    # the repo definition changed. (Checked as defined because the previous
    # task is skipped on the legacy path.)
    - name: "Update apt cache"
      ansible.builtin.apt:
        update_cache: true
      when: __update_cache is defined and __update_cache.changed
# Install (or upgrade) Grafana itself. With grafana_version == 'latest' the
# newest repo version is tracked on every run; any other value only ensures
# presence — the pin is presumably carried in grafana_package (verify).
- name: "Install Grafana"
  ansible.builtin.package:
    name: "{{ grafana_package }}"
    state: "{{ (grafana_version == 'latest') | ternary('latest', 'present') }}"
  notify: restart_grafana

View file

@ -0,0 +1,133 @@
---
# Merge user-supplied grafana_ini over the role defaults (deep merge, so
# users only need to override individual keys).
- name: Inherit default vars
  ansible.builtin.set_fact:
    grafana_ini: "{{ grafana_ini_default | ansible.builtin.combine(grafana_ini | default({}), recursive=true) }}"
  # Hide the merged config from logs (it can include the admin password),
  # except in CI where the CI env var is set and full output aids debugging.
  # Real booleans rather than the strings 'true'/'false', which only worked
  # through implicit bool-casting (ansible-lint/yamllint truthy).
  no_log: "{{ false if lookup('env', 'CI') else true }}"
  tags:
    - always
# Load the most specific matching distro vars file, falling back through
# progressively broader names (distro+version -> distro+major ->
# family+major -> distro -> family). skip: true + errors='ignore' make a
# total miss non-fatal.
- name: "Gather variables for each operating system"
  ansible.builtin.include_vars: "{{ distrovars }}"
  vars:
    distrovars: "{{ lookup('first_found', params, errors='ignore') }}"
    params:
      skip: true
      files:
        - "{{ ansible_facts['distribution'] | lower }}-{{ ansible_facts['distribution_version'] | lower }}.yml"
        - "{{ ansible_facts['distribution'] | lower }}-{{ ansible_facts['distribution_major_version'] | lower }}.yml"
        - "{{ ansible_facts['os_family'] | lower }}-{{ ansible_facts['distribution_major_version'] | lower }}.yml"
        - "{{ ansible_facts['distribution'] | lower }}.yml"
        - "{{ ansible_facts['os_family'] | lower }}.yml"
      paths:
        - "vars/distro"
  # Tagged with every feature tag so tag-limited runs still get distro vars.
  tags:
    - grafana_install
    - grafana_configure
    - grafana_datasources
    - grafana_notifications
    - grafana_dashboards
# Sanity-check variables before touching the system. `apply: tags` tags the
# *included* tasks (include_tasks children do not inherit tags implicitly).
- name: Preflight
  ansible.builtin.include_tasks:
    file: preflight.yml
    apply:
      tags:
        - grafana_install
        - grafana_configure
        - grafana_datasources
        - grafana_notifications
        - grafana_dashboards
# Repo setup + package installation; runs privileged via apply: become.
- name: Install
  ansible.builtin.include_tasks:
    file: install.yml
    apply:
      become: true
      tags:
        - grafana_install
- name: Configure
  ansible.builtin.include_tasks:
    file: configure.yml
    apply:
      become: true
      tags:
        - grafana_configure
- name: Plugins
  ansible.builtin.include_tasks:
    file: plugins.yml
    apply:
      tags:
        - grafana_configure
  when: "grafana_plugins != []"
# Run any pending restart_grafana handler now, so the service is up with the
# new config before the API/provisioning steps below talk to it.
- name: "Restart grafana before configuring datasources and dashboards"
  ansible.builtin.meta: flush_handlers
  tags:
    - grafana_install
    - grafana_configure
    - grafana_datasources
    - grafana_notifications
    - grafana_dashboards
    - grafana_run
# Block until Grafana is reachable: host/port for http(s) (also when protocol
# is unset), or the unix socket path when protocol == 'socket'. The unused
# pair of options is omitted in each mode.
- name: "Wait for grafana to start"
  ansible.builtin.wait_for:
    host: "{{ grafana_ini.server.http_addr if grafana_ini.server.protocol is undefined or grafana_ini.server.protocol in ['http', 'https'] else omit }}"
    port: "{{ grafana_ini.server.http_port if grafana_ini.server.protocol is undefined or grafana_ini.server.protocol in ['http', 'https'] else omit }}"
    path: "{{ grafana_ini.server.socket | default() if grafana_ini.server.protocol is defined and grafana_ini.server.protocol == 'socket' else omit }}"
  tags:
    - grafana_install
    - grafana_configure
    - grafana_datasources
    - grafana_notifications
    - grafana_dashboards
    - grafana_run
# The remaining feature includes are gated on their input variables so empty
# configuration skips the whole file.
- name: "Api keys"
  ansible.builtin.include_tasks:
    file: api_keys.yml
    apply:
      tags:
        - grafana_configure
        - grafana_run
  when: "grafana_api_keys | length > 0"
- name: Datasources
  ansible.builtin.include_tasks:
    file: datasources.yml
    apply:
      tags:
        - grafana_configure
        - grafana_datasources
        - grafana_run
  when: "grafana_datasources != []"
# Covers both legacy notification channels and the newer alert resources.
- name: Notifications
  ansible.builtin.include_tasks:
    file: notifications.yml
    apply:
      tags:
        - grafana_configure
        - grafana_notifications
        - grafana_run
  when: "grafana_alert_notifications | length > 0 or grafana_alert_resources | length > 0"
# Scan the controller (delegate_to: localhost) for dashboard JSON files to
# provision. NOTE(review): this task carries no tags, so on a tag-limited run
# it may be skipped while the condition below still dereferences
# __found_dashboards['files'] — confirm how the role is invoked with tags.
- name: Find dashboards to be provisioned
  ansible.builtin.find:
    paths: "{{ grafana_dashboards_dir }}"
    recurse: true
    patterns: "*.json"
  delegate_to: localhost
  become: false
  register: __found_dashboards
- name: Dashboards
  ansible.builtin.include_tasks:
    file: dashboards.yml
    apply:
      tags:
        - grafana_configure
        - grafana_dashboards
        - grafana_run
  # Include when either explicit dashboards are configured or local JSON
  # files were found above.
  when: "grafana_dashboards | length > 0 or __found_dashboards['files'] | length > 0"

View file

@ -0,0 +1,28 @@
---
# legacy config
# Render legacy alert notification channels into Grafana's file-based
# provisioning tree. NOTE(review): Grafana's documentation names this
# directory "provisioning/notifiers" — verify that "notification" is correct
# for the targeted Grafana versions.
- name: "Create/Delete/Update alert notifications channels (provisioning)"
  ansible.builtin.copy:
    content: |
      apiVersion: 1
      {{ grafana_alert_notifications | to_nice_yaml }}
    dest: /etc/grafana/provisioning/notification/ansible.yml
    owner: root
    # Group-readable only: channel definitions may carry credentials/tokens.
    group: grafana
    mode: "0640"
  become: true
  notify: restart_grafana
  when: grafana_use_provisioning and grafana_alert_notifications | length > 0
# new alert resources
# Render unified-alerting resources (rules, contact points, policies) into
# the alerting provisioning directory; Grafana loads them on restart.
- name: "Create/Delete/Update alert resources (provisioning)"
  ansible.builtin.copy:
    content: |
      apiVersion: 1
      {{ grafana_alert_resources | to_nice_yaml }}
    dest: /etc/grafana/provisioning/alerting/ansible.yml
    owner: root
    group: grafana
    mode: "0640"
  become: true
  notify: restart_grafana
  when: grafana_use_provisioning and grafana_alert_resources | length > 0

View file

@ -0,0 +1,20 @@
---
# Each installed plugin occupies one directory directly under
# <data>/plugins; the file-stat results feed the install loop below.
- name: "Check which plugins are installed"
  ansible.builtin.find:
    file_type: directory
    recurse: false
    paths: "{{ grafana_ini.paths.data }}/plugins"
  register: __installed_plugins
# Install any requested plugin that is not already present, retrying to ride
# out transient download failures.
- name: "Install plugins"
  become: true
  ansible.builtin.command:
    cmd: "grafana-cli --pluginsDir {{ grafana_ini.paths.data }}/plugins plugins install {{ item }}"
    # Idempotency backstop: skip the command if the plugin dir exists.
    creates: "{{ grafana_ini.paths.data }}/plugins/{{ item }}"
  # __installed_plugins.files holds file-stat dicts, so diff against the
  # directory basenames (the plugin ids). Diffing name strings against dicts
  # never matched, which made the previous filter a no-op (creates: alone
  # kept it idempotent).
  loop: "{{ grafana_plugins | difference(__installed_plugins.files | map(attribute='path') | map('basename') | list) }}"
  register: __plugin_install
  until: "__plugin_install is succeeded"
  retries: 5
  delay: 2
  notify:
    - restart_grafana

View file

@ -0,0 +1,104 @@
---
# Abort early if any pre-refactor top-level variable is still set: the role's
# configuration moved under the grafana_ini dict, and a leftover legacy var
# would otherwise be silently ignored.
- name: "Check variable types"
  ansible.builtin.assert:
    that:
      - grafana_logs_dir is undefined
      - grafana_data_dir is undefined
      - grafana_server is undefined
      - grafana_database is undefined
      - grafana_security is undefined
      - grafana_remote_cache is undefined
      - grafana_welcome_email_on_sign_up is undefined
      - grafana_users is undefined
      - grafana_auth is undefined
      - grafana_auth_generic_oauth is undefined
      - grafana_session is undefined
      - grafana_analytics is undefined
      - grafana_smtp is undefined
      - grafana_alerting is undefined
      - grafana_unified_alerting is undefined
      - grafana_log is undefined
      - grafana_metrics is undefined
      - grafana_tracing is undefined
      - grafana_snapshots is undefined
      - grafana_image_storage is undefined
      - grafana_date_formats is undefined
      - grafana_feature_toggles is undefined
      - grafana_plugins_ops is undefined
      - grafana_instance is undefined
      - grafana_address is undefined
      - grafana_port is undefined
      - grafana_domain is undefined
      - grafana_url is undefined
      - grafana_panels is undefined
    fail_msg: Check upgrade notes
# Dashboards reference datasources by name, so installing dashboards without
# any datasource configured cannot work.
- name: "Fail when datasources aren't configured when dashboards are set to be installed"
  ansible.builtin.fail:
    msg: "You need to specify datasources for dashboards!!!"
  when: "grafana_dashboards != [] and grafana_datasources == []"
# The admin credentials are required for the API-based tasks later in the
# role. Check `is not defined` FIRST: evaluating the == comparison on an
# undefined variable would itself raise an error before the defined-check
# ever ran (Jinja evaluates the left operand of `or` first).
- name: "Fail when grafana admin user isn't set"
  ansible.builtin.fail:
    msg: "Please specify grafana admin user (grafana_ini.security.admin_user)"
  when:
    - "grafana_ini.security.admin_user is not defined or
       grafana_ini.security.admin_user == ''"
- name: "Fail when grafana admin password isn't set"
  ansible.builtin.fail:
    msg: "Please specify grafana admin password (grafana_ini.security.admin_password)"
  when:
    - "grafana_ini.security.admin_password is not defined or
       grafana_ini.security.admin_password == ''"
# YAML-typing guard: quoted "true"/"false" in datasource definitions would be
# sent to the Grafana API as strings.
- name: "Fail on incorrect variable types in datasource definitions"
  ansible.builtin.fail:
    msg: "Boolean variables in grafana_datasources shouldn't be passed as strings. Please remove unneeded apostrophes."
  when: "( item.isDefault is defined and item.isDefault is string ) or
         ( item.basicAuth is defined and item.basicAuth is string )"
  loop: "{{ grafana_datasources }}"
# sqlite3 takes only a path; server databases take url/host/user/password —
# reject mixtures of the two option sets.
- name: "Fail on bad database configuration"
  ansible.builtin.fail:
    msg: "Invalid database configuration. Please look at http://docs.grafana.org/installation/configuration/#database"
  when: "( grafana_ini.database.type == 'sqlite3' and grafana_ini.database.url is defined ) or
         ( grafana_ini.database.type != 'sqlite3' and grafana_ini.database.path is defined ) or
         ( grafana_ini.database.type == 'sqlite3' and grafana_ini.database.host is defined ) or
         ( grafana_ini.database.type == 'sqlite3' and grafana_ini.database.user is defined ) or
         ( grafana_ini.database.type == 'sqlite3' and grafana_ini.database.password is defined ) or
         ( grafana_ini.database.type == 'sqlite3' and grafana_ini.database.server_cert_name is defined )"
# Grafana only knows these three org roles.
- name: "Fail when grafana_api_keys uses invalid role names"
  ansible.builtin.fail:
    msg: "Check grafana_api_keys. The role can only be one of the following values: Viewer, Editor or Admin."
  when: "item.role not in ['Viewer', 'Editor', 'Admin']"
  loop: "{{ grafana_api_keys }}"
# Enabling LDAP auth without server/group-mapping config would yield a
# broken ldap.toml.
- name: "Fail when grafana_ldap isn't set when grafana_ini.auth.ldap is"
  ansible.builtin.fail:
    msg: "You need to configure grafana_ldap.servers and grafana_ldap.group_mappings when grafana_ini.auth.ldap is set"
  when:
    - "'ldap' in grafana_ini.auth"
    - "grafana_ldap is not defined or ('servers' not in grafana_ldap or 'group_mappings' not in grafana_ldap)"
# File-based provisioning first appeared in Grafana 5.0; silently fall back
# to the API path for older pinned versions ('latest' is assumed >= 5.0).
- name: "Force grafana_use_provisioning to false if grafana_version is < 5.0 ( grafana_version is set to '{{ grafana_version }}' )"
  ansible.builtin.set_fact:
    grafana_use_provisioning: false
  when:
    - "grafana_version != 'latest'"
    - "grafana_version is version_compare('5.0', '<')"
- name: "Fail if grafana_ini.server.http_port is lower than 1024 and grafana_cap_net_bind_service is not true"
ansible.builtin.fail:
msg: "Trying to use a port lower than 1024 without setting grafana_cap_net_bind_service."
when:
- "grafana_ini.server.http_port | int <= 1024"
- "not grafana_cap_net_bind_service"
# Socket mode needs an explicit socket path — there is no usable default.
- name: "Fail if grafana_ini.server.socket not defined when in socket mode"
  ansible.builtin.fail:
    msg: "You need to configure grafana_ini.server.socket when grafana_ini.server.protocol is set to 'socket'"
  when:
    - "grafana_ini.server.protocol is defined and grafana_ini.server.protocol == 'socket'"
    - "grafana_ini.server.socket is undefined or grafana_ini.server.socket == ''"

Some files were not shown because too many files have changed in this diff Show more