feat: base configuration automation
commit e4770a7343
70 changed files with 2489 additions and 0 deletions
.gitignore (vendored, new file, 11 lines)
@@ -0,0 +1,11 @@
.idea
.*~
venv
.task
collections/ansible_collections/*/*/tests/*
collections/ansible_collections/*/*/logs/*
!collections/ansible_collections/*/*/tests/config.yml
vault.yml
inventory/inventory.yml
inventory/host_vars/*
!.gitkeep
README.md (new file, 129 lines)
@@ -0,0 +1,129 @@
# Abstract

## List of services

# Usage

## Prerequisites

***Required dependencies***

- Python3.9+,
- PIP,
- Virtualenv,
- [Task](https://taskfile.dev/),
- Debian packages:
  - libcurl4-openssl-dev,
  - libssl-dev,
  - libcairo2,
  - libcairo2-dev,
  - libffi-dev,
  - python3-dev,
  - python3-virtualenv

***Optional, dev-related dependencies***

- Docker

## Installation

```shell
# Debian amd64

TASK_VERSION=3.29.1;
sudo apt install -y \
  libcurl4-openssl-dev \
  libssl-dev \
  libcairo2 \
  libcairo2-dev \
  libffi-dev \
  python3-virtualenv \
  python3-dev;
wget https://github.com/go-task/task/releases/download/v"${TASK_VERSION}"/task_linux_amd64.deb;
sudo dpkg -i task_linux_amd64.deb;
task venv;
```

## General Setup

```shell
mkdir -p collections/ansible_collections
cd collections/ansible_collections
ansible-galaxy collection init nullified.infrastructure
cd nullified/infrastructure/roles
ansible-galaxy role init tooling
```

---

## Cheatsheet

### Ansible usage

***validate files***

`ansible-playbook --syntax-check <file>`

***gather facts***

`ansible <target> -m setup`

***handlers***

invoked by a task through `notify`, executed only if the calling task reported a state change; handlers run at the end of the play, in the order they are declared;

-> force handlers to run:
```yaml
- name: some task
  meta: flush_handlers
```
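
for reference, a minimal notify/handler pairing (a sketch; task, handler and file names are illustrative):
```yaml
- hosts: all
  tasks:
    - name: deploy sshd configuration   # any module reporting "changed" can notify a handler
      ansible.builtin.template:
        src: sshd_config.j2
        dest: /etc/ssh/sshd_config
      notify: restart sshd

  handlers:
    - name: restart sshd
      ansible.builtin.service:
        name: sshd
        state: restarted
```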

***looping***

loop inside a task by using the `loop` keyword with the list of items to iterate over; see the sketch below;
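
a minimal sketch (package names are illustrative):
```yaml
- name: install base packages
  ansible.builtin.apt:
    name: "{{ item }}"   # item takes each value of the loop list in turn
    state: present
  loop:
    - git
    - jq
```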

***runtime grouping***

```yaml
- name: coin
  hosts: all
  gather_facts: true
  tasks:
    - name: group by OS
      group_by:
        key: "{{ ansible_facts.distribution }}"
```
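
the generated group can then be targeted by a later play (a sketch; the group name depends on the distribution reported by facts):
```yaml
- name: debian-only tweaks
  hosts: Debian   # group created by group_by on Debian hosts
  tasks:
    - name: ping debian hosts
      ansible.builtin.ping:
```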

***builtin vars***

- hostvars: {hostname => kvp_vars, ...},
- inventory_hostname(_short)?: name of the current host,
- group_names: list of groups assigned to the current host,
- groups: {groupname => [hostX, ...], ...},
- ansible_check_mode: whether the play is running in check mode,
- ansible_play_batch: list of inventory hostnames active in the current batch,
- ansible_play_hosts: list of inventory hostnames active in the current play,
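
a quick way to inspect them (a sketch):
```yaml
- name: show builtin vars
  hosts: all
  tasks:
    - name: dump host/group context   # prints the current host and its groups
      ansible.builtin.debug:
        msg: "{{ inventory_hostname }} is in {{ group_names }}"
```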

### Python modules

***argument options***

> *NOTE*
> Ansible Up and Running, page 503

- *default*: default value used when the arg is not supplied,
- *choices*: list of allowed values for an arg,
- *deprecated_aliases*: deprecate aliases; `dict(name, version, date, collection_name)`,
- *aliases*: aliases for the given argument,
- *type*: arg type,
- *elements*: type of the list elements when the arg is a list,
- *fallback*: tuple of a lookup function and a list to pass to it,
- *no_log*: mask the arg value in logs for sensitive data,
- *options*: complex args; defines the dict of suboptions,
- *mutually_exclusive*: list of mutually exclusive suboptions,
- *required_together*: list of suboption names that must be supplied together,
- *required_one_of*: list of suboptions of which at least one must be supplied,
- *required_if*: sequence of sequences,
- *required_by*: dict mapping option names to sequences of option names

---

### Notes / Todo

***dir layout***

- collections: ansible root dir for all modules, playbooks and collections
- configuration: <DEPRECATED> ansible root dir for inventory
- images: docker images, mostly used for ansible-test / molecule
- scripts: scripts used by go-task

### Setup

```shell
cp configuration/group_vars/vault.yml.dist configuration/group_vars/vault.yml
# encrypt vault
ansible-vault encrypt configuration/group_vars/vault.yml
# decrypt vault
ansible-vault decrypt configuration/group_vars/vault.yml
# run ansible command with vault-encrypted data
ansible-playbook --ask-vault-password -i inventories/test playbooks/test.yml
```
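
vaulted variables can then be loaded like any other vars file (a sketch; `vault_api_token` is an illustrative variable expected to live in vault.yml):
```yaml
- hosts: all
  vars_files:
    - configuration/group_vars/vault.yml   # decrypted on the fly with --ask-vault-password
  tasks:
    - name: use a vaulted value
      ansible.builtin.debug:
        msg: "api token is {{ vault_api_token | default('unset') }}"
```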
Taskfile.yml (new file, 106 lines)
@@ -0,0 +1,106 @@
version: '3'
|
||||
|
||||
env:
|
||||
DOCKER_REPOSITORY: pouncetech/molecule
|
||||
|
||||
vars:
|
||||
PYTHON_WRAPPER: '{{.ROOT_DIR}}/scripts/python_wrapper.sh'
|
||||
MOLECULE_DIR: '{{.ROOT_DIR}}/collections/ansible_collections/nullified/infrastructure/extensions'
|
||||
|
||||
tasks:
|
||||
venv:setup:
|
||||
desc: install a Python3 virtualenv and all the required ansible / molecule dependencies.
|
||||
cmds:
|
||||
- |
|
||||
set -e
|
||||
rm -rf ./venv || true
|
||||
python3 -m virtualenv --download venv
|
||||
./venv/bin/python3 -m pip install --upgrade -r requirements.txt
|
||||
status:
|
||||
- test -d venv
|
||||
|
||||
test:modules:
|
||||
desc: run `ansible-test sanity` on collections to find common issues for modules and collections
|
||||
dir: collections/ansible_collections
|
||||
vars:
|
||||
ANSIBLE_COLLECTIONS:
|
||||
sh: find -mindepth 2 -maxdepth 2 -type d
|
||||
cmds:
|
||||
- for: { var: ANSIBLE_COLLECTIONS }
|
||||
task: 'test:module:sanity'
|
||||
vars:
|
||||
COLLECTION_PATH: 'collections/ansible_collections/{{.ITEM}}'
|
||||
|
||||
test:module:sanity:
|
||||
internal: true
|
||||
dir: '{{.COLLECTION_PATH}}'
|
||||
preconditions:
|
||||
- test -d tests
|
||||
cmds:
|
||||
- '{{.PYTHON_WRAPPER}} ansible-test sanity --docker default'
|
||||
|
||||
test:collections:
|
||||
desc: run molecule tests for all roles and collections.
|
||||
dir: collections/ansible_collections
|
||||
vars:
|
||||
ANSIBLE_COLLECTIONS:
|
||||
sh: find -mindepth 2 -maxdepth 2 -type d
|
||||
cmds:
|
||||
- for: { var: ANSIBLE_COLLECTIONS }
|
||||
task: 'test:collection:molecule'
|
||||
vars:
|
||||
COLLECTION_PATH: 'collections/ansible_collections/{{.ITEM}}'
|
||||
|
||||
test:collection:molecule:
|
||||
internal: true
|
||||
dir: '{{.COLLECTION_PATH}}/extensions'
|
||||
preconditions:
|
||||
- test -d molecule
|
||||
cmds:
|
||||
- '{{.PYTHON_WRAPPER}} molecule test'
|
||||
|
||||
module:github_artifact:
|
||||
desc: run a module from the collection for testing purposes
|
||||
vars:
|
||||
PLUGINS_DIR: '{{.ROOT_DIR}}/collections/ansible_collections/nullified/infrastructure/plugins'
|
||||
cmd: |
|
||||
{{.PYTHON_WRAPPER}} python3 {{.PLUGINS_DIR}}/modules/github_artifact.py {{.PLUGINS_DIR}}/tests/github_artifact.json |
|
||||
{{.PYTHON_WRAPPER}} python3 -m json.tool | {{.PYTHON_WRAPPER}} pygmentize -l json
|
||||
|
||||
docker:build:
|
||||
desc: build docker images locally.
|
||||
vars:
|
||||
DOCKERFILE_IMAGES:
|
||||
sh: find images -type f -name 'Dockerfile-*'
|
||||
cmds:
|
||||
- for: { var: DOCKERFILE_IMAGES }
|
||||
task: 'docker:build:image'
|
||||
vars:
|
||||
DOCKERFILE: '{{.ITEM}}'
|
||||
DOCKER_CONTEXT: images
|
||||
IMAGE_TAG: '{{.ITEM | splitList "/" | last | replace "Dockerfile-" ""}}'
|
||||
|
||||
docker:build:image:
|
||||
internal: true
|
||||
sources:
|
||||
- '{{.DOCKERFILE}}'
|
||||
cmds:
|
||||
- docker buildx build --load -t "{{.DOCKER_REPOSITORY}}:{{.IMAGE_TAG}}" -f {{.DOCKERFILE}} {{.DOCKER_CONTEXT}}
|
||||
status:
|
||||
- docker image ls --format '{{"{{"}} .Tag {{"}}"}}' | grep -E '^{{.IMAGE_TAG}}$'
|
||||
|
||||
docker:push:
|
||||
desc: push locally built images
|
||||
cmds:
|
||||
- 'echo docker push $DOCKER_REPOSITORY:$IMAGE_TAG'
|
||||
env:
|
||||
IMAGE_TAG:
|
||||
|
||||
venv:
|
||||
desc: execute a python3 command using the virtualenv wrapper
|
||||
cmd: '{{.PYTHON_WRAPPER}} {{.CLI_ARGS}}'
|
||||
|
||||
molecule:
|
||||
desc: execute a molecule command
|
||||
dir: '{{.MOLECULE_DIR}}'
|
||||
cmd: '{{.PYTHON_WRAPPER}} molecule {{.CLI_ARGS}}'
|
|
@ -0,0 +1,3 @@
|
|||
# Ansible Collection - nullified.infrastructure
|
||||
|
||||
Documentation for the collection.
|
|
@ -0,0 +1,9 @@
|
|||
[defaults]
|
||||
# target root path of the repository
|
||||
home = ../../../../..
|
||||
remote_tmp = /tmp
|
||||
local_tmp = /tmp
|
||||
|
||||
[privilege_escalation]
|
||||
become_method = su
|
||||
become_flags = "-l"
|
|
@ -0,0 +1,39 @@
|
|||
---
|
||||
# playbook file that contains the call for your role
|
||||
- name: Fail if molecule group is missing
|
||||
hosts: localhost
|
||||
tasks:
|
||||
- name: Print some info
|
||||
ansible.builtin.debug:
|
||||
msg: "{{ groups }}"
|
||||
|
||||
- name: Assert group existence
|
||||
ansible.builtin.assert:
|
||||
that: "'molecule' in groups"
|
||||
fail_msg: |
|
||||
molecule group was not found inside inventory groups: {{ groups }}
|
||||
|
||||
- name: Converge
|
||||
hosts: molecule
|
||||
gather_facts: true
|
||||
tasks:
|
||||
- name: Testing common role
|
||||
ansible.builtin.include_role:
|
||||
name: nullified.infrastructure.common
|
||||
tasks_from: main.yml
|
||||
- name: Testing development role
|
||||
ansible.builtin.include_role:
|
||||
name: nullified.infrastructure.development
|
||||
tasks_from: main.yml
|
||||
- name: Testing security role
|
||||
ansible.builtin.include_role:
|
||||
name: nullified.infrastructure.security
|
||||
tasks_from: main.yml
|
||||
- name: Testing server role
|
||||
ansible.builtin.include_role:
|
||||
name: nullified.infrastructure.server
|
||||
tasks_from: main.yml
|
||||
- name: Testing workstation role
|
||||
ansible.builtin.include_role:
|
||||
name: nullified.infrastructure.workstation
|
||||
tasks_from: main.yml
|
|
@ -0,0 +1,90 @@
|
|||
---
|
||||
# playbook file used for creating the instances and storing data in instance-config
|
||||
- name: Create
|
||||
hosts: localhost
|
||||
gather_facts: false
|
||||
vars:
|
||||
molecule_inventory:
|
||||
all:
|
||||
hosts: {}
|
||||
molecule: {}
|
||||
tasks:
|
||||
- name: Create a container
|
||||
community.docker.docker_container:
|
||||
name: "{{ item.name }}"
|
||||
image: "{{ item.image }}"
|
||||
detach: true
|
||||
state: started
|
||||
log_driver: json-file
|
||||
cgroupns_mode: private
|
||||
cgroup_parent: docker.slice
|
||||
mounts:
|
||||
- target: /run
|
||||
type: tmpfs
|
||||
- target: /run/lock
|
||||
type: tmpfs
|
||||
- target: /tmp
|
||||
type: tmpfs
|
||||
register: result
|
||||
loop: "{{ molecule_yml.platforms }}"
|
||||
|
||||
- name: Print some info
|
||||
ansible.builtin.debug:
|
||||
msg: "{{ result.results }}"
|
||||
|
||||
- name: Fail if container is not running
|
||||
when: >
|
||||
item.container.State.ExitCode != 0 or
|
||||
not item.container.State.Running
|
||||
ansible.builtin.include_tasks:
|
||||
file: tasks/create-fail.yml
|
||||
loop: "{{ result.results }}"
|
||||
loop_control:
|
||||
label: "{{ item.container.Name }}"
|
||||
|
||||
- name: Add container to molecule_inventory
|
||||
vars:
|
||||
inventory_partial_yaml: |
|
||||
all:
|
||||
children:
|
||||
molecule:
|
||||
hosts:
|
||||
"{{ item.name }}":
|
||||
ansible_connection: community.docker.docker
|
||||
ansible.builtin.set_fact:
|
||||
molecule_inventory: >
|
||||
{{ molecule_inventory | combine(inventory_partial_yaml | from_yaml) }}
|
||||
loop: "{{ molecule_yml.platforms }}"
|
||||
loop_control:
|
||||
label: "{{ item.name }}"
|
||||
|
||||
- name: Dump molecule_inventory
|
||||
ansible.builtin.copy:
|
||||
content: |
|
||||
{{ molecule_inventory | to_yaml }}
|
||||
dest: "{{ molecule_ephemeral_directory }}/inventory/molecule_inventory.yml"
|
||||
mode: 0600
|
||||
|
||||
- name: Force inventory refresh
|
||||
ansible.builtin.meta: refresh_inventory
|
||||
|
||||
- name: Fail if molecule group is missing
|
||||
ansible.builtin.assert:
|
||||
that: "'molecule' in groups"
|
||||
fail_msg: |
|
||||
molecule group was not found inside inventory groups: {{ groups }}
|
||||
run_once: true # noqa: run-once[task]
|
||||
|
||||
# we want to avoid errors like "Failed to create temporary directory"
|
||||
- name: Validate that inventory was refreshed
|
||||
hosts: molecule
|
||||
gather_facts: false
|
||||
tasks:
|
||||
- name: Check uname
|
||||
ansible.builtin.raw: uname -a
|
||||
register: result
|
||||
changed_when: false
|
||||
|
||||
- name: Display uname info
|
||||
ansible.builtin.debug:
|
||||
msg: "{{ result.stdout }}"
|
|
@ -0,0 +1,21 @@
|
|||
---
|
||||
# destroying the instances and removing them from instance-config
|
||||
- name: Destroy molecule containers
|
||||
hosts: molecule
|
||||
gather_facts: false
|
||||
tasks:
|
||||
- name: Stop and remove container
|
||||
delegate_to: localhost
|
||||
community.docker.docker_container:
|
||||
name: "{{ inventory_hostname }}"
|
||||
state: absent
|
||||
auto_remove: true
|
||||
|
||||
- name: Remove dynamic molecule inventory
|
||||
hosts: localhost
|
||||
gather_facts: false
|
||||
tasks:
|
||||
- name: Remove dynamic inventory file
|
||||
ansible.builtin.file:
|
||||
path: "{{ molecule_ephemeral_directory }}/inventory/molecule_inventory.yml"
|
||||
state: absent
|
|
@ -0,0 +1,9 @@
|
|||
---
|
||||
# central configuration entry point for Molecule per scenario
|
||||
dependency:
|
||||
name: galaxy
|
||||
options:
|
||||
requirements-file: requirements.yml
|
||||
platforms:
|
||||
- name: debian-bookworm
|
||||
image: pouncetech/molecule:debian-bookworm
|
|
@ -0,0 +1,2 @@
|
|||
collections:
|
||||
- community.docker
|
|
@ -0,0 +1,13 @@
|
|||
- name: Retrieve container log
|
||||
ansible.builtin.command:
|
||||
cmd: >-
|
||||
{% raw %}
|
||||
docker logs
|
||||
{% endraw %}
|
||||
{{ item.stdout_lines[0] }}
|
||||
changed_when: false
|
||||
register: logfile_cmd
|
||||
|
||||
- name: Display container log
|
||||
ansible.builtin.fail:
|
||||
msg: "{{ logfile_cmd.stderr }}"
|
|
@ -0,0 +1,67 @@
|
|||
### REQUIRED
|
||||
# The namespace of the collection. This can be a company/brand/organization or product namespace under which all
|
||||
# content lives. May only contain alphanumeric lowercase characters and underscores. Namespaces cannot start with
|
||||
# underscores or numbers and cannot contain consecutive underscores
|
||||
namespace: nullified
|
||||
|
||||
# The name of the collection. Has the same character restrictions as 'namespace'
|
||||
name: infrastructure
|
||||
|
||||
# The version of the collection. Must be compatible with semantic versioning
|
||||
version: 1.0.0
|
||||
|
||||
# The path to the Markdown (.md) readme file. This path is relative to the root of the collection
|
||||
readme: README.md
|
||||
|
||||
# A list of the collection's content authors. Can be just the name or in the format 'Full Name <email> (url)
|
||||
# @nicks:irc/im.site#channel'
|
||||
authors:
|
||||
- Florian L. <git@0x2a.ninja>
|
||||
|
||||
### OPTIONAL but strongly recommended
|
||||
# A short summary description of the collection
|
||||
description: your collection description
|
||||
|
||||
# Either a single license or a list of licenses for content inside of a collection. Ansible Galaxy currently only
|
||||
# accepts L(SPDX,https://spdx.org/licenses/) licenses. This key is mutually exclusive with 'license_file'
|
||||
license:
|
||||
- GPL-2.0-or-later
|
||||
|
||||
# The path to the license file for the collection. This path is relative to the root of the collection. This key is
|
||||
# mutually exclusive with 'license'
|
||||
license_file: ''
|
||||
|
||||
# A list of tags you want to associate with the collection for indexing/searching. A tag name has the same character
|
||||
# requirements as 'namespace' and 'name'
|
||||
tags: []
|
||||
|
||||
# Collections that this collection requires to be installed for it to be usable. The key of the dict is the
|
||||
# collection label 'namespace.name'. The value is a version range
|
||||
# L(specifiers,https://python-semanticversion.readthedocs.io/en/latest/#requirement-specification). Multiple version
|
||||
# range specifiers can be set and are separated by ','
|
||||
dependencies: {}
|
||||
|
||||
# The URL of the originating SCM repository
|
||||
repository: http://example.com/repository
|
||||
|
||||
# The URL to any online docs
|
||||
documentation: http://docs.example.com
|
||||
|
||||
# The URL to the homepage of the collection/project
|
||||
homepage: http://example.com
|
||||
|
||||
# The URL to the collection issue tracker
|
||||
issues: http://example.com/issue/tracker
|
||||
|
||||
# A list of file glob-like patterns used to filter any files or directories that should not be included in the build
|
||||
# artifact. A pattern is matched from the relative path of the file or directory of the collection directory. This
|
||||
# uses 'fnmatch' to match the files or directories. Some directories and files like 'galaxy.yml', '*.pyc', '*.retry',
|
||||
# and '.git' are always filtered. Mutually exclusive with 'manifest'
|
||||
build_ignore: []
|
||||
|
||||
# A dict controlling use of manifest directives used in building the collection artifact. The key 'directives' is a
|
||||
# list of MANIFEST.in style
|
||||
# L(directives,https://packaging.python.org/en/latest/guides/using-manifest-in/#manifest-in-commands). The key
|
||||
# 'omit_default_directives' is a boolean that controls whether the default directives are used. Mutually exclusive
|
||||
# with 'build_ignore'
|
||||
# manifest: null
|
|
@ -0,0 +1,2 @@
|
|||
---
|
||||
requires_ansible: '>=2.9.10'
|
|
@ -0,0 +1,12 @@
|
|||
---
|
||||
- hosts: server
|
||||
tasks:
|
||||
- name: include common role
|
||||
ansible.builtin.include_role:
|
||||
name: nullified.infrastructure.common
|
||||
- name: include security role
|
||||
ansible.builtin.include_role:
|
||||
name: nullified.infrastructure.security
|
||||
- name: include server role
|
||||
ansible.builtin.include_role:
|
||||
name: nullified.infrastructure.server
|
|
@ -0,0 +1,31 @@
|
|||
# Collections Plugins Directory
|
||||
|
||||
This directory can be used to ship various plugins inside an Ansible collection. Each plugin is placed in a folder that
|
||||
is named after the type of plugin it is in. It can also include the `module_utils` and `modules` directory that
|
||||
would contain module utils and modules respectively.
|
||||
|
||||
Here is an example directory of the majority of plugins currently supported by Ansible:
|
||||
|
||||
```
|
||||
└── plugins
|
||||
├── action
|
||||
├── become
|
||||
├── cache
|
||||
├── callback
|
||||
├── cliconf
|
||||
├── connection
|
||||
├── filter
|
||||
├── httpapi
|
||||
├── inventory
|
||||
├── lookup
|
||||
├── module_utils
|
||||
├── modules
|
||||
├── netconf
|
||||
├── shell
|
||||
├── strategy
|
||||
├── terminal
|
||||
├── test
|
||||
└── vars
|
||||
```
|
||||
|
||||
A full list of plugin types can be found at [Working With Plugins](https://docs.ansible.com/ansible-core/2.15/plugins/plugins.html).
|
|
@ -0,0 +1,402 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright: (c) 2023, Florian L. <git+ansible@pounce.tech>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
from __future__ import annotations
|
||||
__metaclass__ = type
|
||||
|
||||
DOCUMENTATION = r'''
|
||||
---
|
||||
module: github_artifact
|
||||
|
||||
short_description: fetch assets from a GitHub repository release
|
||||
|
||||
description: vaguely similar to a package manager, but for GitHub artifacts.
|
||||
|
||||
version_added: "2.15.0"
|
||||
|
||||
options:
|
||||
artifacts:
|
||||
description: a list of artifacts to retrieve
|
||||
type: list
|
||||
required: true
|
||||
elements: dict
|
||||
suboptions:
|
||||
asset_name:
|
||||
description: filename of the asset to retrieve, used only for release type; supports templating
|
||||
type: str
|
||||
required: false
|
||||
default: ""
|
||||
asset_type:
|
||||
description: whether the asset is a release or just a tagged asset
|
||||
type: str
|
||||
required: true
|
||||
choices:
|
||||
- release
|
||||
- tag
|
||||
cmds:
|
||||
description: commands to execute in order to install the downloaded asset; supports templating
|
||||
type: list
|
||||
elements: str
|
||||
required: false
|
||||
default: []
|
||||
repository:
|
||||
description: repository to query, formatted like "<owner>/<repo>"
|
||||
required: true
|
||||
type: str
|
||||
version:
|
||||
description: version of the asset to fetch; defaults to `latest`
|
||||
required: false
|
||||
type: str
|
||||
default: latest
|
||||
github_token:
|
||||
description: a GitHub app token if you have one; limits impact of rate-limiting errors
|
||||
type: str
|
||||
required: false
|
||||
|
||||
notes:
|
||||
- "Strings that allow the use of templating variables support the following:"
|
||||
- " V(version): version of the system, B(NOT) the asset's;"
|
||||
- " V(system): type of the OS, retrieved from C(platform.system), e.g. I(Linux), I(Darwin);"
|
||||
- " V(machine): machine architecture, retrieved from C(platform.machine), e.g. I(x86_64), I(i386);"
|
||||
- " V(asset_name): name of the selected asset from the GitHub metadata results, e.g. I(dive_0.11.0_linux_amd64.deb);"
|
||||
- " V(asset_dirname): directory where the downloaded asset is located, e.g. I(/tmp/ansible-moduletmp-1695757626.5862153-xjpc5ip8);"
|
||||
- " V(asset_filename): name of the download asset file, e.g. I(dive_0.11.0_linux_amd64bnha_1dr.deb);"
|
||||
- " V(asset_version): version of the asset, retrieved directly from the GitHub metadata;"
|
||||
- " all variables defined in C(/etc/os-release), lowercase."
|
||||
|
||||
author:
|
||||
- Florian L. (@NaeiKinDus)
|
||||
'''
|
||||
|
||||
EXAMPLES = r'''
|
||||
- name: Install dependencies from GitHub
|
||||
become: yes
|
||||
tags:
|
||||
- molecule-idempotence-notest
|
||||
github_artifact:
|
||||
artifacts:
|
||||
- asset_type: tag
|
||||
repository: smxi/inxi
|
||||
cmds:
|
||||
- tar -zxf {asset_dirname}/{asset_filename}
|
||||
- install --group=root --mode=755 --owner=root smxi-inxi-*/inxi /usr/bin
|
||||
- install --group=root --mode=644 --owner=root smxi-inxi-*/inxi.1 /usr/share/man/man1
|
||||
- apt-get install libdata-dump-perl
|
||||
- asset_name: dive_{version}_linux_amd64.deb
|
||||
asset_type: release
|
||||
repository: wagoodman/dive
|
||||
cmds:
|
||||
- dpkg -i {asset_dirname}/{asset_filename}
|
||||
'''
|
||||
|
||||
RETURN = r'''
|
||||
# These are examples of possible return values, and in general should use other names for return values.
|
||||
original_message:
|
||||
description: The original name param that was passed in.
|
||||
type: str
|
||||
returned: always
|
||||
sample: "hello world"
|
||||
'''
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib # noqa
|
||||
from ansible.module_utils.urls import fetch_url, fetch_file # noqa
|
||||
|
||||
ANSIBLE_MODULE: AnsibleModule | None = None
|
||||
LIB_IMPORTED = False
|
||||
GITHUB_API_BASE = "https://api.github.com"
|
||||
GITHUB_DOWNLOAD_BASE = "https://github.com"
|
||||
GITHUB_API_VERSION = "2022-11-28"
|
||||
DEFAULT_HEADERS: dict[str, str] = {
|
||||
"Content-Type": "application/json",
|
||||
"Accept": "application/json, application/vnd.github+json;q=0.8",
|
||||
}
|
||||
|
||||
TEMPLATE_ASSET_NAME_VARS: dict[str, str] = {
|
||||
"version": "", # platform.version(), e.g. "12 (bookworm)"
|
||||
"system": "", # platform.system(), e.g. "Linux", "Darwin"
|
||||
"machine": "" # platform.machine(), e.g. "x86_64", "i386"
|
||||
}
|
||||
try:
|
||||
from datetime import datetime
|
||||
from difflib import SequenceMatcher
|
||||
from json import loads
|
||||
from os import environ, sep
|
||||
from platform import system, machine
|
||||
from typing import Any
|
||||
LIB_IMPORTED = True
|
||||
except ModuleNotFoundError as excp:
|
||||
import traceback
|
||||
IMPORT_LIB_ERROR = traceback.format_exc()
|
||||
IMPORT_LIB_NAME = excp.name
|
||||
try:
|
||||
from platform import freedesktop_os_release
|
||||
except (ModuleNotFoundError, ImportError):
|
||||
FREEDESKTOP_OS_RELEASE_FILE = '/etc/os-release'
|
||||
|
||||
def freedesktop_os_release() -> dict[str, str]:
|
||||
try:
|
||||
with open(FREEDESKTOP_OS_RELEASE_FILE, 'r') as fd:
|
||||
data = fd.read()
|
||||
os_info: dict[str, str] = {key: value.strip('"') for key, value in (line.split('=', 1) for line in data.splitlines() if line and '=' in line)}
|
||||
except FileNotFoundError:
|
||||
return dict()
|
||||
return os_info
|
||||
|
||||
|
||||
def find_compatible_asset(assets_list: list[dict[str, str | int | float]], asset_name: str) -> dict[str, str] | None:
|
||||
""" takes a list of assets and tries to find the most relevant one; assumes only one asset is required """
|
||||
best_match: dict[str, str] = {}
|
||||
matched_name_ratio: float = 0.0
|
||||
|
||||
if len(assets_list) == 0:
|
||||
return None
|
||||
elif len(assets_list) == 1:
|
||||
return {
|
||||
"asset_name": assets_list[0]["name"],
|
||||
"download_url": assets_list[0]["browser_download_url"],
|
||||
"match_ratio": "1.0"
|
||||
}
|
||||
|
||||
sm = SequenceMatcher(a=asset_name, b="")
|
||||
for asset in assets_list:
|
||||
if asset_name == asset["name"]:
|
||||
return {
|
||||
"asset_name": asset["name"],
|
||||
"download_url": asset["browser_download_url"],
|
||||
"match_ratio": "1.0"
|
||||
}
|
||||
sm.set_seq2(asset["name"])
|
||||
ratio = sm.ratio()
|
||||
if ratio > matched_name_ratio:
|
||||
best_match = asset
|
||||
matched_name_ratio = ratio
|
||||
if not best_match:
|
||||
return None
|
||||
return {
|
||||
"asset_name": best_match["name"],
|
||||
"download_url": best_match["browser_download_url"],
|
||||
"match_ratio": "{:.5f}".format(matched_name_ratio)
|
||||
}
|
||||
|
||||
|
||||
def fetch_github_data(url: str) -> tuple[dict | None, dict[str, int]]:
|
||||
""" query GitHub API and return a JSON formatted response along with HTTP info data """
|
||||
response, info = fetch_url(ANSIBLE_MODULE, url, headers=DEFAULT_HEADERS)
|
||||
http_status: int = info.get("status", 999)
|
||||
if http_status >= 400:
|
||||
return None, info
|
||||
return loads(response.read().decode("utf-8")), info
|
||||
|
||||
|
||||
def get_released_asset(artifact: dict[str, str]) -> tuple[dict[str, str], dict[str, int] | None]:
|
||||
""" fetch asset metadata using the release GitHub API """
|
||||
repository: str = artifact["repository"]
|
||||
version: str = artifact["version"]
|
||||
releases_url: str = "{}/repos/{}/releases/{}{}".format(
|
||||
GITHUB_API_BASE,
|
||||
repository,
|
||||
"tags/" if version != "latest" else "",
|
||||
version
|
||||
)
|
||||
|
||||
if ANSIBLE_MODULE.check_mode:
|
||||
return {
|
||||
"asset_name": "{}/{}.ext".format(repository, version),
|
||||
"download_url": "download_url",
|
||||
"match_confidence": "match_ratio",
|
||||
"version": version
|
||||
}, {}
|
||||
|
||||
response_data, info = fetch_github_data(releases_url)
|
||||
if not response_data:
|
||||
return {"error": "No release found for version {}. Requested source: {}".format(version, releases_url)}, info
|
||||
|
||||
asset_name = artifact.get("asset_name", "").format(**TEMPLATE_ASSET_NAME_VARS)
|
||||
asset = find_compatible_asset(response_data["assets"], asset_name=asset_name)
|
||||
if not asset:
|
||||
if not asset_name:
|
||||
return {"error": "No matching asset detected, try specifying the desired asset name in arguments list"}, info
|
||||
return {"error": "No asset matching name {} found".format(asset_name)}, info
|
||||
return {
|
||||
"asset_name": asset["asset_name"],
|
||||
"download_url": asset["download_url"],
|
||||
"match_confidence": asset["match_ratio"],
|
||||
"version": response_data["tag_name"] or response_data["name"]
|
||||
}, info
|
||||
|
||||
|
||||
def get_tagged_asset(artifact: dict[str, Any]) -> tuple[dict[str, str], dict[str, int] | None]:
|
||||
""" fetch asset metadata using the tags GitHub API """
|
||||
repository: str = artifact["repository"]
|
||||
version: str = artifact["version"]
|
||||
tags_url: str = "{}/repos/{}/tags?per_page=1".format(GITHUB_API_BASE, repository)
|
||||
|
||||
if version != "latest":
|
||||
return {
|
||||
"asset_name": "{}.tar.gz".format(version),
|
||||
"download_url": "{}/{}/archive/refs/tags/{}.tar.gz".format(GITHUB_DOWNLOAD_BASE, repository, version),
|
||||
"version": version
|
||||
}, None
|
||||
|
||||
if ANSIBLE_MODULE.check_mode:
|
||||
return {
|
||||
"asset_name": "asset_name",
|
||||
"download_url": "download_url",
|
||||
"version": version
|
||||
}, {}
|
||||
|
||||
response_data, info = fetch_github_data(tags_url)
|
||||
if not response_data:
|
||||
return {
|
||||
"error": "No tagged asset found for '{}'".format(tags_url)
|
||||
}, info
|
||||
response_data = response_data[0]
|
||||
return {
|
||||
"asset_name": "{}.tar.gz".format(response_data.get("name", "unknown")),
|
||||
"download_url": response_data.get("tarball_url"),
|
||||
"version": response_data.get("name", "latest")
|
||||
}, info
|
||||
|
||||
|
||||
def fetch_metadata(artifact: dict[str, str]) -> dict[str, str] | None:
|
||||
""" retrieve metadata from the specified repository """
|
||||
if artifact["asset_type"] == "tag":
|
||||
metadata, info = get_tagged_asset(artifact)
|
||||
else:
|
||||
metadata, info = get_released_asset(artifact)
|
||||
if info:
|
||||
reset_date = info.get("x-ratelimit-reset", None)
|
||||
metadata["rate_limit_max"] = info.get("x-ratelimit-limit", "unknown")
|
||||
metadata["rate_limit_remaining"] = info.get("x-ratelimit-remaining", "unknown")
|
||||
metadata["rate_limit_reset_date"] = datetime.fromtimestamp(float(reset_date)).isoformat() if reset_date else "unknown"
|
||||
return metadata
|
||||
|
||||
|
||||
def main():
|
||||
global ANSIBLE_MODULE
|
||||
|
||||
module_args: dict[str, dict[str, Any]] = {
|
||||
"artifacts": {
|
||||
"type": "list",
|
||||
"elements": "dict",
|
||||
"required": True,
|
||||
"options": {
|
||||
"asset_name": {
|
||||
"type": "str",
|
||||
"required": False,
|
||||
"default": ""
|
||||
},
|
||||
"asset_type": {
|
||||
"type": "str",
|
||||
"required": True,
|
||||
"choices": ["release", "tag"],
|
||||
},
|
||||
"cmds": {
|
||||
"type": "list",
|
||||
"elements": "str",
|
||||
"required": False,
|
||||
"default": []
|
||||
},
|
||||
"repository": {
|
||||
"type": "str",
|
||||
"required": True
|
||||
},
|
||||
"version": {
|
||||
"type": "str",
|
||||
"required": False,
|
||||
"default": "latest"
|
||||
}
|
||||
}
|
||||
},
|
||||
"github_token": {
|
||||
"type": "str",
|
||||
"required": False,
|
||||
"no_log": True
|
||||
},
|
||||
}
|
||||
result: dict[str, Any] = {
|
||||
"artifacts": [],
|
||||
"changed": False,
|
||||
"msg": ""
|
||||
}
|
||||
|
||||
ANSIBLE_MODULE = AnsibleModule(argument_spec=module_args, supports_check_mode=True)
|
||||
if "FORCE_CHECK_MODE" in environ:
|
||||
ANSIBLE_MODULE.check_mode = True if environ.get("FORCE_CHECK_MODE", False) in [True, "1", "True", "true"] else False
|
||||
if ANSIBLE_MODULE.params["github_token"]:
|
||||
DEFAULT_HEADERS["Authorization"] = "Bearer {}".format(ANSIBLE_MODULE.params["github_token"])
|
||||
|
||||
if not LIB_IMPORTED:
|
||||
ANSIBLE_MODULE.fail_json(msg=missing_required_lib(IMPORT_LIB_NAME), exception=IMPORT_LIB_ERROR) # pylint: disable=used-before-assignment
|
||||
|
||||
# load local metadata cached file to retrieve installed version
|
||||
TEMPLATE_ASSET_NAME_VARS.update({key.lower(): value for key, value in freedesktop_os_release().items()})
|
||||
TEMPLATE_ASSET_NAME_VARS["system"] = system().lower()
|
||||
TEMPLATE_ASSET_NAME_VARS["machine"] = machine().lower()
|
||||
for artifact in ANSIBLE_MODULE.params["artifacts"]:
|
||||
# fetch artifact metadata
|
||||
artifact_result: dict[str, Any] = {
|
||||
"asset_data": fetch_metadata(artifact),
|
||||
"repository": artifact.get("repository"),
|
||||
"version": artifact.get("version"),
|
||||
"cmds": []
|
||||
}
|
||||
result["rate_limit_remaining"] = artifact_result["asset_data"].get("rate_limit_remaining", "unknown")
|
||||
result["rate_limit_max"] = artifact_result["asset_data"].get("rate_limit_max", "unknown")
|
||||
|
||||
if "error" in artifact_result["asset_data"]:
|
||||
result["artifacts"].append(artifact_result)
|
||||
result["msg"] = artifact_result["asset_data"].get("error")
|
||||
result["failed"] = True
|
||||
ANSIBLE_MODULE.fail_json(**result)
|
||||
|
||||
# download artifact
|
||||
if ANSIBLE_MODULE.check_mode:
|
||||
artifact_result["download_dir"] = "unknown"
|
||||
else:
|
||||
artifact_result["download_dir"] = fetch_file(ANSIBLE_MODULE, artifact_result["asset_data"].get("download_url", "unknown"), decompress=False)
|
||||
TEMPLATE_ASSET_NAME_VARS["asset_name"] = artifact_result["asset_data"].get("asset_name", "unknown")
|
||||
TEMPLATE_ASSET_NAME_VARS["asset_version"] = artifact_result["asset_data"].get("version", "unknown")
|
||||
parts = artifact_result["download_dir"].rsplit(sep, 1)
|
||||
TEMPLATE_ASSET_NAME_VARS["asset_dirname"] = parts[0] if len(parts) > 1 else ""
|
||||
TEMPLATE_ASSET_NAME_VARS["asset_filename"] = parts[1] if len(parts) > 1 else parts[0]
|
||||
|
||||
# install artifact
|
||||
artifact_commands = [line.format(**TEMPLATE_ASSET_NAME_VARS) for line in artifact["cmds"]]
|
||||
if ANSIBLE_MODULE.check_mode:
|
||||
artifact_result["stdout"] = artifact_result["stderr"] = ""
|
||||
artifact_result["ret_code"] = None
|
||||
artifact_result["cmds"] = artifact_commands
|
||||
artifact_result["state"] = "should be installed" if len(artifact_commands) else "should be downloaded"
|
||||
else:
|
||||
for command_line in artifact_commands:
|
||||
cmd_rc, cmd_out, cmd_err = ANSIBLE_MODULE.run_command(command_line, use_unsafe_shell=True, cwd=ANSIBLE_MODULE.tmpdir)
|
||||
result["changed"] = True
|
||||
artifact_result["cmds"].append({
|
||||
"command": command_line,
|
||||
"stdout": cmd_out,
|
||||
"stderr": cmd_err,
|
||||
"ret_code": cmd_rc
|
||||
})
|
||||
|
||||
if cmd_rc:
|
||||
artifact_result["state"] = "installation failed"
|
||||
result["failed"] = True
|
||||
result["artifacts"].append(artifact_result)
|
||||
ANSIBLE_MODULE.fail_json(**result)
|
||||
|
||||
try:
|
||||
del artifact_result["command"], artifact_result["stdout"], artifact_result["stderr"], artifact_result["ret_code"]
|
||||
except: # pylint: disable=bare-except # noqa: 722
|
||||
pass
|
||||
artifact_result["state"] = "installed" if len(artifact_commands) else "downloaded"
|
||||
|
||||
result["artifacts"].append(artifact_result)
|
||||
result["msg"] = "OK"
|
||||
ANSIBLE_MODULE.exit_json(**result)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
@ -0,0 +1,42 @@
|
|||
{
|
||||
"ANSIBLE_MODULE_ARGS": {
|
||||
"github_token": "",
|
||||
"artifacts": [
|
||||
{
|
||||
"asset_type": "tag",
|
||||
"repository": "smxi/inxi",
|
||||
"version": "3.3.29-1",
|
||||
"cmds": [
|
||||
"echo asset_name: {asset_name}",
|
||||
"echo asset_dirname: {asset_dirname}",
|
||||
"echo asset_filename: {asset_filename}",
|
||||
"echo asset_version: {asset_version}",
|
||||
"echo system: {system}",
|
||||
"echo machine: {machine}",
|
||||
"echo version: {version}"
|
||||
]
|
||||
},
|
||||
{
|
||||
"asset_type": "tag",
|
||||
"repository": "smxi/inxi",
|
||||
"cmds": [
|
||||
"tar -zxf {asset_dirname}/{asset_filename}",
|
||||
"install --group=root --mode=755 --owner=root smxi-inxi-*/inxi /usr/bin",
|
||||
"install --group=root --mode=644 --owner=root smxi-inxi-*/inxi.1 /usr/share/man/man1",
|
||||
"apt-get install libdata-dump-perl"
|
||||
]
|
||||
},
|
||||
{
|
||||
"asset_name": "dive_{version}_linux_amd64.deb",
|
||||
"asset_type": "release",
|
||||
"repository": "wagoodman/dive"
|
||||
},
|
||||
{
|
||||
"asset_name": "dive_{version}_linux_amd64.deb",
|
||||
"asset_type": "release",
|
||||
"repository": "wagoodman/dive",
|
||||
"version": "v0.10.0"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
|
@ -0,0 +1,2 @@
|
|||
---
|
||||
custom_github_token: ""
|
|
@ -0,0 +1 @@
|
|||
---
|
|
@ -0,0 +1,22 @@
|
|||
---
|
||||
galaxy_info:
|
||||
author: Florian L.
|
||||
namespace: nullified
|
||||
description: Setup common tasks (e.g. users, CLI tools)
|
||||
# issue_tracker_url: http://example.com/issue/tracker
|
||||
license: MIT
|
||||
min_ansible_version: 2.15
|
||||
|
||||
# https://galaxy.ansible.com/api/v1/platforms/
|
||||
platforms:
|
||||
- name: Debian
|
||||
versions:
|
||||
- bookworm
|
||||
|
||||
galaxy_tags:
|
||||
- github
|
||||
- assets
|
||||
- utils
|
||||
- system
|
||||
|
||||
dependencies: []
|
|
@ -0,0 +1,60 @@
|
|||
---
|
||||
- name: '[APT] install dependencies and tools'
|
||||
become: yes
|
||||
ansible.builtin.apt:
|
||||
update_cache: yes
|
||||
force_apt_get: true
|
||||
cache_valid_time: 3600
|
||||
pkg:
|
||||
- bzip2
|
||||
- cron
|
||||
- emacs-nox
|
||||
- git
|
||||
- jq
|
||||
- less
|
||||
- libdata-dump-perl # inxi
|
||||
- libxml-dumper-perl # inxi
|
||||
- ncdu
|
||||
- openssh-server
|
||||
- procps
|
||||
- rsync
|
||||
- zsh
|
||||
state: present
|
||||
|
||||
- name: '[GitHub] install tools'
|
||||
become: yes
|
||||
tags:
|
||||
- molecule-idempotence-notest
|
||||
nullified.infrastructure.github_artifact:
|
||||
github_token: '{{ custom_github_token }}'
|
||||
artifacts:
|
||||
- repository: smxi/inxi
|
||||
asset_type: tag
|
||||
cmds:
|
||||
- tar -zxf {asset_dirname}/{asset_filename}
|
||||
- install --group=root --mode=755 --owner=root smxi-inxi-*/inxi /usr/local/bin
|
||||
- install --group=root --mode=644 --owner=root smxi-inxi-*/inxi.1 /usr/share/man/man1
|
||||
- repository: sharkdp/bat
|
||||
asset_name: bat_{version}_amd64.deb
|
||||
asset_type: release
|
||||
cmds:
|
||||
- dpkg -i {asset_dirname}/{asset_filename}
|
||||
- repository: aristocratos/btop
|
||||
asset_name: btop-x86_64-linux-musl.tbz
|
||||
asset_type: release
|
||||
cmds:
|
||||
- tar -xjf {asset_dirname}/{asset_filename}
|
||||
- install --group=root --mode=755 --owner=root btop/bin/btop /usr/bin
|
||||
- mkdir /usr/share/btop || true
|
||||
- cp -pr btop/themes /usr/share/btop
|
||||
- repository: eza-community/eza
|
||||
asset_name: eza_x86_64-unknown-linux-gnu.tar.gz
|
||||
asset_type: release
|
||||
cmds:
|
||||
- tar -zxf {asset_dirname}/{asset_filename}
|
||||
- install --group=root --mode=755 --owner=root eza /usr/bin
|
||||
- repository: muesli/duf
|
||||
asset_name: duf_{version}_linux_amd64.deb
|
||||
asset_type: release
|
||||
cmds:
|
||||
- dpkg -i {asset_dirname}/{asset_filename}
|
|
@ -0,0 +1,2 @@
|
|||
localhost
|
||||
|
|
@ -0,0 +1 @@
|
|||
---
|
|
@ -0,0 +1,2 @@
|
|||
---
|
||||
custom_github_token: ""
|
|
@ -0,0 +1,2 @@
|
|||
---
|
||||
# handlers file for development
|
|
@ -0,0 +1,22 @@
|
|||
---
|
||||
galaxy_info:
|
||||
author: Florian L.
|
||||
namespace: nullified
|
||||
description: Install tools for development environment
|
||||
# issue_tracker_url: http://example.com/issue/tracker
|
||||
license: MIT
|
||||
min_ansible_version: 2.15
|
||||
|
||||
# https://galaxy.ansible.com/api/v1/platforms/
|
||||
platforms:
|
||||
- name: Debian
|
||||
versions:
|
||||
- bookworm
|
||||
|
||||
galaxy_tags:
|
||||
- github
|
||||
- assets
|
||||
- utils
|
||||
- system
|
||||
|
||||
dependencies: []
|
|
@ -0,0 +1,146 @@
|
|||
---
|
||||
- name: '[APT] install dependencies and tools'
|
||||
become: true
|
||||
ansible.builtin.apt:
|
||||
update_cache: true
|
||||
force_apt_get: true
|
||||
cache_valid_time: 3600
|
||||
pkg:
|
||||
- autoconf
|
||||
- bc
|
||||
- ca-certificates # docker-ce
|
||||
- curl
|
||||
- g++
|
||||
- gcc
|
||||
- git
|
||||
- gnupg # docker-ce
|
||||
- jq
|
||||
- libasound2 # draw.io
|
||||
- libatspi2.0-0 # draw.io
|
||||
- libgtk-3-0 # draw.io
|
||||
- libnotify4 # draw.io
|
||||
- libnss3 # draw.io
|
||||
- libsecret-1-0 # draw.io
|
||||
- libxss1 # draw.io
|
||||
- libxtst6 # draw.io
|
||||
- make
|
||||
- shellcheck
|
||||
- valgrind
|
||||
- xdg-utils # draw.io
|
||||
state: present
|
||||
|
||||
- name: '[GitHub] install tools'
|
||||
become: true
|
||||
tags:
|
||||
- molecule-idempotence-notest
|
||||
nullified.infrastructure.github_artifact:
|
||||
github_token: '{{ custom_github_token }}'
|
||||
artifacts:
|
||||
- asset_name: dive_{version}_linux_amd64.deb
|
||||
asset_type: release
|
||||
repository: wagoodman/dive
|
||||
cmds:
|
||||
- dpkg -i {asset_dirname}/{asset_filename}
|
||||
- asset_name: kubeconform-linux-amd64.tar.gz
|
||||
asset_type: release
|
||||
repository: yannh/kubeconform
|
||||
cmds:
|
||||
- tar -zxf {asset_dirname}/{asset_filename}
|
||||
- install --group=root --mode=755 --owner=root kubeconform /usr/local/bin
|
||||
- asset_name: git-delta_{version}_amd64.deb
|
||||
asset_type: release
|
||||
repository: dandavison/delta
|
||||
cmds:
|
||||
- dpkg -i {asset_dirname}/{asset_filename}
|
||||
- asset_name: docker-compose-linux-x86_64
|
||||
asset_type: release
|
||||
repository: docker/compose
|
||||
cmds:
|
||||
- install --group=root --mode=755 --owner=root {asset_dirname}/{asset_filename} /usr/local/bin/docker-compose
|
||||
- test -d /usr/local/lib/docker/cli-plugins && (rm /usr/local/lib/docker/cli-plugins/docker-compose; ln -s /usr/local/bin/docker-compose /usr/local/lib/docker/cli-plugins) || true
|
||||
- test -d /usr/local/libexec/docker/cli-plugins && (rm /usr/local/libexec/docker/cli-plugins/docker-compose; ln -s /usr/local/bin/docker-compose /usr/local/libexec/docker/cli-plugins) || true
|
||||
- test -d /usr/lib/docker/cli-plugins && (rm /usr/lib/docker/cli-plugins/docker-compose; ln -s /usr/local/bin/docker-compose /usr/lib/docker/cli-plugins) || true
|
||||
- test -d /usr/libexec/docker/cli-plugins && (rm /usr/libexec/docker/cli-plugins/docker-compose; ln -s /usr/local/bin/docker-compose /usr/libexec/docker/cli-plugins) || true
|
||||
- asset_name: buildx-{version}.linux-amd64
|
||||
asset_type: release
|
||||
repository: docker/buildx
|
||||
cmds:
|
||||
- install --group=root --mode=755 --owner=root {asset_dirname}/{asset_filename} /usr/local/bin/docker-buildx
|
||||
- test -d /usr/local/lib/docker/cli-plugins && (rm /usr/local/lib/docker/cli-plugins/docker-buildx; ln -s /usr/local/bin/docker-buildx /usr/local/lib/docker/cli-plugins) || true
- test -d /usr/local/libexec/docker/cli-plugins && (rm /usr/local/libexec/docker/cli-plugins/docker-buildx; ln -s /usr/local/bin/docker-buildx /usr/local/libexec/docker/cli-plugins) || true
- test -d /usr/lib/docker/cli-plugins && (rm /usr/lib/docker/cli-plugins/docker-buildx; ln -s /usr/local/bin/docker-buildx /usr/lib/docker/cli-plugins) || true
- test -d /usr/libexec/docker/cli-plugins && (rm /usr/libexec/docker/cli-plugins/docker-buildx; ln -s /usr/local/bin/docker-buildx /usr/libexec/docker/cli-plugins) || true
|
||||
- asset_name: drawio-amd64-{version}.deb
|
||||
asset_type: release
|
||||
repository: jgraph/drawio-desktop
|
||||
cmds:
|
||||
- dpkg -i {asset_dirname}/{asset_filename}
|
||||
- asset_name: OpenLens-{version}.amd64.deb
|
||||
asset_type: release
|
||||
repository: MuhammedKalkan/OpenLens
|
||||
cmds:
|
||||
- dpkg -i {asset_dirname}/{asset_filename}
|
||||
- asset_name: stern_{version}_linux_amd64.tar.gz
|
||||
asset_type: release
|
||||
repository: stern/stern
|
||||
cmds:
|
||||
- tar -zxf {asset_dirname}/{asset_filename}
|
||||
- install --group=root --mode=755 --owner=root stern /usr/local/bin
|
||||
- asset_name: tofu_{version}_amd64.deb
|
||||
asset_type: release
|
||||
repository: opentofu/opentofu
|
||||
cmds:
|
||||
- dpkg -i {asset_dirname}/{asset_filename}
|
||||
|
||||
- name: '[Custom] install latest kubectl'
|
||||
become: yes
|
||||
tags:
|
||||
- molecule-idempotence-notest
|
||||
ansible.builtin.shell: |
|
||||
kubeVersion=$(curl -sSL -f https://storage.googleapis.com/kubernetes-release/release/stable.txt 2> /dev/null)
|
||||
kubeVersion=${kubeVersion:-v1.28.2}
|
||||
curl --silent --compressed -L -XGET https://storage.googleapis.com/kubernetes-release/release/${kubeVersion}/bin/linux/amd64/kubectl -o kubectl
|
||||
install --group=root --mode=755 --owner=root kubectl /usr/local/bin && rm kubectl
|
||||
|
||||
- name: '[Custom] install latest Helm'
|
||||
become: yes
|
||||
tags:
|
||||
- molecule-idempotence-notest
|
||||
ansible.builtin.shell: |
|
||||
helmVersion=$(curl -sSL https://api.github.com/repos/helm/helm/releases/latest | jq -r '.tag_name')
|
||||
helmVersion=${helmVersion:-v3.13.0}
|
||||
curl --silent --compressed -L -XGET https://get.helm.sh/helm-${helmVersion}-linux-amd64.tar.gz -o helm.tar.gz
|
||||
tar -zxf helm.tar.gz
|
||||
install --group=root --mode=755 --owner=root linux-amd64/helm /usr/local/bin && rm -rf linux-amd64 helm.tar.gz
|
||||
|
||||
- name: '[custom] install Docker CE repository'
|
||||
block:
|
||||
- name: '[apt key] retrieve GPG key'
|
||||
tags:
|
||||
- molecule-idempotence-notest
|
||||
ansible.builtin.shell: |-
|
||||
curl -fsSL https://download.docker.com/linux/debian/gpg | gpg --dearmor -o /etc/apt/keyrings/docker.gpg
|
||||
chmod a+r /etc/apt/keyrings/docker.gpg
|
||||
|
||||
- name: '[apt key] add source'
|
||||
ansible.builtin.apt_repository:
|
||||
repo: "deb [arch=amd64 signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/debian {{ ansible_distribution_release }} stable"
|
||||
state: present
|
||||
|
||||
- name: '[Apt Key] refresh repository'
|
||||
ansible.builtin.apt:
|
||||
update_cache: true
|
||||
force_apt_get: true
|
||||
cache_valid_time: 0
|
||||
|
||||
- name: '[Apt] install Docker CE'
|
||||
become: yes
|
||||
ansible.builtin.apt:
|
||||
update_cache: true
|
||||
force_apt_get: true
|
||||
cache_valid_time: 3600
|
||||
pkg:
|
||||
- docker-ce
|
||||
- docker-ce-cli
|
||||
- containerd.io
|
||||
state: present
|
|
@ -0,0 +1,2 @@
|
|||
localhost
|
||||
|
|
@ -0,0 +1,2 @@
|
|||
---
|
||||
# vars file for development
|
|
@ -0,0 +1,2 @@
|
|||
---
|
||||
security_clamav_version: 1.2.1
|
|
@ -0,0 +1,28 @@
|
|||
---
|
||||
- name: '[ssh] restart service'
|
||||
ansible.builtin.systemd_service:
|
||||
name: sshd.service
|
||||
enabled: true
|
||||
state: restarted
|
||||
|
||||
- name: '[clamav] daemon reload'
|
||||
ansible.builtin.systemd_service:
|
||||
daemon_reload: true
|
||||
|
||||
- name: '[freshclam] restart service'
|
||||
ansible.builtin.systemd_service:
|
||||
name: clamav-freshclam.service
|
||||
enabled: true
|
||||
state: restarted
|
||||
|
||||
- name: '[clamd] wait for signatures'
|
||||
ansible.builtin.wait_for:
|
||||
path: /var/lib/clamav/bytecode.cvd
|
||||
timeout: 600
|
||||
state: present
|
||||
|
||||
- name: '[clamd] restart service'
|
||||
ansible.builtin.systemd_service:
|
||||
name: clamav-clamd.service
|
||||
enabled: true
|
||||
state: restarted
|
|
@ -0,0 +1,22 @@
|
|||
---
|
||||
galaxy_info:
|
||||
author: Florian L.
|
||||
namespace: nullified
|
||||
description: Deploy security tweaks to systems
|
||||
# issue_tracker_url: http://example.com/issue/tracker
|
||||
license: MIT
|
||||
min_ansible_version: 2.15
|
||||
|
||||
# https://galaxy.ansible.com/api/v1/platforms/
|
||||
platforms:
|
||||
- name: Debian
|
||||
versions:
|
||||
- bookworm
|
||||
|
||||
galaxy_tags:
|
||||
- github
|
||||
- assets
|
||||
- utils
|
||||
- system
|
||||
|
||||
dependencies: []
|
|
@ -0,0 +1,166 @@
|
|||
---
|
||||
- name: '[setup] gather facts if not already done'
|
||||
ansible.builtin.setup:
|
||||
gather_subset:
|
||||
- distribution
|
||||
|
||||
- name: '[ssh] hardening sshd'
|
||||
become: yes
|
||||
block:
|
||||
- name: '[ssh] setup sshd_config'
|
||||
ansible.builtin.template:
|
||||
src: ../templates/openssh-server/sshd_config.j2
|
||||
dest: /etc/ssh/sshd_config
|
||||
mode: '0644'
|
||||
notify:
|
||||
- '[ssh] restart service'
|
||||
- name: '[ssh] setup sshd_config.d'
|
||||
ansible.builtin.template:
|
||||
src: ../templates/openssh-server/sshd_config.d/encryption.conf.j2
|
||||
dest: /etc/ssh/sshd_config.d/encryption.conf
|
||||
mode: '0644'
|
||||
notify:
|
||||
- 'security : [ssh] restart service'
|
||||
|
||||
- name: '[utils] install security and audit tools'
|
||||
become: yes
|
||||
ansible.builtin.apt:
|
||||
update_cache: true
|
||||
force_apt_get: true
|
||||
cache_valid_time: 3600
|
||||
pkg:
|
||||
- lsof # rkhunter
|
||||
- rkhunter
|
||||
- unhide # rkhunter
|
||||
state: present
|
||||
|
||||
- name: '[system] configure rkhunter'
|
||||
become: yes
|
||||
block:
|
||||
- name: '[rkhunter] create include dir'
|
||||
ansible.builtin.file:
|
||||
path: /etc/rkhunter.d
|
||||
state: directory
|
||||
mode: '0750'
|
||||
- name: '[rkhunter] copy configuration'
|
||||
ansible.builtin.template:
|
||||
src: ../templates/rkhunter/rkhunter.conf.local.j2
|
||||
dest: /etc/rkhunter.conf.local
|
||||
mode: '0640'
|
||||
- name: '[rkhunter] setup cronjob'
|
||||
ansible.builtin.cron:
|
||||
name: rkhunter check
|
||||
minute: 0
|
||||
hour: 4
|
||||
day: "*/3"
|
||||
job: "/usr/bin/rkhunter -c 2>&1"
|
||||
state: present
|
||||
|
||||
- name: '[system] clamav'
|
||||
become: yes
|
||||
block:
|
||||
- name: '[clamav] retrieve and install clamav package'
|
||||
ansible.builtin.apt:
|
||||
deb: https://www.clamav.net/downloads/production/clamav-{{ security_clamav_version }}.linux.x86_64.deb
|
||||
force_apt_get: true
|
||||
state: present
|
||||
- name: '[clamav] add clamav group'
|
||||
ansible.builtin.group:
|
||||
name: clamav
|
||||
system: true
|
||||
state: present
|
||||
- name: '[clamav] add clamav user'
|
||||
ansible.builtin.user:
|
||||
name: clamav
|
||||
comment: clamav
|
||||
create_home: false
|
||||
expires: -1
|
||||
group: clamav
|
||||
shell: /bin/false
|
||||
system: true
|
||||
state: present
|
||||
- name: '[clamav] setup directories'
|
||||
block:
|
||||
- name: '[clamav] ensure /etc/clamav dir exists'
|
||||
ansible.builtin.file:
|
||||
path: /etc/clamav
|
||||
state: directory
|
||||
owner: clamav
|
||||
group: clamav
|
||||
mode: '0750'
|
||||
- name: '[clamav] ensure /var/lib/clamav dir exists'
|
||||
ansible.builtin.file:
|
||||
path: /var/lib/clamav
|
||||
state: directory
|
||||
owner: clamav
|
||||
group: clamav
|
||||
mode: '0750'
|
||||
- name: '[clamav] ensure /var/lib/clamav/quarantine dir exists'
|
||||
ansible.builtin.file:
|
||||
path: /var/lib/clamav/quarantine
|
||||
state: directory
|
||||
owner: clamav
|
||||
group: clamav
|
||||
mode: '0750'
|
||||
- name: '[clamav] ensure /var/log/clamav dir exists'
|
||||
ansible.builtin.file:
|
||||
path: /var/log/clamav
|
||||
state: directory
|
||||
owner: clamav
|
||||
group: clamav
|
||||
mode: '0750'
|
||||
- name: '[clamav] copy clamd.conf'
|
||||
ansible.builtin.template:
|
||||
src: '../templates/clamav/clamd.conf.j2'
|
||||
dest: /etc/clamav/clamd.conf
|
||||
owner: clamav
|
||||
group: clamav
|
||||
mode: '0640'
|
||||
- name: '[clamav] copy freshclam.conf'
|
||||
ansible.builtin.template:
|
||||
src: '../templates/clamav/freshclam.conf.j2'
|
||||
dest: /etc/clamav/freshclam.conf
|
||||
owner: clamav
|
||||
group: clamav
|
||||
mode: '0640'
|
||||
- name: '[clamav] setup freshclam service'
|
||||
block:
|
||||
- name: '[clamav] copy freshclam service file'
|
||||
ansible.builtin.template:
|
||||
src: '../templates/clamav/clamav-freshclam.service.j2'
|
||||
dest: /usr/lib/systemd/system/clamav-freshclam.service
|
||||
mode: '0644'
|
||||
- name: '[clamav] setup clamd service'
|
||||
block:
|
||||
- name: '[clamav] copy clamd service file'
|
||||
ansible.builtin.template:
|
||||
src: '../templates/clamav/clamav-clamd.service.j2'
|
||||
dest: /usr/lib/systemd/system/clamav-clamd.service
|
||||
mode: '0644'
|
||||
- name: '[clamav] setup cron job'
|
||||
ansible.builtin.cron:
|
||||
name: clamav full system scan
|
||||
minute: 30
|
||||
hour: 5
|
||||
weekday: 0
|
||||
job: "/usr/local/bin/clamscan --recursive --infected --exclude-dir='^/(sys|proc)' --database=/var/lib/clamav --move=/var/lib/clamav/quarantine --log=/var/log/clamav/weekly.log / 2>&1"
|
||||
state: present
|
||||
notify:
|
||||
- 'security : [clamav] daemon reload'
|
||||
- 'security : [freshclam] restart service'
|
||||
- 'security : [clamd] wait for signatures'
|
||||
- 'security : [clamd] restart service'
|
||||
|
||||
- name: '[system] hardening system'
|
||||
become: yes
|
||||
block:
|
||||
- name: '[system] login.defs'
|
||||
ansible.builtin.template:
|
||||
src: '../templates/system/{{ ansible_distribution | lower }}/login.defs.j2'
|
||||
dest: /etc/login.defs
|
||||
mode: '0644'
|
||||
- name: '[system] limits.conf'
|
||||
ansible.builtin.template:
|
||||
src: '../templates/system/{{ ansible_distribution | lower }}/limits.conf.j2'
|
||||
dest: /etc/security/limits.conf
|
||||
mode: '0644'
|
|
@ -0,0 +1,22 @@
|
|||
[Unit]
|
||||
Description=ClamAV virus scanner
|
||||
Documentation=man:clamd(1) man:clamd.conf(5) https://docs.clamav.net/
|
||||
ConditionPathExistsGlob=/var/lib/clamav/main.{c[vl]d,inc}
|
||||
ConditionPathExistsGlob=/var/lib/clamav/daily.{c[vl]d,inc}
|
||||
Wants=network-online.target
|
||||
After=network-online.target
|
||||
|
||||
[Service]
|
||||
User=clamav
|
||||
Group=clamav
|
||||
Type=simple
|
||||
ExecStart=/usr/local/sbin/clamd --foreground=true --config-file=/etc/clamav/clamd.conf
|
||||
ExecReload=/bin/kill -USR2 $MAINPID
|
||||
TimeoutStartSec=300
|
||||
RuntimeDirectory=clamav
|
||||
RuntimeDirectoryMode=0755
|
||||
LogsDirectory=clamav
|
||||
LogsDirectoryMode=0750
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,14 @@
|
|||
[Unit]
|
||||
Description=ClamAV virus database updater
|
||||
Documentation=man:freshclam(1) man:freshclam.conf(5) https://docs.clamav.net/
|
||||
ConditionPathExists=!/etc/cron.d/clamav-freshclam
|
||||
Wants=network-online.target
|
||||
After=network-online.target
|
||||
|
||||
[Service]
|
||||
ExecStart=/usr/local/bin/freshclam -d --foreground=true --config-file=/etc/clamav/freshclam.conf
|
||||
LogsDirectory=clamav
|
||||
LogsDirectoryMode=0750
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,298 @@
##
## Example config file for clamav-milter
##

# Comment or remove the line below.
Example


##
## Main options
##

# Define the interface through which we communicate with sendmail
# This option is mandatory! Possible formats are:
# [[unix|local]:]/path/to/file - to specify a unix domain socket
# inet:port@[hostname|ip-address] - to specify an ipv4 socket
# inet6:port@[hostname|ip-address] - to specify an ipv6 socket
#
# Default: no default
#MilterSocket /run/clamav/clamav-milter.sock
#MilterSocket /tmp/clamav-milter.sock
#MilterSocket inet:7357

# Define the group ownership for the (unix) milter socket.
# Default: disabled (the primary group of the user running clamd)
#MilterSocketGroup virusgroup

# Sets the permissions on the (unix) milter socket to the specified mode.
# Default: disabled (obey umask)
#MilterSocketMode 660

# Remove stale socket after unclean shutdown.
#
# Default: yes
#FixStaleSocket yes

# Run as another user (clamav-milter must be started by root for this option
# to work)
#
# Default: unset (don't drop privileges)
#User clamav

# Waiting for data from clamd will timeout after this time (seconds).
# Value of 0 disables the timeout.
#
# Default: 120
#ReadTimeout 300

# Don't fork into background.
#
# Default: no
#Foreground yes

# Chroot to the specified directory.
# Chrooting is performed just after reading the config file and before
# dropping privileges.
#
# Default: unset (don't chroot)
#Chroot /newroot

# This option allows you to save a process identifier of the listening
# daemon.
# This file will be owned by root, as long as clamav-milter was started by
# root. It is recommended that the directory where this file is stored is
# also owned by root to keep other users from tampering with it.
#
# Default: disabled
#PidFile /run/clamav/clamav-milter.pid

# Optional path to the global temporary directory.
# Default: system specific (usually /tmp or /var/tmp).
#
#TemporaryDirectory /var/tmp

##
## Clamd options
##

# Define the clamd socket to connect to for scanning.
# This option is mandatory! Syntax:
# ClamdSocket unix:path
# ClamdSocket tcp:host:port
# The first syntax specifies a local unix socket (needs an absolute path) e.g.:
# ClamdSocket unix:/run/clamav/clamd.sock
# The second syntax specifies a tcp local or remote tcp socket: the
# host can be a hostname or an ip address; the ":port" field is only required
# for IPv6 addresses, otherwise it defaults to 3310, e.g.:
# ClamdSocket tcp:192.168.0.1
#
# This option can be repeated several times with different sockets or even
# with the same socket: clamd servers will be selected in a round-robin
# fashion.
#
# Default: no default
#ClamdSocket tcp:scanner.mydomain:7357
#ClamdSocket unix:/run/clamav/clamd.sock


##
## Exclusions
##

# Messages originating from these hosts/networks will not be scanned
# This option takes a host(name)/mask pair in CIDR notation and can be
# repeated several times. If "/mask" is omitted, a host is assumed.
# To specify a locally originated, non-smtp, email use the keyword "local"
#
# Default: unset (scan everything regardless of the origin)
#LocalNet local
#LocalNet 192.168.0.0/24
#LocalNet 1111:2222:3333::/48

# This option specifies a file which contains a list of basic POSIX regular
# expressions. Addresses (sent to or from - see below) matching these regexes
# will not be scanned. Optionally each line can start with the string "From:"
# or "To:" (note: no whitespace after the colon) indicating if it is,
# respectively, the sender or recipient that is to be allowed.
# If the field is missing, "To:" is assumed.
# Lines starting with #, : or ! are ignored.
#
# Default: unset (no exclusion applied)
#AllowList /etc/allowed_addresses

# Messages from authenticated SMTP users matching this extended POSIX
# regular expression (egrep-like) will not be scanned.
# As an alternative, a file containing a plain (not regex) list of names (one
# per line) can be specified using the prefix "file:".
# e.g. SkipAuthenticated file:/etc/good_guys
#
# Note: this is the AUTH login name!
#
# Default: unset (no allowing based on SMTP auth)
#SkipAuthenticated ^(tom|dick|henry)$

# Messages larger than this value won't be scanned.
# Make sure this value is lower than or equal to StreamMaxLength in clamd.conf
#
# Default: 25M
#MaxFileSize 10M


##
## Actions
##

# The following group of options controls the delivery process under
# different circumstances.
# The following actions are available:
# - Accept
#   The message is accepted for delivery
# - Reject
#   Immediately refuse delivery (a 5xx error is returned to the peer)
# - Defer
#   Return a temporary failure message (4xx) to the peer
# - Blackhole (not available for OnFail)
#   Like Accept but the message is sent to oblivion
# - Quarantine (not available for OnFail)
#   Like Accept but message is quarantined instead of being delivered
#
# NOTE: In Sendmail the quarantine queue can be examined via mailq -qQ
# For Postfix this causes the message to be placed on hold
#
# Action to be performed on clean messages (mostly useful for testing)
# Default: Accept
#OnClean Accept

# Action to be performed on infected messages
# Default: Quarantine
#OnInfected Quarantine

# Action to be performed on error conditions (this includes failure to
# allocate data structures, no scanners available, network timeouts,
# unknown scanner replies and the like)
# Default: Defer
#OnFail Defer

# This option allows you to set a specific rejection reason for infected
# messages and is therefore only useful together with "OnInfected Reject"
# The string "%v", if present, will be replaced with the virus name.
# Default: MTA specific
#RejectMsg

# If this option is set to "Replace" (or "Yes"), an "X-Virus-Scanned" and an
# "X-Virus-Status" headers will be attached to each processed message, possibly
# replacing existing headers.
# If it is set to Add, the X-Virus headers are added possibly on top of the
# existing ones.
# Note that while "Replace" can potentially break DKIM signatures, "Add" may
# confuse procmail and similar filters.
# Default: no
#AddHeader Replace

# When AddHeader is in use, this option allows you to set an arbitrary
# reported hostname. This may be desirable in order to avoid leaking internal names.
# If unset the real machine name is used.
# Default: disabled
#ReportHostname my.mail.server.name

# Execute a command (possibly searching PATH) when an infected message is
# found.
# The following parameters are passed to the invoked program in this order:
# virus name, queue id, sender, destination, subject, message id, message date.
# Note #1: this requires MTA macros to be available (see LogInfected below)
# Note #2: the process is invoked in the context of clamav-milter
# Note #3: clamav-milter will wait for the process to exit. Be quick or fork to
# avoid unnecessary delays in email delivery
# Default: disabled
#VirusAction /usr/local/bin/my_infected_message_handler

##
## Logging options
##

# Uncomment this option to enable logging.
# LogFile must be writable for the user running daemon.
# A full path is required.
#
# Default: disabled
#LogFile /tmp/clamav-milter.log

# By default the log file is locked for writing - the lock protects against
# running clamav-milter multiple times.
# This option disables log file locking.
#
# Default: no
#LogFileUnlock yes

# Maximum size of the log file.
# Value of 0 disables the limit.
# You may use 'M' or 'm' for megabytes (1M = 1m = 1048576 bytes)
# and 'K' or 'k' for kilobytes (1K = 1k = 1024 bytes). To specify the size
# in bytes just don't use modifiers. If LogFileMaxSize is enabled, log
# rotation (the LogRotate option) will always be enabled.
#
# Default: 1M
#LogFileMaxSize 2M

# Log time with each message.
#
# Default: no
#LogTime yes

# Use system logger (can work together with LogFile).
#
# Default: no
#LogSyslog yes

# Specify the type of syslog messages - please refer to 'man syslog'
# for facility names.
#
# Default: LOG_LOCAL6
#LogFacility LOG_MAIL

# Enable verbose logging.
#
# Default: no
#LogVerbose yes

# Enable log rotation. Always enabled when LogFileMaxSize is enabled.
# Default: no
#LogRotate yes

# This option allows you to tune what is logged when a message is infected.
# Possible values are Off (the default - nothing is logged),
# Basic (minimal info logged), Full (verbose info logged)
# Note:
# For this to work properly in sendmail, make sure the msg_id, mail_addr,
# rcpt_addr and i macros are available in eom. In other words add a line like:
# Milter.macros.eom={msg_id}, {mail_addr}, {rcpt_addr}, i
# to your .cf file. Alternatively use the macro:
# define(`confMILTER_MACROS_EOM', `{msg_id}, {mail_addr}, {rcpt_addr}, i')
# Postfix should be working fine with the default settings.
#
# Default: disabled
#LogInfected Basic

# This option allows you to tune what is logged when no threat is found in
# a scanned message.
# See LogInfected for possible values and caveats.
# Useful in debugging but drastically increases the log size.
# Default: disabled
#LogClean Basic

# This option affects the behaviour of LogInfected, LogClean and VirusAction
# when a message with multiple recipients is scanned:
# If SupportMultipleRecipients is off (the default)
# then one single log entry is generated for the message and, in case the
# message is determined to be malicious, the command indicated by VirusAction
# is executed just once. In both cases only the last recipient is reported.
# If SupportMultipleRecipients is on:
# then one line is logged for each recipient and the command indicated
# by VirusAction is also executed once for each recipient.
#
# Note: although it's probably a good idea to enable this option, the default
# value is currently set to off for legacy reasons.
# Default: no
#SupportMultipleRecipients yes
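If this milter is meant to sit in front of Postfix (the comments above mention both Sendmail and Postfix), a hedged sketch of the MTA side might look as follows; the socket path is taken from the commented `MilterSocket` example above, not from a value actually set in this repository:

```shell
# hypothetical Postfix wiring for the clamav-milter socket shown above
sudo postconf -e 'smtpd_milters = unix:/run/clamav/clamav-milter.sock'
sudo postconf -e 'milter_default_action = accept'
sudo systemctl reload postfix
```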
@ -0,0 +1,250 @@
LogFile /var/log/clamav/clamd.log
LogFileUnlock no
LogFileMaxSize 2M
LogTime yes
LogClean no
LogSyslog no
LogFacility LOG_LOCAL6
LogVerbose no
LogRotate no
PreludeEnable no
PreludeAnalyzerName ClamAV
ExtendedDetectionInfo yes
TemporaryDirectory /tmp
DatabaseDirectory /var/lib/clamav
OfficialDatabaseOnly no
#FailIfCvdOlderThan 7

User clamav
# Default: disabled (must be specified by a user)
LocalSocket /var/run/clamav/clamd.sock
#LocalSocket /tmp/clamd.sock
# Default: disabled (the primary group of the user running clamd)
LocalSocketGroup clamav
# Default: disabled (socket is world accessible)
#LocalSocketMode 660
#FixStaleSocket yes

# Default: no
#TCPSocket 3310
# Default: no
#TCPAddr localhost
# Default: 200
#MaxConnectionQueueLength 30
# Default: 100M
#StreamMaxLength 25M
# Default: 1024
#StreamMinPort 30000
# Default: 2048
#StreamMaxPort 32000
# Default: 10
#MaxThreads 20
# Default: 120
#ReadTimeout 300
CommandReadTimeout 30
# Default: 500
#SendBufTimeout 200

# Maximum number of queued items (including those being processed by
# MaxThreads threads).
# It is recommended to have this value at least twice MaxThreads if possible.
# WARNING: you shouldn't increase this too much to avoid running out of file
# descriptors, the following condition should hold:
# MaxThreads*MaxRecursion + (MaxQueue - MaxThreads) + 6 < RLIMIT_NOFILE (usual
# max is 1024).
#
# Default: 100
#MaxQueue 200

# Default: 30
#IdleTimeout 60
# Default: scan all
ExcludePath ^/proc/
ExcludePath ^/sys/
MaxDirectoryRecursion 20
# Default: no
#FollowDirectorySymlinks yes
# Default: no
#FollowFileSymlinks yes
CrossFilesystems yes
SelfCheck 600
# Default: yes
#ConcurrentDatabaseReload no
# Default: no
#VirusEvent /usr/local/bin/send_sms 123456789 "VIRUS ALERT: %v in %f"
#ExitOnOOM yes
# Default: no
#Foreground yes
# Default: no
#Debug yes
# Default: no
#LeaveTemporaryFiles yes
# Default: no
#GenerateMetadataJson yes
# Default: yes
#AllowAllMatchScan no
DetectPUA yes
# Default: Load all categories (if DetectPUA is activated)
ExcludePUA Tool
ForceToDisk no
# Default: no
#DisableCache yes
#CacheSize 65536
HeuristicAlerts yes
# Default: no
#HeuristicScanPrecedence yes

##
## Heuristic Alerts
##
# Default: no
#AlertBrokenExecutables yes
# Default: no
#AlertBrokenMedia yes
# Default: no
#AlertEncrypted yes
# Default: no
#AlertEncryptedArchive yes
# Default: no
#AlertEncryptedDoc yes
# Default: no
AlertOLE2Macros yes
# Default: no
#AlertPhishingSSLMismatch yes
# Default: no
#AlertPhishingCloak yes
# Default: no
#AlertPartitionIntersection yes

##
## Executable files
##
# Default: yes
ScanPE yes
# Default: no
#DisableCertCheck yes
# Default: yes
ScanELF yes

##
## Documents
##
ScanOLE2 yes
ScanPDF yes
ScanSWF yes
ScanXMLDOCS yes
ScanHWP3 yes

##
## Mail files
##
ScanMail yes
# Default: no
#ScanPartialMessages yes
PhishingSignatures yes
PhishingScanURLs yes

##
## Data Loss Prevention (DLP)
##
# Default: No
#StructuredDataDetection yes
# Default: 3
StructuredMinCreditCardCount 5
# Default: no
#StructuredCCOnly yes
# Default: 3
StructuredMinSSNCount 5
StructuredSSNFormatNormal yes
StructuredSSNFormatStripped yes

##
## HTML
##
ScanHTML yes

##
## Archives
##
ScanArchive yes

##
## Limits
##
# Default: 120000
#MaxScanTime 300000
# Default: 400M
MaxScanSize 500M
# Default: 100M
MaxFileSize 400M
# Default: 17
#MaxRecursion 10
# Default: 10000
#MaxFiles 15000
# Default: 40M
MaxEmbeddedPE 80M
# Default: 40M
#MaxHTMLNormalize 100M
# Default: 8M
#MaxHTMLNoTags 16M
# Default: 20M
#MaxScriptNormalize 50M
# Default: 1M
#MaxZipTypeRcg 1M
# Default: 50
#MaxPartitions 128
# Default: 100
#MaxIconsPE 200
# Default: 16
#MaxRecHWP3 16
# Default: 100000
#PCREMatchLimit 20000
# Default: 2000
#PCRERecMatchLimit 10000
# Default: 100M
#PCREMaxFileSize 400M
# Default: no
AlertExceedsMax yes

##
## On-access Scan Settings
##
# Default: 5M
#OnAccessMaxFileSize 10M
# Default: 5
#OnAccessMaxThreads 10
# Default: 5000 (5 seconds)
# OnAccessCurlTimeout 10000
# Default: no
#OnAccessDisableDDD yes
# Default: disabled
#OnAccessIncludePath /home
#OnAccessIncludePath /students
# Default: disabled
#OnAccessExcludePath /home/user
# Default: no
OnAccessPrevention yes
# Default: no
#OnAccessDenyOnError yes
# Default: no
#OnAccessExtraScanning yes
# Default: disabled
#OnAccessMountPath /
#OnAccessMountPath /home/user
# Default: no
#OnAccessExcludeRootUID no
# Default: disabled
#OnAccessExcludeUID -1
# Default: disabled
OnAccessExcludeUname clamav
# Default: 0
#OnAccessRetryAttempts 3

##
## Bytecode
##
Bytecode yes
BytecodeSecurity TrustSigned
BytecodeUnsigned no
# Default: 10000
# BytecodeTimeout 1000
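A quick way to confirm clamd accepts this configuration and is reachable over the `LocalSocket` defined above; note that `clamconf` reads its compiled-in config directory, which may differ for a `/usr/local` build, and the unit name is an assumption:

```shell
# dump the parsed configuration, restart the daemon, then scan a harmless file through the socket
clamconf | head -n 40
sudo systemctl restart clamav-daemon.service
clamdscan --fdpass /etc/hostname
```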
@ -0,0 +1,23 @@
DatabaseOwner clamav
UpdateLogFile /var/log/clamav/freshclam.log
LogVerbose false
LogSyslog false
LogFacility LOG_LOCAL6
LogFileMaxSize 0
LogRotate true
LogTime true
Foreground false
Debug false
MaxAttempts 5
DatabaseDirectory /var/lib/clamav
DNSDatabaseInfo current.cvd.clamav.net
ConnectTimeout 30
ReceiveTimeout 0
TestDatabases yes
ScriptedUpdates yes
CompressLocalDatabase no
Bytecode true
NotifyClamd /etc/clamav/clamd.conf
Checks 24
DatabaseMirror db.local.clamav.net
DatabaseMirror database.clamav.net
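A one-off database refresh with the same file can be useful before the freshclam service above takes over; this sketch assumes the `clamav` user from `DatabaseOwner` already exists:

```shell
# fetch signatures once and confirm they landed in DatabaseDirectory
sudo -u clamav freshclam --config-file=/etc/clamav/freshclam.conf
ls -lh /var/lib/clamav/
```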
@ -0,0 +1,4 @@
KexAlgorithms curve25519-sha256,curve25519-sha256@libssh.org
Ciphers chacha20-poly1305@openssh.com,aes256-gcm@openssh.com
MACs hmac-sha2-512-etm@openssh.com
HostKeyAlgorithms ssh-ed25519,ssh-ed25519-cert-v01@openssh.com,sk-ssh-ed25519@openssh.com,sk-ssh-ed25519-cert-v01@openssh.com
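Before deploying this algorithm pin-down, it is worth checking that the installed OpenSSH build actually offers each entry; `ssh -Q` queries are supported by stock OpenSSH:

```shell
# list what the installed OpenSSH supports and grep for the pinned algorithms
ssh -Q kex | grep curve25519
ssh -Q cipher | grep -E 'chacha20|aes256-gcm'
ssh -Q mac | grep 'hmac-sha2-512-etm'
ssh -Q key | grep ed25519
```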
@ -0,0 +1,24 @@
AcceptEnv LANG LC_*
AddressFamily inet
AllowAgentForwarding no
ChallengeResponseAuthentication no
ClientAliveCountMax 2
ClientAliveInterval 300
HostKey /etc/ssh/ssh_host_ed25519_key
IgnoreRhosts yes
LogLevel VERBOSE
MaxAuthTries 3
MaxSessions 3
PermitEmptyPasswords no
PermitRootLogin no
PrintMotd yes
Protocol 2
PubkeyAuthentication yes
Port 22
TCPKeepAlive no
UseDNS no
UsePAM yes
X11Forwarding no
Subsystem sftp /usr/lib/openssh/sftp-server

Include /etc/ssh/sshd_config.d/*.conf
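Assuming this lands as `/etc/ssh/sshd_config` on a Debian host (unit name `ssh.service`), a hedged pre-flight check before reloading:

```shell
# refuse to reload on syntax errors, then show the effective values after the Include is resolved
sudo sshd -t -f /etc/ssh/sshd_config && sudo systemctl reload ssh
sudo sshd -T | grep -iE '^(permitrootlogin|maxauthtries|port|x11forwarding)'
```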
@ -0,0 +1,76 @@
# No end-of-line comments;
# No quotes around path names;
# To unset previous configuration, set it to "" (empty) beforehand;
# Some options allow multiple definitions, which leads to concatenation;

ROTATE_MIRRORS=1
UPDATE_MIRRORS=1
MIRRORS_MODE=0
MAIL-ON-WARNING=naeikindus@pounce.tech
MAIL_CMD=mail -s "[rkhunter] Warnings found for ${HOST_NAME}"
TMPDIR=/var/lib/rkhunter/tmp
DBDIR=/var/lib/rkhunter/db
SCRIPTDIR=/usr/share/rkhunter/scripts
BINDIR=/bin /usr/bin /sbin /usr/sbin
BINDIR=+/usr/local/bin +/usr/local/sbin
UPDATE_LANG="en"
LOGFILE=/var/log/rkhunter.log
APPEND_LOG=0
COPY_LOG_ON_ERROR=0
USE_SYSLOG=authpriv.warning
AUTO_X_DETECT=0
ALLOW_SSH_ROOT_USER=no
ALLOW_SSH_PROT_V1=0

ENABLE_TESTS=ALL
DISABLE_TESTS=NONE
HASH_CMD=SHA256
PKGMGR=NONE

USER_FILEPROP_FILES_DIRS=/etc/rkhunter.conf
USER_FILEPROP_FILES_DIRS=/etc/rkhunter.conf.local

EXISTWHITELIST=""
ATTRWHITELIST=""
WRITEWHITELIST=""

SCRIPTWHITELIST=/usr/bin/egrep
SCRIPTWHITELIST=/usr/bin/lwp-request
SCRIPTWHITELIST=/usr/bin/fgrep
SCRIPTWHITELIST=/usr/bin/which
SCRIPTWHITELIST=/usr/bin/ldd
SCRIPTWHITELIST=/usr/bin/which.debianutils
SCRIPTWHITELIST=/usr/sbin/adduser

IMMUTABLE_SET=0
SKIP_INODE_CHECK=0
ALLOWPROMISCIF=""

SCAN_MODE_DEV=THOROUGH
ALLOWDEVFILE=""
ALLOW_SYSLOG_REMOTE_LOGGING=0

### Needs update to add user-controlled dirs, like upload and user-generated content dirs from the webserver
SUSPSCAN_DIRS=/tmp /var/tmp
SUSPSCAN_TEMP=/dev/shm
SUSPSCAN_MAXSIZE=1024000
SUSPSCAN_THRESH=200
SUSPSCAN_WHITELIST=""

# Examples:
#
# PORT_WHITELIST=TCP:2001 UDP:32011
# PORT_PATH_WHITELIST=/usr/sbin/squid
# PORT_PATH_WHITELIST=/usr/sbin/squid:TCP:3801
PORT_WHITELIST=""
PORT_PATH_WHITELIST=""

WARN_ON_OS_CHANGE=1

USE_LOCKING=1
LOCK_TIMEOUT=300
SCANROOTKITMODE=""

SHOW_SUMMARY_WARNINGS_NUMBER=1
GLOBSTAR=0
INSTALLDIR=/usr
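A short sketch of exercising this rkhunter configuration once it is in place; `--propupd` baselines file properties, so it should only be run on a host believed to be clean:

```shell
# validate the configuration, build the file-properties baseline, then run a warnings-only scan
sudo rkhunter --config-check
sudo rkhunter --propupd
sudo rkhunter --check --sk --rwo
```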
@ -0,0 +1,2 @@
* soft core 0
* hard core 0
@ -0,0 +1,40 @@
# Based on Debian 12 manual
CHFN_RESTRICT rwh
DEFAULT_HOME yes
ENCRYPT_METHOD YESCRYPT
ENV_PATH PATH=/usr/local/bin:/usr/bin:/bin:/usr/local/games:/usr/games
ENV_SUPATH PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
ERASECHAR 0177
FAILLOG_ENAB yes
FTMP_FILE /var/log/btmp
GID_MAX 60000
GID_MIN 1000
HOME_MODE 0700
HUSHLOGIN_FILE .hushlogin
KILLCHAR 025
LOGIN_RETRIES 3
LOGIN_TIMEOUT 60
LOG_OK_LOGINS yes
LOG_UNKFAIL_ENAB no
MAIL_DIR /var/mail
NONEXISTENT /nonexistent
PASS_MAX_DAYS 99999
PASS_MIN_DAYS 0
PASS_WARN_AGE 7
PREVENT_NO_AUTH yes
SUB_GID_COUNT 65536
SUB_GID_MAX 600100000
SUB_GID_MIN 100000
SUB_UID_COUNT 65536
SUB_UID_MAX 600100000
SUB_UID_MIN 100000
SU_NAME su
SYSLOG_SG_ENAB yes
SYSLOG_SU_ENAB yes
TTYGROUP tty
TTYPERM 0600
UID_MAX 60000
UID_MIN 1000
UMASK 027
USERGROUPS_ENAB yes
YESCRYPT_COST_FACTOR 10
@ -0,0 +1,2 @@
localhost

@ -0,0 +1 @@
---
@ -0,0 +1,2 @@
---
# defaults file for tooling
@ -0,0 +1 @@
---
@ -0,0 +1,22 @@
---
galaxy_info:
  author: Florian L.
  namespace: nullified
  description: Install server related configuration and tooling
  # issue_tracker_url: http://example.com/issue/tracker
  license: MIT
  min_ansible_version: 2.15

  # https://galaxy.ansible.com/api/v1/platforms/
  platforms:
    - name: Debian
      versions:
        - bookworm

  galaxy_tags:
    - github
    - assets
    - utils
    - system

dependencies: []
@ -0,0 +1,2 @@
---
# TODO: Add monitoring roles
@ -0,0 +1,2 @@
localhost

@ -0,0 +1 @@
---
@ -0,0 +1,2 @@
---
custom_base_user_account: "root"
@ -0,0 +1,2 @@
---
# handlers file for development
@ -0,0 +1,22 @@
---
galaxy_info:
  author: Florian L.
  namespace: nullified
  description: Install workstation environment
  # issue_tracker_url: http://example.com/issue/tracker
  license: MIT
  min_ansible_version: 2.15

  # https://galaxy.ansible.com/api/v1/platforms/
  platforms:
    - name: Debian
      versions:
        - bookworm

  galaxy_tags:
    - github
    - assets
    - utils
    - system

dependencies: []
@ -0,0 +1,25 @@
---
- name: '[APT] install dependencies and tools'
  become: yes
  ansible.builtin.apt:
    update_cache: true
    force_apt_get: true
    cache_valid_time: 3600
    pkg:
      - curl
      - flatpak
      - gnupg
      - pwgen
      - sudo
    state: present

- name: '[Setup] setup Flatpak'
  become: yes
  become_user: "{{ custom_base_user_account }}"
  become_method: su
  tags:
    - molecule-idempotence-notest
  ansible.builtin.shell: |
    flatpak remote-add --if-not-exists flathub https://flathub.org/repo/flathub.flatpakrepo
    flatpak install --noninteractive flathub com.discordapp.Discord
    flatpak install --noninteractive flathub md.obsidian.Obsidian
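These tasks are what a Molecule scenario for the role would exercise; a hedged sketch of running that locally with the pinned tooling (the role directory name is assumed, since the diff view does not show the full path):

```shell
# from the repository root, using the virtualenv created earlier
source venv/bin/activate
cd collections/ansible_collections/nullified/infrastructure/roles/workstation
molecule test
```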
@ -0,0 +1,2 @@
localhost

@ -0,0 +1 @@
---
@ -0,0 +1,3 @@
---
modules:
  python_requires: '>= 3.9'
20 images/Dockerfile-debian-bookworm Normal file
@ -0,0 +1,20 @@
FROM debian:bookworm
ENV DEBIAN_FRONTEND=noninteractive

RUN set -eux; \
    apt-get update && \
    apt-get upgrade -y; \
    apt-get install -y --no-install-recommends \
        ca-certificates \
        python3 \
        python3-dev \
        sudo \
        systemd \
        systemd-sysv \
    ; \
    rm -rf /var/lib/apt/lists/* \
    ;

STOPSIGNAL SIGRTMIN+3
VOLUME ["/run", "/run/lock", "/tmp"]
CMD ["/sbin/init"]
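A sketch of building and booting this systemd-enabled image the way a Molecule Docker driver typically would; the image tag and the cgroup-related run flags are assumptions and depend on the host's Docker and cgroup setup:

```shell
# build the test image and start it with systemd as PID 1
docker build -f images/Dockerfile-debian-bookworm -t debian-bookworm-systemd .
docker run -d --name bookworm-test \
  --cgroupns=host --tmpfs /run --tmpfs /run/lock \
  -v /sys/fs/cgroup:/sys/fs/cgroup:rw \
  debian-bookworm-systemd
docker exec bookworm-test systemctl is-system-running --wait
```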
0 inventory/group_vars/.gitkeep Normal file
2 inventory/group_vars/all.yml Normal file
@ -0,0 +1,2 @@
custom_base_user_account: '{{ vault_custom_base_user_account }}'
custom_github_token: '{{ vault_custom_github_token }}'
0 inventory/host_vars/.gitkeep Normal file
2 inventory/inventory.yml.dist Normal file
@ -0,0 +1,2 @@
all:
  hosts:
2 inventory/vault.yml.dist Normal file
@ -0,0 +1,2 @@
vault_custom_base_user_account:
vault_custom_github_token:
9 playbooks/external.yml Normal file
@ -0,0 +1,9 @@
---
- name: setup external infrastructure
  hosts: external
  gather_facts: false
  vars_files: ../inventory/vault.yml
  tasks:
    - include_vars: ../inventory/vault.yml

- ansible.builtin.import_playbook: nullified.infrastructure.server
1 playbooks/internal.yml Normal file
@ -0,0 +1 @@
---
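A hedged example of invoking this playbook, assuming `inventory/vault.yml` is encrypted with ansible-vault and `inventory/inventory.yml` defines the `external` group:

```shell
./scripts/python_wrapper.sh ansible-playbook \
  -i inventory/inventory.yml \
  --ask-vault-pass \
  playbooks/external.yml
```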
14 requirements.txt Normal file
@ -0,0 +1,14 @@
ansible==8.3.0
ansible-compat==4.1.6
ansible-core==2.15.3
ansible-lint==6.17.2
docker==6.1.3
Jinja2==3.1.2
jsonschema==4.19.0
molecule==6.0.1
molecule-plugins==23.5.0
paramiko==3.3.1
pycurl==7.45.2
Pygments==2.16.1
pylint==2.17.5
yamllint==1.32.0
117 scripts/generate_ansible_config.sh Executable file
@ -0,0 +1,117 @@
#!/usr/bin/env bash

set -o pipefail -o noclobber

function usage() {
    cat <<EOF
DESCRIPTION
    This command generates an ansible configuration file to use with this repository; it is tailored for the user calling ansible commands,
    not to be used as a boilerplate for Molecule and \`ansible-test\` environments.

USAGE
    \$> ./$(basename "$0") [option [option...]] [file_path]

ARGUMENTS
    file_path               file path where the file will be saved; defaults to '\$HOME/.ansible.cfg';
                            if neither this argument nor the environment variable \$ANSIBLE_CFG_PATH is provided, the
                            content will be printed on stdout.

OPTIONS
    -h, --help              show this output
    -o, --overwrite         allow overwriting of a pre-existing file

ENVIRONMENT
    The following environment variables are supported for overrides:
    ANSIBLE_CFG_PATH: path where the ansible config file should be stored (current: "${ANSIBLE_CFG_PATH:-stdout}" or argument)
    TMPL_PROJECT_ROOT: path where this project is located; defaults to where '../' leads (current: "$TMPL_PROJECT_ROOT")
    TMPL_INVENTORY_FILE: path where the main inventory file is located (current: "$TMPL_INVENTORY_FILE")
    TMPL_CONNECTION_TIMEOUT: default connection timeout when connecting to hosts (current: $TMPL_CONNECTION_TIMEOUT)
    TMPL_PLAYBOOK_DIR: path where the playbooks are located (current: "$TMPL_PLAYBOOK_DIR")
EOF
}

ANSIBLE_CFG_PATH=${ANSIBLE_CFG_PATH:=}
OVERWRITE_CFG=0
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# Template variables
TMPL_PROJECT_ROOT=$(realpath "${TMPL_PROJECT_ROOT:-"${SCRIPT_DIR}/.."}")
TMPL_INVENTORY_FILE=$(realpath "${TMPL_INVENTORY_FILE:-"${TMPL_PROJECT_ROOT}/inventory/inventory.yml"}")
TMPL_CONNECTION_TIMEOUT=${TMPL_CONNECTION_TIMEOUT:-30}
TMPL_PLAYBOOK_DIR=$(realpath "${TMPL_PLAYBOOK_DIR:-"${TMPL_PROJECT_ROOT}/playbooks"}")

OPTS=ho
LONGOPTS=help,overwrite
! getopt --test > /dev/null
if [[ ${PIPESTATUS[0]} -ne 4 ]]; then
    echo -e "Getopt is not available, please install util-linux or a similar package and ensure your bash is up-to-date."
    exit 1
fi
! PARSED=$(getopt --options=${OPTS} --longoptions=${LONGOPTS} --name "$0" -- "$@")
if [[ ${PIPESTATUS[0]} -ne 0 ]]; then
    exit 2
fi
eval set -- "${PARSED}"

while true; do
    case "$1" in
        -h|--help)
            usage
            exit 0
            ;;
        -o|--overwrite)
            OVERWRITE_CFG=1
            shift
            ;;
        --)
            shift
            break
            ;;
        *)
            echo "Unsupported option: ${1}"
            exit 2
            ;;
    esac
done

if [ -n "${1}" ]; then
    ANSIBLE_CFG_PATH=$1
fi

if [ -e "${ANSIBLE_CFG_PATH}" ] && [ $OVERWRITE_CFG -eq 0 ]; then
    printf "Configuration file already exists, not overwriting (file: '%s')\n" "${ANSIBLE_CFG_PATH}"
    exit 1
fi

CFG_TEMPLATE=$(cat <<EOF
[defaults]
cow_selection = random
error_on_undefined_vars = True
home = ${TMPL_PROJECT_ROOT}
inventory = ${TMPL_INVENTORY_FILE}
local_tmp = /tmp
nocolor = 0
nocows = 0
playbook_dir = ${TMPL_PLAYBOOK_DIR}
remote_tmp = /tmp

[inventory]
enable_plugins = yaml

[privilege_escalation]
become_method = su
become_flags = "-l"

[persistent_connection]
connect_timeout = ${TMPL_CONNECTION_TIMEOUT}

[galaxy]
display_progress = True
EOF
)

if [ -z "${ANSIBLE_CFG_PATH}" ]; then
    echo -e "$CFG_TEMPLATE"
else
    echo -e "$CFG_TEMPLATE" > "${ANSIBLE_CFG_PATH}"
fi
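Usage follows directly from the script's own help text; for example:

```shell
# print the generated configuration to stdout for review
./scripts/generate_ansible_config.sh

# write it to the default per-user location, overwriting any existing file,
# with a longer connection timeout
TMPL_CONNECTION_TIMEOUT=60 ./scripts/generate_ansible_config.sh --overwrite "$HOME/.ansible.cfg"
```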
9 scripts/python_wrapper.sh Executable file
@ -0,0 +1,9 @@
#!/usr/bin/env bash

set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

test -d "$SCRIPT_DIR"/../venv || { printf "Python virtualenv not found (%s)\n" "$SCRIPT_DIR"/../venv; exit 1; }
source "$SCRIPT_DIR"/../venv/bin/activate

exec "$@"
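The wrapper simply activates the project virtualenv and exec's its arguments, so any tool pinned in requirements.txt can be run through it:

```shell
./scripts/python_wrapper.sh ansible --version
./scripts/python_wrapper.sh molecule --version
```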