Merge pull request #235 from ansible-lockdown/devel

rhel9-cis main release v1.0.0
uk-bolly 2024-09-10 15:45:16 +01:00 committed by GitHub
commit 16cb6a4617
GPG key ID: B5690EEEBB952194
20 changed files with 408 additions and 372 deletions

View file

@ -1,5 +1,4 @@
passlib
lxml
xmltodict
jmespath
yamllint

View file

@ -13,13 +13,21 @@
- '**.j2'
- '**.ps1'
- '**.cfg'
# Allow manual running of workflow
workflow_dispatch:
# Allow permissions for AWS auth
permissions:
id-token: write
contents: read
pull-requests: read
# A workflow run is made up of one or more jobs
# that can run sequentially or in parallel
jobs:
# This will create messages for first-time contributors and direct them to the Discord server
welcome:
runs-on: ubuntu-latest
runs-on: self-hosted
steps:
- uses: actions/first-interaction@main
@ -32,108 +40,119 @@
# This workflow contains a single job that tests the playbook
playbook-test:
# The type of runner that the job will run on
runs-on: ubuntu-latest
runs-on: self-hosted
env:
ENABLE_DEBUG: ${{ vars.ENABLE_DEBUG }}
# Imported as a variable by terraform
TF_VAR_repository: ${{ github.event.repository.name }}
AWS_REGION: "us-east-1"
ANSIBLE_VERSION: ${{ vars.ANSIBLE_RUNNER_VERSION }}
defaults:
run:
shell: bash
working-directory: .github/workflows/github_linux_IaC
# working-directory: .github/workflows
steps:
- name: Clone ${{ github.event.repository.name }}
- name: Git clone the lockdown repository to test
uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: If a variable for IAC_BRANCH is set use that branch
working-directory: .github/workflows
run: |
if [ -n "${{ vars.IAC_BRANCH }}" ]; then
echo "IAC_BRANCH=${{ vars.IAC_BRANCH }}" >> $GITHUB_ENV
echo "Pipeline using the following IAC branch ${{ vars.IAC_BRANCH }}"
else
echo IAC_BRANCH=main >> $GITHUB_ENV
fi
# Pull in terraform code for linux servers
- name: Clone GitHub IaC plan
uses: actions/checkout@v4
with:
repository: ansible-lockdown/github_linux_IaC
path: .github/workflows/github_linux_IaC
ref: ${{ env.IAC_BRANCH }}
- name: Add_ssh_key
working-directory: .github/workflows
env:
SSH_AUTH_SOCK: /tmp/ssh_agent.sock
PRIVATE_KEY: "${{ secrets.SSH_PRV_KEY }}"
run: |
mkdir .ssh
chmod 700 .ssh
echo "$PRIVATE_KEY" > .ssh/github_actions.pem
chmod 600 .ssh/github_actions.pem
# Uses a dedicated restricted role and policy so this is enabled only for this task
# No AWS credentials are stored in GitHub for authentication
- name: configure aws credentials
uses: aws-actions/configure-aws-credentials@main
with:
role-to-assume: ${{ secrets.AWS_ASSUME_ROLE }}
role-session-name: ${{ secrets.AWS_ROLE_SESSION }}
aws-region: ${{ env.AWS_REGION }}
- name: DEBUG - Show IaC files
if: env.ENABLE_DEBUG == 'true'
run: |
echo "OSVAR = $OSVAR"
echo "benchmark_type = $benchmark_type"
echo "PRIVSUBNET_ID = $AWS_PRIVSUBNET_ID"
echo "VPC_ID" = $AWS_VPC_SECGRP_ID"
pwd
ls
env:
# Imported from GitHub variables; this is used to load the relevant OS.tfvars file
OSVAR: ${{ vars.OSVAR }}
benchmark_type: ${{ vars.BENCHMARK_TYPE }}
PRIVSUBNET_ID: ${{ secrets.AWS_PRIVSUBNET_ID }}
VPC_ID: ${{ secrets.AWS_VPC_SECGRP_ID }}
- name: Terraform_Init
- name: Tofu init
id: init
run: terraform init
run: tofu init
env:
# Imported from GitHub variables; this is used to load the relevant OS.tfvars file
OSVAR: ${{ vars.OSVAR }}
TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }}
- name: Terraform_Validate
- name: Tofu validate
id: validate
run: terraform validate
run: tofu validate
env:
# Imported from GitHub variables; this is used to load the relevant OS.tfvars file
OSVAR: ${{ vars.OSVAR }}
TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }}
- name: Terraform_Apply
- name: Tofu apply
id: apply
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
OSVAR: ${{ vars.OSVAR }}
TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }}
run: terraform apply -var-file "github_vars.tfvars" -var-file "${OSVAR}.tfvars" --auto-approve -input=false
TF_VAR_privsubnet_id: ${{ secrets.AWS_PRIVSUBNET_ID }}
TF_VAR_vpc_secgrp_id: ${{ secrets.AWS_VPC_SECGRP_ID }}
run: tofu apply -var-file "${OSVAR}.tfvars" --auto-approve -input=false
## Debug Section
## Debug Section
- name: DEBUG - Show Ansible hostfile
if: env.ENABLE_DEBUG == 'true'
run: cat hosts.yml
# AWS deployments take a while to come up; sleep or the playbook fails
- name: Sleep for 60 seconds
- name: Sleep to allow system to come up
run: sleep ${{ vars.BUILD_SLEEPTIME }}
# Run the Ansibleplaybook
# Run the Ansible playbook
- name: Run_Ansible_Playbook
uses: arillso/action.playbook@master
with:
playbook: site.yml
inventory: .github/workflows/github_linux_IaC/hosts.yml
galaxy_file: collections/requirements.yml
private_key: ${{ secrets.SSH_PRV_KEY }}
# verbose: 3
env:
ANSIBLE_HOST_KEY_CHECKING: "false"
ANSIBLE_DEPRECATION_WARNINGS: "false"
ANSIBLE_INJECT_FACT_VARS: "false"
run: |
/opt/ansible_${{ env.ANSIBLE_VERSION }}_venv/bin/ansible-playbook -i hosts.yml --private-key ~/.ssh/le_runner ../../../site.yml
# Remove the test system - use ENABLE_DEBUG to keep it if necessary
- name: Terraform_Destroy
- name: Tofu Destroy
if: always() && env.ENABLE_DEBUG == 'false'
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
OSVAR: ${{ vars.OSVAR }}
TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }}
run: terraform destroy -var-file "github_vars.tfvars" -var-file "${OSVAR}.tfvars" --auto-approve -input=false
TF_VAR_privsubnet_id: ${{ secrets.AWS_PRIVSUBNET_ID }}
TF_VAR_vpc_secgrp_id: ${{ secrets.AWS_VPC_SECGRP_ID }}
run: tofu destroy -var-file "${OSVAR}.tfvars" --auto-approve -input=false
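For anyone reproducing this pipeline outside GitHub Actions, the steps above boil down to an OpenTofu deploy, a pause, the playbook run, and a teardown. A rough local sketch, assuming the same tfvars naming and an SSH key at ~/.ssh/le_runner; the OSVAR and benchmark values below are placeholders, not repository settings:
export OSVAR=rhel9                      # placeholder for the OSVAR repository variable
export TF_VAR_benchmark_type=CIS        # placeholder for BENCHMARK_TYPE
cd .github/workflows/github_linux_IaC
tofu init
tofu validate
tofu apply -var-file "${OSVAR}.tfvars" --auto-approve -input=false
sleep 60                                # stand-in for vars.BUILD_SLEEPTIME
ansible-playbook -i hosts.yml --private-key ~/.ssh/le_runner ../../../site.yml
tofu destroy -var-file "${OSVAR}.tfvars" --auto-approve -input=false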

View file

@ -14,115 +14,143 @@
- '**.ps1'
- '**.cfg'
# Allow permissions for AWS auth
permissions:
id-token: write
contents: read
pull-requests: read
# A workflow run is made up of one or more jobs
# that can run sequentially or in parallel
jobs:
# This will create messages for first-time contributors and direct them to the Discord server
welcome:
runs-on: self-hosted
steps:
- uses: actions/first-interaction@main
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
pr-message: |-
Congrats on opening your first pull request and thank you for taking the time to help improve Ansible-Lockdown!
Please join in the conversation happening on the [Discord Server](https://www.lockdownenterprise.com/discord) as well.
# This workflow contains a single job that tests the playbook
playbook-test:
# The type of runner that the job will run on
runs-on: ubuntu-latest
runs-on: self-hosted
env:
ENABLE_DEBUG: ${{ vars.ENABLE_DEBUG }}
# Imported as a variable by terraform
TF_VAR_repository: ${{ github.event.repository.name }}
AWS_REGION: "us-east-1"
ANSIBLE_VERSION: ${{ vars.ANSIBLE_RUNNER_VERSION }}
defaults:
run:
shell: bash
working-directory: .github/workflows/github_linux_IaC
# working-directory: .github/workflows
steps:
- name: Clone ${{ github.event.repository.name }}
- name: Git clone the lockdown repository to test
uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: If a variable for IAC_BRANCH is set use that branch
working-directory: .github/workflows
run: |
if [ -n "${{ vars.IAC_BRANCH }}" ]; then
echo "IAC_BRANCH=${{ vars.IAC_BRANCH }}" >> $GITHUB_ENV
echo "Pipeline using the following IAC branch ${{ vars.IAC_BRANCH }}"
else
echo IAC_BRANCH=main >> $GITHUB_ENV
fi
# Pull in terraform code for linux servers
- name: Clone GitHub IaC plan
uses: actions/checkout@v4
with:
repository: ansible-lockdown/github_linux_IaC
path: .github/workflows/github_linux_IaC
ref: ${{ env.IAC_BRANCH }}
- name: Add_ssh_key
working-directory: .github/workflows
env:
SSH_AUTH_SOCK: /tmp/ssh_agent.sock
PRIVATE_KEY: "${{ secrets.SSH_PRV_KEY }}"
run: |
mkdir .ssh
chmod 700 .ssh
echo "$PRIVATE_KEY" > .ssh/github_actions.pem
chmod 600 .ssh/github_actions.pem
# Uses a dedicated restricted role and policy so this is enabled only for this task
# No AWS credentials are stored in GitHub for authentication
- name: configure aws credentials
uses: aws-actions/configure-aws-credentials@main
with:
role-to-assume: ${{ secrets.AWS_ASSUME_ROLE }}
role-session-name: ${{ secrets.AWS_ROLE_SESSION }}
aws-region: ${{ env.AWS_REGION }}
- name: DEBUG - Show IaC files
if: env.ENABLE_DEBUG == 'true'
run: |
echo "OSVAR = $OSVAR"
echo "benchmark_type = $benchmark_type"
echo "PRIVSUBNET_ID = $AWS_PRIVSUBNET_ID"
echo "VPC_ID" = $AWS_VPC_SECGRP_ID"
pwd
ls
env:
# Imported from GitHub variables; this is used to load the relevant OS.tfvars file
OSVAR: ${{ vars.OSVAR }}
benchmark_type: ${{ vars.BENCHMARK_TYPE }}
PRIVSUBNET_ID: ${{ secrets.AWS_PRIVSUBNET_ID }}
VPC_ID: ${{ secrets.AWS_VPC_SECGRP_ID }}
- name: Terraform_Init
- name: Tofu init
id: init
run: terraform init
run: tofu init
env:
# Imported from GitHub variables; this is used to load the relevant OS.tfvars file
OSVAR: ${{ vars.OSVAR }}
TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }}
- name: Terraform_Validate
- name: Tofu validate
id: validate
run: terraform validate
run: tofu validate
env:
# Imported from GitHub variables; this is used to load the relevant OS.tfvars file
OSVAR: ${{ vars.OSVAR }}
TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }}
- name: Terraform_Apply
- name: Tofu apply
id: apply
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
OSVAR: ${{ vars.OSVAR }}
TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }}
run: terraform apply -var-file "github_vars.tfvars" -var-file "${OSVAR}.tfvars" --auto-approve -input=false
TF_VAR_privsubnet_id: ${{ secrets.AWS_PRIVSUBNET_ID }}
TF_VAR_vpc_secgrp_id: ${{ secrets.AWS_VPC_SECGRP_ID }}
run: tofu apply -var-file "${OSVAR}.tfvars" --auto-approve -input=false
## Debug Section
## Debug Section
- name: DEBUG - Show Ansible hostfile
if: env.ENABLE_DEBUG == 'true'
run: cat hosts.yml
# AWS deployments take a while to come up; sleep or the playbook fails
- name: Sleep for 60 seconds
- name: Sleep to allow system to come up
run: sleep ${{ vars.BUILD_SLEEPTIME }}
# Run the Ansibleplaybook
# Run the Ansible playbook
- name: Run_Ansible_Playbook
uses: arillso/action.playbook@master
with:
playbook: site.yml
inventory: .github/workflows/github_linux_IaC/hosts.yml
galaxy_file: collections/requirements.yml
private_key: ${{ secrets.SSH_PRV_KEY }}
# verbose: 3
env:
ANSIBLE_HOST_KEY_CHECKING: "false"
ANSIBLE_DEPRECATION_WARNINGS: "false"
ANSIBLE_INJECT_FACT_VARS: "false"
run: |
/opt/ansible_${{ env.ANSIBLE_VERSION }}_venv/bin/ansible-playbook -i hosts.yml --private-key ~/.ssh/le_runner ../../../site.yml
# Remove the test system - use ENABLE_DEBUG to keep it if necessary
- name: Terraform_Destroy
- name: Tofu Destroy
if: always() && env.ENABLE_DEBUG == 'false'
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
OSVAR: ${{ vars.OSVAR }}
TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }}
run: terraform destroy -var-file "github_vars.tfvars" -var-file "${OSVAR}.tfvars" --auto-approve -input=false
TF_VAR_privsubnet_id: ${{ secrets.AWS_PRIVSUBNET_ID }}
TF_VAR_vpc_secgrp_id: ${{ secrets.AWS_VPC_SECGRP_ID }}
run: tofu destroy -var-file "${OSVAR}.tfvars" --auto-approve -input=false

View file

@ -1,12 +1,12 @@
---
name: update galaxy
name: update galaxy
on:
on:
push:
branches:
- main
jobs:
jobs:
update_role:
runs-on: ubuntu-latest
steps:

View file

@ -37,13 +37,13 @@ repos:
exclude: .config/.gitleaks-report.json
- repo: https://github.com/gitleaks/gitleaks
rev: v8.18.3
rev: v8.18.4
hooks:
- id: gitleaks
args: ['--baseline-path', '.config/.gitleaks-report.json']
- repo: https://github.com/ansible-community/ansible-lint
rev: v24.6.0
rev: v24.7.0
hooks:
- id: ansible-lint
name: Ansible-lint

View file

@ -78,7 +78,7 @@ The control found in the `defaults` main also needs to reflect this, as this con
Every CIS release contains changes, so it is highly recommended to review the new references and available variables. These have changed significantly since the initial ansible-lockdown release.
The role is now compatible with Python 3 if it is found to be the default interpreter. This does come with prerequisites which configure the system accordingly.
Further details can be seen in the [Changelog](./ChangeLog.md)
Further details can be seen in the [Changelog](./Changelog.md)
## Auditing (new)

View file

@ -421,7 +421,7 @@ rhel9cis_rule_enable_repogpg: true
# This variable will store the hashed GRUB bootloader password to be stored in '/boot/grub2/user.cfg' file. The default value
# must be changed to a value that may be generated with this command 'grub2-mkpasswd-pbkdf2' and must comply with
# this format: 'grub.pbkdf2.sha512.<Rounds>.<Salt>.<Checksum>'
rhel9cis_bootloader_password_hash: 'grub.pbkdf2.sha512.10000.9306A36764A7BEA3BF492D1784396B27F52A71812E9955A58709F94EE70697F9BD5366F36E07DEC41B52279A056E2862A93E42069D7BBB08F5DFC2679CD43812.6C32ADA5449303AD5E67A4C150558592A05381331DE6B33463469A236871FA8E70738C6F9066091D877EF88A213C86825E093117F30E9E1BF158D0DB75E7581B' # pragma: allowlist secret
rhel9cis_bootloader_password_hash: 'grub.pbkdf2.sha512.changethispassword' # pragma: allowlist secret
## Control 1.4.1
# This variable governs whether a bootloader password should be set in '/boot/grub2/user.cfg' file.
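As a worked example of the comments above (the override file path is illustrative, not part of the role):
grub2-mkpasswd-pbkdf2
# prompts for the password twice, then prints a line such as:
# PBKDF2 hash of your password is grub.pbkdf2.sha512.10000.<Salt>.<Checksum>
# paste the full string into an override, for example:
echo "rhel9cis_bootloader_password_hash: 'grub.pbkdf2.sha512.10000.<Salt>.<Checksum>'  # pragma: allowlist secret" >> group_vars/all.yml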
@ -962,7 +962,7 @@ rhel9cis_logrotate: "daily"
# This value, containing the absolute filepath of the produced 'sshd' config file, allows usage of
# drop-in files ('/etc/ssh/sshd_config.d/{ssh_drop_in_name}.conf', supported by RHEL9) when CIS adopts them.
# Otherwise, the default value is '/etc/ssh/sshd_config'.
rhel9_cis_sshd_config_file: /etc/ssh/sshd_config
rhel9cis_sshd_config_file: /etc/ssh/sshd_config
## Controls:
## - 5.2.4 - Ensure SSH access is limited
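The renamed `rhel9cis_sshd_config_file` variable above can be pointed at a drop-in file when appropriate. A minimal override sketch; the group_vars path and the 60-cis.conf name are assumptions, not role defaults:
echo 'rhel9cis_sshd_config_file: /etc/ssh/sshd_config.d/60-cis.conf' >> group_vars/all.yml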

View file

@ -10,7 +10,7 @@
- name: Pre Audit Setup | Set audit package name | ARM64
ansible.builtin.set_fact:
audit_pkg_arch_name: ARM64
when: ansible_facts.machine == "arm64"
when: ansible_facts.machine == "aarch64"
- name: Pre Audit Setup | Download audit binary
ansible.builtin.get_url:

View file

@ -22,7 +22,7 @@
when:
- audit_only
ansible.builtin.debug:
msg: "The Audit results are: {{ pre_audit_summary }}."
msg: "{{ audit_results.split('\n') }}"
- name: Audit_only | Stop Playbook Audit Only selected
when:

View file

@ -21,26 +21,24 @@
when:
- audit_format == "json"
block:
- name: capture data {{ post_audit_outfile }}
ansible.builtin.shell: "cat {{ post_audit_outfile }}"
register: post_audit
- name: Post Audit | Capture audit data if json format
ansible.builtin.shell: grep -E '"summary-line.*Count:.*Failed' "{{ post_audit_outfile }}" | cut -d'"' -f4
register: post_audit_summary
changed_when: false
- name: Capture post-audit result
- name: Post Audit | Set Fact for audit summary
ansible.builtin.set_fact:
post_audit_summary: "{{ post_audit.stdout | from_json | json_query(summary) }}"
vars:
summary: summary."summary-line"
post_audit_results: "{{ post_audit_summary.stdout }}"
- name: Post Audit | Capture audit data if documentation format
when:
- audit_format == "documentation"
block:
- name: Post Audit | capture data {{ post_audit_outfile }}
ansible.builtin.shell: "tail -2 {{ post_audit_outfile }}"
register: post_audit
- name: Post Audit | Capture audit data if documentation format
ansible.builtin.shell: tail -2 "{{ post_audit_outfile }}" | tac | tr '\n' ' '
register: post_audit_summary
changed_when: false
- name: Post Audit | Capture post-audit result
- name: Post Audit | Set Fact for audit summary
ansible.builtin.set_fact:
post_audit_summary: "{{ post_audit.stdout_lines }}"
post_audit_results: "{{ post_audit_summary.stdout }}"

View file

@ -90,31 +90,30 @@
when:
- audit_format == "json"
block:
- name: Pre Audit | Capture data {{ pre_audit_outfile }}
ansible.builtin.shell: "cat {{ pre_audit_outfile }}"
register: pre_audit
- name: Pre Audit | Capture audit data if json format
ansible.builtin.shell: grep -E '\"summary-line.*Count:.*Failed' "{{ pre_audit_outfile }}" | cut -d'"' -f4
register: pre_audit_summary
changed_when: false
- name: Pre Audit | Capture pre-audit result
- name: Pre Audit | Set Fact for audit summary
ansible.builtin.set_fact:
pre_audit_summary: "{{ pre_audit.stdout | from_json | json_query(summary) }}"
vars:
summary: summary."summary-line"
pre_audit_results: "{{ pre_audit_summary.stdout }}"
- name: Pre Audit | Capture audit data if documentation format
when:
- audit_format == "documentation"
block:
- name: Pre Audit | Capture data {{ pre_audit_outfile }} | documentation format
ansible.builtin.shell: "tail -2 {{ pre_audit_outfile }}"
register: pre_audit
- name: Pre Audit | Capture audit data if documentation format
ansible.builtin.shell: tail -2 "{{ pre_audit_outfile }}" | tac | tr '\n' ' '
register: pre_audit_summary
changed_when: false
- name: Pre Audit | Capture pre-audit result | documentation format
- name: Pre Audit | Set Fact for audit summary
ansible.builtin.set_fact:
pre_audit_summary: "{{ pre_audit.stdout_lines }}"
pre_audit_results: "{{ pre_audit_summary.stdout }}"
- name: Audit_Only | Run Audit Only
when:
- audit_only
ansible.builtin.import_tasks: audit_only.yml
ansible.builtin.import_tasks:
file: audit_only.yml

View file

@ -242,18 +242,16 @@
# Added to ensure ssh drop in file exists if not default /etc/ssh/sshd_config
- name: "PRELIM | Section 5.2 | SSH"
ansible.builtin.file:
path: "{{ rhel9_cis_sshd_config_file }}"
path: "{{ rhel9cis_sshd_config_file }}"
owner: root
group: root
mode: '0600'
state: touch
when:
- rhel9_cis_sshd_config_file != '/etc/ssh/sshd_config'
- rhel9cis_sshd_config_file != '/etc/ssh/sshd_config'
- "'openssh-server' in ansible_facts.packages"
tags:
- ssh
- level1_server
- level1_workstation
- always
- name: "PRELIM | 5.3.4 | Find all sudoers files."
ansible.builtin.shell: "find /etc/sudoers /etc/sudoers.d/ -type f ! -name '*~' ! -name '*.*'"

View file

@ -89,8 +89,8 @@
group: root
mode: '0644'
loop:
- { regexp: '^user-db', line: 'user-db: user' }
- { regexp: '^system-db', line: 'system-db: local' }
- { regexp: '^user-db', line: 'user-db:user' }
- { regexp: '^system-db', line: 'system-db:local' }
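The two items above drop the space after the colon, presumably so the rendered profile matches the exact db:name form the benchmark checks for. A quick sanity check; the profile path is an assumption, since this hunk does not show the destination file:
grep -E '^(user|system)-db' /etc/dconf/profile/user
# user-db:user
# system-db:local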
- name: "1.8.4 | PATCH | Ensure GDM screen locks when the user is idle | Make db directory"
ansible.builtin.file:

View file

@ -23,7 +23,7 @@
"4.1.4.3 | PATCH | Ensure only authorized groups are assigned ownership of audit log files"
ansible.builtin.file:
path: "{{ audit_discovered_logfile.stdout }}"
mode: "{% if auditd_logfile.stat.mode != '0600' %}0640{% endif %}"
mode: 'u-x,g-rw,o-rwx'
owner: root
group: root
when:
@ -50,7 +50,7 @@
ansible.builtin.file:
path: "{{ audit_discovered_logfile.stdout | dirname }}"
state: directory
mode: '0750'
mode: 'g-w,o-rwx'
when: not auditlog_dir.stat.mode is match('07(0|5)0')
when:
- rhel9cis_rule_4_1_4_4
@ -64,7 +64,9 @@
- name: "4.1.4.5 | PATCH | Ensure audit configuration files are 640 or more restrictive"
ansible.builtin.file:
path: "{{ item.path }}"
mode: "{{ '0600' if item.mode == '0600' else '0640' }}"
mode: 'u-x,g-wx,o-rwx'
failed_when: rhel9cis_4_1_4_5_file_list.state not in ['file', 'absent']
register: rhel9cis_4_1_4_5_file_list
loop: "{{ auditd_conf_files.files }}"
loop_control:
label: "{{ item.path }}"
@ -81,6 +83,8 @@
ansible.builtin.file:
path: "{{ item.path }}"
owner: root
failed_when: rhel9cis_4_1_4_6_file_list.state not in ['file', 'absent']
register: rhel9cis_4_1_4_6_file_list
loop: "{{ auditd_conf_files.files | default([]) }}"
loop_control:
label: "{{ item.path }}"
@ -97,6 +101,8 @@
ansible.builtin.file:
path: "{{ item.path }}"
group: root
failed_when: rhel9cis_4_1_4_7_file_list.state not in ['file', 'absent']
register: rhel9cis_4_1_4_7_file_list
loop: "{{ auditd_conf_files.files | default([]) }}"
loop_control:
label: "{{ item.path }}"
@ -126,8 +132,7 @@
- name: "4.1.4.8 | PATCH | Ensure audit tools are 755 or more restrictive | set if required"
ansible.builtin.file:
path: "{{ item.item }}"
mode: '0750'
mode: 'go-w'
loop: "{{ audit_bins.results }}"
loop_control:
label: "{{ item.item }}"

View file

@ -123,7 +123,7 @@
local2,local3.* -/var/log/localmessages
local4,local5.* -/var/log/localmessages
local6,local7.* -/var/log/localmessages
*.emrg :omusrmsg:*
*.emerg :omusrmsg:*
insertafter: '#### RULES ####'
notify: Restart rsyslog

View file

@ -14,6 +14,8 @@
ansible.builtin.file:
path: "{{ item.path }}"
mode: "{{ '0600' if item.mode == '0600' else '0640' }}"
failed_when: rhel9cis_4_2_3_file_list.state not in ['file', 'absent']
register: rhel9cis_4_2_3_file_list
loop: "{{ logfiles.files }}"
loop_control:
label: "{{ item.path }}"

View file

@ -77,7 +77,7 @@
block:
- name: "5.2.4 | PATCH | Ensure SSH access is limited | Add line to sshd_config for allowusers"
ansible.builtin.lineinfile:
path: "{{ rhel9_cis_sshd_config_file }}"
path: "{{ rhel9cis_sshd_config_file }}"
regexp: "^AllowUsers"
line: "AllowUsers {{ rhel9cis_sshd['allowusers'] }}"
validate: sshd -t -f %s
@ -86,7 +86,7 @@
- name: "5.2.4 | PATCH | Ensure SSH access is limited | Add line to sshd_config for allowgroups"
ansible.builtin.lineinfile:
path: "{{ rhel9_cis_sshd_config_file }}"
path: "{{ rhel9cis_sshd_config_file }}"
regexp: "^AllowGroups"
line: "AllowGroups {{ rhel9cis_sshd['allowgroups'] }}"
validate: sshd -t -f %s
@ -95,7 +95,7 @@
- name: "5.2.4 | PATCH | Ensure SSH access is limited | Add line to sshd_config for denyusers"
ansible.builtin.lineinfile:
path: "{{ rhel9_cis_sshd_config_file }}"
path: "{{ rhel9cis_sshd_config_file }}"
regexp: "^DenyUsers"
line: "DenyUsers {{ rhel9cis_sshd['denyusers'] }}"
validate: sshd -t -f %s
@ -104,7 +104,7 @@
- name: "5.2.4 | PATCH | Ensure SSH access is limited | Add line to sshd_config for denygroups"
ansible.builtin.lineinfile:
path: "{{ rhel9_cis_sshd_config_file }}"
path: "{{ rhel9cis_sshd_config_file }}"
regexp: "^DenyGroups"
line: "DenyGroups {{ rhel9cis_sshd['denygroups'] }}"
validate: sshd -t -f %s
@ -121,10 +121,11 @@
- name: "5.2.5 | PATCH | Ensure SSH LogLevel is appropriate"
ansible.builtin.lineinfile:
path: "{{ rhel9_cis_sshd_config_file }}"
path: "{{ rhel9cis_sshd_config_file }}"
regexp: "^#LogLevel|^LogLevel"
line: 'LogLevel {{ rhel9cis_ssh_loglevel }}'
validate: sshd -t -f %s
notify: Restart sshd
when:
- rhel9cis_rule_5_2_5
tags:
@ -136,10 +137,11 @@
- name: "5.2.6 | PATCH | Ensure SSH PAM is enabled"
ansible.builtin.lineinfile:
path: "{{ rhel9_cis_sshd_config_file }}"
path: "{{ rhel9cis_sshd_config_file }}"
regexp: "^#UsePAM|^UsePAM"
line: 'UsePAM yes'
validate: sshd -t -f %s
notify: Restart sshd
when:
- rhel9cis_rule_5_2_6
tags:
@ -153,7 +155,7 @@
block:
- name: "5.2.7 | PATCH | Ensure SSH root login is disabled | config file"
ansible.builtin.lineinfile:
path: "{{ rhel9_cis_sshd_config_file }}"
path: "{{ rhel9cis_sshd_config_file }}"
regexp: "^#PermitRootLogin|^PermitRootLogin"
line: 'PermitRootLogin no'
validate: sshd -t -f %s
@ -162,6 +164,7 @@
ansible.builtin.file:
path: /etc/ssh/sshd_config.d/01-permitrootlogin.conf
state: absent
notify: Restart sshd
when:
- rhel9cis_rule_5_2_7
tags:
@ -173,10 +176,11 @@
- name: "5.2.8 | PATCH | Ensure SSH HostbasedAuthentication is disabled"
ansible.builtin.lineinfile:
path: "{{ rhel9_cis_sshd_config_file }}"
path: "{{ rhel9cis_sshd_config_file }}"
regexp: "^#HostbasedAuthentication|^HostbasedAuthentication"
line: 'HostbasedAuthentication no'
validate: sshd -t -f %s
notify: Restart sshd
when:
- rhel9cis_rule_5_2_8
tags:
@ -188,10 +192,11 @@
- name: "5.2.9 | PATCH | Ensure SSH PermitEmptyPasswords is disabled"
ansible.builtin.lineinfile:
path: "{{ rhel9_cis_sshd_config_file }}"
path: "{{ rhel9cis_sshd_config_file }}"
regexp: "^#PermitEmptyPasswords|^PermitEmptyPasswords"
line: 'PermitEmptyPasswords no'
validate: sshd -t -f %s
notify: Restart sshd
when:
- rhel9cis_rule_5_2_9
tags:
@ -203,10 +208,11 @@
- name: "5.2.10 | PATCH | Ensure SSH PermitUserEnvironment is disabled"
ansible.builtin.lineinfile:
path: "{{ rhel9_cis_sshd_config_file }}"
path: "{{ rhel9cis_sshd_config_file }}"
regexp: "^#PermitUserEnvironment|^PermitUserEnvironment"
line: 'PermitUserEnvironment no'
validate: sshd -t -f %s
notify: Restart sshd
when:
- rhel9cis_rule_5_2_10
tags:
@ -218,10 +224,11 @@
- name: "5.2.11 | PATCH | Ensure SSH IgnoreRhosts is enabled"
ansible.builtin.lineinfile:
path: "{{ rhel9_cis_sshd_config_file }}"
path: "{{ rhel9cis_sshd_config_file }}"
regexp: "^#IgnoreRhosts|^IgnoreRhosts"
line: 'IgnoreRhosts yes'
validate: sshd -t -f %s
notify: Restart sshd
when:
- rhel9cis_rule_5_2_11
tags:
@ -236,10 +243,11 @@
- name: "5.2.12 | PATCH | Ensure SSH X11 forwarding is disabled | config file"
ansible.builtin.lineinfile:
path: "{{ rhel9_cis_sshd_config_file }}"
path: "{{ rhel9cis_sshd_config_file }}"
regexp: "^#X11Forwarding|^X11Forwarding"
line: 'X11Forwarding no'
validate: sshd -t -f %s
notify: Restart sshd
- name: "5.2.12 | PATCH | Ensure SSH X11 forwarding is disabled | override"
ansible.builtin.lineinfile:
@ -247,6 +255,7 @@
regexp: "^#X11Forwarding|^X11Forwarding"
line: 'X11Forwarding no'
validate: sshd -t -f %s
notify: Restart sshd
when:
- rhel9cis_rule_5_2_12
tags:
@ -258,10 +267,11 @@
- name: "5.2.13 | PATCH | Ensure SSH AllowTcpForwarding is disabled"
ansible.builtin.lineinfile:
path: "{{ rhel9_cis_sshd_config_file }}"
path: "{{ rhel9cis_sshd_config_file }}"
regexp: "^#AllowTcpForwarding|^AllowTcpForwarding"
line: 'AllowTcpForwarding no'
validate: sshd -t -f %s
notify: Restart sshd
when:
- rhel9cis_rule_5_2_13
tags:
@ -294,9 +304,10 @@
- name: "5.2.15 | PATCH | Ensure SSH warning banner is configured"
ansible.builtin.lineinfile:
path: "{{ rhel9_cis_sshd_config_file }}"
path: "{{ rhel9cis_sshd_config_file }}"
regexp: '^Banner'
line: 'Banner /etc/issue.net'
notify: Restart sshd
when:
- rhel9cis_rule_5_2_15
tags:
@ -308,10 +319,11 @@
- name: "5.2.16 | PATCH | Ensure SSH MaxAuthTries is set to 4 or less"
ansible.builtin.lineinfile:
path: "{{ rhel9_cis_sshd_config_file }}"
path: "{{ rhel9cis_sshd_config_file }}"
regexp: '^(#)?MaxAuthTries \d'
line: 'MaxAuthTries 4'
validate: sshd -t -f %s
notify: Restart sshd
when:
- rhel9cis_rule_5_2_16
tags:
@ -323,10 +335,11 @@
- name: "5.2.17 | PATCH | Ensure SSH MaxStartups is configured"
ansible.builtin.lineinfile:
path: "{{ rhel9_cis_sshd_config_file }}"
path: "{{ rhel9cis_sshd_config_file }}"
regexp: "^#MaxStartups|^MaxStartups"
line: 'MaxStartups 10:30:60'
validate: sshd -t -f %s
notify: Restart sshd
when:
- rhel9cis_rule_5_2_17
tags:
@ -338,10 +351,11 @@
- name: "5.2.18 | PATCH | Ensure SSH MaxSessions is set to 10 or less"
ansible.builtin.lineinfile:
path: "{{ rhel9_cis_sshd_config_file }}"
path: "{{ rhel9cis_sshd_config_file }}"
regexp: "^#MaxSessions|^MaxSessions"
line: 'MaxSessions {{ rhel9cis_ssh_maxsessions }}'
validate: sshd -t -f %s
notify: Restart sshd
when:
- rhel9cis_rule_5_2_18
tags:
@ -353,10 +367,11 @@
- name: "5.2.19 | PATCH | Ensure SSH LoginGraceTime is set to one minute or less"
ansible.builtin.lineinfile:
path: "{{ rhel9_cis_sshd_config_file }}"
path: "{{ rhel9cis_sshd_config_file }}"
regexp: "^#LoginGraceTime|^LoginGraceTime"
line: "LoginGraceTime {{ rhel9cis_sshd['logingracetime'] }}"
validate: sshd -t -f %s
notify: Restart sshd
when:
- rhel9cis_rule_5_2_19
tags:
@ -370,17 +385,19 @@
block:
- name: "5.2.20 | PATCH | Ensure SSH Idle Timeout Interval is configured | Add line in sshd_config for ClientAliveInterval"
ansible.builtin.lineinfile:
path: "{{ rhel9_cis_sshd_config_file }}"
path: "{{ rhel9cis_sshd_config_file }}"
regexp: '^ClientAliveInterval'
line: "ClientAliveInterval {{ rhel9cis_sshd['clientaliveinterval'] }}"
validate: sshd -t -f %s
notify: Restart sshd
- name: "5.2.20 | PATCH | Ensure SSH Idle Timeout Interval is configured | Ensure SSH ClientAliveCountMax set to <= 3"
ansible.builtin.lineinfile:
path: "{{ rhel9_cis_sshd_config_file }}"
path: "{{ rhel9cis_sshd_config_file }}"
regexp: '^ClientAliveCountMax'
line: "ClientAliveCountMax {{ rhel9cis_sshd['clientalivecountmax'] }}"
validate: sshd -t -f %s
notify: Restart sshd
when:
- rhel9cis_rule_5_2_20
tags:

View file

@ -150,7 +150,7 @@
- name: "6.1.10 | AUDIT | Ensure no unowned files or directories exist"
block:
- name: "6.1.10 | AUDIT | Ensure no unowned files or directories exist | Finding all unowned files or directories"
ansible.builtin.shell: find "{{ item.mount }}" -xdev -nouser
ansible.builtin.shell: find "{{ item.mount }}" -xdev -nouser -not -fstype nfs
changed_when: false
failed_when: false
check_mode: false
@ -162,28 +162,21 @@
- item['device'].startswith('/dev')
- not 'bind' in item['options']
- name: "6.1.10 | AUDIT | Ensure no unowned files or directories exist | set fact"
- name: "6.1.10 | AUDIT | Ensure no unowned files or directories exist | Flatten no_user_items results for easier use"
ansible.builtin.set_fact:
rhel_09_6_1_10_unowned_files_found: true
loop: "{{ rhel_09_6_1_10_audit.results }}"
when:
- item | length > 0
- item.stdout is defined # skipped items are part of results list, but don't have the registered module properties
- item.stdout | length > 0
discovered_unowned_files_flatten: "{{ rhel_09_6_1_10_audit.results | map(attribute='stdout_lines') | flatten }}"
- name: "6.1.10 | AUDIT | Ensure no unowned files or directories exist | Displaying any unowned files or directories"
ansible.builtin.debug:
msg: "Warning!! Missing owner on items in {{ rhel_09_6_1_10_audit | community.general.json_query('results[*].stdout_lines[*]') | flatten }}" # noqa jinja[invalid]
when: rhel_09_6_1_10_unowned_files_found
msg: "Warning!! Missing owner on items in {{ discovered_unowned_files_flatten }}"
when: discovered_unowned_files_flatten | length > 0
- name: "6.1.10 | AUDIT | Ensure no unowned files or directories exist | warning"
ansible.builtin.import_tasks:
file: warning_facts.yml
vars:
warn_control_id: '6.1.10'
when: rhel_09_6_1_10_unowned_files_found
vars:
rhel_09_6_1_10_unowned_files_found: false
when: discovered_unowned_files_flatten | length > 0
when:
- rhel9cis_rule_6_1_10
tags:
@ -209,28 +202,21 @@
- item['device'].startswith('/dev')
- not 'bind' in item['options']
- name: "6.1.11 | AUDIT | Ensure no ungrouped files or directories exist | set fact"
- name: "6.1.11 | AUDIT | Ensure no ungrouped files or directories exist | Flatten no_user_items results for easier use"
ansible.builtin.set_fact:
rhel_09_6_1_11_ungrouped_files_found: true
loop: "{{ rhel_09_6_1_11_audit.results }}"
when:
- item | length > 0
- item.stdout is defined # skipped items are part of results list, but don't have the registered module properties
- item.stdout | length > 0
discovered_ungrouped_files_flatten: "{{ rhel_09_6_1_11_audit.results | map(attribute='stdout_lines') | flatten }}"
- name: "6.1.11 | AUDIT | Ensure no ungrouped files or directories exist | Displaying all ungrouped files or directories"
ansible.builtin.debug:
msg: "Warning!! Missing group on items in {{ rhel_09_6_1_11_audit | community.general.json_query('results[*].stdout_lines[*]') | flatten }}" # noqa jinja[invalid]
when: rhel_09_6_1_11_ungrouped_files_found
msg: "Warning!! Missing group on items in {{ discovered_ungrouped_files_flatten }}"
when: discovered_ungrouped_files_flatten | length > 0
- name: "6.1.11 | AUDIT | Ensure no ungrouped files or directories exist | warning"
ansible.builtin.import_tasks:
file: warning_facts.yml
vars:
warn_control_id: '6.1.11'
when: rhel_09_6_1_11_ungrouped_files_found
vars:
rhel_09_6_1_11_ungrouped_files_found: false
when: discovered_ungrouped_files_flatten | length > 0
when:
- rhel9cis_rule_6_1_11
tags:
@ -258,34 +244,26 @@
- name: "6.1.13 | AUDIT | Audit SUID executables"
block:
- name: "6.1.13 | AUDIT | Audit SUID executables | Find all SUID executables"
ansible.builtin.shell: df {{ item.mount }} -P | awk {'if (NR!=1) print $6'} | xargs -I '{}' find '{}' -xdev -type f -perm -4000
ansible.builtin.shell: "df --local -P | awk {'if (NR!=1) print $6'} | xargs -I '{}' find '{}' -xdev -type f -perm -4000"
failed_when: false
changed_when: false
register: rhel_09_6_1_13_suid_perms
loop: "{{ ansible_facts.mounts }}"
loop_control:
label: "{{ item.mount }}"
- name: "6.1.13 | AUDIT | Audit SUID executables | set fact SUID executables"
ansible.builtin.set_fact:
rhel9_6_1_13_suid_found: true
loop: "{{ rhel_09_6_1_13_suid_perms.results }}"
when:
- item | length > 0
- item.stdout is defined # skipped items are part of results list, but don't have the registered module properties
- item.stdout | length > 0
- name: "6.1.13 | AUDIT | Audit SUID executables | Alert SUID executables exist"
ansible.builtin.debug:
msg: "Warning!! SUID set on items in {{ rhel_09_6_1_13_suid_perms | community.general.json_query('results[*].stdout_lines[*]') | flatten }}" # noqa jinja[invalid]
when: rhel9_6_1_13_suid_found
msg: "Warning!! SUID set on items in {{ rhel_09_6_1_13_suid_perms.stdout_lines }}"
when:
- rhel_09_6_1_13_suid_perms.stdout is defined
- rhel_09_6_1_13_suid_perms.stdout | length > 0
- name: "6.1.13 | AUDIT | Audit SUID executables | Alert SUID executables exist | warning"
ansible.builtin.import_tasks:
file: warning_facts.yml
vars:
warn_control_id: '6.1.13'
when: rhel9_6_1_13_suid_found
when:
- rhel_09_6_1_13_suid_perms.stdout is defined
- rhel_09_6_1_13_suid_perms.stdout | length > 0
vars:
rhel9_6_1_13_suid_found: false
when:
@ -301,34 +279,26 @@
- name: "6.1.14 | AUDIT | Audit SGID executables"
block:
- name: "6.1.14 | AUDIT | Audit SGID executables | Find all SGID executables"
ansible.builtin.shell: df {{ item.mount }} -P | awk {'if (NR!=1) print $6'} | xargs -I '{}' find '{}' -xdev -type f -perm -2000
ansible.builtin.shell: df --local -P | awk {'if (NR!=1) print $6'} | xargs -I '{}' find '{}' -xdev -type f -perm -2000
failed_when: false
changed_when: false
register: rhel_09_6_1_14_sgid_perms
loop: "{{ ansible_facts.mounts }}"
loop_control:
label: "{{ item.mount }}"
- name: "6.1.14 | AUDIT | Audit SGID executables | Set fact SGID executables"
ansible.builtin.set_fact:
rhel9_6_1_14_sgid_found: true
loop: "{{ rhel_09_6_1_14_sgid_perms.results }}"
when:
- item | length > 0
- item.stdout is defined # skipped items are part of results list, but don't have the registered module properties
- item.stdout | length > 0
- name: "6.1.14 | AUDIT | Audit SGID executables | Alert SGID executables exist"
ansible.builtin.debug:
msg: "Warning!! SGID set on items in {{ rhel_09_6_1_14_sgid_perms | community.general.json_query('results[*].stdout_lines[*]') | flatten }}" # noqa jinja[invalid]
when: rhel9_6_1_14_sgid_found
msg: "Warning!! SGID set on items in {{ rhel_09_6_1_14_sgid_perms.stdout_lines }}"
when:
- rhel_09_6_1_14_sgid_perms.stdout is defined
- rhel_09_6_1_14_sgid_perms.stdout | length > 0
- name: "6.1.14 | AUDIT | Audit SGID executables| warning"
ansible.builtin.import_tasks:
file: warning_facts.yml
vars:
warn_control_id: '6.1.14'
when: rhel9_6_1_14_sgid_found
when:
- rhel_09_6_1_14_sgid_perms.stdout is defined
- rhel_09_6_1_14_sgid_perms.stdout | length > 0
vars:
rhel9_6_1_14_sgid_found: false
when:

View file

@ -462,7 +462,7 @@ rhel9cis_syslog: {{ rhel9cis_syslog }}
# Section 5
# This will allow use of drop in files when CIS adopts them.
rhel9_cis_sshd_config_file: {{ rhel9_cis_sshd_config_file }}
rhel9_cis_sshd_config_file: {{ rhel9cis_sshd_config_file }}
## 5.2.4 Note the following to understand precedence and layout
rhel9cis_sshd_limited: false

View file

@ -26,15 +26,16 @@ post_audit_outfile: "{{ audit_log_dir }}/{{ ansible_facts.hostname }}-{{ benchma
### Audit binary settings ###
audit_bin_version:
release: v0.4.4
AMD64_checksum: 'sha256:1c4f54b22fde9d4d5687939abc2606b0660a5d14a98afcd09b04b793d69acdc5'
release: v0.4.8
AMD64_checksum: 'sha256:85d00b7bba5f175bec95de7dfe1f71f8f25204914aad4c6f03c8457868eb6e2f'
ARM64_checksum: 'sha256:bca8c898bfd35b94c51455ece6193c95e2cd7b2b183ac2047b2d76291e73e47d'
audit_bin_path: /usr/local/bin/
audit_bin: "{{ audit_bin_path }}goss"
audit_format: json
audit_vars_path: "{{ audit_conf_dir }}/vars/{{ ansible_facts.hostname }}.yml"
audit_results: |
The audit results are: {{ pre_audit_summary }}
{% if not audit_only %}The post remediation audit results are: {{ post_audit_summary }}{% endif %}
The{% if not audit_only %} pre-remediation{% endif %} audit results are: {{ pre_audit_results }}
{% if not audit_only %}The post-remediation audit results are: {{ post_audit_results }}{% endif %}
Full breakdown can be found in {{ audit_log_dir }}
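For reference, the pre/post results above come from the summary-line grep in the audit tasks earlier in this diff; against a goss JSON report it behaves roughly like this (the file name and counts are made-up examples):
grep -E '"summary-line.*Count:.*Failed' host1-CIS_RHEL9-2024.json | cut -d'"' -f4
# Count: 420, Failed: 3, Skipped: 0, Duration: 12.345s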