
Merge pull request #81 from ansible-lockdown/workflow_and_precommit

Workflow and pre-commit added
uk-bolly committed on 2023-08-08 14:18:57 +01:00 (via GitHub)
commit 83c4e5c7e5
29 changed files with 1003 additions and 638 deletions

View file

@ -12,6 +12,7 @@ skip_list:
- 'name[casing]'
- 'name[template]'
- 'fqcn[action]'
- 'key-order[task]'
- '204'
- '305'
- '303'

View file

@ -0,0 +1,322 @@
[
{
"Description": "Generic API Key",
"StartLine": 119,
"EndLine": 119,
"StartColumn": 18,
"EndColumn": 68,
"Match": "secret\": \"5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8\"",
"Secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8",
"File": ".secrets.baseline",
"SymlinkFile": "",
"Commit": "495f942b7d26ee82690dc16eb4f231c587a57687",
"Entropy": 3.853056,
"Author": "Mark Bolwell",
"Email": "mark.bollyuk@gmail.com",
"Date": "2023-07-26T15:51:17Z",
"Message": "added pre-commit files\n\nSigned-off-by: Mark Bolwell \u003cmark.bollyuk@gmail.com\u003e",
"Tags": [],
"RuleID": "generic-api-key",
"Fingerprint": "495f942b7d26ee82690dc16eb4f231c587a57687:.secrets.baseline:generic-api-key:119"
},
{
"Description": "Generic API Key",
"StartLine": 127,
"EndLine": 127,
"StartColumn": 18,
"EndColumn": 68,
"Match": "secret\": \"fe96f7cfa2ab2224e7d015067a6f6cc713f7012e\"",
"Secret": "fe96f7cfa2ab2224e7d015067a6f6cc713f7012e",
"File": ".secrets.baseline",
"SymlinkFile": "",
"Commit": "495f942b7d26ee82690dc16eb4f231c587a57687",
"Entropy": 3.6568441,
"Author": "Mark Bolwell",
"Email": "mark.bollyuk@gmail.com",
"Date": "2023-07-26T15:51:17Z",
"Message": "added pre-commit files\n\nSigned-off-by: Mark Bolwell \u003cmark.bollyuk@gmail.com\u003e",
"Tags": [],
"RuleID": "generic-api-key",
"Fingerprint": "495f942b7d26ee82690dc16eb4f231c587a57687:.secrets.baseline:generic-api-key:127"
},
{
"Description": "Generic API Key",
"StartLine": 135,
"EndLine": 135,
"StartColumn": 18,
"EndColumn": 68,
"Match": "secret\": \"a415ab5cc17c8c093c015ccdb7e552aee7911aa4\"",
"Secret": "a415ab5cc17c8c093c015ccdb7e552aee7911aa4",
"File": ".secrets.baseline",
"SymlinkFile": "",
"Commit": "495f942b7d26ee82690dc16eb4f231c587a57687",
"Entropy": 3.5221736,
"Author": "Mark Bolwell",
"Email": "mark.bollyuk@gmail.com",
"Date": "2023-07-26T15:51:17Z",
"Message": "added pre-commit files\n\nSigned-off-by: Mark Bolwell \u003cmark.bollyuk@gmail.com\u003e",
"Tags": [],
"RuleID": "generic-api-key",
"Fingerprint": "495f942b7d26ee82690dc16eb4f231c587a57687:.secrets.baseline:generic-api-key:135"
},
{
"Description": "Generic API Key",
"StartLine": 145,
"EndLine": 145,
"StartColumn": 18,
"EndColumn": 68,
"Match": "secret\": \"2478fefdceefe2847c3aa36dc731aaad5b3cc2fb\"",
"Secret": "2478fefdceefe2847c3aa36dc731aaad5b3cc2fb",
"File": ".secrets.baseline",
"SymlinkFile": "",
"Commit": "495f942b7d26ee82690dc16eb4f231c587a57687",
"Entropy": 3.6348295,
"Author": "Mark Bolwell",
"Email": "mark.bollyuk@gmail.com",
"Date": "2023-07-26T15:51:17Z",
"Message": "added pre-commit files\n\nSigned-off-by: Mark Bolwell \u003cmark.bollyuk@gmail.com\u003e",
"Tags": [],
"RuleID": "generic-api-key",
"Fingerprint": "495f942b7d26ee82690dc16eb4f231c587a57687:.secrets.baseline:generic-api-key:145"
},
{
"Description": "Generic API Key",
"StartLine": 153,
"EndLine": 153,
"StartColumn": 18,
"EndColumn": 68,
"Match": "secret\": \"64411efd0f0561fe4852c6e414071345c9c6432a\"",
"Secret": "64411efd0f0561fe4852c6e414071345c9c6432a",
"File": ".secrets.baseline",
"SymlinkFile": "",
"Commit": "495f942b7d26ee82690dc16eb4f231c587a57687",
"Entropy": 3.646039,
"Author": "Mark Bolwell",
"Email": "mark.bollyuk@gmail.com",
"Date": "2023-07-26T15:51:17Z",
"Message": "added pre-commit files\n\nSigned-off-by: Mark Bolwell \u003cmark.bollyuk@gmail.com\u003e",
"Tags": [],
"RuleID": "generic-api-key",
"Fingerprint": "495f942b7d26ee82690dc16eb4f231c587a57687:.secrets.baseline:generic-api-key:153"
},
{
"Description": "Generic API Key",
"StartLine": 163,
"EndLine": 163,
"StartColumn": 18,
"EndColumn": 68,
"Match": "secret\": \"2aaf9f2a51d8fe89e48cb9cc7d04a991ceb7f360\"",
"Secret": "2aaf9f2a51d8fe89e48cb9cc7d04a991ceb7f360",
"File": ".secrets.baseline",
"SymlinkFile": "",
"Commit": "495f942b7d26ee82690dc16eb4f231c587a57687",
"Entropy": 3.8439426,
"Author": "Mark Bolwell",
"Email": "mark.bollyuk@gmail.com",
"Date": "2023-07-26T15:51:17Z",
"Message": "added pre-commit files\n\nSigned-off-by: Mark Bolwell \u003cmark.bollyuk@gmail.com\u003e",
"Tags": [],
"RuleID": "generic-api-key",
"Fingerprint": "495f942b7d26ee82690dc16eb4f231c587a57687:.secrets.baseline:generic-api-key:163"
},
{
"Description": "Generic API Key",
"StartLine": 119,
"EndLine": 119,
"StartColumn": 18,
"EndColumn": 68,
"Match": "secret\": \"5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8\"",
"Secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8",
"File": ".secrets.baseline",
"SymlinkFile": "",
"Commit": "7452e78f487c0b2cacfb81ccf582936a6ab09389",
"Entropy": 3.853056,
"Author": "Mark Bolwell",
"Email": "mark.bollyuk@gmail.com",
"Date": "2023-07-04T16:08:02Z",
"Message": "signature new precommits\n\nSigned-off-by: Mark Bolwell \u003cmark.bollyuk@gmail.com\u003e",
"Tags": [],
"RuleID": "generic-api-key",
"Fingerprint": "7452e78f487c0b2cacfb81ccf582936a6ab09389:.secrets.baseline:generic-api-key:119"
},
{
"Description": "Generic API Key",
"StartLine": 127,
"EndLine": 127,
"StartColumn": 18,
"EndColumn": 68,
"Match": "secret\": \"fe96f7cfa2ab2224e7d015067a6f6cc713f7012e\"",
"Secret": "fe96f7cfa2ab2224e7d015067a6f6cc713f7012e",
"File": ".secrets.baseline",
"SymlinkFile": "",
"Commit": "7452e78f487c0b2cacfb81ccf582936a6ab09389",
"Entropy": 3.6568441,
"Author": "Mark Bolwell",
"Email": "mark.bollyuk@gmail.com",
"Date": "2023-07-04T16:08:02Z",
"Message": "signature new precommits\n\nSigned-off-by: Mark Bolwell \u003cmark.bollyuk@gmail.com\u003e",
"Tags": [],
"RuleID": "generic-api-key",
"Fingerprint": "7452e78f487c0b2cacfb81ccf582936a6ab09389:.secrets.baseline:generic-api-key:127"
},
{
"Description": "Generic API Key",
"StartLine": 135,
"EndLine": 135,
"StartColumn": 18,
"EndColumn": 68,
"Match": "secret\": \"a415ab5cc17c8c093c015ccdb7e552aee7911aa4\"",
"Secret": "a415ab5cc17c8c093c015ccdb7e552aee7911aa4",
"File": ".secrets.baseline",
"SymlinkFile": "",
"Commit": "7452e78f487c0b2cacfb81ccf582936a6ab09389",
"Entropy": 3.5221736,
"Author": "Mark Bolwell",
"Email": "mark.bollyuk@gmail.com",
"Date": "2023-07-04T16:08:02Z",
"Message": "signature new precommits\n\nSigned-off-by: Mark Bolwell \u003cmark.bollyuk@gmail.com\u003e",
"Tags": [],
"RuleID": "generic-api-key",
"Fingerprint": "7452e78f487c0b2cacfb81ccf582936a6ab09389:.secrets.baseline:generic-api-key:135"
},
{
"Description": "Generic API Key",
"StartLine": 145,
"EndLine": 145,
"StartColumn": 18,
"EndColumn": 68,
"Match": "secret\": \"2478fefdceefe2847c3aa36dc731aaad5b3cc2fb\"",
"Secret": "2478fefdceefe2847c3aa36dc731aaad5b3cc2fb",
"File": ".secrets.baseline",
"SymlinkFile": "",
"Commit": "7452e78f487c0b2cacfb81ccf582936a6ab09389",
"Entropy": 3.6348295,
"Author": "Mark Bolwell",
"Email": "mark.bollyuk@gmail.com",
"Date": "2023-07-04T16:08:02Z",
"Message": "signature new precommits\n\nSigned-off-by: Mark Bolwell \u003cmark.bollyuk@gmail.com\u003e",
"Tags": [],
"RuleID": "generic-api-key",
"Fingerprint": "7452e78f487c0b2cacfb81ccf582936a6ab09389:.secrets.baseline:generic-api-key:145"
},
{
"Description": "Generic API Key",
"StartLine": 153,
"EndLine": 153,
"StartColumn": 18,
"EndColumn": 68,
"Match": "secret\": \"64411efd0f0561fe4852c6e414071345c9c6432a\"",
"Secret": "64411efd0f0561fe4852c6e414071345c9c6432a",
"File": ".secrets.baseline",
"SymlinkFile": "",
"Commit": "7452e78f487c0b2cacfb81ccf582936a6ab09389",
"Entropy": 3.646039,
"Author": "Mark Bolwell",
"Email": "mark.bollyuk@gmail.com",
"Date": "2023-07-04T16:08:02Z",
"Message": "signature new precommits\n\nSigned-off-by: Mark Bolwell \u003cmark.bollyuk@gmail.com\u003e",
"Tags": [],
"RuleID": "generic-api-key",
"Fingerprint": "7452e78f487c0b2cacfb81ccf582936a6ab09389:.secrets.baseline:generic-api-key:153"
},
{
"Description": "Generic API Key",
"StartLine": 163,
"EndLine": 163,
"StartColumn": 18,
"EndColumn": 68,
"Match": "secret\": \"2aaf9f2a51d8fe89e48cb9cc7d04a991ceb7f360\"",
"Secret": "2aaf9f2a51d8fe89e48cb9cc7d04a991ceb7f360",
"File": ".secrets.baseline",
"SymlinkFile": "",
"Commit": "7452e78f487c0b2cacfb81ccf582936a6ab09389",
"Entropy": 3.8439426,
"Author": "Mark Bolwell",
"Email": "mark.bollyuk@gmail.com",
"Date": "2023-07-04T16:08:02Z",
"Message": "signature new precommits\n\nSigned-off-by: Mark Bolwell \u003cmark.bollyuk@gmail.com\u003e",
"Tags": [],
"RuleID": "generic-api-key",
"Fingerprint": "7452e78f487c0b2cacfb81ccf582936a6ab09389:.secrets.baseline:generic-api-key:163"
},
{
"Description": "Generic API Key",
"StartLine": 3,
"EndLine": 4,
"StartColumn": 9,
"EndColumn": 1,
"Match": "key_pubkey_name: gpg-pubkey-8d8b756f-629e59ec",
"Secret": "gpg-pubkey-8d8b756f-629e59ec",
"File": "vars/OracleLinux.yml",
"SymlinkFile": "",
"Commit": "e04da88df42da0108d489f359513c574fbe5c87a",
"Entropy": 3.96772,
"Author": "Mark Bolwell",
"Email": "mark.bollyuk@gmail.com",
"Date": "2023-03-06T11:22:08Z",
"Message": "Added OracleLinux support\n\nSigned-off-by: Mark Bolwell \u003cmark.bollyuk@gmail.com\u003e",
"Tags": [],
"RuleID": "generic-api-key",
"Fingerprint": "e04da88df42da0108d489f359513c574fbe5c87a:vars/OracleLinux.yml:generic-api-key:3"
},
{
"Description": "Generic API Key",
"StartLine": 4,
"EndLine": 5,
"StartColumn": 8,
"EndColumn": 1,
"Match": "key_pubkey_name: gpg-pubkey-fd431d51-4ae0493b",
"Secret": "gpg-pubkey-fd431d51-4ae0493b",
"File": "vars/RedHat.yml",
"SymlinkFile": "",
"Commit": "28bbc2ff5f832d150452e9dc4cb6667b876ed09a",
"Entropy": 3.96772,
"Author": "Mark Bolwell",
"Email": "mark.bollyuk@gmail.com",
"Date": "2022-07-25T10:26:27Z",
"Message": "1.2.2 rpm gpg key check\n\nSigned-off-by: Mark Bolwell \u003cmark.bollyuk@gmail.com\u003e",
"Tags": [],
"RuleID": "generic-api-key",
"Fingerprint": "28bbc2ff5f832d150452e9dc4cb6667b876ed09a:vars/RedHat.yml:generic-api-key:4"
},
{
"Description": "Generic API Key",
"StartLine": 4,
"EndLine": 5,
"StartColumn": 8,
"EndColumn": 1,
"Match": "key_pubkey_name: gpg-pubkey-b86b3716-61e69f29",
"Secret": "gpg-pubkey-b86b3716-61e69f29",
"File": "vars/AlmaLinux.yml",
"SymlinkFile": "",
"Commit": "28bbc2ff5f832d150452e9dc4cb6667b876ed09a",
"Entropy": 3.824863,
"Author": "Mark Bolwell",
"Email": "mark.bollyuk@gmail.com",
"Date": "2022-07-25T10:26:27Z",
"Message": "1.2.2 rpm gpg key check\n\nSigned-off-by: Mark Bolwell \u003cmark.bollyuk@gmail.com\u003e",
"Tags": [],
"RuleID": "generic-api-key",
"Fingerprint": "28bbc2ff5f832d150452e9dc4cb6667b876ed09a:vars/AlmaLinux.yml:generic-api-key:4"
},
{
"Description": "Generic API Key",
"StartLine": 4,
"EndLine": 5,
"StartColumn": 8,
"EndColumn": 1,
"Match": "key_pubkey_name: gpg-pubkey-350d275d-6279464b",
"Secret": "gpg-pubkey-350d275d-6279464b",
"File": "vars/Rocky.yml",
"SymlinkFile": "",
"Commit": "28bbc2ff5f832d150452e9dc4cb6667b876ed09a",
"Entropy": 3.9946804,
"Author": "Mark Bolwell",
"Email": "mark.bollyuk@gmail.com",
"Date": "2022-07-25T10:26:27Z",
"Message": "1.2.2 rpm gpg key check\n\nSigned-off-by: Mark Bolwell \u003cmark.bollyuk@gmail.com\u003e",
"Tags": [],
"RuleID": "generic-api-key",
"Fingerprint": "28bbc2ff5f832d150452e9dc4cb6667b876ed09a:vars/Rocky.yml:generic-api-key:4"
}
]

.config/.secrets.baseline (new file, 170 lines)
View file

@ -0,0 +1,170 @@
{
"version": "1.4.0",
"plugins_used": [
{
"name": "ArtifactoryDetector"
},
{
"name": "AWSKeyDetector"
},
{
"name": "AzureStorageKeyDetector"
},
{
"name": "Base64HighEntropyString",
"limit": 4.5
},
{
"name": "BasicAuthDetector"
},
{
"name": "CloudantDetector"
},
{
"name": "DiscordBotTokenDetector"
},
{
"name": "GitHubTokenDetector"
},
{
"name": "HexHighEntropyString",
"limit": 3.0
},
{
"name": "IbmCloudIamDetector"
},
{
"name": "IbmCosHmacDetector"
},
{
"name": "JwtTokenDetector"
},
{
"name": "KeywordDetector",
"keyword_exclude": ""
},
{
"name": "MailchimpDetector"
},
{
"name": "NpmDetector"
},
{
"name": "PrivateKeyDetector"
},
{
"name": "SendGridDetector"
},
{
"name": "SlackDetector"
},
{
"name": "SoftlayerDetector"
},
{
"name": "SquareOAuthDetector"
},
{
"name": "StripeDetector"
},
{
"name": "TwilioKeyDetector"
}
],
"filters_used": [
{
"path": "detect_secrets.filters.allowlist.is_line_allowlisted"
},
{
"path": "detect_secrets.filters.common.is_baseline_file",
"filename": ".config/.secrets.baseline"
},
{
"path": "detect_secrets.filters.common.is_ignored_due_to_verification_policies",
"min_level": 2
},
{
"path": "detect_secrets.filters.heuristic.is_indirect_reference"
},
{
"path": "detect_secrets.filters.heuristic.is_likely_id_string"
},
{
"path": "detect_secrets.filters.heuristic.is_lock_file"
},
{
"path": "detect_secrets.filters.heuristic.is_not_alphanumeric_string"
},
{
"path": "detect_secrets.filters.heuristic.is_potential_uuid"
},
{
"path": "detect_secrets.filters.heuristic.is_prefixed_with_dollar_sign"
},
{
"path": "detect_secrets.filters.heuristic.is_sequential_string"
},
{
"path": "detect_secrets.filters.heuristic.is_swagger_file"
},
{
"path": "detect_secrets.filters.heuristic.is_templated_secret"
}
],
"results": {
"defaults/main.yml": [
{
"type": "Secret Keyword",
"filename": "defaults/main.yml",
"hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8",
"is_verified": false,
"line_number": 363,
"is_secret": false
},
{
"type": "Secret Keyword",
"filename": "defaults/main.yml",
"hashed_secret": "fe96f7cfa2ab2224e7d015067a6f6cc713f7012e",
"is_verified": false,
"line_number": 374,
"is_secret": false
},
{
"type": "Secret Keyword",
"filename": "defaults/main.yml",
"hashed_secret": "a415ab5cc17c8c093c015ccdb7e552aee7911aa4",
"is_verified": false,
"line_number": 375,
"is_secret": false
}
],
"tasks/main.yml": [
{
"type": "Secret Keyword",
"filename": "tasks/main.yml",
"hashed_secret": "2478fefdceefe2847c3aa36dc731aaad5b3cc2fb",
"is_verified": false,
"line_number": 38,
"is_secret": false
},
{
"type": "Secret Keyword",
"filename": "tasks/main.yml",
"hashed_secret": "64411efd0f0561fe4852c6e414071345c9c6432a",
"is_verified": false,
"line_number": 110,
"is_secret": false
}
],
"tasks/parse_etc_password.yml": [
{
"type": "Secret Keyword",
"filename": "tasks/parse_etc_password.yml",
"hashed_secret": "2aaf9f2a51d8fe89e48cb9cc7d04a991ceb7f360",
"is_verified": false,
"line_number": 18
}
]
},
"generated_at": "2023-08-07T15:38:18Z"
}

View file

@ -1,9 +0,0 @@
# AMI RHEL 9
ami_id = "ami-0845395779540e3cb"
ami_os = "rhel9"
ami_username = "ec2-user"
ami_user_home = "/home/ec2-user"
instance_tags = {
Name = "RHEL9-CIS"
Environment = "lockdown_github_repo_workflow"
}

View file

@ -0,0 +1,138 @@
---
name: Devel pipeline
on: # yamllint disable-line rule:truthy
pull_request_target:
types: [opened, reopened, synchronize]
branches:
- devel
paths:
- '**.yml'
- '**.sh'
- '**.j2'
- '**.ps1'
- '**.cfg'
# A workflow run is made up of one or more jobs
# that can run sequentially or in parallel
jobs:
# This will create messages for first-time contributors and direct them to the Discord server
welcome:
runs-on: ubuntu-latest
steps:
- uses: actions/first-interaction@main
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
pr-message: |-
Congrats on opening your first pull request and thank you for taking the time to help improve Ansible-Lockdown!
Please join in the conversation happening on the [Discord Server](https://discord.io/ansible-lockdown) as well.
# This workflow contains a single job which tests the playbook
playbook-test:
# The type of runner that the job will run on
runs-on: ubuntu-latest
env:
ENABLE_DEBUG: ${{ vars.ENABLE_DEBUG }}
# Imported as a variable by terraform
TF_VAR_repository: ${{ github.event.repository.name }}
defaults:
run:
shell: bash
working-directory: .github/workflows/github_linux_IaC
steps:
- name: Clone ${{ github.event.repository.name }}
uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.sha }}
# Pull in terraform code for linux servers
- name: Clone github IaC plan
uses: actions/checkout@v3
with:
repository: ansible-lockdown/github_linux_IaC
path: .github/workflows/github_linux_IaC
- name: Add_ssh_key
working-directory: .github/workflows
env:
SSH_AUTH_SOCK: /tmp/ssh_agent.sock
PRIVATE_KEY: "${{ secrets.SSH_PRV_KEY }}"
run: |
mkdir .ssh
chmod 700 .ssh
echo $PRIVATE_KEY > .ssh/github_actions.pem
chmod 600 .ssh/github_actions.pem
- name: DEBUG - Show IaC files
if: env.ENABLE_DEBUG == 'true'
run: |
echo "OSVAR = $OSVAR"
echo "benchmark_type = $benchmark_type"
pwd
ls
env:
# Imported from github variables, this is used to load the relevant OS.tfvars file
OSVAR: ${{ vars.OSVAR }}
benchmark_type: ${{ vars.BENCHMARK_TYPE }}
- name: Terraform_Init
id: init
run: terraform init
env:
# Imported from github variables, this is used to load the relevant OS.tfvars file
OSVAR: ${{ vars.OSVAR }}
TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }}
- name: Terraform_Validate
id: validate
run: terraform validate
env:
# Imported from github variables, this is used to load the relevant OS.tfvars file
OSVAR: ${{ vars.OSVAR }}
TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }}
- name: Terraform_Apply
id: apply
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
OSVAR: ${{ vars.OSVAR }}
TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }}
run: terraform apply -var-file "github_vars.tfvars" -var-file "${OSVAR}.tfvars" --auto-approve -input=false
## Debug Section
- name: DEBUG - Show Ansible hostfile
if: env.ENABLE_DEBUG == 'true'
run: cat hosts.yml
# AWS deployments take a while to come up; insert a sleep or the playbook fails
- name: Sleep for 60 seconds
run: sleep 60s
# Run the ansible playbook
- name: Run_Ansible_Playbook
uses: arillso/action.playbook@master
with:
playbook: site.yml
inventory: .github/workflows/github_linux_IaC/hosts.yml
galaxy_file: collections/requirements.yml
private_key: ${{ secrets.SSH_PRV_KEY }}
# verbose: 3
env:
ANSIBLE_HOST_KEY_CHECKING: "false"
ANSIBLE_DEPRECATION_WARNINGS: "false"
# Remove test system - User secrets to keep if necessary
- name: Terraform_Destroy
if: always() && env.ENABLE_DEBUG == 'false'
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
OSVAR: ${{ vars.OSVAR }}
TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }}
run: terraform destroy -var-file "github_vars.tfvars" -var-file "${OSVAR}.tfvars" --auto-approve -input=false

View file

@ -1,53 +0,0 @@
resource "aws_vpc" "Main" {
cidr_block = var.main_vpc_cidr
instance_tenancy = "default"
tags = {
Environment = "${var.environment}"
Name = "${var.namespace}-VPC"
}
}
resource "aws_internet_gateway" "IGW" {
vpc_id = aws_vpc.Main.id
tags = {
Environment = "${var.environment}"
Name = "${var.namespace}-IGW"
}
}
resource "aws_subnet" "publicsubnets" {
vpc_id = aws_vpc.Main.id
cidr_block = var.public_subnets
availability_zone = var.availability_zone
tags = {
Environment = "${var.environment}"
Name = "${var.namespace}-pubsub"
}
}
resource "aws_subnet" "Main" {
vpc_id = aws_vpc.Main.id
cidr_block = var.private_subnets
availability_zone = var.availability_zone
tags = {
Environment = "${var.environment}"
Name = "${var.namespace}-prvsub"
}
}
resource "aws_route_table" "PublicRT" {
vpc_id = aws_vpc.Main.id
route {
cidr_block = "0.0.0.0/0"
gateway_id = aws_internet_gateway.IGW.id
}
tags = {
Environment = "${var.environment}"
Name = "${var.namespace}-publicRT"
}
}
resource "aws_route_table_association" "rt_associate_public" {
subnet_id = aws_subnet.Main.id
route_table_id = aws_route_table.PublicRT.id
}

View file

@ -1,14 +0,0 @@
// github_actions variables
// Resourced in github_networks.tf
// Declared in variables.tf
//
namespace = "github_actions"
environment = "lockdown_github_repo_workflow"
// Matching key pair name found in AWS for the keypair's PEM key
ami_key_pair_name = "github_actions"
private_key = ".ssh/github_actions.pem"
main_vpc_cidr = "172.22.0.0/24"
public_subnets = "172.22.0.128/26"
private_subnets = "172.22.0.192/26"

View file

@ -1,111 +0,0 @@
# This is a basic workflow to help you get started with Actions
name: linux_benchmark_pipeline
# Controls when the action will run.
# Triggers the workflow on push or pull request
# events but only for the devel branch
on: # yamllint disable-line rule:truthy
pull_request_target:
types: [opened, reopened, synchronize]
branches:
- devel
- main
paths:
- '**.yml'
- '**.sh'
- '**.j2'
- '**.ps1'
- '**.cfg'
# A workflow run is made up of one or more jobs
# that can run sequentially or in parallel
jobs:
# This will create messages for first-time contributors and direct them to the Discord server
welcome:
runs-on: ubuntu-latest
steps:
- uses: actions/first-interaction@main
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
pr-message: |-
Congrats on opening your first pull request and thank you for taking the time to help improve Ansible-Lockdown!
Please join in the conversation happening on the [Discord Server](https://discord.io/ansible-lockdown) as well.
# This workflow contains a single job called "build"
build:
# The type of runner that the job will run on
runs-on: ubuntu-latest
env:
ENABLE_DEBUG: false
# Steps represent a sequence of tasks that will be executed as part of the job
steps:
# Checks-out your repository under $GITHUB_WORKSPACE,
# so your job can access it
- uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Add_ssh_key
working-directory: .github/workflows
env:
SSH_AUTH_SOCK: /tmp/ssh_agent.sock
PRIVATE_KEY: "${{ secrets.SSH_PRV_KEY }}"
run: |
mkdir .ssh
chmod 700 .ssh
echo $PRIVATE_KEY > .ssh/github_actions.pem
chmod 600 .ssh/github_actions.pem
### Build out the server
- name: Terraform_Init
working-directory: .github/workflows
run: terraform init
- name: Terraform_Validate
working-directory: .github/workflows
run: terraform validate
- name: Terraform_Apply
working-directory: .github/workflows
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
run: terraform apply -var-file "github_vars.tfvars" -var-file "OS.tfvars" --auto-approve -input=false
## Debug Section
- name: DEBUG - Show Ansible hostfile
if: env.ENABLE_DEBUG == 'true'
working-directory: .github/workflows
run: cat hosts.yml
# AWS deployments take a while to come up; insert a sleep or the playbook fails
- name: Sleep for 60 seconds
run: sleep 60s
shell: bash
# Run the ansible playbook
- name: Run_Ansible_Playbook
uses: arillso/action.playbook@master
with:
playbook: site.yml
inventory: .github/workflows/hosts.yml
galaxy_file: collections/requirements.yml
private_key: ${{ secrets.SSH_PRV_KEY }}
# verbose: 3
env:
ANSIBLE_HOST_KEY_CHECKING: "false"
ANSIBLE_DEPRECATION_WARNINGS: "false"
# Remove test system - User secrets to keep if necessary
- name: Terraform_Destroy
working-directory: .github/workflows
if: always() && env.ENABLE_DEBUG == 'false'
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
run: terraform destroy -var-file "github_vars.tfvars" -var-file "OS.tfvars" --auto-approve -input=false

View file

@ -1,83 +0,0 @@
provider "aws" {
profile = ""
region = var.aws_region
}
// Create a security group with access to port 22 and port 80 open to serve HTTP traffic
resource "random_id" "server" {
keepers = {
# Generate a new id each time we switch to a new AMI id
ami_id = "${var.ami_id}"
}
byte_length = 8
}
resource "aws_security_group" "github_actions" {
name = "${var.namespace}-${random_id.server.hex}-SG"
vpc_id = aws_vpc.Main.id
ingress {
from_port = 22
to_port = 22
protocol = "tcp"
cidr_blocks = ["0.0.0.0/0"]
}
ingress {
from_port = 80
to_port = 80
protocol = "tcp"
cidr_blocks = ["0.0.0.0/0"]
}
egress {
from_port = 0
to_port = 0
protocol = "-1"
cidr_blocks = ["0.0.0.0/0"]
}
tags = {
Environment = "${var.environment}"
Name = "${var.namespace}-SG"
}
}
// instance setup
resource "aws_instance" "testing_vm" {
ami = var.ami_id
availability_zone = var.availability_zone
associate_public_ip_address = true
key_name = var.ami_key_pair_name # This is the key as known in the ec2 key_pairs
instance_type = var.instance_type
tags = var.instance_tags
vpc_security_group_ids = [aws_security_group.github_actions.id]
subnet_id = aws_subnet.Main.id
root_block_device {
delete_on_termination = true
}
}
// generate inventory file
resource "local_file" "inventory" {
filename = "./hosts.yml"
directory_permission = "0755"
file_permission = "0644"
content = <<EOF
# benchmark host
all:
hosts:
${var.ami_os}:
ansible_host: ${aws_instance.testing_vm.public_ip}
ansible_user: ${var.ami_username}
vars:
setup_audit: true
run_audit: true
system_is_ec2: true
skip_reboot: false
rhel9cis_rule_5_6_6: false # skip root passwd check and keys only
EOF
}

View file

@ -0,0 +1,127 @@
---
name: Main pipeline
on: # yamllint disable-line rule:truthy
pull_request_target:
types: [opened, reopened, synchronize]
branches:
- main
paths:
- '**.yml'
- '**.sh'
- '**.j2'
- '**.ps1'
- '**.cfg'
# A workflow run is made up of one or more jobs
# that can run sequentially or in parallel
jobs:
# This workflow contains a single job which tests the playbook
playbook-test:
# The type of runner that the job will run on
runs-on: ubuntu-latest
env:
ENABLE_DEBUG: ${{ vars.ENABLE_DEBUG }}
# Imported as a variable by terraform
TF_VAR_repository: ${{ github.event.repository.name }}
defaults:
run:
shell: bash
working-directory: .github/workflows/github_linux_IaC
steps:
- name: Clone ${{ github.event.repository.name }}
uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.sha }}
# Pull in terraform code for linux servers
- name: Clone github IaC plan
uses: actions/checkout@v3
with:
repository: ansible-lockdown/github_linux_IaC
path: .github/workflows/github_linux_IaC
- name: Add_ssh_key
working-directory: .github/workflows
env:
SSH_AUTH_SOCK: /tmp/ssh_agent.sock
PRIVATE_KEY: "${{ secrets.SSH_PRV_KEY }}"
run: |
mkdir .ssh
chmod 700 .ssh
echo $PRIVATE_KEY > .ssh/github_actions.pem
chmod 600 .ssh/github_actions.pem
- name: DEBUG - Show IaC files
if: env.ENABLE_DEBUG == 'true'
run: |
echo "OSVAR = $OSVAR"
echo "benchmark_type = $benchmark_type"
pwd
ls
env:
# Imported from github variables, this is used to load the relevant OS.tfvars file
OSVAR: ${{ vars.OSVAR }}
benchmark_type: ${{ vars.BENCHMARK_TYPE }}
- name: Terraform_Init
id: init
run: terraform init
env:
# Imported from github variables, this is used to load the relevant OS.tfvars file
OSVAR: ${{ vars.OSVAR }}
TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }}
- name: Terraform_Validate
id: validate
run: terraform validate
env:
# Imported from github variables, this is used to load the relevant OS.tfvars file
OSVAR: ${{ vars.OSVAR }}
TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }}
- name: Terraform_Apply
id: apply
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
OSVAR: ${{ vars.OSVAR }}
TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }}
run: terraform apply -var-file "github_vars.tfvars" -var-file "${OSVAR}.tfvars" --auto-approve -input=false
## Debug Section
- name: DEBUG - Show Ansible hostfile
if: env.ENABLE_DEBUG == 'true'
run: cat hosts.yml
# AWS deployments take a while to come up; insert a sleep or the playbook fails
- name: Sleep for 60 seconds
run: sleep 60s
# Run the ansible playbook
- name: Run_Ansible_Playbook
uses: arillso/action.playbook@master
with:
playbook: site.yml
inventory: .github/workflows/github_linux_IaC/hosts.yml
galaxy_file: collections/requirements.yml
private_key: ${{ secrets.SSH_PRV_KEY }}
# verbose: 3
env:
ANSIBLE_HOST_KEY_CHECKING: "false"
ANSIBLE_DEPRECATION_WARNINGS: "false"
# Remove test system - User secrets to keep if necessary
- name: Terraform_Destroy
if: always() && env.ENABLE_DEBUG == 'false'
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
OSVAR: ${{ vars.OSVAR }}
TF_VAR_benchmark_type: ${{ vars.BENCHMARK_TYPE }}
run: terraform destroy -var-file "github_vars.tfvars" -var-file "${OSVAR}.tfvars" --auto-approve -input=false

View file

@ -1,6 +0,0 @@
// vars should be loaded by OSname.tfvars
availability_zone = "us-east-1b"
aws_region = "us-east-1"
ami_os = var.ami_os
ami_username = var.ami_username
instance_tags = var.instance_tags

View file

@ -1,81 +0,0 @@
// Taken from the OSname.tfvars
variable "aws_region" {
description = "AWS region"
default = "us-east-1"
type = string
}
variable "availability_zone" {
description = "List of availability zone in the region"
default = "us-east-1b"
type = string
}
variable "instance_type" {
description = "EC2 Instance Type"
default = "t3.micro"
type = string
}
variable "instance_tags" {
description = "Tags to set for instances"
type = map(string)
}
variable "ami_key_pair_name" {
description = "Name of key pair in AWS thats used"
type = string
}
variable "private_key" {
description = "path to private key for ssh"
type = string
}
variable "ami_os" {
description = "AMI OS Type"
type = string
}
variable "ami_id" {
description = "AMI ID reference"
type = string
}
variable "ami_username" {
description = "Username for the ami id"
type = string
}
variable "ami_user_home" {
description = "home dir for the username"
type = string
}
variable "namespace" {
description = "Name used across all tags"
type = string
}
variable "environment" {
description = "Env Name used across all tags"
type = string
}
// taken from github_vars.tfvars &
variable "main_vpc_cidr" {
description = "Private cidr block to be used for vpc"
type = string
}
variable "public_subnets" {
description = "public subnet cidr block"
type = string
}
variable "private_subnets" {
description = "private subnet cidr block"
type = string
}

.pre-commit-config.yaml (new file, 68 lines)
View file

@ -0,0 +1,68 @@
---
##### CI for use by github; no need for an action to be added
##### Inherited
ci:
autofix_prs: false
skip: [detect-aws-credentials, ansible-lint ]
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v3.2.0
hooks:
# Safety
- id: detect-aws-credentials
- id: detect-private-key
# git checks
- id: check-merge-conflict
- id: check-added-large-files
- id: check-case-conflict
# General checks
- id: trailing-whitespace
name: Trim Trailing Whitespace
description: This hook trims trailing whitespace.
entry: trailing-whitespace-fixer
language: python
types: [text]
args: [--markdown-linebreak-ext=md]
- id: end-of-file-fixer
# Scan for passwords
- repo: https://github.com/Yelp/detect-secrets
rev: v1.4.0
hooks:
- id: detect-secrets
args: ['--baseline', '.config/.secrets.baseline']
exclude: package.lock.json
- repo: https://github.com/gitleaks/gitleaks
rev: v8.17.0
hooks:
- id: gitleaks
args: ['--baseline-path','.config/.gitleaks-report.json']
- repo: https://github.com/ansible-community/ansible-lint
rev: v6.17.2
hooks:
- id: ansible-lint
name: Ansible-lint
description: This hook runs ansible-lint.
entry: python3 -m ansiblelint --force-color site.yml -c .ansible-lint
language: python
# do not pass files to ansible-lint, see:
# https://github.com/ansible/ansible-lint/issues/611
pass_filenames: false
always_run: true
additional_dependencies:
# https://github.com/pre-commit/pre-commit/issues/1526
# If you want to use specific version of ansible-core or ansible, feel
# free to override `additional_dependencies` in your own hook config
# file.
- ansible-core>=2.10.1
- repo: https://github.com/adrienverge/yamllint.git
rev: v1.32.0 # or higher tag
hooks:
- id: yamllint

View file

@ -1,5 +1,24 @@
# Changes to rhel9CIS
## 1.1.0
- new workflow configuration
- Allowing devel and main configs
- IaC code found in alternate repo for easier mgmt
- Added pre-commit config - Does not have to be used but can improve things
- .pre-commit-config.yaml
- .secrets.baseline
- gitleaks and secrets detection
- updated the logic in 5.6.5
- lint updates to 6.1.x
- readme updates
- audit control updates and variable name changes
- ability to run the audit on arm64 (e.g. Pi or M1/M2) too, thanks to @lucab85 #77
- tidied up README, adopted PR #78, thanks to @lucab85
- moved Makefile requirements to .config/
- removed .ansible.cfg and local.yml
## 1.0.10
- [#72](https://github.com/ansible-lockdown/RHEL9-CIS/issues/72)

View file

@ -25,5 +25,5 @@ yamllint:
pip-requirements:
@echo 'Python dependencies:'
@cat requirements.txt
@cat .config/requirements.txt
pip3 install -r .config/requirements.txt

README.md (158 lines changed)
View file

@ -15,13 +15,14 @@
![Ansible Galaxy Quality](https://img.shields.io/ansible/quality/61781?label=Quality&&logo=ansible)
![Discord Badge](https://img.shields.io/discord/925818806838919229?logo=discord)
![Devel Build Status](https://img.shields.io/github/actions/workflow/status/ansible-lockdown/RHEL9-CIS/linux_benchmark_testing.yml?label=Devel%20Build%20Status)
![Devel Commits](https://img.shields.io/github/commit-activity/m/ansible-lockdown/RHEL9-CIS/devel?color=dark%20green&label=Devel%20Branch%20commits)
![Release Branch](https://img.shields.io/badge/Release%20Branch-Main-brightgreen)
![Release Tag](https://img.shields.io/github/v/release/ansible-lockdown/RHEL9-CIS)
![Release Date](https://img.shields.io/github/release-date/ansible-lockdown/RHEL9-CIS)
![Release Branch](https://img.shields.io/badge/Release%20Branch-Main-brightgreen)
![Main Build Status](https://img.shields.io/github/actions/workflow/status/ansible-lockdown/RHEL9-CIS/linux_benchmark_testing.yml?label=Build%20Status)
![Main Release Date](https://img.shields.io/github/release-date/ansible-lockdown/RHEL9-CIS?label=Release%20Date)
![Release Tag](https://img.shields.io/github/v/tag/ansible-lockdown/RHEL9-CIS?label=Release%20Tag&&color=success)
[![Main Pipeline Status](https://github.com/ansible-lockdown/RHEL9-CIS/actions/workflows/main_pipeline_validation.yml/badge.svg?)](https://github.com/ansible-lockdown/RHEL9-CIS/actions/workflows/main_pipeline_validation.yml)
[![Devel Pipeline Status](https://github.com/ansible-lockdown/RHEL9-CIS/actions/workflows/devel_pipeline_validation.yml/badge.svg?)](https://github.com/ansible-lockdown/RHEL9-CIS/actions/workflows/devel_pipeline_validation.yml)
![Devel Commits](https://img.shields.io/github/commit-activity/m/ansible-lockdown/RHEL9-CIS/devel?color=dark%20green&label=Devel%20Branch%20commits)
![Issues Open](https://img.shields.io/github/issues-raw/ansible-lockdown/RHEL9-CIS?label=Open%20Issues)
![Issues Closed](https://img.shields.io/github/issues-closed-raw/ansible-lockdown/RHEL9-CIS?label=Closed%20Issues&&color=success)
@ -41,6 +42,11 @@
Join us on our [Discord Server](https://discord.io/ansible-lockdown) to ask questions, discuss features, or just chat with other Ansible-Lockdown users.
### Contributing
Issues and pull requests are welcome. Please ensure that all commits are signed off (Signed-off-by) and GPG-signed.
Refer to the [Contributing Guide](./CONTRIBUTING.rst)
---
## Caution(s)
@ -100,12 +106,14 @@ Almalinux 9
Rocky 9
OracleLinux 9
ansible 2.10
jmespath
relevant collections
- Access to download the goss binary and audit content, or another way to add them to the system, if using auditing (other options are available for getting the content onto the system).
CentOS Stream - while this will generally work, it is not supported and requires the following variable setting:
```sh
check_os: false
```
**General:**
- Basic knowledge of Ansible; below are some links to the Ansible documentation to help you get started if you are unfamiliar with Ansible
@ -120,13 +128,15 @@ relevant collections
**Technical Dependencies:**
- Python3
- Ansible 2.9+
- Ansible 2.10+
- python-def (should be included in RHEL 9)
- libselinux-python
- pip packages
- jmespath (complete list found in requirements.txt)
- collections found in collections/requirements.yml (an illustrative sketch follows this list)
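For orientation only, a collections requirements file of this shape might look like the sketch below; it is not the repository's actual file. `ansible.posix` is referenced by tasks in this diff, while `community.general` is purely an assumption.

```yaml
# collections/requirements.yml - illustrative sketch, not the actual file
collections:
  - name: ansible.posix        # ansible.posix.sysctl is used by the sysctl handler in this diff
  - name: community.general    # assumption only; check the repository file for the real list
```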
pre-commit is available for pull-request testing if installed on your host.
## Role Variables
This role is designed so that the end user should not have to edit the tasks themselves. All customization should be done by overriding the required variables found in the defaults/main.yml file, e.g. using inventory, group_vars, or extra_vars, as sketched below.
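As a minimal sketch (not taken from the repository), a group_vars override might look like the following; the variable names appear elsewhere in this diff, and the values are examples only.

```yaml
# group_vars/rhel9_servers.yml - hypothetical override sketch, values are examples
setup_audit: true            # enable setup of the audit tooling
run_audit: true              # run the pre/post remediation audit
skip_reboot: true            # skip the automatic reboot after remediation
rhel9cis_rule_5_6_5: false   # example: skip an individual control (default umask)
```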
@ -170,124 +180,20 @@ uses:
- ansible-core 2.12
- ansible collections - pulls in the latest version based on requirements file
- runs the audit using the devel branch
- Runs the audit using the devel branch
- Runs the pre-commit setup on the PR to ensure everything is in place as expected.
- This is an automated test that occurs on pull requests into devel
## Local Testing
ansible-base 2.10.17 - python 3.8
ansible-core 2.13.4 - python 3.10
- ansible-base 2.10.17 - python 3.8
- ansible-core 2.13.4 - python 3.10
- ansible-core 2.15.1 - python 3.11
- makefile - this is there purely for testing and initial setup purposes.
makefile - this is there purely for testing and initial setup purposes.
pre-commit can be tested.
It is run from within the directory.
## v1.0.0 - released Dec 2022
![Build Status](https://img.shields.io/github/workflow/status/ansible-lockdown/RHEL9-CIS/CommunityToDevel?label=Devel%20Build%20Status&style=plastic)
![Build Status](https://img.shields.io/github/workflow/status/ansible-lockdown/RHEL9-CIS/DevelToMain?label=Main%20Build%20Status&style=plastic)
![Release](https://img.shields.io/github/v/release/ansible-lockdown/RHEL9-CIS?style=plastic)
## Join us
On our [Discord Server](https://discord.io/ansible-lockdown) to ask questions, discuss features, or just chat with other Ansible-Lockdown users
## Caution(s)
This role **will make changes to the system** which may have unintended consequences.
This role was developed against a clean install of the Operating System. If you are implementing it on an existing system, please review this role for any site-specific changes that are needed.
To use release version please point to main branch
## Documentation
- [Readthedocs](https://ansible-lockdown.readthedocs.io/en/latest/)
- [Getting Started](https://www.lockdownenterprise.com/docs/getting-started-with-lockdown)
- [Customizing Roles](https://www.lockdownenterprise.com/docs/customizing-lockdown-enterprise)
- [Per-Host Configuration](https://www.lockdownenterprise.com/docs/per-host-lockdown-enterprise-configuration)
- [Getting the Most Out of the Role](https://www.lockdownenterprise.com/docs/get-the-most-out-of-lockdown-enterprise)
## Requirements
RHEL 9
Almalinux 9
Rocky 9
OracleLinux 9
ansible 2.10
jmespath
relevant collections
- Access to download or add the goss binary and content to the system if using auditing (other options are available on how to get the content to the system.)
## Tested with
ansible-base 2.10.17 - python 3.8
ansible-core 2.13.4 - python 3.10
- makefile - this is there purely for testing and initial setup purposes.
## General
- Basic knowledge of Ansible, below are some links to the Ansible documentation to help get started if you are unfamiliar with Ansible
- [Main Ansible documentation page](https://docs.ansible.com)
- [Ansible Getting Started](https://docs.ansible.com/ansible/latest/user_guide/intro_getting_started.html)
- [Tower User Guide](https://docs.ansible.com/ansible-tower/latest/html/userguide/index.html)
- [Ansible Community Info](https://docs.ansible.com/ansible/latest/community/index.html)
- Functioning Ansible and/or Tower Installed, configured, and running. This includes all of the base Ansible/Tower configurations, needed packages installed, and infrastructure setup.
- Please read through the tasks in this role to gain an understanding of what each control is doing.
- Some of the tasks are disruptive and can have unintended consequences in a live production system. Also familiarize yourself with the variables in the defaults/main.yml file.
## Dependencies
- Python3
- Ansible 2.9+
- python-def (should be included in RHEL 9)
- libselinux-python
- pip packages
- jmespath ( complete list found in requirements.txt)
- collections found in collections/requirements.yml
### Known Issues
CIS 1.2.4 - repo_gpgcheck is not carried out for RedHat hosts as the default repos do not have this function. This also affects EPEL (not covered by the variable).
- Rocky and Alma are not affected.
Variable used to unset:
rhel9cis_rhel_default_repo: true # set to false if using a repo that does have this ability
```sh
pre-commit run
```

View file

@ -1,30 +0,0 @@
[defaults]
host_key_checking=False
display_skipped_hosts=True
system_warnings=False
command_warnings=False
nocows=1
retry_files_save_path=/dev/null
pipelining=true
# Use the YAML callback plugin.
#stdout_callback = yaml
# Use the stdout_callback when running ad-hoc commands.
bin_ansible_callbacks = True
[privilege_escalation]
[paramiko_connection]
record_host_keys=False
[ssh_connection]
transfer_method=scp
ssh_args = -o ControlMaster=auto -o ControlPersist=60s
[accelerate]
[selinux]
[colors]
[diff]

View file

@ -52,7 +52,7 @@ setup_audit: false
# How to retrieve goss
# Options are copy or download - detailed settings at the bottom of this file
# you will need access to either github or the file already downloaded
get_goss_file: download
get_audit_binary_method: download
# how to get audit files onto the host
# options are git/copy/get_url - use local if already available to the host (adjust paths accordingly)
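For illustration only (not part of the file), selecting the git option described in the comments above could look like the following override; audit_file_git and audit_git_version are the variable names used by the pre-audit tasks later in this diff, while the repository URL and branch are assumptions.

```yaml
# Hypothetical group_vars override for the git method - URL and branch are placeholders
audit_content: git
audit_file_git: https://github.com/ansible-lockdown/RHEL9-CIS-Audit.git   # assumed audit content repo
audit_git_version: devel                                                  # assumed branch
```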
@ -663,15 +663,16 @@ audit_run_script_environment:
AUDIT_CONTENT_LOCATION: "{{ audit_out_dir }}"
### Goss binary settings ###
goss_version:
release: v0.3.21
checksum: 'sha256:9a9200779603acf0353d2c0e85ae46e083596c10838eaf4ee050c924678e4fe3'
audit_bin_release: v0.3.23
audit_bin_version:
AMD64_checksum: 'sha256:9e9f24e25f86d6adf2e669a9ffbe8c3d7b9b439f5f877500dea02ba837e10e4d'
ARM64_checksum: 'sha256:7b0794fa590857e7d64ef436e1a100ca26f6039f269a6138009aa837d27d7f9e'
audit_bin_path: /usr/local/bin/
audit_bin: "{{ audit_bin_path }}goss"
audit_format: json
# if get_audit_binary_method == download change accordingly
goss_url: "https://github.com/goss-org/goss/releases/download/{{ goss_version.release }}/goss-linux-amd64"
audit_bin_url: "https://github.com/goss-org/goss/releases/download/{{ audit_bin_release }}/goss-linux-"
## if get_audit_binary_method == copy the following needs to be updated for your environment
## it is expected that it will be copied from somewhere accessible to the control node
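Similarly, a hedged sketch of the copy method using the renamed variables above; the paths are assumptions.

```yaml
# Hypothetical group_vars override for the copy method - paths are examples only
get_audit_binary_method: copy
audit_bin_copy_location: /srv/files/goss-linux-amd64   # assumed path reachable by the control node
audit_content: copy
audit_local_copy: /srv/files/rhel9-cis-audit           # assumed local copy of the audit content
```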

View file

@ -3,8 +3,6 @@
- name: Reload sysctl
ansible.builtin.shell: sysctl --system
when:
- sysctl_updated.changed
- name: Sysctl flush ipv4 route table
ansible.posix.sysctl:

View file

@ -1,8 +0,0 @@
---
- hosts: localhost
connection: local
become: true
roles:
- role: "{{ playbook_dir }}"

View file

@ -1,30 +1,34 @@
---
- name: Download audit binary
- name: Pre Audit Setup | Set audit package name
block:
- name: Pre Audit Setup | Set audit package name | 64bit
ansible.builtin.set_fact:
audit_pkg_arch_name: AMD64
when: ansible_machine == "x86_64"
- name: Pre Audit Setup | Set audit package name | ARM64
ansible.builtin.set_fact:
audit_pkg_arch_name: ARM64
when: ansible_machine == "arm64"
- name: Pre Audit Setup | Download audit binary
ansible.builtin.get_url:
url: "{{ goss_url }}"
url: "{{ audit_bin_url }}{{ audit_pkg_arch_name }}"
dest: "{{ audit_bin }}"
owner: root
group: root
checksum: "{{ goss_version.checksum }}"
checksum: "{{ audit_bin_version[audit_pkg_arch_name + '_checksum'] }}"
mode: 0555
when:
- get_goss_file == 'download'
- get_audit_binary_method == 'download'
- name: Copy audit binary
- name: Pre Audit Setup | copy audit binary
ansible.builtin.copy:
src:
src: "{{ audit_bin_copy_location }}"
dest: "{{ audit_bin }}"
mode: 0555
owner: root
group: root
when:
- get_goss_file == 'copy'
- name: Install git if not present
ansible.builtin.package:
name: git
state: present
register: git_installed
when:
- '"git" not in ansible_facts.packages'
- get_audit_binary_method == 'copy'

View file

@ -19,7 +19,7 @@
- Audit immutable fact
- Restart auditd
- name: POST | AUDITD | Add Warning count for changes to template file | Warn Count # noqa: no-handler
- name: POST | AUDITD | Add Warning count for changes to template file | Warn Count # noqa no-handler
ansible.builtin.import_tasks: warning_facts.yml
vars:
warn_control_id: 'Auditd template updated, see diff output for details'

View file

@ -2,9 +2,11 @@
- name: "Post Audit | Run post_remediation {{ benchmark }} audit"
ansible.builtin.shell: "{{ audit_conf_dir }}/run_audit.sh -v {{ audit_vars_path }} -o {{ post_audit_outfile }} -g {{ group_names }}"
environment: "{{ audit_run_script_environment | default({}) }}"
changed_when: audit_run_post_remediation.rc == 0
register: audit_run_post_remediation
changed_when: true
environment:
AUDIT_BIN: "{{ audit_bin }}"
AUDIT_CONTENT_LOCATION: "{{ audit_out_dir }}"
AUDIT_FILE: "goss.yml"
- name: Post Audit | ensure audit files readable by users
ansible.builtin.file:
@ -17,7 +19,7 @@
- name: Post Audit | Capture audit data if json format
block:
- name: "Capture data {{ post_audit_outfile }}"
- name: "capture data {{ post_audit_outfile }}"
ansible.builtin.shell: "cat {{ post_audit_outfile }}"
register: post_audit
changed_when: false

View file

@ -1,109 +1,113 @@
---
- name: Pre Audit | Setup the audit
- name: Pre Audit Binary Setup | Setup the LE audit
ansible.builtin.include_tasks: LE_audit_setup.yml
when:
- setup_audit
- setup_audit
tags:
- setup_audit
- setup_audit
- name: "Pre Audit | Ensure {{ audit_conf_dir }} exists"
- name: "Pre Audit Setup | Ensure {{ audit_conf_dir }} exists"
ansible.builtin.file:
path: "{{ audit_conf_dir }}"
state: directory
mode: '0755'
- name: Pre Audit | retrieve audit content files from git
ansible.builtin.git:
repo: "{{ audit_file_git }}"
dest: "{{ audit_conf_dir }}"
version: "{{ audit_git_version }}"
- name: Pre Audit Setup | If using git for content set up
block:
- name: Pre Audit Setup | Install git
ansible.builtin.package:
name: git
state: present
- name: Pre Audit Setup | retrieve audit content files from git
ansible.builtin.git:
repo: "{{ audit_file_git }}"
dest: "{{ audit_conf_dir }}"
version: "{{ audit_git_version }}"
when:
- audit_content == 'git'
- audit_content == 'git'
- name: Pre Audit | confirm audit branch vs benchmark version
ansible.builtin.debug:
msg: "Audit will run the branch {{ audit_git_version }} for this Benchmark {{ benchmark_version }}"
- name: Pre Audit | copy to audit content files to server
- name: Pre Audit Setup | Copy audit content files to the server
ansible.builtin.copy:
src: "{{ audit_local_copy }}"
dest: "{{ audit_conf_dir }}"
mode: 0644
dest: "{{ audit_conf_dest }}"
mode: preserve
when:
- audit_content == 'copy'
- audit_content == 'copy'
- name: Pre Audit | get audit content from url
- name: Pre Audit Setup | unarchive audit content files on server
ansible.builtin.unarchive:
src: "{{ audit_conf_copy }}"
dest: "{{ audit_conf_dir }}"
when:
- audit_content == 'archived'
- name: Pre Audit Setup | get audit content from url
ansible.builtin.get_url:
url: "{{ audit_files_url }}"
dest: "{{ audit_conf_dir }}"
owner: root
group: root
mode: 0755
when:
- audit_content == 'get_url'
- audit_content == 'get_url'
- name: Pre Audit | Check Goss is available
- name: Pre Audit Setup | Check Goss is available
block:
- name: Pre Audit | Check for goss file
ansible.builtin.stat:
path: "{{ audit_bin }}"
register: goss_available
- name: Pre Audit Setup | Check for goss file
ansible.builtin.stat:
path: "{{ audit_bin }}"
register: goss_available
- name: Pre Audit | Alert if goss not available
ansible.builtin.assert:
that: goss_available.stat.exists
fail_msg: "Audit binary file {{ audit_bin }} does not exist"
- name: Pre Audit Setup | If audit ensure goss is available
ansible.builtin.assert:
msg: "Audit has been selected: unable to find goss binary at {{ audit_bin }}"
when:
- not goss_available.stat.exists
when:
- run_audit
- run_audit
- name: "Pre Audit | Check whether machine is UEFI-based"
ansible.builtin.stat:
path: /sys/firmware/efi
register: rhel9_efi_boot
tags:
- goss_template
- name: Pre Audit | Copy ansible default vars values to test audit
- name: Pre Audit Setup | Copy ansible default vars values to test audit
ansible.builtin.template:
src: ansible_vars_goss.yml.j2
dest: "{{ audit_vars_path }}"
mode: 0600
when:
- run_audit
- run_audit
tags:
- goss_template
- goss_template
- always
- name: "Pre Audit | Run pre_remediation {{ benchmark }} audit"
ansible.builtin.shell: "{{ audit_conf_dir }}/run_audit.sh -v {{ audit_vars_path }} -o {{ pre_audit_outfile }} -g {{ group_names }}"
environment: "{{ audit_run_script_environment | default({}) }}"
changed_when: audit_run_pre_remediation.rc == 0
register: audit_run_pre_remediation
changed_when: true
environment:
AUDIT_BIN: "{{ audit_bin }}"
AUDIT_CONTENT_LOCATION: "{{ audit_out_dir }}"
AUDIT_FILE: "goss.yml"
- name: Pre Audit | Capture audit data if json format
block:
- name: "Pre Audit | capture data {{ pre_audit_outfile }}"
ansible.builtin.shell: "cat {{ pre_audit_outfile }}"
register: pre_audit
changed_when: false
- name: "capture data {{ pre_audit_outfile }}"
ansible.builtin.shell: "cat {{ pre_audit_outfile }}"
register: pre_audit
changed_when: false
- name: Pre Audit | Capture pre-audit result
ansible.builtin.set_fact:
pre_audit_summary: "{{ pre_audit.stdout | from_json | json_query(summary) }}"
vars:
summary: 'summary."summary-line"'
- name: Pre Audit | Capture pre-audit result
ansible.builtin.set_fact:
pre_audit_summary: "{{ pre_audit.stdout | from_json | json_query(summary) }}"
vars:
summary: 'summary."summary-line"'
when:
- audit_format == "json"
- audit_format == "json"
- name: Pre Audit | Capture audit data if documentation format
block:
- name: "Pre Audit | capture data {{ pre_audit_outfile }}"
ansible.builtin.shell: "tail -2 {{ pre_audit_outfile }}"
register: pre_audit
changed_when: false
- name: "Pre Audit | capture data {{ pre_audit_outfile }} | documentation format"
ansible.builtin.shell: "tail -2 {{ pre_audit_outfile }}"
register: pre_audit
changed_when: false
- name: Pre Audit | Capture pre-audit result
ansible.builtin.set_fact:
pre_audit_summary: "{{ pre_audit.stdout_lines }}"
- name: Pre Audit | Capture pre-audit result | documentation format
ansible.builtin.set_fact:
pre_audit_summary: "{{ pre_audit.stdout_lines }}"
when:
- audit_format == "documentation"
- audit_format == "documentation"

View file

@ -44,7 +44,7 @@
ansible.builtin.shell: nmcli radio all off
changed_when: false
failed_when: false
when: rhel_09_wifi_enabled is changed
when: rhel_09_wifi_enabled is changed # noqa no-handler
when:
- rhel9cis_rule_3_1_2
tags:

View file

@ -83,25 +83,21 @@
- name: "5.6.5 | PATCH | Ensure default user umask is 027 or more restrictive"
block:
- name: "5.6.5 | PATCH | Ensure default user umask is 027 or more restrictive | Set umask for /etc/login.defs pam_umask settings"
ansible.builtin.replace:
path: /etc/login.defs
regexp: "{{ item.regexp }}"
replace: "{{ item.replace }}"
loop:
- { regexp: '(UMASK\s+)0[012][0-6]', replace: '\1 027' }
- { regexp: '(USERGROUPS_ENAB\s+)yes', replace: '\1 no' }
ansible.builtin.lineinfile:
path: "{{ item.path }}"
regexp: '(?i)(umask\s*)'
line: '{{ item.line }} 027'
with_items:
- { path: '/etc/bashrc', line: 'umask' }
- { path: '/etc/profile', line: 'umask' }
- { path: '/etc/login.defs', line: 'UMASK' }
- name: "5.6.5 | PATCH | Ensure default user umask is 027 or more restrictive | Set umask for /etc/bashrc"
ansible.builtin.replace:
path: /etc/bashrc
regexp: '^(?i)(\s+UMASK|UMASK)\s0[0-2][0-6]'
replace: '\1 027'
ansible.builtin.lineinfile:
path: /etc/login.defs
regexp: '^USERGROUPS_ENAB'
line: USERGROUPS_ENAB no
- name: "5.6.5 | PATCH | Ensure default user umask is 027 or more restrictive | Set umask for /etc/profile"
ansible.builtin.replace:
path: /etc/profile
regexp: '(?i)(umask)\s0[0-2][0-6]'
replace: '\1 027'
when:
- rhel9cis_rule_5_6_5
tags:

View file

@ -168,12 +168,12 @@
loop: "{{ rhel_09_6_1_10_audit.results }}"
when:
- item | length > 0
- item.stdout is defined # skipped items are part of results list, but don't have the registered module properties
- item.stdout is defined # skipped items are part of results list, but don't have the registered module properties
- item.stdout | length > 0
- name: "6.1.10 | AUDIT | Ensure no unowned files or directories exist | Displaying any unowned files or directories"
ansible.builtin.debug:
msg: "Warning!! Missing owner on items in {{ rhel_09_6_1_10_audit | json_query('results[*].stdout_lines[*]') | flatten }}"
msg: "Warning!! Missing owner on items in {{ rhel_09_6_1_10_audit | json_query('results[*].stdout_lines[*]') | flatten }}" # noqa jinja[invalid]
when: rhel_09_6_1_10_unowned_files_found
- name: "6.1.10 | AUDIT | Ensure no unowned files or directories exist | warning"
@ -214,12 +214,12 @@
loop: "{{ rhel_09_6_1_11_audit.results }}"
when:
- item | length > 0
- item.stdout is defined # skipped items are part of results list, but don't have the registered module properties
- item.stdout is defined # skipped items are part of results list, but don't have the registered module properties
- item.stdout | length > 0
- name: "6.1.11 | AUDIT | Ensure no ungrouped files or directories exist | Displaying all ungrouped files or directories"
ansible.builtin.debug:
msg: "Warning!! Missing group on items in {{ rhel_09_6_1_11_audit | json_query('results[*].stdout_lines[*]') | flatten }}"
msg: "Warning!! Missing group on items in {{ rhel_09_6_1_11_audit | json_query('results[*].stdout_lines[*]') | flatten }}" # noqa jinja[invalid]
when: rhel_09_6_1_11_ungrouped_files_found
- name: "6.1.11 | AUDIT | Ensure no ungrouped files or directories exist | warning"
@ -270,12 +270,12 @@
loop: "{{ rhel_09_6_1_13_suid_perms.results }}"
when:
- item | length > 0
- item.stdout is defined # skipped items are part of results list, but don't have the registered module properties
- item.stdout is defined # skipped items are part of results list, but don't have the registered module properties
- item.stdout | length > 0
- name: "6.1.13 | AUDIT | Audit SUID executables | Alert SUID executables exist"
ansible.builtin.debug:
msg: "Warning!! SUID set on items in {{ rhel_09_6_1_13_suid_perms | json_query('results[*].stdout_lines[*]') | flatten }}"
msg: "Warning!! SUID set on items in {{ rhel_09_6_1_13_suid_perms | json_query('results[*].stdout_lines[*]') | flatten }}" # noqa jinja[invalid]
when: rhel9_6_1_13_suid_found
- name: "6.1.13 | AUDIT | Audit SUID executables | Alert SUID executables exist | warning"
@ -312,12 +312,12 @@
loop: "{{ rhel_09_6_1_14_sgid_perms.results }}"
when:
- item | length > 0
- item.stdout is defined # skipped items are part of results list, but don't have the registered module properties
- item.stdout is defined # skipped items are part of results list, but don't have the registered module properties
- item.stdout | length > 0
- name: "6.1.14 | AUDIT | Audit SGID executables | Alert SGID executables exist"
ansible.builtin.debug:
msg: "Warning!! SGID set on items in {{ rhel_09_6_1_14_sgid_perms | json_query('results[*].stdout_lines[*]') | flatten }}"
msg: "Warning!! SGID set on items in {{ rhel_09_6_1_14_sgid_perms | json_query('results[*].stdout_lines[*]') | flatten }}" # noqa jinja[invalid]
when: rhel9_6_1_14_sgid_found
- name: "6.1.14 | AUDIT | Audit SGID executables| warning"

vars/CentOS.yml (new file, 4 lines)
View file

@ -0,0 +1,4 @@
---
os_gpg_key_pubkey_name: centos-gpg-keys
os_gpg_key_pubkey_content: "builder@centos.org 9.0"