Add required files and contents for installation

This commit is contained in:
areeqakbr 2025-08-14 09:25:55 +07:00
parent c740fd5a50
commit 5f07bbcec0
16 changed files with 882 additions and 134 deletions

group_vars/all.yaml Normal file

@@ -0,0 +1,18 @@
---
# Global variables for all hosts
ansible_python_interpreter: /usr/bin/python3
ansible_ssh_common_args: '-o StrictHostKeyChecking=no'
# Security configuration
security_hardening: true
fail2ban_enabled: true
ufw_enabled: true
# Monitoring configuration
monitoring_enabled: true
log_retention_days: 30
backup_enabled: true
# SSL/TLS configuration
ssl_cert_path: /etc/ssl/certs
ssl_key_path: /etc/ssl/private

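These globals apply to every host in the inventory. The roles that consume the security flags are not part of this diff, so the following is only an illustrative sketch of how a hardening play might key off them (the task names and module choices below are assumptions, not the project's actual wiring):

---
- name: Baseline hardening driven by group_vars/all.yaml
  hosts: all
  become: true
  tasks:
    - name: Install fail2ban when enabled
      apt:
        name: fail2ban
        state: present
      when: fail2ban_enabled | bool

    - name: Enable UFW with a default deny policy
      ufw:
        state: enabled
        policy: deny
      when: ufw_enabled | bool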

@@ -0,0 +1,40 @@
---
# Variables specific to security servers
vault_version: "1.15.2"
wazuh_version: "4.7.0"
prometheus_version: "2.47.0"
grafana_version: "10.1.0"
# Vault configuration
vault_port: 8200
vault_cluster_port: 8201
vault_ui_enabled: true
# Wazuh configuration
wazuh_manager_port: 1514
wazuh_api_port: 55000
wazuh_registration_port: 1515
# Prometheus configuration
prometheus_port: 9090
prometheus_retention: "30d"
prometheus_storage_retention_size: "10GB"
# Network configuration
allowed_ssh_users:
  - ubuntu
  - admin

firewall_rules:
  - port: 22
    protocol: tcp
    source: any
  - port: 8200
    protocol: tcp
    source: any
  - port: 9090
    protocol: tcp
    source: any
  - port: 3000
    protocol: tcp
    source: any

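The play that applies firewall_rules is not shown in this change, so the task below is only a sketch of one reasonable way to consume the list with the ufw module; allowed_ssh_users would similarly feed an AllowUsers line in sshd_config:

- name: Apply firewall rules from group_vars
  ufw:
    rule: allow
    port: "{{ item.port }}"
    proto: "{{ item.protocol }}"
    from_ip: "{{ item.source }}"
  loop: "{{ firewall_rules }}"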

@@ -7,144 +7,144 @@
    node_exporter_version: "1.6.1"
    alertmanager_version: "0.25.0"
    monitoring_retention_days: 30

  roles:
    - prometheus
    - vault
    - wazuh

  tasks:
    - name: Install Node Exporter
      block:
        - name: Download Node Exporter
          get_url:
            url: "https://github.com/prometheus/node_exporter/releases/download/v{{ node_exporter_version }}/node_exporter-{{ node_exporter_version }}.linux-amd64.tar.gz"
            dest: /tmp/node_exporter.tar.gz

        - name: Extract Node Exporter
          unarchive:
            src: /tmp/node_exporter.tar.gz
            dest: /opt/
            remote_src: yes
            owner: prometheus
            group: prometheus

        - name: Create Node Exporter symlink
          file:
            src: "/opt/node_exporter-{{ node_exporter_version }}.linux-amd64/node_exporter"
            dest: /usr/local/bin/node_exporter
            state: link

        - name: Create Node Exporter systemd service
          copy:
            content: |
              [Unit]
              Description=Node Exporter
              Wants=network-online.target
              After=network-online.target

              [Service]
              User=prometheus
              Group=prometheus
              Type=simple
              ExecStart=/usr/local/bin/node_exporter

              [Install]
              WantedBy=multi-user.target
            dest: /etc/systemd/system/node_exporter.service
          notify:
            - reload systemd
            - restart node_exporter

    - name: Install Grafana
      block:
        - name: Add Grafana GPG key
          apt_key:
            url: https://packages.grafana.com/gpg.key
            state: present

        - name: Add Grafana repository
          apt_repository:
            repo: "deb https://packages.grafana.com/oss/deb stable main"
            state: present

        - name: Install Grafana
          apt:
            name: grafana
            state: present
            update_cache: yes

        - name: Start and enable Grafana
          service:
            name: grafana-server
            state: started
            enabled: yes

    - name: Configure security monitoring alerts
      copy:
        content: |
          groups:
            - name: security_alerts
              rules:
                - alert: WazuhManagerDown
                  expr: up{job="wazuh"} == 0
                  for: 2m
                  labels:
                    severity: critical
                  annotations:
                    summary: "Wazuh Manager is down"
                - alert: VaultSealed
                  expr: vault_core_unsealed == 0
                  for: 1m
                  labels:
                    severity: critical
                  annotations:
                    summary: "Vault is sealed"
                - alert: HighCPUUsage
                  expr: 100 - (avg by(instance) (irate(node_cpu_seconds_total{mode="idle"}[5m])) * 100) > 80
                  for: 5m
                  labels:
                    severity: warning
                  annotations:
                    summary: "High CPU usage on {% raw %}{{ $labels.instance }}{% endraw %}"
        dest: /etc/prometheus/security_rules.yml
        owner: prometheus
        group: prometheus
      notify: restart prometheus

  handlers:
    - name: reload systemd
      systemd:
        daemon_reload: yes

    - name: restart prometheus
      service:
        name: prometheus
        state: restarted

    - name: restart node_exporter
      service:
        name: node_exporter
        state: restarted

  post_tasks:
    - name: Verify monitoring services
      service:
        name: "{{ item }}"
        state: started
        enabled: yes
      loop:
        - prometheus
        - node_exporter
        - grafana-server

    - name: Display monitoring URLs
      debug:
        msg: |
          Monitoring services available at:
          - Prometheus: http://{{ ansible_default_ipv4.address }}:9090
          - Grafana: http://{{ ansible_default_ipv4.address }}:3000
          - Node Exporter: http://{{ ansible_default_ipv4.address }}:9100

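Beyond the service checks in post_tasks, a quick end-to-end check is to ask Prometheus whether it actually sees scrape targets. A sketch using the built-in uri module (the /api/v1/targets endpoint is standard Prometheus API; the assertion itself is illustrative):

- name: Query Prometheus scrape targets
  uri:
    url: http://127.0.0.1:9090/api/v1/targets
    return_content: yes
  register: prom_targets

- name: Fail if no targets are active
  assert:
    that:
      - (prom_targets.json.data.activeTargets | length) > 0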

@@ -0,0 +1,94 @@
---
- name: Install and Configure HashiCorp Vault
  hosts: security_servers
  become: true
  vars:
    vault_version: "1.15.2"
    vault_datacenter: "dc1"

  tasks:
    - name: Create vault user
      user:
        name: vault
        system: yes
        shell: /bin/false
        home: /opt/vault

    - name: Download Vault binary
      get_url:
        url: "https://releases.hashicorp.com/vault/{{ vault_version }}/vault_{{ vault_version }}_linux_amd64.zip"
        dest: /tmp/vault.zip
        mode: '0644'

    - name: Install unzip
      apt:
        name: unzip
        state: present
        update_cache: yes

    - name: Extract Vault binary
      unarchive:
        src: /tmp/vault.zip
        dest: /usr/local/bin/
        remote_src: yes
        owner: root
        group: root
        mode: '0755'

    - name: Create Vault directories
      file:
        path: "{{ item }}"
        state: directory
        owner: vault
        group: vault
        mode: '0750'
      loop:
        - /etc/vault.d
        - /opt/vault/data
        - /opt/vault/logs

    - name: Generate Vault configuration
      template:
        src: vault.hcl.j2
        dest: /etc/vault.d/vault.hcl
        owner: vault
        group: vault
        mode: '0640'
      notify: restart vault

    - name: Create Vault systemd service
      template:
        src: vault.service.j2
        dest: /etc/systemd/system/vault.service
      notify:
        - reload systemd
        - restart vault

    - name: Start and enable Vault service
      service:
        name: vault
        state: started
        enabled: yes

    - name: Wait for Vault to be ready
      wait_for:
        port: 8200
        host: 127.0.0.1
        delay: 10

    - name: Display Vault status
      debug:
        msg: |
          Vault has been installed and started.
          Access Vault UI at: http://{{ ansible_default_ipv4.address }}:8200
          Initialize Vault with: vault operator init

  handlers:
    - name: reload systemd
      systemd:
        daemon_reload: yes

    - name: restart vault
      service:
        name: vault
        state: restarted

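The play deliberately stops short of initialising Vault. A minimal, read-only sketch for checking seal state from the same host (vault status exits 0 when unsealed and 2 when sealed, so both are treated as success here):

- name: Query Vault seal status
  command: vault status -format=json
  environment:
    VAULT_ADDR: http://127.0.0.1:8200
  register: vault_status
  failed_when: vault_status.rc not in [0, 2]
  changed_when: false

- name: Report initialisation and seal state
  debug:
    msg:
      initialized: "{{ (vault_status.stdout | from_json).initialized }}"
      sealed: "{{ (vault_status.stdout | from_json).sealed }}"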

@@ -0,0 +1,157 @@
---
- name: Deploy Vulnerability Scanner (OpenVAS/GVM)
  hosts: security_servers
  become: true
  vars:
    openvas_admin_user: "admin"
    openvas_admin_password: "{{ vault_openvas_password | default('ChangeMe123!') }}"

  tasks:
    - name: Update apt cache
      apt:
        update_cache: yes

    - name: Install required packages
      apt:
        name:
          - software-properties-common
          - apt-transport-https
          - curl
          - gnupg
        state: present

    - name: Add GVM PPA repository
      apt_repository:
        repo: ppa:mrazavi/gvm
        state: present

    - name: Install GVM/OpenVAS
      apt:
        name:
          - gvm
          - openvas-scanner
          - openvas-manager
          - greenbone-security-assistant
          - greenbone-feed-sync
        state: present
        update_cache: yes

    - name: Setup GVM
      shell: |
        gvm-setup
        gvm-feed-update
      args:
        creates: /var/lib/gvm/.setup_complete

    - name: Create setup completion marker
      file:
        path: /var/lib/gvm/.setup_complete
        state: touch
        owner: _gvm
        group: _gvm

    - name: Create GVM admin user
      shell: |
        gvmd --create-user={{ openvas_admin_user }} --password={{ openvas_admin_password }}
      args:
        creates: /var/lib/gvm/.admin_user_created
      register: create_user_result

    - name: Create admin user marker
      file:
        path: /var/lib/gvm/.admin_user_created
        state: touch
        owner: _gvm
        group: _gvm
      when: create_user_result is succeeded

    - name: Start and enable GVM services
      service:
        name: "{{ item }}"
        state: started
        enabled: yes
      loop:
        - greenbone-security-assistant
        - openvas-scanner
        - openvas-manager

    - name: Configure firewall for GVM
      ufw:
        rule: allow
        port: "{{ item }}"
        proto: tcp
      loop:
        - 443   # GSA web interface
        - 9390  # GVM daemon

    - name: Wait for GSA to be ready
      wait_for:
        port: 443
        host: 127.0.0.1
        delay: 60

    - name: Create vulnerability scan script
      copy:
        content: |
          #!/bin/bash
          # Automated vulnerability scan script
          TARGET=${1:-127.0.0.1}
          SCAN_NAME="Security_Scan_$(date +%Y%m%d_%H%M%S)"
          echo "Starting vulnerability scan for: $TARGET"
          echo "Scan name: $SCAN_NAME"
          # Create scan task
          TASK_ID=$(gvm-cli --gmp-username {{ openvas_admin_user }} --gmp-password {{ openvas_admin_password }} \
            socket --socketpath /var/run/gvmd.sock --xml \
            "<create_task><name>$SCAN_NAME</name><target><hosts>$TARGET</hosts></target></create_task>" \
            | grep -oP 'id="\K[^"]+')
          echo "Created scan task with ID: $TASK_ID"
          # Start scan
          gvm-cli --gmp-username {{ openvas_admin_user }} --gmp-password {{ openvas_admin_password }} \
            socket --socketpath /var/run/gvmd.sock --xml \
            "<start_task task_id=\"$TASK_ID\"/>"
          echo "Scan started. Monitor progress in GSA web interface."
        dest: /usr/local/bin/vulnerability-scan.sh
        mode: '0755'

    - name: Create scheduled vulnerability scan
      cron:
        name: "Weekly vulnerability scan"
        minute: "0"
        hour: "2"
        weekday: "0"
        job: "/usr/local/bin/vulnerability-scan.sh {{ ansible_default_ipv4.address }}"

    - name: Display OpenVAS/GVM information
      debug:
        msg: |
          OpenVAS/GVM has been successfully deployed:
          - Web Interface: https://{{ ansible_default_ipv4.address }}:443
          - Admin Username: {{ openvas_admin_user }}
          - Admin Password: {{ openvas_admin_password }}
          Run vulnerability scans with:
          /usr/local/bin/vulnerability-scan.sh <target_ip>
          Weekly automated scans are configured for Sunday 2 AM.

  handlers:
    - name: restart greenbone-security-assistant
      service:
        name: greenbone-security-assistant
        state: restarted

    - name: restart openvas-scanner
      service:
        name: openvas-scanner
        state: restarted

    - name: restart openvas-manager
      service:
        name: openvas-manager
        state: restarted

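Once the GSA port answers, ad-hoc scans can be driven through the script installed above; the scan_target variable in this sketch is illustrative:

- name: Launch an ad-hoc vulnerability scan
  command: /usr/local/bin/vulnerability-scan.sh {{ scan_target | default('127.0.0.1') }}
  register: scan_output
  changed_when: true

- name: Show scan task details
  debug:
    var: scan_output.stdout_lines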

@@ -0,0 +1,133 @@
---
- name: Deploy Wazuh Security Manager
  hosts: security_servers
  become: true
  vars:
    wazuh_version: "4.7.0"
    wazuh_manager_config:
      email_notification: false
      log_level: 3

  tasks:
    - name: Update apt cache
      apt:
        update_cache: yes

    - name: Install required packages
      apt:
        name:
          - curl
          - apt-transport-https
          - lsb-release
          - gnupg2
        state: present

    - name: Download Wazuh GPG key
      get_url:
        url: https://packages.wazuh.com/key/GPG-KEY-WAZUH
        dest: /tmp/GPG-KEY-WAZUH

    - name: Add Wazuh GPG key
      apt_key:
        file: /tmp/GPG-KEY-WAZUH
        state: present

    - name: Add Wazuh repository
      apt_repository:
        repo: "deb https://packages.wazuh.com/4.x/apt/ stable main"
        state: present

    - name: Install Wazuh manager
      apt:
        name:
          - wazuh-manager
        state: present
        update_cache: yes

    - name: Configure Wazuh manager
      template:
        src: ossec.conf.j2
        dest: /var/ossec/etc/ossec.conf
        backup: yes
        owner: root
        group: ossec
        mode: '0640'
      notify: restart wazuh-manager

    - name: Start and enable Wazuh manager
      service:
        name: wazuh-manager
        state: started
        enabled: yes

    - name: Install Wazuh indexer
      apt:
        name: wazuh-indexer
        state: present

    - name: Start and enable Wazuh indexer
      service:
        name: wazuh-indexer
        state: started
        enabled: yes

    - name: Install Wazuh dashboard
      apt:
        name: wazuh-dashboard
        state: present

    - name: Start and enable Wazuh dashboard
      service:
        name: wazuh-dashboard
        state: started
        enabled: yes

    - name: Open required firewall ports
      ufw:
        rule: allow
        port: "{{ item }}"
        proto: tcp
      loop:
        - 1514   # Wazuh agent connection
        - 1515   # Wazuh agent registration
        - 55000  # Wazuh API
        - 9200   # Wazuh indexer
        - 443    # Wazuh dashboard

    - name: Wait for services to be ready
      wait_for:
        port: "{{ item }}"
        host: 127.0.0.1
        delay: 30
      loop:
        - 55000
        - 9200
        - 443

    - name: Display Wazuh information
      debug:
        msg: |
          Wazuh has been successfully deployed:
          - Manager API: https://{{ ansible_default_ipv4.address }}:55000
          - Dashboard: https://{{ ansible_default_ipv4.address }}:443
          - Indexer: https://{{ ansible_default_ipv4.address }}:9200
          Default credentials:
          - Username: admin
          - Password: admin (change immediately)

  handlers:
    - name: restart wazuh-manager
      service:
        name: wazuh-manager
        state: restarted

    - name: restart wazuh-indexer
      service:
        name: wazuh-indexer
        state: restarted

    - name: restart wazuh-dashboard
      service:
        name: wazuh-dashboard
        state: restarted

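A light smoke test after this play is to probe the manager API and the indexer. Both answer over HTTPS with self-signed certificates at this stage, and an unauthenticated 401 is treated as healthy (the accepted status codes are an assumption about the default install):

- name: Probe Wazuh API and indexer endpoints
  uri:
    url: "https://127.0.0.1:{{ item }}"
    validate_certs: no
    status_code: [200, 401]
  loop:
    - 55000
    - 9200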

@@ -0,0 +1,9 @@
---
- name: reload systemd
  systemd:
    daemon_reload: yes

- name: restart prometheus
  service:
    name: prometheus
    state: restarted


@@ -0,0 +1,16 @@
[Unit]
Description=Alertmanager
Wants=network-online.target
After=network-online.target
[Service]
User=prometheus
Group=prometheus
Type=simple
ExecStart=/opt/alertmanager-{{ alertmanager_version }}.linux-amd64/alertmanager \
--config.file=/etc/alertmanager/alertmanager.yml \
--storage.path=/var/lib/alertmanager/ \
--web.listen-address=0.0.0.0:9093
[Install]
WantedBy=multi-user.target

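The hunks in this commit add the Alertmanager unit template and the alertmanager_version variable, but the tasks that fetch the binary are not visible in the diff. A hedged sketch of what they might look like, mirroring the Node Exporter pattern used in the monitoring playbook:

- name: Download Alertmanager
  get_url:
    url: "https://github.com/prometheus/alertmanager/releases/download/v{{ alertmanager_version }}/alertmanager-{{ alertmanager_version }}.linux-amd64.tar.gz"
    dest: /tmp/alertmanager.tar.gz

- name: Extract Alertmanager
  unarchive:
    src: /tmp/alertmanager.tar.gz
    dest: /opt/
    remote_src: yes
    owner: prometheus
    group: prometheus

- name: Install the Alertmanager unit file
  template:
    src: alertmanager.service.j2
    dest: /etc/systemd/system/alertmanager.service
  notify: reload systemd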

@@ -0,0 +1,32 @@
global:
  smtp_smarthost: 'localhost:587'
  smtp_from: 'alertmanager@{{ ansible_hostname }}'

route:
  group_by: ['alertname']
  group_wait: 10s
  group_interval: 10s
  repeat_interval: 1h
  receiver: 'web.hook'

receivers:
  - name: 'web.hook'
    webhook_configs:
      - url: 'http://127.0.0.1:5001/'
  - name: 'security-team'
    email_configs:
      - to: 'security@company.com'
        subject: '{% raw %}Security Alert: {{ .GroupLabels.alertname }}{% endraw %}'
        body: |
          {% raw %}
          {{ range .Alerts }}
          Alert: {{ .Annotations.summary }}
          Description: {{ .Annotations.description }}
          {{ end }}
          {% endraw %}

inhibit_rules:
  - source_match:
      severity: 'critical'
    target_match:
      severity: 'warning'
    equal: ['alertname', 'dev', 'instance']

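As configured, every alert is delivered to web.hook; the security-team receiver is defined but nothing routes to it. If critical alerts should reach it, a child route along these lines would do that (whether to add it is a policy decision, not part of this commit):

route:
  receiver: 'web.hook'
  routes:
    - match:
        severity: critical
      receiver: 'security-team'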

@@ -0,0 +1,19 @@
[Unit]
Description=Prometheus
Wants=network-online.target
After=network-online.target
[Service]
User=prometheus
Group=prometheus
Type=simple
ExecStart=/opt/prometheus-{{ prometheus_version }}.linux-amd64/prometheus \
--config.file=/etc/prometheus/prometheus.yml \
--storage.tsdb.path=/var/lib/prometheus/ \
--web.console.templates=/opt/prometheus-{{ prometheus_version }}.linux-amd64/consoles \
--web.console.libraries=/opt/prometheus-{{ prometheus_version }}.linux-amd64/console_libraries \
--web.listen-address=0.0.0.0:9090 \
--web.enable-lifecycle
[Install]
WantedBy=multi-user.target


@@ -0,0 +1,34 @@
global:
  scrape_interval: 15s
  evaluation_interval: 15s

rule_files:
  - "/etc/prometheus/security_rules.yml"

alerting:
  alertmanagers:
    - static_configs:
        - targets:
            - localhost:9093

scrape_configs:
  - job_name: 'prometheus'
    static_configs:
      - targets: ['localhost:9090']

  - job_name: 'node'
    static_configs:
      - targets: ['localhost:9100']

  - job_name: 'wazuh'
    static_configs:
      - targets: ['localhost:55000']
    metrics_path: '/api/monitoring'

  - job_name: 'vault'
    static_configs:
      - targets: ['localhost:8200']
    metrics_path: '/v1/sys/metrics'
    params:
      format: ['prometheus']
    bearer_token_file: /etc/prometheus/vault_token

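The vault scrape job reads its credential from /etc/prometheus/vault_token, which nothing in this commit creates. A sketch of provisioning it from an Ansible Vault-encrypted variable (the variable name is illustrative):

- name: Install Vault metrics token for Prometheus
  copy:
    content: "{{ vault_prometheus_metrics_token }}"
    dest: /etc/prometheus/vault_token
    owner: prometheus
    group: prometheus
    mode: '0600'
  no_log: true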

@@ -0,0 +1,9 @@
---
- name: reload systemd
  systemd:
    daemon_reload: yes

- name: restart vault
  service:
    name: vault
    state: restarted


@@ -0,0 +1,18 @@
ui = true

storage "file" {
  path = "/opt/vault/data"
}

listener "tcp" {
  address     = "0.0.0.0:8200"
  tls_disable = 1
}

telemetry {
  prometheus_retention_time = "30s"
  disable_hostname = true
}

api_addr = "http://{{ ansible_default_ipv4.address }}:8200"
cluster_addr = "https://{{ ansible_default_ipv4.address }}:8201"


@@ -0,0 +1,33 @@
[Unit]
Description=HashiCorp Vault
Documentation=https://www.vaultproject.io/docs/
Requires=network-online.target
After=network-online.target
ConditionFileNotEmpty=/etc/vault.d/vault.hcl
[Service]
Type=notify
User=vault
Group=vault
ProtectSystem=full
ProtectHome=read-only
PrivateTmp=yes
PrivateDevices=yes
SecureBits=keep-caps
AmbientCapabilities=CAP_IPC_LOCK
Capabilities=CAP_IPC_LOCK+ep
CapabilityBoundingSet=CAP_SYSLOG CAP_IPC_LOCK
NoNewPrivileges=yes
ExecStart=/usr/local/bin/vault server -config=/etc/vault.d/vault.hcl
ExecReload=/bin/kill -HUP $MAINPID
KillMode=process
Restart=on-failure
RestartSec=5
TimeoutStopSec=30
StartLimitInterval=60
StartLimitBurst=3
LimitNOFILE=65536
LimitMEMLOCK=infinity
[Install]
WantedBy=multi-user.target


@@ -0,0 +1,15 @@
---
- name: restart wazuh-manager
  service:
    name: wazuh-manager
    state: restarted

- name: restart wazuh-indexer
  service:
    name: wazuh-indexer
    state: restarted

- name: restart wazuh-dashboard
  service:
    name: wazuh-dashboard
    state: restarted


@@ -0,0 +1,121 @@
<ossec_config>
  <global>
    <jsonout_output>yes</jsonout_output>
    <alerts_log>yes</alerts_log>
    <logall>no</logall>
    <logall_json>no</logall_json>
    <email_notification>no</email_notification>
    <smtp_server>localhost</smtp_server>
    <email_from>wazuh@{{ ansible_hostname }}</email_from>
    <email_to>admin@company.com</email_to>
    <hostname>{{ ansible_hostname }}</hostname>
    <description>Wazuh manager</description>
  </global>

  <alerts>
    <log_alert_level>3</log_alert_level>
    <email_alert_level>12</email_alert_level>
  </alerts>

  <remote>
    <connection>secure</connection>
    <port>1514</port>
    <protocol>udp</protocol>
    <queue_size>131072</queue_size>
  </remote>

  <auth>
    <disabled>no</disabled>
    <port>1515</port>
    <use_source_ip>no</use_source_ip>
    <purge>yes</purge>
    <use_password>no</use_password>
    <ciphers>HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH</ciphers>
    <ssl_agent_ca></ssl_agent_ca>
    <ssl_verify_host>no</ssl_verify_host>
    <ssl_manager_cert>/var/ossec/etc/sslmanager.cert</ssl_manager_cert>
    <ssl_manager_key>/var/ossec/etc/sslmanager.key</ssl_manager_key>
    <ssl_auto_negotiate>no</ssl_auto_negotiate>
  </auth>

  <monitoring>
    <frequency>120</frequency>
    <queue_size>16384</queue_size>
    <rlimit_nofile>65536</rlimit_nofile>
  </monitoring>

  <indexer>
    <enabled>yes</enabled>
    <hosts>
      <host>https://127.0.0.1:9200</host>
    </hosts>
    <ssl>
      <certificate_authorities>
        <ca>/etc/wazuh-indexer/certs/root-ca.pem</ca>
      </certificate_authorities>
      <certificate>/etc/wazuh-indexer/certs/wazuh.manager.pem</certificate>
      <key>/etc/wazuh-indexer/certs/wazuh.manager-key.pem</key>
    </ssl>
  </indexer>

  <vulnerability-detector>
    <enabled>yes</enabled>
    <interval>5m</interval>
    <min_full_scan_interval>6h</min_full_scan_interval>
    <run_on_start>yes</run_on_start>
    <provider name="canonical">
      <enabled>yes</enabled>
      <os>trusty</os>
      <os>xenial</os>
      <os>bionic</os>
      <os>focal</os>
      <os>jammy</os>
      <update_interval>1h</update_interval>
    </provider>
  </vulnerability-detector>

  <!-- Log analysis -->
  <localfile>
    <log_format>syslog</log_format>
    <location>/var/log/auth.log</location>
  </localfile>

  <localfile>
    <log_format>syslog</log_format>
    <location>/var/log/syslog</location>
  </localfile>

  <localfile>
    <log_format>apache</log_format>
    <location>/var/log/apache2/access.log</location>
  </localfile>

  <!-- Rootcheck -->
  <rootcheck>
    <disabled>no</disabled>
    <check_files>yes</check_files>
    <check_trojans>yes</check_trojans>
    <check_dev>yes</check_dev>
    <check_sys>yes</check_sys>
    <check_pids>yes</check_pids>
    <check_ports>yes</check_ports>
    <check_if>yes</check_if>
  </rootcheck>

  <!-- File integrity monitoring -->
  <syscheck>
    <disabled>no</disabled>
    <frequency>79200</frequency>
    <scan_on_start>yes</scan_on_start>

    <!-- Directories to check -->
    <directories check_all="yes">/etc,/usr/bin,/usr/sbin</directories>
    <directories check_all="yes">/bin,/sbin,/boot</directories>

    <!-- Files/directories to ignore -->
    <ignore>/etc/mtab</ignore>
    <ignore>/etc/hosts.deny</ignore>
    <ignore>/etc/mail/statistics</ignore>
  </syscheck>
</ossec_config>
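If extra log sources need to be watched later without re-templating the whole file, they can be appended as an additional localfile block. A sketch with blockinfile, using XML comments as markers so the file stays valid (the nginx path is only an example):

- name: Add an extra log source to ossec.conf
  blockinfile:
    path: /var/ossec/etc/ossec.conf
    insertbefore: '</ossec_config>'
    marker: "<!-- {mark} ANSIBLE MANAGED extra localfile -->"
    block: |
      <localfile>
        <log_format>syslog</log_format>
        <location>/var/log/nginx/error.log</location>
      </localfile>
  notify: restart wazuh-manager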