diff --git a/group_vars/all.yaml b/group_vars/all.yaml
new file mode 100644
index 0000000..7a5af11
--- /dev/null
+++ b/group_vars/all.yaml
@@ -0,0 +1,18 @@
+---
+# Global variables for all hosts
+ansible_python_interpreter: /usr/bin/python3
+ansible_ssh_common_args: '-o StrictHostKeyChecking=no'
+
+# Security configuration
+security_hardening: true
+fail2ban_enabled: true
+ufw_enabled: true
+
+# Monitoring configuration
+monitoring_enabled: true
+log_retention_days: 30
+backup_enabled: true
+
+# SSL/TLS configuration
+ssl_cert_path: /etc/ssl/certs
+ssl_key_path: /etc/ssl/private
diff --git a/group_vars/security_servers.yaml b/group_vars/security_servers.yaml
new file mode 100644
index 0000000..a81ac5d
--- /dev/null
+++ b/group_vars/security_servers.yaml
@@ -0,0 +1,40 @@
+---
+# Variables specific to security servers
+vault_version: "1.15.2"
+wazuh_version: "4.7.0"
+prometheus_version: "2.47.0"
+grafana_version: "10.1.0"
+
+# Vault configuration
+vault_port: 8200
+vault_cluster_port: 8201
+vault_ui_enabled: true
+
+# Wazuh configuration
+wazuh_manager_port: 1514
+wazuh_api_port: 55000
+wazuh_registration_port: 1515
+
+# Prometheus configuration
+prometheus_port: 9090
+prometheus_retention: "30d"
+prometheus_storage_retention_size: "10GB"
+
+# Network configuration
+allowed_ssh_users:
+- ubuntu
+- admin
+
+firewall_rules:
+- port: 22
+ protocol: tcp
+ source: any
+- port: 8200
+ protocol: tcp
+ source: any
+- port: 9090
+ protocol: tcp
+ source: any
+- port: 3000
+ protocol: tcp
+ source: any
diff --git a/playbooks/monitoring.yaml b/playbooks/monitoring.yaml
index 12392cf..c50d622 100644
--- a/playbooks/monitoring.yaml
+++ b/playbooks/monitoring.yaml
@@ -7,144 +7,144 @@
node_exporter_version: "1.6.1"
alertmanager_version: "0.25.0"
monitoring_retention_days: 30
-
+
roles:
- - prometheus
- - vault
- - wazuh
-
+ - promotheus
+ - vault
+ - wazuh
+
tasks:
- - name: Install Node Exporter
- block:
- - name: Download Node Exporter
- get_url:
- url: "https://github.com/prometheus/node_exporter/releases/download/v{{ node_exporter_version }}/node_exporter-{{ node_exporter_version }}.linux-amd64.tar.gz"
- dest: /tmp/node_exporter.tar.gz
-
- - name: Extract Node Exporter
- unarchive:
- src: /tmp/node_exporter.tar.gz
- dest: /opt/
- remote_src: yes
- owner: prometheus
- group: prometheus
-
- - name: Create Node Exporter symlink
- file:
- src: "/opt/node_exporter-{{ node_exporter_version }}.linux-amd64/node_exporter"
- dest: /usr/local/bin/node_exporter
- state: link
-
- - name: Create Node Exporter systemd service
- copy:
- content: |
- [Unit]
- Description=Node Exporter
- Wants=network-online.target
- After=network-online.target
-
- [Service]
- User=prometheus
- Group=prometheus
- Type=simple
- ExecStart=/usr/local/bin/node_exporter
-
- [Install]
- WantedBy=multi-user.target
- dest: /etc/systemd/system/node_exporter.service
- notify:
- - reload systemd
- - restart node_exporter
-
- - name: Install Grafana
- block:
- - name: Add Grafana GPG key
- apt_key:
- url: https://packages.grafana.com/gpg.key
- state: present
-
- - name: Add Grafana repository
- apt_repository:
- repo: "deb https://packages.grafana.com/oss/deb stable main"
- state: present
-
- - name: Install Grafana
- apt:
- name: grafana
- state: present
- update_cache: yes
-
- - name: Start and enable Grafana
- service:
- name: grafana-server
- state: started
- enabled: yes
-
- - name: Configure security monitoring alerts
- copy:
- content: |
- groups:
- - name: security_alerts
- rules:
- - alert: WazuhManagerDown
- expr: up{job="wazuh"} == 0
- for: 2m
- labels:
- severity: critical
- annotations:
- summary: "Wazuh Manager is down"
-
- - alert: VaultSealed
- expr: vault_core_unsealed == 0
- for: 1m
- labels:
- severity: critical
- annotations:
- summary: "Vault is sealed"
-
- - alert: HighCPUUsage
- expr: 100 - (avg by(instance) (irate(node_cpu_seconds_total{mode="idle"}[5m])) * 100) > 80
- for: 5m
- labels:
- severity: warning
- annotations:
- summary: "High CPU usage on {{ $labels.instance }}"
- dest: /etc/prometheus/security_rules.yml
+ - name: Install Node Exporter
+ block:
+ - name: Download Node Exporter
+ get_url:
+ url: "https://github.com/prometheus/node_exporter/releases/download/v{{ node_exporter_version }}/node_exporter-{{ node_exporter_version }}.linux-amd64.tar.gz"
+ dest: /tmp/node_exporter.tar.gz
+
+ - name: Extract Node Exporter
+ unarchive:
+ src: /tmp/node_exporter.tar.gz
+ dest: /opt/
+ remote_src: yes
owner: prometheus
group: prometheus
- notify: restart prometheus
-
- handlers:
- - name: reload systemd
- systemd:
- daemon_reload: yes
-
- - name: restart prometheus
+
+ - name: Create Node Exporter symlink
+ file:
+ src: "/opt/node_exporter-{{ node_exporter_version }}.linux-amd64/node_exporter"
+ dest: /usr/local/bin/node_exporter
+ state: link
+
+ - name: Create Node Exporter systemd service
+ copy:
+ content: |
+ [Unit]
+ Description=Node Exporter
+ Wants=network-online.target
+ After=network-online.target
+
+ [Service]
+ User=prometheus
+ Group=prometheus
+ Type=simple
+ ExecStart=/usr/local/bin/node_exporter
+
+ [Install]
+ WantedBy=multi-user.target
+ dest: /etc/systemd/system/node_exporter.service
+ notify:
+ - reload systemd
+ - restart node_exporter
+
+ - name: Install Grafana
+ block:
+ - name: Add Grafana GPG key
+ apt_key:
+ url: https://packages.grafana.com/gpg.key
+ state: present
+
+ - name: Add Grafana repository
+ apt_repository:
+ repo: "deb https://packages.grafana.com/oss/deb stable main"
+ state: present
+
+ - name: Install Grafana
+ apt:
+ name: grafana
+ state: present
+ update_cache: yes
+
+ - name: Start and enable Grafana
service:
- name: prometheus
- state: restarted
-
- - name: restart node_exporter
- service:
- name: node_exporter
- state: restarted
- enabled: yes
-
- post_tasks:
- - name: Verify monitoring services
- service:
- name: "{{ item }}"
+ name: grafana-server
state: started
enabled: yes
- loop:
- - prometheus
- - node_exporter
- - grafana-server
-
- - name: Display monitoring URLs
- debug:
- msg: |
- Monitoring services available at:
- - Prometheus: http://{{ ansible_default_ipv4.address }}:9090
- - Grafana: http://{{ ansible_default_ipv4.address }}:3000
- - Node Exporter: http://{{ ansible_default_ipv4.address }}:9100
\ No newline at end of file
+
+ - name: Configure security monitoring alerts
+ copy:
+ content: |
+ groups:
+ - name: security_alerts
+ rules:
+ - alert: WazuhManagerDown
+ expr: up{job="wazuh"} == 0
+ for: 2m
+ labels:
+ severity: critical
+ annotations:
+ summary: "Wazuh Manager is down"
+
+ - alert: VaultSealed
+ expr: vault_core_unsealed == 0
+ for: 1m
+ labels:
+ severity: critical
+ annotations:
+ summary: "Vault is sealed"
+
+ - alert: HighCPUUsage
+ expr: 100 - (avg by(instance) (irate(node_cpu_seconds_total{mode="idle"}[5m])) * 100) > 80
+ for: 5m
+ labels:
+ severity: warning
+ annotations:
+                summary: "High CPU usage on {% raw %}{{ $labels.instance }}{% endraw %}"
+ dest: /etc/prometheus/security_rules.yml
+ owner: prometheus
+ group: prometheus
+ notify: restart prometheus
+
+ handlers:
+ - name: reload systemd
+ systemd:
+ daemon_reload: yes
+
+ - name: restart prometheus
+ service:
+ name: prometheus
+ state: restarted
+
+ - name: restart node_exporter
+ service:
+ name: node_exporter
+ state: restarted
+ enabled: yes
+
+ post_tasks:
+ - name: Verify monitoring services
+ service:
+ name: "{{ item }}"
+ state: started
+ enabled: yes
+ loop:
+ - prometheus
+ - node_exporter
+ - grafana-server
+
+ - name: Display monitoring URLs
+ debug:
+ msg: |
+ Monitoring services available at:
+ - Prometheus: http://{{ ansible_default_ipv4.address }}:9090
+ - Grafana: http://{{ ansible_default_ipv4.address }}:3000
+ - Node Exporter: http://{{ ansible_default_ipv4.address }}:9100
diff --git a/playbooks/vault_install.yaml b/playbooks/vault_install.yaml
index e69de29..5bb7e76 100644
--- a/playbooks/vault_install.yaml
+++ b/playbooks/vault_install.yaml
@@ -0,0 +1,94 @@
+---
+- name: Install and Configure HashiCorp Vault
+ hosts: security_servers
+ become: true
+ vars:
+ vault_version: "1.15.2"
+ vault_datacenter: "dc1"
+
+ tasks:
+ - name: Create vault user
+ user:
+ name: vault
+ system: yes
+ shell: /bin/false
+ home: /opt/vault
+
+ - name: Download Vault binary
+ get_url:
+ url: "https://releases.hashicorp.com/vault/{{ vault_version }}/vault_{{ vault_version }}_linux_amd64.zip"
+ dest: /tmp/vault.zip
+ mode: '0644'
+
+ - name: Install unzip
+ apt:
+ name: unzip
+ state: present
+ update_cache: yes
+
+ - name: Extract Vault binary
+ unarchive:
+ src: /tmp/vault.zip
+ dest: /usr/local/bin/
+ remote_src: yes
+ owner: root
+ group: root
+ mode: '0755'
+
+ - name: Create Vault directories
+ file:
+ path: "{{ item }}"
+ state: directory
+ owner: vault
+ group: vault
+ mode: '0750'
+ loop:
+ - /etc/vault.d
+ - /opt/vault/data
+ - /opt/vault/logs
+
+ - name: Generate Vault configuration
+ template:
+ src: vault.hcl.j2
+ dest: /etc/vault.d/vault.hcl
+ owner: vault
+ group: vault
+ mode: '0640'
+ notify: restart vault
+
+ - name: Create Vault systemd service
+ template:
+ src: vault.service.j2
+ dest: /etc/systemd/system/vault.service
+ notify:
+ - reload systemd
+ - restart vault
+
+ - name: Start and enable Vault service
+ service:
+ name: vault
+ state: started
+ enabled: yes
+
+ - name: Wait for Vault to be ready
+ wait_for:
+ port: 8200
+ host: 127.0.0.1
+ delay: 10
+
+ - name: Display Vault status
+ debug:
+ msg: |
+ Vault has been installed and started.
+ Access Vault UI at: http://{{ ansible_default_ipv4.address }}:8200
+ Initialize Vault with: vault operator init
+
+ handlers:
+ - name: reload systemd
+ systemd:
+ daemon_reload: yes
+
+ - name: restart vault
+ service:
+ name: vault
+ state: restarted
diff --git a/playbooks/vuln-scanner.yaml b/playbooks/vuln-scanner.yaml
index e69de29..57e02ef 100644
--- a/playbooks/vuln-scanner.yaml
+++ b/playbooks/vuln-scanner.yaml
@@ -0,0 +1,157 @@
+---
+- name: Deploy Vulnerability Scanner (OpenVAS/GVM)
+ hosts: security_servers
+ become: true
+ vars:
+ openvas_admin_user: "admin"
+ openvas_admin_password: "{{ vault_openvas_password | default('ChangeMe123!') }}"
+
+ tasks:
+ - name: Update apt cache
+ apt:
+ update_cache: yes
+
+ - name: Install required packages
+ apt:
+ name:
+ - software-properties-common
+ - apt-transport-https
+ - curl
+ - gnupg
+ state: present
+
+ - name: Add GVM PPA repository
+ apt_repository:
+ repo: ppa:mrazavi/gvm
+ state: present
+
+ - name: Install GVM/OpenVAS
+ apt:
+ name:
+ - gvm
+ - openvas-scanner
+ - openvas-manager
+ - greenbone-security-assistant
+ - greenbone-feed-sync
+ state: present
+ update_cache: yes
+
+ - name: Setup GVM
+ shell: |
+ gvm-setup
+ gvm-feed-update
+ args:
+ creates: /var/lib/gvm/.setup_complete
+
+ - name: Create setup completion marker
+ file:
+ path: /var/lib/gvm/.setup_complete
+ state: touch
+ owner: _gvm
+ group: _gvm
+
+ - name: Create GVM admin user
+ shell: |
+ gvmd --create-user={{ openvas_admin_user }} --password={{ openvas_admin_password }}
+ args:
+ creates: /var/lib/gvm/.admin_user_created
+ register: create_user_result
+
+ - name: Create admin user marker
+ file:
+ path: /var/lib/gvm/.admin_user_created
+ state: touch
+ owner: _gvm
+ group: _gvm
+ when: create_user_result is succeeded
+
+ - name: Start and enable GVM services
+ service:
+ name: "{{ item }}"
+ state: started
+ enabled: yes
+ loop:
+ - greenbone-security-assistant
+ - openvas-scanner
+ - openvas-manager
+
+ - name: Configure firewall for GVM
+ ufw:
+ rule: allow
+ port: "{{ item }}"
+ proto: tcp
+ loop:
+ - 443 # GSA web interface
+ - 9390 # GVM daemon
+
+ - name: Wait for GSA to be ready
+ wait_for:
+ port: 443
+ host: 127.0.0.1
+ delay: 60
+
+ - name: Create vulnerability scan script
+ copy:
+ content: |
+ #!/bin/bash
+ # Automated vulnerability scan script
+
+ TARGET=${1:-127.0.0.1}
+ SCAN_NAME="Security_Scan_$(date +%Y%m%d_%H%M%S)"
+
+ echo "Starting vulnerability scan for: $TARGET"
+ echo "Scan name: $SCAN_NAME"
+
+ # Create scan task
+ TASK_ID=$(gvm-cli --gmp-username {{ openvas_admin_user }} --gmp-password {{ openvas_admin_password }} \
+ socket --socketpath /var/run/gvmd.sock --xml \
+            "<create_target><name>$SCAN_NAME</name><hosts>$TARGET</hosts></create_target>" \
+ | grep -oP 'id="\K[^"]+')
+
+ echo "Created scan task with ID: $TASK_ID"
+
+ # Start scan
+ gvm-cli --gmp-username {{ openvas_admin_user }} --gmp-password {{ openvas_admin_password }} \
+ socket --socketpath /var/run/gvmd.sock --xml \
+            "<start_task task_id=\"$TASK_ID\"/>"
+
+ echo "Scan started. Monitor progress in GSA web interface."
+ dest: /usr/local/bin/vulnerability-scan.sh
+ mode: '0755'
+
+ - name: Create scheduled vulnerability scan
+ cron:
+ name: "Weekly vulnerability scan"
+ minute: "0"
+ hour: "2"
+ weekday: "0"
+ job: "/usr/local/bin/vulnerability-scan.sh {{ ansible_default_ipv4.address }}"
+
+ - name: Display OpenVAS/GVM information
+ debug:
+ msg: |
+ OpenVAS/GVM has been successfully deployed:
+ - Web Interface: https://{{ ansible_default_ipv4.address }}:443
+ - Admin Username: {{ openvas_admin_user }}
+ - Admin Password: {{ openvas_admin_password }}
+
+ Run vulnerability scans with:
+ /usr/local/bin/vulnerability-scan.sh
+
+ Weekly automated scans are configured for Sunday 2 AM.
+
+ handlers:
+ - name: restart greenbone-security-assistant
+ service:
+ name: greenbone-security-assistant
+ state: restarted
+
+ - name: restart openvas-scanner
+ service:
+ name: openvas-scanner
+ state: restarted
+
+ - name: restart openvas-manager
+ service:
+ name: openvas-manager
+ state: restarted
diff --git a/playbooks/wazuh_server.yaml b/playbooks/wazuh_server.yaml
index e69de29..fab87ff 100644
--- a/playbooks/wazuh_server.yaml
+++ b/playbooks/wazuh_server.yaml
@@ -0,0 +1,133 @@
+---
+- name: Deploy Wazuh Security Manager
+ hosts: security_servers
+ become: true
+ vars:
+ wazuh_version: "4.7.0"
+ wazuh_manager_config:
+ email_notification: false
+ log_level: 3
+
+ tasks:
+ - name: Update apt cache
+ apt:
+ update_cache: yes
+
+ - name: Install required packages
+ apt:
+ name:
+ - curl
+ - apt-transport-https
+ - lsb-release
+ - gnupg2
+ state: present
+
+ - name: Download Wazuh GPG key
+ get_url:
+ url: https://packages.wazuh.com/key/GPG-KEY-WAZUH
+ dest: /tmp/GPG-KEY-WAZUH
+
+ - name: Add Wazuh GPG key
+ apt_key:
+ file: /tmp/GPG-KEY-WAZUH
+ state: present
+
+ - name: Add Wazuh repository
+ apt_repository:
+ repo: "deb https://packages.wazuh.com/4.x/apt/ stable main"
+ state: present
+
+ - name: Install Wazuh manager
+ apt:
+ name:
+ - wazuh-manager
+ state: present
+ update_cache: yes
+
+ - name: Configure Wazuh manager
+ template:
+ src: ossec.conf.j2
+ dest: /var/ossec/etc/ossec.conf
+ backup: yes
+ owner: root
+        group: wazuh
+ mode: '0640'
+ notify: restart wazuh-manager
+
+ - name: Start and enable Wazuh manager
+ service:
+ name: wazuh-manager
+ state: started
+ enabled: yes
+
+ - name: Install Wazuh indexer
+ apt:
+ name: wazuh-indexer
+ state: present
+
+ - name: Start and enable Wazuh indexer
+ service:
+ name: wazuh-indexer
+ state: started
+ enabled: yes
+
+ - name: Install Wazuh dashboard
+ apt:
+ name: wazuh-dashboard
+ state: present
+
+ - name: Start and enable Wazuh dashboard
+ service:
+ name: wazuh-dashboard
+ state: started
+ enabled: yes
+
+ - name: Open required firewall ports
+ ufw:
+ rule: allow
+ port: "{{ item }}"
+ proto: tcp
+ loop:
+ - 1514 # Wazuh agent connection
+ - 1515 # Wazuh agent registration
+ - 55000 # Wazuh API
+ - 9200 # Wazuh indexer
+ - 443 # Wazuh dashboard
+
+ - name: Wait for services to be ready
+ wait_for:
+ port: "{{ item }}"
+ host: 127.0.0.1
+ delay: 30
+ loop:
+ - 55000
+ - 9200
+ - 443
+
+ - name: Display Wazuh information
+ debug:
+ msg: |
+ Wazuh has been successfully deployed:
+ - Manager API: https://{{ ansible_default_ipv4.address }}:55000
+ - Dashboard: https://{{ ansible_default_ipv4.address }}:443
+ - Indexer: https://{{ ansible_default_ipv4.address }}:9200
+
+ Default credentials:
+ - Username: admin
+ - Password: admin (change immediately)
+
+ handlers:
+ - name: restart wazuh-manager
+ service:
+ name: wazuh-manager
+ state: restarted
+
+ - name: restart wazuh-indexer
+ service:
+ name: wazuh-indexer
+ state: restarted
+
+ - name: restart wazuh-dashboard
+ service:
+ name: wazuh-dashboard
+ state: restarted
diff --git a/roles/promotheus/handlers/main.yaml b/roles/promotheus/handlers/main.yaml
new file mode 100644
index 0000000..48cd700
--- /dev/null
+++ b/roles/promotheus/handlers/main.yaml
@@ -0,0 +1,9 @@
+---
+- name: reload systemd
+ systemd:
+ daemon_reload: yes
+
+- name: restart prometheus
+ service:
+ name: prometheus
+ state: restarted
diff --git a/roles/promotheus/templates/alertmanager.service.j2 b/roles/promotheus/templates/alertmanager.service.j2
new file mode 100644
index 0000000..b2df47b
--- /dev/null
+++ b/roles/promotheus/templates/alertmanager.service.j2
@@ -0,0 +1,16 @@
+[Unit]
+Description=Alertmanager
+Wants=network-online.target
+After=network-online.target
+
+[Service]
+User=prometheus
+Group=prometheus
+Type=simple
+ExecStart=/opt/alertmanager-{{ alertmanager_version }}.linux-amd64/alertmanager \
+ --config.file=/etc/alertmanager/alertmanager.yml \
+ --storage.path=/var/lib/alertmanager/ \
+ --web.listen-address=0.0.0.0:9093
+
+[Install]
+WantedBy=multi-user.target
diff --git a/roles/promotheus/templates/alertmanager.yml.j2 b/roles/promotheus/templates/alertmanager.yml.j2
new file mode 100644
index 0000000..0e41017
--- /dev/null
+++ b/roles/promotheus/templates/alertmanager.yml.j2
@@ -0,0 +1,32 @@
+global:
+ smtp_smarthost: 'localhost:587'
+ smtp_from: 'alertmanager@{{ ansible_hostname }}'
+
+route:
+ group_by: ['alertname']
+ group_wait: 10s
+ group_interval: 10s
+ repeat_interval: 1h
+ receiver: 'web.hook'
+
+receivers:
+- name: 'web.hook'
+ webhook_configs:
+ - url: 'http://127.0.0.1:5001/'
+
+- name: 'security-team'
+ email_configs:
+ - to: 'security@company.com'
+    subject: 'Security Alert: {% raw %}{{ .GroupLabels.alertname }}{% endraw %}'
+    body: |
+      {% raw %}{{ range .Alerts }}
+      Alert: {{ .Annotations.summary }}
+      Description: {{ .Annotations.description }}
+      {{ end }}{% endraw %}
+
+inhibit_rules:
+ - source_match:
+ severity: 'critical'
+ target_match:
+ severity: 'warning'
+ equal: ['alertname', 'dev', 'instance']
diff --git a/roles/promotheus/templates/prometheus.service.j2 b/roles/promotheus/templates/prometheus.service.j2
new file mode 100644
index 0000000..2fc7931
--- /dev/null
+++ b/roles/promotheus/templates/prometheus.service.j2
@@ -0,0 +1,19 @@
+[Unit]
+Description=Prometheus
+Wants=network-online.target
+After=network-online.target
+
+[Service]
+User=prometheus
+Group=prometheus
+Type=simple
+ExecStart=/opt/prometheus-{{ prometheus_version }}.linux-amd64/prometheus \
+ --config.file=/etc/prometheus/prometheus.yml \
+ --storage.tsdb.path=/var/lib/prometheus/ \
+ --web.console.templates=/opt/prometheus-{{ prometheus_version }}.linux-amd64/consoles \
+ --web.console.libraries=/opt/prometheus-{{ prometheus_version }}.linux-amd64/console_libraries \
+ --web.listen-address=0.0.0.0:9090 \
+ --web.enable-lifecycle
+
+[Install]
+WantedBy=multi-user.target
diff --git a/roles/promotheus/templates/prometheus.yml.j2 b/roles/promotheus/templates/prometheus.yml.j2
new file mode 100644
index 0000000..a5203f0
--- /dev/null
+++ b/roles/promotheus/templates/prometheus.yml.j2
@@ -0,0 +1,34 @@
+global:
+ scrape_interval: 15s
+ evaluation_interval: 15s
+
+rule_files:
+ - "/etc/prometheus/security_rules.yml"
+
+alerting:
+ alertmanagers:
+ - static_configs:
+ - targets:
+ - localhost:9093
+
+scrape_configs:
+ - job_name: 'prometheus'
+ static_configs:
+ - targets: ['localhost:9090']
+
+ - job_name: 'node'
+ static_configs:
+ - targets: ['localhost:9100']
+
+ - job_name: 'wazuh'
+ static_configs:
+ - targets: ['localhost:55000']
+ metrics_path: '/api/monitoring'
+
+ - job_name: 'vault'
+ static_configs:
+ - targets: ['localhost:8200']
+ metrics_path: '/v1/sys/metrics'
+ params:
+ format: ['prometheus']
+ bearer_token_file: /etc/prometheus/vault_token
diff --git a/roles/vault/handlers/main.yaml b/roles/vault/handlers/main.yaml
new file mode 100644
index 0000000..ef3523d
--- /dev/null
+++ b/roles/vault/handlers/main.yaml
@@ -0,0 +1,9 @@
+---
+- name: reload systemd
+ systemd:
+ daemon_reload: yes
+
+- name: restart vault
+ service:
+ name: vault
+ state: restarted
diff --git a/roles/vault/templates/vault.hcl.j2 b/roles/vault/templates/vault.hcl.j2
new file mode 100644
index 0000000..0068fb5
--- /dev/null
+++ b/roles/vault/templates/vault.hcl.j2
@@ -0,0 +1,18 @@
+ui = true
+
+storage "file" {
+ path = "/opt/vault/data"
+}
+
+listener "tcp" {
+ address = "0.0.0.0:8200"
+ tls_disable = 1
+}
+
+telemetry {
+ prometheus_retention_time = "30s"
+ disable_hostname = true
+}
+
+api_addr = "http://{{ ansible_default_ipv4.address }}:8200"
+cluster_addr = "https://{{ ansible_default_ipv4.address }}:8201"
diff --git a/roles/vault/templates/vault.service.j2 b/roles/vault/templates/vault.service.j2
new file mode 100644
index 0000000..0072d21
--- /dev/null
+++ b/roles/vault/templates/vault.service.j2
@@ -0,0 +1,33 @@
+[Unit]
+Description=HashiCorp Vault
+Documentation=https://www.vaultproject.io/docs/
+Requires=network-online.target
+After=network-online.target
+ConditionFileNotEmpty=/etc/vault.d/vault.hcl
+
+[Service]
+Type=notify
+User=vault
+Group=vault
+ProtectSystem=full
+ProtectHome=read-only
+PrivateTmp=yes
+PrivateDevices=yes
+SecureBits=keep-caps
+AmbientCapabilities=CAP_IPC_LOCK
+Capabilities=CAP_IPC_LOCK+ep
+CapabilityBoundingSet=CAP_SYSLOG CAP_IPC_LOCK
+NoNewPrivileges=yes
+ExecStart=/usr/local/bin/vault server -config=/etc/vault.d/vault.hcl
+ExecReload=/bin/kill -HUP $MAINPID
+KillMode=process
+Restart=on-failure
+RestartSec=5
+TimeoutStopSec=30
+StartLimitInterval=60
+StartLimitBurst=3
+LimitNOFILE=65536
+LimitMEMLOCK=infinity
+
+[Install]
+WantedBy=multi-user.target
diff --git a/roles/wazuh/handlers/main.yaml b/roles/wazuh/handlers/main.yaml
new file mode 100644
index 0000000..a54bcd9
--- /dev/null
+++ b/roles/wazuh/handlers/main.yaml
@@ -0,0 +1,15 @@
+---
+- name: restart wazuh-manager
+ service:
+ name: wazuh-manager
+ state: restarted
+
+- name: restart wazuh-indexer
+ service:
+ name: wazuh-indexer
+ state: restarted
+
+- name: restart wazuh-dashboard
+ service:
+ name: wazuh-dashboard
+ state: restarted
diff --git a/roles/wazuh/templates/ossec.conf.j2 b/roles/wazuh/templates/ossec.conf.j2
new file mode 100644
index 0000000..0e7fa37
--- /dev/null
+++ b/roles/wazuh/templates/ossec.conf.j2
@@ -0,0 +1,121 @@
+
+
+ yes
+ yes
+ no
+ no
+ no
+ localhost
+ wazuh@{{ ansible_hostname }}
+ admin@company.com
+ {{ ansible_hostname }}
+ Wazuh manager
+
+
+
+ 3
+ 12
+
+
+
+ secure
+ 1514
+ udp
+ 131072
+
+
+
+ no
+ 1515
+ no
+ yes
+ no
+ HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH
+
+ no
+ /var/ossec/etc/sslmanager.cert
+ /var/ossec/etc/sslmanager.key
+ no
+
+
+
+ 120
+ 16384
+ 65536
+
+
+
+ yes
+
+ https://127.0.0.1:9200
+
+
+
+ /etc/wazuh-indexer/certs/root-ca.pem
+
+ /etc/wazuh-indexer/certs/wazuh.manager.pem
+ /etc/wazuh-indexer/certs/wazuh.manager-key.pem
+
+
+
+
+ yes
+ 5m
+ 6h
+ yes
+
+
+ yes
+ trusty
+ xenial
+ bionic
+ focal
+ jammy
+ 1h
+
+
+
+
+
+ syslog
+ /var/log/auth.log
+
+
+
+ syslog
+ /var/log/syslog
+
+
+
+ apache
+ /var/log/apache2/access.log
+
+
+
+
+ no
+ yes
+ yes
+ yes
+ yes
+ yes
+ yes
+ yes
+
+
+
+
+ no
+ 79200
+ yes
+
+
+ /etc,/usr/bin,/usr/sbin
+ /bin,/sbin,/boot
+
+
+ /etc/mtab
+ /etc/hosts.deny
+ /etc/mail/statistics
+
+