diff --git a/.github/workflows/ansible-deploy.yml b/.github/workflows/ansible-deploy.yml new file mode 100644 index 0000000000..462f0ec337 --- /dev/null +++ b/.github/workflows/ansible-deploy.yml @@ -0,0 +1,103 @@ +name: Ansible Deployment + +on: + push: + branches: [main, master] + paths: + - 'ansible/**' + - '!ansible/docs/**' + - '.github/workflows/ansible-deploy.yml' + pull_request: + branches: [main, master] + paths: + - 'ansible/**' + - '!ansible/docs/**' + - '.github/workflows/ansible-deploy.yml' + +jobs: + lint: + name: Ansible Lint + runs-on: ubuntu-latest + defaults: + run: + working-directory: ansible + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.12' + + - name: Install Ansible tooling + run: | + python -m pip install --upgrade pip + pip install ansible ansible-lint + ansible-galaxy collection install community.docker + + - name: Run ansible-lint + run: ansible-lint playbooks/*.yml + + deploy: + name: Deploy Application + needs: lint + if: github.event_name == 'push' || github.event_name == 'pull_request' + runs-on: ubuntu-latest + defaults: + run: + working-directory: ansible + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.12' + + - name: Install Ansible runtime + run: | + python -m pip install --upgrade pip + pip install ansible + ansible-galaxy collection install community.docker + + - name: Configure SSH access + run: | + mkdir -p ~/.ssh + echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/id_rsa + chmod 600 ~/.ssh/id_rsa + ssh-keyscan -H "${{ secrets.VM_HOST }}" >> ~/.ssh/known_hosts + + - name: Build CI inventory + run: | + cat > inventory/ci-hosts.ini < /tmp/vault_pass + ansible-playbook playbooks/deploy.yml \ + -i inventory/ci-hosts.ini \ + --vault-password-file /tmp/vault_pass + + - name: Verify deployment + run: | + sleep 10 + ssh -i ~/.ssh/id_rsa 
"${{ secrets.VM_USER }}@${{ secrets.VM_HOST }}" ' + set -e + if curl -fsS "http://127.0.0.1:8000/health" >/dev/null; then + echo "Health check passed on port 8000" + elif curl -fsS "http://127.0.0.1:5000/health" >/dev/null; then + echo "Health check passed on port 5000" + else + echo "Health check failed on ports 8000 and 5000" >&2 + docker ps >&2 || true + exit 1 + fi + ' diff --git a/README.md b/README.md index 371d51f456..76f2546db1 100644 --- a/README.md +++ b/README.md @@ -3,6 +3,7 @@ [![Labs](https://img.shields.io/badge/Labs-18-blue)](#labs) [![Exam](https://img.shields.io/badge/Exam-Optional-green)](#exam-alternative) [![Duration](https://img.shields.io/badge/Duration-18%20Weeks-lightgrey)](#course-roadmap) +[![Ansible Deployment](https://github.com/sofiakulagina/DevOps-Core-Course/actions/workflows/ansible-deploy.yml/badge.svg)](https://github.com/sofiakulagina/DevOps-Core-Course/actions/workflows/ansible-deploy.yml) Master **production-grade DevOps practices** through hands-on labs. Build, containerize, deploy, monitor, and scale applications using industry-standard tools. diff --git a/ansible/docs/LAB06.md b/ansible/docs/LAB06.md new file mode 100644 index 0000000000..ecce8a0df6 --- /dev/null +++ b/ansible/docs/LAB06.md @@ -0,0 +1,314 @@ +## Overview + +In Lab 6, I upgraded the Ansible setup from Lab 5 to a production-style layout: + +- Refactored roles with `block`/`rescue`/`always` +- Added role and task tags for selective execution +- Migrated app deployment from single `docker run` style to Docker Compose +- Renamed role `app_deploy` -> `web_app` +- Added role dependency (`web_app` depends on `docker`) +- Implemented safe wipe logic with double gating (`web_app_wipe` variable + `web_app_wipe` tag) +- Added GitHub Actions workflow for lint + deployment + verification + +Tech used: Ansible, Jinja2, Docker Compose v2 module, GitHub Actions, Ansible Vault. 
+ +--- + +## Blocks & Tags + +### `common` role changes + +File: `ansible/roles/common/tasks/main.yml` + +- Package tasks are grouped in a block with tag `packages`. +- User management tasks are grouped in a block with tag `users`. +- `rescue` runs `apt-get update --fix-missing` and retries apt cache update. +- `always` writes completion logs to `/tmp/common-role.log`. +- `become: true` is applied once at block level. + +### `docker` role changes + +File: `ansible/roles/docker/tasks/main.yml` + +- Docker installation tasks grouped under tag `docker_install`. +- Docker configuration tasks grouped under tag `docker_config`. +- `rescue` waits 10 seconds, refreshes apt cache, retries GPG/repo/install. +- `always` ensures Docker service is enabled and started. + +### Tag strategy + +- Role-level tags: + - `common` role in `playbooks/provision.yml` + - `docker` role in `playbooks/provision.yml` + - `web_app`/`app_deploy` role in `playbooks/deploy.yml` +- Task-level tags: + - `packages`, `users`, `docker_install`, `docker_config`, `compose`, `web_app_wipe` + +### `--list-tags` evidence + +```bash +ANSIBLE_LOCAL_TEMP=.ansible/tmp ansible-playbook playbooks/provision.yml --list-tags +``` + +Output: + +```text +playbook: playbooks/provision.yml + + play #1 (webservers): Provision web servers TAGS: [] + TASK TAGS: [common, docker, docker_config, docker_install, packages, users] +``` + +```bash +ANSIBLE_LOCAL_TEMP=.ansible/tmp ansible-playbook playbooks/deploy.yml --list-tags +``` + +Output: + +```text +playbook: playbooks/deploy.yml + + play #1 (webservers): Deploy application TAGS: [] + TASK TAGS: [app_deploy, compose, docker_config, docker_install, web_app, web_app_wipe] +``` + +--- + +## Docker Compose Migration + +### Role rename + +- Renamed directory: `ansible/roles/app_deploy` -> `ansible/roles/web_app` +- Updated playbook reference in `ansible/playbooks/deploy.yml` + +### Compose template + +File: `ansible/roles/web_app/templates/docker-compose.yml.j2` + +- Dynamic 
service name/image/tag/ports via variables +- Dynamic environment block from `app_env` +- `restart: unless-stopped` +- Dedicated bridge network `web_app_net` + +### Role dependency + +File: `ansible/roles/web_app/meta/main.yml` + +```yaml +dependencies: + - role: docker +``` + +This guarantees Docker installation before Compose deployment. + +### Deployment implementation + +File: `ansible/roles/web_app/tasks/main.yml` + +- Includes wipe tasks first +- Creates `/opt/{{ app_name }}` project directory +- Renders `docker-compose.yml` +- Deploys stack with `community.docker.docker_compose_v2` +- Waits for app port and checks `/health` +- Uses `rescue` to report deployment failure context + +### Variables + +File: `ansible/roles/web_app/defaults/main.yml` + +- `web_app_name`, `web_app_docker_image`, `web_app_docker_tag` +- `web_app_port`, `web_app_internal_port` +- `web_app_compose_project_dir`, `web_app_docker_compose_version` +- `web_app_secret_key` (override with Vault) +- `web_app_env` +- `web_app_wipe` (default `false`) + +The role keeps compatibility with legacy variable names (`app_name`, `docker_image`, etc.) through `default(...)`, so existing Vault values continue to work. + +--- + +## Wipe Logic + +### Implementation + +Files: + +- `ansible/roles/web_app/tasks/wipe.yml` +- `ansible/roles/web_app/tasks/main.yml` + +Behavior: + +- Wipe tasks are included at the beginning of role execution. +- Wipe block runs only when `web_app_wipe | bool` is `true`. +- Wipe is tagged with `web_app_wipe`. +- Wipe removes compose project, compose file, and app directory. + +Double safety mechanism: + +1. Variable gate: `-e "web_app_wipe=true"` +2. Tag gate: `--tags web_app_wipe` + +Result: destructive cleanup is explicit and controlled. + +### Test scenarios + +1. Normal deploy (wipe should not run): + +```bash +ansible-playbook playbooks/deploy.yml +``` + +2. Wipe only: + +```bash +ansible-playbook playbooks/deploy.yml -e "web_app_wipe=true" --tags web_app_wipe +``` + +3. 
Clean reinstall (wipe -> deploy): + +```bash +ansible-playbook playbooks/deploy.yml -e "web_app_wipe=true" +``` + +4. Safety check (tag set, variable false): + +```bash +ansible-playbook playbooks/deploy.yml --tags web_app_wipe +``` + +Expected: wipe block is skipped by `when` condition. + +--- + +## CI/CD Integration + +File: `.github/workflows/ansible-deploy.yml` + +### Workflow architecture + +- Trigger on pushes/PRs affecting `ansible/**` +- Excludes `ansible/docs/**` +- Job `lint`: + - Installs `ansible`, `ansible-lint`, `community.docker` + - Runs `ansible-lint playbooks/*.yml` +- Job `deploy` (push only): + - Sets up SSH from GitHub Secrets + - Builds runtime inventory file + - Uses Vault password from secret + - Runs `ansible-playbook playbooks/deploy.yml` + - Verifies app and health endpoints via `curl` + +### Required GitHub Secrets + +- `ANSIBLE_VAULT_PASSWORD` +- `SSH_PRIVATE_KEY` +- `VM_HOST` +- `VM_USER` + +### Badge + +Added to root `README.md`: + +[![Ansible Deployment](https://github.com/sofiakulagina/DevOps-Core-Course/actions/workflows/ansible-deploy.yml/badge.svg)](https://github.com/sofiakulagina/DevOps-Core-Course/actions/workflows/ansible-deploy.yml) + +--- + +## Testing Results + +### Local syntax checks + +```bash +ANSIBLE_LOCAL_TEMP=.ansible/tmp ansible-playbook playbooks/provision.yml --syntax-check +ANSIBLE_LOCAL_TEMP=.ansible/tmp ansible-playbook playbooks/deploy.yml --syntax-check +``` + +Both returned successful syntax validation (`playbook: ...`). + +### Notes + +- `ansible-lint` is configured in CI workflow. +- On this local machine, `ansible-lint` binary was not available (`command not found`), so lint validation is delegated to GitHub Actions. + +### Runtime verification on target VM + +Use these commands after deployment: + +```bash +ssh <user>@<vm-ip> "docker ps" +ssh <user>@<vm-ip> "docker compose -f /opt/devops-app/docker-compose.yml ps" +curl -f http://<vm-ip>:8000 +curl -f http://<vm-ip>:8000/health +``` + +--- + +## Challenges & Solutions + +1. 
Sandbox blocked default Ansible temp path (`~/.ansible/tmp`) during local checks. + - Solution: run checks with `ANSIBLE_LOCAL_TEMP=.ansible/tmp` inside repo. +2. Migration from container module to Compose module required variable and template redesign. + - Solution: centralized runtime config in role defaults + Jinja2 compose template. +3. Safe cleanup needed to avoid accidental environment destruction. + - Solution: double-gated wipe logic (`variable + tag`) plus default `web_app_wipe: false`. + +--- + +## Research Answers + +### Task 1 (Blocks & Tags) + +1. What happens if `rescue` block also fails? +- The play fails; Ansible reports a failed task in `rescue`. + +2. Can you have nested blocks? +- Yes. Nested blocks are valid and useful for focused error handling. + +3. How do tags inherit within blocks? +- Tags set on a block are inherited by tasks in `block`, `rescue`, and `always` sections. + +### Task 2 (Docker Compose) + +1. `restart: always` vs `restart: unless-stopped`? +- `always`: restart even after manual stop (after daemon reboot). +- `unless-stopped`: restart automatically unless user intentionally stopped container. + +2. Compose networks vs default Docker bridge? +- Compose creates project-scoped user-defined networks with built-in DNS/service discovery. +- Default bridge is global/shared and less isolated. + +3. Can Ansible Vault variables be used in templates? +- Yes. Vault-decrypted variables are available to Jinja2 templates during playbook execution. + +### Task 3 (Wipe Logic) + +1. Why both variable and tag? +- Two independent confirmations reduce accidental destructive execution. + +2. Difference from `never` tag? +- `never` is a static tag behavior. +- Variable+tag allows explicit runtime safety logic and clean reinstall flows. + +3. Why wipe before deploy? +- Enables deterministic clean reinstall: remove old state first, then deploy fresh. + +4. Clean reinstall vs rolling update? +- Clean reinstall for drifted/broken state reset. 
+- Rolling update for minimal downtime and state continuity. + +5. How to extend wipe to images/volumes? +- Add `docker_compose_v2` cleanup options and dedicated tasks to prune project images/volumes with explicit additional guards. + +### Task 4 (CI/CD) + +1. Security implications of SSH keys in GitHub Secrets? +- If secrets leak (misconfiguration/log exposure), attackers can access infrastructure. +- Mitigate with least-privilege keys, key rotation, environment protection rules, and IP/host restrictions. + +2. Staging -> production pipeline design? +- Use separate jobs/environments: deploy to staging, run smoke tests, require manual approval, then deploy to production. + +3. What to add for rollbacks? +- Versioned image tags, release metadata, and workflow step to redeploy previous known-good tag. + +4. Why can self-hosted runner improve security? +- Private network access without exposing SSH externally, tighter boundary control, and reduced secret distribution to public runner infrastructure. 
diff --git a/ansible/inventory/hosts.ini b/ansible/inventory/hosts.ini index 0d53a641ad..d7cf91b54b 100644 --- a/ansible/inventory/hosts.ini +++ b/ansible/inventory/hosts.ini @@ -1,2 +1,2 @@ [webservers] -aws-vm ansible_host=54.198.129.202 ansible_user=ubuntu ansible_ssh_private_key_file=./labuser.pem +aws-vm ansible_host=18.212.247.163 ansible_user=ubuntu ansible_ssh_private_key_file=./labsuser.pem diff --git a/ansible/playbooks/deploy.yml b/ansible/playbooks/deploy.yml index b77f528c7a..e8474ad96a 100644 --- a/ansible/playbooks/deploy.yml +++ b/ansible/playbooks/deploy.yml @@ -1,7 +1,10 @@ --- - name: Deploy application hosts: webservers - become: yes + become: true roles: - - app_deploy \ No newline at end of file + - role: web_app + tags: + - web_app + - app_deploy diff --git a/ansible/playbooks/provision.yml b/ansible/playbooks/provision.yml index 26708d9cfa..8795723fb9 100644 --- a/ansible/playbooks/provision.yml +++ b/ansible/playbooks/provision.yml @@ -1,6 +1,11 @@ +--- - name: Provision web servers hosts: webservers roles: - - common - - docker \ No newline at end of file + - role: common + tags: + - common + - role: docker + tags: + - docker diff --git a/ansible/playbooks/site.yml b/ansible/playbooks/site.yml index e69de29bb2..63dcc1014e 100644 --- a/ansible/playbooks/site.yml +++ b/ansible/playbooks/site.yml @@ -0,0 +1,6 @@ +--- +- name: Provision infrastructure + import_playbook: provision.yml + +- name: Deploy application + import_playbook: deploy.yml diff --git a/ansible/roles/app_deploy/defaults/main.yml b/ansible/roles/app_deploy/defaults/main.yml deleted file mode 100644 index 8347f040ad..0000000000 --- a/ansible/roles/app_deploy/defaults/main.yml +++ /dev/null @@ -1,6 +0,0 @@ ---- -app_port: 5000 -restart_policy: unless-stopped -# App listens on 5002 by default; set PORT=5000 to match our port mapping -app_env: - PORT: "5000" diff --git a/ansible/roles/app_deploy/handlers/main.yml b/ansible/roles/app_deploy/handlers/main.yml deleted file mode 
100644 index 8cd76aa241..0000000000 --- a/ansible/roles/app_deploy/handlers/main.yml +++ /dev/null @@ -1,4 +0,0 @@ ---- -- name: restart app container - ansible.builtin.command: docker restart {{ app_container_name }} - listen: "restart app container" diff --git a/ansible/roles/app_deploy/tasks/main.yml b/ansible/roles/app_deploy/tasks/main.yml deleted file mode 100644 index 32f373d72e..0000000000 --- a/ansible/roles/app_deploy/tasks/main.yml +++ /dev/null @@ -1,44 +0,0 @@ ---- -- name: Log in to Docker Hub - community.docker.docker_login: - username: "{{ dockerhub_username }}" - password: "{{ dockerhub_password }}" - -- name: Pull Docker image - community.docker.docker_image: - name: "{{ docker_image }}" - tag: "{{ docker_image_tag }}" - source: pull - -- name: Remove old container (stops if running) - community.docker.docker_container: - name: "{{ app_container_name }}" - state: absent - ignore_errors: yes - -- name: Run new container - community.docker.docker_container: - name: "{{ app_container_name }}" - image: "{{ docker_image }}:{{ docker_image_tag }}" - state: started - ports: - - "{{ app_port }}:5000" - env: "{{ app_env }}" - restart_policy: "{{ restart_policy }}" - notify: restart app container - -- name: Wait for application to be ready - ansible.builtin.wait_for: - port: "{{ app_port }}" - host: 127.0.0.1 - delay: 2 - timeout: 30 - state: started - -- name: Verify health endpoint - ansible.builtin.uri: - url: "http://127.0.0.1:{{ app_port }}/health" - status_code: 200 - timeout: 5 - register: health_check - failed_when: health_check.status != 200 \ No newline at end of file diff --git a/ansible/roles/common/defaults/main.yml b/ansible/roles/common/defaults/main.yml index fb01629168..a4dc614436 100644 --- a/ansible/roles/common/defaults/main.yml +++ b/ansible/roles/common/defaults/main.yml @@ -1,6 +1,18 @@ +--- +# Packages shared across all hosts. 
common_packages: - python3-pip - curl - git - vim - - htop \ No newline at end of file + - htop + +# Optional extra users managed by the common role. +# Example: +# common_managed_users: +# - name: deploy +# groups: [sudo] +common_managed_users: [] + +# Completion marker for block/always logging. +common_completion_log: /tmp/common-role.log diff --git a/ansible/roles/common/tasks/main.yml b/ansible/roles/common/tasks/main.yml index 5a54c9d072..fe65adb116 100644 --- a/ansible/roles/common/tasks/main.yml +++ b/ansible/roles/common/tasks/main.yml @@ -1,8 +1,62 @@ -- name: Update apt cache - apt: - update_cache: yes - -- name: Install common packages - apt: - name: "{{ common_packages }}" - state: present \ No newline at end of file +--- +- name: Run common role tasks with block-level controls + become: true + block: + - name: Manage package prerequisites + tags: + - packages + block: + - name: Refresh apt cache + ansible.builtin.apt: + update_cache: true + cache_valid_time: 3600 + + - name: Install common packages + ansible.builtin.apt: + name: "{{ common_packages }}" + state: present + + rescue: + - name: Repair apt metadata by refreshing apt cache + ansible.builtin.apt: + update_cache: true + + - name: Retry apt cache update after repair + ansible.builtin.apt: + update_cache: true + + always: + - name: Log package block completion + ansible.builtin.lineinfile: + path: "{{ common_completion_log }}" + line: "packages block finished at {{ ansible_date_time.iso8601 }}" + create: true + mode: "0644" + + - name: Manage users + tags: + - users + block: + - name: Ensure remote Ansible user exists + ansible.builtin.user: + name: "{{ ansible_user }}" + shell: /bin/bash + state: present + + - name: Ensure additional managed users exist + ansible.builtin.user: + name: "{{ item.name }}" + shell: "{{ item.shell | default('/bin/bash') }}" + groups: "{{ item.groups | default(omit) }}" + append: true + state: present + loop: "{{ common_managed_users }}" + when: common_managed_users 
| length > 0 + + always: + - name: Log users block completion + ansible.builtin.lineinfile: + path: "{{ common_completion_log }}" + line: "users block finished at {{ ansible_date_time.iso8601 }}" + create: true + mode: "0644" diff --git a/ansible/roles/docker/defaults/main.yml b/ansible/roles/docker/defaults/main.yml index 254c295034..490db6fddf 100644 --- a/ansible/roles/docker/defaults/main.yml +++ b/ansible/roles/docker/defaults/main.yml @@ -1 +1,5 @@ -docker_user: ubuntu \ No newline at end of file +--- +docker_user: ubuntu +docker_apt_repo: "deb https://download.docker.com/linux/ubuntu jammy stable" +docker_gpg_url: https://download.docker.com/linux/ubuntu/gpg +docker_retry_wait_seconds: 10 diff --git a/ansible/roles/docker/handlers/main.yml b/ansible/roles/docker/handlers/main.yml index 07f166a38f..a8a0bf67c7 100644 --- a/ansible/roles/docker/handlers/main.yml +++ b/ansible/roles/docker/handlers/main.yml @@ -1,4 +1,4 @@ - name: Restart Docker - service: + ansible.builtin.service: name: docker - state: restarted \ No newline at end of file + state: restarted diff --git a/ansible/roles/docker/tasks/main.yml b/ansible/roles/docker/tasks/main.yml index 09fad751b1..d46a1c0829 100644 --- a/ansible/roles/docker/tasks/main.yml +++ b/ansible/roles/docker/tasks/main.yml @@ -1,41 +1,85 @@ -- name: Install dependencies - apt: - name: - - ca-certificates - - gnupg - state: present - -- name: Add Docker GPG key - apt_key: - url: https://download.docker.com/linux/ubuntu/gpg - state: present - -- name: Add Docker repo - apt_repository: - repo: "deb https://download.docker.com/linux/ubuntu jammy stable" - state: present - -- name: Install Docker - apt: - name: - - docker-ce - - docker-ce-cli - - containerd.io - state: present - -- name: Ensure Docker is running - service: - name: docker - state: started - enabled: true - -- name: Add user to docker group - user: - name: "{{ docker_user }}" - groups: docker - append: yes - -- name: Install python docker module - apt: - name: 
python3-docker - state: present +--- +- name: Install and configure Docker engine + become: true + block: + - name: Docker installation tasks + tags: + - docker_install + block: + - name: Update apt cache for Docker installation + ansible.builtin.apt: + update_cache: true + cache_valid_time: 3600 + + - name: Install Docker apt prerequisites + ansible.builtin.apt: + name: + - ca-certificates + - gnupg + state: present + + - name: Add Docker GPG key + ansible.builtin.apt_key: + url: "{{ docker_gpg_url }}" + state: present + + - name: Add Docker apt repository + ansible.builtin.apt_repository: + repo: "{{ docker_apt_repo }}" + state: present + + - name: Install Docker engine packages + ansible.builtin.apt: + name: + - docker-ce + - docker-ce-cli + - containerd.io + - docker-compose-plugin + - python3-docker + state: present + + rescue: + - name: Wait before retrying Docker key setup + ansible.builtin.pause: + seconds: "{{ docker_retry_wait_seconds }}" + + - name: Retry apt cache update after Docker key failure + ansible.builtin.apt: + update_cache: true + + - name: Retry Docker GPG key addition + ansible.builtin.apt_key: + url: "{{ docker_gpg_url }}" + state: present + + - name: Retry Docker apt repository configuration + ansible.builtin.apt_repository: + repo: "{{ docker_apt_repo }}" + state: present + + - name: Retry Docker engine installation + ansible.builtin.apt: + name: + - docker-ce + - docker-ce-cli + - containerd.io + - docker-compose-plugin + - python3-docker + state: present + + - name: Docker configuration tasks + tags: + - docker_config + block: + - name: Add deployment user to docker group + ansible.builtin.user: + name: "{{ docker_user }}" + groups: docker + append: true + + always: + - name: Ensure Docker service is enabled and started + ansible.builtin.service: + name: docker + state: started + enabled: true diff --git a/ansible/roles/web_app/defaults/main.yml b/ansible/roles/web_app/defaults/main.yml new file mode 100644 index 0000000000..6aac9d2b35 
--- /dev/null +++ b/ansible/roles/web_app/defaults/main.yml @@ -0,0 +1,27 @@ +--- +# Application configuration +web_app_name: "{{ app_name | default('devops-app') }}" +web_app_docker_image: "{{ docker_image | default((dockerhub_username | default('your_dockerhub_username')) ~ '/devops-info-service') }}" +web_app_docker_tag: "{{ docker_tag | default('latest') }}" +web_app_port: "{{ app_port | default(8000) }}" +web_app_internal_port: "{{ app_internal_port | default(8000) }}" + +# Docker Compose project configuration +web_app_compose_project_dir: "{{ compose_project_dir | default('/opt/' ~ web_app_name) }}" +web_app_docker_compose_version: "{{ docker_compose_version | default('3.8') }}" + +# Secrets should be overridden via Ansible Vault in group_vars/all.yml. +web_app_secret_key: "{{ app_secret_key | default('change-me-in-vault') }}" + +# App runtime environment passed to docker-compose template. +web_app_env: "{{ app_env | default({'PORT': (web_app_internal_port | string), 'APP_SECRET_KEY': web_app_secret_key}) }}" + +# Optional Docker Hub auth for private images. +web_app_dockerhub_username: "{{ dockerhub_username | default('') }}" +web_app_dockerhub_password: "{{ dockerhub_password | default('') }}" + +# Wipe Logic Control +# Set to true to remove application completely. +# Wipe only: ansible-playbook playbooks/deploy.yml -e "web_app_wipe=true" --tags web_app_wipe +# Clean install: ansible-playbook playbooks/deploy.yml -e "web_app_wipe=true" +web_app_wipe: false diff --git a/ansible/roles/web_app/handlers/main.yml b/ansible/roles/web_app/handlers/main.yml new file mode 100644 index 0000000000..2142c3274a --- /dev/null +++ b/ansible/roles/web_app/handlers/main.yml @@ -0,0 +1,2 @@ +--- +# No handlers are required for compose deployment in this role. 
diff --git a/ansible/roles/web_app/meta/main.yml b/ansible/roles/web_app/meta/main.yml new file mode 100644 index 0000000000..ff70715592 --- /dev/null +++ b/ansible/roles/web_app/meta/main.yml @@ -0,0 +1,4 @@ +--- +# Docker must exist before Compose deployment tasks run. +dependencies: + - role: docker diff --git a/ansible/roles/web_app/tasks/main.yml b/ansible/roles/web_app/tasks/main.yml new file mode 100644 index 0000000000..0081a81049 --- /dev/null +++ b/ansible/roles/web_app/tasks/main.yml @@ -0,0 +1,66 @@ +--- +# Wipe logic runs first (only if web_app_wipe=true). +- name: Include wipe tasks + ansible.builtin.include_tasks: wipe.yml + tags: + - web_app_wipe + +- name: Deploy application with Docker Compose + when: not (web_app_wipe | bool and 'web_app_wipe' in ansible_run_tags and ansible_run_tags | length == 1) + tags: + - app_deploy + - compose + - web_app_wipe + block: + - name: Log in to Docker Hub when credentials are provided + community.docker.docker_login: + username: "{{ web_app_dockerhub_username }}" + password: "{{ web_app_dockerhub_password }}" + when: + - web_app_dockerhub_username | length > 0 + - web_app_dockerhub_password | length > 0 + + - name: Create Docker Compose project directory + ansible.builtin.file: + path: "{{ web_app_compose_project_dir }}" + state: directory + mode: "0755" + + - name: Template docker-compose file + ansible.builtin.template: + src: docker-compose.yml.j2 + dest: "{{ web_app_compose_project_dir }}/docker-compose.yml" + mode: "0644" + + - name: Deploy stack with Docker Compose v2 + community.docker.docker_compose_v2: + project_src: "{{ web_app_compose_project_dir }}" + files: + - docker-compose.yml + pull: always + remove_orphans: true + recreate: auto + state: present + + - name: Wait for application port + ansible.builtin.wait_for: + host: 127.0.0.1 + port: "{{ web_app_port }}" + delay: 2 + timeout: 60 + state: started + + - name: Verify health endpoint + ansible.builtin.uri: + url: "http://127.0.0.1:{{ web_app_port 
}}/health" + status_code: 200 + timeout: 10 + register: web_app_health + failed_when: web_app_health.status != 200 + + rescue: + - name: Report deployment failure + ansible.builtin.debug: + msg: >- + Docker Compose deployment failed for {{ web_app_name }}. + Check docker compose logs in {{ web_app_compose_project_dir }}. diff --git a/ansible/roles/web_app/tasks/wipe.yml b/ansible/roles/web_app/tasks/wipe.yml new file mode 100644 index 0000000000..043a5d15a3 --- /dev/null +++ b/ansible/roles/web_app/tasks/wipe.yml @@ -0,0 +1,33 @@ +--- +- name: Wipe web application + when: web_app_wipe | bool + tags: + - web_app_wipe + block: + - name: Check if compose file exists + ansible.builtin.stat: + path: "{{ web_app_compose_project_dir }}/docker-compose.yml" + register: web_app_compose_file + + - name: Stop and remove Docker Compose project + community.docker.docker_compose_v2: + project_src: "{{ web_app_compose_project_dir }}" + files: + - docker-compose.yml + state: absent + remove_orphans: true + when: web_app_compose_file.stat.exists + + - name: Remove docker-compose.yml file + ansible.builtin.file: + path: "{{ web_app_compose_project_dir }}/docker-compose.yml" + state: absent + + - name: Remove application directory + ansible.builtin.file: + path: "{{ web_app_compose_project_dir }}" + state: absent + + - name: Log wipe completion + ansible.builtin.debug: + msg: "Application {{ web_app_name }} wiped successfully" diff --git a/ansible/roles/web_app/templates/docker-compose.yml.j2 b/ansible/roles/web_app/templates/docker-compose.yml.j2 new file mode 100644 index 0000000000..9df8d25b31 --- /dev/null +++ b/ansible/roles/web_app/templates/docker-compose.yml.j2 @@ -0,0 +1,19 @@ +version: '{{ web_app_docker_compose_version }}' + +services: + {{ web_app_name }}: + image: '{{ web_app_docker_image }}:{{ web_app_docker_tag }}' + container_name: '{{ web_app_name }}' + ports: + - '{{ web_app_port }}:{{ web_app_internal_port }}' + environment: +{% for env_key, env_value in 
web_app_env.items() %} + {{ env_key }}: '{{ env_value }}' +{% endfor %} + restart: unless-stopped + networks: + - web_app_net + +networks: + web_app_net: + driver: bridge