diff --git a/.github/workflows/core.yaml b/.github/workflows/core.yaml new file mode 100644 index 0000000..c8cd622 --- /dev/null +++ b/.github/workflows/core.yaml @@ -0,0 +1,36 @@ +name: GitHub Actions Common CI + +on: + push: + branches: + - master + - feature/* + +env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + VERSION: v1 + +jobs: + ci: + name: Checkout main repository, release GitHub Actions Common + runs-on: ubuntu-22.04 + steps: + - name: Initialize environment variables + run: | + GIT_BRANCH="${GITHUB_REF#refs/heads/}" + + if [[ "master" != "${GIT_BRANCH}" ]]; then + VERSION="${VERSION}-develop" + fi + + echo "GIT_BRANCH=${GIT_BRANCH}" >> ${GITHUB_ENV} + echo "VERSION=${VERSION}" >> ${GITHUB_ENV} + + - name: Checkout main repository + uses: actions/checkout@v4 + with: + ref: ${{ github.ref }} + + - name: Release GitHub Actions Common + run: | + ./release-service.sh ${{ env.VERSION }} ${{ env.GIT_BRANCH }} ${{ github.sha }} "" diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..663b20c --- /dev/null +++ b/.gitignore @@ -0,0 +1,7 @@ +.DS_Store +.idea/ +*.egg-info/ +*.egg +dist/ +build/ +__pycache__/ diff --git a/docs/CODEOWNERS b/docs/CODEOWNERS new file mode 100644 index 0000000..8f2a70b --- /dev/null +++ b/docs/CODEOWNERS @@ -0,0 +1,3 @@ +# These owners will be the default owners for everything in +# the repo and will be requested for review when someone opens a pull request. 
+* @anovikov-el @apanasiuk-el diff --git a/examples/requirements.txt b/examples/requirements.txt new file mode 100644 index 0000000..1f98c83 --- /dev/null +++ b/examples/requirements.txt @@ -0,0 +1,15 @@ +# === Public GitHub package === +# Installs from the v1 tag of a public GitHub repository +# No authentication required +git+https://github.com/edenlabllc/github_actions.common.git@v1#egg=github_actions.common + +# === Private GitHub package === +# Installs from v1 tag of a private GitHub repository +# Requires a GitHub Personal Access Token (with 'repo' scope) +# The token should be passed securely via environment variables +# Example usage: +# export GITHUB_TOKEN=ghp_XXXXXXXXXXXXXXXXXXXXXXXXXXXX +# pip install -r requirements.txt +# +# Do NOT hardcode your token here! +git+https://${GITHUB_TOKEN}@github.com/edenlabllc/github_actions.common.git@v1#egg=github_actions.common diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..0034813 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,45 @@ +[project] +name = "github_actions.common" +version = "v1" +description = "Common helpers and utilities for GitHub Actions used across edenlab repositories" +readme = "README.md" +license = { text = "Apache-2.0" } +requires-python = ">=3.10" +authors = [{ name = "Edenlabllc" }] +maintainers = [ + { name = "@anovikov-el" }, + { name = "@apanasiuk-el" } +] +keywords = [ + "python", "github", "actions", + "ci", "cd", "devops", "infrastructure", + "aws", "azure", "gcp", + "rmk", +] +classifiers = [ + "Intended Audience :: Developers", + "Topic :: Software Development :: Build Tools", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11" +] +dependencies = [ + "requests~=2.32.3", + "GitPython~=3.1.44", + "PyGithub==2.5.0", + "slack_sdk~=3.34.0", + "packaging~=24.2", + "utils~=1.0.2",  # NOTE(review): confirm this PyPI 'utils' package is intended — the code only uses the local github_actions.common.utils subpackage + "boto3~=1.37.18", + "botocore~=1.37.18" +] + +[build-system] +requires = 
["setuptools>=61.0", "wheel"] +build-backend = "setuptools.build_meta" + +[tool.setuptools] +package-dir = {"" = "src"} + +[tool.setuptools.packages.find] +where = ["src"] diff --git a/release-service.sh b/release-service.sh new file mode 100755 index 0000000..853db2d --- /dev/null +++ b/release-service.sh @@ -0,0 +1,31 @@ +#!/bin/bash + +set -e + +VERSION="${1}" +GIT_BRANCH="${2}" +GITHUB_SHA="${3}" +RELEASE_FILES="${4}" + +echo "Release service (only for master)." +if [[ "master" == "${GIT_BRANCH}" ]]; then + echo "Configure Git user.name and user.email." + git config user.name github-actions + git config user.email github-actions@github.com + + RELEASE_MSG="Release ${VERSION}" + + echo "Add Git tag ${VERSION}." + git tag -a "${VERSION}" -m "${RELEASE_MSG}" + git push origin "${VERSION}" -f + + if gh release view "${VERSION}" &> /dev/null; then + echo "GitHub release ${VERSION} already exists." + echo "Skipped." + else + echo "Create GitHub release ${VERSION}." + gh release create "${VERSION}" --target "${GITHUB_SHA}" --notes "${RELEASE_MSG}" ${RELEASE_FILES} + fi +else + echo "Skipped." 
+fi diff --git a/src/github_actions/__init__.py b/src/github_actions/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/github_actions/common/__init__.py b/src/github_actions/common/__init__.py new file mode 100644 index 0000000..dd33709 --- /dev/null +++ b/src/github_actions/common/__init__.py @@ -0,0 +1,48 @@ +# github_actions/common/__init__.py + +from .actions.init_project import GETTenant, ProjectInitializer, RMKConfigInitCommand +from .credentials.cluster_provider_credentials import ( + AWSConfig, AzureConfig, ClusterProviders, Credentials, EnvironmentConfig, GCPConfig +) +from .input_output.input import ArgumentParser +from .input_output.output import GitHubOutput +from .notification.slack_notification import SlackNotifier +from .providers.aws_provider.aws import ( + AWSSessionManager, EKSClusterFetcher, EBSVolumeFetcher, ECRManager, S3BucketManager +) +from .select_environment.allowed_environments import AllowEnvironments +from .select_environment.select_environment import ( + EnvironmentSelectorInterface, EnvironmentSelector, ExtendedEnvironmentSelector +) +from .utils.cmd import BaseCommand, CMDInterface +from .utils.github_environment_variables import GitHubContext +from .utils.install_rmk import RMKInstaller + + +__all__ = [ + "AWSConfig", + "AWSSessionManager", + "AllowEnvironments", + "ArgumentParser", + "AzureConfig", + "BaseCommand", + "CMDInterface", + "ClusterProviders", + "Credentials", + "EBSVolumeFetcher", + "ECRManager", + "EKSClusterFetcher", + "EnvironmentConfig", + "EnvironmentSelector", + "EnvironmentSelectorInterface", + "ExtendedEnvironmentSelector", + "GCPConfig", + "GETTenant", + "GitHubContext", + "GitHubOutput", + "ProjectInitializer", + "RMKConfigInitCommand", + "RMKInstaller", + "S3BucketManager", + "SlackNotifier", +] diff --git a/src/github_actions/common/actions/__init__.py b/src/github_actions/common/actions/__init__.py new file mode 100644 index 0000000..e69de29 diff --git 
a/src/github_actions/common/actions/init_project.py b/src/github_actions/common/actions/init_project.py new file mode 100644 index 0000000..6cdf282 --- /dev/null +++ b/src/github_actions/common/actions/init_project.py @@ -0,0 +1,78 @@ +import json +import os + +from argparse import Namespace +from git import Repo, GitCommandError + +from ..utils.cmd import BaseCommand, CMDInterface + + +class RMKConfigInitCommand(BaseCommand, CMDInterface): + def __init__(self, environment: str, args: Namespace): + super().__init__(environment) + self.cluster_provider = args.rmk_cluster_provider + self.github_token = args.github_token + self.slack_notification = "" + self.slack_channel = "" + self.slack_message_details = "" + self.slack_webhook = "" + + def execute(self): + self.run() + + def run(self): + """Configure Slack notifications if enabled.""" + os.environ["RMK_GITHUB_TOKEN"] = self.github_token + if self.slack_notification == "true": + os.environ["RMK_SLACK_WEBHOOK"] = self.slack_webhook + os.environ["RMK_SLACK_CHANNEL"] = self.slack_channel + + flags_slack_message_details = "" + if self.slack_message_details.splitlines(): + flags_slack_message_details = " ".join( + [f'--slack-message-details="{detail}"' for detail in self.slack_message_details.splitlines()] + ) + + self.run_command(f"rmk config init --cluster-provider={self.cluster_provider}" + f" --progress-bar=false --slack-notifications {flags_slack_message_details}") + else: + self.run_command(f"rmk config init --cluster-provider={self.cluster_provider} --progress-bar=false") + + +class GETTenant(BaseCommand, CMDInterface): + def __init__(self, environment: str): + super().__init__(environment) + + def execute(self) -> str: + return self.run() + + def run(self) -> str: + output = self.run_command(f"rmk --log-format=json config view", True) + rmk_config = json.loads(output) + return rmk_config["config"]["Tenant"] + + +class ProjectInitializer: + GIT_CONFIG = { + "name": "github-actions", + "email": 
"github-actions@github.com", + } + + def __init__(self, environment: str): + print("Initialize project repository.") + self.environment = environment + self.configure_git() + + def configure_git(self): + """Configure Git user settings.""" + try: + repo = Repo(".") + repo.config_writer().set_value("user", "name", self.GIT_CONFIG["name"]).release() + repo.config_writer().set_value("user", "email", self.GIT_CONFIG["email"]).release() + except GitCommandError as err: + raise ValueError(f"failed to configure Git: {err}") + + def configure_rmk_init(self, args: Namespace): + """Configure Slack notifications using SlackConfigCommand.""" + rmk_init = RMKConfigInitCommand(self.environment, args) + rmk_init.execute() diff --git a/src/github_actions/common/credentials/__init__.py b/src/github_actions/common/credentials/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/github_actions/common/credentials/cluster_provider_credentials.py b/src/github_actions/common/credentials/cluster_provider_credentials.py new file mode 100644 index 0000000..dd8dea7 --- /dev/null +++ b/src/github_actions/common/credentials/cluster_provider_credentials.py @@ -0,0 +1,139 @@ +import json +import os + +from dataclasses import dataclass, field +from typing import Dict, Optional + + +@dataclass +class AWSConfig: + AWS_ACCESS_KEY_ID: str + AWS_SECRET_ACCESS_KEY: str + AWS_REGION: str + + +@dataclass +class AzureConfig: + AZURE_CLIENT_ID: str + AZURE_CLIENT_SECRET: str + AZURE_LOCATION: str + AZURE_SUBSCRIPTION_ID: str + AZURE_TENANT_ID: str + + +@dataclass +class GCPConfig: + GOOGLE_APPLICATION_CREDENTIALS: str + GCP_REGION: str + + +@dataclass +class ClusterProviders: + aws: Optional[AWSConfig] = field(default=None) + azure: Optional[AzureConfig] = field(default=None) + gcp: Optional[GCPConfig] = field(default=None) + + +@dataclass +class EnvironmentConfig: + cluster_providers: ClusterProviders + + +class Credentials: + def __init__(self, json_data: str): + self.environments: 
Dict[str, EnvironmentConfig] = self._parse_json(json_data) + + @staticmethod + def _parse_json(json_data: str) -> Dict[str, EnvironmentConfig]: + """Parses the JSON input and validates required fields.""" + try: + data = json.loads(json_data) + if not isinstance(data, dict): + raise ValueError("invalid JSON format: expected a dictionary") + except json.JSONDecodeError as err: + raise ValueError(f"failed to parse JSON: {err}") + + environments = {} + for env_name, env_data in data.items(): + try: + cluster_providers = env_data.get("cluster_providers", {}) + + environments[env_name] = EnvironmentConfig( + cluster_providers=ClusterProviders( + aws=AWSConfig(**cluster_providers["aws"]) if "aws" in cluster_providers else None, + azure=AzureConfig(**cluster_providers["azure"]) if "azure" in cluster_providers else None, + gcp=GCPConfig(**cluster_providers["gcp"]) if "gcp" in cluster_providers else None, + ) + ) + except TypeError as err: + raise ValueError(f"invalid structure for environment '{env_name}': {err}") + + return environments + + def get_environment(self, env_name: str) -> Optional[EnvironmentConfig]: + return self.environments.get(env_name, None) + + def list_environments(self) -> list: + return list(self.environments.keys()) + + @staticmethod + def save_gcp_credentials(credentials_content: str) -> str: + """Save GCP credentials content to a file and return its path with validation.""" + if not credentials_content: + raise ValueError("GCP credentials content is empty or invalid") + + if isinstance(credentials_content, dict): + credentials_json = credentials_content + else: + try: + credentials_json = json.loads(json.dumps(credentials_content)) + if not isinstance(credentials_json, dict): + raise ValueError("invalid GCP credentials format, expected a JSON object") + except json.JSONDecodeError as err: + raise ValueError(f"failed to parse GCP credentials JSON: {err}") + + file_path = "gcp-credentials.json" + try: + with open(file_path, "w") as cred_file: + 
json.dump(credentials_json, cred_file, indent=4) + except IOError as err: + raise IOError(f"failed to write GCP credentials file: {err}") + + return os.path.abspath(file_path) + + def set_env_variables(self, env_name: str, provider: str): + """Set environment variables based on the selected cluster provider.""" + env_config = self.get_environment(env_name) + if not env_config: + raise ValueError(f"environment '{env_name}' not found in credentials values") + + providers = env_config.cluster_providers + + match provider.lower(): + case "aws": + os.environ.update({ + "AWS_ACCESS_KEY_ID": providers.aws.AWS_ACCESS_KEY_ID, + "AWS_SECRET_ACCESS_KEY": providers.aws.AWS_SECRET_ACCESS_KEY, + "AWS_REGION": providers.aws.AWS_REGION, + }) + case "azure": + os.environ.update({ + "AZURE_CLIENT_ID": providers.azure.AZURE_CLIENT_ID, + "AZURE_CLIENT_SECRET": providers.azure.AZURE_CLIENT_SECRET, + "AZURE_LOCATION": providers.azure.AZURE_LOCATION, + "AZURE_SUBSCRIPTION_ID": providers.azure.AZURE_SUBSCRIPTION_ID, + "AZURE_TENANT_ID": providers.azure.AZURE_TENANT_ID, + }) + case "gcp": + credentials_path = self.save_gcp_credentials(providers.gcp.GOOGLE_APPLICATION_CREDENTIALS) + os.environ.update({ + "GOOGLE_APPLICATION_CREDENTIALS": credentials_path, + "GCP_REGION": providers.gcp.GCP_REGION, + }) + case _: + raise ValueError(f"invalid provider '{provider}', supported providers: aws, azure, gcp") + + print(f"Credentials as environment variables set for {env_name} with cluster provider: {provider}.") + + def __repr__(self): + return f"Credentials(environments={self.environments})" diff --git a/src/github_actions/common/input_output/__init__.py b/src/github_actions/common/input_output/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/github_actions/common/input_output/input.py b/src/github_actions/common/input_output/input.py new file mode 100644 index 0000000..5ebef9d --- /dev/null +++ b/src/github_actions/common/input_output/input.py @@ -0,0 +1,28 @@ +import os 
+import argparse + + +class ArgumentParser: + class EnvDefault(argparse.Action): + def __init__(self, envvar, required=True, default=None, **kwargs): + if envvar: + if envvar in os.environ: + default = os.environ.get(envvar, default) + if required and default: + required = False + super(ArgumentParser.EnvDefault, self).__init__(default=default, required=required, metavar=envvar, **kwargs) + + def __call__(self, parser, namespace, values, option_string=None): + setattr(namespace, self.dest, values) + + def __init__(self): + self.parser = argparse.ArgumentParser() + self.setup_arguments() + + def setup_arguments(self): + raise NotImplementedError( + f"{self.__class__.__name__} must implement the setup_arguments() method to define its specific CLI arguments." + ) + + def parse_args(self): + return self.parser.parse_args() diff --git a/src/github_actions/common/input_output/output.py b/src/github_actions/common/input_output/output.py new file mode 100644 index 0000000..7a65415 --- /dev/null +++ b/src/github_actions/common/input_output/output.py @@ -0,0 +1,15 @@ +import os + +from typing import Dict + + +class GitHubOutput: + def __init__(self): + self.is_github_actions_runner = 'GITHUB_OUTPUT' in os.environ + + def output_dict(self, body: Dict[str, str]): + if self.is_github_actions_runner: + with open(os.environ['GITHUB_OUTPUT'], 'a') as f: + for key in body: print(f"{key}={body[key]}", file=f) + else: + print("Skip output counters as GitHub Actions outputs.") diff --git a/src/github_actions/common/notification/__init__.py b/src/github_actions/common/notification/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/github_actions/common/notification/slack_notification.py b/src/github_actions/common/notification/slack_notification.py new file mode 100644 index 0000000..b4e75c3 --- /dev/null +++ b/src/github_actions/common/notification/slack_notification.py @@ -0,0 +1,74 @@ +from argparse import Namespace + +from slack_sdk.webhook import 
WebhookClient +from github import Github, GithubException + +from ..utils.github_environment_variables import GitHubContext + + +class SlackNotifier: + ICONS = { + "Success": "https://img.icons8.com/doodle/48/000000/add.png", + "Failure": "https://img.icons8.com/office/40/000000/minus.png", + "Skip": "https://img.icons8.com/ios-filled/50/000000/0-degrees.png", + } + + def __init__(self, github_context: GitHubContext, args: Namespace, status, message, additional_info=None, tenant=None): + self.branch = github_context.ref_name + self.status = status + self.message = message + self.additional_info = additional_info or {} + self.tenant = tenant or "" + self.icon_url = self.ICONS.get(status, self.ICONS["Skip"]) + self.github_context = github_context + + if not args.github_token or not args.github_token.strip(): + raise ValueError("GitHub token is missing or empty") + self.github_client = Github(args.github_token) + + if not args.rmk_slack_webhook or not args.rmk_slack_webhook.strip(): + raise ValueError("Slack webhook token is missing or empty") + self.webhook_client = WebhookClient(args.rmk_slack_webhook) + + def get_action_job_url(self): + try: + repo = self.github_client.get_repo(self.github_context.repository) + runs = repo.get_workflow_runs() + for run in runs: + if str(run.id) == self.github_context.run_id: + jobs = run.jobs() + job_id = jobs[0].id if jobs.totalCount > 0 else None + if job_id: + return f"{self.github_context.get_github_url()}/actions/runs/{ self.github_context.run_id}/job/{job_id}" + except GithubException as err: + raise ValueError(f"accessing GitHub API: {err}") + + def construct_payload(self, action_job_url, action_run_by): + payload_text = ( + f"*Action run by*: {action_run_by}\n" + f"*Action job URL*: {action_job_url}\n" + f"*Tenant*: {self.tenant}\n" + f"*Branch*: {self.branch}\n" + f"*Status*: {self.status}\n" + f"*Message*: {self.message}\n" + ) + for key, value in self.additional_info.items(): + payload_text += f"*{key}*: {value}\n" + + 
return { + "username": "GitLabFlow Action", + "icon_url": self.icon_url, + "text": payload_text + } + + def notify(self): + try: + action_job_url = self.get_action_job_url() + action_run_by = self.github_context.actor \ + if self.github_context.event_name == "workflow_dispatch" else "ci-cd-fhir-user" + payload = self.construct_payload(action_job_url, action_run_by) + response = self.webhook_client.send_dict(payload) + except Exception as err: + raise ValueError(f"sending webhook request: {err}") + + return response.status_code diff --git a/src/github_actions/common/providers/__init__.py b/src/github_actions/common/providers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/github_actions/common/providers/aws_provider/__init__.py b/src/github_actions/common/providers/aws_provider/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/github_actions/common/providers/aws_provider/aws.py b/src/github_actions/common/providers/aws_provider/aws.py new file mode 100644 index 0000000..87edb8a --- /dev/null +++ b/src/github_actions/common/providers/aws_provider/aws.py @@ -0,0 +1,411 @@ +import base64 +import boto3 +import json +import mimetypes +import os +import time + +from botocore.exceptions import BotoCoreError, ClientError, NoCredentialsError, EndpointConnectionError +from collections import defaultdict +from datetime import datetime +from typing import Dict, List, Optional + + +class AWSSessionManager: + def __init__(self, region_name: str, service_name=None): + self.region_name = region_name + self.service_name = service_name + self.session = None + self.client = None + self._initialize_session() + + def _initialize_session(self): + try: + self.session = boto3.Session(region_name=self.region_name) + self.client = self.session.client(self.service_name) + except (NoCredentialsError, BotoCoreError) as err: + raise ValueError(f"failed to create AWS session for {self.service_name}: {err}") + + def get_client(self): + if not 
self.client: + raise RuntimeError(f"{self.service_name.upper()} client is not initialized.") + + return self.client + + +class EKSClusterFetcher(AWSSessionManager): + def __init__(self, region_name: str): + super().__init__(region_name, service_name="eks") + self.eks_client = self.get_client() + + def list_all_clusters(self) -> List[str]: + clusters = [] + try: + paginator = self.eks_client.get_paginator('list_clusters') + for page in paginator.paginate(): + clusters.extend(page.get('clusters', [])) + except (ClientError, EndpointConnectionError, BotoCoreError) as err: + raise ValueError(f"failed to list EKS clusters: {err}") + return clusters + + def describe_cluster(self, name: str) -> Dict: + try: + response = self.eks_client.describe_cluster(name=name) + return response.get('cluster', {}) + except self.eks_client.exceptions.ResourceNotFoundException: + print(f"Cluster '{name}' not found") + return {"name": name, "status": "NOT_FOUND"} + except (ClientError, EndpointConnectionError, BotoCoreError) as err: + print(f"Failed to describe cluster '{name}': {err}") + return {"name": name, "status": "ERROR"} + + def get_clusters_by_status(self) -> Dict[str, List[str]]: + cluster_names = self.list_all_clusters() + status_map = defaultdict(list) + + for name in cluster_names: + cluster_info = self.describe_cluster(name) + status = cluster_info.get("status", "UNKNOWN") + status_map[status].append(name) + + return dict(status_map) + + def print_eks_clusters_by_status(self): + clusters_by_status = self.get_clusters_by_status().items() + if not clusters_by_status: + print("No clusters found") + return + + for status, clusters in clusters_by_status: + print(f"Detected EKS cluster with status: {status} - {len(clusters)} clusters") + for cluster in clusters: + print(f" - {cluster}") + + +class EBSVolumeFetcher(AWSSessionManager): + def __init__(self, region_name: str): + super().__init__(region_name, service_name="ec2") + self.ec2_client = self.get_client() + + def 
list_orphaned_volumes(self) -> List[Dict]: + try: + response = self.ec2_client.describe_volumes( + Filters=[ + {"Name": "status", "Values": ["available", "error"]} + ] + ) + except (ClientError, BotoCoreError) as err: + raise RuntimeError(f"failed to fetch orphaned volumes: {err}") + + volumes = [] + for vol in response.get("Volumes", []): + tags = vol.get("Tags", []) + name_tags = [ + tag["Value"] + for tag in tags + if tag["Key"] in ["Name", "kubernetes.io/created-for/pvc/name"] + ] + + volumes.append({ + "CreateTime": vol.get("CreateTime", datetime.min).isoformat(), + "AvailabilityZone": vol.get("AvailabilityZone", ""), + "VolumeId": vol.get("VolumeId", ""), + "Name": " ".join(name_tags), + "State": vol.get("State", ""), + "VolumeType": vol.get("VolumeType", ""), + "SizeGiB": f"{vol.get('Size', 0)}GiB" + }) + + return volumes + + def print_orphaned_volumes(self): + volumes = self.list_orphaned_volumes() + if not volumes: + print("No orphaned volumes found") + return + + print("Orphaned volumes detected:") + for vol in volumes: + print( + f"{vol['CreateTime']} | {vol['AvailabilityZone']} | {vol['VolumeId']} " + f"| {vol['Name']} | {vol['State']} | {vol['VolumeType']} | {vol['SizeGiB']}" + ) + + +class S3BucketManager(AWSSessionManager): + def __init__(self, region_name: str): + super().__init__(region_name, service_name="s3") + self.s3_client = self.get_client() + self.s3_resource = boto3.resource('s3', region_name=region_name) + + def create_bucket(self, bucket_name: str): + try: + self.s3_client.create_bucket( + Bucket=bucket_name, + CreateBucketConfiguration={'LocationConstraint': self.region_name} + ) + print(f"S3 bucket '{bucket_name}' created.") + except self.s3_client.exceptions.BucketAlreadyOwnedByYou: + print(f"S3 bucket '{bucket_name}' already exists and is owned by you.") + except (ClientError, BotoCoreError) as err: + raise RuntimeError(f"failed to create S3 bucket '{bucket_name}': {err}") + + def set_public_block(self, bucket_name: str): + try: + 
self.s3_client.put_public_access_block( + Bucket=bucket_name, + PublicAccessBlockConfiguration={ + 'BlockPublicAcls': True, + 'IgnorePublicAcls': True, + 'BlockPublicPolicy': True, + 'RestrictPublicBuckets': True + } + ) + print(f"Public access block applied to bucket '{bucket_name}'.") + except (ClientError, BotoCoreError) as err: + raise RuntimeError(f"failed to block public access for bucket '{bucket_name}': {err}") + + def apply_lifecycle_policy(self, bucket_name: str, expiration_days: int): + lifecycle_config = { + "Rules": [ + { + "ID": "auto delete objects", + "Filter": {}, + "Status": "Enabled", + "Expiration": {"Days": expiration_days} + } + ] + } + + try: + self.s3_client.put_bucket_lifecycle_configuration( + Bucket=bucket_name, + LifecycleConfiguration=lifecycle_config + ) + print(f"Lifecycle policy applied to bucket '{bucket_name}'.") + except (ClientError, BotoCoreError) as err: + raise RuntimeError(f"failed to apply lifecycle policy: {err}") + + def sync_directory_to_bucket(self, local_path: str, bucket_name: str): + bucket = self.s3_resource.Bucket(bucket_name) + + for root, _, files in os.walk(local_path): + for file in files: + full_path = os.path.join(root, file) + rel_path = os.path.relpath(full_path, start=local_path) + content_type = mimetypes.guess_type(full_path)[0] or "binary/octet-stream" + + try: + bucket.upload_file( + Filename=full_path, + Key=rel_path, + ExtraArgs={'ContentType': content_type} + ) + print(f"Uploaded {rel_path} to bucket '{bucket_name}'.") + except (ClientError, BotoCoreError) as err: + raise RuntimeError(f"failed to upload '{rel_path}': {err}") + + +class ECRManager(AWSSessionManager): + IMPORTANT_SEVERITIES = {"CRITICAL", "HIGH", "MEDIUM", "UNDEFINED"} + IGNORED_SEVERITIES = {"LOW"} + + def __init__(self, region_name: str, public: bool = False): + """ + Initializes ECRLoginManager. + + Args: + region_name (str): AWS region name. + public (bool): If True, connects to AWS ECR Public service instead of private ECR. 
+ """ + service_name = "ecr-public" if public else "ecr" + super().__init__(region_name, service_name=service_name) + self.ecr_client = self.get_client() + self.public = public + + def get_login_credentials(self) -> Dict[str, str]: + """ + Retrieves login credentials for Docker to authenticate with AWS ECR. + + Returns: + A dictionary containing: + - username: usually "AWS" + - password: decoded token to be passed to Docker + - proxy_endpoint: the ECR (private or public) registry URL + """ + try: + if self.public: + # For public ECR + auth_data = self.ecr_client.get_authorization_token() + token = auth_data["authorizationData"]["authorizationToken"] + proxy_endpoint = "https://public.ecr.aws" + else: + # For private ECR + auth_data = self.ecr_client.get_authorization_token() + token = auth_data["authorizationData"][0]["authorizationToken"] + proxy_endpoint = auth_data["authorizationData"][0]["proxyEndpoint"] + + # Decode the base64 token ("AWS:") + decoded_token = base64.b64decode(token).decode("utf-8") + username, password = decoded_token.split(":", 1) + + return { + "username": username, + "password": password, + "proxy_endpoint": proxy_endpoint + } + except (ClientError, BotoCoreError) as err: + raise RuntimeError(f"failed to get ECR login credentials: {err}") + + def scan_image(self, + ecr_repository_name, + image_tag: str, skip_cves: Optional[List[str]] = None, sleep_interval: int = 3) -> Dict[str, any] | None: + """ + Continuously polls ECR image scan status until it's COMPLETE or FAILED. + + Args: + ecr_repository_name (str): Name of the ECR repository. + image_tag (str): Docker image tag to scan. + skip_cves (List[str], optional): List of CVEs to ignore. + sleep_interval (int): Polling interval in seconds. + + Returns: + dict with scan results, similar to bash logic. 
+ """ + if self.public: + print("Skipping image scan: public ECR does not support scanning.") + return { + "status": "SKIPPED", + "important_count": 0, + "skipped_cves": [], + "ignored_count": 0, + "severity_counts": {}, + "all_findings": [], + } + + try: + response = self.ecr_client.describe_repositories(repositoryNames=[ecr_repository_name]) + if not response["repositories"][0]["imageScanningConfiguration"].get("scanOnPush", False): + print(f"scanOnPush is not enabled for '{ecr_repository_name}'") + return { + "status": "SKIPPED", + "important_count": 0, + "skipped_cves": [], + "ignored_count": 0, + "severity_counts": {}, + "all_findings": [], + } + except (ClientError, BotoCoreError) as err: + raise RuntimeError(f"failed to check scanOnPush configuration: {err}") + + skip_cves = skip_cves or [] + + print("Important vulnerabilities to be scanned:", ", ".join(self.IMPORTANT_SEVERITIES)) + print("Ignored vulnerabilities:", ", ".join(self.IGNORED_SEVERITIES)) + + while True: + try: + response = self.ecr_client.describe_image_scan_findings( + repositoryName=ecr_repository_name, + imageId={"imageTag": image_tag} + ) + except self.ecr_client.exceptions.ScanNotFoundException: + print(f"Scan not yet available for tag '{image_tag}'. 
Retrying in {sleep_interval} sec...") + time.sleep(sleep_interval) + continue + except (ClientError, BotoCoreError) as err: + raise RuntimeError(f"failed to describe image scan findings: {err}") + + status = response.get("imageScanStatus", {}).get("status", "UNKNOWN") + description = response.get("imageScanStatus", {}).get("description") + + print("Current image scan status:", status) + if description and description != "null": + print(description) + + if status == "COMPLETE": + findings = response.get("imageScanFindings", {}).get("findings", []) + severity_counts = response.get("imageScanFindings", {}).get("findingSeverityCounts", {}) + + important = [f for f in findings if f.get("severity") in self.IMPORTANT_SEVERITIES] + ignored = [f for f in findings if f.get("severity") in self.IGNORED_SEVERITIES] + + # Skip CVEs + skipped_cves = [] + for cve in skip_cves: + if any(f.get("name") == cve and f.get("severity") in self.IMPORTANT_SEVERITIES for f in important): + print(f"The {cve} vulnerability is in the skip list. Skipped.") + skipped_cves.append(cve) + else: + print(f"The {cve} vulnerability has already been fixed. 
You must remove it from the skip list!") + + important_count = len(important) - len(skipped_cves) + + print(f"Important vulnerabilities total: {important_count}") + + if severity_counts: + print("All vulnerability severity totals:") + print(json.dumps(severity_counts, indent=2)) + + if findings: + print("All findings:") + print(json.dumps(findings, indent=2)) + + if important_count <= 0: + print("Image has no important vulnerabilities.") + return { + "status": "PASS", + "important_count": 0, + "skipped_cves": skipped_cves, + "ignored_count": len(ignored), + "severity_counts": severity_counts, + "all_findings": findings, + } + else: + return { + "status": "FAIL", + "important_count": important_count, + "skipped_cves": skipped_cves, + "ignored_count": len(ignored), + "severity_counts": severity_counts, + "all_findings": findings, + } + elif status == "FAILED": + return { + "status": "ERROR", + "important_count": -1, + "skipped_cves": [], + "ignored_count": 0, + "severity_counts": {}, + "all_findings": [], + } + else: + print(f"Waiting {sleep_interval} seconds for scan to complete...") + time.sleep(sleep_interval) + + def delete_image(self, repository_name: str, image_tag: str) -> None: + """ + Deletes an image by tag from the given ECR repository. + + Args: + repository_name (str): Name of the ECR repository. + image_tag (str): Tag of the Docker image to delete. + + Returns: + bool: True if image deleted successfully, False if there were deletion failures. + + Raises: + RuntimeError: If the AWS API call fails completely. 
+ """ + try: + response = self.ecr_client.batch_delete_image( + repositoryName=repository_name, + imageIds=[{"imageTag": image_tag}] + ) + except (ClientError, BotoCoreError) as err: + raise RuntimeError(f"failed to delete image '{image_tag}' from '{repository_name}': {err}") + + failures = response.get("failures", []) + if failures: + failure_messages = json.dumps(failures, indent=2) + raise RuntimeError(f"partial deletion failure for image '{image_tag}': {failure_messages}") diff --git a/src/github_actions/common/select_environment/__init__.py b/src/github_actions/common/select_environment/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/github_actions/common/select_environment/allowed_environments.py b/src/github_actions/common/select_environment/allowed_environments.py new file mode 100644 index 0000000..25cafa3 --- /dev/null +++ b/src/github_actions/common/select_environment/allowed_environments.py @@ -0,0 +1,14 @@ +from argparse import Namespace + + +class AllowEnvironments: + def __init__(self, args: Namespace, environment: str): + self.args = args + self.environment = environment + + def validate(self): + environments = self.args.allowed_environments.split(",") + if len([env for env in environments if env == self.environment]) == 0: + raise ValueError(f"environment {self.environment} is not allowed") + + print(f"Environment {self.environment} is allowed") diff --git a/src/github_actions/common/select_environment/select_environment.py b/src/github_actions/common/select_environment/select_environment.py new file mode 100644 index 0000000..5abd779 --- /dev/null +++ b/src/github_actions/common/select_environment/select_environment.py @@ -0,0 +1,56 @@ +import re + +from abc import ABC, abstractmethod +from git import Repo + +from ..utils.github_environment_variables import GitHubContext + + +class EnvironmentSelectorInterface(ABC): + @abstractmethod + def select_environment(self, github_context: GitHubContext) -> str: + pass + + +class 
EnvironmentSelector(EnvironmentSelectorInterface): + TASK_NUM_REGEXP = r"[a-z]+-\d+" + SEMVER_REGEXP = r"v\d+\.\d+\.\d+(-rc)?$" + + PREFIX_FEATURE_BRANCH = "feature" + PREFIX_RELEASE_BRANCH = "release" + + SELECT_FEATURE_BRANCHES = fr"{PREFIX_FEATURE_BRANCH}/{TASK_NUM_REGEXP}" + SELECT_RELEASE_BRANCHES = fr"{PREFIX_RELEASE_BRANCH}/{TASK_NUM_REGEXP}|{PREFIX_RELEASE_BRANCH}/{SEMVER_REGEXP}" + + SELECT_ORIGIN_FEATURE_BRANCHES = fr"origin/{PREFIX_FEATURE_BRANCH}/{TASK_NUM_REGEXP}" + SELECT_ORIGIN_RELEASE_BRANCHES = fr"origin/{PREFIX_RELEASE_BRANCH}/{TASK_NUM_REGEXP}|origin/{PREFIX_RELEASE_BRANCH}/{SEMVER_REGEXP}" + + SELECT_ALL_BRANCHES = fr"{SELECT_FEATURE_BRANCHES}|{SELECT_RELEASE_BRANCHES}" + SELECT_ORIGIN_ALL_BRANCHES = fr"{SELECT_ORIGIN_FEATURE_BRANCHES}|{SELECT_ORIGIN_RELEASE_BRANCHES}" + + def select_environment(self, github_context: GitHubContext) -> str: + if re.match(r"^(develop|staging|production)$", github_context.ref_name, re.IGNORECASE): + return github_context.ref_name + + if re.match(EnvironmentSelector.SELECT_FEATURE_BRANCHES, github_context.ref_name, re.IGNORECASE): + return "develop" + + if re.match(EnvironmentSelector.SELECT_RELEASE_BRANCHES, github_context.ref_name, re.IGNORECASE): + if re.search(EnvironmentSelector.SEMVER_REGEXP, github_context.ref_name, re.IGNORECASE): + if "-rc" in github_context.ref_name: + return "staging" + return "production" + return "staging" + + raise ValueError(f"environment '{github_context.ref_name}' not allowed for environment selection") + + +class ExtendedEnvironmentSelector(EnvironmentSelector): + def select_environment(self, github_context: GitHubContext) -> str: + if github_context.event_name == "pull_request": + repo = Repo(".") + github_context.ref_name = repo.active_branch.name + + if github_context.ref_name.startswith("hotfix/"): + return "production" + return super().select_environment(github_context) diff --git a/src/github_actions/common/utils/__init__.py b/src/github_actions/common/utils/__init__.py new 
file mode 100644 index 0000000..e69de29 diff --git a/src/github_actions/common/utils/cmd.py b/src/github_actions/common/utils/cmd.py new file mode 100644 index 0000000..f49dbd3 --- /dev/null +++ b/src/github_actions/common/utils/cmd.py @@ -0,0 +1,46 @@ +import subprocess + +from abc import ABC, abstractmethod +from argparse import Namespace + +from ..notification.slack_notification import SlackNotifier +from .github_environment_variables import GitHubContext + + +class CMDInterface(ABC): + @abstractmethod + def execute(self): + pass + + +class BaseCommand(ABC): + def __init__(self, environment: str | None): + self.environment = environment + + @abstractmethod + def run(self): + pass + + @staticmethod + def run_command(cmd: str, capture_output: bool = False): + try: + if capture_output: + result = subprocess.run(cmd, shell=True, check=True, text=True, stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + return result.stdout.strip() + else: + subprocess.run(cmd, shell=True, check=True) + return None + except subprocess.CalledProcessError as err: + raise ValueError(f"command '{cmd}' failed with exit code {err.returncode}") + + def notify_slack(self, github_context: GitHubContext, args: Namespace, status, message, + additional_info=None, tenant=None, slack_message_log_output: bool = True): + notifier = SlackNotifier(github_context, args, status=status, message=message, + additional_info=additional_info, tenant=tenant) + response_code = notifier.notify() + print(f"Slack notification sent with response code: {response_code}") + if slack_message_log_output: + print(f"Slack notification - Status: {status}, Environment: {self.environment}, Message: {message}") + else: + print(f"Slack notification - Status: {status}, Environment: {self.environment}") diff --git a/src/github_actions/common/utils/github_environment_variables.py b/src/github_actions/common/utils/github_environment_variables.py new file mode 100644 index 0000000..a83b378 --- /dev/null +++ 
@dataclass
class GitHubContext:
    """Typed snapshot of the GITHUB_* environment variables of a workflow run.

    Instances are normally built with :meth:`from_env`. All fields are kept as
    strings, exactly as GitHub Actions exposes them.
    """

    actor: str
    api_url: str
    base_ref: str
    event_name: str
    head_ref: str
    ref: str
    ref_name: str
    ref_type: str
    repository: str
    repository_owner: str
    run_attempt: str
    run_id: str
    run_number: str
    server_url: str
    sha: str

    @staticmethod
    def from_env(github_custom_ref="", github_custom_ref_name="") -> "GitHubContext":
        """Build a context from the GITHUB_* environment variables.

        Args:
            github_custom_ref: Overrides GITHUB_REF when non-empty.
            github_custom_ref_name: Overrides GITHUB_REF_NAME when non-empty.

        Raises:
            ValueError: If any required GITHUB_* variable is unset.
        """
        required_env_vars = [
            "GITHUB_ACTOR",
            "GITHUB_API_URL",
            "GITHUB_BASE_REF",
            "GITHUB_EVENT_NAME",
            "GITHUB_HEAD_REF",
            "GITHUB_REF",
            "GITHUB_REF_NAME",
            "GITHUB_REF_TYPE",
            "GITHUB_REPOSITORY",
            "GITHUB_REPOSITORY_OWNER",
            "GITHUB_RUN_ATTEMPT",
            "GITHUB_RUN_ID",
            "GITHUB_RUN_NUMBER",
            "GITHUB_SERVER_URL",
            "GITHUB_SHA",
        ]

        missing_vars = [var for var in required_env_vars if os.getenv(var) is None]
        if missing_vars:
            raise ValueError(f"missing required environment variables: {', '.join(missing_vars)}")

        return GitHubContext(
            actor=os.getenv("GITHUB_ACTOR"),
            api_url=os.getenv("GITHUB_API_URL"),
            base_ref=os.getenv("GITHUB_BASE_REF"),
            event_name=os.getenv("GITHUB_EVENT_NAME"),
            head_ref=os.getenv("GITHUB_HEAD_REF"),
            ref=github_custom_ref if github_custom_ref else os.getenv("GITHUB_REF"),
            ref_name=github_custom_ref_name if github_custom_ref_name else os.getenv("GITHUB_REF_NAME"),
            ref_type=os.getenv("GITHUB_REF_TYPE"),
            repository=os.getenv("GITHUB_REPOSITORY"),
            repository_owner=os.getenv("GITHUB_REPOSITORY_OWNER"),
            run_attempt=os.getenv("GITHUB_RUN_ATTEMPT"),
            run_id=os.getenv("GITHUB_RUN_ID"),
            run_number=os.getenv("GITHUB_RUN_NUMBER"),
            server_url=os.getenv("GITHUB_SERVER_URL"),
            sha=os.getenv("GITHUB_SHA"),
        )

    def to_list(self) -> List[str]:
        """Return context attributes as a list of "key: value" strings."""
        return [
            f"actor: {self.actor}",
            f"api_url: {self.api_url}",
            f"base_ref: {self.base_ref}",
            f"event_name: {self.event_name}",
            f"head_ref: {self.head_ref}",
            f"ref: {self.ref}",
            f"ref_name: {self.ref_name}",
            f"ref_type: {self.ref_type}",
            f"repository: {self.repository}",
            f"repository_owner: {self.repository_owner}",
            f"run_attempt: {self.run_attempt}",
            f"run_id: {self.run_id}",
            f"run_number: {self.run_number}",
            f"server_url: {self.server_url}",
            f"sha: {self.sha}",
        ]

    def to_string(self) -> str:
        """Return context attributes as a single formatted string."""
        return " | ".join(self.to_list())

    def search_key(self, key: str) -> Optional[str]:
        """Quick search for a specific key in the context attributes."""
        context_dict = self.__dict__
        return context_dict.get(key, None)

    def validate_repository_format(self) -> bool:
        """Check if the repository follows the format 'owner/repo'."""
        return "/" in self.repository and len(self.repository.split("/")) == 2

    def get_github_url(self) -> str:
        """Generate the GitHub repository URL."""
        # rstrip guards against a trailing slash in GITHUB_SERVER_URL, which
        # would otherwise yield a double slash in the composed URL.
        return f"{self.server_url.rstrip('/')}/{self.repository}"

    def get_env_as_dict(self) -> Dict[str, str]:
        """Return all attributes as a dictionary."""
        return self.__dict__

    def get_repository_name(self) -> str:
        """Extract and return the repository name from 'owner/repo'."""
        if self.validate_repository_format():
            return self.repository.split("/")[1]
        return "Invalid Repository Format"

    def get_action_job_api_url(self) -> str:
        """Generate the GitHub Actions job API URL.

        GITHUB_API_URL is documented with a trailing slash (see the header
        comment of this module), so normalize before joining to avoid a
        double slash in the resulting URL.
        """
        base = self.api_url.rstrip("/")
        return f"{base}/repos/{self.repository}/actions/runs/{self.run_id}/attempts/{self.run_attempt}/jobs"
"""Generate the GitHub Actions job API URL.""" + return f"{self.api_url}/repos/{self.repository}/actions/runs/{self.run_id}/attempts/{self.run_attempt}/jobs" diff --git a/src/github_actions/common/utils/install_rmk.py b/src/github_actions/common/utils/install_rmk.py new file mode 100644 index 0000000..67d7235 --- /dev/null +++ b/src/github_actions/common/utils/install_rmk.py @@ -0,0 +1,38 @@ +import subprocess +import requests + +from argparse import Namespace +from packaging import version + + +class RMKInstaller: + def __init__(self, args: Namespace): + self.version = args.rmk_version + self.url = args.rmk_download_url + self.verify_rmk_version() + self.install_rmk() + + def verify_rmk_version(self): + print("Verifying RMK installation version...") + if self.version != "latest": + if version.parse('v0.45.2') > version.parse(self.version): + raise Exception(f"version {self.version} of RMK is not correct, " + + "the version for RMK must be at least v0.45.2 or greater") + + def install_rmk(self): + print("Installing RMK.") + try: + response = requests.get(self.url) + response.raise_for_status() + except requests.RequestException as err: + raise Exception(f"downloading RMK installer file:\n{err}") + + try: + subprocess.run( + ["bash", "-s", "--", self.version], + check=True, + text=True, + input=response.text + ) + except subprocess.CalledProcessError as err: + raise Exception(f"installing RMK:\n{err}")