# orchestrates the contributor path (#766) — PR #168

name: SOC Packs — Release
# ─────────────────────────────────────────────────────────────────
# push → main
#
# Job 0 · bump — read version label from merged PR;
# bump pack_metadata.json (patch/minor/major);
# commit back to main [skip ci]
#
# Job 1 · release — build zip + create immutable GitHub release
# + upload modified xsoar_config.json as asset
# (url points at production zip — repo file
# is never modified by CI)
#
# Job 2 · catalog — rebuild pack_catalog.json from bumped metadata;
# only runs after a successful release so the
# catalog always reflects what actually shipped;
# commits back to main [skip ci]
#
# Tenant deploy is intentionally NOT here. It runs once, pre-merge,
# in soc-packs-pr-gate.yml (Job 8 · deploy-dev). Running it again
# post-merge would deploy twice on every merged PR.
#
# Deploy sources by environment:
# Dev tenant → PR Gate prerelease asset (-pr<N>/xsoar_config.json)
# PoV tenant → Production release asset (<tag>/xsoar_config.json)
# Repo file → Baseline template only (never used directly by CI)
#
# Failure modes:
# bump fails → release + catalog both skipped; metadata unchanged
# release fails → catalog skipped; catalog stays truthful at old version
# catalog fails → zip already on GitHub; re-run catalog job manually
# ─────────────────────────────────────────────────────────────────
# Triggers: every push to main, plus manual dispatch with optional overrides.
on:
  push:
    branches:
      - main
  workflow_dispatch:
    inputs:
      # Empty → auto-detect changed packs from the last commit (see Job 0).
      packs:
        description: 'Comma-separated pack names to release (e.g. soc-framework-nist-ir,soc-optimization-unified). Leave empty to auto-detect from last commit.'
        required: false
        default: ''
      # Overrides the version label read from the merged PR.
      bump_level:
        description: 'Version bump level'
        required: false
        default: 'patch'
        type: choice
        options:
          - patch
          - minor
          - major
# Default token scope: bump/catalog push commits back to main and
# the release job creates tags/releases — all need contents: write.
permissions:
  contents: write
env:
  # Root directory holding one sub-directory per content pack.
  PACKS_DIR: Packs
  # NOTE(review): DEMISTO_SDK_* are not referenced anywhere in this file —
  # presumably consumed by the tools/ scripts invoked below; confirm before
  # removing.
  DEMISTO_SDK_VERSION: "1.38.14"
  DEMISTO_SDK_IGNORE_CONTENT_WARNING: "1"
jobs:
# ── JOB 0: BUMP PACK VERSIONS ───────────────────────────────────
bump:
name: Bump pack versions
if: |
github.event_name == 'workflow_dispatch' ||
(
!contains(github.event.head_commit.message, '[skip ci]') &&
!contains(github.event.head_commit.message, '[skip release]')
)
runs-on: ubuntu-latest
permissions:
contents: write
outputs:
packs: ${{ steps.do_bump.outputs.packs }}
has_bumped: ${{ steps.do_bump.outputs.has_bumped }}
steps:
- name: Generate bot token
id: bot-token
uses: actions/create-github-app-token@v1
with:
app-id: ${{ secrets.BOT_APP_ID }}
private-key: ${{ secrets.BOT_PRIVATE_KEY }}
- uses: actions/checkout@v4
with:
fetch-depth: 0
token: ${{ steps.bot-token.outputs.token }}
- uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Resolve merged PR label
id: label
env:
GH_TOKEN: ${{ steps.bot-token.outputs.token }}
run: |
python - << 'PY'
import os, re, subprocess
msg = subprocess.check_output(
["git", "log", "-1", "--pretty=%s"], text=True
).strip()
print(f"Merge commit subject: {msg}")
match = re.search(r"#(\d+)", msg)
if not match:
print("::warning::Could not extract PR number from commit message — defaulting to patch.")
level = "patch"
else:
pr_number = match.group(1)
result = subprocess.run(
["gh", "pr", "view", pr_number, "--json", "labels",
"--jq", ".labels[].name"],
capture_output=True, text=True
)
labels = result.stdout.strip().splitlines()
print(f"PR #{pr_number} labels: {labels or '(none)'}")
if "version:major" in labels:
level = "major"
elif "version:minor" in labels:
level = "minor"
elif "version:patch" in labels:
level = "patch"
else:
level = "patch"
print(f"::warning::PR #{pr_number} had no version label — defaulting to patch. "
"Add version:patch, version:minor, or version:major next time.")
print(f"Bump level: {level}")
with open(os.environ["GITHUB_OUTPUT"], "a") as fh:
fh.write(f"bump_level={level}\n")
PY
- name: Find content-changed packs
id: changed
env:
MANUAL_PACKS: ${{ github.event.inputs.packs }}
run: |
python - << 'PY'
import json, os, subprocess
from pathlib import Path
packs_dir = Path(os.environ["PACKS_DIR"])
manual = os.environ.get("MANUAL_PACKS", "").strip()
if manual:
# workflow_dispatch: use explicitly provided pack list
packs = {p.strip() for p in manual.split(",") if p.strip()}
print("Manual pack override:", packs)
else:
# push: auto-detect from last commit
changed_files = subprocess.check_output(
["git", "diff", "--name-only", "HEAD~1", "HEAD"], text=True
).splitlines()
packs = set()
for f in changed_files:
p = Path(f)
if p.parts and p.parts[0] == str(packs_dir):
pack = p.parts[1] if len(p.parts) > 1 else None
if pack and (packs_dir / pack / "pack_metadata.json").exists():
packs.add(pack)
out = ",".join(sorted(packs))
print("Packs to bump:", out or "(none)")
with open(os.environ["GITHUB_OUTPUT"], "a") as fh:
fh.write(f"packs={out}\n")
fh.write(f"has_packs={'true' if packs else 'false'}\n")
PY
- name: Bump pack_metadata.json versions
id: do_bump
if: steps.changed.outputs.has_packs == 'true'
env:
CHANGED_PACKS: ${{ steps.changed.outputs.packs }}
BUMP_LEVEL: ${{ github.event.inputs.bump_level || steps.label.outputs.bump_level }}
run: |
python - << 'PY'
import os, subprocess, sys
from pathlib import Path
packs_dir = Path(os.environ["PACKS_DIR"])
packs = [p for p in os.environ["CHANGED_PACKS"].split(",") if p]
level = os.environ["BUMP_LEVEL"]
bumped = []
for pack in packs:
pack_dir = packs_dir / pack
if not (pack_dir / "pack_metadata.json").exists():
print(f"{pack}: no pack_metadata.json, skipping")
continue
result = subprocess.run(
["python", "tools/bump_pack_version.py", str(pack_dir), "--level", level],
capture_output=False
)
if result.returncode != 0:
print(f"::error::bump_pack_version.py failed for {pack} (exit {result.returncode})")
sys.exit(result.returncode)
bumped.append(pack)
with open(os.environ["GITHUB_OUTPUT"], "a") as fh:
fh.write(f"packs={','.join(bumped)}\n")
fh.write(f"has_bumped={'true' if bumped else 'false'}\n")
PY
- name: Commit version bumps
if: steps.do_bump.outputs.has_bumped == 'true'
env:
BUMP_LEVEL: ${{ steps.label.outputs.bump_level }}
BOT_TOKEN: ${{ steps.bot-token.outputs.token }}
run: |
git config user.name "github-actions[bot]"
git config user.email "github-actions[bot]@users.noreply.github.com"
git add Packs/*/pack_metadata.json Packs/*/xsoar_config.json
git commit -m "chore: bump pack versions (${BUMP_LEVEL}) [skip ci]"
git push https://x-access-token:${BOT_TOKEN}@github.com/${{ github.repository }} main
# ── JOB 1: BUILD ZIP + CREATE GITHUB RELEASE ────────────────────
  release:
    name: Build & release (production)
    needs: [bump]
    # Only run when the bump job actually committed new versions.
    if: needs.bump.outputs.has_bumped == 'true'
    runs-on: ubuntu-latest
    # 'main' environment — can carry protection rules / required reviewers.
    environment:
      name: main
    outputs:
      # Comma-separated names of packs that got a GitHub release this run.
      released_packs: ${{ steps.publish.outputs.released_packs }}
    steps:
      # Check out main explicitly: the bump job pushed a new commit after the
      # triggering commit, and this job must build from that bump commit.
      - uses: actions/checkout@v4
        with:
          ref: main
          fetch-depth: 0
      - uses: actions/setup-python@v5
        with:
          python-version: "3.10"
      # Diff the bump commit (HEAD) against its parent and keep only packs
      # whose pack_metadata.json version field actually changed; a pack whose
      # metadata did not exist at HEAD~1 counts as changed (new pack).
      - name: Find packs with version bumps
        id: changed
        run: |
          python - << 'PY'
          import json, os, subprocess
          from pathlib import Path
          packs_dir = Path(os.environ["PACKS_DIR"])
          changed_files = subprocess.check_output(
              ["git", "diff", "--name-only", "HEAD~1", "HEAD"], text=True
          ).splitlines()
          changed = set()
          for f in changed_files:
              p = Path(f)
              if p.parts and p.parts[0] == str(packs_dir):
                  pack = p.parts[1] if len(p.parts) > 1 else None
                  if not pack:
                      continue
                  meta = packs_dir / pack / "pack_metadata.json"
                  if not meta.exists():
                      continue
                  try:
                      old = subprocess.check_output(
                          ["git", "show", f"HEAD~1:{packs_dir}/{pack}/pack_metadata.json"],
                          stderr=subprocess.DEVNULL, text=True
                      )
                      new_text = meta.read_text()
                      old_ver = json.loads(old).get("version") or json.loads(old).get("currentVersion")
                      new_ver = json.loads(new_text).get("version") or json.loads(new_text).get("currentVersion")
                      if old_ver != new_ver:
                          changed.add(pack)
                  except subprocess.CalledProcessError:
                      changed.add(pack)
          out = ",".join(sorted(changed))
          print("Changed packs:", out or "(none)")
          with open(os.environ["GITHUB_OUTPUT"], "a") as fh:
              fh.write(f"packs={out}\n")
              fh.write(f"has_changes={'true' if changed else 'false'}\n")
          PY
      # Needed by tools/validate_shadow_mode.py (not pinned — tool-side dep).
      - name: Install PyYAML
        run: pip install pyyaml
      # NOTE(review): runs even when no packs changed; presumably cheap, but
      # could be gated on steps.changed.outputs.has_changes like publish.
      - name: Validate shadow mode (pre-release safety net)
        run: |
          python tools/validate_shadow_mode.py --all \
            --actions-list Packs/soc-optimization-unified/Lists/SOCFrameworkActions_V3/SOCFrameworkActions_V3_data.json \
            --policy Packs/soc-optimization-unified/Lists/SOCFrameworkActions_V3/shadow_mode_policy.json
      # Per changed pack: refuse if the tag already exists (releases are
      # immutable — bump the version instead), zip the pack excluding its
      # xsoar_config.json, create the GitHub release, then upload a modified
      # xsoar_config.json whose custom_packs url points at the production zip
      # (the repo copy of xsoar_config.json is never touched).
      - name: Build & publish production releases
        id: publish
        if: steps.changed.outputs.has_changes == 'true'
        env:
          CHANGED_PACKS: ${{ steps.changed.outputs.packs }}
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          python - << 'PY'
          import json, os, subprocess
          from pathlib import Path
          packs_dir = Path(os.environ["PACKS_DIR"])
          changed = [p for p in os.environ["CHANGED_PACKS"].split(",") if p]
          repo = os.environ["GITHUB_REPOSITORY"]
          dist = Path("dist")
          dist.mkdir(exist_ok=True)
          def run(cmd, **kw):
              print("+", " ".join(cmd))
              subprocess.run(cmd, check=True, **kw)
          def gh(*args, check=True):
              return subprocess.run(["gh", *args], check=check)
          released = []
          for pack in changed:
              pack_path = packs_dir / pack
              meta_path = pack_path / "pack_metadata.json"
              config_path = pack_path / "xsoar_config.json"
              if not meta_path.exists():
                  print(f"{pack}: no pack_metadata.json, skipping")
                  continue
              meta = json.loads(meta_path.read_text())
              version = meta.get("version") or meta.get("currentVersion") or ""
              if not version:
                  print(f"{pack}: no version, skipping")
                  continue
              tag = f"{pack}-v{version}"
              asset = f"{pack}-v{version}.zip"
              if gh("release", "view", tag, check=False).returncode == 0:
                  raise RuntimeError(
                      f"Release tag {tag} already exists. "
                      f"Bump pack_metadata.json version to publish a new release."
                  )
              final = dist / asset
              if final.exists():
                  final.unlink()
              run(["zip", "-r", str(final.resolve()), pack,
                   "--exclude", f"{pack}/xsoar_config.json"],
                  cwd=str(packs_dir.resolve()))
              if not final.exists():
                  raise RuntimeError(f"{pack}: zip produced no output at {final}")
              print(f"Built: {final}")
              gh("release", "create", tag, str(final),
                 "-t", f"{pack} v{version}",
                 "-n", f"Production release for {pack} v{version}")
              url = f"https://github.com/{repo}/releases/download/{tag}/{asset}"
              print(f"Published: {url}")
              if config_path.exists():
                  cfg = json.loads(config_path.read_text())
                  for entry in cfg.get("custom_packs", []):
                      if entry.get("id") == f"{pack}.zip":
                          entry["url"] = url
                          break
                  prod_config = dist / "xsoar_config.json"
                  prod_config.write_text(json.dumps(cfg, indent=2) + "\n")
                  gh("release", "upload", tag, str(prod_config), "--clobber")
                  config_asset_url = f"https://github.com/{repo}/releases/download/{tag}/xsoar_config.json"
                  print(f"Config asset: {config_asset_url}")
              else:
                  print(f"{pack}: no xsoar_config.json — skipping config asset upload")
              released.append(pack)
          with open(os.environ["GITHUB_OUTPUT"], "a") as fh:
              fh.write(f"released_packs={','.join(released)}\n")
          PY
      - name: No packs changed
        if: steps.changed.outputs.has_changes != 'true'
        run: echo "No version bumps detected — nothing to release."
# ── JOB 2: REBUILD PACK CATALOG ─────────────────────────────────
catalog:
name: Rebuild pack catalog
needs: [release]
if: needs.release.outputs.released_packs != ''
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- name: Generate bot token
id: bot-token
uses: actions/create-github-app-token@v1
with:
app-id: ${{ secrets.BOT_APP_ID }}
private-key: ${{ secrets.BOT_PRIVATE_KEY }}
- uses: actions/checkout@v4
with:
ref: main
fetch-depth: 0
token: ${{ steps.bot-token.outputs.token }}
- uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Rebuild pack_catalog.json
run: python tools/build_pack_catalog.py
- name: Commit if changed
env:
BOT_TOKEN: ${{ steps.bot-token.outputs.token }}
run: |
git config user.name "github-actions[bot]"
git config user.email "github-actions[bot]@users.noreply.github.com"
git diff --quiet pack_catalog.json || {
git add pack_catalog.json
git commit -m "chore: rebuild pack_catalog.json [skip ci]"
git push https://x-access-token:${BOT_TOKEN}@github.com/${{ github.repository }} main
}
echo "Catalog job complete."