diff --git a/.github/actions/validate-enrollment/action.yml b/.github/actions/validate-enrollment/action.yml index be2ec482c..4efc498b2 100644 --- a/.github/actions/validate-enrollment/action.yml +++ b/.github/actions/validate-enrollment/action.yml @@ -5,6 +5,10 @@ inputs: source_repo: description: 'Source repository in owner/repo format' required: true + install_mode: + description: 'Installation mode: per-org (config repo) or per-repo (in-repo config)' + required: false + default: 'per-org' outputs: name: @@ -18,6 +22,7 @@ runs: shell: bash env: SOURCE_REPO: ${{ inputs.source_repo }} + INSTALL_MODE: ${{ inputs.install_mode }} run: | set -euo pipefail : "${SOURCE_REPO:?SOURCE_REPO is required}" @@ -31,19 +36,27 @@ runs: echo "::error::source_repo owner does not match org" exit 1 fi - REPO_NAME="${SOURCE_REPO#*/}" - if [[ ! -f config.yaml ]]; then - echo "::error::config.yaml not found" - exit 1 - fi - if ! command -v yq &> /dev/null; then - echo "::error::yq command not found" + if [[ "${INSTALL_MODE}" != "per-org" && "${INSTALL_MODE}" != "per-repo" ]]; then + echo "::error::Invalid install_mode '${INSTALL_MODE}': must be 'per-org' or 'per-repo'" exit 1 fi - ENABLED=$(yq ".repos.\"$REPO_NAME\".enabled" config.yaml) - if [[ "$ENABLED" != "true" ]]; then - echo "::error::repo is not enabled in config.yaml" - exit 1 + REPO_NAME="${SOURCE_REPO#*/}" + if [[ "${INSTALL_MODE}" == "per-repo" ]]; then + echo "Per-repo mode — skipping config.yaml enrollment check (self-enrolled)" + else + if [[ ! -f config.yaml ]]; then + echo "::error::config.yaml not found" + exit 1 + fi + if ! 
command -v yq &> /dev/null; then + echo "::error::yq command not found" + exit 1 + fi + ENABLED=$(REPO_NAME="${REPO_NAME}" yq 'env(REPO_NAME) as $name | .repos[$name].enabled' config.yaml) + if [[ "$ENABLED" != "true" ]]; then + echo "::error::repo is not enabled in config.yaml" + exit 1 + fi fi echo "Validation passed for ${SOURCE_REPO}" diff --git a/.github/workflows/reusable-code.yml b/.github/workflows/reusable-code.yml index b443708b0..13898e643 100644 --- a/.github/workflows/reusable-code.yml +++ b/.github/workflows/reusable-code.yml @@ -24,6 +24,10 @@ on: required: false type: string default: 'latest' + install_mode: + required: false + type: string + default: 'per-org' secrets: FULLSEND_GCP_WIF_PROVIDER: required: true @@ -55,9 +59,15 @@ jobs: sparse-checkout: | internal/scaffold/fullsend-repo/ - - name: Prepare workspace (upstream defaults + org overrides) + - name: Prepare workspace (upstream defaults + org/repo overrides) + env: + INSTALL_MODE: ${{ inputs.install_mode }} run: | set -euo pipefail + if [[ "${INSTALL_MODE}" != "per-org" && "${INSTALL_MODE}" != "per-repo" ]]; then + echo "::error::Invalid install_mode '${INSTALL_MODE}': must be 'per-org' or 'per-repo'" + exit 1 + fi SRC=".defaults/internal/scaffold/fullsend-repo" LAYERED_DIRS="agents skills schemas harness policies scripts env" for dir in ${LAYERED_DIRS}; do @@ -66,11 +76,15 @@ jobs: cp -r "${SRC}/${dir}/." "${dir}/" fi done + CUSTOM_BASE="customized" + if [[ "${INSTALL_MODE}" == "per-repo" ]]; then + CUSTOM_BASE=".fullsend/customized" + fi for dir in ${LAYERED_DIRS}; do - if [[ -d "customized/${dir}" ]]; then - find "customized/${dir}" -type f ! -name '.gitkeep' -print0 \ + if [[ -d "${CUSTOM_BASE}/${dir}" ]]; then + find "${CUSTOM_BASE}/${dir}" -type f ! 
-name '.gitkeep' -print0 \ | while IFS= read -r -d '' f; do - rel="${f#customized/}" + rel="${f#"${CUSTOM_BASE}"/}" mkdir -p "$(dirname "${rel}")" cp "${f}" "${rel}" done @@ -85,6 +99,7 @@ jobs: uses: fullsend-ai/fullsend/.github/actions/validate-enrollment@v0 with: source_repo: ${{ inputs.source_repo }} + install_mode: ${{ inputs.install_mode }} - name: Mint coder token id: app-token diff --git a/.github/workflows/reusable-dispatch.yml b/.github/workflows/reusable-dispatch.yml new file mode 100644 index 000000000..553225836 --- /dev/null +++ b/.github/workflows/reusable-dispatch.yml @@ -0,0 +1,406 @@ +# Reusable dispatch workflow for per-repo installation mode. +# Routes events to the appropriate stage reusable workflow via conditional +# workflow_call jobs. This is the per-repo equivalent of the per-org +# dispatch.yml + thin caller pair. +# +# Flow: shim (per-repo) → reusable-dispatch.yml → reusable-{stage}.yml +# Nesting: 3 levels of workflow_call (within GitHub's 4-level limit) +# +# Security: all user-controlled inputs (comment body, labels, usernames) +# are passed via env: variables, not interpolated in run: blocks. 
+name: Dispatch + +on: + workflow_call: + inputs: + event_action: + description: 'The event action (github.event.action) forwarded by the shim' + required: true + type: string + install_mode: + description: 'Installation mode: per-repo (default) or per-org' + required: false + type: string + default: 'per-repo' + mint_url: + description: 'Token mint URL for OIDC token exchange' + required: true + type: string + gcp_region: + description: 'GCP region for Vertex AI' + required: true + type: string + fullsend_version: + description: 'Fullsend CLI version to use' + required: false + type: string + default: 'latest' + secrets: + FULLSEND_GCP_WIF_PROVIDER: + required: false + FULLSEND_GCP_PROJECT_ID: + required: true + +jobs: + route: + name: Route + runs-on: ubuntu-latest + permissions: + contents: read + pull-requests: read + outputs: + stage: ${{ steps.route.outputs.stage }} + trigger_source: ${{ steps.route.outputs.trigger_source }} + event_payload: ${{ steps.payload.outputs.event_payload }} + steps: + - name: Checkout caller repository + uses: actions/checkout@v6 + with: + persist-credentials: false + sparse-checkout: .fullsend/config.yaml + sparse-checkout-cone-mode: false + + - name: Determine stage + id: route + env: + EVENT_NAME: ${{ github.event_name }} + EVENT_ACTION: ${{ inputs.event_action }} + COMMENT_BODY: ${{ github.event.comment.body }} + COMMENT_USER_TYPE: ${{ github.event.comment.user.type }} + COMMENT_USER_LOGIN: ${{ github.event.comment.user.login }} + COMMENT_AUTHOR_ASSOC: ${{ github.event.comment.author_association }} + ISSUE_LABELS: ${{ join(github.event.issue.labels.*.name, ',') }} + PR_LABELS: ${{ join(github.event.pull_request.labels.*.name, ',') }} + ISSUE_HAS_PR: ${{ github.event.issue.pull_request && 'true' || 'false' }} + ISSUE_USER_LOGIN: ${{ github.event.issue.user.login }} + REVIEW_STATE: ${{ github.event.review.state }} + REVIEW_USER_LOGIN: ${{ github.event.review.user.login }} + TRIGGERING_LABEL: ${{ github.event.label.name }} + 
PR_HEAD_REPO: ${{ github.event.pull_request.head.repo.full_name }} + PR_BASE_REPO: ${{ github.event.pull_request.base.repo.full_name }} + ORG_NAME: ${{ github.repository_owner }} + run: | + set -euo pipefail + + STAGE="" + TRIGGER_SOURCE="" + + is_authorized() { + case "${COMMENT_AUTHOR_ASSOC}" in + OWNER|MEMBER|COLLABORATOR) return 0 ;; + *) return 1 ;; + esac + } + + is_issue_author() { + [[ "${COMMENT_USER_LOGIN}" == "${ISSUE_USER_LOGIN}" ]] + } + + has_label() { + local needle="$1" + local csv="${2:-${ISSUE_LABELS}}" + IFS=',' read -ra labels <<< "${csv}" + for l in "${labels[@]}"; do + [[ "$l" == "$needle" ]] && return 0 + done + return 1 + } + + COMMAND="" + if [[ -n "${COMMENT_BODY:-}" ]]; then + COMMAND="$(printf '%s\n' "${COMMENT_BODY}" | head -1 | awk '{print $1}')" + fi + + case "${EVENT_NAME}" in + issue_comment) + case "${COMMAND}" in + /triage) + STAGE="triage" + ;; + /code) + if [[ "${ISSUE_HAS_PR}" == "false" ]]; then + STAGE="code" + fi + ;; + /review) + STAGE="review" + ;; + /fix) + if [[ "${ISSUE_HAS_PR}" == "true" ]]; then + if [[ "${COMMENT_USER_TYPE}" != "Bot" ]] && is_authorized; then + STAGE="fix" + TRIGGER_SOURCE="${COMMENT_USER_LOGIN}" + fi + fi + ;; + /retro|/fullsend) + if [[ "${COMMENT_USER_TYPE}" != "Bot" ]] && is_authorized; then + if [[ "${COMMAND}" == "/fullsend" ]]; then + SECOND_WORD="$(printf '%s\n' "${COMMENT_BODY}" | head -1 | awk '{print $2}')" + if [[ "${SECOND_WORD}" == "retro" ]]; then + STAGE="retro" + fi + else + STAGE="retro" + fi + fi + ;; + *) + if has_label "needs-info" && ! 
has_label "type/feature"; then + if [[ "${COMMENT_USER_TYPE}" != "Bot" ]]; then + if [[ "${COMMENT_AUTHOR_ASSOC}" != "NONE" ]] || is_issue_author; then + STAGE="triage" + fi + fi + fi + ;; + esac + ;; + + issues) + if [[ "${EVENT_ACTION}" == "labeled" ]]; then + if [[ "${TRIGGERING_LABEL}" == "ready-to-code" ]]; then + STAGE="code" + elif [[ "${TRIGGERING_LABEL}" == "ready-for-review" ]]; then + STAGE="review" + fi + fi + ;; + + pull_request_target) + case "${EVENT_ACTION}" in + opened|synchronize|ready_for_review) + STAGE="review" + ;; + closed) + STAGE="retro" + ;; + esac + ;; + + pull_request_review) + if [[ "${EVENT_ACTION}" == "submitted" && "${REVIEW_STATE}" == "changes_requested" ]]; then + REVIEW_BOT="${ORG_NAME}-review[bot]" + if [[ "${REVIEW_USER_LOGIN}" == "${REVIEW_BOT}" ]]; then + if [[ -n "${PR_HEAD_REPO}" && -n "${PR_BASE_REPO}" ]]; then + if [[ "${PR_HEAD_REPO}" == "${PR_BASE_REPO}" ]]; then + if ! has_label "fullsend-no-fix" "${PR_LABELS}"; then + STAGE="fix" + TRIGGER_SOURCE="${REVIEW_USER_LOGIN}" + fi + fi + fi + fi + fi + ;; + esac + + if [[ -z "${STAGE}" ]]; then + echo "No stage matched — skipping dispatch" + echo "stage=" >> "${GITHUB_OUTPUT}" + exit 0 + fi + + echo "Routed to stage: ${STAGE}" + echo "stage=${STAGE}" >> "${GITHUB_OUTPUT}" + echo "trigger_source=${TRIGGER_SOURCE}" >> "${GITHUB_OUTPUT}" + + - name: Validate routed stage + if: steps.route.outputs.stage != '' + env: + STAGE: ${{ steps.route.outputs.stage }} + TRIGGER_SOURCE: ${{ steps.route.outputs.trigger_source }} + run: | + set -euo pipefail + + if [[ ! "$STAGE" =~ ^[a-z][a-z0-9_-]*$ ]]; then + echo "::error::Invalid stage name: must start with lowercase letter and contain only [a-z0-9_-]" + exit 1 + fi + + if [[ -n "${TRIGGER_SOURCE:-}" ]]; then + if [[ ! 
"$TRIGGER_SOURCE" =~ ^[a-zA-Z0-9_-]+(\[bot\])?$ ]]; then + echo "::error::Invalid trigger_source format: must be alphanumeric with optional [bot] suffix" + exit 1 + fi + fi + + - name: Check kill switch + if: steps.route.outputs.stage != '' + run: | + set -euo pipefail + if [[ -f .fullsend/config.yaml ]]; then + KILL_SWITCH=$(yq '.kill_switch // false' .fullsend/config.yaml) + if [[ "$KILL_SWITCH" == "true" ]]; then + echo "::error::Kill switch is active — all agent dispatch halted" + echo "::error::Set kill_switch: false in .fullsend/config.yaml to resume" + exit 1 + fi + fi + + - name: Check role is enabled + if: steps.route.outputs.stage != '' + env: + STAGE: ${{ steps.route.outputs.stage }} + run: | + set -euo pipefail + if [[ ! -f .fullsend/config.yaml ]]; then + exit 0 + fi + STAGE_ROLE="$STAGE" + case "$STAGE" in + code) STAGE_ROLE="coder" ;; + retro|prioritize) STAGE_ROLE="fullsend" ;; + esac + ROLES=$(yq '.roles[]' .fullsend/config.yaml 2>/dev/null || echo "") + if [[ -n "$ROLES" ]] && ! 
echo "$ROLES" | grep -Fqx "$STAGE_ROLE"; then + echo "::error::Stage '$STAGE' (role: $STAGE_ROLE) is not in configured roles — dispatch blocked" + exit 1 + fi + + - name: Block fork PRs for fix stage + if: steps.route.outputs.stage == 'fix' && github.event.issue.pull_request + env: + GH_TOKEN: ${{ github.token }} + SOURCE_REPO: ${{ github.repository }} + PR_NUMBER: ${{ github.event.issue.number }} + run: | + set -euo pipefail + REPOS=$(gh api "repos/$SOURCE_REPO/pulls/$PR_NUMBER" \ + --jq '[.head.repo.full_name, .base.repo.full_name] | @tsv' 2>/dev/null) || { + echo "::error::Could not determine PR repos — blocking fix for safety" + exit 1 + } + HEAD_REPO=$(printf '%s' "$REPOS" | cut -f1) + BASE_REPO=$(printf '%s' "$REPOS" | cut -f2) + if [[ "$HEAD_REPO" != "$BASE_REPO" ]]; then + echo "::error::Fork PR detected (head=$HEAD_REPO, base=$BASE_REPO) — fix agent blocked" + exit 1 + fi + + - name: Build event payload + id: payload + if: steps.route.outputs.stage != '' + run: | + set -euo pipefail + EVENT_PAYLOAD=$(jq -c '{ + issue: (.issue // null | if . then {number, html_url} else null end), + pull_request: (.pull_request // null | if . then {number, html_url, + head: {ref: .head.ref, repo: {full_name: .head.repo.full_name}}, + base: {ref: .base.ref, repo: {full_name: .base.repo.full_name}}} else null end), + comment: (.comment // null | if . 
then {body: .body[:4096]} else null end) + }' "$GITHUB_EVENT_PATH") || { + echo "::error::Failed to extract event payload from GITHUB_EVENT_PATH" + exit 1 + } + if [[ -z "${EVENT_PAYLOAD}" || "${EVENT_PAYLOAD}" == "null" ]]; then + echo "::error::Event payload is empty after extraction" + exit 1 + fi + DELIM="PAYLOAD_$(openssl rand -hex 8)" + { + echo "event_payload<<${DELIM}" + echo "${EVENT_PAYLOAD}" + echo "${DELIM}" + } >> "${GITHUB_OUTPUT}" + + triage: + name: Triage + needs: route + if: needs.route.outputs.stage == 'triage' + uses: fullsend-ai/fullsend/.github/workflows/reusable-triage.yml@v0 + with: + event_type: ${{ github.event_name }} + source_repo: ${{ github.repository }} + event_payload: ${{ needs.route.outputs.event_payload }} + install_mode: ${{ inputs.install_mode }} + mint_url: ${{ inputs.mint_url }} + + gcp_region: ${{ inputs.gcp_region }} + fullsend_version: ${{ inputs.fullsend_version }} + secrets: + FULLSEND_GCP_WIF_PROVIDER: ${{ secrets.FULLSEND_GCP_WIF_PROVIDER }} + + + FULLSEND_GCP_PROJECT_ID: ${{ secrets.FULLSEND_GCP_PROJECT_ID }} + + code: + name: Code + needs: route + if: needs.route.outputs.stage == 'code' + uses: fullsend-ai/fullsend/.github/workflows/reusable-code.yml@v0 + with: + event_type: ${{ github.event_name }} + source_repo: ${{ github.repository }} + event_payload: ${{ needs.route.outputs.event_payload }} + install_mode: ${{ inputs.install_mode }} + mint_url: ${{ inputs.mint_url }} + + gcp_region: ${{ inputs.gcp_region }} + fullsend_version: ${{ inputs.fullsend_version }} + secrets: + FULLSEND_GCP_WIF_PROVIDER: ${{ secrets.FULLSEND_GCP_WIF_PROVIDER }} + + + FULLSEND_GCP_PROJECT_ID: ${{ secrets.FULLSEND_GCP_PROJECT_ID }} + + review: + name: Review + needs: route + if: needs.route.outputs.stage == 'review' + uses: fullsend-ai/fullsend/.github/workflows/reusable-review.yml@v0 + with: + event_type: ${{ github.event_name }} + source_repo: ${{ github.repository }} + event_payload: ${{ needs.route.outputs.event_payload }} + 
install_mode: ${{ inputs.install_mode }} + mint_url: ${{ inputs.mint_url }} + + gcp_region: ${{ inputs.gcp_region }} + fullsend_version: ${{ inputs.fullsend_version }} + secrets: + FULLSEND_GCP_WIF_PROVIDER: ${{ secrets.FULLSEND_GCP_WIF_PROVIDER }} + + + FULLSEND_GCP_PROJECT_ID: ${{ secrets.FULLSEND_GCP_PROJECT_ID }} + + fix: + name: Fix + needs: route + if: needs.route.outputs.stage == 'fix' + uses: fullsend-ai/fullsend/.github/workflows/reusable-fix.yml@v0 + with: + event_type: ${{ github.event_name }} + source_repo: ${{ github.repository }} + event_payload: ${{ needs.route.outputs.event_payload }} + trigger_source: ${{ needs.route.outputs.trigger_source }} + install_mode: ${{ inputs.install_mode }} + mint_url: ${{ inputs.mint_url }} + + gcp_region: ${{ inputs.gcp_region }} + fullsend_version: ${{ inputs.fullsend_version }} + secrets: + FULLSEND_GCP_WIF_PROVIDER: ${{ secrets.FULLSEND_GCP_WIF_PROVIDER }} + + + FULLSEND_GCP_PROJECT_ID: ${{ secrets.FULLSEND_GCP_PROJECT_ID }} + + retro: + name: Retro + needs: route + if: needs.route.outputs.stage == 'retro' + uses: fullsend-ai/fullsend/.github/workflows/reusable-retro.yml@v0 + with: + event_type: ${{ github.event_name }} + source_repo: ${{ github.repository }} + event_payload: ${{ needs.route.outputs.event_payload }} + install_mode: ${{ inputs.install_mode }} + mint_url: ${{ inputs.mint_url }} + + gcp_region: ${{ inputs.gcp_region }} + fullsend_version: ${{ inputs.fullsend_version }} + secrets: + FULLSEND_GCP_WIF_PROVIDER: ${{ secrets.FULLSEND_GCP_WIF_PROVIDER }} + + + FULLSEND_GCP_PROJECT_ID: ${{ secrets.FULLSEND_GCP_PROJECT_ID }} diff --git a/.github/workflows/reusable-fix.yml b/.github/workflows/reusable-fix.yml index 479453761..a21a3af9e 100644 --- a/.github/workflows/reusable-fix.yml +++ b/.github/workflows/reusable-fix.yml @@ -36,6 +36,10 @@ on: required: false type: string default: 'latest' + install_mode: + required: false + type: string + default: 'per-org' secrets: FULLSEND_GCP_WIF_PROVIDER: required: true 
@@ -67,9 +71,15 @@ jobs: sparse-checkout: | internal/scaffold/fullsend-repo/ - - name: Prepare workspace (upstream defaults + org overrides) + - name: Prepare workspace (upstream defaults + org/repo overrides) + env: + INSTALL_MODE: ${{ inputs.install_mode }} run: | set -euo pipefail + if [[ "${INSTALL_MODE}" != "per-org" && "${INSTALL_MODE}" != "per-repo" ]]; then + echo "::error::Invalid install_mode '${INSTALL_MODE}': must be 'per-org' or 'per-repo'" + exit 1 + fi SRC=".defaults/internal/scaffold/fullsend-repo" LAYERED_DIRS="agents skills schemas harness policies scripts env" for dir in ${LAYERED_DIRS}; do @@ -78,11 +88,15 @@ jobs: cp -r "${SRC}/${dir}/." "${dir}/" fi done + CUSTOM_BASE="customized" + if [[ "${INSTALL_MODE}" == "per-repo" ]]; then + CUSTOM_BASE=".fullsend/customized" + fi for dir in ${LAYERED_DIRS}; do - if [[ -d "customized/${dir}" ]]; then - find "customized/${dir}" -type f ! -name '.gitkeep' -print0 \ + if [[ -d "${CUSTOM_BASE}/${dir}" ]]; then + find "${CUSTOM_BASE}/${dir}" -type f ! 
-name '.gitkeep' -print0 \ | while IFS= read -r -d '' f; do - rel="${f#customized/}" + rel="${f#"${CUSTOM_BASE}"/}" mkdir -p "$(dirname "${rel}")" cp "${f}" "${rel}" done @@ -97,6 +111,7 @@ jobs: uses: fullsend-ai/fullsend/.github/actions/validate-enrollment@v0 with: source_repo: ${{ inputs.source_repo }} + install_mode: ${{ inputs.install_mode }} - name: Mint coder token id: app-token diff --git a/.github/workflows/reusable-retro.yml b/.github/workflows/reusable-retro.yml index 7f39d0260..b7a4ece33 100644 --- a/.github/workflows/reusable-retro.yml +++ b/.github/workflows/reusable-retro.yml @@ -24,6 +24,10 @@ on: required: false type: string default: 'latest' + install_mode: + required: false + type: string + default: 'per-org' secrets: FULLSEND_GCP_WIF_PROVIDER: required: true @@ -53,9 +57,15 @@ jobs: sparse-checkout: | internal/scaffold/fullsend-repo/ - - name: Prepare workspace (upstream defaults + org overrides) + - name: Prepare workspace (upstream defaults + org/repo overrides) + env: + INSTALL_MODE: ${{ inputs.install_mode }} run: | set -euo pipefail + if [[ "${INSTALL_MODE}" != "per-org" && "${INSTALL_MODE}" != "per-repo" ]]; then + echo "::error::Invalid install_mode '${INSTALL_MODE}': must be 'per-org' or 'per-repo'" + exit 1 + fi SRC=".defaults/internal/scaffold/fullsend-repo" LAYERED_DIRS="agents skills schemas harness policies scripts env" for dir in ${LAYERED_DIRS}; do @@ -64,11 +74,15 @@ jobs: cp -r "${SRC}/${dir}/." "${dir}/" fi done + CUSTOM_BASE="customized" + if [[ "${INSTALL_MODE}" == "per-repo" ]]; then + CUSTOM_BASE=".fullsend/customized" + fi for dir in ${LAYERED_DIRS}; do - if [[ -d "customized/${dir}" ]]; then - find "customized/${dir}" -type f ! -name '.gitkeep' -print0 \ + if [[ -d "${CUSTOM_BASE}/${dir}" ]]; then + find "${CUSTOM_BASE}/${dir}" -type f ! 
-name '.gitkeep' -print0 \ | while IFS= read -r -d '' f; do - rel="${f#customized/}" + rel="${f#"${CUSTOM_BASE}"/}" mkdir -p "$(dirname "${rel}")" cp "${f}" "${rel}" done @@ -83,13 +97,14 @@ jobs: uses: fullsend-ai/fullsend/.github/actions/validate-enrollment@v0 with: source_repo: ${{ inputs.source_repo }} + install_mode: ${{ inputs.install_mode }} - name: Mint retro token id: app-token uses: fullsend-ai/fullsend/.github/actions/mint-token@v0 with: role: retro - repos: ${{ steps.repo-parts.outputs.name }},.fullsend + repos: ${{ inputs.install_mode == 'per-repo' && steps.repo-parts.outputs.name || format('{0},.fullsend', steps.repo-parts.outputs.name) }} mint_url: ${{ inputs.mint_url }} - name: Checkout target repository diff --git a/.github/workflows/reusable-review.yml b/.github/workflows/reusable-review.yml index 448c32cf4..25ba69082 100644 --- a/.github/workflows/reusable-review.yml +++ b/.github/workflows/reusable-review.yml @@ -24,6 +24,10 @@ on: required: false type: string default: 'latest' + install_mode: + required: false + type: string + default: 'per-org' secrets: FULLSEND_GCP_WIF_PROVIDER: required: true @@ -54,9 +58,15 @@ jobs: sparse-checkout: | internal/scaffold/fullsend-repo/ - - name: Prepare workspace (upstream defaults + org overrides) + - name: Prepare workspace (upstream defaults + org/repo overrides) + env: + INSTALL_MODE: ${{ inputs.install_mode }} run: | set -euo pipefail + if [[ "${INSTALL_MODE}" != "per-org" && "${INSTALL_MODE}" != "per-repo" ]]; then + echo "::error::Invalid install_mode '${INSTALL_MODE}': must be 'per-org' or 'per-repo'" + exit 1 + fi SRC=".defaults/internal/scaffold/fullsend-repo" LAYERED_DIRS="agents skills schemas harness policies scripts env" for dir in ${LAYERED_DIRS}; do @@ -65,11 +75,15 @@ jobs: cp -r "${SRC}/${dir}/." 
"${dir}/" fi done + CUSTOM_BASE="customized" + if [[ "${INSTALL_MODE}" == "per-repo" ]]; then + CUSTOM_BASE=".fullsend/customized" + fi for dir in ${LAYERED_DIRS}; do - if [[ -d "customized/${dir}" ]]; then - find "customized/${dir}" -type f ! -name '.gitkeep' -print0 \ + if [[ -d "${CUSTOM_BASE}/${dir}" ]]; then + find "${CUSTOM_BASE}/${dir}" -type f ! -name '.gitkeep' -print0 \ | while IFS= read -r -d '' f; do - rel="${f#customized/}" + rel="${f#"${CUSTOM_BASE}"/}" mkdir -p "$(dirname "${rel}")" cp "${f}" "${rel}" done @@ -84,6 +98,7 @@ jobs: uses: fullsend-ai/fullsend/.github/actions/validate-enrollment@v0 with: source_repo: ${{ inputs.source_repo }} + install_mode: ${{ inputs.install_mode }} - name: Mint review token id: app-token diff --git a/.github/workflows/reusable-triage.yml b/.github/workflows/reusable-triage.yml index 59652f021..d0b360ad2 100644 --- a/.github/workflows/reusable-triage.yml +++ b/.github/workflows/reusable-triage.yml @@ -24,6 +24,10 @@ on: required: false type: string default: 'latest' + install_mode: + required: false + type: string + default: 'per-org' secrets: FULLSEND_GCP_WIF_PROVIDER: required: true @@ -53,9 +57,15 @@ jobs: sparse-checkout: | internal/scaffold/fullsend-repo/ - - name: Prepare workspace (upstream defaults + org overrides) + - name: Prepare workspace (upstream defaults + org/repo overrides) + env: + INSTALL_MODE: ${{ inputs.install_mode }} run: | set -euo pipefail + if [[ "${INSTALL_MODE}" != "per-org" && "${INSTALL_MODE}" != "per-repo" ]]; then + echo "::error::Invalid install_mode '${INSTALL_MODE}': must be 'per-org' or 'per-repo'" + exit 1 + fi SRC=".defaults/internal/scaffold/fullsend-repo" LAYERED_DIRS="agents skills schemas harness policies scripts env" for dir in ${LAYERED_DIRS}; do @@ -64,11 +74,15 @@ jobs: cp -r "${SRC}/${dir}/." 
"${dir}/" fi done + CUSTOM_BASE="customized" + if [[ "${INSTALL_MODE}" == "per-repo" ]]; then + CUSTOM_BASE=".fullsend/customized" + fi for dir in ${LAYERED_DIRS}; do - if [[ -d "customized/${dir}" ]]; then - find "customized/${dir}" -type f ! -name '.gitkeep' -print0 \ + if [[ -d "${CUSTOM_BASE}/${dir}" ]]; then + find "${CUSTOM_BASE}/${dir}" -type f ! -name '.gitkeep' -print0 \ | while IFS= read -r -d '' f; do - rel="${f#customized/}" + rel="${f#"${CUSTOM_BASE}"/}" mkdir -p "$(dirname "${rel}")" cp "${f}" "${rel}" done @@ -83,6 +97,7 @@ jobs: uses: fullsend-ai/fullsend/.github/actions/validate-enrollment@v0 with: source_repo: ${{ inputs.source_repo }} + install_mode: ${{ inputs.install_mode }} - name: Mint triage token id: app-token diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0aa829640..c2c39cfce 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,7 +7,7 @@ repos: - id: end-of-file-fixer - id: trailing-whitespace - id: detect-private-key - exclude: "internal/layers/secrets_test\\.go$|internal/security/scanner_test\\.go$|tests/.*test_.*\\.py$" + exclude: "internal/layers/secrets_test\\.go$|internal/security/scanner_test\\.go$|internal/dispatch/gcf/provisioner_test\\.go$|tests/.*test_.*\\.py$" - id: check-added-large-files args: ["--maxkb=1000"] - id: check-merge-conflict diff --git a/action.yml b/action.yml index f8949bbff..535ad7648 100644 --- a/action.yml +++ b/action.yml @@ -23,6 +23,10 @@ inputs: target-repo: description: Path to target repo checkout (default GITHUB_WORKSPACE/target-repo). default: '' + github_token: + description: >- + GitHub token for authenticated API calls (avoids 60 req/hour unauthenticated rate limit). 
+ default: ${{ github.token }} runs: using: composite @@ -33,6 +37,7 @@ runs: VERSION: ${{ inputs.version }} RUNNER_OS: ${{ runner.os }} RUNNER_ARCH: ${{ runner.arch }} + GH_TOKEN: ${{ inputs.github_token }} run: | set -euo pipefail @@ -84,7 +89,9 @@ runs: esac if [[ "${VERSION}" == "latest" ]]; then - TAG="$(retry_curl -fsSL -H "Accept: application/vnd.github+json" \ + TAG="$(retry_curl -fsSL \ + -H "Accept: application/vnd.github+json" \ + -H "Authorization: Bearer ${GH_TOKEN}" \ "https://api.github.com/repos/fullsend-ai/fullsend/releases/latest" | jq -r '.tag_name')" if [[ -z "${TAG}" || "${TAG}" == "null" ]]; then echo "::error::Could not resolve latest release tag" diff --git a/internal/cli/admin.go b/internal/cli/admin.go index 4925f77b6..61f175e66 100644 --- a/internal/cli/admin.go +++ b/internal/cli/admin.go @@ -5,8 +5,10 @@ import ( "context" "fmt" "io" + "net/url" "os" "os/exec" + "regexp" "sort" "strconv" "strings" @@ -23,6 +25,7 @@ import ( "github.com/fullsend-ai/fullsend/internal/inference" "github.com/fullsend-ai/fullsend/internal/inference/vertex" "github.com/fullsend-ai/fullsend/internal/layers" + "github.com/fullsend-ai/fullsend/internal/scaffold" "github.com/fullsend-ai/fullsend/internal/ui" ) @@ -86,6 +89,43 @@ func validateOrgName(org string) error { return nil } +// githubOwnerPattern matches valid GitHub usernames and org names +// (alphanumeric and single hyphens only, no dots or underscores). +var githubOwnerPattern = regexp.MustCompile(`^[a-zA-Z0-9](-?[a-zA-Z0-9])*$`) + +// githubRepoPattern matches valid GitHub repository names +// (alphanumeric, hyphens, dots, and underscores). +var githubRepoPattern = regexp.MustCompile(`^[a-zA-Z0-9]([a-zA-Z0-9._-]*[a-zA-Z0-9])?$`) + +// rolePattern validates agent role names (lowercase alphanumeric, hyphens, underscores). +var rolePattern = regexp.MustCompile(`^[a-z][a-z0-9_-]*$`) + +// perOrgOnlyFlags are flags that only apply to per-org mode. 
+var perOrgOnlyFlags = []string{ + "skip-app-setup", "vendor-fullsend-binary", "enroll-all", "enroll-none", + "mint-provider", "mint-project", "mint-region", "mint-source-dir", + "skip-mint-deploy", "force-mint-deploy", "public", +} + +// perRepoOnlyFlags are flags that only apply to per-repo mode. +var perRepoOnlyFlags = []string{ + "mint-url", "scaffold-customized", +} + +// parseAgentRoles splits a comma-separated agents string into a validated role list. +func parseAgentRoles(agents string) ([]string, error) { + var roles []string + for _, entry := range strings.Split(agents, ",") { + if trimmed := strings.TrimSpace(entry); trimmed != "" { + if !rolePattern.MatchString(trimmed) { + return nil, fmt.Errorf("invalid role name %q: must match %s", trimmed, rolePattern.String()) + } + roles = append(roles, trimmed) + } + } + return roles, nil +} + func newInstallCmd() *cobra.Command { var agents string var dryRun bool @@ -103,14 +143,43 @@ func newInstallCmd() *cobra.Command { var mintSkipDeploy bool var mintForceDeploy bool var publicApps bool + // Per-repo flags. + var mintURL string + var scaffoldCustomized bool cmd := &cobra.Command{ - Use: "install ", - Short: "Install fullsend in a GitHub organization", - Long: "Sets up the fullsend agentic development pipeline for a GitHub organization, including app creation, config repo, workflows, secrets, and repo enrollment.", - Args: cobra.ExactArgs(1), + Use: "install ", + Short: "Install fullsend in an organization or repository", + Long: `Sets up the fullsend agentic development pipeline. + +Per-org mode (argument is an org name, e.g. "acme"): + Creates the .fullsend config repo, per-role GitHub Apps, token mint, + shim workflows, secrets, and repo enrollment. + +Per-repo mode (argument is owner/repo, e.g. "acme/widget"): + Bootstraps a single repository with the shim workflow and .fullsend/ + configuration directory. 
No config repo or cross-repo dispatch needed.`, + Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { - org := args[0] + arg := args[0] + if strings.Contains(arg, "/") { + for _, name := range perOrgOnlyFlags { + if cmd.Flags().Changed(name) { + return fmt.Errorf("--%s is only valid for per-org installation (fullsend admin install )", name) + } + } + return runPerRepoInstall(cmd.Context(), arg, agents, mintURL, gcpRegion, + gcpProject, gcpWIFProvider, + scaffoldCustomized, dryRun) + } + + for _, name := range perRepoOnlyFlags { + if cmd.Flags().Changed(name) { + return fmt.Errorf("--%s is only valid for per-repo installation (fullsend admin install )", name) + } + } + + org := arg if err := validateOrgName(org); err != nil { return err } @@ -129,12 +198,9 @@ func newInstallCmd() *cobra.Command { printer.Header("Installing fullsend for " + org) printer.Blank() - // Parse roles from --agents flag. - var roles []string - for _, entry := range strings.Split(agents, ",") { - if trimmed := strings.TrimSpace(entry); trimmed != "" { - roles = append(roles, trimmed) - } + roles, err := parseAgentRoles(agents) + if err != nil { + return err } // Validate mint provider (only required for real installs, not dry-run). @@ -229,13 +295,23 @@ func newInstallCmd() *cobra.Command { } printer.Blank() + // Pre-copy PEM secrets for shared public apps before app setup. + var sharedSlugs map[string]string + if mintProject != "" && !skipAppSetup { + slugs, err := copySharedAppPEMs(ctx, client, printer, org, roles, mintProject, mintRegion) + if err != nil { + return err + } + sharedSlugs = slugs + } + // Collect agent credentials via app setup. 
var agentCreds []layers.AgentCredentials if !skipAppSetup { if err := ensureConfigRepoExists(ctx, client, printer, org); err != nil { return err } - creds, err := runAppSetup(ctx, client, printer, org, roles, mintProject, publicApps) + creds, err := runAppSetup(ctx, client, printer, org, roles, mintProject, publicApps, sharedSlugs) if err != nil { return err } @@ -262,10 +338,200 @@ func newInstallCmd() *cobra.Command { cmd.Flags().BoolVar(&mintSkipDeploy, "skip-mint-deploy", false, "skip Cloud Function deployment, reuse existing mint URL") cmd.Flags().BoolVar(&mintForceDeploy, "force-mint-deploy", false, "force Cloud Function redeployment even if unchanged") cmd.Flags().BoolVar(&publicApps, "public", false, "create public (unlisted) GitHub Apps installable by other orgs") + // Per-repo flags. + cmd.Flags().StringVar(&mintURL, "mint-url", "", "token mint URL for OIDC token exchange (per-repo mode)") + cmd.Flags().BoolVar(&scaffoldCustomized, "scaffold-customized", false, "create .fullsend/customized/ directory structure (per-repo mode)") return cmd } +func runPerRepoInstall(ctx context.Context, repoFullName, agents, mintURL, gcpRegion, + gcpProject, gcpWIFProvider string, + scaffoldCustomized, dryRun bool) error { + if strings.Contains(repoFullName, "://") || strings.HasPrefix(repoFullName, "www.") { + return fmt.Errorf("expected owner/repo format, got a URL — use just the owner/repo portion (e.g. 
acme/widget)") + } + parts := strings.SplitN(repoFullName, "/", 2) + if len(parts) != 2 || parts[0] == "" || parts[1] == "" { + return fmt.Errorf("repo must be in owner/repo format, got %q", repoFullName) + } + owner, repo := parts[0], parts[1] + if !githubOwnerPattern.MatchString(owner) { + return fmt.Errorf("invalid owner name %q: must contain only alphanumeric characters and hyphens", owner) + } + if !githubRepoPattern.MatchString(repo) { + return fmt.Errorf("invalid repo name %q: must contain only alphanumeric characters, hyphens, dots, or underscores", repo) + } + + if mintURL == "" { + return fmt.Errorf("--mint-url is required for per-repo installation") + } + parsedMintURL, err := url.Parse(mintURL) + if err != nil || parsedMintURL.Scheme != "https" || parsedMintURL.Host == "" { + scheme := "" + if parsedMintURL != nil { + scheme = parsedMintURL.Scheme + } + return fmt.Errorf("--mint-url must be a valid HTTPS URL (got scheme=%q)", scheme) + } + if gcpRegion == "" { + return fmt.Errorf("--gcp-region is required for per-repo installation") + } + if gcpProject == "" { + return fmt.Errorf("--gcp-project is required for per-repo installation") + } + roles, err := parseAgentRoles(agents) + if err != nil { + return err + } + + token, err := resolveToken() + if err != nil { + return err + } + + client := gh.New(token) + printer := ui.New(os.Stdout) + + printer.Banner() + printer.Blank() + printer.Header("Installing per-repo fullsend for " + repoFullName) + printer.Blank() + + cfg := config.NewPerRepoConfig(roles) + if err := cfg.Validate(); err != nil { + return fmt.Errorf("invalid config: %w", err) + } + + shimContent, err := scaffold.PerRepoShimTemplate() + if err != nil { + return fmt.Errorf("loading per-repo shim template: %w", err) + } + + cfgYAML, err := cfg.Marshal() + if err != nil { + return fmt.Errorf("marshaling per-repo config: %w", err) + } + + var files []forge.TreeFile + files = append(files, forge.TreeFile{ + Path: ".github/workflows/fullsend.yml", + 
Content: shimContent, + Mode: "100644", + }) + files = append(files, forge.TreeFile{ + Path: ".fullsend/config.yaml", + Content: cfgYAML, + Mode: "100644", + }) + + if scaffoldCustomized { + for _, dir := range scaffold.PerRepoCustomizedDirs() { + files = append(files, forge.TreeFile{ + Path: dir + "/.gitkeep", + Content: []byte(""), + Mode: "100644", + }) + } + } + + needsWIFProvision := gcpWIFProvider == "" + + repoVars := map[string]string{ + "FULLSEND_MINT_URL": mintURL, + "FULLSEND_GCP_REGION": gcpRegion, + } + + if dryRun { + printer.StepInfo("Dry run — no changes will be made") + printer.Blank() + if needsWIFProvision { + printer.StepInfo("Would provision WIF infrastructure in GCP project " + gcpProject) + printer.StepInfo(fmt.Sprintf(" Service account: fullsend-mint@%s.iam.gserviceaccount.com", gcpProject)) + printer.StepInfo(" WIF pool: fullsend-pool") + printer.StepInfo(fmt.Sprintf(" WIF provider: %s", gcf.BuildRepoProviderID(owner, repo))) + printer.StepInfo(fmt.Sprintf(" Repo restriction: %s/%s", owner, repo)) + printer.Blank() + } + for _, f := range files { + printer.StepDone(fmt.Sprintf("Would write: %s (%d bytes)", f.Path, len(f.Content))) + } + printer.Blank() + printer.StepInfo("Would set repository variables:") + for _, name := range sortedStringMapKeys(repoVars) { + printer.StepInfo(fmt.Sprintf(" %s = %s", name, repoVars[name])) + } + secretNames := []string{"FULLSEND_GCP_PROJECT_ID", "FULLSEND_GCP_WIF_PROVIDER"} + printer.StepInfo(fmt.Sprintf("Would set %d repository secrets:", len(secretNames))) + for _, name := range secretNames { + printer.StepInfo(fmt.Sprintf(" %s", name)) + } + return nil + } + + if err := checkPerRepoScopes(ctx, client, printer); err != nil { + return err + } + + if needsWIFProvision { + printer.StepStart("Provisioning WIF infrastructure") + provisioner := gcf.NewProvisioner(gcf.Config{ + ProjectID: gcpProject, + GitHubOrgs: []string{owner}, + Repo: owner + "/" + repo, + }, gcf.NewLiveGCFClient()) + var provErr error + 
gcpWIFProvider, provErr = provisioner.ProvisionWIF(ctx) + if provErr != nil { + printer.StepFail("WIF provisioning failed") + return fmt.Errorf("provisioning WIF: %w", provErr) + } + printer.StepDone("WIF infrastructure ready") + printer.StepInfo("IAM policy changes may take up to 7 minutes to propagate") + printer.StepInfo("Agent workflows that authenticate via WIF may fail until propagation completes") + } + + repoSecrets := map[string]string{ + "FULLSEND_GCP_PROJECT_ID": gcpProject, + "FULLSEND_GCP_WIF_PROVIDER": gcpWIFProvider, + } + + printer.StepStart("Writing per-repo scaffold files") + committed, err := client.CommitFiles(ctx, owner, repo, + "chore: initialize fullsend per-repo installation", files) + if err != nil { + printer.StepFail("Failed to write scaffold files") + return fmt.Errorf("committing scaffold files: %w", err) + } + if committed { + printer.StepDone(fmt.Sprintf("Wrote %d files", len(files))) + } else { + printer.StepDone("Scaffold up to date") + } + + printer.StepStart("Configuring repository variables") + for _, name := range sortedStringMapKeys(repoVars) { + if err := client.CreateOrUpdateRepoVariable(ctx, owner, repo, name, repoVars[name]); err != nil { + printer.StepFail(fmt.Sprintf("Failed to set variable %s", name)) + return fmt.Errorf("setting repo variable %s: %w", name, err) + } + } + printer.StepDone(fmt.Sprintf("Set %d repository variables", len(repoVars))) + + printer.StepStart("Configuring repository secrets") + for _, name := range sortedStringMapKeys(repoSecrets) { + if err := client.CreateRepoSecret(ctx, owner, repo, name, repoSecrets[name]); err != nil { + printer.StepFail(fmt.Sprintf("Failed to set secret %s", name)) + return fmt.Errorf("setting repo secret %s: %w", name, err) + } + } + printer.StepDone(fmt.Sprintf("Set %d repository secrets", len(repoSecrets))) + + printer.Blank() + printer.StepDone(fmt.Sprintf("Per-repo installation complete for %s/%s", owner, repo)) + return nil +} + // vendorFullsendBinary 
cross-compiles the fullsend binary for linux/amd64 // and uploads it to .fullsend/bin/fullsend via layers.VendorBinary. func vendorFullsendBinary(ctx context.Context, client forge.Client, printer *ui.Printer, org string) error { @@ -457,19 +723,87 @@ func runDryRun(ctx context.Context, client forge.Client, printer *ui.Printer, or return printAnalysis(ctx, stack, printer) } +// copySharedAppPEMs detects public GitHub Apps shared across orgs and copies +// their PEM secrets to the target org's naming convention. This runs before +// app setup so that handleExistingApp finds the PEM and returns credentials +// without trying to generate a new key. +// Returns a role → app-slug mapping for detected shared apps so callers +// can pass them as known slugs to app setup. +func copySharedAppPEMs(ctx context.Context, client forge.Client, printer *ui.Printer, org string, roles []string, mintProject, mintRegion string) (map[string]string, error) { + prov := gcf.NewProvisioner(gcf.Config{ + ProjectID: mintProject, + Region: mintRegion, + GitHubOrgs: []string{org}, + }, gcf.NewLiveGCFClient()) + + existingIDs, err := prov.GetExistingRoleAppIDs(ctx) + if err != nil || len(existingIDs) == 0 { + return nil, nil + } + + installations, err := client.ListOrgInstallations(ctx, org) + if err != nil { + return nil, nil + } + + roleSet := make(map[string]bool, len(roles)) + for _, r := range roles { + roleSet[r] = true + } + + sharedSlugs := make(map[string]string) + for _, inst := range installations { + appIDStr := strconv.Itoa(inst.AppID) + for key, existingAppID := range existingIDs { + if existingAppID != appIDStr { + continue + } + parts := strings.SplitN(key, "/", 2) + if len(parts) != 2 { + continue + } + srcOrg, role := parts[0], parts[1] + if srcOrg == org || !roleSet[role] { + continue + } + + sharedSlugs[role] = inst.AppSlug + + exists, _ := prov.SecretExists(ctx, org, role) + if exists { + continue + } + + printer.StepStart(fmt.Sprintf("Shared app detected: %s (app %d) — 
copying PEM from %s", role, inst.AppID, srcOrg)) + if err := prov.CopyAgentPEM(ctx, srcOrg, org, role); err != nil { + return nil, fmt.Errorf("copying shared PEM for %s: %w", role, err) + } + printer.StepDone(fmt.Sprintf("Copied shared %s PEM", role)) + break + } + } + return sharedSlugs, nil +} + // runAppSetup creates or reuses GitHub Apps for each role. When mintProject is // non-empty, PEMs are also stored in GCP Secret Manager during app creation so // they survive partial provisioning failures. -func runAppSetup(ctx context.Context, client forge.Client, printer *ui.Printer, org string, roles []string, mintProject string, publicApps bool) ([]layers.AgentCredentials, error) { +func runAppSetup(ctx context.Context, client forge.Client, printer *ui.Printer, org string, roles []string, mintProject string, publicApps bool, sharedSlugs map[string]string) ([]layers.AgentCredentials, error) { printer.Header("Setting up GitHub Apps") printer.Blank() setup := appsetup.NewSetup(client, appsetup.StdinPrompter{}, appsetup.DefaultBrowser{}, printer). WithPublicApps(publicApps) - // Try to load known slugs from existing config. + // Merge known slugs: config-based first, then shared app overrides. knownSlugs := loadKnownSlugs(ctx, client, org) - if knownSlugs != nil { + if knownSlugs == nil { + knownSlugs = make(map[string]string) + } + for role, slug := range sharedSlugs { + knownSlugs[role] = slug + } + if len(knownSlugs) > 0 { setup = setup.WithKnownSlugs(knownSlugs) } @@ -714,12 +1048,16 @@ func runUninstall(ctx context.Context, client forge.Client, printer *ui.Printer, // the apps. Without this fallback, a partial uninstall leaves orphaned // apps that block reinstallation (PEM keys are one-shot). 
var agentSlugs []string + var configMode string cfgData, err := client.GetFileContent(ctx, org, forge.ConfigRepoName, "config.yaml") if err == nil { - if cfg, parseErr := config.ParseOrgConfig(cfgData); parseErr == nil { - for _, agent := range cfg.Agents { + if parsedCfg, parseErr := config.ParseOrgConfig(cfgData); parseErr == nil { + for _, agent := range parsedCfg.Agents { agentSlugs = append(agentSlugs, agent.Slug) } + configMode = parsedCfg.Dispatch.Mode + } else { + printer.StepWarn(fmt.Sprintf("Could not parse existing config: %v; using defaults", parseErr)) } } if len(agentSlugs) == 0 { @@ -727,14 +1065,8 @@ func runUninstall(ctx context.Context, client forge.Client, printer *ui.Printer, for _, role := range config.DefaultAgentRoles() { agentSlugs = append(agentSlugs, appsetup.AppSlug(role)) } - printer.StepInfo("Config repo unavailable; using default app names") - } - - // Detect dispatch mode from existing config. - var configMode string - if cfgData != nil { - if existingCfg, parseErr := config.ParseOrgConfig(cfgData); parseErr == nil { - configMode = existingCfg.Dispatch.Mode + if err != nil { + printer.StepInfo("Config repo unavailable; using default app names") } } @@ -931,50 +1263,14 @@ func buildLayerStack( // all layers; TestCheckInstallScopes_SyncWithLayers asserts parity. var installRequiredScopes = []string{"repo", "workflow", "admin:org"} +// perRepoRequiredScopes is the set of OAuth scopes needed for per-repo install. +var perRepoRequiredScopes = []string{"repo", "workflow"} + // checkInstallScopes verifies that the token has the scopes needed for // install before starting interactive app setup. This avoids wasting // time on browser-based app creation only to fail on missing scopes. 
func checkInstallScopes(ctx context.Context, client forge.Client, printer *ui.Printer) error { - printer.StepStart("Checking token permissions") - - granted, err := client.GetTokenScopes(ctx) - if err != nil { - printer.StepFail("Could not verify token permissions") - return fmt.Errorf("checking token scopes: %w", err) - } - - if granted == nil { - printer.StepWarn("Preflight skipped: fine-grained token detected (scopes cannot be verified)") - return nil - } - - required := installRequiredScopes - grantedSet := make(map[string]bool, len(granted)) - for _, s := range granted { - grantedSet[s] = true - } - - var missing []string - for _, scope := range required { - if !grantedSet[scope] { - missing = append(missing, scope) - } - } - - if len(missing) > 0 { - printer.StepFail("Token is missing required scopes") - printer.Blank() - result := &layers.PreflightResult{ - Required: required, - Granted: granted, - Missing: missing, - } - printer.ErrorBox("Missing token scopes", result.Error()) - return fmt.Errorf("token is missing required scopes: %s", strings.Join(missing, ", ")) - } - - printer.StepDone("Token permissions verified") - return nil + return checkTokenScopes(ctx, client, printer, installRequiredScopes) } // runPreflight checks that the token has all required scopes for the @@ -1531,8 +1827,65 @@ func saveRepoConfig(ctx context.Context, client forge.Client, printer *ui.Printe return nil } +// checkPerRepoScopes verifies the token has sufficient permissions for per-repo install. +func checkPerRepoScopes(ctx context.Context, client forge.Client, printer *ui.Printer) error { + return checkTokenScopes(ctx, client, printer, perRepoRequiredScopes) +} + +// checkTokenScopes verifies the token has all required OAuth scopes. 
+func checkTokenScopes(ctx context.Context, client forge.Client, printer *ui.Printer, required []string) error { + printer.StepStart("Checking token permissions") + + granted, err := client.GetTokenScopes(ctx) + if err != nil { + printer.StepFail("Could not verify token permissions") + return fmt.Errorf("checking token scopes: %w", err) + } + + if granted == nil { + printer.StepWarn("Preflight skipped: fine-grained token detected (scopes cannot be verified)") + return nil + } + + grantedSet := make(map[string]bool, len(granted)) + for _, s := range granted { + grantedSet[s] = true + } + + var missing []string + for _, scope := range required { + if !grantedSet[scope] { + missing = append(missing, scope) + } + } + + if len(missing) > 0 { + printer.StepFail("Token is missing required scopes") + printer.Blank() + result := &layers.PreflightResult{ + Required: required, + Granted: granted, + Missing: missing, + } + printer.ErrorBox("Missing token scopes", result.Error()) + return fmt.Errorf("token is missing required scopes: %s", strings.Join(missing, ", ")) + } + + printer.StepDone("Token permissions verified") + return nil +} + // Helper functions. 
+func sortedStringMapKeys(m map[string]string) []string { + keys := make([]string, 0, len(m)) + for k := range m { + keys = append(keys, k) + } + sort.Strings(keys) + return keys +} + func repoNameList(repos []forge.Repository) []string { names := make([]string, len(repos)) for i, r := range repos { diff --git a/internal/cli/admin_test.go b/internal/cli/admin_test.go index e5906e8db..42f36a444 100644 --- a/internal/cli/admin_test.go +++ b/internal/cli/admin_test.go @@ -23,14 +23,15 @@ func TestAdminCommand_HasSubcommands(t *testing.T) { for _, sub := range cmd.Commands() { names[sub.Use] = true } - assert.True(t, names["install "], "expected install subcommand") + assert.True(t, names["install "], "expected install subcommand") assert.True(t, names["uninstall "], "expected uninstall subcommand") assert.True(t, names["analyze "], "expected analyze subcommand") assert.True(t, names["enable"], "expected enable subcommand") assert.True(t, names["disable"], "expected disable subcommand") + assert.False(t, names["init "], "init subcommand should not exist — merged into install") } -func TestInstallCmd_RequiresOrg(t *testing.T) { +func TestInstallCmd_RequiresArg(t *testing.T) { cmd := newRootCmd() cmd.SetArgs([]string{"admin", "install"}) err := cmd.Execute() @@ -79,6 +80,111 @@ func TestInstallCmd_Flags(t *testing.T) { mintSourceDirFlag := cmd.Flags().Lookup("mint-source-dir") require.NotNil(t, mintSourceDirFlag, "expected --mint-source-dir flag") + + // Per-repo flags. 
+ mintURLFlag := cmd.Flags().Lookup("mint-url") + require.NotNil(t, mintURLFlag, "expected --mint-url flag") + + // --gcp-auth-mode removed (WIF is the only mode) + gcpAuthModeFlag := cmd.Flags().Lookup("gcp-auth-mode") + assert.Nil(t, gcpAuthModeFlag, "--gcp-auth-mode flag should have been removed") + + scaffoldCustomizedFlag := cmd.Flags().Lookup("scaffold-customized") + require.NotNil(t, scaffoldCustomizedFlag, "expected --scaffold-customized flag") + assert.Equal(t, "false", scaffoldCustomizedFlag.DefValue) +} + +func TestInstallCmd_PerRepoRequiresMintURL(t *testing.T) { + cmd := newRootCmd() + cmd.SetArgs([]string{"admin", "install", "acme/widget", "--gcp-region", "us-central1"}) + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "--mint-url is required for per-repo installation") +} + +func TestInstallCmd_PerRepoRequiresGCPRegion(t *testing.T) { + cmd := newRootCmd() + cmd.SetArgs([]string{"admin", "install", "acme/widget", "--mint-url", "https://mint.example.com"}) + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "--gcp-region is required for per-repo installation") +} + +func TestInstallCmd_PerRepoRejectsInvalidFormat(t *testing.T) { + cmd := newRootCmd() + cmd.SetArgs([]string{"admin", "install", "acme/", "--mint-url", "https://mint.example.com", "--gcp-region", "us-central1"}) + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "repo must be in owner/repo format") +} + +func TestInstallCmd_PerRepoRejectsMultiSlash(t *testing.T) { + cmd := newRootCmd() + cmd.SetArgs([]string{"admin", "install", "acme/team/repo", "--mint-url", "https://mint.example.com", "--gcp-region", "us-central1"}) + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid repo name") +} + +func TestInstallCmd_PerRepoRejectsNonHTTPSMintURL(t *testing.T) { + cmd := newRootCmd() + cmd.SetArgs([]string{"admin", "install", "acme/widget", "--mint-url", 
"http://mint.example.com", "--gcp-region", "us-central1"}) + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "--mint-url must be a valid HTTPS URL") +} + +func TestInstallCmd_PerOrgRejectsPerRepoFlags(t *testing.T) { + cmd := newRootCmd() + cmd.SetArgs([]string{"admin", "install", "acme", "--mint-url", "https://mint.example.com"}) + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "--mint-url is only valid for per-repo installation") +} + +func TestInstallCmd_PerRepoRejectsPerOrgFlags(t *testing.T) { + cmd := newRootCmd() + cmd.SetArgs([]string{"admin", "install", "acme/widget", "--mint-url", "https://mint.example.com", "--gcp-region", "us-central1", "--mint-project", "my-project"}) + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "--mint-project is only valid for per-org installation") +} + +func TestInstallCmd_PerRepoRequiresGCPProject(t *testing.T) { + cmd := newRootCmd() + cmd.SetArgs([]string{"admin", "install", "acme/widget", + "--mint-url", "https://mint.example.com", + "--gcp-region", "us-central1"}) + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "--gcp-project is required for per-repo installation") +} + +func TestParseAgentRoles(t *testing.T) { + tests := []struct { + input string + want []string + wantErr bool + }{ + {"triage,review,coder", []string{"triage", "review", "coder"}, false}, + {" triage , review ", []string{"triage", "review"}, false}, + {"", nil, false}, + {"single", []string{"single"}, false}, + {"Invalid", nil, true}, + {"ok,BAD-role", nil, true}, + } + for _, tt := range tests { + t.Run(tt.input, func(t *testing.T) { + got, err := parseAgentRoles(tt.input) + if tt.wantErr { + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid role name") + } else { + require.NoError(t, err) + assert.Equal(t, tt.want, got) + } + }) + } } func TestUninstallCmd_RequiresOrg(t *testing.T) { @@ -890,3 +996,112 @@ func 
TestCheckInstallScopes_SyncWithLayers(t *testing.T) { assert.ElementsMatch(t, installRequiredScopes, layerScopes, "installRequiredScopes must match the union of RequiredScopes(OpInstall) from all layers; update the variable if a layer's scopes change") } + +func TestCheckPerRepoScopes_AllPresent(t *testing.T) { + client := &forge.FakeClient{ + TokenScopes: []string{"repo", "workflow", "read:org"}, + } + printer := ui.New(&discardWriter{}) + + err := checkPerRepoScopes(context.Background(), client, printer) + require.NoError(t, err) +} + +func TestCheckPerRepoScopes_Missing(t *testing.T) { + client := &forge.FakeClient{ + TokenScopes: []string{"repo"}, + } + printer := ui.New(&discardWriter{}) + + err := checkPerRepoScopes(context.Background(), client, printer) + require.Error(t, err) + assert.Contains(t, err.Error(), "workflow") + assert.NotContains(t, err.Error(), "admin:org") +} + +func TestCheckPerRepoScopes_FineGrainedToken(t *testing.T) { + client := &forge.FakeClient{ + TokenScopes: nil, + } + printer := ui.New(&discardWriter{}) + + err := checkPerRepoScopes(context.Background(), client, printer) + require.NoError(t, err) +} + +func TestCheckPerRepoScopes_GetTokenScopesError(t *testing.T) { + client := &forge.FakeClient{ + Errors: map[string]error{"GetTokenScopes": errors.New("network error")}, + } + printer := ui.New(&discardWriter{}) + + err := checkPerRepoScopes(context.Background(), client, printer) + require.Error(t, err) + assert.Contains(t, err.Error(), "checking token scopes") + assert.Contains(t, err.Error(), "network error") +} + +func TestCheckPerRepoScopes_DoesNotRequireAdminOrg(t *testing.T) { + client := &forge.FakeClient{ + TokenScopes: []string{"repo", "workflow"}, + } + printer := ui.New(&discardWriter{}) + + err := checkPerRepoScopes(context.Background(), client, printer) + require.NoError(t, err, "per-repo should not require admin:org scope") +} + +func TestPerRepoRequiredScopes_SubsetOfInstallScopes(t *testing.T) { + installSet := 
make(map[string]bool) + for _, s := range installRequiredScopes { + installSet[s] = true + } + for _, s := range perRepoRequiredScopes { + assert.True(t, installSet[s], + "perRepoRequiredScopes contains %q which is not in installRequiredScopes", s) + } +} + +func TestInstallCmd_PerRepoRejectsInvalidRole(t *testing.T) { + cmd := newRootCmd() + cmd.SetArgs([]string{"admin", "install", "acme/widget", + "--agents", "triage,INVALID", + "--mint-url", "https://mint.example.com", + "--gcp-region", "us-central1", + "--gcp-project", "my-project"}) + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid role name") +} + +func TestInstallCmd_PerRepoRejectsOwnerWithDots(t *testing.T) { + cmd := newRootCmd() + cmd.SetArgs([]string{"admin", "install", "my.org/widget", + "--mint-url", "https://mint.example.com", + "--gcp-region", "us-central1"}) + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid owner name") +} + +func TestInstallCmd_PerRepoRejectsURL(t *testing.T) { + tests := []struct { + name string + input string + }{ + {"https URL", "https://github.com/acme/widget"}, + {"http URL", "http://github.com/acme/widget"}, + {"www prefix", "www.github.com/acme/widget"}, + } + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + cmd := newRootCmd() + cmd.SetArgs([]string{"admin", "install", tc.input, + "--mint-url", "https://mint.example.com", + "--gcp-region", "us-central1"}) + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "expected owner/repo format, got a URL") + }) + } +} diff --git a/internal/config/config.go b/internal/config/config.go index a78b1e1aa..fa4c369a0 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -192,3 +192,66 @@ func (c *OrgConfig) AgentSlugs() map[string]string { func (c *OrgConfig) DefaultRoles() []string { return c.Defaults.Roles } + +// PerRepoConfig holds configuration for per-repo installation mode. 
// Stored in .fullsend/config.yaml within the target repository.
type PerRepoConfig struct {
	// Version is the config schema version; Validate accepts only "1".
	Version string `yaml:"version"`
	// KillSwitch is omitted from the YAML output when false (omitempty).
	KillSwitch bool `yaml:"kill_switch,omitempty"`
	// Roles lists the agent roles enabled for this repository.
	Roles []string `yaml:"roles,omitempty"`
}

// perRepoConfigHeader is prepended verbatim by Marshal so the generated
// file is self-describing. It must remain valid YAML comment text.
const perRepoConfigHeader = `# fullsend per-repo configuration
# https://github.com/fullsend-ai/fullsend
#
# This file configures fullsend for per-repo installation mode.
# See ADR 0033 for details.
`

// NewPerRepoConfig creates a new PerRepoConfig with the given roles.
// A nil roles slice falls back to DefaultAgentRoles(); an empty non-nil
// slice is kept as-is.
func NewPerRepoConfig(roles []string) *PerRepoConfig {
	if roles == nil {
		roles = DefaultAgentRoles()
	}
	return &PerRepoConfig{
		Version: "1",
		Roles:   roles,
	}
}

// ParsePerRepoConfig parses YAML bytes into a PerRepoConfig.
// It returns a wrapped error on malformed YAML; it does not call Validate.
func ParsePerRepoConfig(data []byte) (*PerRepoConfig, error) {
	var cfg PerRepoConfig
	if err := yaml.Unmarshal(data, &cfg); err != nil {
		return nil, fmt.Errorf("parsing per-repo config: %w", err)
	}
	return &cfg, nil
}

// Marshal serializes the PerRepoConfig to YAML with a descriptive header.
// The output is perRepoConfigHeader followed directly by the YAML body.
func (c *PerRepoConfig) Marshal() ([]byte, error) {
	body, err := yaml.Marshal(c)
	if err != nil {
		return nil, fmt.Errorf("marshaling per-repo config: %w", err)
	}
	return []byte(perRepoConfigHeader + string(body)), nil
}

// Validate checks the PerRepoConfig for structural correctness.
+func (c *PerRepoConfig) Validate() error { + if c.Version != "1" { + return fmt.Errorf("unsupported version %q: must be \"1\"", c.Version) + } + valid := ValidRoles() + seen := make(map[string]bool, len(c.Roles)) + for _, role := range c.Roles { + if !slices.Contains(valid, role) { + return fmt.Errorf("invalid role %q: must be one of %s", role, strings.Join(valid, ", ")) + } + if seen[role] { + return fmt.Errorf("duplicate role %q in roles", role) + } + seen[role] = true + } + return nil +} diff --git a/internal/config/config_test.go b/internal/config/config_test.go index ffcb9a93d..be975a3bf 100644 --- a/internal/config/config_test.go +++ b/internal/config/config_test.go @@ -546,3 +546,121 @@ func TestOrgConfigMarshal_WithDispatchMode(t *testing.T) { assert.Contains(t, string(data), "mode: oidc-mint") assert.Contains(t, string(data), "mint_url: https://fullsend-mint.run.app") } + +func TestNewPerRepoConfig_DefaultRoles(t *testing.T) { + cfg := NewPerRepoConfig(nil) + assert.Equal(t, "1", cfg.Version) + assert.Equal(t, DefaultAgentRoles(), cfg.Roles) + assert.False(t, cfg.KillSwitch) +} + +func TestNewPerRepoConfig_CustomRoles(t *testing.T) { + cfg := NewPerRepoConfig([]string{"triage", "review"}) + assert.Equal(t, []string{"triage", "review"}, cfg.Roles) +} + +func TestPerRepoConfigValidate_Valid(t *testing.T) { + cfg := &PerRepoConfig{ + Version: "1", + Roles: []string{"fullsend", "triage", "coder"}, + } + assert.NoError(t, cfg.Validate()) +} + +func TestPerRepoConfigValidate_InvalidVersion(t *testing.T) { + cfg := &PerRepoConfig{ + Version: "2", + Roles: []string{"fullsend"}, + } + err := cfg.Validate() + assert.Error(t, err) + assert.Contains(t, err.Error(), "unsupported version") +} + +func TestPerRepoConfigValidate_InvalidRole(t *testing.T) { + cfg := &PerRepoConfig{ + Version: "1", + Roles: []string{"fullsend", "invalid-role"}, + } + err := cfg.Validate() + assert.Error(t, err) + assert.Contains(t, err.Error(), "invalid role") +} + +func 
TestPerRepoConfigValidate_DuplicateRole(t *testing.T) { + cfg := &PerRepoConfig{ + Version: "1", + Roles: []string{"fullsend", "triage", "fullsend"}, + } + err := cfg.Validate() + assert.Error(t, err) + assert.Contains(t, err.Error(), "duplicate role") +} + +func TestPerRepoConfigValidate_EmptyRoles(t *testing.T) { + cfg := &PerRepoConfig{ + Version: "1", + Roles: []string{}, + } + assert.NoError(t, cfg.Validate()) +} + +func TestParsePerRepoConfig(t *testing.T) { + yamlData := ` +version: "1" +kill_switch: true +roles: + - fullsend + - triage + - review +` + cfg, err := ParsePerRepoConfig([]byte(yamlData)) + require.NoError(t, err) + assert.Equal(t, "1", cfg.Version) + assert.True(t, cfg.KillSwitch) + assert.Equal(t, []string{"fullsend", "triage", "review"}, cfg.Roles) +} + +func TestParsePerRepoConfig_Invalid(t *testing.T) { + _, err := ParsePerRepoConfig([]byte("not: [valid: yaml")) + assert.Error(t, err) + assert.Contains(t, err.Error(), "parsing per-repo config") +} + +func TestPerRepoConfigMarshal(t *testing.T) { + cfg := &PerRepoConfig{ + Version: "1", + Roles: []string{"fullsend", "triage"}, + } + data, err := cfg.Marshal() + require.NoError(t, err) + assert.Contains(t, string(data), "fullsend per-repo configuration") + assert.Contains(t, string(data), "version: \"1\"") + assert.Contains(t, string(data), "- fullsend") + assert.Contains(t, string(data), "- triage") +} + +func TestPerRepoConfigMarshal_KillSwitchOmitted(t *testing.T) { + cfg := &PerRepoConfig{ + Version: "1", + Roles: []string{"fullsend"}, + } + data, err := cfg.Marshal() + require.NoError(t, err) + assert.NotContains(t, string(data), "kill_switch") +} + +func TestPerRepoConfig_RoundTrip(t *testing.T) { + original := NewPerRepoConfig([]string{"fullsend", "triage", "coder", "review", "fix"}) + data, err := original.Marshal() + require.NoError(t, err) + + headerEnd := strings.Index(string(data), "version:") + require.True(t, headerEnd > 0) + + parsed, err := ParsePerRepoConfig(data[headerEnd:]) 
+ require.NoError(t, err) + assert.Equal(t, original.Version, parsed.Version) + assert.Equal(t, original.Roles, parsed.Roles) + assert.Equal(t, original.KillSwitch, parsed.KillSwitch) +} diff --git a/internal/dispatch/gcf/gcp.go b/internal/dispatch/gcf/gcp.go index 2025e33c9..ada1cbbe9 100644 --- a/internal/dispatch/gcf/gcp.go +++ b/internal/dispatch/gcf/gcp.go @@ -72,11 +72,10 @@ type GCFClient interface { GetSecret(ctx context.Context, projectID, secretID string) error CreateSecret(ctx context.Context, projectID, secretID string) error AddSecretVersion(ctx context.Context, projectID, secretID string, data []byte) error + AccessSecretVersion(ctx context.Context, projectID, secretID string) ([]byte, error) - // IAM binding (Secret Manager resources) + // IAM bindings SetSecretIAMBinding(ctx context.Context, resource, member, role string) error - - // IAM binding (project-level) SetProjectIAMBinding(ctx context.Context, projectID, member, role string) error // Cloud Run IAM (for function invoker policy) @@ -207,6 +206,9 @@ func (c *LiveGCFClient) CreateWIFProvider(ctx context.Context, projectNumber, po if resp.StatusCode == http.StatusConflict { io.Copy(io.Discard, io.LimitReader(resp.Body, 1<<20)) + if err := c.undeleteWIFProvider(ctx, projectNumber, poolID, providerID); err == nil { + return c.UpdateWIFProvider(ctx, projectNumber, poolID, providerID, cfg) + } return c.UpdateWIFProvider(ctx, projectNumber, poolID, providerID, cfg) } if resp.StatusCode != http.StatusOK { @@ -291,6 +293,27 @@ func (c *LiveGCFClient) UpdateWIFProvider(ctx context.Context, projectNumber, po return nil } +// undeleteWIFProvider restores a soft-deleted WIF provider. +// GCP WIF providers are soft-deleted with a 30-day grace period; creating a +// provider with the same ID during this window returns 409. Undeleting first +// allows the subsequent update to succeed. 
+func (c *LiveGCFClient) undeleteWIFProvider(ctx context.Context, projectNumber, poolID, providerID string) error { + reqURL := fmt.Sprintf("https://iam.googleapis.com/v1/projects/%s/locations/global/workloadIdentityPools/%s/providers/%s:undelete", + url.PathEscape(projectNumber), url.PathEscape(poolID), url.PathEscape(providerID)) + + resp, err := c.Client.DoRequest(ctx, http.MethodPost, reqURL, "{}") + if err != nil { + return err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + io.Copy(io.Discard, io.LimitReader(resp.Body, 1<<20)) + return fmt.Errorf("undelete returned %d", resp.StatusCode) + } + return c.waitForIAMOperation(ctx, resp.Body) +} + // GetSecret checks that a Secret Manager secret exists. func (c *LiveGCFClient) GetSecret(ctx context.Context, projectID, secretID string) error { reqURL := fmt.Sprintf("https://secretmanager.googleapis.com/v1/projects/%s/secrets/%s", @@ -362,6 +385,46 @@ func (c *LiveGCFClient) AddSecretVersion(ctx context.Context, projectID, secretI return nil } +// AccessSecretVersion reads the latest version of a Secret Manager secret. 
+func (c *LiveGCFClient) AccessSecretVersion(ctx context.Context, projectID, secretID string) ([]byte, error) { + reqURL := fmt.Sprintf("https://secretmanager.googleapis.com/v1/projects/%s/secrets/%s/versions/latest:access", + url.PathEscape(projectID), url.PathEscape(secretID)) + + resp, err := c.Client.DoRequest(ctx, http.MethodGet, reqURL, "") + if err != nil { + return nil, fmt.Errorf("accessing secret version: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode == http.StatusNotFound { + return nil, fmt.Errorf("secret %s: %w", secretID, ErrSecretNotFound) + } + if resp.StatusCode != http.StatusOK { + body, _ := io.ReadAll(io.LimitReader(resp.Body, 1<<20)) + return nil, fmt.Errorf("unexpected status %d accessing secret version: %s", resp.StatusCode, gcp.ExtractErrorMessage(body)) + } + + body, err := io.ReadAll(io.LimitReader(resp.Body, 1<<20)) + if err != nil { + return nil, fmt.Errorf("reading secret version response: %w", err) + } + + var result struct { + Payload struct { + Data string `json:"data"` + } `json:"payload"` + } + if err := json.Unmarshal(body, &result); err != nil { + return nil, fmt.Errorf("parsing secret version response: %w", err) + } + + data, err := base64.StdEncoding.DecodeString(result.Payload.Data) + if err != nil { + return nil, fmt.Errorf("decoding secret payload: %w", err) + } + return data, nil +} + // SetSecretIAMBinding sets an IAM binding on a Secret Manager resource. // Uses read-modify-write with retry on 409 Conflict (etag mismatch). 
func (c *LiveGCFClient) SetSecretIAMBinding(ctx context.Context, resource, member, role string) error { diff --git a/internal/dispatch/gcf/gcp_test.go b/internal/dispatch/gcf/gcp_test.go index 80e5ae2d4..8db56260a 100644 --- a/internal/dispatch/gcf/gcp_test.go +++ b/internal/dispatch/gcf/gcp_test.go @@ -155,22 +155,28 @@ func TestLiveGCFClient_CreateWIFProvider(t *testing.T) { callCount := 0 srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { callCount++ - if callCount == 1 { + switch callCount { + case 1: assert.Equal(t, http.MethodPost, r.Method) w.WriteHeader(http.StatusConflict) - return + case 2: + // Undelete attempt — returns 400 (not actually deleted). + assert.Equal(t, http.MethodPost, r.Method) + assert.Contains(t, r.URL.Path, ":undelete") + w.WriteHeader(http.StatusBadRequest) + case 3: + assert.Equal(t, http.MethodPatch, r.Method) + assert.Contains(t, r.URL.RawQuery, "attributeCondition") + assert.Contains(t, r.URL.RawQuery, "oidc.allowedAudiences") + var body map[string]interface{} + json.NewDecoder(r.Body).Decode(&body) + oidc, ok := body["oidc"].(map[string]interface{}) + assert.True(t, ok, "oidc config should be in PATCH body") + audiences := oidc["allowedAudiences"].([]interface{}) + assert.Equal(t, []interface{}{"fullsend-mint", "https://iam.googleapis.com/projects/123/locations/global/workloadIdentityPools/pool/providers/gh-oidc"}, audiences) + w.WriteHeader(http.StatusOK) + fmt.Fprintln(w, `{}`) } - assert.Equal(t, http.MethodPatch, r.Method) - assert.Contains(t, r.URL.RawQuery, "attributeCondition") - assert.Contains(t, r.URL.RawQuery, "oidc.allowedAudiences") - var body map[string]interface{} - json.NewDecoder(r.Body).Decode(&body) - oidc, ok := body["oidc"].(map[string]interface{}) - assert.True(t, ok, "oidc config should be in PATCH body") - audiences := oidc["allowedAudiences"].([]interface{}) - assert.Equal(t, []interface{}{"fullsend-mint", 
"https://iam.googleapis.com/projects/123/locations/global/workloadIdentityPools/pool/providers/gh-oidc"}, audiences) - w.WriteHeader(http.StatusOK) - fmt.Fprintln(w, `{}`) })) defer srv.Close() @@ -179,7 +185,7 @@ func TestLiveGCFClient_CreateWIFProvider(t *testing.T) { AllowedAudiences: []string{"fullsend-mint", "https://iam.googleapis.com/projects/123/locations/global/workloadIdentityPools/pool/providers/gh-oidc"}, }) require.NoError(t, err) - assert.Equal(t, 2, callCount) + assert.Equal(t, 3, callCount) }) } diff --git a/internal/dispatch/gcf/provisioner.go b/internal/dispatch/gcf/provisioner.go index d69cdcbd1..188504dd9 100644 --- a/internal/dispatch/gcf/provisioner.go +++ b/internal/dispatch/gcf/provisioner.go @@ -77,6 +77,7 @@ type Config struct { WIFPoolName string // default: "fullsend-pool" WIFProvider string // default: "github-oidc" GitHubOrgs []string + Repo string // per-repo mode: "owner/repo"; empty = per-org FunctionSourceDir string // path to Cloud Function source directory // AgentPEMs maps role → PEM private key data for all agent Apps. @@ -188,6 +189,55 @@ func (p *Provisioner) StoreAgentPEM(ctx context.Context, org, role string, pemDa return nil } +// CopyAgentPEM copies a PEM secret from one org to another. +// Used when the same public GitHub App is installed in multiple orgs — +// the PEM is the same (tied to the app), just needs a secret under the +// target org's naming convention. 
+func (p *Provisioner) CopyAgentPEM(ctx context.Context, srcOrg, dstOrg, role string) error { + if p.cfg.ProjectID == "" { + return fmt.Errorf("GCP project ID is required") + } + for _, org := range []string{srcOrg, dstOrg} { + if !githubOrgPattern.MatchString(org) || strings.Contains(org, "--") { + return fmt.Errorf("invalid org name %q", org) + } + } + if !rolePattern.MatchString(role) || strings.Contains(role, "--") { + return fmt.Errorf("invalid role name %q: must match %s", role, rolePattern.String()) + } + + dstID := secretID(dstOrg, role) + if err := p.gcpAPI.GetSecret(ctx, p.cfg.ProjectID, dstID); err == nil { + return nil + } + + srcID := secretID(srcOrg, role) + pemData, err := p.gcpAPI.AccessSecretVersion(ctx, p.cfg.ProjectID, srcID) + if err != nil { + return fmt.Errorf("reading source secret %s: %w", srcID, err) + } + + return p.StoreAgentPEM(ctx, dstOrg, role, pemData) +} + +// GetExistingRoleAppIDs reads ROLE_APP_IDS from the deployed mint function. +// Returns nil if the function doesn't exist or has no ROLE_APP_IDS. +func (p *Provisioner) GetExistingRoleAppIDs(ctx context.Context) (map[string]string, error) { + fn, err := p.gcpAPI.GetFunction(ctx, p.cfg.ProjectID, p.cfg.Region, functionName) + if err != nil || fn == nil || fn.EnvVars == nil { + return nil, nil + } + raw := fn.EnvVars["ROLE_APP_IDS"] + if raw == "" { + return nil, nil + } + var m map[string]string + if err := json.Unmarshal([]byte(raw), &m); err != nil { + return nil, nil + } + return m, nil +} + // Provision creates the GCP infrastructure for the token mint. // // When MintURL is empty, deploys the full mint infrastructure: @@ -365,6 +415,7 @@ func (p *Provisioner) provisionSelfManaged(ctx context.Context) (map[string]stri // Step 4b: Grant Vertex AI access to each installing org's .fullsend repo // at the project level (direct WIF — no intermediate service account). + // IAM policy changes can take up to 7 minutes to propagate. 
	for _, org := range installingOrgs {
		principal := fmt.Sprintf("principalSet://iam.googleapis.com/projects/%s/locations/global/workloadIdentityPools/%s/attribute.repository/%s/.fullsend",
			projectNumber, p.cfg.WIFPoolName, org)
		if err := p.gcpAPI.SetProjectIAMBinding(ctx, p.cfg.ProjectID, principal, "roles/aiplatform.user"); err != nil {
			return nil, fmt.Errorf("granting Vertex AI access for org %s: %w", org, err)
		}
	}
+	log.Printf("granted roles/aiplatform.user to %d org(s) (propagation may take several minutes)", len(installingOrgs))

	// Step 5a: Store new agent PEMs only for installing orgs.
	for _, org := range installingOrgs {
@@ -584,6 +636,24 @@ func deriveAllowedRoles(roleAppIDsJSON string) string {
 	return strings.Join(roles, ",")
 }
 
+// BuildRepoProviderID generates a GCP WIF provider ID scoped to a single repo.
+// GCP requires 4-32 chars, [a-z][a-z0-9-]*, no trailing hyphen.
+func BuildRepoProviderID(owner, repo string) string {
+	raw := fmt.Sprintf("gh-%s-%s", owner, repo)
+	raw = strings.ToLower(raw)
+	raw = strings.Map(func(r rune) rune {
+		if (r >= 'a' && r <= 'z') || (r >= '0' && r <= '9') || r == '-' {
+			return r
+		}
+		return '-'
+	}, raw)
+	if len(raw) > 32 {
+		raw = raw[:32]
+	}
+	raw = strings.TrimRight(raw, "-")
+	return raw
+}
+
 // buildAttributeCondition constructs a WIF CEL condition scoped to each org's
 // .fullsend repo (not org-wide) to limit which workflows can authenticate.
 func buildAttributeCondition(orgs []string) string {
@@ -669,6 +739,88 @@ func (p *Provisioner) waitForReady(ctx context.Context, mintURL string) error {
 	}
 }
 
+// ProvisionWIF creates the WIF infrastructure (pool, provider, and
+// project IAM bindings) needed for GitHub Actions to authenticate via OIDC.
+// All operations are idempotent. Returns the full WIF provider
+// resource path.
+func (p *Provisioner) ProvisionWIF(ctx context.Context) (wifProvider string, err error) {
+	if p.cfg.ProjectID == "" {
+		return "", fmt.Errorf("GCP project ID is required")
+	}
+	if !gcpProjectIDPattern.MatchString(p.cfg.ProjectID) {
+		return "", fmt.Errorf("invalid GCP project ID: %q", p.cfg.ProjectID)
+	}
+	if len(p.cfg.GitHubOrgs) == 0 {
+		return "", fmt.Errorf("at least one GitHub org is required")
+	}
+
+	orgs := make([]string, len(p.cfg.GitHubOrgs))
+	seen := make(map[string]bool)
+	for i, org := range p.cfg.GitHubOrgs {
+		if !githubOrgPattern.MatchString(org) {
+			return "", fmt.Errorf("invalid GitHub org name: %q", org)
+		}
+		lower := strings.ToLower(org)
+		if seen[lower] {
+			return "", fmt.Errorf("duplicate GitHub org after normalization: %q", org)
+		}
+		seen[lower] = true
+		orgs[i] = lower
+	}
+
+	projectNumber, err := p.gcpAPI.GetProjectNumber(ctx, p.cfg.ProjectID)
+	if err != nil {
+		return "", fmt.Errorf("getting project number: %w", err)
+	}
+
+	if err := p.gcpAPI.CreateWIFPool(ctx, projectNumber, p.cfg.WIFPoolName, "Fullsend GitHub OIDC Pool"); err != nil {
+		return "", fmt.Errorf("creating WIF pool: %w", err)
+	}
+
+	var attrCondition string
+	if p.cfg.Repo != "" {
+		// Validate owner/repo shape before indexing — SplitN without a
+		// separator check would panic on a malformed Repo value.
+		owner, repo, ok := strings.Cut(p.cfg.Repo, "/")
+		if !ok || owner == "" || repo == "" {
+			return "", fmt.Errorf("invalid repo %q: expected owner/repo", p.cfg.Repo)
+		}
+		p.cfg.WIFProvider = BuildRepoProviderID(owner, repo)
+		attrCondition = fmt.Sprintf("assertion.repository == '%s'", p.cfg.Repo)
+	} else {
+		attrCondition = buildAttributeCondition(orgs)
+	}
+
+	audiences := []string{oidcAudience, iamAudience(projectNumber, p.cfg.WIFPoolName, p.cfg.WIFProvider)}
+	if err := p.gcpAPI.CreateWIFProvider(ctx, projectNumber, p.cfg.WIFPoolName, p.cfg.WIFProvider, OIDCProviderConfig{
+		IssuerURI:          oidcIssuer,
+		AttributeCondition: attrCondition,
+		AllowedAudiences:   audiences,
+	}); err != nil {
+		return "", fmt.Errorf("creating WIF provider: %w", err)
+	}
+
+	// IAM policy changes can take up to 7 minutes to propagate.
+	// Workflows that rely on these bindings may fail during that window.
+ if p.cfg.Repo != "" { + principal := fmt.Sprintf("principalSet://iam.googleapis.com/projects/%s/locations/global/workloadIdentityPools/%s/attribute.repository/%s", + projectNumber, p.cfg.WIFPoolName, p.cfg.Repo) + if err := p.gcpAPI.SetProjectIAMBinding(ctx, p.cfg.ProjectID, principal, "roles/aiplatform.user"); err != nil { + return "", fmt.Errorf("granting Vertex AI access for repo %s: %w", p.cfg.Repo, err) + } + log.Printf("granted roles/aiplatform.user to %s (propagation may take several minutes)", p.cfg.Repo) + } else { + for _, org := range orgs { + principal := fmt.Sprintf("principalSet://iam.googleapis.com/projects/%s/locations/global/workloadIdentityPools/%s/attribute.repository/%s/.fullsend", + projectNumber, p.cfg.WIFPoolName, org) + if err := p.gcpAPI.SetProjectIAMBinding(ctx, p.cfg.ProjectID, principal, "roles/aiplatform.user"); err != nil { + return "", fmt.Errorf("granting Vertex AI access for org %s: %w", org, err) + } + } + log.Printf("granted roles/aiplatform.user to %d org(s) (propagation may take several minutes)", len(orgs)) + } + + wifProvider = fmt.Sprintf("projects/%s/locations/global/workloadIdentityPools/%s/providers/%s", + projectNumber, p.cfg.WIFPoolName, p.cfg.WIFProvider) + + return wifProvider, nil +} + func (p *Provisioner) zeroPEMs() { for role, pem := range p.cfg.AgentPEMs { for i := range pem { diff --git a/internal/dispatch/gcf/provisioner_test.go b/internal/dispatch/gcf/provisioner_test.go index f4d08b470..49e960702 100644 --- a/internal/dispatch/gcf/provisioner_test.go +++ b/internal/dispatch/gcf/provisioner_test.go @@ -57,8 +57,9 @@ type fakeGCFClient struct { // (after CreateFunction). If nil, functionInfo is always returned. functionInfoAfterCreate *FunctionInfo - // Captured WIF provider config for assertion. + // Captured WIF provider config and ID for assertion. lastWIFProviderConfig OIDCProviderConfig + lastWIFProviderID string // WIF provider state for GetWIFProvider. 
wifProvider *WIFProviderInfo @@ -66,6 +67,10 @@ type fakeGCFClient struct { // Track secret names written via AddSecretVersion. secretVersionNames []string + // Per-secret state for CopyAgentPEM tests. + secretData map[string][]byte // secretID → payload + secrets map[string]bool // secretID → exists + // Captured env vars from the last CreateFunction or UpdateFunction call. lastCreateFunctionEnvVars map[string]string @@ -97,8 +102,9 @@ func (f *fakeGCFClient) CreateServiceAccount(_ context.Context, _, _, _ string) func (f *fakeGCFClient) CreateWIFPool(_ context.Context, _, _, _ string) error { return f.record("CreateWIFPool") } -func (f *fakeGCFClient) CreateWIFProvider(_ context.Context, _, _, _ string, cfg OIDCProviderConfig) error { +func (f *fakeGCFClient) CreateWIFProvider(_ context.Context, _, _, providerID string, cfg OIDCProviderConfig) error { f.lastWIFProviderConfig = cfg + f.lastWIFProviderID = providerID return f.record("CreateWIFProvider") } func (f *fakeGCFClient) GetWIFProvider(_ context.Context, _, _, _ string) (*WIFProviderInfo, error) { @@ -112,20 +118,43 @@ func (f *fakeGCFClient) UpdateWIFProvider(_ context.Context, _, _, _ string, cfg f.lastWIFProviderConfig = cfg return f.record("UpdateWIFProvider") } -func (f *fakeGCFClient) GetSecret(_ context.Context, _, _ string) error { +func (f *fakeGCFClient) GetSecret(_ context.Context, _ string, sid string) error { f.calls = append(f.calls, "GetSecret") if err := f.errs["GetSecret"]; err != nil { return err } + if f.secrets != nil { + if !f.secrets[sid] { + return ErrSecretNotFound + } + } return nil } -func (f *fakeGCFClient) CreateSecret(_ context.Context, _, _ string) error { +func (f *fakeGCFClient) CreateSecret(_ context.Context, _ string, sid string) error { + if f.secrets != nil { + f.secrets[sid] = true + } return f.record("CreateSecret") } -func (f *fakeGCFClient) AddSecretVersion(_ context.Context, _ string, secretID string, _ []byte) error { +func (f *fakeGCFClient) AddSecretVersion(_ 
context.Context, _ string, secretID string, data []byte) error { f.secretVersionNames = append(f.secretVersionNames, secretID) + if f.secretData != nil { + f.secretData[secretID] = append([]byte(nil), data...) + } return f.record("AddSecretVersion") } +func (f *fakeGCFClient) AccessSecretVersion(_ context.Context, _ string, sid string) ([]byte, error) { + f.calls = append(f.calls, "AccessSecretVersion") + if err := f.errs["AccessSecretVersion"]; err != nil { + return nil, err + } + if f.secretData != nil { + if data, ok := f.secretData[sid]; ok { + return data, nil + } + } + return nil, fmt.Errorf("secret %s: %w", sid, ErrSecretNotFound) +} func (f *fakeGCFClient) SetSecretIAMBinding(_ context.Context, _, _, _ string) error { return f.record("SetSecretIAMBinding") } @@ -1215,6 +1244,10 @@ func TestProvisioner_Provision_MultiOrg_WIFCondition(t *testing.T) { assert.Equal(t, "assertion.repository in ['acme/.fullsend', 'widgetco/.fullsend']", fake.lastWIFProviderConfig.AttributeCondition) + + expectedIAMAudience := "https://iam.googleapis.com/projects/123456789/locations/global/workloadIdentityPools/fullsend-pool/providers/github-oidc" + assert.Equal(t, []string{"fullsend-mint", expectedIAMAudience}, + fake.lastWIFProviderConfig.AllowedAudiences) } func TestProvisioner_Provision_SingleOrg_WIFCondition(t *testing.T) { @@ -1234,6 +1267,10 @@ func TestProvisioner_Provision_SingleOrg_WIFCondition(t *testing.T) { assert.Equal(t, "assertion.repository == 'acme/.fullsend'", fake.lastWIFProviderConfig.AttributeCondition) + + expectedIAMAudience := "https://iam.googleapis.com/projects/123456789/locations/global/workloadIdentityPools/fullsend-pool/providers/github-oidc" + assert.Equal(t, []string{"fullsend-mint", expectedIAMAudience}, + fake.lastWIFProviderConfig.AllowedAudiences) } func TestProvisioner_Provision_WIF_AllowedAudiences(t *testing.T) { @@ -1326,6 +1363,257 @@ func TestProvisioner_Provision_MultiOrg_MergeDoesNotOverwriteExistingPEMs(t *tes assert.Contains(t, 
fake.lastCreateFunctionEnvVars["ROLE_APP_IDS"], `"new-org/coder"`) } +// --- ProvisionWIF tests --- + +func TestProvisionWIF_HappyPath(t *testing.T) { + fake := newFakeGCFClient() + p := NewProvisioner(Config{ + ProjectID: "my-project", + GitHubOrgs: []string{"acme"}, + }, fake) + + wifProvider, err := p.ProvisionWIF(context.Background()) + require.NoError(t, err) + + assert.Equal(t, "projects/123456789/locations/global/workloadIdentityPools/fullsend-pool/providers/github-oidc", wifProvider) + + assert.Contains(t, fake.calls, "GetProjectNumber") + assert.Contains(t, fake.calls, "CreateWIFPool") + assert.Contains(t, fake.calls, "CreateWIFProvider") + assert.Contains(t, fake.calls, "SetProjectIAMBinding") + + assert.Equal(t, "assertion.repository == 'acme/.fullsend'", fake.lastWIFProviderConfig.AttributeCondition) +} + +func TestProvisionWIF_MissingProjectID(t *testing.T) { + fake := newFakeGCFClient() + p := NewProvisioner(Config{ + GitHubOrgs: []string{"acme"}, + }, fake) + + _, err := p.ProvisionWIF(context.Background()) + require.Error(t, err) + assert.Contains(t, err.Error(), "GCP project ID is required") +} + +func TestProvisionWIF_MissingOrgs(t *testing.T) { + fake := newFakeGCFClient() + p := NewProvisioner(Config{ + ProjectID: "my-project", + }, fake) + + _, err := p.ProvisionWIF(context.Background()) + require.Error(t, err) + assert.Contains(t, err.Error(), "at least one GitHub org is required") +} + +func TestProvisionWIF_IAMBindingFails(t *testing.T) { + fake := newFakeGCFClient() + fake.errs["SetProjectIAMBinding"] = fmt.Errorf("policy error") + p := NewProvisioner(Config{ + ProjectID: "my-project", + GitHubOrgs: []string{"acme"}, + }, fake) + + _, err := p.ProvisionWIF(context.Background()) + require.Error(t, err) + assert.Contains(t, err.Error(), "granting Vertex AI access for org acme") +} + +func TestProvisionWIF_MultipleOrgs(t *testing.T) { + fake := newFakeGCFClient() + p := NewProvisioner(Config{ + ProjectID: "my-project", + GitHubOrgs: 
[]string{"acme", "beta"}, + }, fake) + + _, err := p.ProvisionWIF(context.Background()) + require.NoError(t, err) + assert.Equal(t, "assertion.repository in ['acme/.fullsend', 'beta/.fullsend']", fake.lastWIFProviderConfig.AttributeCondition) + + require.Len(t, fake.projectIAMBindings, 2) + assert.Contains(t, fake.projectIAMBindings[0].Member, "attribute.repository/acme/.fullsend") + assert.Contains(t, fake.projectIAMBindings[1].Member, "attribute.repository/beta/.fullsend") +} + +func TestProvisionWIF_GetProjectNumberFails(t *testing.T) { + fake := newFakeGCFClient() + fake.errs["GetProjectNumber"] = fmt.Errorf("forbidden") + p := NewProvisioner(Config{ + ProjectID: "my-project", + GitHubOrgs: []string{"acme"}, + }, fake) + + _, err := p.ProvisionWIF(context.Background()) + require.Error(t, err) + assert.Contains(t, err.Error(), "getting project number") +} + +func TestProvisionWIF_CreateWIFPoolFails(t *testing.T) { + fake := newFakeGCFClient() + fake.errs["CreateWIFPool"] = fmt.Errorf("quota exceeded") + p := NewProvisioner(Config{ + ProjectID: "my-project", + GitHubOrgs: []string{"acme"}, + }, fake) + + _, err := p.ProvisionWIF(context.Background()) + require.Error(t, err) + assert.Contains(t, err.Error(), "creating WIF pool") +} + +func TestProvisionWIF_CreateWIFProviderFails(t *testing.T) { + fake := newFakeGCFClient() + fake.errs["CreateWIFProvider"] = fmt.Errorf("invalid config") + p := NewProvisioner(Config{ + ProjectID: "my-project", + GitHubOrgs: []string{"acme"}, + }, fake) + + _, err := p.ProvisionWIF(context.Background()) + require.Error(t, err) + assert.Contains(t, err.Error(), "creating WIF provider") +} + +func TestProvisionWIF_InvalidOrgName(t *testing.T) { + fake := newFakeGCFClient() + p := NewProvisioner(Config{ + ProjectID: "my-project", + GitHubOrgs: []string{"bad org!"}, + }, fake) + + _, err := p.ProvisionWIF(context.Background()) + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid GitHub org name") +} + +func 
TestProvisionWIF_DuplicateOrg(t *testing.T) { + fake := newFakeGCFClient() + p := NewProvisioner(Config{ + ProjectID: "my-project", + GitHubOrgs: []string{"acme", "ACME"}, + }, fake) + + _, err := p.ProvisionWIF(context.Background()) + require.Error(t, err) + assert.Contains(t, err.Error(), "duplicate GitHub org after normalization") +} + +func TestProvisionWIF_DoesNotMutateInput(t *testing.T) { + fake := newFakeGCFClient() + orgs := []string{"ACME"} + p := NewProvisioner(Config{ + ProjectID: "my-project", + GitHubOrgs: orgs, + }, fake) + + _, err := p.ProvisionWIF(context.Background()) + require.NoError(t, err) + assert.Equal(t, "ACME", orgs[0], "ProvisionWIF should not mutate the input slice") +} + +func TestProvisionWIF_InvalidProjectID(t *testing.T) { + fake := newFakeGCFClient() + p := NewProvisioner(Config{ + ProjectID: "BAD", + GitHubOrgs: []string{"acme"}, + }, fake) + + _, err := p.ProvisionWIF(context.Background()) + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid GCP project ID") +} + +func TestProvisionWIF_NormalizesOrgCase(t *testing.T) { + fake := newFakeGCFClient() + p := NewProvisioner(Config{ + ProjectID: "my-project", + GitHubOrgs: []string{"ACME"}, + }, fake) + + _, err := p.ProvisionWIF(context.Background()) + require.NoError(t, err) + assert.Equal(t, "assertion.repository == 'acme/.fullsend'", fake.lastWIFProviderConfig.AttributeCondition) +} + +func TestProvisionWIF_RepoScoped(t *testing.T) { + fake := newFakeGCFClient() + p := NewProvisioner(Config{ + ProjectID: "my-project", + GitHubOrgs: []string{"acme"}, + Repo: "acme/widget", + }, fake) + + wifPath, err := p.ProvisionWIF(context.Background()) + require.NoError(t, err) + + assert.Equal(t, "gh-acme-widget", fake.lastWIFProviderID) + assert.Equal(t, "assertion.repository == 'acme/widget'", fake.lastWIFProviderConfig.AttributeCondition) + assert.Contains(t, wifPath, "gh-acme-widget") + + require.Len(t, fake.projectIAMBindings, 1) + assert.Contains(t, 
fake.projectIAMBindings[0].Member, "attribute.repository/acme/widget") + + assert.NotContains(t, fake.calls, "GetWIFProvider") +} + +func TestProvisionWIF_RepoScoped_DoesNotTouchSharedProvider(t *testing.T) { + fake := newFakeGCFClient() + fake.wifProvider = &WIFProviderInfo{ + AttributeCondition: "assertion.repository_owner == 'nonflux'", + } + p := NewProvisioner(Config{ + ProjectID: "my-project", + GitHubOrgs: []string{"acme"}, + Repo: "acme/widget", + }, fake) + + _, err := p.ProvisionWIF(context.Background()) + require.NoError(t, err) + + assert.Equal(t, "gh-acme-widget", fake.lastWIFProviderID) + assert.Equal(t, "assertion.repository == 'acme/widget'", fake.lastWIFProviderConfig.AttributeCondition) +} + +func TestProvisionWIF_OrgScoped_Unchanged(t *testing.T) { + fake := newFakeGCFClient() + p := NewProvisioner(Config{ + ProjectID: "my-project", + GitHubOrgs: []string{"acme"}, + }, fake) + + _, err := p.ProvisionWIF(context.Background()) + require.NoError(t, err) + + assert.Equal(t, "github-oidc", fake.lastWIFProviderID) + assert.Equal(t, "assertion.repository == 'acme/.fullsend'", fake.lastWIFProviderConfig.AttributeCondition) + require.Len(t, fake.projectIAMBindings, 1) + assert.Contains(t, fake.projectIAMBindings[0].Member, "attribute.repository/acme/.fullsend") +} + +func TestBuildRepoProviderID(t *testing.T) { + tests := []struct { + owner, repo string + want string + }{ + {"acme", "widget", "gh-acme-widget"}, + {"Acme", "My.Repo_v2", "gh-acme-my-repo-v2"}, + {"org", "very-long-repository-name-that-exceeds-limit", "gh-org-very-long-repository-name"}, + {"a", "b", "gh-a-b"}, + {"nonflux", "integration-service", "gh-nonflux-integration-service"}, + {"halfsend", "test-repo", "gh-halfsend-test-repo"}, + } + for _, tt := range tests { + t.Run(tt.owner+"/"+tt.repo, func(t *testing.T) { + got := BuildRepoProviderID(tt.owner, tt.repo) + assert.Equal(t, tt.want, got) + assert.GreaterOrEqual(t, len(got), 4) + assert.LessOrEqual(t, len(got), 32) + 
assert.NotEqual(t, '-', rune(got[len(got)-1])) + }) + } +} + // --- interface compliance --- func TestProvisioner_ImplementsDispatcher(t *testing.T) { @@ -1337,3 +1625,123 @@ func TestProvisioner_ImplementsDispatcher(t *testing.T) { OrgVariableNames() []string } = (*Provisioner)(nil) } + +func TestCopyAgentPEM_CopiesSecret(t *testing.T) { + fake := newFakeGCFClient() + fake.secrets = map[string]bool{ + "fullsend-srcorg--triage-app-pem": true, + } + fake.secretData = map[string][]byte{ + "fullsend-srcorg--triage-app-pem": []byte("-----BEGIN RSA PRIVATE KEY-----\ntest\n-----END RSA PRIVATE KEY-----"), + } + fake.errs["GetSecret"] = nil + + p := NewProvisioner(Config{ProjectID: "proj1"}, fake) + err := p.CopyAgentPEM(context.Background(), "srcorg", "dstorg", "triage") + require.NoError(t, err) + + assert.True(t, fake.secrets["fullsend-dstorg--triage-app-pem"]) + assert.Equal(t, + []byte("-----BEGIN RSA PRIVATE KEY-----\ntest\n-----END RSA PRIVATE KEY-----"), + fake.secretData["fullsend-dstorg--triage-app-pem"], + ) +} + +func TestCopyAgentPEM_DestinationExists_Noop(t *testing.T) { + fake := newFakeGCFClient() + fake.secrets = map[string]bool{ + "fullsend-srcorg--triage-app-pem": true, + "fullsend-dstorg--triage-app-pem": true, + } + fake.secretData = map[string][]byte{} + + p := NewProvisioner(Config{ProjectID: "proj1"}, fake) + err := p.CopyAgentPEM(context.Background(), "srcorg", "dstorg", "triage") + require.NoError(t, err) + assert.NotContains(t, fake.calls, "AccessSecretVersion") +} + +func TestCopyAgentPEM_SourceMissing_Error(t *testing.T) { + fake := newFakeGCFClient() + fake.secrets = map[string]bool{} + fake.secretData = map[string][]byte{} + + p := NewProvisioner(Config{ProjectID: "proj1"}, fake) + err := p.CopyAgentPEM(context.Background(), "srcorg", "dstorg", "triage") + require.Error(t, err) + assert.Contains(t, err.Error(), "reading source secret") +} + +func TestCopyAgentPEM_InvalidOrg(t *testing.T) { + fake := newFakeGCFClient() + p := 
NewProvisioner(Config{ProjectID: "proj1"}, fake) + + err := p.CopyAgentPEM(context.Background(), "bad org!", "dstorg", "triage") + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid org name") +} + +func TestCopyAgentPEM_MissingProjectID(t *testing.T) { + fake := newFakeGCFClient() + p := NewProvisioner(Config{}, fake) + + err := p.CopyAgentPEM(context.Background(), "srcorg", "dstorg", "triage") + require.Error(t, err) + assert.Contains(t, err.Error(), "GCP project ID is required") +} + +func TestGetExistingRoleAppIDs_ReturnsMap(t *testing.T) { + fake := newFakeGCFClient() + fake.functionInfo = &FunctionInfo{ + URI: "https://example.com", + EnvVars: map[string]string{ + "ROLE_APP_IDS": `{"nonflux/triage":"123","nonflux/coder":"456"}`, + }, + } + + p := NewProvisioner(Config{ProjectID: "proj1", Region: "us-central1"}, fake) + m, err := p.GetExistingRoleAppIDs(context.Background()) + require.NoError(t, err) + assert.Equal(t, map[string]string{ + "nonflux/triage": "123", + "nonflux/coder": "456", + }, m) +} + +func TestGetExistingRoleAppIDs_NoFunction(t *testing.T) { + fake := newFakeGCFClient() + fake.functionInfo = nil + + p := NewProvisioner(Config{ProjectID: "proj1", Region: "us-central1"}, fake) + m, err := p.GetExistingRoleAppIDs(context.Background()) + require.NoError(t, err) + assert.Nil(t, m) +} + +func TestGetExistingRoleAppIDs_EmptyEnvVars(t *testing.T) { + fake := newFakeGCFClient() + fake.functionInfo = &FunctionInfo{ + URI: "https://example.com", + EnvVars: map[string]string{}, + } + + p := NewProvisioner(Config{ProjectID: "proj1", Region: "us-central1"}, fake) + m, err := p.GetExistingRoleAppIDs(context.Background()) + require.NoError(t, err) + assert.Nil(t, m) +} + +func TestGetExistingRoleAppIDs_MalformedJSON(t *testing.T) { + fake := newFakeGCFClient() + fake.functionInfo = &FunctionInfo{ + URI: "https://example.com", + EnvVars: map[string]string{ + "ROLE_APP_IDS": "not-json", + }, + } + + p := NewProvisioner(Config{ProjectID: "proj1", 
Region: "us-central1"}, fake) + m, err := p.GetExistingRoleAppIDs(context.Background()) + require.NoError(t, err) + assert.Nil(t, m) +} diff --git a/internal/layers/workflows.go b/internal/layers/workflows.go index e64c37beb..1ffc9a2db 100644 --- a/internal/layers/workflows.go +++ b/internal/layers/workflows.go @@ -22,6 +22,9 @@ func init() { }); err != nil { panic(fmt.Sprintf("walking scaffold: %v", err)) } + for _, dir := range scaffold.CustomizedDirs() { + managedFiles = append(managedFiles, dir+"/.gitkeep") + } managedFiles = append(managedFiles, codeownersPath) } @@ -87,6 +90,14 @@ func (l *WorkflowsLayer) Install(ctx context.Context) error { return fmt.Errorf("collecting scaffold files: %w", err) } + for _, dir := range scaffold.CustomizedDirs() { + files = append(files, forge.TreeFile{ + Path: dir + "/.gitkeep", + Content: []byte(""), + Mode: "100644", + }) + } + files = append(files, forge.TreeFile{ Path: codeownersPath, Content: []byte(l.codeownersContent()), diff --git a/internal/scaffold/fullsend-repo/templates/shim-per-repo.yaml b/internal/scaffold/fullsend-repo/templates/shim-per-repo.yaml new file mode 100644 index 000000000..e04de726d --- /dev/null +++ b/internal/scaffold/fullsend-repo/templates/shim-per-repo.yaml @@ -0,0 +1,85 @@ +# fullsend shim workflow (per-repo installation mode) +# Routes events to agent workflows via reusable-dispatch.yml. +# All agent execution happens in this repo's context — no external +# config repo is needed. +# +# Security: pull_request_target runs the BASE branch version of this workflow, +# preventing PRs from modifying it to exfiltrate credentials. +# This shim never checks out PR code, so it is not vulnerable to "pwn request" +# attacks. +# +# Routing: this shim forwards the raw event context to reusable-dispatch.yml, +# which determines the stage and conditionally calls the appropriate +# reusable-{stage}.yml workflow. 
Adding a new stage requires only a case +# branch in reusable-dispatch.yml — zero changes to this repo. +name: fullsend + +permissions: + actions: write + id-token: write + contents: write + issues: write + packages: read + pull-requests: write + +on: + issues: + types: [labeled] + issue_comment: + types: [created] + pull_request_target: + types: [opened, synchronize, ready_for_review, closed] + pull_request_review: + types: [submitted] + +jobs: + dispatch: + concurrency: + group: fullsend-dispatch-${{ github.event.issue.number || github.event.pull_request.number }} + cancel-in-progress: false + if: >- + github.event_name != 'issue_comment' + || github.event.comment.user.type != 'Bot' + uses: fullsend-ai/fullsend/.github/workflows/reusable-dispatch.yml@v0 + with: + event_action: ${{ github.event.action }} + install_mode: per-repo + mint_url: ${{ vars.FULLSEND_MINT_URL }} + gcp_region: ${{ vars.FULLSEND_GCP_REGION }} + secrets: + FULLSEND_GCP_WIF_PROVIDER: ${{ secrets.FULLSEND_GCP_WIF_PROVIDER }} + FULLSEND_GCP_PROJECT_ID: ${{ secrets.FULLSEND_GCP_PROJECT_ID }} + + stop-fix: + if: >- + github.event_name == 'issue_comment' + && github.event.issue.pull_request + && github.event.comment.user.type != 'Bot' + && github.event.comment.body == '/stop-fix' + && ( + github.event.comment.author_association == 'OWNER' + || github.event.comment.author_association == 'MEMBER' + || github.event.comment.author_association == 'COLLABORATOR' + || github.event.comment.author_association == 'CONTRIBUTOR' + || github.event.comment.user.login == github.event.issue.user.login + ) + runs-on: ubuntu-latest + permissions: + contents: read + issues: write + pull-requests: write + steps: + - name: Add fullsend-no-fix label and notify + env: + GH_TOKEN: ${{ github.token }} + PR_NUMBER: ${{ github.event.issue.number }} + REPO: ${{ github.repository }} + run: | + set -euo pipefail + gh label create "fullsend-no-fix" --repo "$REPO" \ + --description "Skip bot-triggered fix agent runs" --color 
"FBCA04" \ + --force 2>/dev/null || true + gh pr edit "$PR_NUMBER" --repo "$REPO" \ + --add-label "fullsend-no-fix" + gh pr comment "$PR_NUMBER" --repo "$REPO" \ + --body "Fix agent disabled for this PR. Remove the \`fullsend-no-fix\` label or use \`/fix\` to re-engage." diff --git a/internal/scaffold/scaffold.go b/internal/scaffold/scaffold.go index 61e37e4c8..eb564f7b6 100644 --- a/internal/scaffold/scaffold.go +++ b/internal/scaffold/scaffold.go @@ -98,6 +98,31 @@ func WalkFullsendRepoAll(fn func(path string, content []byte) error) error { return walkFullsendRepo(fn, false) } +// PerRepoShimTemplate returns the content of the per-repo shim workflow template. +func PerRepoShimTemplate() ([]byte, error) { + return content.ReadFile("fullsend-repo/templates/shim-per-repo.yaml") +} + +// CustomizedDirs returns the set of customized/ subdirectories +// that should be scaffolded in a per-org .fullsend config repo. +func CustomizedDirs() []string { + dirs := make([]string, 0, len(layeredDirs)) + for _, d := range layeredDirs { + dirs = append(dirs, "customized/"+strings.TrimSuffix(d, "/")) + } + return dirs +} + +// PerRepoCustomizedDirs returns the set of customized/ subdirectories +// that should be scaffolded in a per-repo .fullsend/ setup. +func PerRepoCustomizedDirs() []string { + dirs := make([]string, 0, len(layeredDirs)) + for _, d := range layeredDirs { + dirs = append(dirs, ".fullsend/customized/"+strings.TrimSuffix(d, "/")) + } + return dirs +} + func walkFullsendRepo(fn func(path string, content []byte) error, filter bool) error { return fs.WalkDir(content, "fullsend-repo", func(path string, d fs.DirEntry, err error) error { if err != nil {