From be1a925febe915b54d6a98edb8c18cbcd100d4b1 Mon Sep 17 00:00:00 2001 From: codcod Date: Sun, 18 Jan 2026 21:30:57 +0100 Subject: [PATCH 1/6] feat: initial commit of repos-fix plugin --- .gitignore | 1 + Cargo.toml | 1 + README.md | 1 + justfile | 4 + plugins/repos-fix/Cargo.toml | 27 ++ plugins/repos-fix/README.md | 163 ++++++++ plugins/repos-fix/src/agent.rs | 200 ++++++++++ .../repos-fix/src/analysis/dependencies.rs | 150 ++++++++ plugins/repos-fix/src/analysis/index.rs | 191 ++++++++++ plugins/repos-fix/src/analysis/mod.rs | 69 ++++ plugins/repos-fix/src/analysis/platform.rs | 162 ++++++++ plugins/repos-fix/src/analysis/structure.rs | 356 ++++++++++++++++++ plugins/repos-fix/src/domain.rs | 223 +++++++++++ plugins/repos-fix/src/jira.rs | 238 ++++++++++++ plugins/repos-fix/src/main.rs | 68 ++++ plugins/repos-fix/src/prompt.rs | 222 +++++++++++ .../repos-fix/src/templates/agent_prompt.md | 16 + .../repos-fix/src/templates/cursor_prompt.md | 20 + .../repos-fix/src/templates/cursorrules.md | 14 + .../src/templates/guidelines_android.md | 2 + .../src/templates/guidelines_angular.md | 2 + .../repos-fix/src/templates/guidelines_ios.md | 2 + .../src/templates/guidelines_java.md | 2 + plugins/repos-fix/src/workflow.rs | 354 +++++++++++++++++ plugins/repos-fix/src/workspace.rs | 56 +++ 25 files changed, 2544 insertions(+) create mode 100644 plugins/repos-fix/Cargo.toml create mode 100644 plugins/repos-fix/README.md create mode 100644 plugins/repos-fix/src/agent.rs create mode 100644 plugins/repos-fix/src/analysis/dependencies.rs create mode 100644 plugins/repos-fix/src/analysis/index.rs create mode 100644 plugins/repos-fix/src/analysis/mod.rs create mode 100644 plugins/repos-fix/src/analysis/platform.rs create mode 100644 plugins/repos-fix/src/analysis/structure.rs create mode 100644 plugins/repos-fix/src/domain.rs create mode 100644 plugins/repos-fix/src/jira.rs create mode 100644 plugins/repos-fix/src/main.rs create mode 100644 plugins/repos-fix/src/prompt.rs create mode 100644 plugins/repos-fix/src/templates/agent_prompt.md create mode 100644 plugins/repos-fix/src/templates/cursor_prompt.md create mode 100644 plugins/repos-fix/src/templates/cursorrules.md create mode 100644 plugins/repos-fix/src/templates/guidelines_android.md create mode 100644 plugins/repos-fix/src/templates/guidelines_angular.md create mode 100644 plugins/repos-fix/src/templates/guidelines_ios.md create mode 100644 plugins/repos-fix/src/templates/guidelines_java.md create mode 100644 plugins/repos-fix/src/workflow.rs create mode 100644 plugins/repos-fix/src/workspace.rs diff --git a/.gitignore b/.gitignore index edc02a7..8ed0fd3 100644 --- a/.gitignore +++ b/.gitignore @@ -33,3 +33,4 @@ config.yaml tarpaulin-report.html test-*/ tests/test-recipes.yaml +.link-to-xdg-home-config \ No newline at end of file diff --git a/Cargo.toml b/Cargo.toml index 9178811..8052ae4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,6 +14,7 @@ members = [ "plugins/repos-health", "plugins/repos-review", "plugins/repos-validate", + "plugins/repos-fix", ] [dependencies] diff --git a/README.md b/README.md index 56a9858..295088b 100644 --- a/README.md +++ b/README.md @@ -142,6 +142,7 @@ overview. Click on a command to see its detailed documentation. | [**`init`**](./docs/commands/init.md) | Generates a `config.yaml` file from local Git repositories. | | [**`validate`**](./plugins/repos-validate/README.md) | Validates config file, repository connectivity, and synchronizes topics (via plugin). 
| | [**`review`**](./plugins/repos-review/README.md) | Uses UI to review changes (via plugin). | +| [**`fix`**](./plugins/repos-fix/README.md) | Automatically fixes bugs based on JIRA tickets using Cursor AI (via plugin). | For a full list of options for any command, run `repos --help`. diff --git a/justfile b/justfile index ec39362..08f8317 100644 --- a/justfile +++ b/justfile @@ -13,6 +13,8 @@ build: build-plugins: cargo build --release -p repos-health cargo build --release -p repos-validate + cargo build --release -p repos-review + cargo build --release -p repos-fix # Run tests [group('qa')] @@ -37,11 +39,13 @@ link-plugins: sudo ln -sf $(pwd)/target/release/repos-health /usr/local/bin/repos-health sudo ln -sf $(pwd)/target/release/repos-validate /usr/local/bin/repos-validate sudo ln -sf $(pwd)/target/release/repos-review /usr/local/bin/repos-review + sudo ln -sf $(pwd)/target/release/repos-fix /usr/local/bin/repos-fix [group('devex')] unlink-plugins: sudo rm -f /usr/local/bin/repos-health sudo rm -f /usr/local/bin/repos-validate sudo rm -f /usr/local/bin/repos-review + sudo rm -f /usr/local/bin/repos-fix # vim: set filetype=Makefile ts=4 sw=4 et: diff --git a/plugins/repos-fix/Cargo.toml b/plugins/repos-fix/Cargo.toml new file mode 100644 index 0000000..0cd15de --- /dev/null +++ b/plugins/repos-fix/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "repos-fix" +version = "0.1.0" +edition = "2024" + +[[bin]] +name = "repos-fix" +path = "src/main.rs" + +[dependencies] +anyhow = "1.0" +clap = { version = "4.5", features = ["derive"] } +colored = "2.1" +reqwest = { version = "0.12", features = ["json", "blocking"] } +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +tokio = { version = "1.0", features = ["full"] } +walkdir = "2.5" +regex = "1.10" +tempfile = "3.10" +html2text = "0.12" +url = "2.5" +base64 = "0.22" +minijinja = "2.0" + +# Use local repos library for plugin context +repos = { path = "../../" } diff --git a/plugins/repos-fix/README.md b/plugins/repos-fix/README.md new file mode 100644 index 0000000..2481260 --- /dev/null +++ b/plugins/repos-fix/README.md @@ -0,0 +1,163 @@ +# repos-fix + +Automatically analyze and fix JIRA maintenance tickets using Cursor AI. + +## Overview + +The `repos-fix` plugin integrates JIRA issue tracking with the Cursor AI agent to automatically implement fixes for maintenance tickets. It operates as a plugin for the `repos` tool. + +Key features: +1. **Fetches JIRA ticket details**: including description, priority, and attachments. +2. **Analyzes the codebase**: Detects platform (Java, iOS, Android, Angular), frameworks, and test structure. +3. **Generates comprehensive prompts**: Creates a "mission" for Cursor AI tailored to the specific project context. +4. **Runs cursor-agent**: Executes the fix in headless mode with auto-retries. +5. **Validates the implementation**: verifying build and tests pass. + +## Prerequisites + +- `repos` tool installed (this plugin is included with it). +- `cursor-agent` CLI installed and available in PATH. +- **JIRA Account**: with API token access. +- **Cursor API Key**: for the AI agent. + +## Installation + +1. **repos tool**: Ensure you have the `repos` tool installed. `repos-fix` is a built-in plugin. +2. **cursor-agent**: Install the Cursor Agent CLI: + ```bash + curl https://cursor.com/install -fsS | bash + ``` + Verify installation with `cursor-agent --version`. 
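+
+   As a sketch, a quick preflight check (assuming a POSIX shell) can confirm the CLI is on `PATH` before moving on to configuration:
+
+   ```bash
+   # Confirm cursor-agent is reachable and responds
+   command -v cursor-agent && cursor-agent --version
+   ```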
+ +## Configuration + +Set the following environment variables: + +```bash +# JIRA Configuration +export JIRA_URL=https://your-company.atlassian.net +export JIRA_USERNAME=your-email@company.com +export JIRA_API_TOKEN=your-jira-api-token + +# Cursor API Key +export CURSOR_API_KEY=your-cursor-api-key +``` + +- **JIRA API Token**: Generate at [id.atlassian.com](https://id.atlassian.com/manage-profile/security/api-tokens). +- **Cursor API Key**: Get it from Cursor Settings → General → API Keys. + +### Template Overrides + +You can customize the AI prompts and guidelines by placing files in your configuration directory: +- `${XDG_CONFIG_HOME}/repos/fix/` (usually `~/.config/repos/fix/`) + +Supported files: +- `cursor_prompt.md`: The main instruction set for Cursor. +- `cursorrules.md`: Behavior rules for the agent. +- `agent_prompt.md`: The mission prompt passed to `cursor-agent`. +- Platform guidelines: `guidelines_ios.md`, `guidelines_android.md`, `guidelines_java.md`, `guidelines_angular.md`. + +## Usage + +### Basic Usage + +Fix a JIRA ticket in a specific repository: + +```bash +# Single repository (by name) +repos fix my-backend-service --ticket MAINT-1234 + +# Multiple repositories +repos fix backend-service frontend-app --ticket MAINT-1234 +``` + +### Context-Aware Usage + +If you are already in a `repos` context (e.g., using tag filters), you can omit the repository name: + +```bash +# Fix a ticket in all repos matching 'production' tag +repos fix -t production --ticket MAINT-1234 + +# Auto-select if only one repo is in the current context +repos fix --ticket MAINT-1234 +``` + +### Full JIRA URL + +You can provide the full URL instead of just the ID: + +```bash +repos fix mobile-app --ticket https://company.atlassian.net/browse/MAINT-1234 +``` + +### Analysis Mode (Ask Mode) + +Analyze the issue and propose a solution **without making code changes**: + +```bash +repos fix my-service --ticket MAINT-1234 --ask +``` +This generates a `SOLUTION_SUMMARY.md` with the proposed plan. + +### Advanced Options + +- `--workspace `: Specify a custom directory for generated artifacts (default: `workspace/fix/`). +- `--prompt "..."`: Append extra instructions to the AI agent (e.g., "Use Java 17 features"). + +## Workflow + +When you run `repos fix`, the following steps occur: + +1. **Fetch Ticket**: Downloads JIRA ticket information, description, and attachments. +2. **Setup Workspace**: Creates a working directory at `workspace/fix//`. +3. **Analyze Project**: detailed inspection of platform, languages, frameworks, dependencies, and test setup. +4. **Generate Context**: Creates `mission-context.json` with all analysis data. +5. **Generate Prompts**: Creates `.cursorrules` and `cursor_prompt.md` tailored to the specific project. +6. **Run Cursor Agent**: + - Executes `cursor-agent` with `--force` and `--print` flags. + - **Auto-Retry**: If the agent fails (e.g., build fails, tests fail), it automatically retries up to **3 times**, feeding the error message back to the AI. +7. **Validate**: The agent validates the fix by running build and test commands detected during analysis. +8. **Report**: Generates `SOLUTION_SUMMARY.md` with implementation details. 
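+
+Taken together, a minimal end-to-end session might look like the sketch below. The repository name and ticket ID are placeholders, and it assumes the environment variables from the Configuration section are already exported:
+
+```bash
+# Run the automated fix workflow against one repository
+repos fix my-backend-service --ticket MAINT-1234
+
+# Inspect the artifacts the run leaves behind
+ls workspace/fix/MAINT-1234/
+cat workspace/fix/MAINT-1234/SOLUTION_SUMMARY.md
+```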
+ +## Output + +After execution, check the `workspace/fix//` directory: + +``` +workspace/fix/MAINT-1234/ +├── .cursorrules # AI behavior rules +├── mission-context.json # Complete project analysis & ticket data +├── cursor_prompt.md # The "rulebook" for Cursor +├── agent_prompt.md # The specific mission prompt +├── SOLUTION_SUMMARY.md # Final report of the implemented solution +``` + +## Supported Platforms + +The plugin automatically detects and supports: + +- **iOS**: Xcode projects (`.xcodeproj`, `.xcworkspace`), Swift/Obj-C, CocoaPods, SPM. +- **Android**: Gradle projects, Kotlin/Java, Android Manifests. +- **Java Backend**: Maven (`pom.xml`) or Gradle (`build.gradle`), Spring Boot, JUnit/Mockito. +- **Angular**: `angular.json` or `package.json` with Angular dependencies, TypeScript. + +## Troubleshooting + +### `cursor-agent` not found +Install it via `curl https://cursor.com/install -fsS | bash`. Ensure it is in your `$PATH`. + +### JIRA authentication failed +Check your environment variables: +```bash +echo $JIRA_URL +echo $JIRA_USERNAME +echo $JIRA_API_TOKEN +``` +Ensure `JIRA_API_TOKEN` is a valid API token, not your password. + +### Repository not found +If using context filtering (e.g., `-t tag`), ensure the repository actually matches the filter. You can list matches with `repos list -t tag`. + +### Agent keeps failing +Check the console output for the error message returned by `cursor-agent`. If it fails 3 times, check the generated prompts in the workspace directory to see if the AI instructions need manual adjustment (using `--prompt`). diff --git a/plugins/repos-fix/src/agent.rs b/plugins/repos-fix/src/agent.rs new file mode 100644 index 0000000..8c29f65 --- /dev/null +++ b/plugins/repos-fix/src/agent.rs @@ -0,0 +1,200 @@ +use anyhow::{Context, Result}; +use std::env; +use std::fs; +use std::io::{BufRead, BufReader}; +use std::path::Path; +use std::process::{Command, Stdio}; +use std::thread; + +pub struct CursorAgentRunner { + api_key: String, +} + +impl CursorAgentRunner { + pub fn new() -> Result { + let api_key = + env::var("CURSOR_API_KEY").context("CURSOR_API_KEY environment variable not set")?; + + // Check if cursor-agent is available + Self::check_cursor_agent()?; + + Ok(Self { api_key }) + } + + fn check_cursor_agent() -> Result<()> { + let output = Command::new("cursor-agent").arg("--version").output(); + + match output { + Ok(output) if output.status.success() => { + let version = String::from_utf8_lossy(&output.stdout); + println!("Found cursor-agent: {}", version.trim()); + Ok(()) + } + _ => { + anyhow::bail!( + "cursor-agent not found. Please install it:\n\ + curl https://cursor.com/install -fsS | bash" + ); + } + } + } + + pub fn run(&self, workspace_dir: &Path, prompt: &str, ask: bool) -> Result<()> { + println!("\n{}", "=".repeat(60)); + if ask { + println!("🚀 Starting cursor-agent in ASK mode"); + println!( + "🔍 No code will be changed - only analyzing and creating solution proposal..." 
+ ); + } else { + println!("🚀 Starting cursor-agent"); + println!("💭 This may take several minutes while the AI analyzes and codes..."); + } + println!("{}", "=".repeat(60)); + println!(); + + let mut cmd = Command::new("cursor-agent"); + cmd.arg("--api-key") + .arg(&self.api_key) + .arg("--print") + .arg("--force") + .arg(prompt) + .current_dir(workspace_dir) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()); + + let mut child = cmd.spawn().context("Failed to spawn cursor-agent")?; + + let stdout_handle = child.stdout.take().map(|stdout| { + thread::spawn(move || { + let reader = BufReader::new(stdout); + for line in reader.lines().map_while(Result::ok) { + // Show progress indicators + Self::display_progress(&line, ask); + } + }) + }); + + let stderr_handle = child.stderr.take().map(|stderr| { + thread::spawn(move || { + let reader = BufReader::new(stderr); + for line in reader.lines().map_while(Result::ok) { + eprintln!("{}", line); + } + }) + }); + + let status = child.wait().context("Failed to wait for cursor-agent")?; + + if let Some(handle) = stdout_handle { + let _ = handle.join(); + } + if let Some(handle) = stderr_handle { + let _ = handle.join(); + } + + println!(); + println!("{}", "=".repeat(60)); + + if status.success() { + if ask { + println!("🎉 Solution analysis completed successfully!"); + println!("📄 SOLUTION_SUMMARY.md should be created with the proposed solution"); + } else { + println!("🎉 Code fix implementation completed successfully!"); + println!("📄 Check SOLUTION_SUMMARY.md for details"); + } + } else { + anyhow::bail!("cursor-agent exited with status: {}", status); + } + + println!("{}", "=".repeat(60)); + println!(); + + Ok(()) + } + + fn display_progress(line: &str, ask: bool) { + let line_lower = line.to_lowercase(); + + // Simple progress indicators based on keywords + if line_lower.contains("analyzing") || line_lower.contains("reading") { + print!("🔍 Analyzing... "); + } else if line_lower.contains("planning") || line_lower.contains("thinking") { + print!("💡 Planning... "); + } else if !ask && (line_lower.contains("writing") || line_lower.contains("creating")) { + print!("⚡ Implementing... "); + } else if line_lower.contains("testing") || line_lower.contains("building") { + print!("✅ Validating... "); + } else if line_lower.contains("error") || line_lower.contains("failed") { + eprintln!("❌ Error: {}", line); + } + } + + pub fn run_with_retry( + &self, + workspace_dir: &Path, + prompt: &str, + ask: bool, + max_retries: u32, + ) -> Result<()> { + let mut last_error = None; + + for attempt in 1..=max_retries { + let current_prompt = if attempt == 1 { + prompt.to_string() + } else { + let error_message = last_error + .as_ref() + .map(ToString::to_string) + .unwrap_or_else(|| "unknown error".to_string()); + format!( + "{}\n\n**PREVIOUS ATTEMPT FAILED**\nError: {}\n\ + Please analyze the error and fix the code accordingly.", + prompt, error_message + ) + }; + + match self.run(workspace_dir, ¤t_prompt, ask) { + Ok(()) => return Ok(()), + Err(e) => { + last_error = Some(e); + if attempt < max_retries { + eprintln!("\n⚠️ Attempt {} failed", attempt); + eprintln!("🔄 Retrying... ({}/{})\n", attempt + 1, max_retries); + } + } + } + } + + if let Some(error) = last_error { + anyhow::bail!( + "Failed after {} attempts. 
Last error: {}", + max_retries, + error + ); + } + + anyhow::bail!("Failed after {} attempts.", max_retries); + } + + pub fn verify_solution(&self, workspace_dir: &Path) -> Result { + let solution_file = workspace_dir.join("SOLUTION_SUMMARY.md"); + + if !solution_file.exists() { + eprintln!("⚠️ SOLUTION_SUMMARY.md not found"); + return Ok(false); + } + + let content = + fs::read_to_string(&solution_file).context("Failed to read SOLUTION_SUMMARY.md")?; + + if content.trim().is_empty() { + eprintln!("⚠️ SOLUTION_SUMMARY.md is empty"); + return Ok(false); + } + + println!("✅ SOLUTION_SUMMARY.md created successfully"); + Ok(true) + } +} diff --git a/plugins/repos-fix/src/analysis/dependencies.rs b/plugins/repos-fix/src/analysis/dependencies.rs new file mode 100644 index 0000000..9bf86e1 --- /dev/null +++ b/plugins/repos-fix/src/analysis/dependencies.rs @@ -0,0 +1,150 @@ +use crate::analysis::index::RepoIndex; +use crate::domain::PlatformType; +use std::collections::HashMap; +use std::fs; + +#[derive(Debug, Clone, Default, serde::Serialize, serde::Deserialize)] +pub struct DependencyInfo { + pub java: HashMap>, + pub ios: HashMap>, + pub android: HashMap>, + pub angular: HashMap>, +} + +pub struct DependencyAnalyzer<'a> { + index: &'a RepoIndex, +} + +impl<'a> DependencyAnalyzer<'a> { + pub fn new(index: &'a RepoIndex) -> Self { + Self { index } + } + + pub fn analyze(&self, platform: &PlatformType) -> DependencyInfo { + let mut deps = DependencyInfo::default(); + + match platform { + PlatformType::Java => { + if let Some(pom_deps) = self.parse_pom_xml() { + deps.java.insert("maven".to_string(), pom_deps); + } + if let Some(gradle_deps) = self.parse_gradle_files() { + deps.java.insert("gradle".to_string(), gradle_deps); + } + } + PlatformType::Android => { + if let Some(gradle_deps) = self.parse_gradle_files() { + deps.android.insert("gradle".to_string(), gradle_deps); + } + } + PlatformType::Ios => { + if let Some(podfile_deps) = self.parse_podfile() { + deps.ios.insert("cocoapods".to_string(), podfile_deps); + } + } + PlatformType::Angular => { + if let Some(npm_deps) = self.parse_package_json() { + deps.angular.insert("npm".to_string(), npm_deps); + } + } + PlatformType::Unknown => {} + } + + deps + } + + fn parse_pom_xml(&self) -> Option> { + let pom_files = self.index.files_with_name("pom.xml"); + if pom_files.is_empty() { + return None; + } + + let content = fs::read_to_string(pom_files[0]).ok()?; + let deps: Vec = content + .lines() + .filter(|line| line.trim().starts_with("")) + .map(|line| line.trim().to_string()) + .collect(); + + if deps.is_empty() { + None + } else { + Some(deps) + } + } + + fn parse_gradle_files(&self) -> Option> { + let mut all_deps = Vec::new(); + + let gradle_files: Vec<_> = self + .index + .files_with_name("build.gradle") + .into_iter() + .chain(self.index.files_with_name("build.gradle.kts")) + .collect(); + + for gradle_file in gradle_files { + if let Ok(content) = fs::read_to_string(gradle_file) { + for line in content.lines() { + let trimmed = line.trim(); + if trimmed.contains("implementation") + || trimmed.contains("api") + || trimmed.contains("testImplementation") + { + all_deps.push(trimmed.to_string()); + } + } + } + } + + if all_deps.is_empty() { + None + } else { + Some(all_deps) + } + } + + fn parse_podfile(&self) -> Option> { + let podfiles = self.index.files_with_name("Podfile"); + if podfiles.is_empty() { + return None; + } + + let content = fs::read_to_string(podfiles[0]).ok()?; + let pods: Vec = content + .lines() + .filter(|line| 
line.trim().starts_with("pod ")) + .map(|line| line.trim().to_string()) + .collect(); + + if pods.is_empty() { + None + } else { + Some(pods) + } + } + + fn parse_package_json(&self) -> Option> { + let package_files = self.index.files_with_name("package.json"); + if package_files.is_empty() { + return None; + } + + let content = fs::read_to_string(package_files[0]).ok()?; + let json: serde_json::Value = serde_json::from_str(&content).ok()?; + + let mut deps = Vec::new(); + + if let Some(dependencies) = json.get("dependencies").and_then(|d| d.as_object()) { + for (name, version) in dependencies { + deps.push(format!("{}: {}", name, version)); + } + } + + if deps.is_empty() { + None + } else { + Some(deps) + } + } +} diff --git a/plugins/repos-fix/src/analysis/index.rs b/plugins/repos-fix/src/analysis/index.rs new file mode 100644 index 0000000..6e9bc91 --- /dev/null +++ b/plugins/repos-fix/src/analysis/index.rs @@ -0,0 +1,191 @@ +use anyhow::Result; +use std::collections::{HashSet, HashMap}; +use std::path::{Path, PathBuf}; +use walkdir::WalkDir; + +/// In-memory index of repository files for efficient querying +/// Built with a single filesystem traversal to avoid repeated walks +#[derive(Debug)] +pub struct RepoIndex { + /// All files in the repository + pub files: Vec, + /// Fast lookup by file name (e.g., "pom.xml") + pub file_names: HashSet, + /// Fast lookup by extension (e.g., "java", "kt") + pub extensions: HashSet, + /// Map of relative path to full path for quick queries + #[allow(dead_code)] + pub path_map: HashMap, +} + +impl RepoIndex { + /// Build an index by walking the repository once + pub fn build(root: &Path) -> Result { + let mut files = Vec::new(); + let mut file_names = HashSet::new(); + let mut extensions = HashSet::new(); + let mut path_map = HashMap::new(); + + for entry in WalkDir::new(root) + .follow_links(false) + .into_iter() + .filter_map(|e| e.ok()) + { + let path = entry.path().to_path_buf(); + + if entry.file_type().is_file() { + files.push(path.clone()); + + // Index file name + if let Some(name) = path.file_name().and_then(|n| n.to_str()) { + file_names.insert(name.to_string()); + } + + // Index extension + if let Some(ext) = path.extension().and_then(|e| e.to_str()) { + extensions.insert(ext.to_string()); + } + + // Index relative path + if let Ok(rel_path) = path.strip_prefix(root) { + path_map.insert(rel_path.to_path_buf(), path.clone()); + } + } + } + + Ok(Self { + files, + file_names, + extensions, + path_map, + }) + } + + /// Check if any file with the given name exists + pub fn has_file(&self, name: &str) -> bool { + self.file_names.contains(name) + } + + /// Check if any file with the given extension exists + pub fn has_extension(&self, ext: &str) -> bool { + self.extensions.contains(ext) + } + + /// Check if any file contains a pattern in its path + pub fn has_path_pattern(&self, pattern: &str) -> bool { + self.files.iter().any(|p| { + p.to_string_lossy().contains(pattern) + }) + } + + /// Get all files with a specific extension + #[allow(dead_code)] + pub fn files_with_extension(&self, ext: &str) -> Vec<&PathBuf> { + self.files + .iter() + .filter(|path| { + path.extension() + .and_then(|e| e.to_str()) + .map(|e| e == ext) + .unwrap_or(false) + }) + .collect() + } + + /// Get all files with a specific name + pub fn files_with_name(&self, name: &str) -> Vec<&PathBuf> { + self.files + .iter() + .filter(|path| { + path.file_name() + .and_then(|n| n.to_str()) + .map(|n| n == name) + .unwrap_or(false) + }) + .collect() + } + + /// Get all files 
matching any of the extensions + pub fn files_with_extensions(&self, exts: &[&str]) -> Vec<&PathBuf> { + self.files + .iter() + .filter(|path| { + path.extension() + .and_then(|e| e.to_str()) + .map(|e| exts.contains(&e)) + .unwrap_or(false) + }) + .collect() + } + + /// Get files in a specific directory (shallow) + #[allow(dead_code)] + pub fn files_in_dir(&self, dir_name: &str) -> Vec<&PathBuf> { + self.files + .iter() + .filter(|path| { + path.parent() + .and_then(|p| p.file_name()) + .and_then(|n| n.to_str()) + .map(|n| n == dir_name) + .unwrap_or(false) + }) + .collect() + } + + /// Get all files matching a path pattern + #[allow(dead_code)] + pub fn files_matching_pattern(&self, pattern: &str) -> Vec<&PathBuf> { + self.files + .iter() + .filter(|path| path.to_string_lossy().contains(pattern)) + .collect() + } + + /// Count files with a specific extension + #[allow(dead_code)] + pub fn count_extension(&self, ext: &str) -> usize { + self.files_with_extension(ext).len() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::fs; + use tempfile::TempDir; + + #[test] + fn test_repo_index_basic() { + let temp = TempDir::new().unwrap(); + let root = temp.path(); + + // Create test structure + fs::write(root.join("pom.xml"), "").unwrap(); + fs::create_dir(root.join("src")).unwrap(); + fs::write(root.join("src").join("Main.java"), "").unwrap(); + fs::write(root.join("src").join("Utils.kt"), "").unwrap(); + + let index = RepoIndex::build(root).unwrap(); + + assert!(index.has_file("pom.xml")); + assert!(index.has_extension("java")); + assert!(index.has_extension("kt")); + assert_eq!(index.count_extension("java"), 1); + assert_eq!(index.count_extension("kt"), 1); + } + + #[test] + fn test_repo_index_patterns() { + let temp = TempDir::new().unwrap(); + let root = temp.path(); + + fs::create_dir_all(root.join("src/main/java")).unwrap(); + fs::write(root.join("src/main/java/App.java"), "").unwrap(); + + let index = RepoIndex::build(root).unwrap(); + + assert!(index.has_path_pattern("src/main/java")); + assert_eq!(index.files_matching_pattern("main").len(), 1); + } +} diff --git a/plugins/repos-fix/src/analysis/mod.rs b/plugins/repos-fix/src/analysis/mod.rs new file mode 100644 index 0000000..7e4fb89 --- /dev/null +++ b/plugins/repos-fix/src/analysis/mod.rs @@ -0,0 +1,69 @@ +mod dependencies; +mod index; +mod platform; +mod structure; + +pub use dependencies::{DependencyAnalyzer, DependencyInfo}; +pub use index::RepoIndex; +pub use platform::{PlatformDetector, PlatformInfo}; +pub use structure::{ + ArchitecturePatterns, BuildCommands, ProjectStructure, StructureAnalyzer, TestStructure, +}; + +use anyhow::Result; +use serde::{Deserialize, Serialize}; +use std::path::Path; + +/// Complete project analysis result +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ProjectAnalysis { + pub platform: PlatformInfo, + pub dependencies: DependencyInfo, + pub architecture_patterns: ArchitecturePatterns, + pub test_structure: TestStructure, + pub project_structure: ProjectStructure, + pub build_commands: BuildCommands, +} + +/// Main project analyzer - coordinates all analysis modules +pub struct ProjectAnalyzer { + repo_path: std::path::PathBuf, +} + +impl ProjectAnalyzer { + pub fn new(repo_path: impl AsRef) -> Self { + Self { + repo_path: repo_path.as_ref().to_path_buf(), + } + } + + /// Perform complete project analysis with single filesystem traversal + pub fn analyze(&self) -> Result { + // Single pass: build the file index once + let index = RepoIndex::build(&self.repo_path)?; + + // 
Detect platform + let platform_detector = PlatformDetector::new(&index, &self.repo_path); + let platform = platform_detector.detect(); + + // Analyze dependencies + let dependency_analyzer = DependencyAnalyzer::new(&index); + let dependencies = dependency_analyzer.analyze(&platform.platform_type); + + // Analyze structure and patterns + let structure_analyzer = StructureAnalyzer::new(&index, &self.repo_path); + let architecture_patterns = structure_analyzer.analyze_architecture(&platform.platform_type); + let test_structure = structure_analyzer.analyze_test_structure(&platform.platform_type); + let project_structure = structure_analyzer.analyze_project_structure(&platform.platform_type); + let build_commands = structure_analyzer.determine_build_commands(&platform.platform_type); + + Ok(ProjectAnalysis { + platform, + dependencies, + architecture_patterns, + test_structure, + project_structure, + build_commands, + }) + } +} diff --git a/plugins/repos-fix/src/analysis/platform.rs b/plugins/repos-fix/src/analysis/platform.rs new file mode 100644 index 0000000..9bab1f4 --- /dev/null +++ b/plugins/repos-fix/src/analysis/platform.rs @@ -0,0 +1,162 @@ +use crate::analysis::index::RepoIndex; +use crate::domain::{Framework, Language, PlatformType}; +use std::path::Path; +use std::fs; + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub struct PlatformInfo { + pub platform_type: PlatformType, + pub languages: Vec, + pub frameworks: Vec, +} + +pub struct PlatformDetector<'a> { + index: &'a RepoIndex, + root: &'a Path, +} + +impl<'a> PlatformDetector<'a> { + pub fn new(index: &'a RepoIndex, root: &'a Path) -> Self { + Self { index, root } + } + + pub fn detect(&self) -> PlatformInfo { + let platform_type = self.detect_platform_type(); + let languages = self.detect_languages(&platform_type); + let frameworks = self.detect_frameworks(&platform_type); + + PlatformInfo { + platform_type, + languages, + frameworks, + } + } + + fn detect_platform_type(&self) -> PlatformType { + // iOS Detection + if self.index.has_path_pattern(".xcodeproj") || self.index.has_path_pattern(".xcworkspace") + { + return PlatformType::Ios; + } + + // Android Detection + if self.index.has_path_pattern("AndroidManifest.xml") + || self.root.join("app/src/main/AndroidManifest.xml").exists() + || self.has_android_gradle_plugin() + { + return PlatformType::Android; + } + + // Angular Detection + if self.index.has_file("angular.json") + || (self.index.has_file("package.json") && self.has_angular_in_package_json()) + { + return PlatformType::Angular; + } + + // Java Backend Detection + if self.index.has_file("pom.xml") || self.index.has_file("build.gradle") { + return PlatformType::Java; + } + + PlatformType::Unknown + } + + fn detect_languages(&self, platform: &PlatformType) -> Vec { + let mut languages = Vec::new(); + + match platform { + PlatformType::Ios => { + if self.index.has_extension("swift") { + languages.push(Language::Swift); + } + if self.index.has_extension("m") || self.index.has_extension("h") { + languages.push(Language::ObjectiveC); + } + } + PlatformType::Android | PlatformType::Java => { + if self.index.has_extension("kt") { + languages.push(Language::Kotlin); + } + if self.index.has_extension("java") { + languages.push(Language::Java); + } + } + PlatformType::Angular => { + languages.push(Language::TypeScript); + if self.index.has_extension("js") { + languages.push(Language::JavaScript); + } + } + PlatformType::Unknown => {} + } + + languages + } + + fn detect_frameworks(&self, platform: 
&PlatformType) -> Vec { + let mut frameworks = Vec::new(); + + match platform { + PlatformType::Ios => { + if self.index.has_file("Podfile") { + frameworks.push(Framework::CocoaPods); + } + if self.index.has_file("Package.swift") { + frameworks.push(Framework::SwiftPackageManager); + } + } + PlatformType::Android => { + frameworks.push(Framework::Gradle); + } + PlatformType::Java => { + if self.index.has_file("pom.xml") { + frameworks.push(Framework::Maven); + } + if self.index.has_file("build.gradle") || self.index.has_file("build.gradle.kts") { + frameworks.push(Framework::Gradle); + } + } + PlatformType::Angular => { + if self.index.has_file("package.json") { + frameworks.push(Framework::Npm); + } + if self.index.has_file("yarn.lock") { + frameworks.push(Framework::Yarn); + } + } + PlatformType::Unknown => {} + } + + frameworks + } + + fn has_android_gradle_plugin(&self) -> bool { + let gradle_files = self + .index + .files_with_name("build.gradle") + .into_iter() + .chain(self.index.files_with_name("build.gradle.kts")); + + for gradle_file in gradle_files { + if let Ok(content) = fs::read_to_string(gradle_file) + && (content.contains("com.android.application") + || content.contains("com.android.library") + || content.contains("com.android.test")) + { + return true; + } + } + + false + } + + fn has_angular_in_package_json(&self) -> bool { + if let Some(package_json) = self.index.files_with_name("package.json").first() + && let Ok(content) = std::fs::read_to_string(package_json) + { + return content.contains("@angular/core") || content.contains("@angular/cli"); + } + false + } +} diff --git a/plugins/repos-fix/src/analysis/structure.rs b/plugins/repos-fix/src/analysis/structure.rs new file mode 100644 index 0000000..bbbeab6 --- /dev/null +++ b/plugins/repos-fix/src/analysis/structure.rs @@ -0,0 +1,356 @@ +use crate::analysis::index::RepoIndex; +use crate::domain::{PlatformType, TestFramework}; +use std::collections::{BTreeSet, HashSet}; +use std::fs; +use std::path::Path; + +#[derive(Debug, Clone, Default, serde::Serialize, serde::Deserialize)] +pub struct ArchitecturePatterns { + pub dependency_injection: Vec, + pub reactive: Vec, + pub ui_framework: Vec, + pub architecture: Vec, +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub struct TestStructure { + pub test_directories: Vec, + pub test_frameworks: Vec, + pub test_patterns: Vec, +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub struct ProjectStructure { + pub source_directories: Vec, + pub resource_directories: Vec, + pub config_files: Vec, +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub struct BuildCommands { + pub main_build: String, + pub test_compile: Option, + pub test_run: String, +} + +pub struct StructureAnalyzer<'a> { + index: &'a RepoIndex, + root: &'a Path, +} + +impl<'a> StructureAnalyzer<'a> { + pub fn new(index: &'a RepoIndex, root: &'a Path) -> Self { + Self { index, root } + } + + pub fn analyze_architecture(&self, platform: &PlatformType) -> ArchitecturePatterns { + let mut patterns = ArchitecturePatterns::default(); + + let source_files = self.collect_source_files(platform); + + // Sample first 50 files to detect patterns + for file_path in source_files.iter().take(50) { + if let Ok(content) = fs::read_to_string(file_path) { + self.detect_di_patterns(&content, &mut patterns); + self.detect_reactive_patterns(&content, &mut patterns); + self.detect_ui_patterns(&content, platform, &mut patterns); + } + } + + patterns + } + + fn 
detect_di_patterns(&self, content: &str, patterns: &mut ArchitecturePatterns) { + if content.contains("import Koin") || content.contains("org.koin") { + Self::add_unique(&mut patterns.dependency_injection, "koin"); + } + if content.contains("import Hilt") || content.contains("dagger.hilt") { + Self::add_unique(&mut patterns.dependency_injection, "hilt"); + } + if content.contains("import Dagger") || content.contains("dagger.") { + Self::add_unique(&mut patterns.dependency_injection, "dagger"); + } + if content.contains("@Inject") || content.contains("@Autowired") { + Self::add_unique(&mut patterns.dependency_injection, "spring"); + } + } + + fn detect_reactive_patterns(&self, content: &str, patterns: &mut ArchitecturePatterns) { + if content.contains("import RxSwift") || content.contains("import RxCocoa") { + Self::add_unique(&mut patterns.reactive, "rxswift"); + } + if content.contains("import RxJava") || content.contains("io.reactivex") { + Self::add_unique(&mut patterns.reactive, "rxjava"); + } + if content.contains("import Combine") { + Self::add_unique(&mut patterns.reactive, "combine"); + } + if content.contains("kotlinx.coroutines") { + Self::add_unique(&mut patterns.reactive, "coroutines"); + } + if content.contains("import { Observable }") || content.contains("rxjs") { + Self::add_unique(&mut patterns.reactive, "rxjs"); + } + } + + fn detect_ui_patterns( + &self, + content: &str, + platform: &PlatformType, + patterns: &mut ArchitecturePatterns, + ) { + if content.contains("import SwiftUI") { + Self::add_unique(&mut patterns.ui_framework, "swiftui"); + } + if content.contains("import UIKit") { + Self::add_unique(&mut patterns.ui_framework, "uikit"); + } + if content.contains("androidx.compose") { + Self::add_unique(&mut patterns.ui_framework, "jetpack-compose"); + } + if content.contains("@Component") && *platform == PlatformType::Angular { + Self::add_unique(&mut patterns.ui_framework, "angular"); + } + } + + pub fn analyze_test_structure(&self, platform: &PlatformType) -> TestStructure { + let test_directories = self.find_test_directories(); + let test_frameworks = self.detect_test_frameworks(platform); + let test_patterns = self.determine_test_patterns(platform); + + TestStructure { + test_directories, + test_frameworks, + test_patterns, + } + } + + fn find_test_directories(&self) -> Vec { + let test_dir_names = ["test", "tests", "Test", "Tests", "androidTest", "unitTest"]; + let mut test_dirs = BTreeSet::new(); + + for file_path in &self.index.files { + if let Some(parent) = file_path.parent() + && let Some(name) = parent.file_name().and_then(|n| n.to_str()) + && test_dir_names.contains(&name) + && let Ok(rel_path) = parent.strip_prefix(self.root) + { + let rel_str = rel_path.to_string_lossy().to_string(); + test_dirs.insert(rel_str); + } + } + + test_dirs.into_iter().collect() + } + + fn detect_test_frameworks(&self, platform: &PlatformType) -> Vec { + let mut frameworks = HashSet::new(); + let test_files = self.find_test_files(platform); + + for test_file in test_files.iter().take(20) { + if let Ok(content) = fs::read_to_string(test_file) { + match platform { + PlatformType::Java | PlatformType::Android => { + if content.contains("import org.junit") { + frameworks.insert(TestFramework::JUnit); + } + if content.contains("import org.mockito") { + frameworks.insert(TestFramework::Mockito); + } + if content.contains("import io.mockk") { + frameworks.insert(TestFramework::MockK); + } + } + PlatformType::Ios => { + if content.contains("import XCTest") { + 
frameworks.insert(TestFramework::XCTest); + } + if content.contains("import Quick") { + frameworks.insert(TestFramework::Quick); + } + } + PlatformType::Angular => { + if content.contains("jasmine") || content.contains("describe(") { + frameworks.insert(TestFramework::Jasmine); + } + if content.contains("jest") { + frameworks.insert(TestFramework::Jest); + } + } + PlatformType::Unknown => {} + } + } + } + + frameworks.into_iter().collect() + } + + fn find_test_files(&self, platform: &PlatformType) -> Vec<&Path> { + let patterns: Vec<&str> = match platform { + PlatformType::Java | PlatformType::Android => { + vec!["Test.java", "Test.kt", "Tests.java", "Tests.kt"] + } + PlatformType::Ios => vec!["Test.swift", "Tests.swift", "Spec.swift"], + PlatformType::Angular => vec![".spec.ts", ".spec.js", "test.ts"], + PlatformType::Unknown => vec![], + }; + + self.index + .files + .iter() + .filter(|path| { + let path_str = path.to_string_lossy(); + patterns.iter().any(|pattern| path_str.contains(pattern)) + }) + .map(|p| p.as_path()) + .collect() + } + + fn determine_test_patterns(&self, platform: &PlatformType) -> Vec { + let mut patterns = vec!["unit-tests".to_string()]; + if matches!(platform, PlatformType::Android | PlatformType::Ios) { + patterns.push("ui-tests".to_string()); + } + patterns + } + + pub fn analyze_project_structure(&self, platform: &PlatformType) -> ProjectStructure { + let source_dirs = self.find_source_directories(platform); + let resource_dirs = self.find_resource_directories(platform); + let config_files = self.find_config_files(platform); + + ProjectStructure { + source_directories: source_dirs, + resource_directories: resource_dirs, + config_files, + } + } + + fn find_source_directories(&self, platform: &PlatformType) -> Vec { + let patterns: Vec<&str> = match platform { + PlatformType::Java | PlatformType::Android => { + vec!["src/main/java", "src/main/kotlin", "src"] + } + PlatformType::Ios => vec!["Sources", "src"], + PlatformType::Angular => vec!["src/app", "src"], + PlatformType::Unknown => vec![], + }; + + self.find_matching_dirs(&patterns) + } + + fn find_resource_directories(&self, platform: &PlatformType) -> Vec { + let patterns: Vec<&str> = match platform { + PlatformType::Java | PlatformType::Android => vec!["src/main/resources", "res"], + PlatformType::Ios => vec!["Resources", "Assets.xcassets"], + PlatformType::Angular => vec!["src/assets"], + PlatformType::Unknown => vec![], + }; + + self.find_matching_dirs(&patterns) + } + + fn find_config_files(&self, platform: &PlatformType) -> Vec { + let names: Vec<&str> = match platform { + PlatformType::Java | PlatformType::Android => { + vec!["build.gradle", "pom.xml", "settings.gradle"] + } + PlatformType::Ios => vec!["Package.swift", "Podfile", "project.pbxproj"], + PlatformType::Angular => vec!["angular.json", "package.json", "tsconfig.json"], + PlatformType::Unknown => vec![], + }; + + let mut config_files = Vec::new(); + for name in names { + for file in self.index.files_with_name(name) { + if let Ok(rel_path) = file.strip_prefix(self.root) { + config_files.push(rel_path.to_string_lossy().to_string()); + } + } + } + + config_files + } + + fn find_matching_dirs(&self, patterns: &[&str]) -> Vec { + let mut result = BTreeSet::new(); + + for file_path in &self.index.files { + let path_str = file_path.to_string_lossy(); + for pattern in patterns { + if path_str.contains(pattern) { + if let Some(parent) = file_path.parent() + && let Ok(rel_path) = parent.strip_prefix(self.root) + { + 
result.insert(rel_path.to_string_lossy().to_string()); + } + break; + } + } + } + + result.into_iter().collect() + } + + pub fn determine_build_commands(&self, platform: &PlatformType) -> BuildCommands { + match platform { + PlatformType::Java => { + if self.index.has_file("pom.xml") { + BuildCommands { + main_build: "mvn compile".to_string(), + test_compile: Some("mvn test-compile".to_string()), + test_run: "mvn test".to_string(), + } + } else { + BuildCommands { + main_build: "./gradlew build".to_string(), + test_compile: Some("./gradlew testClasses".to_string()), + test_run: "./gradlew test".to_string(), + } + } + } + PlatformType::Android => BuildCommands { + main_build: "./gradlew assembleDebug".to_string(), + test_compile: Some("./gradlew compileDebugUnitTestKotlin".to_string()), + test_run: "./gradlew testDebugUnitTest".to_string(), + }, + PlatformType::Ios => BuildCommands { + main_build: "xcodebuild -scheme build".to_string(), + test_compile: Some("xcodebuild -scheme build-for-testing".to_string()), + test_run: "xcodebuild test -scheme ".to_string(), + }, + PlatformType::Angular => BuildCommands { + main_build: "npm run build".to_string(), + test_compile: None, + test_run: "npm test".to_string(), + }, + PlatformType::Unknown => BuildCommands { + main_build: "make".to_string(), + test_compile: None, + test_run: "make test".to_string(), + }, + } + } + + fn collect_source_files(&self, platform: &PlatformType) -> Vec<&Path> { + let extensions: Vec<&str> = match platform { + PlatformType::Java => vec!["java", "kt"], + PlatformType::Android => vec!["java", "kt"], + PlatformType::Ios => vec!["swift", "m", "h"], + PlatformType::Angular => vec!["ts", "js"], + PlatformType::Unknown => vec![], + }; + + self.index + .files_with_extensions(&extensions) + .into_iter() + .map(|p| p.as_path()) + .collect() + } + + fn add_unique(vec: &mut Vec, item: &str) { + if !vec.iter().any(|existing| existing == item) { + vec.push(item.to_string()); + } + } +} diff --git a/plugins/repos-fix/src/domain.rs b/plugins/repos-fix/src/domain.rs new file mode 100644 index 0000000..f00ecca --- /dev/null +++ b/plugins/repos-fix/src/domain.rs @@ -0,0 +1,223 @@ +use serde::{Deserialize, Serialize}; +use std::fmt; + +/// Platform types supported by the analyzer +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum PlatformType { + Ios, + Android, + Angular, + Java, + Unknown, +} + +impl PlatformType { + pub fn as_str(&self) -> &'static str { + match self { + Self::Ios => "ios", + Self::Android => "android", + Self::Angular => "angular", + Self::Java => "java", + Self::Unknown => "unknown", + } + } + + pub fn emoji(&self) -> &'static str { + match self { + Self::Ios => "📱", + Self::Android => "🤖", + Self::Angular => "🌐", + Self::Java => "☕", + Self::Unknown => "💻", + } + } +} + +impl fmt::Display for PlatformType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.as_str()) + } +} + +/// Programming languages detected in the codebase +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum Language { + Swift, + #[serde(rename = "objective-c")] + ObjectiveC, + Kotlin, + Java, + TypeScript, + JavaScript, +} + +impl Language { + pub fn as_str(&self) -> &'static str { + match self { + Self::Swift => "swift", + Self::ObjectiveC => "objective-c", + Self::Kotlin => "kotlin", + Self::Java => "java", + Self::TypeScript => "typescript", + Self::JavaScript => 
"javascript", + } + } +} + +impl fmt::Display for Language { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.as_str()) + } +} + +/// Build tool/framework detected +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum Framework { + CocoaPods, + SwiftPackageManager, + Gradle, + Maven, + Npm, + Yarn, +} + +impl Framework { + pub fn as_str(&self) -> &str { + match self { + Self::CocoaPods => "cocoapods", + Self::SwiftPackageManager => "swift-package-manager", + Self::Gradle => "gradle", + Self::Maven => "maven", + Self::Npm => "npm", + Self::Yarn => "yarn", + } + } +} + +impl fmt::Display for Framework { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.as_str()) + } +} + +/// Dependency injection frameworks +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +#[allow(dead_code)] +pub enum DiFramework { + Koin, + Hilt, + Dagger, + Spring, +} + +impl DiFramework { + pub fn as_str(&self) -> &str { + match self { + Self::Koin => "koin", + Self::Hilt => "hilt", + Self::Dagger => "dagger", + Self::Spring => "spring", + } + } +} + +impl fmt::Display for DiFramework { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.as_str()) + } +} + +/// Reactive programming frameworks +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +#[allow(dead_code)] +pub enum ReactiveFramework { + RxSwift, + RxJava, + Combine, + Coroutines, + RxJS, +} + +impl ReactiveFramework { + pub fn as_str(&self) -> &str { + match self { + Self::RxSwift => "rxswift", + Self::RxJava => "rxjava", + Self::Combine => "combine", + Self::Coroutines => "coroutines", + Self::RxJS => "rxjs", + } + } +} + +impl fmt::Display for ReactiveFramework { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.as_str()) + } +} + +/// UI frameworks detected +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[allow(dead_code)] +pub enum UiFramework { + SwiftUI, + UIKit, + #[serde(rename = "jetpack-compose")] + JetpackCompose, + Angular, +} + +impl UiFramework { + pub fn as_str(&self) -> &str { + match self { + Self::SwiftUI => "swiftui", + Self::UIKit => "uikit", + Self::JetpackCompose => "jetpack-compose", + Self::Angular => "angular", + } + } +} + +impl fmt::Display for UiFramework { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.as_str()) + } +} + +/// Test frameworks detected +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum TestFramework { + JUnit, + Mockito, + MockK, + XCTest, + Quick, + Jasmine, + Jest, +} + +impl TestFramework { + pub fn as_str(&self) -> &str { + match self { + Self::JUnit => "junit", + Self::Mockito => "mockito", + Self::MockK => "mockk", + Self::XCTest => "xctest", + Self::Quick => "quick", + Self::Jasmine => "jasmine", + Self::Jest => "jest", + } + } +} + +impl fmt::Display for TestFramework { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.as_str()) + } +} diff --git a/plugins/repos-fix/src/jira.rs b/plugins/repos-fix/src/jira.rs new file mode 100644 index 0000000..57cb239 --- /dev/null +++ b/plugins/repos-fix/src/jira.rs @@ -0,0 +1,238 @@ +use anyhow::{Context, Result}; +use reqwest::blocking::Client; +use reqwest::header::{HeaderMap, HeaderValue, ACCEPT, CONTENT_TYPE}; +use serde::{Deserialize, 
Serialize}; +use std::env; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct JiraTicket { + pub id: String, + pub key: String, + pub title: String, + pub description: String, + pub status: String, + pub priority: String, + pub issue_type: String, + pub assignee: String, + pub reporter: String, + pub created: String, + pub updated: String, + pub attachments: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct JiraAttachment { + pub filename: String, + pub content_type: String, + pub size: Option, + pub url: String, + pub source: String, +} + +pub struct JiraClient { + client: Client, + base_url: String, +} + +impl JiraClient { + pub fn with_base_url(base_url: String) -> Result { + let base_url = base_url.trim_end_matches('/').to_string(); + + let username = + env::var("JIRA_USERNAME").context("JIRA_USERNAME environment variable not set")?; + + let token = + env::var("JIRA_API_TOKEN").context("JIRA_API_TOKEN environment variable not set")?; + + let mut headers = HeaderMap::new(); + headers.insert(ACCEPT, HeaderValue::from_static("application/json")); + headers.insert(CONTENT_TYPE, HeaderValue::from_static("application/json")); + + // Add Basic Auth header manually + let auth_value = format!("{}:{}", username, token); + let encoded = base64::Engine::encode( + &base64::engine::general_purpose::STANDARD, + auth_value.as_bytes(), + ); + let auth_header = HeaderValue::from_str(&format!("Basic {}", encoded)) + .context("Failed to create auth header")?; + headers.insert(reqwest::header::AUTHORIZATION, auth_header); + + let client = Client::builder() + .default_headers(headers) + .build() + .context("Failed to create HTTP client")?; + + Ok(Self { client, base_url }) + } + + pub fn get_ticket(&self, ticket_id: &str) -> Result { + let url = format!("{}/rest/api/3/issue/{}", self.base_url, ticket_id); + + let response = self + .client + .get(&url) + .query(&[("expand", "renderedFields,attachments,comments")]) + .send() + .context("Failed to fetch JIRA ticket")?; + + if !response.status().is_success() { + let status = response.status(); + let error_text = response.text().unwrap_or_default(); + anyhow::bail!("JIRA API error ({}): {}", status, error_text); + } + + let ticket_data: serde_json::Value = + response.json().context("Failed to parse JIRA response")?; + + self.parse_ticket(ticket_data) + } + + fn parse_ticket(&self, data: serde_json::Value) -> Result { + let fields = data + .get("fields") + .context("Missing 'fields' in JIRA response")?; + + let rendered_fields = data.get("renderedFields"); + + let description = if let Some(rendered) = rendered_fields { + rendered + .get("description") + .and_then(|d| d.as_str()) + .unwrap_or("") + } else { + fields + .get("description") + .and_then(|d| d.as_str()) + .unwrap_or("") + }; + + // Clean HTML from description + let description = html2text::from_read(description.as_bytes(), 80); + + let mut attachments = Vec::new(); + + // Parse direct JIRA attachments + if let Some(attachment_array) = fields.get("attachment").and_then(|a| a.as_array()) { + for attachment in attachment_array { + if let Some(att) = Self::parse_attachment(attachment) { + attachments.push(att); + } + } + } + + Ok(JiraTicket { + id: data + .get("id") + .and_then(|i| i.as_str()) + .unwrap_or("") + .to_string(), + key: data + .get("key") + .and_then(|k| k.as_str()) + .unwrap_or("") + .to_string(), + title: fields + .get("summary") + .and_then(|s| s.as_str()) + .unwrap_or("") + .to_string(), + description, + status: fields + .get("status") + .and_then(|s| 
s.get("name")) + .and_then(|n| n.as_str()) + .unwrap_or("") + .to_string(), + priority: fields + .get("priority") + .and_then(|p| p.get("name")) + .and_then(|n| n.as_str()) + .unwrap_or("") + .to_string(), + issue_type: fields + .get("issuetype") + .and_then(|i| i.get("name")) + .and_then(|n| n.as_str()) + .unwrap_or("") + .to_string(), + assignee: fields + .get("assignee") + .and_then(|a| a.get("displayName")) + .and_then(|n| n.as_str()) + .unwrap_or("Unassigned") + .to_string(), + reporter: fields + .get("reporter") + .and_then(|r| r.get("displayName")) + .and_then(|n| n.as_str()) + .unwrap_or("") + .to_string(), + created: fields + .get("created") + .and_then(|c| c.as_str()) + .unwrap_or("") + .to_string(), + updated: fields + .get("updated") + .and_then(|u| u.as_str()) + .unwrap_or("") + .to_string(), + attachments, + }) + } + + fn parse_attachment(data: &serde_json::Value) -> Option { + Some(JiraAttachment { + filename: data + .get("filename") + .and_then(|f| f.as_str()) + .unwrap_or("") + .to_string(), + content_type: data + .get("mimeType") + .and_then(|m| m.as_str()) + .unwrap_or("") + .to_string(), + size: data.get("size").and_then(|s| s.as_u64()), + url: data + .get("content") + .and_then(|c| c.as_str()) + .unwrap_or("") + .to_string(), + source: "jira".to_string(), + }) + } +} + +pub fn parse_jira_input(input: &str) -> Result<(String, String)> { + let input = input.trim(); + if input.is_empty() { + anyhow::bail!("JIRA ticket input cannot be empty"); + } + + // Check if it's a full URL + if input.starts_with("http") { + let url = url::Url::parse(input).context("Invalid JIRA URL")?; + + let base_url = format!( + "{}://{}", + url.scheme(), + url.host_str().context("Invalid host in URL")? + ); + + // Extract ticket ID from path like /browse/MAINT-1234 + let path = url.path(); + if let Some(ticket_id) = path.strip_prefix("/browse/") { + return Ok((base_url, ticket_id.to_string())); + } + + anyhow::bail!("Could not extract ticket ID from URL: {}", input); + } + + // Assume it's just a ticket ID + let jira_url = env::var("JIRA_URL") + .context("JIRA_URL not set. Provide full URL or set JIRA_URL environment variable")?; + + Ok((jira_url.trim_end_matches('/').to_string(), input.to_string())) +} diff --git a/plugins/repos-fix/src/main.rs b/plugins/repos-fix/src/main.rs new file mode 100644 index 0000000..a4443b9 --- /dev/null +++ b/plugins/repos-fix/src/main.rs @@ -0,0 +1,68 @@ +mod agent; +mod analysis; +mod domain; +mod jira; +mod prompt; +mod workflow; +mod workspace; + +use anyhow::{Context, Result}; +use clap::Parser; +use repos::{is_debug_mode, load_plugin_context}; +use std::path::PathBuf; +use workflow::FixWorkflow; + +#[derive(Parser, Debug)] +#[command(name = "repos-fix")] +#[command(about = "Automatically fix JIRA maintenance tickets using Cursor AI")] +struct Args { + /// Repository names to fix (if not provided, uses filtered repos from context) + repos: Vec, + + /// JIRA ticket ID or full URL (e.g., MAINT-1234 or https://company.atlassian.net/browse/MAINT-1234) + #[arg(long)] + ticket: String, + + /// Ask mode - analyze only, no code changes + #[arg(long)] + ask: bool, + + /// Custom workspace directory + #[arg(short, long)] + workspace: Option, + + /// Additional prompt to append to the generated prompt + #[arg(short, long)] + prompt: Option, +} + +fn main() -> Result<()> { + let args = Args::parse(); + let debug = is_debug_mode(); + + if debug { + eprintln!("Debug mode enabled"); + } + + // Load repositories from injected context + let repos = load_plugin_context()? 
+ .context("Failed to load plugin context. Make sure to run via 'repos fix'")?; + + if debug { + eprintln!("Loaded {} repositories from context", repos.len()); + } + + // Create and run workflow + let workflow = FixWorkflow::new( + repos, + args.ticket, + args.ask, + args.workspace, + args.prompt, + debug, + ); + + workflow.run(&args.repos)?; + + Ok(()) +} diff --git a/plugins/repos-fix/src/prompt.rs b/plugins/repos-fix/src/prompt.rs new file mode 100644 index 0000000..74bf12c --- /dev/null +++ b/plugins/repos-fix/src/prompt.rs @@ -0,0 +1,222 @@ +use crate::analysis::ProjectAnalysis; +use crate::domain::PlatformType; +use crate::jira::JiraTicket; +use anyhow::{Context, Result}; +use minijinja::{Environment, context}; +use std::env; +use std::fs; +use std::path::{Path, PathBuf}; +use std::sync::OnceLock; + +static TEMPLATE_ENV: OnceLock> = OnceLock::new(); + +fn template_override_dir() -> Option { + let xdg_config = env::var_os("XDG_CONFIG_HOME") + .filter(|value| !value.is_empty()) + .map(PathBuf::from); + + let base = xdg_config.or_else(|| { + env::var_os("HOME") + .filter(|value| !value.is_empty()) + .map(|home| PathBuf::from(home).join(".config")) + }); + + base.map(|base| base.join("repos").join("fix")) +} + +fn read_override_template(filename: &str) -> Option { + let path = template_override_dir()?.join(filename); + if path.is_file() { + fs::read_to_string(&path).ok() + } else { + None + } +} + +fn load_template_source(filename: &str, fallback: &'static str) -> &'static str { + if let Some(source) = read_override_template(filename) { + Box::leak(source.into_boxed_str()) + } else { + fallback + } +} + +fn load_guidelines(filename: &str, fallback: &'static str) -> String { + read_override_template(filename).unwrap_or_else(|| fallback.to_string()) +} + +fn get_template_env() -> &'static Environment<'static> { + TEMPLATE_ENV.get_or_init(|| { + let mut env = Environment::new(); + + // Load templates from embedded strings + env.add_template( + "cursor_prompt", + load_template_source("cursor_prompt.md", include_str!("templates/cursor_prompt.md")), + ) + .expect("Failed to add cursor_prompt template"); + env.add_template( + "cursorrules", + load_template_source("cursorrules.md", include_str!("templates/cursorrules.md")), + ) + .expect("Failed to add cursorrules template"); + env.add_template( + "agent_prompt", + load_template_source("agent_prompt.md", include_str!("templates/agent_prompt.md")), + ) + .expect("Failed to add agent_prompt template"); + + env + }) +} + +pub struct PromptGenerator; + +impl PromptGenerator { + pub fn generate_cursor_prompt( + ticket: &JiraTicket, + analysis: &ProjectAnalysis, + additional_prompt: Option<&str>, + ) -> Result { + let env = get_template_env(); + let tmpl = env.get_template("cursor_prompt")?; + + let platform_guidelines = Self::get_platform_guidelines(&analysis.platform.platform_type); + + let ctx = context! 
{ + platform_emoji => analysis.platform.platform_type.emoji(), + ticket => ticket, + platform_name => analysis.platform.platform_type.as_str().to_uppercase(), + languages => analysis.platform.languages.iter() + .map(|l| l.as_str()) + .collect::>() + .join(", "), + frameworks => analysis.platform.frameworks.iter() + .map(|f| f.as_str()) + .collect::>() + .join(", "), + source_dirs => analysis.project_structure.source_directories + .iter() + .take(5) + .cloned() + .collect::>() + .join(", "), + config_files => analysis.project_structure.config_files + .iter() + .take(5) + .cloned() + .collect::>() + .join(", "), + has_di => !analysis.architecture_patterns.dependency_injection.is_empty(), + di_frameworks => analysis.architecture_patterns.dependency_injection.join(", "), + has_reactive => !analysis.architecture_patterns.reactive.is_empty(), + reactive_frameworks => analysis.architecture_patterns.reactive.join(", "), + has_ui => !analysis.architecture_patterns.ui_framework.is_empty(), + ui_frameworks => analysis.architecture_patterns.ui_framework.join(", "), + has_test_frameworks => !analysis.test_structure.test_frameworks.is_empty(), + test_frameworks => analysis.test_structure.test_frameworks.iter() + .map(|f| f.as_str()) + .collect::>() + .join(", "), + has_test_dirs => !analysis.test_structure.test_directories.is_empty(), + test_dirs => analysis.test_structure.test_directories + .iter() + .take(3) + .cloned() + .collect::>() + .join(", "), + platform_guidelines => platform_guidelines, + main_build => analysis.build_commands.main_build, + test_compile => analysis.build_commands.test_compile, + test_run => analysis.build_commands.test_run, + additional_prompt => additional_prompt, + }; + + Ok(tmpl.render(ctx)?) + } + + pub fn generate_cursorrules( + ticket: &JiraTicket, + analysis: &ProjectAnalysis, + ask_mode: bool, + ) -> Result { + let env = get_template_env(); + let tmpl = env.get_template("cursorrules")?; + + let test_step_num = if analysis.build_commands.test_compile.is_some() { + "3" + } else { + "2" + }; + + let ctx = context! { + mode_title => if ask_mode { "ASK Mode Analysis" } else { "Automated Maintenance Assistant" }, + ask_mode => ask_mode, + ticket => ticket, + platform_name => analysis.platform.platform_type.as_str().to_uppercase(), + main_build => analysis.build_commands.main_build, + test_compile => analysis.build_commands.test_compile, + test_run => analysis.build_commands.test_run, + test_step_num => test_step_num, + }; + + Ok(tmpl.render(ctx)?) + } + + pub fn generate_agent_prompt( + ticket: &JiraTicket, + analysis: &ProjectAnalysis, + ask_mode: bool, + additional_prompt: Option<&str>, + ) -> Result { + let env = get_template_env(); + let tmpl = env.get_template("agent_prompt")?; + + let test_run_step = if analysis.build_commands.test_compile.is_some() { + "8" + } else { + "7" + }; + + let ctx = context! { + ask_mode => ask_mode, + ticket => ticket, + main_build => analysis.build_commands.main_build, + test_compile => analysis.build_commands.test_compile, + test_run => analysis.build_commands.test_run, + test_run_step => test_run_step, + additional_prompt => additional_prompt, + }; + + Ok(tmpl.render(ctx)?) 
+ } + + pub fn save_to_file(content: &str, path: &Path, filename: &str) -> Result<()> { + let file_path = path.join(filename); + fs::write(&file_path, content).with_context(|| format!("Failed to write {}", filename))?; + println!("Created: {}", file_path.display()); + Ok(()) + } + + fn get_platform_guidelines(platform: &PlatformType) -> String { + match platform { + PlatformType::Ios => load_guidelines( + "guidelines_ios.md", + include_str!("templates/guidelines_ios.md"), + ), + PlatformType::Android => load_guidelines( + "guidelines_android.md", + include_str!("templates/guidelines_android.md"), + ), + PlatformType::Java => load_guidelines( + "guidelines_java.md", + include_str!("templates/guidelines_java.md"), + ), + PlatformType::Angular => load_guidelines( + "guidelines_angular.md", + include_str!("templates/guidelines_angular.md"), + ), + PlatformType::Unknown => String::new(), + } + } +} diff --git a/plugins/repos-fix/src/templates/agent_prompt.md b/plugins/repos-fix/src/templates/agent_prompt.md new file mode 100644 index 0000000..147410c --- /dev/null +++ b/plugins/repos-fix/src/templates/agent_prompt.md @@ -0,0 +1,16 @@ +{% if ask_mode %}Analyze the ticket and repo context. Do not change code. Create +`SOLUTION_SUMMARY.md`. + +Ticket: {{ ticket.key }} - {{ ticket.title }} +{% else %}Fix the ticket with minimal, compatible changes and tests. + +Ticket: {{ ticket.key }} - {{ ticket.title }} +Build: `{{ main_build }}` +{% if test_compile %}Test compile: `{{ test_compile }}` +{% endif %}Tests: `{{ test_run }}` + +Create `SOLUTION_SUMMARY.md` after completion. + +{% if additional_prompt %}Additional requirements: {{ additional_prompt }} + +{% endif %}{% endif %} diff --git a/plugins/repos-fix/src/templates/cursor_prompt.md b/plugins/repos-fix/src/templates/cursor_prompt.md new file mode 100644 index 0000000..b3f26de --- /dev/null +++ b/plugins/repos-fix/src/templates/cursor_prompt.md @@ -0,0 +1,20 @@ +# {{ platform_emoji }} {{ ticket.key }} - {{ ticket.title }} + +Use `mission-context.json` to understand the repo and constraints. Make minimal, +compatible changes and keep code style consistent. + +- **Platform**: {{ platform_name }} +- **Languages**: {{ languages }} +- **Frameworks**: {{ frameworks }} + +{{ platform_guidelines }} + +## Build and test +- **Build**: `{{ main_build }}` +{% if test_compile %}- **Test compile**: `{{ test_compile }}` +{% endif %}- **Tests**: `{{ test_run }}` + +{% if additional_prompt %} +## Additional requirements +{{ additional_prompt }} +{% endif %} diff --git a/plugins/repos-fix/src/templates/cursorrules.md b/plugins/repos-fix/src/templates/cursorrules.md new file mode 100644 index 0000000..9c27c30 --- /dev/null +++ b/plugins/repos-fix/src/templates/cursorrules.md @@ -0,0 +1,14 @@ +# Repos Fix - {{ mode_title }} + +Ticket: {{ ticket.key }} - {{ ticket.title }} +Platform: {{ platform_name }} + +{% if ask_mode %} +ASK mode: analyze only. Do not change code. Produce `SOLUTION_SUMMARY.md`. +{% else %} +Make minimal, compatible changes. Add or update tests as needed. + +Build: `{{ main_build }}` +{% if test_compile %}Test compile: `{{ test_compile }}` +{% endif %}Tests: `{{ test_run }}` +{% endif %} diff --git a/plugins/repos-fix/src/templates/guidelines_android.md b/plugins/repos-fix/src/templates/guidelines_android.md new file mode 100644 index 0000000..1e852de --- /dev/null +++ b/plugins/repos-fix/src/templates/guidelines_android.md @@ -0,0 +1,2 @@ +### Android +Use existing Android patterns and tooling. 
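
The guideline templates above ship embedded in the binary, but the override lookup in `prompt.rs` (`template_override_dir()` / `read_override_template()`) also checks `$XDG_CONFIG_HOME/repos/fix`, falling back to `~/.config/repos/fix`. A minimal shell sketch of overriding one of them without rebuilding the plugin; the replacement guideline text is purely illustrative:

```bash
# Mirrors template_override_dir(): XDG_CONFIG_HOME wins, ~/.config is the fallback.
override_dir="${XDG_CONFIG_HOME:-$HOME/.config}/repos/fix"
mkdir -p "$override_dir"

# Any embedded template filename can be dropped in here
# (cursor_prompt.md, cursorrules.md, agent_prompt.md, guidelines_*.md).
cat > "$override_dir/guidelines_java.md" <<'EOF'
### Java
Use existing Java patterns and tooling. Prefer constructor injection.
EOF
```

Overrides are resolved per run of the plugin, so an edited file takes effect the next time `repos fix` executes.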
diff --git a/plugins/repos-fix/src/templates/guidelines_angular.md b/plugins/repos-fix/src/templates/guidelines_angular.md new file mode 100644 index 0000000..c92cf1e --- /dev/null +++ b/plugins/repos-fix/src/templates/guidelines_angular.md @@ -0,0 +1,2 @@ +### Angular +Use existing Angular patterns and tooling. diff --git a/plugins/repos-fix/src/templates/guidelines_ios.md b/plugins/repos-fix/src/templates/guidelines_ios.md new file mode 100644 index 0000000..2810724 --- /dev/null +++ b/plugins/repos-fix/src/templates/guidelines_ios.md @@ -0,0 +1,2 @@ +### iOS +Use existing iOS patterns and tooling. diff --git a/plugins/repos-fix/src/templates/guidelines_java.md b/plugins/repos-fix/src/templates/guidelines_java.md new file mode 100644 index 0000000..3a59914 --- /dev/null +++ b/plugins/repos-fix/src/templates/guidelines_java.md @@ -0,0 +1,2 @@ +### Java +Use existing Java patterns and tooling. diff --git a/plugins/repos-fix/src/workflow.rs b/plugins/repos-fix/src/workflow.rs new file mode 100644 index 0000000..647a7be --- /dev/null +++ b/plugins/repos-fix/src/workflow.rs @@ -0,0 +1,354 @@ +use crate::agent::CursorAgentRunner; +use crate::analysis::ProjectAnalyzer; +use crate::jira::{JiraClient, JiraTicket, parse_jira_input}; +use crate::prompt::PromptGenerator; +use crate::workspace::{RepoManager, WorkspaceManager}; +use anyhow::{Context, Result}; +use colored::Colorize; +use repos::Repository; +use std::path::{Path, PathBuf}; + +pub struct FixWorkflow { + repos: Vec, + ticket: String, + ask_mode: bool, + workspace_dir: Option, + additional_prompt: Option, + debug: bool, +} + +impl FixWorkflow { + pub fn new( + repos: Vec, + ticket: String, + ask_mode: bool, + workspace_dir: Option, + additional_prompt: Option, + debug: bool, + ) -> Self { + Self { + repos, + ticket, + ask_mode, + workspace_dir, + additional_prompt, + debug, + } + } + + pub fn run(&self, selected_repo_names: &[String]) -> Result<()> { + let selected_repos = self.select_repositories(selected_repo_names)?; + + for repo in selected_repos { + self.process_repository(repo)?; + } + + Ok(()) + } + + fn select_repositories(&self, names: &[String]) -> Result> { + if !names.is_empty() { + // Filter to specified repos + if self.debug { + eprintln!("Filtering to specified repos: {:?}", names); + } + + names + .iter() + .map(|repo_name| { + self.repos + .iter() + .find(|r| r.name == *repo_name) + .ok_or_else(|| { + anyhow::anyhow!( + "Repository '{}' not found in filtered context. Available repos: {}", + repo_name, + self.repos + .iter() + .map(|r| r.name.as_str()) + .collect::>() + .join(", ") + ) + }) + }) + .collect::>>() + } else if self.repos.len() == 1 { + // Single repo in context, use it + Ok(vec![&self.repos[0]]) + } else if self.repos.is_empty() { + anyhow::bail!( + "No repositories in filtered context. Use tags (-t/--tag) to filter, or specify repository names as arguments." + ); + } else { + // Multiple repos in context, require explicit selection + anyhow::bail!( + "Multiple repositories match the filter ({}). 
Please specify which repository to fix:\n repos fix --ticket {}\n\nAvailable repositories:\n{}", + self.repos.len(), + self.ticket, + self.repos + .iter() + .map(|r| format!(" - {}", r.name)) + .collect::>() + .join("\n") + ); + } + } + + fn process_repository(&self, repo: &Repository) -> Result<()> { + self.print_header(); + + // Step 1: Fetch JIRA ticket + let jira_ticket = self.fetch_jira_ticket()?; + + // Step 2: Setup workspace + let workspace_manager = self.setup_workspace(&jira_ticket.key)?; + let ticket_dir = workspace_manager.get_ticket_dir(); + + // Step 3: Setup repository + let repo_dir = self.setup_repository(repo, workspace_manager)?; + + // Step 4: Analyze project + let analysis = self.analyze_project(&repo_dir)?; + + // Step 5: Generate prompts and context + self.generate_artifacts(&jira_ticket, &analysis, &ticket_dir, repo, &repo_dir)?; + + // Step 6: Run cursor-agent + let agent_runner = CursorAgentRunner::new()?; + self.run_agent(&agent_runner, &ticket_dir, &jira_ticket, &analysis)?; + + // Verify and report + self.verify_and_report(&agent_runner, &ticket_dir, &jira_ticket.key, &repo_dir)?; + + Ok(()) + } + + fn print_header(&self) { + println!("{}", "=".repeat(60)); + println!("{}", "🤖 Repos Fix - Automated JIRA Ticket Resolver".bold()); + println!("{}", "=".repeat(60)); + println!(); + } + + fn fetch_jira_ticket(&self) -> Result { + println!("{}", "Step 1: Fetching JIRA ticket...".bold().cyan()); + let (base_url, ticket_id) = parse_jira_input(&self.ticket)?; + let jira_client = JiraClient::with_base_url(base_url)?; + let ticket = jira_client.get_ticket(&ticket_id)?; + + println!( + " {} Ticket: {} - {}", + "✓".green(), + ticket.key, + ticket.title + ); + println!(" {} Priority: {}", "✓".green(), ticket.priority); + println!( + " {} Attachments: {}", + "✓".green(), + ticket.attachments.len() + ); + println!(); + + Ok(ticket) + } + + fn setup_workspace(&self, ticket_id: &str) -> Result { + println!("{}", "Step 2: Setting up workspace...".bold().cyan()); + let workspace_manager = + WorkspaceManager::new(self.workspace_dir.as_deref(), ticket_id.to_string()); + workspace_manager.setup()?; + let ticket_dir = workspace_manager.get_ticket_dir(); + println!(" {} Workspace: {}", "✓".green(), ticket_dir.display()); + println!(); + + Ok(workspace_manager) + } + + fn setup_repository( + &self, + repo: &Repository, + _workspace_manager: WorkspaceManager, + ) -> Result { + println!("{}", "Step 3: Setting up repository...".bold().cyan()); + let repo_manager = RepoManager::new(repo); + let repo_dir = repo_manager.setup_repository()?; + println!(" {} Repository: {}", "✓".green(), repo_dir.display()); + println!(); + + Ok(repo_dir) + } + + fn analyze_project(&self, repo_dir: &Path) -> Result { + println!("{}", "Step 4: Analyzing project...".bold().cyan()); + let analyzer = ProjectAnalyzer::new(repo_dir); + let analysis = analyzer.analyze()?; + + println!( + " {} Platform: {}", + "✓".green(), + analysis.platform.platform_type.as_str().to_uppercase() + ); + println!( + " {} Languages: {}", + "✓".green(), + analysis + .platform + .languages + .iter() + .map(|l| l.as_str()) + .collect::>() + .join(", ") + ); + + if !analysis + .architecture_patterns + .dependency_injection + .is_empty() + { + println!( + " {} DI Framework: {}", + "✓".green(), + analysis + .architecture_patterns + .dependency_injection + .join(", ") + ); + } + + if !analysis.architecture_patterns.reactive.is_empty() { + println!( + " {} Reactive: {}", + "✓".green(), + analysis.architecture_patterns.reactive.join(", ") + ); + 
} + + if !analysis.test_structure.test_frameworks.is_empty() { + println!( + " {} Test Framework: {}", + "✓".green(), + analysis + .test_structure + .test_frameworks + .iter() + .map(|f| f.as_str()) + .collect::>() + .join(", ") + ); + } + println!(); + + Ok(analysis) + } + + fn generate_artifacts( + &self, + ticket: &JiraTicket, + analysis: &crate::analysis::ProjectAnalysis, + ticket_dir: &Path, + repo: &Repository, + repo_dir: &Path, + ) -> Result<()> { + println!( + "{}", + "Step 5: Generating context and prompts...".bold().cyan() + ); + + // Save context + let context = serde_json::json!({ + "ticket": ticket, + "repository": { + "name": repo.name, + "url": repo.url, + "path": repo_dir.to_string_lossy() + }, + "analysis": analysis, + "mode": if self.ask_mode { "ask" } else { "implementation" }, + "workspace": ticket_dir.to_string_lossy() + }); + + let context_str = + serde_json::to_string_pretty(&context).context("Failed to serialize context")?; + PromptGenerator::save_to_file(&context_str, ticket_dir, "mission-context.json")?; + + // Generate and save cursor prompt + let cursor_prompt = PromptGenerator::generate_cursor_prompt( + ticket, + analysis, + self.additional_prompt.as_deref(), + )?; + PromptGenerator::save_to_file(&cursor_prompt, ticket_dir, "cursor_prompt.md")?; + + // Generate and save cursorrules + let cursorrules = PromptGenerator::generate_cursorrules(ticket, analysis, self.ask_mode)?; + PromptGenerator::save_to_file(&cursorrules, ticket_dir, ".cursorrules")?; + + // Generate agent prompt + let agent_prompt = PromptGenerator::generate_agent_prompt( + ticket, + analysis, + self.ask_mode, + self.additional_prompt.as_deref(), + )?; + PromptGenerator::save_to_file(&agent_prompt, ticket_dir, "agent_prompt.md")?; + + println!(); + Ok(()) + } + + fn run_agent( + &self, + agent_runner: &CursorAgentRunner, + ticket_dir: &Path, + ticket: &JiraTicket, + analysis: &crate::analysis::ProjectAnalysis, + ) -> Result<()> { + println!("{}", "Step 6: Running cursor-agent...".bold().cyan()); + + let agent_prompt = PromptGenerator::generate_agent_prompt( + ticket, + analysis, + self.ask_mode, + self.additional_prompt.as_deref(), + )?; + agent_runner.run_with_retry(ticket_dir, &agent_prompt, self.ask_mode, 3)?; + + Ok(()) + } + + fn verify_and_report( + &self, + agent_runner: &CursorAgentRunner, + ticket_dir: &Path, + ticket_id: &str, + repo_dir: &Path, + ) -> Result<()> { + if agent_runner.verify_solution(ticket_dir)? 
{ + println!(); + println!("{}", "=".repeat(60)); + println!("{}", "✅ Task completed successfully!".bold().green()); + println!("{}", "=".repeat(60)); + println!(); + println!("📁 Workspace: {}", ticket_dir.display()); + println!("🌿 Branch: {}", ticket_id); + println!("💻 Repository: {}", repo_dir.display()); + println!(); + println!("📋 Generated files:"); + println!(" • .cursorrules - Agent behavior rules"); + println!(" • mission-context.json - Complete analysis data"); + println!(" • cursor_prompt.md - Implementation guidelines, the 'rulebook' for Cursor"); + println!(" • agent_prompt.md - The 'mission' for Cursor Agent"); + println!(" • SOLUTION_SUMMARY.md - Solution details"); + println!(); + } else { + eprintln!("{}", "⚠️ Solution incomplete or not verified".yellow()); + eprintln!( + "Check the workspace for partial results: {}", + ticket_dir.display() + ); + } + + Ok(()) + } +} diff --git a/plugins/repos-fix/src/workspace.rs b/plugins/repos-fix/src/workspace.rs new file mode 100644 index 0000000..c95b5d1 --- /dev/null +++ b/plugins/repos-fix/src/workspace.rs @@ -0,0 +1,56 @@ +use anyhow::{Context, Result}; +use repos::Repository; +use std::fs; +use std::path::{Path, PathBuf}; + +pub struct WorkspaceManager { + workspace_root: PathBuf, +} + +impl WorkspaceManager { + pub fn new(workspace_root: Option<&Path>, ticket_id: String) -> Self { + let workspace_root = workspace_root + .map(|path| path.to_path_buf()) + .unwrap_or_else(|| PathBuf::from("workspace").join("fix").join(&ticket_id)); + + Self { workspace_root } + } + + pub fn setup(&self) -> Result<()> { + fs::create_dir_all(&self.workspace_root).context("Failed to create workspace directory")?; + Ok(()) + } + + pub fn get_ticket_dir(&self) -> PathBuf { + self.workspace_root.clone() + } +} + +pub struct RepoManager<'a> { + repo: &'a Repository, +} + +impl<'a> RepoManager<'a> { + pub fn new(repo: &'a Repository) -> Self { + Self { repo } + } + + pub fn setup_repository(&self) -> Result { + let path = self + .repo + .path + .as_ref() + .context("Repository path is missing in plugin context")?; + let repo_dir = Path::new(path); + + if !repo_dir.exists() || !repo_dir.join(".git").exists() { + anyhow::bail!( + "Repository path is not a git checkout: {}", + repo_dir.display() + ); + } + + println!("Using repository from core context: {}", repo_dir.display()); + Ok(repo_dir.to_path_buf()) + } +} From 1542c1da73680bcbef0705e8cee3e38fec713048 Mon Sep 17 00:00:00 2001 From: codcod Date: Sun, 8 Feb 2026 10:37:12 +0100 Subject: [PATCH 2/6] feat: prompt improvements --- .gitignore | 2 +- plugins/repos-fix/README.md | 4 + plugins/repos-fix/src/agent.rs | 123 +++++++++++++++++++++++++++++- plugins/repos-fix/src/jira.rs | 95 ++++++++++++++++++++++- plugins/repos-fix/src/main.rs | 5 ++ plugins/repos-fix/src/prompt.rs | 31 ++++++++ plugins/repos-fix/src/workflow.rs | 6 +- 7 files changed, 260 insertions(+), 6 deletions(-) diff --git a/.gitignore b/.gitignore index 8ed0fd3..8c4d598 100644 --- a/.gitignore +++ b/.gitignore @@ -33,4 +33,4 @@ config.yaml tarpaulin-report.html test-*/ tests/test-recipes.yaml -.link-to-xdg-home-config \ No newline at end of file +.link-to-xdg-config-home \ No newline at end of file diff --git a/plugins/repos-fix/README.md b/plugins/repos-fix/README.md index 2481260..09e9302 100644 --- a/plugins/repos-fix/README.md +++ b/plugins/repos-fix/README.md @@ -117,6 +117,7 @@ When you run `repos fix`, the following steps occur: 6. **Run Cursor Agent**: - Executes `cursor-agent` with `--force` and `--print` flags. 
- **Auto-Retry**: If the agent fails (e.g., build fails, tests fail), it automatically retries up to **3 times**, feeding the error message back to the AI. + - **Workflow Switch**: CVE/security tickets use a safe upgrade protocol (no vulnerability reproduction); bug fixes require a repro-first flow. 7. **Validate**: The agent validates the fix by running build and test commands detected during analysis. 8. **Report**: Generates `SOLUTION_SUMMARY.md` with implementation details. @@ -130,9 +131,12 @@ workspace/fix/MAINT-1234/ ├── mission-context.json # Complete project analysis & ticket data ├── cursor_prompt.md # The "rulebook" for Cursor ├── agent_prompt.md # The specific mission prompt +├── ANALYSIS.md # Required pre-change analysis (root cause & plan) ├── SOLUTION_SUMMARY.md # Final report of the implemented solution ``` +Note: `ANALYSIS.md` is expected to be filled in by the agent before any changes. + ## Supported Platforms The plugin automatically detects and supports: diff --git a/plugins/repos-fix/src/agent.rs b/plugins/repos-fix/src/agent.rs index 8c29f65..f4470ea 100644 --- a/plugins/repos-fix/src/agent.rs +++ b/plugins/repos-fix/src/agent.rs @@ -4,6 +4,7 @@ use std::fs; use std::io::{BufRead, BufReader}; use std::path::Path; use std::process::{Command, Stdio}; +use std::sync::{Arc, Mutex}; use std::thread; pub struct CursorAgentRunner { @@ -64,22 +65,28 @@ impl CursorAgentRunner { .stderr(Stdio::piped()); let mut child = cmd.spawn().context("Failed to spawn cursor-agent")?; + let stdout_tail = Arc::new(Mutex::new(Vec::new())); + let stderr_tail = Arc::new(Mutex::new(Vec::new())); let stdout_handle = child.stdout.take().map(|stdout| { + let stdout_tail = Arc::clone(&stdout_tail); thread::spawn(move || { let reader = BufReader::new(stdout); for line in reader.lines().map_while(Result::ok) { // Show progress indicators Self::display_progress(&line, ask); + Self::capture_tail_line(&stdout_tail, line); } }) }); let stderr_handle = child.stderr.take().map(|stderr| { + let stderr_tail = Arc::clone(&stderr_tail); thread::spawn(move || { let reader = BufReader::new(stderr); for line in reader.lines().map_while(Result::ok) { eprintln!("{}", line); + Self::capture_tail_line(&stderr_tail, line); } }) }); @@ -105,7 +112,30 @@ impl CursorAgentRunner { println!("📄 Check SOLUTION_SUMMARY.md for details"); } } else { - anyhow::bail!("cursor-agent exited with status: {}", status); + let stdout_tail = stdout_tail + .lock() + .map(|lines| lines.clone()) + .unwrap_or_default(); + let stderr_tail = stderr_tail + .lock() + .map(|lines| lines.clone()) + .unwrap_or_default(); + let mut tail_summary = String::new(); + + if !stdout_tail.is_empty() { + tail_summary.push_str("\n--- stdout (tail) ---\n"); + tail_summary.push_str(&stdout_tail.join("\n")); + } + if !stderr_tail.is_empty() { + tail_summary.push_str("\n--- stderr (tail) ---\n"); + tail_summary.push_str(&stderr_tail.join("\n")); + } + + anyhow::bail!( + "cursor-agent exited with status: {}{}", + status, + tail_summary + ); } println!("{}", "=".repeat(60)); @@ -131,6 +161,17 @@ impl CursorAgentRunner { } } + fn capture_tail_line(buffer: &Arc>>, line: String) { + const MAX_LINES: usize = 80; + if let Ok(mut lines) = buffer.lock() { + if lines.len() >= MAX_LINES { + let overflow = lines.len() + 1 - MAX_LINES; + lines.drain(0..overflow); + } + lines.push(line); + } + } + pub fn run_with_retry( &self, workspace_dir: &Path, @@ -179,6 +220,10 @@ impl CursorAgentRunner { } pub fn verify_solution(&self, workspace_dir: &Path) -> Result { + if 
!self.verify_analysis(workspace_dir)? { + return Ok(false); + } + let solution_file = workspace_dir.join("SOLUTION_SUMMARY.md"); if !solution_file.exists() { @@ -197,4 +242,80 @@ impl CursorAgentRunner { println!("✅ SOLUTION_SUMMARY.md created successfully"); Ok(true) } + + fn verify_analysis(&self, workspace_dir: &Path) -> Result { + let analysis_file = workspace_dir.join("ANALYSIS.md"); + + if !analysis_file.exists() { + eprintln!("⚠️ ANALYSIS.md not found"); + return Ok(false); + } + + let content = + fs::read_to_string(&analysis_file).context("Failed to read ANALYSIS.md")?; + + if content.trim().is_empty() { + eprintln!("⚠️ ANALYSIS.md is empty"); + return Ok(false); + } + + let required_sections = [ + "- Root cause hypothesis:", + "- Target files/components:", + "- Plan:", + ]; + let lines: Vec<&str> = content.lines().collect(); + let mut all_sections_present = true; + + for section in required_sections { + let mut found = false; + let mut filled = false; + + for (index, line) in lines.iter().enumerate() { + let trimmed = line.trim(); + if trimmed.starts_with(section) { + found = true; + let remainder = trimmed[section.len()..].trim(); + if !remainder.is_empty() { + filled = true; + break; + } + + for next_line in lines.iter().skip(index + 1) { + let next_trim = next_line.trim(); + if next_trim.is_empty() { + continue; + } + if required_sections + .iter() + .any(|label| next_trim.starts_with(label)) + { + break; + } + filled = true; + break; + } + break; + } + } + + if !found { + eprintln!("⚠️ ANALYSIS.md missing section: {}", section); + all_sections_present = false; + continue; + } + + if !filled { + eprintln!("⚠️ ANALYSIS.md section not filled: {}", section); + all_sections_present = false; + } + } + + if !all_sections_present { + return Ok(false); + } + + println!("✅ ANALYSIS.md created successfully"); + Ok(true) + } } diff --git a/plugins/repos-fix/src/jira.rs b/plugins/repos-fix/src/jira.rs index 57cb239..075a637 100644 --- a/plugins/repos-fix/src/jira.rs +++ b/plugins/repos-fix/src/jira.rs @@ -10,6 +10,7 @@ pub struct JiraTicket { pub key: String, pub title: String, pub description: String, + pub labels: Vec, pub status: String, pub priority: String, pub issue_type: String, @@ -18,6 +19,7 @@ pub struct JiraTicket { pub created: String, pub updated: String, pub attachments: Vec, + pub comments: Vec, } #[derive(Debug, Clone, Serialize, Deserialize)] @@ -29,6 +31,13 @@ pub struct JiraAttachment { pub source: String, } +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct JiraComment { + pub author: String, + pub created: String, + pub body: String, +} + pub struct JiraClient { client: Client, base_url: String, @@ -66,7 +75,7 @@ impl JiraClient { Ok(Self { client, base_url }) } - pub fn get_ticket(&self, ticket_id: &str) -> Result { + pub fn get_ticket(&self, ticket_id: &str, num_comments: usize) -> Result { let url = format!("{}/rest/api/3/issue/{}", self.base_url, ticket_id); let response = self @@ -85,10 +94,10 @@ impl JiraClient { let ticket_data: serde_json::Value = response.json().context("Failed to parse JIRA response")?; - self.parse_ticket(ticket_data) + self.parse_ticket(ticket_data, num_comments) } - fn parse_ticket(&self, data: serde_json::Value) -> Result { + fn parse_ticket(&self, data: serde_json::Value, num_comments: usize) -> Result { let fields = data .get("fields") .context("Missing 'fields' in JIRA response")?; @@ -110,6 +119,20 @@ impl JiraClient { // Clean HTML from description let description = html2text::from_read(description.as_bytes(), 80); + let 
labels = fields + .get("labels") + .and_then(|labels| labels.as_array()) + .map(|labels| { + labels + .iter() + .filter_map(|label| label.as_str()) + .map(|label| label.to_string()) + .collect::>() + }) + .unwrap_or_default(); + + let comments = Self::parse_comments(fields, rendered_fields, num_comments); + let mut attachments = Vec::new(); // Parse direct JIRA attachments @@ -138,6 +161,7 @@ impl JiraClient { .unwrap_or("") .to_string(), description, + labels, status: fields .get("status") .and_then(|s| s.get("name")) @@ -179,6 +203,7 @@ impl JiraClient { .unwrap_or("") .to_string(), attachments, + comments, }) } @@ -203,6 +228,70 @@ impl JiraClient { source: "jira".to_string(), }) } + + fn parse_comments( + fields: &serde_json::Value, + rendered_fields: Option<&serde_json::Value>, + num_comments: usize, + ) -> Vec { + let Some(comment_array) = fields + .get("comment") + .and_then(|comment| comment.get("comments")) + .and_then(|comments| comments.as_array()) + else { + return Vec::new(); + }; + + let rendered_comments = rendered_fields + .and_then(|rendered| rendered.get("comment")) + .and_then(|comment| comment.get("comments")) + .and_then(|comments| comments.as_array()); + + if num_comments == 0 { + return Vec::new(); + } + + let start_index = comment_array.len().saturating_sub(num_comments); + comment_array + .iter() + .enumerate() + .skip(start_index) + .filter_map(|(index, comment)| { + let rendered_body = rendered_comments + .and_then(|comments| comments.get(index)) + .and_then(|comment| comment.get("body")) + .and_then(|body| body.as_str()); + + let raw_body = rendered_body + .or_else(|| comment.get("body").and_then(|body| body.as_str())) + .unwrap_or(""); + + let body = html2text::from_read(raw_body.as_bytes(), 80).trim().to_string(); + if body.is_empty() { + return None; + } + + let author = comment + .get("author") + .and_then(|author| author.get("displayName")) + .and_then(|name| name.as_str()) + .unwrap_or("Unknown") + .to_string(); + + let created = comment + .get("created") + .and_then(|created| created.as_str()) + .unwrap_or("") + .to_string(); + + Some(JiraComment { + author, + created, + body, + }) + }) + .collect() + } } pub fn parse_jira_input(input: &str) -> Result<(String, String)> { diff --git a/plugins/repos-fix/src/main.rs b/plugins/repos-fix/src/main.rs index a4443b9..013ca20 100644 --- a/plugins/repos-fix/src/main.rs +++ b/plugins/repos-fix/src/main.rs @@ -34,6 +34,10 @@ struct Args { /// Additional prompt to append to the generated prompt #[arg(short, long)] prompt: Option, + + /// Number of recent JIRA comments to include in prompts + #[arg(long, default_value_t = 10)] + num_comments: usize, } fn main() -> Result<()> { @@ -59,6 +63,7 @@ fn main() -> Result<()> { args.ask, args.workspace, args.prompt, + args.num_comments, debug, ); diff --git a/plugins/repos-fix/src/prompt.rs b/plugins/repos-fix/src/prompt.rs index 74bf12c..bfbde9d 100644 --- a/plugins/repos-fix/src/prompt.rs +++ b/plugins/repos-fix/src/prompt.rs @@ -82,6 +82,7 @@ impl PromptGenerator { let tmpl = env.get_template("cursor_prompt")?; let platform_guidelines = Self::get_platform_guidelines(&analysis.platform.platform_type); + let is_security_task = Self::is_security_task(ticket); let ctx = context! 
{ platform_emoji => analysis.platform.platform_type.emoji(), @@ -129,6 +130,7 @@ impl PromptGenerator { main_build => analysis.build_commands.main_build, test_compile => analysis.build_commands.test_compile, test_run => analysis.build_commands.test_run, + is_security_task => is_security_task, additional_prompt => additional_prompt, }; @@ -148,6 +150,7 @@ impl PromptGenerator { } else { "2" }; + let is_security_task = Self::is_security_task(ticket); let ctx = context! { mode_title => if ask_mode { "ASK Mode Analysis" } else { "Automated Maintenance Assistant" }, @@ -158,6 +161,7 @@ impl PromptGenerator { test_compile => analysis.build_commands.test_compile, test_run => analysis.build_commands.test_run, test_step_num => test_step_num, + is_security_task => is_security_task, }; Ok(tmpl.render(ctx)?) @@ -177,6 +181,7 @@ impl PromptGenerator { } else { "7" }; + let is_security_task = Self::is_security_task(ticket); let ctx = context! { ask_mode => ask_mode, @@ -185,6 +190,7 @@ impl PromptGenerator { test_compile => analysis.build_commands.test_compile, test_run => analysis.build_commands.test_run, test_run_step => test_run_step, + is_security_task => is_security_task, additional_prompt => additional_prompt, }; @@ -219,4 +225,29 @@ impl PromptGenerator { PlatformType::Unknown => String::new(), } } + + fn is_security_task(ticket: &JiraTicket) -> bool { + let mut haystack = format!( + "{} {} {}", + ticket.title, ticket.description, ticket.issue_type + ) + .to_lowercase(); + + for label in &ticket.labels { + haystack.push(' '); + haystack.push_str(&label.to_lowercase()); + } + + let security_keywords = ["cve-", "vulnerability", "security", "cwe-", "cvss"]; + let upgrade_keywords = ["upgrade", "update", "bump", "patch", "dependency"]; + + let has_security_keyword = security_keywords + .iter() + .any(|keyword| haystack.contains(keyword)); + let has_upgrade_keyword = upgrade_keywords + .iter() + .any(|keyword| haystack.contains(keyword)); + + has_security_keyword || (has_upgrade_keyword && haystack.contains("cve")) + } } diff --git a/plugins/repos-fix/src/workflow.rs b/plugins/repos-fix/src/workflow.rs index 647a7be..df3e91b 100644 --- a/plugins/repos-fix/src/workflow.rs +++ b/plugins/repos-fix/src/workflow.rs @@ -14,6 +14,7 @@ pub struct FixWorkflow { ask_mode: bool, workspace_dir: Option, additional_prompt: Option, + num_comments: usize, debug: bool, } @@ -24,6 +25,7 @@ impl FixWorkflow { ask_mode: bool, workspace_dir: Option, additional_prompt: Option, + num_comments: usize, debug: bool, ) -> Self { Self { @@ -32,6 +34,7 @@ impl FixWorkflow { ask_mode, workspace_dir, additional_prompt, + num_comments, debug, } } @@ -134,7 +137,7 @@ impl FixWorkflow { println!("{}", "Step 1: Fetching JIRA ticket...".bold().cyan()); let (base_url, ticket_id) = parse_jira_input(&self.ticket)?; let jira_client = JiraClient::with_base_url(base_url)?; - let ticket = jira_client.get_ticket(&ticket_id)?; + let ticket = jira_client.get_ticket(&ticket_id, self.num_comments)?; println!( " {} Ticket: {} - {}", @@ -339,6 +342,7 @@ impl FixWorkflow { println!(" • mission-context.json - Complete analysis data"); println!(" • cursor_prompt.md - Implementation guidelines, the 'rulebook' for Cursor"); println!(" • agent_prompt.md - The 'mission' for Cursor Agent"); + println!(" • ANALYSIS.md - Pre-change analysis and plan"); println!(" • SOLUTION_SUMMARY.md - Solution details"); println!(); } else { From 8d02aa9ca8085ffe8f201ccf3782f36e07f957e1 Mon Sep 17 00:00:00 2001 From: codcod Date: Sun, 8 Feb 2026 10:54:24 +0100 Subject: 
[PATCH 3/6] feat: add knowledge base processing --- justfile | 6 + plugins/repos-fix/README.md | 8 +- plugins/repos-fix/src/agent.rs | 3 +- .../repos-fix/src/analysis/dependencies.rs | 18 +- plugins/repos-fix/src/analysis/index.rs | 8 +- plugins/repos-fix/src/analysis/mod.rs | 6 +- plugins/repos-fix/src/analysis/platform.rs | 2 +- plugins/repos-fix/src/jira.rs | 11 +- plugins/repos-fix/src/main.rs | 5 + plugins/repos-fix/src/prompt.rs | 32 +- .../repos-fix/src/templates/agent_prompt.md | 6 + .../repos-fix/src/templates/cursor_prompt.md | 18 ++ plugins/repos-fix/src/workflow.rs | 288 +++++++++++++++++- 13 files changed, 369 insertions(+), 42 deletions(-) diff --git a/justfile b/justfile index 08f8317..06ac8d4 100644 --- a/justfile +++ b/justfile @@ -16,6 +16,12 @@ build-plugins: cargo build --release -p repos-review cargo build --release -p repos-fix +# Format and lint the code +[group('qa')] +fmt: + cargo fmt --all + cargo clippy --all-targets --all-features -- -D warnings + # Run tests [group('qa')] test: diff --git a/plugins/repos-fix/README.md b/plugins/repos-fix/README.md index 09e9302..352b754 100644 --- a/plugins/repos-fix/README.md +++ b/plugins/repos-fix/README.md @@ -104,6 +104,7 @@ This generates a `SOLUTION_SUMMARY.md` with the proposed plan. - `--workspace `: Specify a custom directory for generated artifacts (default: `workspace/fix/`). - `--prompt "..."`: Append extra instructions to the AI agent (e.g., "Use Java 17 features"). +- `--knowledge-dir `: Copy markdown knowledge base files into the workspace and inline selected content into prompts. ## Workflow @@ -114,12 +115,13 @@ When you run `repos fix`, the following steps occur: 3. **Analyze Project**: detailed inspection of platform, languages, frameworks, dependencies, and test setup. 4. **Generate Context**: Creates `mission-context.json` with all analysis data. 5. **Generate Prompts**: Creates `.cursorrules` and `cursor_prompt.md` tailored to the specific project. -6. **Run Cursor Agent**: +6. **Include Knowledge Base (optional)**: Copies markdown docs into `workspace/fix//knowledge/` and inlines selected docs into the prompt. +7. **Run Cursor Agent**: - Executes `cursor-agent` with `--force` and `--print` flags. - **Auto-Retry**: If the agent fails (e.g., build fails, tests fail), it automatically retries up to **3 times**, feeding the error message back to the AI. - **Workflow Switch**: CVE/security tickets use a safe upgrade protocol (no vulnerability reproduction); bug fixes require a repro-first flow. -7. **Validate**: The agent validates the fix by running build and test commands detected during analysis. -8. **Report**: Generates `SOLUTION_SUMMARY.md` with implementation details. +8. **Validate**: The agent validates the fix by running build and test commands detected during analysis. +9. **Report**: Generates `SOLUTION_SUMMARY.md` with implementation details. 
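
Putting the options together, a typical invocation looks like the sketch below. The repository name, knowledge directory, and extra prompt are placeholders; the flags themselves (`--ticket`, `--knowledge-dir`, `--num-comments`, `--prompt`) match the CLI defined in `main.rs`:

```bash
# Hypothetical example: fix MAINT-1234 in the 'billing-service' repo,
# inlining up to 5 recent JIRA comments and a local knowledge base.
repos fix billing-service \
  --ticket MAINT-1234 \
  --knowledge-dir ./docs/knowledge \
  --num-comments 5 \
  --prompt "Target Java 17"
```

Run it through the `repos` host command so the plugin receives the injected repository context.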
## Output diff --git a/plugins/repos-fix/src/agent.rs b/plugins/repos-fix/src/agent.rs index f4470ea..6f66947 100644 --- a/plugins/repos-fix/src/agent.rs +++ b/plugins/repos-fix/src/agent.rs @@ -251,8 +251,7 @@ impl CursorAgentRunner { return Ok(false); } - let content = - fs::read_to_string(&analysis_file).context("Failed to read ANALYSIS.md")?; + let content = fs::read_to_string(&analysis_file).context("Failed to read ANALYSIS.md")?; if content.trim().is_empty() { eprintln!("⚠️ ANALYSIS.md is empty"); diff --git a/plugins/repos-fix/src/analysis/dependencies.rs b/plugins/repos-fix/src/analysis/dependencies.rs index 9bf86e1..d3e7d20 100644 --- a/plugins/repos-fix/src/analysis/dependencies.rs +++ b/plugins/repos-fix/src/analysis/dependencies.rs @@ -66,11 +66,7 @@ impl<'a> DependencyAnalyzer<'a> { .map(|line| line.trim().to_string()) .collect(); - if deps.is_empty() { - None - } else { - Some(deps) - } + if deps.is_empty() { None } else { Some(deps) } } fn parse_gradle_files(&self) -> Option> { @@ -117,11 +113,7 @@ impl<'a> DependencyAnalyzer<'a> { .map(|line| line.trim().to_string()) .collect(); - if pods.is_empty() { - None - } else { - Some(pods) - } + if pods.is_empty() { None } else { Some(pods) } } fn parse_package_json(&self) -> Option> { @@ -141,10 +133,6 @@ impl<'a> DependencyAnalyzer<'a> { } } - if deps.is_empty() { - None - } else { - Some(deps) - } + if deps.is_empty() { None } else { Some(deps) } } } diff --git a/plugins/repos-fix/src/analysis/index.rs b/plugins/repos-fix/src/analysis/index.rs index 6e9bc91..ab9260a 100644 --- a/plugins/repos-fix/src/analysis/index.rs +++ b/plugins/repos-fix/src/analysis/index.rs @@ -1,5 +1,5 @@ use anyhow::Result; -use std::collections::{HashSet, HashMap}; +use std::collections::{HashMap, HashSet}; use std::path::{Path, PathBuf}; use walkdir::WalkDir; @@ -73,9 +73,9 @@ impl RepoIndex { /// Check if any file contains a pattern in its path pub fn has_path_pattern(&self, pattern: &str) -> bool { - self.files.iter().any(|p| { - p.to_string_lossy().contains(pattern) - }) + self.files + .iter() + .any(|p| p.to_string_lossy().contains(pattern)) } /// Get all files with a specific extension diff --git a/plugins/repos-fix/src/analysis/mod.rs b/plugins/repos-fix/src/analysis/mod.rs index 7e4fb89..54e09ed 100644 --- a/plugins/repos-fix/src/analysis/mod.rs +++ b/plugins/repos-fix/src/analysis/mod.rs @@ -52,9 +52,11 @@ impl ProjectAnalyzer { // Analyze structure and patterns let structure_analyzer = StructureAnalyzer::new(&index, &self.repo_path); - let architecture_patterns = structure_analyzer.analyze_architecture(&platform.platform_type); + let architecture_patterns = + structure_analyzer.analyze_architecture(&platform.platform_type); let test_structure = structure_analyzer.analyze_test_structure(&platform.platform_type); - let project_structure = structure_analyzer.analyze_project_structure(&platform.platform_type); + let project_structure = + structure_analyzer.analyze_project_structure(&platform.platform_type); let build_commands = structure_analyzer.determine_build_commands(&platform.platform_type); Ok(ProjectAnalysis { diff --git a/plugins/repos-fix/src/analysis/platform.rs b/plugins/repos-fix/src/analysis/platform.rs index 9bab1f4..63bd650 100644 --- a/plugins/repos-fix/src/analysis/platform.rs +++ b/plugins/repos-fix/src/analysis/platform.rs @@ -1,7 +1,7 @@ use crate::analysis::index::RepoIndex; use crate::domain::{Framework, Language, PlatformType}; -use std::path::Path; use std::fs; +use std::path::Path; #[derive(Debug, Clone, serde::Serialize, 
serde::Deserialize)] pub struct PlatformInfo { diff --git a/plugins/repos-fix/src/jira.rs b/plugins/repos-fix/src/jira.rs index 075a637..141b564 100644 --- a/plugins/repos-fix/src/jira.rs +++ b/plugins/repos-fix/src/jira.rs @@ -1,6 +1,6 @@ use anyhow::{Context, Result}; use reqwest::blocking::Client; -use reqwest::header::{HeaderMap, HeaderValue, ACCEPT, CONTENT_TYPE}; +use reqwest::header::{ACCEPT, CONTENT_TYPE, HeaderMap, HeaderValue}; use serde::{Deserialize, Serialize}; use std::env; @@ -266,7 +266,9 @@ impl JiraClient { .or_else(|| comment.get("body").and_then(|body| body.as_str())) .unwrap_or(""); - let body = html2text::from_read(raw_body.as_bytes(), 80).trim().to_string(); + let body = html2text::from_read(raw_body.as_bytes(), 80) + .trim() + .to_string(); if body.is_empty() { return None; } @@ -323,5 +325,8 @@ pub fn parse_jira_input(input: &str) -> Result<(String, String)> { let jira_url = env::var("JIRA_URL") .context("JIRA_URL not set. Provide full URL or set JIRA_URL environment variable")?; - Ok((jira_url.trim_end_matches('/').to_string(), input.to_string())) + Ok(( + jira_url.trim_end_matches('/').to_string(), + input.to_string(), + )) } diff --git a/plugins/repos-fix/src/main.rs b/plugins/repos-fix/src/main.rs index 013ca20..ab02524 100644 --- a/plugins/repos-fix/src/main.rs +++ b/plugins/repos-fix/src/main.rs @@ -35,6 +35,10 @@ struct Args { #[arg(short, long)] prompt: Option, + /// Directory containing markdown knowledge base files + #[arg(long)] + knowledge_dir: Option, + /// Number of recent JIRA comments to include in prompts #[arg(long, default_value_t = 10)] num_comments: usize, @@ -63,6 +67,7 @@ fn main() -> Result<()> { args.ask, args.workspace, args.prompt, + args.knowledge_dir, args.num_comments, debug, ); diff --git a/plugins/repos-fix/src/prompt.rs b/plugins/repos-fix/src/prompt.rs index bfbde9d..298076f 100644 --- a/plugins/repos-fix/src/prompt.rs +++ b/plugins/repos-fix/src/prompt.rs @@ -10,6 +10,14 @@ use std::sync::OnceLock; static TEMPLATE_ENV: OnceLock> = OnceLock::new(); +#[derive(Clone, Debug, Default)] +pub struct KnowledgeContext { + pub dir_name: String, + pub files: Vec, + pub inline_files: Vec, + pub inline_content: Option, +} + fn template_override_dir() -> Option { let xdg_config = env::var_os("XDG_CONFIG_HOME") .filter(|value| !value.is_empty()) @@ -52,19 +60,22 @@ fn get_template_env() -> &'static Environment<'static> { // Load templates from embedded strings env.add_template( "cursor_prompt", - load_template_source("cursor_prompt.md", include_str!("templates/cursor_prompt.md")), + load_template_source( + "cursor_prompt.md", + include_str!("templates/cursor_prompt.md"), + ), ) - .expect("Failed to add cursor_prompt template"); + .expect("Failed to add cursor_prompt template"); env.add_template( "cursorrules", load_template_source("cursorrules.md", include_str!("templates/cursorrules.md")), ) - .expect("Failed to add cursorrules template"); + .expect("Failed to add cursorrules template"); env.add_template( "agent_prompt", load_template_source("agent_prompt.md", include_str!("templates/agent_prompt.md")), ) - .expect("Failed to add agent_prompt template"); + .expect("Failed to add agent_prompt template"); env }) @@ -77,12 +88,14 @@ impl PromptGenerator { ticket: &JiraTicket, analysis: &ProjectAnalysis, additional_prompt: Option<&str>, + knowledge: Option<&KnowledgeContext>, ) -> Result { let env = get_template_env(); let tmpl = env.get_template("cursor_prompt")?; let platform_guidelines = 
Self::get_platform_guidelines(&analysis.platform.platform_type); let is_security_task = Self::is_security_task(ticket); + let has_knowledge_base = knowledge.map(|ctx| !ctx.files.is_empty()).unwrap_or(false); let ctx = context! { platform_emoji => analysis.platform.platform_type.emoji(), @@ -132,6 +145,11 @@ impl PromptGenerator { test_run => analysis.build_commands.test_run, is_security_task => is_security_task, additional_prompt => additional_prompt, + has_knowledge_base => has_knowledge_base, + knowledge_base_dir => knowledge.map(|ctx| ctx.dir_name.as_str()).unwrap_or(""), + knowledge_base_files => knowledge.map(|ctx| ctx.files.clone()).unwrap_or_default(), + knowledge_base_inline_files => knowledge.map(|ctx| ctx.inline_files.clone()).unwrap_or_default(), + knowledge_base_content => knowledge.and_then(|ctx| ctx.inline_content.as_deref()), }; Ok(tmpl.render(ctx)?) @@ -172,6 +190,7 @@ impl PromptGenerator { analysis: &ProjectAnalysis, ask_mode: bool, additional_prompt: Option<&str>, + knowledge: Option<&KnowledgeContext>, ) -> Result { let env = get_template_env(); let tmpl = env.get_template("agent_prompt")?; @@ -182,6 +201,7 @@ impl PromptGenerator { "7" }; let is_security_task = Self::is_security_task(ticket); + let has_knowledge_base = knowledge.map(|ctx| !ctx.files.is_empty()).unwrap_or(false); let ctx = context! { ask_mode => ask_mode, @@ -192,6 +212,10 @@ impl PromptGenerator { test_run_step => test_run_step, is_security_task => is_security_task, additional_prompt => additional_prompt, + has_knowledge_base => has_knowledge_base, + knowledge_base_dir => knowledge.map(|ctx| ctx.dir_name.as_str()).unwrap_or(""), + knowledge_base_files => knowledge.map(|ctx| ctx.files.clone()).unwrap_or_default(), + knowledge_base_inline_files => knowledge.map(|ctx| ctx.inline_files.clone()).unwrap_or_default(), }; Ok(tmpl.render(ctx)?) diff --git a/plugins/repos-fix/src/templates/agent_prompt.md b/plugins/repos-fix/src/templates/agent_prompt.md index 147410c..12d8232 100644 --- a/plugins/repos-fix/src/templates/agent_prompt.md +++ b/plugins/repos-fix/src/templates/agent_prompt.md @@ -2,9 +2,15 @@ `SOLUTION_SUMMARY.md`. Ticket: {{ ticket.key }} - {{ ticket.title }} +{% if has_knowledge_base %} +Knowledge base: Read `{{ knowledge_base_dir }}/` in the workspace before analysis. +{% endif %} {% else %}Fix the ticket with minimal, compatible changes and tests. Ticket: {{ ticket.key }} - {{ ticket.title }} +{% if has_knowledge_base %} +Knowledge base: Read `{{ knowledge_base_dir }}/` in the workspace before changes. +{% endif %} Build: `{{ main_build }}` {% if test_compile %}Test compile: `{{ test_compile }}` {% endif %}Tests: `{{ test_run }}` diff --git a/plugins/repos-fix/src/templates/cursor_prompt.md b/plugins/repos-fix/src/templates/cursor_prompt.md index b3f26de..5d199f8 100644 --- a/plugins/repos-fix/src/templates/cursor_prompt.md +++ b/plugins/repos-fix/src/templates/cursor_prompt.md @@ -9,6 +9,24 @@ compatible changes and keep code style consistent. {{ platform_guidelines }} +{% if has_knowledge_base %} +## Domain knowledge base +Relevant platform/journey docs are available in `{{ knowledge_base_dir }}/` in +this workspace. Read these before making changes. 
+ +{% if knowledge_base_files and knowledge_base_files | length > 0 %} +Available files: +{% for file in knowledge_base_files %} +- `{{ knowledge_base_dir }}/{{ file }}` +{% endfor %} +{% endif %} + +{% if knowledge_base_content %} +### Inlined highlights (selected) +{{ knowledge_base_content }} +{% endif %} +{% endif %} + ## Build and test - **Build**: `{{ main_build }}` {% if test_compile %}- **Test compile**: `{{ test_compile }}` diff --git a/plugins/repos-fix/src/workflow.rs b/plugins/repos-fix/src/workflow.rs index df3e91b..ea0ba43 100644 --- a/plugins/repos-fix/src/workflow.rs +++ b/plugins/repos-fix/src/workflow.rs @@ -1,11 +1,14 @@ use crate::agent::CursorAgentRunner; use crate::analysis::ProjectAnalyzer; use crate::jira::{JiraClient, JiraTicket, parse_jira_input}; -use crate::prompt::PromptGenerator; +use crate::prompt::{KnowledgeContext, PromptGenerator}; use crate::workspace::{RepoManager, WorkspaceManager}; use anyhow::{Context, Result}; use colored::Colorize; use repos::Repository; +use std::collections::{HashMap, HashSet}; +use std::ffi::OsStr; +use std::fs; use std::path::{Path, PathBuf}; pub struct FixWorkflow { @@ -14,6 +17,7 @@ pub struct FixWorkflow { ask_mode: bool, workspace_dir: Option, additional_prompt: Option, + knowledge_dir: Option, num_comments: usize, debug: bool, } @@ -25,6 +29,7 @@ impl FixWorkflow { ask_mode: bool, workspace_dir: Option, additional_prompt: Option, + knowledge_dir: Option, num_comments: usize, debug: bool, ) -> Self { @@ -34,6 +39,7 @@ impl FixWorkflow { ask_mode, workspace_dir, additional_prompt, + knowledge_dir, num_comments, debug, } @@ -113,12 +119,28 @@ impl FixWorkflow { // Step 4: Analyze project let analysis = self.analyze_project(&repo_dir)?; - // Step 5: Generate prompts and context - self.generate_artifacts(&jira_ticket, &analysis, &ticket_dir, repo, &repo_dir)?; + // Step 5: Prepare knowledge base (optional) + let knowledge = self.prepare_knowledge_base(&jira_ticket, &ticket_dir)?; + + // Step 6: Generate prompts and context + self.generate_artifacts( + &jira_ticket, + &analysis, + &ticket_dir, + repo, + &repo_dir, + knowledge.as_ref(), + )?; - // Step 6: Run cursor-agent + // Step 7: Run cursor-agent let agent_runner = CursorAgentRunner::new()?; - self.run_agent(&agent_runner, &ticket_dir, &jira_ticket, &analysis)?; + self.run_agent( + &agent_runner, + &ticket_dir, + &jira_ticket, + &analysis, + knowledge.as_ref(), + )?; // Verify and report self.verify_and_report(&agent_runner, &ticket_dir, &jira_ticket.key, &repo_dir)?; @@ -252,13 +274,21 @@ impl FixWorkflow { ticket_dir: &Path, repo: &Repository, repo_dir: &Path, + knowledge: Option<&KnowledgeContext>, ) -> Result<()> { println!( "{}", - "Step 5: Generating context and prompts...".bold().cyan() + "Step 6: Generating context and prompts...".bold().cyan() ); // Save context + let knowledge_context = knowledge.map(|ctx| { + serde_json::json!({ + "dir": ctx.dir_name, + "files": ctx.files, + "inline_files": ctx.inline_files + }) + }); let context = serde_json::json!({ "ticket": ticket, "repository": { @@ -268,7 +298,8 @@ impl FixWorkflow { }, "analysis": analysis, "mode": if self.ask_mode { "ask" } else { "implementation" }, - "workspace": ticket_dir.to_string_lossy() + "workspace": ticket_dir.to_string_lossy(), + "knowledge_base": knowledge_context }); let context_str = @@ -280,6 +311,7 @@ impl FixWorkflow { ticket, analysis, self.additional_prompt.as_deref(), + knowledge, )?; PromptGenerator::save_to_file(&cursor_prompt, ticket_dir, "cursor_prompt.md")?; @@ -293,6 +325,7 @@ impl 
FixWorkflow { analysis, self.ask_mode, self.additional_prompt.as_deref(), + knowledge, )?; PromptGenerator::save_to_file(&agent_prompt, ticket_dir, "agent_prompt.md")?; @@ -306,14 +339,16 @@ impl FixWorkflow { ticket_dir: &Path, ticket: &JiraTicket, analysis: &crate::analysis::ProjectAnalysis, + knowledge: Option<&KnowledgeContext>, ) -> Result<()> { - println!("{}", "Step 6: Running cursor-agent...".bold().cyan()); + println!("{}", "Step 7: Running cursor-agent...".bold().cyan()); let agent_prompt = PromptGenerator::generate_agent_prompt( ticket, analysis, self.ask_mode, self.additional_prompt.as_deref(), + knowledge, )?; agent_runner.run_with_retry(ticket_dir, &agent_prompt, self.ask_mode, 3)?; @@ -355,4 +390,241 @@ impl FixWorkflow { Ok(()) } + + fn prepare_knowledge_base( + &self, + ticket: &JiraTicket, + ticket_dir: &Path, + ) -> Result> { + println!("{}", "Step 5: Preparing knowledge base...".bold().cyan()); + let knowledge_dir = match &self.knowledge_dir { + Some(dir) => dir, + None => { + println!(" ℹ️ No knowledge base directory provided"); + println!(); + return Ok(None); + } + }; + + let markdown_files = Self::list_markdown_files(knowledge_dir)?; + if markdown_files.is_empty() { + println!(" ⚠️ Knowledge base directory has no .md files"); + println!(); + return Ok(None); + } + + let dest_dir = ticket_dir.join("knowledge"); + fs::create_dir_all(&dest_dir) + .with_context(|| format!("Failed to create {}", dest_dir.display()))?; + + let mut file_contents = Vec::new(); + let mut copied_files = Vec::new(); + for path in markdown_files { + let filename = path + .file_name() + .and_then(OsStr::to_str) + .unwrap_or("unknown.md") + .to_string(); + fs::copy(&path, dest_dir.join(&filename)) + .with_context(|| format!("Failed to copy knowledge file {}", path.display()))?; + let content = fs::read_to_string(&path) + .with_context(|| format!("Failed to read knowledge file {}", path.display()))?; + copied_files.push(filename.clone()); + file_contents.push((filename, content)); + } + + copied_files.sort(); + file_contents.sort_by(|a, b| a.0.cmp(&b.0)); + + let selection = Self::select_inline_knowledge(ticket, &file_contents); + let inline_content = Self::build_inline_knowledge(&selection); + + println!(" {} Knowledge files: {}", "✓".green(), copied_files.len()); + if let Some(content) = &inline_content { + println!( + " {} Inlined knowledge size: {} chars", + "✓".green(), + content.len() + ); + } + println!(); + + Ok(Some(KnowledgeContext { + dir_name: "knowledge".to_string(), + files: copied_files, + inline_files: selection.iter().map(|(name, _)| name.clone()).collect(), + inline_content, + })) + } + + fn list_markdown_files(dir: &Path) -> Result> { + let mut files = Vec::new(); + for entry in fs::read_dir(dir) + .with_context(|| format!("Failed to read knowledge directory {}", dir.display()))? 
+ { + let entry = entry?; + let path = entry.path(); + if path.is_file() + && path + .extension() + .and_then(OsStr::to_str) + .map(|ext| ext.eq_ignore_ascii_case("md")) + .unwrap_or(false) + { + files.push(path); + } + } + files.sort(); + Ok(files) + } + + fn select_inline_knowledge( + ticket: &JiraTicket, + files: &[(String, String)], + ) -> Vec<(String, String)> { + const MAX_INLINE_FILES: usize = 4; + const MAX_KEYWORDS: usize = 50; + + let mut keywords = Self::extract_keywords(ticket, MAX_KEYWORDS); + if !ticket.key.is_empty() { + keywords.push(ticket.key.to_lowercase()); + } + let keyword_set: HashSet = keywords.into_iter().collect(); + + let mut scored: Vec<(usize, String, String)> = files + .iter() + .map(|(name, content)| { + let mut score = 0usize; + let name_lower = name.to_lowercase(); + let content_lower = content.to_lowercase(); + for keyword in &keyword_set { + if name_lower.contains(keyword) { + score += 2; + } + if content_lower.contains(keyword) { + score += 1; + } + } + (score, name.clone(), content.clone()) + }) + .collect(); + + scored.sort_by(|a, b| b.0.cmp(&a.0).then_with(|| a.1.cmp(&b.1))); + + let mut selected = Vec::new(); + for (score, name, content) in scored.into_iter() { + if score == 0 && !selected.is_empty() { + break; + } + selected.push((name, content)); + if selected.len() >= MAX_INLINE_FILES { + break; + } + } + + if selected.is_empty() && !files.is_empty() { + let (name, content) = &files[0]; + selected.push((name.clone(), content.clone())); + } + + selected + } + + fn build_inline_knowledge(files: &[(String, String)]) -> Option { + const MAX_INLINE_CHARS: usize = 12_000; + const MAX_FILE_CHARS: usize = 4_000; + + if files.is_empty() { + return None; + } + + let mut combined = String::new(); + for (name, content) in files { + if combined.len() >= MAX_INLINE_CHARS { + break; + } + let mut snippet = content.trim().to_string(); + if snippet.len() > MAX_FILE_CHARS { + snippet.truncate(MAX_FILE_CHARS); + snippet.push_str("\n\n[Truncated]\n"); + } + let entry = format!("## Knowledge Base: {}\n\n{}\n\n", name, snippet); + if combined.len() + entry.len() > MAX_INLINE_CHARS { + break; + } + combined.push_str(&entry); + } + + if combined.trim().is_empty() { + None + } else { + Some(combined) + } + } + + fn extract_keywords(ticket: &JiraTicket, max_keywords: usize) -> Vec { + let mut text = String::new(); + text.push_str(&ticket.title); + text.push(' '); + text.push_str(&ticket.description); + text.push(' '); + text.push_str(&ticket.issue_type); + for label in &ticket.labels { + text.push(' '); + text.push_str(label); + } + + let mut keywords = Vec::new(); + let stopwords = Self::stopwords(); + let mut seen = HashSet::new(); + for token in text + .to_lowercase() + .split(|ch: char| !ch.is_ascii_alphanumeric()) + { + if token.len() < 4 || stopwords.contains(token) { + continue; + } + if seen.insert(token.to_string()) { + keywords.push(token.to_string()); + if keywords.len() >= max_keywords { + break; + } + } + } + keywords + } + + fn stopwords() -> HashSet<&'static str> { + HashMap::from([ + ("that", ""), + ("this", ""), + ("with", ""), + ("from", ""), + ("into", ""), + ("your", ""), + ("have", ""), + ("will", ""), + ("should", ""), + ("could", ""), + ("would", ""), + ("there", ""), + ("their", ""), + ("about", ""), + ("these", ""), + ("those", ""), + ("which", ""), + ("while", ""), + ("where", ""), + ("what", ""), + ("when", ""), + ("like", ""), + ("also", ""), + ("only", ""), + ("make", ""), + ("just", ""), + ]) + .keys() + .copied() + .collect() + } } From 
8d4d67053e8651de8131698967a02886e727f080 Mon Sep 17 00:00:00 2001 From: codcod Date: Sun, 8 Feb 2026 11:40:48 +0100 Subject: [PATCH 4/6] test: increase coverage --- plugins/repos-fix/src/agent.rs | 4 +- plugins/repos-fix/src/jira.rs | 132 +++++++++++++++++++++++++++++- plugins/repos-fix/src/prompt.rs | 98 ++++++++++++++++++++++ plugins/repos-fix/src/workflow.rs | 101 +++++++++++++++++++++++ 4 files changed, 330 insertions(+), 5 deletions(-) diff --git a/plugins/repos-fix/src/agent.rs b/plugins/repos-fix/src/agent.rs index 6f66947..9b154e5 100644 --- a/plugins/repos-fix/src/agent.rs +++ b/plugins/repos-fix/src/agent.rs @@ -272,9 +272,9 @@ impl CursorAgentRunner { for (index, line) in lines.iter().enumerate() { let trimmed = line.trim(); - if trimmed.starts_with(section) { + if let Some(remainder) = trimmed.strip_prefix(section) { found = true; - let remainder = trimmed[section.len()..].trim(); + let remainder = remainder.trim(); if !remainder.is_empty() { filled = true; break; diff --git a/plugins/repos-fix/src/jira.rs b/plugins/repos-fix/src/jira.rs index 141b564..0323e0a 100644 --- a/plugins/repos-fix/src/jira.rs +++ b/plugins/repos-fix/src/jira.rs @@ -98,6 +98,13 @@ impl JiraClient { } fn parse_ticket(&self, data: serde_json::Value, num_comments: usize) -> Result { + Self::parse_ticket_data(data, num_comments) + } + + fn parse_ticket_data( + data: serde_json::Value, + num_comments: usize, + ) -> Result { let fields = data .get("fields") .context("Missing 'fields' in JIRA response")?; @@ -297,6 +304,15 @@ impl JiraClient { } pub fn parse_jira_input(input: &str) -> Result<(String, String)> { + let jira_url = env::var("JIRA_URL") + .context("JIRA_URL not set. Provide full URL or set JIRA_URL environment variable")?; + parse_jira_input_with_base_url(input, Some(jira_url.as_str())) +} + +fn parse_jira_input_with_base_url( + input: &str, + jira_url: Option<&str>, +) -> Result<(String, String)> { let input = input.trim(); if input.is_empty() { anyhow::bail!("JIRA ticket input cannot be empty"); @@ -321,12 +337,122 @@ pub fn parse_jira_input(input: &str) -> Result<(String, String)> { anyhow::bail!("Could not extract ticket ID from URL: {}", input); } - // Assume it's just a ticket ID - let jira_url = env::var("JIRA_URL") - .context("JIRA_URL not set. Provide full URL or set JIRA_URL environment variable")?; + let jira_url = jira_url.context( + "JIRA_URL not set. 
Provide full URL or set JIRA_URL environment variable", + )?; Ok(( jira_url.trim_end_matches('/').to_string(), input.to_string(), )) } + +#[cfg(test)] +mod tests { + use super::*; + use serde_json::json; + + #[test] + fn parse_jira_input_with_url() { + let result = + parse_jira_input("https://company.atlassian.net/browse/MAINT-1234").unwrap(); + assert_eq!(result.0, "https://company.atlassian.net"); + assert_eq!(result.1, "MAINT-1234"); + } + + #[test] + fn parse_jira_input_with_ticket_id_uses_env() { + let result = + parse_jira_input_with_base_url("MAINT-5678", Some("https://company.atlassian.net/")) + .unwrap(); + assert_eq!(result.0, "https://company.atlassian.net"); + assert_eq!(result.1, "MAINT-5678"); + } + + #[test] + fn parse_jira_input_rejects_empty() { + assert!(parse_jira_input(" ").is_err()); + } + + #[test] + fn parse_ticket_prefers_rendered_fields_and_parses_attachments() { + let data = json!({ + "id": "1001", + "key": "MAINT-1", + "fields": { + "summary": "Fix bug", + "description": "Raw desc", + "labels": ["security", "backend"], + "status": { "name": "Open" }, + "priority": { "name": "P1" }, + "issuetype": { "name": "Bug" }, + "assignee": { "displayName": "Ada" }, + "reporter": { "displayName": "Bob" }, + "created": "2024-01-01", + "updated": "2024-01-02", + "attachment": [ + { + "filename": "log.txt", + "mimeType": "text/plain", + "size": 123, + "content": "https://jira/att/1" + } + ], + "comment": { + "comments": [ + { + "author": { "displayName": "Eve" }, + "created": "2024-01-03", + "body": "Raw
body
" + } + ] + } + }, + "renderedFields": { + "description": "
Rendered desc
", + "comment": { + "comments": [ + { "body": "
Rendered comment
" } + ] + } + } + }); + + let ticket = JiraClient::parse_ticket_data(data, 1).unwrap(); + + assert_eq!(ticket.key, "MAINT-1"); + let description = ticket.description.to_lowercase(); + assert!(description.contains("rendered")); + assert!(!description.contains("raw")); + assert_eq!(ticket.attachments.len(), 1); + assert_eq!(ticket.attachments[0].filename, "log.txt"); + assert_eq!(ticket.attachments[0].source, "jira"); + assert_eq!(ticket.comments.len(), 1); + assert!(ticket.comments[0].body.contains("Rendered comment")); + } + + #[test] + fn parse_comments_respects_limit() { + let fields = json!({ + "comment": { + "comments": [ + { "author": { "displayName": "A" }, "created": "2024-01-01", "body": "first" }, + { "author": { "displayName": "B" }, "created": "2024-01-02", "body": "second" } + ] + } + }); + let rendered = json!({ + "comment": { + "comments": [ + { "body": "first" }, + { "body": "second" } + ] + } + }); + + let comments = JiraClient::parse_comments(&fields, Some(&rendered), 1); + assert_eq!(comments.len(), 1); + assert_eq!(comments[0].author, "B"); + assert_eq!(comments[0].body, "second"); + } +} diff --git a/plugins/repos-fix/src/prompt.rs b/plugins/repos-fix/src/prompt.rs index 298076f..badc5a5 100644 --- a/plugins/repos-fix/src/prompt.rs +++ b/plugins/repos-fix/src/prompt.rs @@ -275,3 +275,101 @@ impl PromptGenerator { has_security_keyword || (has_upgrade_keyword && haystack.contains("cve")) } } + +#[cfg(test)] +mod tests { + use super::*; + use crate::domain::PlatformType; + + fn make_ticket( + title: &str, + description: &str, + issue_type: &str, + labels: Vec<&str>, + ) -> JiraTicket { + JiraTicket { + id: "1".to_string(), + key: "MAINT-1".to_string(), + title: title.to_string(), + description: description.to_string(), + labels: labels.into_iter().map(|label| label.to_string()).collect(), + status: "Open".to_string(), + priority: "P1".to_string(), + issue_type: issue_type.to_string(), + assignee: "Unassigned".to_string(), + reporter: "Reporter".to_string(), + created: "2024-01-01".to_string(), + updated: "2024-01-02".to_string(), + attachments: Vec::new(), + comments: Vec::new(), + } + } + + #[test] + fn detects_security_keywords_in_ticket() { + let ticket = make_ticket( + "Upgrade dependency", + "Apply CVE-2024-0001 fix", + "Task", + vec![], + ); + assert!(PromptGenerator::is_security_task(&ticket)); + } + + #[test] + fn detects_security_from_labels() { + let ticket = make_ticket( + "Routine maintenance", + "No mention of cve", + "Task", + vec!["security"], + ); + assert!(PromptGenerator::is_security_task(&ticket)); + } + + #[test] + fn does_not_flag_upgrade_without_cve() { + let ticket = make_ticket( + "Upgrade dependencies", + "Upgrade libraries for performance", + "Task", + vec![], + ); + assert!(!PromptGenerator::is_security_task(&ticket)); + } + + #[test] + fn unknown_platform_guidelines_empty() { + let guidelines = PromptGenerator::get_platform_guidelines(&PlatformType::Unknown); + assert!(guidelines.trim().is_empty()); + } + + #[test] + fn template_override_dir_prefers_xdg_config_home() { + let original_xdg = env::var("XDG_CONFIG_HOME").ok(); + let original_home = env::var("HOME").ok(); + + let temp_dir = tempfile::tempdir().expect("tempdir"); + unsafe { + env::set_var("XDG_CONFIG_HOME", temp_dir.path()); + env::remove_var("HOME"); + } + + let path = template_override_dir().expect("path"); + assert_eq!(path, temp_dir.path().join("repos").join("fix")); + + unsafe { + if let Some(value) = original_xdg { + env::set_var("XDG_CONFIG_HOME", value); + } else { + 
env::remove_var("XDG_CONFIG_HOME"); + } + + if let Some(value) = original_home { + env::set_var("HOME", value); + } else { + env::remove_var("HOME"); + } + } + } +} diff --git a/plugins/repos-fix/src/workflow.rs b/plugins/repos-fix/src/workflow.rs index ea0ba43..c3cec1c 100644 --- a/plugins/repos-fix/src/workflow.rs +++ b/plugins/repos-fix/src/workflow.rs @@ -23,6 +23,7 @@ pub struct FixWorkflow { } impl FixWorkflow { + #[allow(clippy::too_many_arguments)] pub fn new( repos: Vec, ticket: String, @@ -628,3 +629,103 @@ impl FixWorkflow { .collect() } } + +#[cfg(test)] +mod tests { + use super::*; + use std::fs; + + fn make_ticket(title: &str, description: &str, labels: Vec<&str>) -> JiraTicket { + JiraTicket { + id: "1".to_string(), + key: "MAINT-1".to_string(), + title: title.to_string(), + description: description.to_string(), + labels: labels.into_iter().map(|label| label.to_string()).collect(), + status: "Open".to_string(), + priority: "P2".to_string(), + issue_type: "Bug".to_string(), + assignee: "Unassigned".to_string(), + reporter: "Reporter".to_string(), + created: "2024-01-01".to_string(), + updated: "2024-01-02".to_string(), + attachments: Vec::new(), + comments: Vec::new(), + } + } + + #[test] + fn extract_keywords_filters_stopwords_and_short_tokens() { + let ticket = make_ticket( + "Fix payment timeout in checkout", + "Timeout occurs when user tries to pay", + vec!["payments", "urgent"], + ); + let keywords = FixWorkflow::extract_keywords(&ticket, 50); + + assert!(keywords.contains(&"payment".to_string()) || keywords.contains(&"payments".to_string())); + assert!(keywords.contains(&"timeout".to_string())); + assert!(!keywords.contains(&"when".to_string())); + + let unique: HashSet<_> = keywords.iter().cloned().collect(); + assert_eq!(unique.len(), keywords.len()); + } + + #[test] + fn select_inline_knowledge_scores_by_name_and_content() { + let ticket = make_ticket( + "Payment failure during checkout", + "Timeout when processing payment", + vec!["payments"], + ); + let files = vec![ + ("payments-guide.md".to_string(), "Payment retries and timeouts".to_string()), + ("checkout.md".to_string(), "Checkout flow details".to_string()), + ("misc.md".to_string(), "Unrelated content".to_string()), + ]; + + let selected = FixWorkflow::select_inline_knowledge(&ticket, &files); + assert!(!selected.is_empty()); + assert_eq!(selected[0].0, "payments-guide.md"); + } + + #[test] + fn select_inline_knowledge_falls_back_to_first_file() { + let ticket = make_ticket("No matching keywords", "Nothing in common", vec![]); + let files = vec![ + ("alpha.md".to_string(), "first file".to_string()), + ("beta.md".to_string(), "second file".to_string()), + ]; + + let selected = FixWorkflow::select_inline_knowledge(&ticket, &files); + assert_eq!(selected.len(), 1); + assert_eq!(selected[0].0, "alpha.md"); + } + + #[test] + fn build_inline_knowledge_truncates_long_entries() { + let long_content = "a".repeat(5000); + let files = vec![("guide.md".to_string(), long_content)]; + + let inline = FixWorkflow::build_inline_knowledge(&files).expect("inline"); + assert!(inline.contains("## Knowledge Base: guide.md")); + assert!(inline.contains("[Truncated]")); + } + + #[test] + fn build_inline_knowledge_empty_returns_none() { + assert!(FixWorkflow::build_inline_knowledge(&[]).is_none()); + } + + #[test] + fn list_markdown_files_only_returns_md() { + let temp_dir = tempfile::tempdir().expect("tempdir"); + let md_path = temp_dir.path().join("one.md"); + let txt_path = temp_dir.path().join("two.txt"); + fs::write(&md_path, "# 
doc").expect("write md"); + fs::write(&txt_path, "ignore").expect("write txt"); + + let files = FixWorkflow::list_markdown_files(temp_dir.path()).unwrap(); + assert_eq!(files, vec![md_path]); + } +} From 8777070375daa0e25fa0be422ad45ca5ab69b4d1 Mon Sep 17 00:00:00 2001 From: codcod Date: Sun, 8 Feb 2026 15:31:56 +0100 Subject: [PATCH 5/6] fix: fmt --- .gitignore | 2 +- plugins/repos-fix/README.md | 88 +++++++++++-------- plugins/repos-fix/src/jira.rs | 18 ++-- .../repos-fix/src/templates/cursor_prompt.md | 8 ++ .../src/templates/guidelines_android.md | 1 + .../src/templates/guidelines_angular.md | 1 + .../repos-fix/src/templates/guidelines_ios.md | 1 + .../src/templates/guidelines_java.md | 1 + plugins/repos-fix/src/workflow.rs | 14 ++- 9 files changed, 79 insertions(+), 55 deletions(-) diff --git a/.gitignore b/.gitignore index 8c4d598..64f6407 100644 --- a/.gitignore +++ b/.gitignore @@ -33,4 +33,4 @@ config.yaml tarpaulin-report.html test-*/ tests/test-recipes.yaml -.link-to-xdg-config-home \ No newline at end of file +.link-to-xdg-config-home diff --git a/plugins/repos-fix/README.md b/plugins/repos-fix/README.md index 352b754..46e5549 100644 --- a/plugins/repos-fix/README.md +++ b/plugins/repos-fix/README.md @@ -7,26 +7,29 @@ Automatically analyze and fix JIRA maintenance tickets using Cursor AI. The `repos-fix` plugin integrates JIRA issue tracking with the Cursor AI agent to automatically implement fixes for maintenance tickets. It operates as a plugin for the `repos` tool. Key features: -1. **Fetches JIRA ticket details**: including description, priority, and attachments. -2. **Analyzes the codebase**: Detects platform (Java, iOS, Android, Angular), frameworks, and test structure. -3. **Generates comprehensive prompts**: Creates a "mission" for Cursor AI tailored to the specific project context. -4. **Runs cursor-agent**: Executes the fix in headless mode with auto-retries. -5. **Validates the implementation**: verifying build and tests pass. + +1. **Fetches JIRA ticket details**: including description, priority, and attachments. +2. **Analyzes the codebase**: Detects platform (Java, iOS, Android, Angular), frameworks, and test structure. +3. **Generates comprehensive prompts**: Creates a "mission" for Cursor AI tailored to the specific project context. +4. **Runs cursor-agent**: Executes the fix in headless mode with auto-retries. +5. **Validates the implementation**: verifying build and tests pass. ## Prerequisites -- `repos` tool installed (this plugin is included with it). -- `cursor-agent` CLI installed and available in PATH. -- **JIRA Account**: with API token access. -- **Cursor API Key**: for the AI agent. +- `repos` tool installed (this plugin is included with it). +- `cursor-agent` CLI installed and available in PATH. +- **JIRA Account**: with API token access. +- **Cursor API Key**: for the AI agent. ## Installation -1. **repos tool**: Ensure you have the `repos` tool installed. `repos-fix` is a built-in plugin. -2. **cursor-agent**: Install the Cursor Agent CLI: +1. **repos tool**: Ensure you have the `repos` tool installed. `repos-fix` is a built-in plugin. +2. **cursor-agent**: Install the Cursor Agent CLI: + ```bash curl https://cursor.com/install -fsS | bash ``` + Verify installation with `cursor-agent --version`. ## Configuration @@ -43,19 +46,21 @@ export JIRA_API_TOKEN=your-jira-api-token export CURSOR_API_KEY=your-cursor-api-key ``` -- **JIRA API Token**: Generate at [id.atlassian.com](https://id.atlassian.com/manage-profile/security/api-tokens). 
-- **Cursor API Key**: Get it from Cursor Settings → General → API Keys. +- **JIRA API Token**: Generate at [id.atlassian.com](https://id.atlassian.com/manage-profile/security/api-tokens). +- **Cursor API Key**: Get it from Cursor Settings → General → API Keys. ### Template Overrides You can customize the AI prompts and guidelines by placing files in your configuration directory: -- `${XDG_CONFIG_HOME}/repos/fix/` (usually `~/.config/repos/fix/`) + +- `${XDG_CONFIG_HOME}/repos/fix/` (usually `~/.config/repos/fix/`) Supported files: -- `cursor_prompt.md`: The main instruction set for Cursor. -- `cursorrules.md`: Behavior rules for the agent. -- `agent_prompt.md`: The mission prompt passed to `cursor-agent`. -- Platform guidelines: `guidelines_ios.md`, `guidelines_android.md`, `guidelines_java.md`, `guidelines_angular.md`. + +- `cursor_prompt.md`: The main instruction set for Cursor. +- `cursorrules.md`: Behavior rules for the agent. +- `agent_prompt.md`: The mission prompt passed to `cursor-agent`. +- Platform guidelines: `guidelines_ios.md`, `guidelines_android.md`, `guidelines_java.md`, `guidelines_angular.md`. ## Usage @@ -98,36 +103,37 @@ Analyze the issue and propose a solution **without making code changes**: ```bash repos fix my-service --ticket MAINT-1234 --ask ``` + This generates a `SOLUTION_SUMMARY.md` with the proposed plan. ### Advanced Options -- `--workspace `: Specify a custom directory for generated artifacts (default: `workspace/fix/`). -- `--prompt "..."`: Append extra instructions to the AI agent (e.g., "Use Java 17 features"). -- `--knowledge-dir `: Copy markdown knowledge base files into the workspace and inline selected content into prompts. +- `--workspace `: Specify a custom directory for generated artifacts (default: `workspace/fix/`). +- `--prompt "..."`: Append extra instructions to the AI agent (e.g., "Use Java 17 features"). +- `--knowledge-dir `: Copy markdown knowledge base files into the workspace and inline selected content into prompts. ## Workflow When you run `repos fix`, the following steps occur: -1. **Fetch Ticket**: Downloads JIRA ticket information, description, and attachments. -2. **Setup Workspace**: Creates a working directory at `workspace/fix//`. -3. **Analyze Project**: detailed inspection of platform, languages, frameworks, dependencies, and test setup. -4. **Generate Context**: Creates `mission-context.json` with all analysis data. -5. **Generate Prompts**: Creates `.cursorrules` and `cursor_prompt.md` tailored to the specific project. -6. **Include Knowledge Base (optional)**: Copies markdown docs into `workspace/fix//knowledge/` and inlines selected docs into the prompt. -7. **Run Cursor Agent**: - - Executes `cursor-agent` with `--force` and `--print` flags. - - **Auto-Retry**: If the agent fails (e.g., build fails, tests fail), it automatically retries up to **3 times**, feeding the error message back to the AI. - - **Workflow Switch**: CVE/security tickets use a safe upgrade protocol (no vulnerability reproduction); bug fixes require a repro-first flow. -8. **Validate**: The agent validates the fix by running build and test commands detected during analysis. -9. **Report**: Generates `SOLUTION_SUMMARY.md` with implementation details. +1. **Fetch Ticket**: Downloads JIRA ticket information, description, and attachments. +2. **Setup Workspace**: Creates a working directory at `workspace/fix//`. +3. **Analyze Project**: detailed inspection of platform, languages, frameworks, dependencies, and test setup. +4. 
**Generate Context**: Creates `mission-context.json` with all analysis data. +5. **Generate Prompts**: Creates `.cursorrules` and `cursor_prompt.md` tailored to the specific project. +6. **Include Knowledge Base (optional)**: Copies markdown docs into `workspace/fix//knowledge/` and inlines selected docs into the prompt. +7. **Run Cursor Agent**: + - Executes `cursor-agent` with `--force` and `--print` flags. + - **Auto-Retry**: If the agent fails (e.g., build fails, tests fail), it automatically retries up to **3 times**, feeding the error message back to the AI. + - **Workflow Switch**: CVE/security tickets use a safe upgrade protocol (no vulnerability reproduction); bug fixes require a repro-first flow. +8. **Validate**: The agent validates the fix by running build and test commands detected during analysis. +9. **Report**: Generates `SOLUTION_SUMMARY.md` with implementation details. ## Output After execution, check the `workspace/fix//` directory: -``` +```text workspace/fix/MAINT-1234/ ├── .cursorrules # AI behavior rules ├── mission-context.json # Complete project analysis & ticket data @@ -143,27 +149,33 @@ Note: `ANALYSIS.md` is expected to be filled in by the agent before any changes. The plugin automatically detects and supports: -- **iOS**: Xcode projects (`.xcodeproj`, `.xcworkspace`), Swift/Obj-C, CocoaPods, SPM. -- **Android**: Gradle projects, Kotlin/Java, Android Manifests. -- **Java Backend**: Maven (`pom.xml`) or Gradle (`build.gradle`), Spring Boot, JUnit/Mockito. -- **Angular**: `angular.json` or `package.json` with Angular dependencies, TypeScript. +- **iOS**: Xcode projects (`.xcodeproj`, `.xcworkspace`), Swift/Obj-C, CocoaPods, SPM. +- **Android**: Gradle projects, Kotlin/Java, Android Manifests. +- **Java Backend**: Maven (`pom.xml`) or Gradle (`build.gradle`), Spring Boot, JUnit/Mockito. +- **Angular**: `angular.json` or `package.json` with Angular dependencies, TypeScript. ## Troubleshooting ### `cursor-agent` not found + Install it via `curl https://cursor.com/install -fsS | bash`. Ensure it is in your `$PATH`. ### JIRA authentication failed + Check your environment variables: + ```bash echo $JIRA_URL echo $JIRA_USERNAME echo $JIRA_API_TOKEN ``` + Ensure `JIRA_API_TOKEN` is a valid API token, not your password. ### Repository not found + If using context filtering (e.g., `-t tag`), ensure the repository actually matches the filter. You can list matches with `repos list -t tag`. ### Agent keeps failing + Check the console output for the error message returned by `cursor-agent`. If it fails 3 times, check the generated prompts in the workspace directory to see if the AI instructions need manual adjustment (using `--prompt`). 
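The configuration and troubleshooting sections above both hinge on a handful of environment variables. A standalone pre-flight check along these lines can confirm they are present before running `repos fix`; this sketch is not part of the plugin — the variable names come from the README, while the function name and messages are invented for illustration.

```rust
use std::env;

/// Illustrative pre-flight check for the environment variables the README's
/// Configuration section requires before `repos fix` can talk to JIRA and Cursor.
fn check_fix_environment() -> Result<(), String> {
    let required = ["JIRA_URL", "JIRA_USERNAME", "JIRA_API_TOKEN", "CURSOR_API_KEY"];

    // Treat unset and empty values the same way, matching the troubleshooting advice.
    let missing: Vec<&str> = required
        .into_iter()
        .filter(|name| env::var(name).map(|value| value.trim().is_empty()).unwrap_or(true))
        .collect();

    if missing.is_empty() {
        Ok(())
    } else {
        Err(format!("missing or empty: {}", missing.join(", ")))
    }
}

fn main() {
    match check_fix_environment() {
        Ok(()) => println!("environment looks complete"),
        Err(reason) => eprintln!("repos fix is likely to fail JIRA/Cursor auth: {reason}"),
    }
}
```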
diff --git a/plugins/repos-fix/src/jira.rs b/plugins/repos-fix/src/jira.rs index 0323e0a..292aa8e 100644 --- a/plugins/repos-fix/src/jira.rs +++ b/plugins/repos-fix/src/jira.rs @@ -101,10 +101,7 @@ impl JiraClient { Self::parse_ticket_data(data, num_comments) } - fn parse_ticket_data( - data: serde_json::Value, - num_comments: usize, - ) -> Result { + fn parse_ticket_data(data: serde_json::Value, num_comments: usize) -> Result { let fields = data .get("fields") .context("Missing 'fields' in JIRA response")?; @@ -309,10 +306,7 @@ pub fn parse_jira_input(input: &str) -> Result<(String, String)> { parse_jira_input_with_base_url(input, Some(jira_url.as_str())) } -fn parse_jira_input_with_base_url( - input: &str, - jira_url: Option<&str>, -) -> Result<(String, String)> { +fn parse_jira_input_with_base_url(input: &str, jira_url: Option<&str>) -> Result<(String, String)> { let input = input.trim(); if input.is_empty() { anyhow::bail!("JIRA ticket input cannot be empty"); @@ -337,9 +331,8 @@ fn parse_jira_input_with_base_url( anyhow::bail!("Could not extract ticket ID from URL: {}", input); } - let jira_url = jira_url.context( - "JIRA_URL not set. Provide full URL or set JIRA_URL environment variable", - )?; + let jira_url = jira_url + .context("JIRA_URL not set. Provide full URL or set JIRA_URL environment variable")?; Ok(( jira_url.trim_end_matches('/').to_string(), @@ -354,8 +347,7 @@ mod tests { #[test] fn parse_jira_input_with_url() { - let result = - parse_jira_input("https://company.atlassian.net/browse/MAINT-1234").unwrap(); + let result = parse_jira_input("https://company.atlassian.net/browse/MAINT-1234").unwrap(); assert_eq!(result.0, "https://company.atlassian.net"); assert_eq!(result.1, "MAINT-1234"); } diff --git a/plugins/repos-fix/src/templates/cursor_prompt.md b/plugins/repos-fix/src/templates/cursor_prompt.md index 5d199f8..06f8605 100644 --- a/plugins/repos-fix/src/templates/cursor_prompt.md +++ b/plugins/repos-fix/src/templates/cursor_prompt.md @@ -10,29 +10,37 @@ compatible changes and keep code style consistent. {{ platform_guidelines }} {% if has_knowledge_base %} + ## Domain knowledge base + Relevant platform/journey docs are available in `{{ knowledge_base_dir }}/` in this workspace. Read these before making changes. {% if knowledge_base_files and knowledge_base_files | length > 0 %} Available files: {% for file in knowledge_base_files %} + - `{{ knowledge_base_dir }}/{{ file }}` {% endfor %} {% endif %} {% if knowledge_base_content %} + ### Inlined highlights (selected) + {{ knowledge_base_content }} {% endif %} {% endif %} ## Build and test + - **Build**: `{{ main_build }}` {% if test_compile %}- **Test compile**: `{{ test_compile }}` {% endif %}- **Tests**: `{{ test_run }}` {% if additional_prompt %} + ## Additional requirements + {{ additional_prompt }} {% endif %} diff --git a/plugins/repos-fix/src/templates/guidelines_android.md b/plugins/repos-fix/src/templates/guidelines_android.md index 1e852de..7ab40d9 100644 --- a/plugins/repos-fix/src/templates/guidelines_android.md +++ b/plugins/repos-fix/src/templates/guidelines_android.md @@ -1,2 +1,3 @@ ### Android + Use existing Android patterns and tooling. diff --git a/plugins/repos-fix/src/templates/guidelines_angular.md b/plugins/repos-fix/src/templates/guidelines_angular.md index c92cf1e..808b7c0 100644 --- a/plugins/repos-fix/src/templates/guidelines_angular.md +++ b/plugins/repos-fix/src/templates/guidelines_angular.md @@ -1,2 +1,3 @@ ### Angular + Use existing Angular patterns and tooling. 
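The blank lines added around the `{% if %}` and `{% for %}` blocks in `cursor_prompt.md` keep the rendered output valid markdown, since headings and list items need surrounding blank lines. A minimal sketch of rendering a template in this style with the `minijinja` dependency follows; the template body and context values are trimmed-down stand-ins for illustration, not the plugin's actual template or analysis data.

```rust
use minijinja::{Environment, context};

// Trimmed-down stand-in for cursor_prompt.md: one unconditional section plus
// the optional knowledge-base section guarded by `has_knowledge_base`.
const PROMPT_TEMPLATE: &str = "\
## Build and test

- **Build**: `{{ main_build }}`
- **Tests**: `{{ test_run }}`
{% if has_knowledge_base %}
## Domain knowledge base

Read these files before making changes:
{% for file in knowledge_base_files %}
- `{{ knowledge_base_dir }}/{{ file }}`
{% endfor %}
{% endif %}";

fn main() -> Result<(), minijinja::Error> {
    let mut env = Environment::new();
    env.add_template("cursor_prompt", PROMPT_TEMPLATE)?;

    let rendered = env.get_template("cursor_prompt")?.render(context! {
        main_build => "./mvnw -q verify",
        test_run => "./mvnw test",
        has_knowledge_base => true,
        knowledge_base_dir => "knowledge",
        knowledge_base_files => vec!["payments.md", "checkout.md"],
    })?;

    println!("{rendered}");
    Ok(())
}
```

Rendering the same context with `has_knowledge_base => false` drops the whole knowledge-base section, which mirrors how the real template omits it when no knowledge base is provided.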
diff --git a/plugins/repos-fix/src/templates/guidelines_ios.md b/plugins/repos-fix/src/templates/guidelines_ios.md index 2810724..5d38263 100644 --- a/plugins/repos-fix/src/templates/guidelines_ios.md +++ b/plugins/repos-fix/src/templates/guidelines_ios.md @@ -1,2 +1,3 @@ ### iOS + Use existing iOS patterns and tooling. diff --git a/plugins/repos-fix/src/templates/guidelines_java.md b/plugins/repos-fix/src/templates/guidelines_java.md index 3a59914..b65c0ec 100644 --- a/plugins/repos-fix/src/templates/guidelines_java.md +++ b/plugins/repos-fix/src/templates/guidelines_java.md @@ -1,2 +1,3 @@ ### Java + Use existing Java patterns and tooling. diff --git a/plugins/repos-fix/src/workflow.rs b/plugins/repos-fix/src/workflow.rs index c3cec1c..8eb7e99 100644 --- a/plugins/repos-fix/src/workflow.rs +++ b/plugins/repos-fix/src/workflow.rs @@ -663,7 +663,9 @@ mod tests { ); let keywords = FixWorkflow::extract_keywords(&ticket, 50); - assert!(keywords.contains(&"payment".to_string()) || keywords.contains(&"payments".to_string())); + assert!( + keywords.contains(&"payment".to_string()) || keywords.contains(&"payments".to_string()) + ); assert!(keywords.contains(&"timeout".to_string())); assert!(!keywords.contains(&"when".to_string())); @@ -679,8 +681,14 @@ mod tests { vec!["payments"], ); let files = vec![ - ("payments-guide.md".to_string(), "Payment retries and timeouts".to_string()), - ("checkout.md".to_string(), "Checkout flow details".to_string()), + ( + "payments-guide.md".to_string(), + "Payment retries and timeouts".to_string(), + ), + ( + "checkout.md".to_string(), + "Checkout flow details".to_string(), + ), ("misc.md".to_string(), "Unrelated content".to_string()), ]; From afbb09579f27282168339632ecc3a7ffa475ca72 Mon Sep 17 00:00:00 2001 From: codcod Date: Sun, 8 Feb 2026 15:46:57 +0100 Subject: [PATCH 6/6] test: fix --- plugins/repos-fix/src/jira.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/plugins/repos-fix/src/jira.rs b/plugins/repos-fix/src/jira.rs index 292aa8e..fcc7572 100644 --- a/plugins/repos-fix/src/jira.rs +++ b/plugins/repos-fix/src/jira.rs @@ -347,7 +347,11 @@ mod tests { #[test] fn parse_jira_input_with_url() { - let result = parse_jira_input("https://company.atlassian.net/browse/MAINT-1234").unwrap(); + let result = parse_jira_input_with_base_url( + "https://company.atlassian.net/browse/MAINT-1234", + None, // Don't require JIRA_URL env var + ) + .unwrap(); assert_eq!(result.0, "https://company.atlassian.net"); assert_eq!(result.1, "MAINT-1234"); }
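Patch 6 switches the URL test to `parse_jira_input_with_base_url(..., None)` so it no longer depends on `JIRA_URL` being set in the environment. The input rule those tests exercise can be sketched in isolation as below; this is a simplified illustration rather than the plugin's actual implementation, and the URLs and ticket keys are invented examples.

```rust
use anyhow::{bail, Context, Result};

/// Simplified version of the ticket-input rule: a full `/browse/` URL carries
/// its own base URL, while a bare ticket key needs one supplied by the caller
/// (the plugin takes it from `JIRA_URL`).
fn resolve_ticket(input: &str, base_url: Option<&str>) -> Result<(String, String)> {
    let input = input.trim();
    if input.is_empty() {
        bail!("JIRA ticket input cannot be empty");
    }

    if let Some((base, key)) = input.split_once("/browse/") {
        if key.is_empty() {
            bail!("Could not extract ticket ID from URL: {input}");
        }
        return Ok((base.trim_end_matches('/').to_string(), key.to_string()));
    }

    let base = base_url.context("no base URL available for a bare ticket key")?;
    Ok((base.trim_end_matches('/').to_string(), input.to_string()))
}

fn main() -> Result<()> {
    // Full URL: no separate base URL needed.
    let (base, key) = resolve_ticket("https://company.atlassian.net/browse/MAINT-1234", None)?;
    assert_eq!(base, "https://company.atlassian.net");
    assert_eq!(key, "MAINT-1234");

    // Bare key: the caller supplies the base URL (trailing slash is trimmed).
    let (base, key) = resolve_ticket("MAINT-5678", Some("https://company.atlassian.net/"))?;
    assert_eq!(base, "https://company.atlassian.net");
    assert_eq!(key, "MAINT-5678");

    println!("both input forms resolve as expected");
    Ok(())
}
```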