diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..27087bb --- /dev/null +++ b/.coveragerc @@ -0,0 +1,10 @@ +[run] +branch = True +source = struct_module + +[report] +omit = + tests/* + */__init__.py +show_missing = True +skip_covered = True diff --git a/.github/workflows/test-script.yaml b/.github/workflows/test-script.yaml index 95e6e9e..438f349 100644 --- a/.github/workflows/test-script.yaml +++ b/.github/workflows/test-script.yaml @@ -42,7 +42,7 @@ jobs: shell: bash run: | echo "REPORT_FILE=${REPORT_OUTPUT}" >> "$GITHUB_ENV" - pytest --cov --cov-branch --cov-report=xml -v --md-report --md-report-flavor gfm --md-report-exclude-outcomes passed skipped xpassed --md-report-output "$REPORT_OUTPUT" --pyargs tests + pytest --cov=struct_module --cov-branch --cov-report=xml -v --md-report --md-report-flavor gfm --md-report-exclude-outcomes passed skipped xpassed --md-report-output "$REPORT_OUTPUT" --pyargs tests - name: Upload coverage reports to Codecov uses: codecov/codecov-action@v5 diff --git a/.gitignore b/.gitignore index ff9930a..769fc1f 100644 --- a/.gitignore +++ b/.gitignore @@ -8,3 +8,8 @@ build/* # MkDocs generated documentation site/docs/ + +# Coverage artifacts +.coverage +coverage.xml +htmlcov/ diff --git a/struct_module/content_fetcher.py b/struct_module/content_fetcher.py index 8c7686b..bd75658 100644 --- a/struct_module/content_fetcher.py +++ b/struct_module/content_fetcher.py @@ -84,41 +84,41 @@ def _fetch_http_url(self, url): return response.text - def _fetch_github_file(self, github_url): + def _fetch_github_file(self, github_path): """ Fetch a file from a GitHub repository using HTTPS. 
- Expected format: github://owner/repo/branch/file_path + Dispatcher passes: owner/repo/branch/file_path """ - self.logger.debug(f"Fetching content from GitHub: {github_url}") - match = re.match(r"github://([^/]+)/([^/]+)/([^/]+)/(.+)", github_url) + self.logger.debug(f"Fetching content from GitHub: {github_path}") + match = re.match(r"([^/]+)/([^/]+)/([^/]+)/(.+)", github_path) if not match: - raise ValueError("Invalid GitHub URL format. Expected github://owner/repo/branch/file_path") + raise ValueError("Invalid GitHub path. Expected owner/repo/branch/file_path") owner, repo, branch, file_path = match.groups() return self._clone_or_fetch_github(owner, repo, branch, file_path, https=True) - def _fetch_github_https_file(self, github_url): + def _fetch_github_https_file(self, github_path): """ Fetch a file from a GitHub repository using HTTPS. - Expected format: githubhttps://owner/repo/branch/file_path + Dispatcher passes: owner/repo/branch/file_path """ - self.logger.debug(f"Fetching content from GitHub (HTTPS): {github_url}") - match = re.match(r"githubhttps://([^/]+)/([^/]+)/([^/]+)/(.+)", github_url) + self.logger.debug(f"Fetching content from GitHub (HTTPS): {github_path}") + match = re.match(r"([^/]+)/([^/]+)/([^/]+)/(.+)", github_path) if not match: - raise ValueError("Invalid GitHub URL format. Expected githubhttps://owner/repo/branch/file_path") + raise ValueError("Invalid GitHub path. Expected owner/repo/branch/file_path") owner, repo, branch, file_path = match.groups() return self._clone_or_fetch_github(owner, repo, branch, file_path, https=True) - def _fetch_github_ssh_file(self, github_url): + def _fetch_github_ssh_file(self, github_path): """ Fetch a file from a GitHub repository using SSH. 
- Expected format: githubssh://owner/repo/branch/file_path + Dispatcher passes: owner/repo/branch/file_path """ - self.logger.debug(f"Fetching content from GitHub (SSH): {github_url}") - match = re.match(r"githubssh://([^/]+)/([^/]+)/([^/]+)/(.+)", github_url) + self.logger.debug(f"Fetching content from GitHub (SSH): {github_path}") + match = re.match(r"([^/]+)/([^/]+)/([^/]+)/(.+)", github_path) if not match: - raise ValueError("Invalid GitHub URL format. Expected githubssh://owner/repo/branch/file_path") + raise ValueError("Invalid GitHub path. Expected owner/repo/branch/file_path") owner, repo, branch, file_path = match.groups() return self._clone_or_fetch_github(owner, repo, branch, file_path, https=False) @@ -143,18 +143,18 @@ def _clone_or_fetch_github(self, owner, repo, branch, file_path, https=True): with file_full_path.open('r') as file: return file.read() - def _fetch_s3_file(self, s3_url): + def _fetch_s3_file(self, s3_path): """ Fetch a file from an S3 bucket. - Expected format: s3://bucket_name/key + Dispatcher passes: bucket_name/key """ if not boto3_available: raise ImportError("boto3 is not installed. Please install it to use S3 fetching.") - self.logger.debug(f"Fetching content from S3: {s3_url}") - match = re.match(r"s3://([^/]+)/(.+)", s3_url) + self.logger.debug(f"Fetching content from S3: {s3_path}") + match = re.match(r"([^/]+)/(.+)", s3_path) if not match: - raise ValueError("Invalid S3 URL format. Expected s3://bucket_name/key") + raise ValueError("Invalid S3 path. Expected bucket_name/key") bucket_name, key = match.groups() local_file_path = self.cache_dir / Path(key).name @@ -176,18 +176,18 @@ def _fetch_s3_file(self, s3_url): with local_file_path.open('r') as file: return file.read() - def _fetch_gcs_file(self, gcs_url): + def _fetch_gcs_file(self, gcs_path): """ Fetch a file from Google Cloud Storage. 
- Expected format: gs://bucket_name/key + Dispatcher passes: bucket_name/key """ if not gcs_available: raise ImportError("google-cloud-storage is not installed. Please install it to use GCS fetching.") - self.logger.debug(f"Fetching content from GCS: {gcs_url}") - match = re.match(r"gs://([^/]+)/(.+)", gcs_url) + self.logger.debug(f"Fetching content from GCS: {gcs_path}") + match = re.match(r"([^/]+)/(.+)", gcs_path) if not match: - raise ValueError("Invalid GCS URL format. Expected gs://bucket_name/key") + raise ValueError("Invalid GCS path. Expected bucket_name/key") bucket_name, key = match.groups() local_file_path = self.cache_dir / Path(key).name diff --git a/tests/test_commands_more.py b/tests/test_commands_more.py new file mode 100644 index 0000000..20852f7 --- /dev/null +++ b/tests/test_commands_more.py @@ -0,0 +1,181 @@ +import argparse +import subprocess +from unittest.mock import patch, MagicMock + +import pytest + +from struct_module.commands.generate import GenerateCommand +from struct_module.commands.info import InfoCommand +from struct_module.commands.list import ListCommand +from struct_module.commands.mcp import MCPCommand +from struct_module.commands.validate import ValidateCommand + + +@pytest.fixture +def parser(): + return argparse.ArgumentParser() + + +def test_generate_creates_base_path_and_console_output(parser, tmp_path): + command = GenerateCommand(parser) + args = parser.parse_args(['struct-x', str(tmp_path / 'base')]) + + # Minimal config: one file item with string content to avoid fetch + config = {'files': [{'hello.txt': 'Hello'}], 'folders': []} + + # Ensure the input store file exists to avoid FileNotFoundError inside TemplateRenderer + store_dir = tmp_path / 'store' + store_dir.mkdir(parents=True, exist_ok=True) + with open(store_dir / 'input.json', 'w') as fh: + fh.write('{}') + + with patch.object(command, '_load_yaml_config', return_value=config), \ + patch('os.path.exists', side_effect=lambda p: False if str(tmp_path / 'base') 
in p else True), \ + patch('os.makedirs') as mock_makedirs, \ + patch('builtins.print') as mock_print: + # Use dry_run so nothing is written; 'file' output still exercises base path creation + args.output = 'file' # still triggers base path creation logic + args.input_store = str(store_dir / 'input.json') + args.dry_run = True + args.vars = None + args.backup = None + args.file_strategy = 'overwrite' + args.global_system_prompt = None + args.structures_path = None + args.non_interactive = True + + command.execute(args) + + mock_makedirs.assert_called() # base path created + + +def test_generate_pre_hook_failure_aborts(parser, tmp_path): + command = GenerateCommand(parser) + args = parser.parse_args(['struct-x', str(tmp_path)]) + + config = {'pre_hooks': ['exit 1'], 'files': []} + + def fake_run(cmd, shell, check, capture_output, text): + raise subprocess.CalledProcessError(1, cmd, output='', stderr='boom') + + with patch.object(command, '_load_yaml_config', return_value=config), \ + patch('subprocess.run', side_effect=fake_run), \ + patch.object(command, '_create_structure') as mock_create_structure: + command.execute(args) + mock_create_structure.assert_not_called() + + +def test_generate_mappings_file_not_found(parser, tmp_path): + command = GenerateCommand(parser) + args = parser.parse_args(['struct-x', str(tmp_path)]) + args.mappings_file = ['missing.yaml'] + + with patch('os.path.exists', return_value=False): + # Should return early without error + command.execute(args) + + +def test_info_nonexistent_file_logs_error(parser): + command = InfoCommand(parser) + args = parser.parse_args(['does-not-exist']) + + with patch('os.path.exists', return_value=False): + # Should just log error and return without exception + command.execute(args) + + +def test_list_with_custom_structures_path(parser, tmp_path): + command = ListCommand(parser) + args = parser.parse_args(['-s', str(tmp_path / 'custom')]) + + custom = str(tmp_path / 'custom') + contribs = 
'/path/to/contribs' + + def mock_join(*parts): + # emulate join used in list._list_structures + if parts[-1] == '..': + return '/path/to' # dir of commands + if parts[-1] == 'contribs': + return contribs + return '/'.join(parts) + + walk_map = { + custom: [(custom, [], ['a.yaml'])], + contribs: [(contribs, [], ['b.yaml'])], + } + + def mock_walk(path): + return walk_map.get(path, []) + + with patch('os.path.dirname', return_value='/path/to/commands'), \ + patch('os.path.realpath', return_value='/path/to/commands'), \ + patch('os.path.join', side_effect=mock_join), \ + patch('os.walk', side_effect=mock_walk), \ + patch('builtins.print') as mock_print: + command._list_structures(args) + mock_print.assert_called() # printed list + + +def test_mcp_command_server_flag(parser): + command = MCPCommand(parser) + args = parser.parse_args(['--server']) + + async def fake_start(): + return None + + with patch.object(command, '_start_mcp_server', side_effect=fake_start) as mock_start: + command.execute(args) + mock_start.assert_called_once() + + +# ValidateCommand error-path tests on helpers + +def test_validate_structure_config_errors(parser): + v = ValidateCommand(parser) + with pytest.raises(ValueError): + v._validate_structure_config('not-a-list') + with pytest.raises(ValueError): + v._validate_structure_config(["not-a-dict"]) # non-dict item + with pytest.raises(ValueError): + v._validate_structure_config([{123: 'abc'}]) # non-str name + with pytest.raises(ValueError): + v._validate_structure_config([{ 'x': 123 }]) # non-str/non-dict content + with pytest.raises(ValueError): + v._validate_structure_config([{ 'x': {} }]) # dict missing keys + + +def test_validate_folders_config_errors(parser): + v = ValidateCommand(parser) + with pytest.raises(ValueError): + v._validate_folders_config('not-a-list') + with pytest.raises(ValueError): + v._validate_folders_config(["not-a-dict"]) # non-dict item + with pytest.raises(ValueError): + v._validate_folders_config([{123: {}}]) # 
non-str name + with pytest.raises(ValueError): + v._validate_folders_config([{ 'name': 'not-a-dict' }]) + with pytest.raises(ValueError): + v._validate_folders_config([{ 'name': {} }]) # missing 'struct' + with pytest.raises(ValueError): + v._validate_folders_config([{ 'name': { 'struct': 10 } }]) # invalid type + with pytest.raises(ValueError): + v._validate_folders_config([{ 'name': { 'struct': 'x', 'with': 'not-dict' } }]) + + +def test_validate_variables_config_errors(parser): + v = ValidateCommand(parser) + with pytest.raises(ValueError): + v._validate_variables_config('not-a-list') + with pytest.raises(ValueError): + v._validate_variables_config(["not-a-dict"]) # non-dict item + with pytest.raises(ValueError): + v._validate_variables_config([{123: {}}]) # non-str name + with pytest.raises(ValueError): + v._validate_variables_config([{ 'name': 'not-a-dict' }]) + with pytest.raises(ValueError): + v._validate_variables_config([{ 'name': {} }]) # missing type + with pytest.raises(ValueError): + v._validate_variables_config([{ 'name': { 'type': 'bad' } }]) + with pytest.raises(ValueError): + v._validate_variables_config([{ 'name': { 'type': 'boolean', 'default': 'yes' } }]) diff --git a/tests/test_content_fetcher_more.py b/tests/test_content_fetcher_more.py new file mode 100644 index 0000000..88c9c0f --- /dev/null +++ b/tests/test_content_fetcher_more.py @@ -0,0 +1,221 @@ +import io +import os +import stat +import subprocess +from pathlib import Path + +import pytest + +from struct_module.content_fetcher import ContentFetcher + + +def test_fetch_local_file(tmp_path): + p = tmp_path / "file.txt" + p.write_text("hello") + cf = ContentFetcher(cache_dir=tmp_path / "cache") + assert cf.fetch_content(f"file://{p}") == "hello" + + +def test_fetch_http_url_caches(monkeypatch, tmp_path): + url = "https://example.com/data.txt" + + class Resp: + text = "DATA" + def raise_for_status(self): + return None + + def fake_get(u): + assert u == url + return Resp() + + cf = 
ContentFetcher(cache_dir=tmp_path / "cache") + monkeypatch.setattr("struct_module.content_fetcher.requests.get", fake_get) + + # First call populates cache + assert cf.fetch_content(url) == "DATA" + + # Second call should read from cache and not invoke requests.get + def boom(u): + raise AssertionError("should not be called due to cache hit") + monkeypatch.setattr("struct_module.content_fetcher.requests.get", boom) + assert cf.fetch_content(url) == "DATA" + + +def test_fetch_github_https_and_pull(monkeypatch, tmp_path): + cf = ContentFetcher(cache_dir=tmp_path / "cache") + repo_dir = tmp_path / "cache" / "owner_repo_main" + file_rel = "path/to/file.txt" + file_full = repo_dir / file_rel + + # Simulate repo already cloned -> should call pull + repo_dir.mkdir(parents=True, exist_ok=True) + file_full.parent.mkdir(parents=True, exist_ok=True) + file_full.write_text("GDATA") + + calls = {"pull": 0, "clone": 0} + + def fake_run(args, check): + if args[:2] == ["git", "clone"]: + calls["clone"] += 1 + # create the structure for clone case + repo_dir.mkdir(parents=True, exist_ok=True) + file_full.parent.mkdir(parents=True, exist_ok=True) + file_full.write_text("GDATA") + elif args[:3] == ["git", "-C", str(repo_dir)]: + calls["pull"] += 1 + else: + raise AssertionError(f"Unexpected git call: {args}") + + monkeypatch.setattr(subprocess, "run", fake_run) + + out = cf.fetch_content("githubhttps://owner/repo/main/path/to/file.txt") + assert out == "GDATA" + # Since repo existed, should have pulled + assert calls["pull"] == 1 + + +def test_fetch_github_clone_path(monkeypatch, tmp_path): + # Force fresh clone path + cf = ContentFetcher(cache_dir=tmp_path / "cache") + repo_dir = tmp_path / "cache" / "owner_repo_dev" + file_rel = "f.txt" + file_full = repo_dir / file_rel + + calls = {"clone": 0} + + def fake_run(args, check): + if args[:2] == ["git", "clone"]: + calls["clone"] += 1 + repo_dir.mkdir(parents=True, exist_ok=True) + file_full.parent.mkdir(parents=True, exist_ok=True) + 
file_full.write_text("X") + else: + raise AssertionError("Only clone expected") + + monkeypatch.setattr(subprocess, "run", fake_run) + + out = cf.fetch_content("github://owner/repo/dev/f.txt") + assert out == "X" + assert calls["clone"] == 1 + + +def test_fetch_github_file_not_found(monkeypatch, tmp_path): + cf = ContentFetcher(cache_dir=tmp_path / "cache") + repo_dir = tmp_path / "cache" / "owner_repo_main" + + def fake_run(args, check): + # Ensure repo exists but no file + if args[:2] == ["git", "clone"]: + repo_dir.mkdir(parents=True, exist_ok=True) + elif args[:3] == ["git", "-C", str(repo_dir)]: + return None + + monkeypatch.setattr(subprocess, "run", fake_run) + + with pytest.raises(FileNotFoundError): + cf.fetch_content("githubssh://owner/repo/main/does_not_exist.txt") + + +def test_fetch_unsupported(): + cf = ContentFetcher(cache_dir=Path("/tmp/cache")) + with pytest.raises(ValueError): + cf.fetch_content("unknown://foo") + + +def test_http_error_bubbles_and_no_cache(monkeypatch, tmp_path): + url = "https://example.com/oops" + + class Resp: + def raise_for_status(self): + raise Exception("HTTP error") + + def fake_get(u): + assert u == url + return Resp() + + cf = ContentFetcher(cache_dir=tmp_path / "cache") + monkeypatch.setattr("struct_module.content_fetcher.requests.get", fake_get) + + with pytest.raises(Exception): + cf.fetch_content(url) + + # Subsequent call uses requests again (no cache file was created) + called = {"count": 0} + def fake_get2(u): + called["count"] += 1 + return Resp() + monkeypatch.setattr("struct_module.content_fetcher.requests.get", fake_get2) + with pytest.raises(Exception): + cf.fetch_content(url) + assert called["count"] == 1 + + +def test_github_invalid_path_raises(monkeypatch, tmp_path): + cf = ContentFetcher(cache_dir=tmp_path / "cache") + with pytest.raises(ValueError): + cf.fetch_content("github://owner/repo-only") + with pytest.raises(ValueError): + cf.fetch_content("githubhttps://owner/repo-only") + with 
pytest.raises(ValueError): + cf.fetch_content("githubssh://owner/repo-only") + + +def test_s3_unavailable_raises_valueerror(monkeypatch, tmp_path): + cf = ContentFetcher(cache_dir=tmp_path / "cache") + # Force unavailable path; dispatcher will not include s3 and treat as unsupported + import struct_module.content_fetcher as mod + monkeypatch.setattr(mod, "boto3_available", False) + with pytest.raises(ValueError): + cf.fetch_content("s3://bucket/key.txt") + + +def test_gcs_unavailable_raises_valueerror(monkeypatch, tmp_path): + cf = ContentFetcher(cache_dir=tmp_path / "cache") + import struct_module.content_fetcher as mod + monkeypatch.setattr(mod, "gcs_available", False) + with pytest.raises(ValueError): + cf.fetch_content("gs://bucket/key.txt") + + +def test_s3_invalid_path_raises_valueerror(monkeypatch, tmp_path): + cf = ContentFetcher(cache_dir=tmp_path / "cache") + # Ensure available so it reaches regex + import struct_module.content_fetcher as mod + monkeypatch.setattr(mod, "boto3_available", True) + # Do not mock boto3 since we only test invalid pattern, which raises earlier + with pytest.raises(ValueError): + cf.fetch_content("s3://invalid-format") + + +def test_gcs_invalid_path_raises_valueerror(monkeypatch, tmp_path): + cf = ContentFetcher(cache_dir=tmp_path / "cache") + import struct_module.content_fetcher as mod + monkeypatch.setattr(mod, "gcs_available", True) + with pytest.raises(ValueError): + cf.fetch_content("gs://invalid-format") + + +def test_git_clone_error_bubbles(monkeypatch, tmp_path): + cf = ContentFetcher(cache_dir=tmp_path / "cache") + def fake_run(args, check): + if args[:2] == ["git", "clone"]: + raise subprocess.CalledProcessError(1, args) + monkeypatch.setattr(subprocess, "run", fake_run) + with pytest.raises(subprocess.CalledProcessError): + cf.fetch_content("github://owner/repo/main/file.txt") + + +def test_git_pull_error_bubbles(monkeypatch, tmp_path): + cf = ContentFetcher(cache_dir=tmp_path / "cache") + repo_dir = tmp_path / 
"cache" / "owner_repo_main" + # Simulate existing repo so it tries pull + repo_dir.mkdir(parents=True, exist_ok=True) + (repo_dir / "file.txt").write_text("x") + + def fake_run(args, check): + if args[:3] == ["git", "-C", str(repo_dir)]: + raise subprocess.CalledProcessError(1, args) + monkeypatch.setattr(subprocess, "run", fake_run) + + with pytest.raises(subprocess.CalledProcessError): + cf.fetch_content("githubhttps://owner/repo/main/file.txt") diff --git a/tests/test_filters_more.py b/tests/test_filters_more.py new file mode 100644 index 0000000..bb772eb --- /dev/null +++ b/tests/test_filters_more.py @@ -0,0 +1,93 @@ +import os +import types +import pytest + +from struct_module import filters + + +def test_slugify_basic(): + assert filters.slugify("Hello World!") == "hello-world" + assert filters.slugify("Already-Slugified_123") == "already-slugified123" + + +def test_get_default_branch_success(monkeypatch): + # Build a minimal fake Github client + class FakeRepo: + default_branch = "main" + + class FakeGithub: + def __init__(self, token=None): + self.token = token + def get_repo(self, name): + assert name == "owner/repo" + return FakeRepo() + + monkeypatch.setenv("GITHUB_TOKEN", "tok") + monkeypatch.setattr(filters, "Github", FakeGithub) + + assert filters.get_default_branch("owner/repo") == "main" + + +def test_get_default_branch_error(monkeypatch): + class FakeGithub: + def get_repo(self, name): + raise Exception("boom") + + monkeypatch.delenv("GITHUB_TOKEN", raising=False) + monkeypatch.setattr(filters, "Github", FakeGithub) + filters.cache.clear() + + assert filters.get_default_branch("owner/repo") == "DEFAULT_BRANCH_ERROR" + + +def test_get_latest_release_success(monkeypatch): + class FakeRepo: + default_branch = "dev" + def get_latest_release(self): + class R: + tag_name = "v1.2.3" + return R() + + class FakeGithub: + def __init__(self, token=None): + pass + def get_repo(self, name): + return FakeRepo() + + monkeypatch.setattr(filters, "Github", 
FakeGithub) + monkeypatch.delenv("GITHUB_TOKEN", raising=False) + # Clear cache between tests to ensure function recomputes + filters.cache.clear() + + assert filters.get_latest_release("owner/repo") == "v1.2.3" + + +def test_get_latest_release_falls_back_to_default_branch(monkeypatch): + class FakeRepo: + default_branch = "main" + def get_latest_release(self): + raise Exception("no releases") + + class FakeGithub: + def get_repo(self, name): + return FakeRepo() + + monkeypatch.setattr(filters, "Github", FakeGithub) + filters.cache.clear() + + assert filters.get_latest_release("owner/repo") == "main" + + +def test_get_latest_release_error(monkeypatch): + class FakeRepo: + def get_latest_release(self): + raise Exception("no releases") + + class FakeGithub: + def get_repo(self, name): + raise Exception("bad repo") + + monkeypatch.setattr(filters, "Github", FakeGithub) + filters.cache.clear() + + assert filters.get_latest_release("owner/repo") == "LATEST_RELEASE_ERROR" diff --git a/tests/test_utils_more.py b/tests/test_utils_more.py new file mode 100644 index 0000000..964709d --- /dev/null +++ b/tests/test_utils_more.py @@ -0,0 +1,65 @@ +import builtins +import os +import subprocess +import textwrap +import types +import yaml +import pytest + +from struct_module import utils + + +def test_read_config_file(tmp_path): + cfg = tmp_path / "config.yaml" + cfg.write_text(textwrap.dedent( + """ + a: 1 + b: two + nested: + x: y + """ + )) + data = utils.read_config_file(str(cfg)) + assert data == {"a": 1, "b": "two", "nested": {"x": "y"}} + + +def test_merge_configs_prefers_args_and_fills_missing(): + class Args: + def __init__(self): + self.a = None + self.b = "cli" + self.c = None + args = Args() + merged = utils.merge_configs({"a": 1, "b": "file", "c": 3}, args) + assert merged["a"] == 1 # filled from file because arg is None + assert merged["b"] == "cli" # arg wins because it is not None + assert merged["c"] == 3 + + +def test_get_current_repo_https(monkeypatch): + def 
fake_check_output(cmd, text): + return "https://github.com/owner/repo.git\n" + monkeypatch.setattr(subprocess, "check_output", fake_check_output) + assert utils.get_current_repo() == "owner/repo" + + +def test_get_current_repo_ssh(monkeypatch): + def fake_check_output(cmd, text): + return "git@github.com:owner/repo.git\n" + monkeypatch.setattr(subprocess, "check_output", fake_check_output) + assert utils.get_current_repo() == "owner/repo" + + +def test_get_current_repo_not_github(monkeypatch): + # Current behavior: any git@host format returns owner/repo regardless of host + def fake_check_output(cmd, text): + return "git@example.com:owner/repo.git\n" + monkeypatch.setattr(subprocess, "check_output", fake_check_output) + assert utils.get_current_repo() == "owner/repo" + + +def test_get_current_repo_not_git(monkeypatch): + def raise_called(*args, **kwargs): + raise subprocess.CalledProcessError(1, ["git", "config"]) # simulate no git + monkeypatch.setattr(subprocess, "check_output", raise_called) + assert utils.get_current_repo() == "Error: Not a Git repository or no remote URL set"