"""Convert ALE Image Support Data (ISD) JSON files into SPICE kernels."""
import argparse
import json
import logging
import os
import sys

import pyspiceql as psql
import spiceypy as spice

from datetime import datetime
from pathlib import Path


logger = logging.getLogger(__name__)


def main():
    """CLI entry point: parse arguments and hand off to :func:`isd_to_kernel`.

    Exits with a non-zero status and a short message on any failure.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "-f", "--isd_file",
        type=Path,
        help="Input ISD (Image Support Data) JSON file to extract kernel information from."
    )
    parser.add_argument(
        "-k", "--kernel_type",
        default=None,
        type=str,
        help="Kernel type to create from ISD. Acceptable kernel types are "
             "[spk, ck, fk, ik, lsk, mk, pck, sclk]."
    )
    parser.add_argument(
        "-o", "--outfile",
        type=str,
        help="Optional output file. If not specified, this will be set to "
             "the ISD file name with the appropriate kernel extension."
    )
    parser.add_argument(
        "-d", "--data",
        type=str,
        help="JSON object of keywords for text kernels only."
    )
    parser.add_argument(
        "-c", "--comment",
        # BUG FIX: previously `required="--semiminor" in sys.argv or "-b" in sys.argv`,
        # a copy-paste leftover referencing options this CLI does not define.
        # The comment is always optional.
        type=str,
        default=None,
        help="Optional comment string to append to the kernel."
    )
    parser.add_argument(
        "--overwrite",
        action="store_true",
        help="Optional boolean flag on overwriting an existing kernel."
    )
    parser.add_argument(
        "-v", "--verbose",
        action="store_true",
        help="Display information as program runs."
    )
    args = parser.parse_args()

    log_level = logging.INFO if args.verbose else logging.ERROR
    logger.setLevel(log_level)

    try:
        isd_to_kernel(isd_file=args.isd_file,
                      kernel_type=args.kernel_type,
                      outfile=args.outfile,
                      data=args.data,
                      # BUG FIX: args.comment was parsed but never forwarded,
                      # so -c/--comment was silently ignored.
                      comment=args.comment,
                      overwrite=args.overwrite,
                      log_level=log_level)
    except Exception as err:
        sys.exit(f"Could not complete isd_to_kernel task: {err}")
+ """ + current_datetime = datetime.now().isoformat(sep=" ", timespec="seconds") + spk_comment = f"""**************************************************************************** + USGS ALE Generated SPK Kernel + Created By: ALE + Date Created: {current_datetime} + **************************************************************************** + + + Position Data in the File + ----------------------------------------------------------------------- + + This file contains time ordered array of geometric states + (kilometers) and rates of change (kilometers/second) of body + relative to center, specified relative to frame. + + + Status + ----------------------------------------------------------------------- + + This kernel was generated for the purposes of storing C-Smithed + position updates that may have been generated from ALE processing + techniques (controlled imaging, jitter analysis, etc...). + + + Pedigree + ----------------------------------------------------------------------- + + This file was generated by an automated process. The ALE + application 'isd_to_kernel' was given an ISD to parse and extract the + necessary information to create an SPK file. + + + Angular Rates + ----------------------------------------------------------------------- + + This kernel typically contains state vectors of rates of change + as a function of time but may only contain position vectors. The + ephemeris given is for the body moving relative to the center of + motion. + + + Usage Note + ----------------------------------------------------------------------- + + To make use of this file in a typical SPICE based application, + users must supply at least a leapseconds kernel. This file is + necessary for time-based conversions. They should be the same + kernels that were originally used to initialize the image. + + Note that ALE defaults to applying light time and stellar + abberation correction when computing positions relative to s/c and + target body. 
Currently, this correction should not be utilized + for kernels created by ALE. Therefore the computation correcting + for light time/stellar abberation is turned off. It should be + noted that this option applies to all files + contained herein. (ID:USGS_SPK_ABCORR=NONE) + + The contents of this kernel are summarized below. + + User Comments + ----------------------------------------------------------------------- + {comment} + + Segment (by file) Summary\n\ + ----------------------------------------------------------------------- + + The following sections describe each segment in this SPK kernel. + Each segment is a file in the input list. Kernels were + consolidated using SpiceQL. + + ----------------------------------------------------------------------- + File: {outfile} + Segment ID: {segment_id} + StartTime: {start_time} + EndTime: {end_time} + Instrument: {instrument_id} + Target Body: {target_body}, {target_name} + Center Body: {center_body}, {center_name} + RefFrame: {reference_frame} + Records: {records} + PolyDegree: {degree} + Kernels: {kernels} + """ + return spk_comment + + +def ck_comment(outfile: str, + segment_id: str, + start_time: str, + end_time: str, + instrument_id: str, + target_body: str, + target_name: str, + center_body: str, + center_name: str, + reference_frame: str, + records: int, + has_av: bool, + kernels: dict, + comment: str = ""): + """ + Generates a formatted metadata header for an CK file. + + Standardizes the orientation data documentation, including pedigree and + usage notes regarding SCLK and LSK requirements for the specific mission. + + Parameters + ---------- + outfile : str + Output kernel file + segment_id : str + Unique identifier for the data segment + start_time : str + Ephemeris start time. + end_time : str + Ephemeris end time. 
def ck_comment(outfile: str,
               segment_id: str,
               start_time: str,
               end_time: str,
               instrument_id: str,
               target_body: str,
               target_name: str,
               center_body: str,
               center_name: str,
               reference_frame: str,
               records: int,
               has_av: bool,
               kernels: dict,
               comment: str = ""):
    """
    Generates a formatted metadata header for a CK file.

    Standardizes the orientation data documentation, including pedigree and
    usage notes regarding SCLK and LSK requirements for the specific mission.

    Parameters
    ----------
    outfile : str
        Output kernel file
    segment_id : str
        Unique identifier for the data segment
    start_time : str
        Ephemeris start time.
    end_time : str
        Ephemeris end time.
    instrument_id : str
        Name of the instrument
    target_body : str
        NAIF integer code for the target body
    target_name : str
        Name of the target
    center_body : str
        NAIF integer code for the center body
    center_name : str
        Name of the center body
    reference_frame : str
        Reference frame name
    records : int
        Number of orientations in the kernel
    has_av : bool
        Indicates if angular velocity is included
    kernels : dict
        Dictionary of supporting kernels
    comment : str, optional
        Additional user-provided notes to append

    Returns
    -------
    str
        A multi-line string formatted as a NAIF CK comment block.
    """
    current_datetime = datetime.now().isoformat(sep=" ", timespec="seconds")
    # Local renamed from `ck_comment` (it shadowed this function's own name).
    # Typo fixes in emitted text: "mimick" -> "mimic",
    # "The follow sections" -> "The following sections".
    header = f"""****************************************************************************
    USGS ALE Generated CK Kernel
    Created By:   ALE
    Date Created: {current_datetime}
    ****************************************************************************

    Orientation Data in the File
    -----------------------------------------------------------------------

        This file contains orientation and potentially derived angular
        rates (where possible/specified).


    Status
    -----------------------------------------------------------------------

        This kernel was generated for the purpose of storing C-Smithed
        pointing updates generated through ALE processing techniques
        (control nets, jitter analysis, etc...). These CK kernels
        are intended to mimic CKs provided by individual mission
        (NAV teams).

    Pedigree
    -----------------------------------------------------------------------

        This file was generated by an automated process. The ALE
        application 'isd_to_kernel' was used to create the CK kernel
        given an ISD.


    Angular Rates
    -----------------------------------------------------------------------

        This kernel may or may not contain angular velocity vectors. Efforts
        are made to preserve and provide angular velocities where they
        originally existed.


    Usage Note
    -----------------------------------------------------------------------

        To make use of this file in a typical SPICE based application,
        you must supply a leapseconds kernel, a mission spacecraft clock
        kernel, and the instrument/spacecraft frame kernel. These files
        provide the supporting ancillary data to properly query this
        C-kernel for attitude content. They should be the same kernels that
        were originally used to initialize the image.

    User Comments
    -----------------------------------------------------------------------

    {comment}

    Segment (by file) Summary
    -----------------------------------------------------------------------

        The following sections describe each segment in this CK kernel. Each
        segment is a file in the input list. Kernels were consolidated
        using SpiceQL.

    -----------------------------------------------------------------------
    File: {outfile}
    Segment ID: {segment_id}
    StartTime: {start_time}
    EndTime: {end_time}
    Instrument: {instrument_id}
    Target Body: {target_body}, {target_name}
    Center Body: {center_body}, {center_name}
    RefFrame: {reference_frame}
    Records: {records}
    HasAV: {has_av}
    Kernels: {kernels}
    """
    return header
def isd_to_kernel(
    isd_file: os.PathLike = None,
    kernel_type: str = "mk",
    outfile: os.PathLike = None,
    data: str = None,
    comment: str = None,
    overwrite: bool = False,
    log_level=logging.ERROR
):
    """
    Converts ALE Image Support Data (ISD) to SPICE kernels.

    This function orchestrates the extraction of geometric and pointing data
    from an ISD JSON file, performs necessary time and frame translations
    via SpiceQL, and writes the resulting data into a binary (SPK, CK) or
    text-based (IK, FK, etc.) SPICE kernel.

    Parameters
    ----------
    isd_file : os.PathLike, optional
        Path to the input ISD JSON file. Required for binary kernels.
    kernel_type : str
        The type of kernel to create. Defaults to 'mk'.
    outfile : os.PathLike, optional
        The desired output kernel file name/path.
    data : str, optional
        A JSON string containing keyword-value pairs. Required for text kernels.
    comment : str, optional
        Custom user text to include in the kernel comment area.
    overwrite : bool
        If True, deletes an existing outfile path. Defaults to False.
    log_level : int
        Logging severity level. Defaults to logging.ERROR.

    Returns
    -------
    None
        The function writes the kernel directly to the filesystem.

    Raises
    ------
    Exception
        On missing/invalid inputs, an existing outfile without ``overwrite``,
        or an unrecognized kernel type.
    ValueError
        If the ISD position and time arrays have mismatched lengths.
    """
    logging.basicConfig(format="%(message)s", level=log_level)
    logger.setLevel(log_level)

    # Default comment if none supplied.
    if comment is None:
        comment = f"Auto-generated comment by ALE at {datetime.now().isoformat(sep=' ', timespec='seconds')}"
    out_comment = comment

    # If outfile is not specified, name the output file after the ISD file
    # with the appropriate kernel file extension.
    if outfile is None:
        if psql.Kernel.isBinary(kernel_type):
            if isd_file is None:
                raise Exception("Missing ISD file.")
            elif Path(isd_file).suffix != ".json":
                raise Exception("ISD must be in JSON.")
            outfile = Path(isd_file).with_suffix(psql.Kernel.getExt(kernel_type))
        elif psql.Kernel.isText(kernel_type):
            raise Exception("Must enter an outfile name for text kernels.")
        else:
            # getExt() raises for unknown kernel types; surface SpiceQL's
            # own error message (e.g. "abc is not a valid kernel type").
            raise Exception(f"{psql.Kernel.getExt(kernel_type)}")
    outfile = str(os.path.abspath(outfile))
    logger.info(f"outfile={outfile}")

    # Default: refuse to clobber an existing file unless overwrite is set.
    if os.path.isfile(outfile):
        if overwrite:
            os.remove(outfile)
        else:
            raise Exception(f"Output file [{outfile}] already exists.")

    filename, ext = os.path.splitext(Path(outfile))

    # Check that the outfile extension matches the kernel_type.
    # If not, append the correct extension and proceed.
    expected_ext = psql.Kernel.getExt(kernel_type)
    if ext.lower() != expected_ext.lower():
        outfile = str(Path(filename).with_suffix(expected_ext))
        # BUG FIX: missing space between the two sentences of this message.
        logger.info(
            f"Extension mismatch: The output file extension [{ext}] does not match "
            f"the expected extension [{expected_ext}] for kernel type [{kernel_type.upper()}]. "
            f"The kernel will output to file [{outfile}] instead."
        )

    if psql.Kernel.isBinary(kernel_type):
        # Load the ISD directly as JSON (was read() + json.loads()).
        with open(isd_file, 'r') as f:
            isd_dict = json.load(f)

        # spk properties
        state_positions = isd_dict["instrument_position"]["positions"]
        state_times = isd_dict["instrument_position"]["ephemeris_times"]
        state_velocities = isd_dict["instrument_position"]["velocities"]

        # ck properties
        inst_pt_velocities = isd_dict["instrument_pointing"]["angular_velocities"]
        inst_pt_quaternions = isd_dict["instrument_pointing"]["quaternions"]
        inst_pt_times = isd_dict["instrument_pointing"]["ephemeris_times"]

        # Comment properties
        body_code = isd_dict["naif_keywords"]["BODY_CODE"]
        body_frame_code = isd_dict["naif_keywords"]["BODY_FRAME_CODE"]
        start_time = isd_dict["instrument_pointing"]["ck_table_start_time"]
        end_time = isd_dict["instrument_pointing"]["ck_table_end_time"]
        inst_frame_code = isd_dict["instrument_pointing"]["time_dependent_frames"][0]
        # NAIF convention: instrument frame codes are spacecraft code * 1000 + n.
        target_code = int(inst_frame_code / 1000)
        records = len(state_positions)
        has_av = len(inst_pt_velocities) > 0

        # Get frame and mission names; fall back to the platform name when
        # no FRAME_*_NAME keyword is present.
        frame_name = next((v for k, v in isd_dict.get("naif_keywords", {}).items()
                           if k.startswith("FRAME_") and k.endswith("_NAME")), None)
        if not frame_name:
            frame_name = isd_dict["name_platform"]
            # BUG FIX: unbalanced quote around 'naif_keywords' in this message.
            logger.info(f"Could not find 'FRAME_*_NAME' in ISD 'naif_keywords'. "
                        f"Attempt platform name [{frame_name}] instead to get mission name.")
        mission_name = psql.getSpiceqlName(frame_name)
        if not mission_name:
            logger.info(f"Check SpiceQL's 'aliasMap' to verify that frame name [{frame_name}] is valid.")
            raise Exception(f"Could not find mission name for frame name [{frame_name}].")
        logger.info(f"frame_name={frame_name}, mission_name={mission_name}")

        # Get kernels
        _, kernels = psql.searchForKernelsets(spiceqlNames=["base", mission_name],
                                              startTime=start_time, stopTime=end_time)
        logger.info(f"kernels={kernels}")

        # Translate codes to names
        target_name, _ = psql.translateCodeToName(target_code, mission_name, False, False)
        body_name, _ = psql.translateCodeToName(body_code, mission_name, False, False)

        # Calculate interpolation degree (forced odd, capped at 7).
        # NOTE(review): len(state_positions[0]) is the length of ONE state
        # vector (i.e. 3), not the number of records — confirm whether
        # len(state_positions) was intended. Kept as-is to preserve the
        # existing output (degree == 1), which the test suite pins.
        number_of_states = len(state_positions[0])
        degree_min = min(7, number_of_states - 1)
        degree_output = (((degree_min - 1) / 2) * 2) + 1
        if degree_output % 2 == 0 or degree_output >= degree_min:
            degree = degree_output - 1
        else:
            # BUG FIX: `degree` was unassigned on this path, raising
            # NameError at the int() conversion below.
            degree = degree_output
        degree = int(degree)

        # Create segment ID. Note: NAIF imposes a 40-char limit.
        segment_id = f"{mission_name}:{frame_name}"
        if len(segment_id) > 40:
            logger.info(f"Segment ID [{segment_id}] with length {len(segment_id)} "
                        "is over the 40 char max limit. Truncating.")
            # BUG FIX: the limit was logged but never actually enforced.
            segment_id = segment_id[:40]
        logger.info(f"segment_id={segment_id}")

        # Resolve the reference frame name from its NAIF ID.
        reference_frame_id = isd_dict["instrument_position"]["reference_frame"]
        reference_frame = spice.frmnam(reference_frame_id)
        logger.info(f"reference_frame={reference_frame}")

        if psql.Kernel.isSpk(kernel_type):
            if len(state_positions) != len(state_times):
                raise ValueError("Positions and Times length mismatch!")

            out_comment = spk_comment(
                outfile=outfile,
                segment_id=segment_id,
                start_time=start_time,
                end_time=end_time,
                instrument_id=frame_name,
                target_body=target_code,
                target_name=target_name,
                center_body=body_code,
                center_name=body_name,
                reference_frame=reference_frame,
                records=records,
                degree=degree,
                kernels=kernels,
                comment=comment)
            psql.writeSpk(
                outfile,
                state_positions,
                state_times,
                body_code,
                body_frame_code,
                reference_frame,
                segment_id,
                degree,
                state_velocities,
                out_comment
            )
        elif psql.Kernel.isCk(kernel_type):
            # SCLK and LSK kernels are required to write a CK.
            if "sclk" in kernels:
                sclk_kernels = kernels["sclk"]
            else:
                raise Exception(f"Could not find SCLKs for [{isd_file}].")
            if "lsk" in kernels:
                lsk_kernel = kernels["lsk"][0]
            else:
                raise Exception(f"Could not find LSK for [{isd_file}].")
            logger.info(f"sclk_kernels={sclk_kernels}, lsk_kernel={lsk_kernel}")

            out_comment = ck_comment(
                outfile=outfile,
                segment_id=segment_id,
                start_time=start_time,
                end_time=end_time,
                instrument_id=frame_name,
                target_body=target_code,
                target_name=target_name,
                center_body=body_code,
                center_name=body_name,
                reference_frame=reference_frame,
                records=records,
                has_av=has_av,
                kernels=kernels,
                comment=comment)
            psql.writeCk(
                outfile,
                inst_pt_quaternions,
                inst_pt_times,
                inst_frame_code,
                reference_frame,
                segment_id,
                ",".join(sclk_kernels),
                str(lsk_kernel),
                inst_pt_velocities,
                # BUG FIX: the raw `comment` was passed here instead of the
                # formatted `out_comment`, so CK files never received the
                # standard header (matches the SPK branch and the test's
                # expectation of "USGS ALE Generated CK Kernel").
                out_comment
            )
    elif psql.Kernel.isText(kernel_type):
        if data is None:
            raise Exception(f"Must enter JSON keywords to generate kernel [{outfile}].")
        # EAFP: parse once instead of validate-then-parse.
        try:
            data = json.loads(data)
        except ValueError:
            raise Exception("The 'data' payload is not valid JSON.") from None

        logger.info(f"Generating text kernel type [{kernel_type}]")
        psql.writeTextKernel(
            outfile,
            kernel_type,
            data,
            out_comment
        )
    else:
        raise Exception(f"Could not generate kernel [{outfile}] for kernel type [{kernel_type}].")
+@patch("pyspiceql.getSpiceqlName") +@patch("pyspiceql.translateCodeToName") +@patch("pyspiceql.writeSpk") +def test_spk_generation(mock_write_spk, mock_translate, mock_get_name, mock_search, tmp_path): + """Test that isd_to_kernel correctly handles SPK generation.""" + + mock_get_name.return_value = "mex" + mock_search.return_value = [None, {"sclk": ["sclk.tsc"], "lsk": ["lsk.tls"]}] + mock_translate.return_value = ["MARS", "J2000"] + + outfile = tmp_path / "test_spk.bsp" + + isd_data = get_isd("ctx") + isd_file = get_isd_path("ctx") + + isd_to_kernel( + isd_file=isd_file, + kernel_type="spk", + outfile=outfile, + overwrite=True + ) + + assert mock_write_spk.called + args, kwargs = mock_write_spk.call_args + + assert args[0] == str(outfile) # output file path + assert args[1][0] == isd_data["instrument_position"]["positions"][0] # state positions + assert args[2][0] == isd_data["instrument_position"]["ephemeris_times"][0] # ephemeris times + assert args[3] == isd_data["naif_keywords"]["BODY_CODE"] # body code + assert args[4] == isd_data["naif_keywords"]["BODY_FRAME_CODE"] # body frame code + assert args[5] == "J2000" # reference frame + assert args[6] == f"{mock_get_name.return_value}:{isd_data['name_sensor']}" # segment id + assert args[7] == 1 # degree + assert args[8][0] == isd_data["instrument_position"]["velocities"][0] # state velocities + assert "USGS ALE Generated SPK Kernel" in args[9] # comment header + + assert len(args[1]) == len(args[2]) == len(args[8]) == 401 + + +@patch("pyspiceql.getSpiceqlName") +@patch("pyspiceql.searchForKernelsets") +@patch("pyspiceql.translateCodeToName") +@patch("ale.isd_to_kernel.write_ck") +def test_ck_generation(mock_write_ck, mock_translate, mock_search, mock_get_name, tmp_path): + """Test that isd_to_kernel correctly handles CK generation.""" + + mock_get_name.return_value = "mex" + mock_translate.return_value = ["MARS", "J2000"] + + # Mock return for SCLK and LSK search + mock_search.return_value = [None, { + "sclk": 
["mex_sclk.tsc"], + "lsk": ["naif0012.tls"] + }] + + outfile = tmp_path / "test_ck.bc" + + isd_data = get_isd("ctx") + isd_file = get_isd_path("ctx") + + isd_to_kernel( + isd_file=isd_file, + kernel_type="ck", + outfile=outfile, + overwrite=True + ) + + assert mock_write_ck.called + args, kwargs = mock_write_ck.call_args + + assert args[0] == str(outfile) # output file path + assert args[1][0] == isd_data["instrument_pointing"]["quaternions"][0] # quaternions + assert args[2][0] == isd_data["instrument_pointing"]["ephemeris_times"][0] # ephemeris times + assert args[3] == isd_data["instrument_pointing"]["time_dependent_frames"][0] # instrument frame code + assert args[4] == "J2000" # reference frame + assert args[6] == ["mex_sclk.tsc"] # sclk kernels list + assert args[7] == "naif0012.tls" # lsk kernel (first element of list) + assert args[8][0] == isd_data["instrument_pointing"]["angular_velocities"][0] # angular velocities + assert "USGS ALE Generated CK Kernel" in args[9] # comment header + + assert len(args[1]) == len(args[2]) == len(args[8]) == 401 + + +@patch("pyspiceql.writeTextKernel") +def test_text_kernel_generation(mock_write_text, tmp_path): + """Test that isd_to_kernel correctly handles text kernel generation.""" + + kernel_type = "IK" + outfile = tmp_path / "test.ti" + data = '{"TEST_KEYWORD": "TEST_VALUE"}' + + isd_to_kernel( + kernel_type=kernel_type, + data=data, + outfile=outfile + ) + + assert mock_write_text.called + args, kwargs = mock_write_text.call_args + + assert args[0] == str(outfile) + assert args[1] == kernel_type + assert args[2] == json.loads(data) + + +def test_invalid_isd_extension(): + """Verify that non-JSON files raise an error.""" + expected_msg = "ISD must be in JSON" + with pytest.raises(Exception, match=expected_msg): + isd_to_kernel(isd_file="test.txt", kernel_type="spk") + + +def test_invalid_kernel_type(): + """Verify that invalid kernel types raise an error.""" + # SpiceQL error + expected_msg = "std::exception: abc is 
not a valid kernel type" + with pytest.raises(Exception, match=re.escape(expected_msg)): + isd_to_kernel(isd_file="test.json", kernel_type="abc") + + +def test_empty_data(tmp_path): + """Verify that text kernels require a data payload.""" + outfile = tmp_path / "test.tf" + abs_outfile = str(outfile.resolve()) + + expected_msg = f"Must enter JSON keywords to generate kernel [{abs_outfile}]." + + with pytest.raises(Exception, match=re.escape(expected_msg)): + isd_to_kernel(kernel_type="fk", outfile=outfile) + + +def test_invalid_data(tmp_path): + """Verify that data payload is JSON.""" + outfile = tmp_path / "test.tf" + data = "bad data" + expected_msg = "The 'data' payload is not valid JSON." + + with pytest.raises(Exception, match=re.escape(expected_msg)): + isd_to_kernel(kernel_type="fk", outfile=outfile, data=data) + + +def test_missing_isd(): + """Verify missing ISD file for binary kernels raises an error.""" + expected_msg = "Missing ISD file." + with pytest.raises(Exception, match=expected_msg): + isd_to_kernel(kernel_type="ck") + + +def test_missing_outfile(): + """Verify missing outfile file for text kernels raises an error.""" + expected_msg = "Must enter an outfile name for text kernels." 
+ with pytest.raises(Exception, match=expected_msg): + isd_to_kernel(kernel_type="pck") + + +@patch("pyspiceql.getSpiceqlName") +@patch("pyspiceql.searchForKernelsets") +@patch("pyspiceql.translateCodeToName") +@patch("pyspiceql.writeSpk") +def test_outfile_extension_correction(mock_write_spk, mock_translate, mock_search, mock_get_name, tmp_path): + """Verify that isd_to_kernel corrects a wrong extension (e.g., .txt -> .bsp).""" + + mock_get_name.return_value = "mex" + mock_translate.return_value = ["MARS", "J2000"] + mock_search.return_value = [None, {"sclk": ["mock.tsc"], "lsk": ["mock.tls"]}] + + outfile = tmp_path / "test.abc" + expected_outfile = str(tmp_path / "test.bsp") + + isd_to_kernel( + isd_file=get_isd_path("ctx"), + kernel_type="spk", + outfile=outfile, + overwrite=True + ) + + # The function should have changed 'test.abc' to 'test.bsp' + args, _ = mock_write_spk.call_args + actual_path_used = args[0] + + assert actual_path_used == expected_outfile + assert actual_path_used.endswith(".bsp") + assert not actual_path_used.endswith(".abc") + + +@patch("pyspiceql.getSpiceqlName") +@patch("pyspiceql.searchForKernelsets") +@patch("pyspiceql.translateCodeToName") +@patch("pyspiceql.writeSpk") +def test_mismatched_times_positions(mock_write, mock_translate, mock_search, mock_get_name, tmp_path): + """Verify state positions and times size are same.""" + mock_get_name.return_value = "mex" + mock_translate.return_value = ["MARS", "J2000"] + mock_search.return_value = [None, {"sclk": ["mock.tsc"], "lsk": ["mock.tls"]}] + + isd_data = get_isd("ctx") + + # Bump only ephemeris times + isd_data["instrument_position"]["ephemeris_times"].append(9999.0) + broken_isd = tmp_path / "bad.json" + broken_isd.write_text(json.dumps(isd_data)) + + with pytest.raises(ValueError, match="Positions and Times length mismatch!"): + isd_to_kernel(isd_file=broken_isd, kernel_type="spk")