From d1665e3c1670614b676d1c97bcbe1bc287ea9753 Mon Sep 17 00:00:00 2001 From: Ben Lovell Date: Wed, 21 Jan 2026 13:36:57 +0100 Subject: [PATCH 1/8] fix: don't change teams when JWT expires (#158) --- crates/config/src/session.rs | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/crates/config/src/session.rs b/crates/config/src/session.rs index f7a22b07..44bea0e4 100644 --- a/crates/config/src/session.rs +++ b/crates/config/src/session.rs @@ -217,8 +217,11 @@ impl Session { jwt: session_response.token.jwt.clone(), }; - // Remember the current active team's JWT if there is one - let active_team_jwt = self.active_team.as_ref().map(|team| team.token.jwt.clone()); + // Remember current active team after refresh + let active_team_aid = self + .active_team + .as_ref() + .and_then(|team| extract_aid_from_jwt(&team.token.jwt)); // Update teams self.teams = session_response @@ -240,15 +243,14 @@ impl Session { }) .collect(); - // Try to restore the active team based on the JWT - let jwt_match_found = if let Some(jwt) = active_team_jwt { - self.set_active_team_by_jwt(&jwt) - } else { - false - }; + // Try to restore the active team by account ID (JWT changes after refresh) + let found_active_team = active_team_aid + .as_ref() + .map(|aid| self.set_active_team_by_aid(aid)) + .unwrap_or(false); - // If no active team was set by JWT, fall back to a personal team - if !jwt_match_found && self.active_team.is_none() { + // Fall back to personal team if previous active team not found + if !found_active_team { // Find a team with team_type="personal" if let Some(personal_team) = self.teams.iter().find(|team| team.team_type == "personal") { From 46d637f7426f2ea93c31ae323d1a4cf454297a4b Mon Sep 17 00:00:00 2001 From: HulmaNaseer <42720638+HulmaNaseer@users.noreply.github.com> Date: Wed, 21 Jan 2026 15:15:47 +0000 Subject: [PATCH 2/8] add upsert retries for tower tables for failed commits (#178) * add upsert retries for tower tables for failed 
commits * tests for commits retry * adding retry tracking and assertion --- src/tower/_tables.py | 66 +++++++++++----- tests/tower/test_tables.py | 155 +++++++++++++++++++++++++++++++++++++ 2 files changed, 203 insertions(+), 18 deletions(-) diff --git a/src/tower/_tables.py b/src/tower/_tables.py index 451ebed6..87b34f14 100644 --- a/src/tower/_tables.py +++ b/src/tower/_tables.py @@ -2,6 +2,7 @@ from dataclasses import dataclass from pyiceberg.exceptions import NoSuchTableError +from pyiceberg.exceptions import CommitFailedException TTable = TypeVar("TTable", bound="Table") @@ -25,6 +26,9 @@ namespace_or_default, ) +import time +import random + @dataclass class RowsAffectedInformation: @@ -178,13 +182,20 @@ def insert(self, data: pa.Table) -> TTable: self._stats.inserts += data.num_rows return self - def upsert(self, data: pa.Table, join_cols: Optional[list[str]] = None) -> TTable: + def upsert( + self, + data: pa.Table, + join_cols: Optional[list[str]] = None, + max_retries: int = 5, + retry_delay_seconds: float = 0.5, + ) -> TTable: """ - Performs an upsert operation (update or insert) on the Iceberg table. + Performs an upsert operation (update or insert) on the Iceberg table. In case of commit conflicts, reloads the metadata and retries. This method will: - Update existing rows if they match the join columns - Insert new rows if no match is found + - Retry for max_retries if commits fail All operations are case-sensitive by default. Args: @@ -192,10 +203,17 @@ def upsert(self, data: pa.Table, join_cols: Optional[list[str]] = None) -> TTabl must match the schema of the target table. join_cols (Optional[list[str]]): The columns that form the key to match rows on. If not provided, all columns will be used for matching. + max_retries (int): Maximum number of retry attempts on commit conflicts. + Defaults to 5. + retry_delay_seconds (float): Wait time in seconds between retries. + Defaults to 0.5 seconds. 
Returns: TTable: The table instance with the upserted rows, allowing for method chaining. + Raises: + CommitFailedException: If all retry attempts are exhausted. + Note: - The operation is always case-sensitive - When a match is found, all columns are updated @@ -217,22 +235,34 @@ def upsert(self, data: pa.Table, join_cols: Optional[list[str]] = None) -> TTabl >>> print(f"Updated {stats.updates} rows") >>> print(f"Inserted {stats.inserts} rows") """ - res = self._table.upsert( - data, - join_cols=join_cols, - # All upserts will always be case sensitive. Perhaps we'll add this - # as a parameter in the future? - case_sensitive=True, - # These are the defaults, but we're including them to be complete. - when_matched_update_all=True, - when_not_matched_insert_all=True, - ) - - # Update the stats with the results of the relevant upsert. - self._stats.updates += res.rows_updated - self._stats.inserts += res.rows_inserted - - return self + last_exception = None + + for attempt in range(max_retries + 1): + try: + if attempt > 0: + self._table.refresh() + + res = self._table.upsert( + data, + join_cols=join_cols, + # All upserts will always be case sensitive. Perhaps we'll add this + # as a parameter in the future? + case_sensitive=True, + # These are the defaults, but we're including them to be complete. 
+ when_matched_update_all=True, + when_not_matched_insert_all=True, + ) + + self._stats.updates += res.rows_updated + self._stats.inserts += res.rows_inserted + return self + + except CommitFailedException as e: + last_exception = e + if attempt < max_retries: + time.sleep(retry_delay_seconds) + + raise last_exception def delete(self, filters: Union[str, List[pc.Expression]]) -> TTable: """ diff --git a/tests/tower/test_tables.py b/tests/tower/test_tables.py index 6aa36f2c..1203b669 100644 --- a/tests/tower/test_tables.py +++ b/tests/tower/test_tables.py @@ -5,11 +5,15 @@ import pathlib from urllib.parse import urljoin from urllib.request import pathname2url +import threading # We import all the things we need from Tower. import tower.polars as pl import pyarrow as pa from pyiceberg.catalog.memory import InMemoryCatalog +from pyiceberg.catalog.sql import SqlCatalog + +import concurrent.futures # Imports the library under test import tower @@ -42,6 +46,28 @@ def in_memory_catalog(): pass +@pytest.fixture +def sql_catalog(): + temp_dir = tempfile.mkdtemp() # ← Returns string path, no auto-cleanup + abs_path = pathlib.Path(temp_dir).absolute() + file_url = urljoin("file:", pathname2url(str(abs_path))) + + catalog = SqlCatalog( + "test.sql.catalog", + **{ + "uri": f"sqlite:///{abs_path}/catalog.db?check_same_thread=False", + "warehouse": file_url, + }, + ) + + yield catalog + + try: + shutil.rmtree(abs_path) + except FileNotFoundError: + pass + + def test_reading_and_writing_to_tables(in_memory_catalog): schema = pa.schema( [ @@ -166,6 +192,135 @@ def test_upsert_to_tables(in_memory_catalog): assert res["age"].item() == 26 +def test_upsert_concurrent_writes_with_retry(sql_catalog): + """Test that concurrent upserts succeed with retry logic handling conflicts.""" + schema = pa.schema( + [ + pa.field("ticker", pa.string()), + pa.field("date", pa.string()), + pa.field("price", pa.float64()), + ] + ) + + ref = tower.tables("concurrent_test", catalog=sql_catalog) + table = 
ref.create_if_not_exists(schema) + + initial_data = pa.Table.from_pylist( + [ + {"ticker": "AAPL", "date": "2024-01-01", "price": 100.0}, + {"ticker": "GOOGL", "date": "2024-01-01", "price": 200.0}, + {"ticker": "MSFT", "date": "2024-01-01", "price": 300.0}, + ], + schema=schema, + ) + table.insert(initial_data) + + retry_count = {"value": 0} + retry_lock = threading.Lock() + + def upsert_ticker(ticker: str, new_price: float): + t = tower.tables("concurrent_test", catalog=sql_catalog).load() + + original_refresh = t._table.refresh + + def tracked_refresh(): + with retry_lock: + retry_count["value"] += 1 + return original_refresh() + + t._table.refresh = tracked_refresh + + data = pa.Table.from_pylist( + [{"ticker": ticker, "date": "2024-01-01", "price": new_price}], + schema=schema, + ) + t.upsert(data, join_cols=["ticker", "date"]) + return ticker + + with concurrent.futures.ThreadPoolExecutor(max_workers=3) as executor: + futures = [ + executor.submit(upsert_ticker, "AAPL", 150.0), + executor.submit(upsert_ticker, "GOOGL", 250.0), + executor.submit(upsert_ticker, "MSFT", 350.0), + ] + results = [f.result() for f in concurrent.futures.as_completed(futures)] + + assert len(results) == 3 + assert ( + retry_count["value"] > 0 + ), "Expected at least one retry due to concurrent conflicts" + + final_table = tower.tables("concurrent_test", catalog=sql_catalog).load() + df = final_table.read() + + assert len(df) == 3 + + ticker_prices = {row["ticker"]: row["price"] for row in df.iter_rows(named=True)} + + assert ticker_prices["AAPL"] == 150.0 + assert ticker_prices["GOOGL"] == 250.0 + assert ticker_prices["MSFT"] == 350.0 + + +def test_upsert_concurrent_writes_same_row(sql_catalog): + """Test concurrent upserts to the SAME row - last write wins.""" + schema = pa.schema( + [ + pa.field("id", pa.int64()), + pa.field("counter", pa.int64()), + ] + ) + + ref = tower.tables("concurrent_same_row_test", catalog=sql_catalog) + table = ref.create_if_not_exists(schema) + + 
initial_data = pa.Table.from_pylist( + [{"id": 1, "counter": 0}], + schema=schema, + ) + table.insert(initial_data) + + retry_count = {"value": 0} + retry_lock = threading.Lock() + + def upsert_counter(value: int): + t = tower.tables("concurrent_same_row_test", catalog=sql_catalog).load() + + original_refresh = t._table.refresh + + def tracked_refresh(): + with retry_lock: + retry_count["value"] += 1 + return original_refresh() + + t._table.refresh = tracked_refresh + + data = pa.Table.from_pylist( + [{"id": 1, "counter": value}], + schema=schema, + ) + t.upsert(data, join_cols=["id"]) + return value + + with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor: + futures = [executor.submit(upsert_counter, i) for i in range(1, 6)] + results = [f.result() for f in concurrent.futures.as_completed(futures)] + + assert len(results) == 5 + + assert ( + retry_count["value"] > 0 + ), "Expected at least one retry due to concurrent conflicts" + + final_table = tower.tables("concurrent_same_row_test", catalog=sql_catalog).load() + df = final_table.read() + + assert len(df) == 1 + + final_counter = df.select("counter").item() + assert final_counter in [1, 2, 3, 4, 5] + + def test_delete_from_tables(in_memory_catalog): schema = pa.schema( [ From 1b5df8e499cc0ebb451a9fcdd8bf8ac623822885 Mon Sep 17 00:00:00 2001 From: Brad Heller Date: Wed, 21 Jan 2026 15:29:20 +0000 Subject: [PATCH 3/8] Bump version to v0.3.44-rc.1 --- Cargo.lock | 22 +++++++++++----------- Cargo.toml | 2 +- pyproject.toml | 2 +- uv.lock | 2 +- 4 files changed, 14 insertions(+), 14 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5ea25d7c..f02ce552 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -491,7 +491,7 @@ dependencies = [ [[package]] name = "config" -version = "0.3.43" +version = "0.3.44-rc.1" dependencies = [ "base64", "chrono", @@ -598,7 +598,7 @@ checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] name = "crypto" -version = "0.3.43" +version = 
"0.3.44-rc.1" dependencies = [ "aes-gcm", "base64", @@ -3236,7 +3236,7 @@ dependencies = [ [[package]] name = "testutils" -version = "0.3.43" +version = "0.3.44-rc.1" dependencies = [ "pem", "rsa", @@ -3506,7 +3506,7 @@ checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" [[package]] name = "tower" -version = "0.3.43" +version = "0.3.44-rc.1" dependencies = [ "tokio", "tower-api", @@ -3531,7 +3531,7 @@ dependencies = [ [[package]] name = "tower-api" -version = "0.3.43" +version = "0.3.44-rc.1" dependencies = [ "reqwest", "serde", @@ -3543,7 +3543,7 @@ dependencies = [ [[package]] name = "tower-cmd" -version = "0.3.43" +version = "0.3.44-rc.1" dependencies = [ "axum", "bytes", @@ -3613,7 +3613,7 @@ checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" [[package]] name = "tower-package" -version = "0.3.43" +version = "0.3.44-rc.1" dependencies = [ "async-compression", "config", @@ -3631,7 +3631,7 @@ dependencies = [ [[package]] name = "tower-runtime" -version = "0.3.43" +version = "0.3.44-rc.1" dependencies = [ "async-trait", "chrono", @@ -3654,7 +3654,7 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tower-telemetry" -version = "0.3.43" +version = "0.3.44-rc.1" dependencies = [ "tracing", "tracing-appender", @@ -3663,7 +3663,7 @@ dependencies = [ [[package]] name = "tower-uv" -version = "0.3.43" +version = "0.3.44-rc.1" dependencies = [ "async-compression", "async_zip", @@ -3677,7 +3677,7 @@ dependencies = [ [[package]] name = "tower-version" -version = "0.3.43" +version = "0.3.44-rc.1" dependencies = [ "anyhow", "chrono", diff --git a/Cargo.toml b/Cargo.toml index a6fc1635..2d965242 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,7 +4,7 @@ resolver = "2" [workspace.package] edition = "2021" -version = "0.3.43" +version = "0.3.44-rc.1" description = "Tower is the best way to host Python data apps in production" rust-version = "1.81" authors = ["Brad Heller "] diff 
--git a/pyproject.toml b/pyproject.toml index b0e89ac6..1f2d9a49 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "maturin" [project] name = "tower" -version = "0.3.43" +version = "0.3.44rc1" description = "Tower CLI and runtime environment for Tower." authors = [{ name = "Tower Computing Inc.", email = "brad@tower.dev" }] readme = "README.md" diff --git a/uv.lock b/uv.lock index 51bf7d0e..84bec35c 100644 --- a/uv.lock +++ b/uv.lock @@ -2744,7 +2744,7 @@ wheels = [ [[package]] name = "tower" -version = "0.3.43" +version = "0.3.44rc1" source = { editable = "." } dependencies = [ { name = "attrs" }, From f336857d0029cd99f74d978f1f849ba144cbbd1b Mon Sep 17 00:00:00 2001 From: Brad Heller Date: Mon, 26 Jan 2026 09:28:37 +0100 Subject: [PATCH 4/8] Add `--no-browser` to Login to skip opening browsers at login (#183) * chore: Add `--no-browser` to Login to skip opening browsers in environments where our CLI is tricked * Update crates/tower-cmd/src/session.rs Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Update crates/tower-cmd/src/session.rs Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --------- Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- crates/tower-cmd/src/lib.rs | 2 +- crates/tower-cmd/src/session.rs | 50 ++++++++++++++++++++++++--------- 2 files changed, 37 insertions(+), 15 deletions(-) diff --git a/crates/tower-cmd/src/lib.rs b/crates/tower-cmd/src/lib.rs index cd4fcbda..a55b4967 100644 --- a/crates/tower-cmd/src/lib.rs +++ b/crates/tower-cmd/src/lib.rs @@ -99,7 +99,7 @@ impl App { } match matches.subcommand() { - Some(("login", _)) => session::do_login(config).await, + Some(("login", args)) => session::do_login(config, args).await, Some(("version", _)) => version::do_version().await, Some(("apps", sub_matches)) => { let apps_command = sub_matches.subcommand(); diff --git a/crates/tower-cmd/src/session.rs b/crates/tower-cmd/src/session.rs index 
7301b46b..8f0f8f24 100644 --- a/crates/tower-cmd/src/session.rs +++ b/crates/tower-cmd/src/session.rs @@ -1,5 +1,5 @@ use crate::output; -use clap::Command; +use clap::{Arg, ArgMatches, Command}; use config::{Config, Session}; use tokio::{time, time::Duration}; use tower_api::models::CreateDeviceLoginTicketResponse; @@ -8,18 +8,29 @@ use tower_telemetry::debug; use crate::api; pub fn login_cmd() -> Command { - Command::new("login").about("Create a session with Tower") + Command::new("login") + .arg( + Arg::new("no-browser") + .long("no-browser") + .short('n') + .help("Do not attempt to open the browser automatically") + .action(clap::ArgAction::SetTrue), + ) + .about("Create a session with Tower") } -pub async fn do_login(config: Config) { +pub async fn do_login(config: Config, args: &ArgMatches) { output::banner(); + // Open a browser by default, unless the --no-browser flag is set. + let open_browser = !args.get_flag("no-browser"); + let mut spinner = output::spinner("Starting device login..."); match api::create_device_login_ticket(&config).await { Ok(resp) => { spinner.success(); - handle_device_login(config, resp).await; + handle_device_login(config, open_browser, resp).await; } Err(err) => { spinner.failure(); @@ -28,18 +39,29 @@ pub async fn do_login(config: Config) { } } -async fn handle_device_login(config: Config, claim: CreateDeviceLoginTicketResponse) { +async fn handle_device_login( + config: Config, + open_browser: bool, + claim: CreateDeviceLoginTicketResponse, +) { + // Put this in the debug logs just in case something weird happens. 
+ debug!("Login URL: {}", claim.login_url); + + let login_instructions = format!( + "Please open the following URL in your browser: {}\n", + claim.login_url + ); + // Try to open the login URL in browser - if let Err(err) = webbrowser::open(&claim.login_url) { - debug!("failed to open web browser: {}", err); - - let line = format!( - "Please open the following URL in your browser: {}\n", - claim.login_url - ); - output::write(&line); + if open_browser { + if let Err(err) = webbrowser::open(&claim.login_url) { + debug!("failed to open web browser: {}", err); + output::write(&login_instructions); + } else { + debug!("opened browser to {}", claim.login_url); + } } else { - debug!("opened browser to {}", claim.login_url); + output::write(&login_instructions); } let mut spinner = output::spinner("Waiting for login..."); From a3307b036fc2648e790bd125443975b32c3f78e7 Mon Sep 17 00:00:00 2001 From: Burak Dede Date: Mon, 26 Jan 2026 09:28:52 +0100 Subject: [PATCH 5/8] Set app description correctly on create only (#182) * Fix #166: persist app description on create and deploy * Improve #166 fix: description semantics and error handling improvements - Change towerfile desc. 
to Option to distinguish absence and explicit empty - Add ApiCreateError for more accurate error reporting on app createion - Add UnexpectedApiResponse error to prevent silent description synchronization skip - Simplify error conversion in `ensure_app_exists` * Simplify description update: always update on deploy, no diff check Change from diff-checking approach to "latest wins" semantics; - Always update description on deploy when present (no comparison) - Skip update for empty/None (preserves server state) - Remove spinner and entity validation (simpler, more resilient) * simplify: don't block deploy during description update * scope description field to only app create flows and remove from deploy * fix linter/formatting errors --- crates/config/src/towerfile.rs | 9 ++++-- crates/tower-cmd/src/api.rs | 1 + crates/tower-cmd/src/deploy.rs | 12 +++---- crates/tower-cmd/src/error.rs | 14 +++++++- crates/tower-cmd/src/mcp.rs | 2 +- crates/tower-cmd/src/util/apps.rs | 32 ++++++------------- .../features/cli_app_management.feature | 9 +++++- tests/integration/features/steps/cli_steps.py | 32 +++++++++++++++++++ tests/mock-api-server/main.py | 27 +++++++++++++++- 9 files changed, 103 insertions(+), 35 deletions(-) diff --git a/crates/config/src/towerfile.rs b/crates/config/src/towerfile.rs index 648bc00f..aa385e1a 100644 --- a/crates/config/src/towerfile.rs +++ b/crates/config/src/towerfile.rs @@ -28,8 +28,8 @@ pub struct App { #[serde(default)] pub schedule: String, - #[serde(default)] - pub description: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub description: Option, #[serde(default)] pub import_paths: Vec, @@ -58,7 +58,7 @@ impl Towerfile { script: String::from(""), source: vec![], schedule: String::from("0 0 * * *"), - description: String::from(""), + description: None, import_paths: vec![], }, } @@ -147,6 +147,7 @@ mod test { assert_eq!(towerfile.app.script, "./script.py"); assert_eq!(towerfile.app.source, vec!["*.py"]); 
assert_eq!(towerfile.app.schedule, "0 0 * * *"); + assert_eq!(towerfile.app.description, None); } #[test] @@ -163,6 +164,7 @@ mod test { assert_eq!(towerfile.app.script, "./script.py"); assert_eq!(towerfile.app.source, vec!["*.py"]); assert_eq!(towerfile.app.schedule, ""); + assert_eq!(towerfile.app.description, None); } #[test] @@ -316,6 +318,7 @@ default = "value2" assert_eq!(towerfile.app.name, reparsed.app.name); assert_eq!(towerfile.app.script, reparsed.app.script); assert_eq!(towerfile.app.source, reparsed.app.source); + assert_eq!(towerfile.app.description, reparsed.app.description); assert_eq!(towerfile.parameters.len(), reparsed.parameters.len()); assert_eq!(towerfile.parameters[0].name, reparsed.parameters[0].name); } diff --git a/crates/tower-cmd/src/api.rs b/crates/tower-cmd/src/api.rs index 772bfb21..0078a15f 100644 --- a/crates/tower-cmd/src/api.rs +++ b/crates/tower-cmd/src/api.rs @@ -73,6 +73,7 @@ pub async fn create_app( create_app_params: tower_api::models::CreateAppParams { schema: None, name: name.to_string(), + // API create expects short_description; CLI/Towerfile expose "description". 
short_description: Some(description.to_string()), slug: None, is_externally_accessible: None, diff --git a/crates/tower-cmd/src/deploy.rs b/crates/tower-cmd/src/deploy.rs index cf3edcb7..15baadf7 100644 --- a/crates/tower-cmd/src/deploy.rs +++ b/crates/tower-cmd/src/deploy.rs @@ -43,6 +43,9 @@ pub async fn do_deploy(config: Config, args: &ArgMatches) { crate::Error::ApiDeployError { source } => { output::tower_error_and_die(source, "Deploying app failed") } + crate::Error::ApiCreateAppError { source } => { + output::tower_error_and_die(source, "Creating app failed") + } crate::Error::ApiDescribeAppError { source } => { output::tower_error_and_die(source, "Fetching app details failed") } @@ -72,16 +75,13 @@ pub async fn deploy_from_dir( let api_config = config.into(); // Add app existence check before proceeding - if let Err(err) = util::apps::ensure_app_exists( + util::apps::ensure_app_exists( &api_config, &towerfile.app.name, - &towerfile.app.description, + towerfile.app.description.as_deref(), create_app, ) - .await - { - return Err(crate::Error::ApiDescribeAppError { source: err }); - } + .await?; let spec = PackageSpec::from_towerfile(&towerfile); let mut spinner = output::spinner("Building package..."); diff --git a/crates/tower-cmd/src/error.rs b/crates/tower-cmd/src/error.rs index ff5ab381..777deec5 100644 --- a/crates/tower-cmd/src/error.rs +++ b/crates/tower-cmd/src/error.rs @@ -1,6 +1,6 @@ use snafu::prelude::*; use tower_api::apis::default_api::{ - DeployAppError, DescribeAppError, DescribeRunError, RunAppError, + CreateAppError, DeployAppError, DescribeAppError, DescribeRunError, RunAppError, }; use tower_telemetry::debug; @@ -91,6 +91,12 @@ pub enum Error { source: tower_api::apis::Error, }, + // API create app error + #[snafu(display("API create app error: {}", source))] + ApiCreateAppError { + source: tower_api::apis::Error, + }, + // API describe app error #[snafu(display("API describe app error: {}", source))] ApiDescribeAppError { @@ -173,6 
+179,12 @@ impl From> for Error { } } +impl From> for Error { + fn from(source: tower_api::apis::Error) -> Self { + Self::ApiCreateAppError { source } + } +} + impl From> for Error { fn from(source: tower_api::apis::Error) -> Self { Self::ApiDescribeAppError { source } diff --git a/crates/tower-cmd/src/mcp.rs b/crates/tower-cmd/src/mcp.rs index af0a5b27..95d9440d 100644 --- a/crates/tower-cmd/src/mcp.rs +++ b/crates/tower-cmd/src/mcp.rs @@ -734,7 +734,7 @@ impl TowerService { towerfile.app.script = script; } if let Some(description) = request.description { - towerfile.app.description = description; + towerfile.app.description = Some(description); } if let Some(source) = request.source { towerfile.app.source = source; diff --git a/crates/tower-cmd/src/util/apps.rs b/crates/tower-cmd/src/util/apps.rs index 949785d1..38012bae 100644 --- a/crates/tower-cmd/src/util/apps.rs +++ b/crates/tower-cmd/src/util/apps.rs @@ -1,5 +1,4 @@ use crate::output; -use http::StatusCode; use promptly::prompt_default; use tower_api::apis::{ configuration::Configuration, @@ -10,9 +9,9 @@ use tower_api::models::CreateAppParams as CreateAppParamsModel; pub async fn ensure_app_exists( api_config: &Configuration, app_name: &str, - description: &str, + description: Option<&str>, create_app: bool, -) -> Result<(), tower_api::apis::Error> { +) -> Result<(), crate::Error> { // Try to describe the app first (with spinner) let mut spinner = output::spinner("Checking app..."); let describe_result = default_api::describe_app( @@ -27,7 +26,7 @@ pub async fn ensure_app_exists( ) .await; - // If the app exists, return Ok + // If the app exists, return Ok (description is create-only). 
if describe_result.is_ok() { spinner.success(); return Ok(()); @@ -49,7 +48,7 @@ pub async fn ensure_app_exists( // If it's not a 404 error, fail the spinner and return the error if !is_not_found { spinner.failure(); - return Err(err); + return Err(crate::Error::ApiDescribeAppError { source: err }); } // App not found - stop spinner before prompting user @@ -68,7 +67,7 @@ pub async fn ensure_app_exists( // If the user doesn't want to create the app, return the original error if !create_app { - return Err(err); + return Err(crate::Error::ApiDescribeAppError { source: err }); } // Try to create the app (with a new spinner) @@ -79,7 +78,8 @@ pub async fn ensure_app_exists( create_app_params: CreateAppParamsModel { schema: None, name: app_name.to_string(), - short_description: Some(description.to_string()), + // API create expects short_description; CLI/Towerfile expose "description". + short_description: description.map(|desc| desc.to_string()), slug: None, is_externally_accessible: None, subdomain: None, @@ -96,21 +96,9 @@ pub async fn ensure_app_exists( } Err(create_err) => { spinner.failure(); - // Convert any creation error to a response error - Err(tower_api::apis::Error::ResponseError( - tower_api::apis::ResponseContent { - tower_trace_id: "".to_string(), - status: match &create_err { - tower_api::apis::Error::ResponseError(resp) => resp.status, - _ => StatusCode::INTERNAL_SERVER_ERROR, - }, - content: match &create_err { - tower_api::apis::Error::ResponseError(resp) => resp.content.clone(), - _ => create_err.to_string(), - }, - entity: None, - }, - )) + Err(crate::Error::ApiCreateAppError { + source: create_err, + }) } } } diff --git a/tests/integration/features/cli_app_management.feature b/tests/integration/features/cli_app_management.feature index 0add0a6d..fbadbe34 100644 --- a/tests/integration/features/cli_app_management.feature +++ b/tests/integration/features/cli_app_management.feature @@ -26,4 +26,11 @@ Feature: CLI App Management When I run "tower apps 
create --json --name test-cli-app-123 --description 'Test app'" via CLI Then the output should be valid JSON And the JSON should contain the created app information - And the app name should be "test-cli-app-123" \ No newline at end of file + And the app name should be "test-cli-app-123" + And the app description should be "Test app" + + Scenario: CLI deploy --create creates app with description from Towerfile + Given I have a valid Towerfile in the current directory + When I run "tower deploy --create" via CLI + And I run "tower apps show --json {app_name}" via CLI using created app name + Then the app description should be "A test app" diff --git a/tests/integration/features/steps/cli_steps.py b/tests/integration/features/steps/cli_steps.py index a491e491..53274330 100644 --- a/tests/integration/features/steps/cli_steps.py +++ b/tests/integration/features/steps/cli_steps.py @@ -333,3 +333,35 @@ def step_app_name_should_be(context, expected_name): assert ( actual_name == expected_name ), f"Expected app name '{expected_name}', got '{actual_name}'" + + +@step('the app description should be "{expected_description}"') +def step_app_description_should_be(context, expected_description): + """Verify app description matches expected value""" + data = json.loads(context.cli_output) + candidates = [] + + if "app" in data: + candidates.append(data["app"]) + if "data" in data and "app" in data["data"]: + candidates.append(data["data"]["app"]) + + if not candidates: + candidates.append(data) + + actual_description = None + for candidate in candidates: + if isinstance(candidate, dict): + if "short_description" in candidate: + actual_description = candidate["short_description"] + break + if "description" in candidate: + actual_description = candidate["description"] + break + + assert ( + actual_description is not None + ), f"Could not find app description in JSON response: {data}" + assert ( + actual_description == expected_description + ), f"Expected description 
'{expected_description}', got '{actual_description}'" diff --git a/tests/mock-api-server/main.py b/tests/mock-api-server/main.py index e597d0b4..2fc1565a 100644 --- a/tests/mock-api-server/main.py +++ b/tests/mock-api-server/main.py @@ -132,6 +132,10 @@ async def create_app(app_data: Dict[str, Any]): if app_name in mock_apps_db: return {"app": mock_apps_db[app_name]} + description = app_data.get("description") + if description is None: + description = app_data.get("short_description", "") + new_app = { "created_at": datetime.datetime.now().isoformat(), "health_status": "healthy", @@ -148,7 +152,7 @@ async def create_app(app_data: Dict[str, Any]): "running": 0, }, "schedule": None, - "short_description": app_data.get("short_description", ""), + "short_description": description or "", "status": "active", "subdomain": "", "version": None, @@ -171,6 +175,27 @@ async def describe_app(name: str, response: Response): return {"app": app_info, "runs": []} # Simplistic, no runs yet +@app.put("/v1/apps/{name}") +async def update_app(name: str, app_data: Dict[str, Any], response: Response): + app_info = mock_apps_db.get(name) + if not app_info: + response.status_code = 404 + return { + "$schema": "https://api.tower.dev/v1/schemas/ErrorModel.json", + "title": "Not Found", + "status": 404, + "detail": f"App '{name}' not found", + } + + if "description" in app_data: + app_info["short_description"] = app_data.get("description") or "" + elif "short_description" in app_data: + app_info["short_description"] = app_data.get("short_description") or "" + + mock_apps_db[name] = app_info + return {"app": app_info} + + @app.delete("/v1/apps/{name}") async def delete_app(name: str): if name not in mock_apps_db: From 5aed05589be7ba4ec20f698a39c433ab763ef92a Mon Sep 17 00:00:00 2001 From: Brad Heller Date: Thu, 29 Jan 2026 15:29:03 +0100 Subject: [PATCH 6/8] Upgrade UV and delete orphaned lock files (#185) * chore: Upgrade `uv` to latest version * chore: Upgrade `uv`, delete orphaned lock 
files * chore: Tighten up lock detection slightly --- Cargo.lock | 20 ++++ Cargo.toml | 4 + crates/tower-uv/Cargo.toml | 4 + crates/tower-uv/src/lib.rs | 233 ++++++++++++++++++++++++++++++++++++- 4 files changed, 259 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f02ce552..cb0eb160 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1013,6 +1013,16 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "fs2" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213" +dependencies = [ + "libc", + "winapi", +] + [[package]] name = "futures" version = "0.3.31" @@ -2786,6 +2796,12 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" +[[package]] +name = "seahash" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b" + [[package]] name = "sealed" version = "0.5.0" @@ -3668,8 +3684,12 @@ dependencies = [ "async-compression", "async_zip", "dirs", + "fs2", "futures-lite", + "hex", + "regex", "reqwest", + "seahash", "tokio", "tokio-tar", "tower-telemetry", diff --git a/Cargo.toml b/Cargo.toml index 2d965242..a2a20f99 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,20 +28,24 @@ config = { path = "crates/config" } crypto = { path = "crates/crypto" } ctrlc = "3" dirs = "5" +fs2 = "0.4" futures = "0.3" futures-util = "0.3" futures-lite = "2.6" glob = "0.3" +hex = "0.4" http = "1.1" indicatif = "0.17" nix = { version = "0.30", features = ["signal"] } pem = "3" promptly = "0.3" rand = "0.8" +regex = "1" reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls", "stream"] } reqwest-eventsource = { version = "0.6" } rpassword = "7" rsa = "0.9" +seahash = "4.1" serde = "1" serde_json = "1.0" sha2 = "0.10" 
diff --git a/crates/tower-uv/Cargo.toml b/crates/tower-uv/Cargo.toml index b85e9d0c..5c42f83e 100644 --- a/crates/tower-uv/Cargo.toml +++ b/crates/tower-uv/Cargo.toml @@ -10,8 +10,12 @@ license = { workspace = true } async-compression = { workspace = true } async_zip = { workspace = true } dirs = { workspace = true } +fs2 = { workspace = true } futures-lite = { workspace = true } +hex = { workspace = true } +regex = { workspace = true } reqwest = { workspace = true } +seahash = { workspace = true } tokio = { workspace = true } tokio-tar = { workspace = true } tower-telemetry = { workspace = true } diff --git a/crates/tower-uv/src/lib.rs b/crates/tower-uv/src/lib.rs index 33daf16b..b334bc76 100644 --- a/crates/tower-uv/src/lib.rs +++ b/crates/tower-uv/src/lib.rs @@ -1,13 +1,19 @@ use std::collections::HashMap; -use std::path::PathBuf; +use std::fs::{self, OpenOptions}; +use std::hash::{Hash, Hasher}; +use std::path::{Path, PathBuf}; use std::process::Stdio; + +use fs2::FileExt; +use regex::Regex; +use seahash::SeaHasher; use tokio::process::{Child, Command}; use tower_telemetry::debug; pub mod install; // UV_VERSION is the version of UV to download and install when setting up a local UV deployment. -pub const UV_VERSION: &str = "0.7.13"; +pub const UV_VERSION: &str = "0.9.27"; #[derive(Debug)] pub enum Error { @@ -106,6 +112,122 @@ fn normalize_env_vars(env_vars: &HashMap) -> HashMap.lock`) in the temp directory for concurrent operation +/// safety. These files are not automatically cleaned up when UV exits. This function finds all +/// such files and removes any that are not currently locked by another process. 
+pub fn cleanup_stale_uv_lock_files() { + let temp_dir = std::env::temp_dir(); + + let entries = match fs::read_dir(&temp_dir) { + Ok(entries) => entries, + Err(e) => { + debug!( + "Failed to read temp directory for lock file cleanup: {:?}", + e + ); + return; + } + }; + + for entry in entries.flatten() { + let path = entry.path(); + + // Only process files matching the uv-*.lock pattern + if let Some(file_name) = path.file_name() { + if !is_uv_lock_file_name(&file_name) { + continue; + } + } else { + continue; + } + + // Try to open the file and acquire an exclusive lock + let file = match OpenOptions::new().read(true).write(true).open(&path) { + Ok(f) => f, + Err(e) => { + debug!("Failed to open lock file {:?}: {:?}", path, e); + continue + } + }; + + // Try to acquire an exclusive lock without blocking + if file.try_lock_exclusive().is_ok() { + // We got the lock, meaning no other process is using this file. + // Unlock and delete it. + let _ = FileExt::unlock(&file); + drop(file); // Close the file handle before deleting + + if let Err(e) = fs::remove_file(&path) { + debug!("Failed to remove stale lock file {:?}: {:?}", path, e); + } else { + debug!("Cleaned up stale UV lock file: {:?}", path); + } + } + // If we couldn't get the lock, another process is using it, so leave it alone + } +} + +fn is_uv_lock_file_name>(lock_name: S) -> bool { + // There isn't a really great way of _not_ instantiating this on each call, without using a + // LazyLock or some other synchronization method. So, we just take the runtime hit instead of + // the synchronization hit. + let uv_lock_pattern = Regex::new(r"^uv-[0-9a-f]{16}\.lock$").unwrap(); + let os_str = lock_name.as_ref(); + + os_str.to_str() + .map(|name| uv_lock_pattern.is_match(name)) + .unwrap_or(false) +} + +/// Computes the lock file path that uv will create for a given working directory. +/// +/// This replicates uv's lock file naming: `uv-{hash}.lock` where the hash is +/// a seahash of the workspace path.
When running uv commands with a specific +/// working directory, this function can predict which lock file will be used. +pub fn compute_uv_lock_file_path(cwd: &Path) -> PathBuf { + let mut hasher = SeaHasher::new(); + cwd.hash(&mut hasher); + let hash = hasher.finish(); + let hash_hex = hex::encode(hash.to_le_bytes()); + + std::env::temp_dir().join(format!("uv-{}.lock", hash_hex)) +} + +/// Cleans up the lock file for a specific working directory after uv exits. +/// +/// This should be called after a uv process completes to clean up the lock file +/// it created. The lock file is only removed if no other process is using it. +/// +/// Returns `true` if the lock file was successfully removed, `false` otherwise +/// (either because it didn't exist, couldn't be opened, or is still locked). +pub fn cleanup_lock_file_for_cwd(cwd: &Path) -> bool { + let lock_path = compute_uv_lock_file_path(cwd); + + let file = match OpenOptions::new().read(true).write(true).open(&lock_path) { + Ok(f) => f, + Err(_) => return false, // File doesn't exist or can't be opened + }; + + // Only delete if we can acquire exclusive lock (no other process using it) + if file.try_lock_exclusive().is_ok() { + let _ = FileExt::unlock(&file); + drop(file); // Close the file handle before deleting + + if let Err(e) = fs::remove_file(&lock_path) { + debug!("Failed to remove lock file {:?}: {:?}", lock_path, e); + false + } else { + debug!("Cleaned up UV lock file for {:?}: {:?}", cwd, lock_path); + true + } + } else { + // Another process is still using this lock file + false + } +} + async fn test_uv_path(path: &PathBuf) -> Result<(), Error> { let res = Command::new(&path) .arg("--color") @@ -170,6 +292,11 @@ impl Uv { .arg("venv") .envs(env_vars); + #[cfg(unix)] + { + cmd.process_group(0); + } + if let Some(dir) = &self.cache_dir { cmd.arg("--cache-dir").arg(dir); } @@ -302,3 +429,105 @@ impl Uv { test_uv_path(&self.uv_path).await.is_ok() } } + +#[cfg(test)] +mod tests { + use super::*; + use 
std::io::Write; + + #[test] + fn test_compute_uv_lock_file_path_is_deterministic() { + let path = Path::new("/some/project/path"); + + let lock_path_1 = compute_uv_lock_file_path(path); + let lock_path_2 = compute_uv_lock_file_path(path); + + assert_eq!(lock_path_1, lock_path_2); + } + + #[test] + fn test_compute_uv_lock_file_path_different_paths_produce_different_hashes() { + let path_a = Path::new("/project/a"); + let path_b = Path::new("/project/b"); + + let lock_path_a = compute_uv_lock_file_path(path_a); + let lock_path_b = compute_uv_lock_file_path(path_b); + + assert_ne!(lock_path_a, lock_path_b); + } + + #[test] + fn test_compute_uv_lock_file_path_format() { + let path = Path::new("/test/path"); + let lock_path = compute_uv_lock_file_path(path); + + let file_name = lock_path.file_name().unwrap().to_str().unwrap(); + assert!(file_name.starts_with("uv-")); + assert!(file_name.ends_with(".lock")); + + // Hash should be 16 hex characters (8 bytes as hex) + let hash_part = &file_name[3..file_name.len() - 5]; + assert_eq!(hash_part.len(), 16); + assert!(hash_part.chars().all(|c| c.is_ascii_hexdigit())); + } + + #[test] + fn test_cleanup_lock_file_for_cwd_removes_unlocked_file() { + let temp_dir = std::env::temp_dir(); + let test_cwd = temp_dir.join("test-cleanup-cwd"); + + // Compute where the lock file would be + let lock_path = compute_uv_lock_file_path(&test_cwd); + + // Create the lock file manually + { + let mut file = fs::File::create(&lock_path).unwrap(); + file.write_all(b"test").unwrap(); + } + + assert!(lock_path.exists()); + + // Clean it up + let result = cleanup_lock_file_for_cwd(&test_cwd); + + assert!(result); + assert!(!lock_path.exists()); + } + + #[test] + fn test_cleanup_lock_file_for_cwd_returns_false_for_nonexistent() { + let nonexistent_cwd = Path::new("/nonexistent/path/that/does/not/exist"); + + let result = cleanup_lock_file_for_cwd(nonexistent_cwd); + + assert!(!result); + } + + #[test] + fn test_cleanup_lock_file_for_cwd_respects_lock() 
{ + let temp_dir = std::env::temp_dir(); + let test_cwd = temp_dir.join("test-cleanup-locked-cwd"); + + let lock_path = compute_uv_lock_file_path(&test_cwd); + + // Create and hold a lock on the file + let file = OpenOptions::new() + .read(true) + .write(true) + .create(true) + .truncate(true) + .open(&lock_path) + .unwrap(); + file.lock_exclusive().unwrap(); + + // Try to clean up while locked - should fail + let result = cleanup_lock_file_for_cwd(&test_cwd); + + assert!(!result); + assert!(lock_path.exists()); + + // Release the lock and clean up + drop(file); + let _ = fs::remove_file(&lock_path); + } +} From 603371afa5a02fe3c9ebbd7b5ba4cb6d409b3c57 Mon Sep 17 00:00:00 2001 From: Vim <121349594+sammuti@users.noreply.github.com> Date: Thu, 29 Jan 2026 07:31:17 -0700 Subject: [PATCH 7/8] [TOW-1394] UV cache & package path cleanup (#184) * [TOW-1394][WIP] Attempt cache cleanup * In case of no cache_dir provided create temporary directories for uv and clean them * Correctly set cache and clean * Track & explicitly remove tmp pacakge dir * Test cleanup --- crates/tower-runtime/src/subprocess.rs | 72 +++++++- crates/tower-runtime/tests/subprocess_test.rs | 173 ++++++++++++++++++ crates/tower-uv/src/lib.rs | 35 ++-- 3 files changed, 262 insertions(+), 18 deletions(-) create mode 100644 crates/tower-runtime/tests/subprocess_test.rs diff --git a/crates/tower-runtime/src/subprocess.rs b/crates/tower-runtime/src/subprocess.rs index 713722f2..de3147ce 100644 --- a/crates/tower-runtime/src/subprocess.rs +++ b/crates/tower-runtime/src/subprocess.rs @@ -11,6 +11,7 @@ use crate::{App, OutputReceiver, StartOptions, Status}; use async_trait::async_trait; use std::path::PathBuf; use std::sync::Arc; +use tmpdir::TmpDir; use tokio::fs::File; use tokio::io::AsyncWriteExt; use tokio::sync::Mutex; @@ -37,7 +38,7 @@ impl SubprocessBackend { mut package_stream: Box, ) -> Result { // Create temp directory for this package - let temp_dir = tmpdir::TmpDir::new("tower-package") + let 
temp_dir = TmpDir::new("tower-package") .await .map_err(|_| Error::PackageCreateFailed)?; @@ -78,14 +79,39 @@ impl ExecutionBackend for SubprocessBackend { _ => self.cache_dir.clone(), }; + // Create a unique temp directory for uv if no cache directory is configured + let (final_cache_dir, uv_temp_dir) = if cache_dir.is_none() { + let temp_path = std::env::temp_dir().join(format!("tower-uv-{}", spec.id)); + tokio::fs::create_dir_all(&temp_path) + .await + .map_err(|_| Error::PackageCreateFailed)?; + // Use the temp directory as cache_dir and track it for cleanup + (Some(temp_path.clone()), Some(temp_path)) + } else { + // Use provided cache_dir, no temp dir to clean up + (cache_dir, None) + }; + // Receive package stream and unpack it - let package = self.receive_and_unpack_package(spec.package_stream).await?; + let mut package = self.receive_and_unpack_package(spec.package_stream).await?; let unpacked_path = package .unpacked_path .clone() .ok_or(Error::PackageUnpackFailed)?; + // Extract tmp_dir from package for cleanup tracking + // We need to keep this alive until execution completes + let package_tmp_dir = package.tmp_dir.take(); + + // Set TMPDIR to the same isolated directory to ensure lock files also go there + let mut env_vars = spec.env_vars; + if let Some(ref temp_dir) = uv_temp_dir { + env_vars.insert("TMPDIR".to_string(), temp_dir.to_string_lossy().to_string()); + env_vars.insert("TEMP".to_string(), temp_dir.to_string_lossy().to_string()); + env_vars.insert("TMP".to_string(), temp_dir.to_string_lossy().to_string()); + } + let opts = StartOptions { ctx: spec.telemetry_ctx, package: Package::from_unpacked_path(unpacked_path).await?, @@ -93,9 +119,9 @@ impl ExecutionBackend for SubprocessBackend { environment: spec.environment, secrets: spec.secrets, parameters: spec.parameters, - env_vars: spec.env_vars, + env_vars, output_sender: output_sender.clone(), - cache_dir, + cache_dir: final_cache_dir, // UV will use this via --cache-dir flag }; // Start the 
LocalApp @@ -105,7 +131,8 @@ impl ExecutionBackend for SubprocessBackend { id: spec.id, app: Arc::new(Mutex::new(app)), output_receiver: Arc::new(Mutex::new(output_receiver)), - _package: package, // Keep package alive so temp dir doesn't get cleaned up + package_tmp_dir, + uv_temp_dir, }) } @@ -133,7 +160,22 @@ pub struct SubprocessHandle { id: String, app: Arc>, output_receiver: Arc>, - _package: Package, // Keep package alive to prevent temp dir cleanup + package_tmp_dir: Option, // Track package temp directory for cleanup + uv_temp_dir: Option, // Track UV's temp directory for cleanup +} + +impl Drop for SubprocessHandle { + fn drop(&mut self) { + // Best-effort cleanup of UV temp directory when handle is dropped + if let Some(temp_dir) = self.uv_temp_dir.take() { + let _ = std::fs::remove_dir_all(&temp_dir); + } + + // Best-effort cleanup of package temp directory when handle is dropped + if let Some(tmp_dir) = self.package_tmp_dir.take() { + let _ = std::fs::remove_dir_all(tmp_dir.to_path_buf()); + } + } } #[async_trait] @@ -195,6 +237,24 @@ impl ExecutionHandle for SubprocessHandle { async fn cleanup(&mut self) -> Result<(), Error> { // Ensure the app is terminated self.terminate().await?; + + // Clean up uv's temp directory if it was created + if let Some(ref temp_dir) = self.uv_temp_dir { + if let Err(e) = tokio::fs::remove_dir_all(temp_dir).await { + // Log but don't fail - cleanup is best-effort + tower_telemetry::debug!("Failed to clean up uv temp directory: {:?}", e); + } + } + + // Clean up package temp directory + if let Some(tmp_dir) = self.package_tmp_dir.take() { + let path = tmp_dir.to_path_buf(); + if let Err(e) = tokio::fs::remove_dir_all(&path).await { + // Log but don't fail - cleanup is best-effort + tower_telemetry::debug!("Failed to clean up package temp directory: {:?}", e); + } + } + Ok(()) } } diff --git a/crates/tower-runtime/tests/subprocess_test.rs b/crates/tower-runtime/tests/subprocess_test.rs new file mode 100644 index 
00000000..40a99929 --- /dev/null +++ b/crates/tower-runtime/tests/subprocess_test.rs @@ -0,0 +1,173 @@ +use std::collections::HashMap; +use std::path::PathBuf; + +use tower_runtime::execution::{ + CacheBackend, CacheConfig, CacheIsolation, ExecutionBackend, ExecutionHandle, ExecutionSpec, + ResourceLimits, RuntimeConfig, +}; +use tower_runtime::subprocess::SubprocessBackend; +use tower_runtime::Status; + +use config::Towerfile; +use tower_package::{Package, PackageSpec}; + +fn get_example_app_dir(name: &str) -> PathBuf { + let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + path.push("tests"); + path.push("example-apps"); + path.push(name); + assert!( + path.exists(), + "Example app directory does not exist: {}", + path.display() + ); + path +} + +async fn build_package_from_dir(dir: &PathBuf) -> Package { + let towerfile = Towerfile::from_path(dir.join("Towerfile")).expect("Failed to load Towerfile"); + let spec = PackageSpec::from_towerfile(&towerfile); + let mut package = Package::build(spec) + .await + .expect("Failed to build package from directory"); + package.unpack().await.expect("Failed to unpack package"); + package +} + +async fn create_execution_spec(id: String, package: Package) -> ExecutionSpec { + let tar_gz_path = package + .package_file_path + .expect("Package should have tar.gz file"); + + let file = tokio::fs::File::open(&tar_gz_path) + .await + .expect("Failed to open package file"); + + ExecutionSpec { + id, + telemetry_ctx: tower_telemetry::Context::new(), + package_stream: Box::new(file), + environment: "test".to_string(), + secrets: HashMap::new(), + parameters: HashMap::new(), + env_vars: HashMap::new(), + runtime: RuntimeConfig { + image: "python:3.11".to_string(), + version: None, + cache: CacheConfig { + enable_bundle_cache: false, + enable_runtime_cache: false, + enable_dependency_cache: false, + backend: CacheBackend::None, + isolation: CacheIsolation::None, + }, + entrypoint: None, + command: None, + }, + resources: 
ResourceLimits { + cpu_millicores: None, + memory_mb: None, + storage_mb: None, + max_pids: None, + gpu_count: 0, + timeout_seconds: 300, + }, + networking: None, + } +} + +/// Check if specific execution's temp directory exists +fn uv_temp_dir_exists(execution_id: &str) -> bool { + let tmp_dir = std::env::temp_dir(); + let uv_cache_dir = tmp_dir.join(format!("tower-uv-{}", execution_id)); + uv_cache_dir.exists() +} + +#[tokio::test] +async fn test_no_temp_file_accumulation_happy_path() { + let execution_id = "test-happy-cleanup"; + + // Execute app with dependencies + let app_dir = get_example_app_dir("02-use-faker"); + let package = build_package_from_dir(&app_dir).await; + let backend = SubprocessBackend::new(None); + let spec = create_execution_spec(execution_id.to_string(), package).await; + + let mut handle = backend + .create(spec) + .await + .expect("Failed to create execution"); + let status = handle + .wait_for_completion() + .await + .expect("Failed to wait for completion"); + + assert_eq!(status, Status::Exited, "App should exit successfully"); + + // Cleanup + handle.cleanup().await.expect("Failed to cleanup"); + + // Verify this execution's temp directory was cleaned up + assert!( + !uv_temp_dir_exists(execution_id), + "UV temp directory should be cleaned up after execution" + ); +} + +#[tokio::test] +async fn test_no_temp_file_accumulation_on_termination() { + let execution_id = "test-terminate-cleanup"; + + // Execute app with dependencies + let app_dir = get_example_app_dir("02-use-faker"); + let package = build_package_from_dir(&app_dir).await; + let backend = SubprocessBackend::new(None); + let spec = create_execution_spec(execution_id.to_string(), package).await; + + let mut handle = backend + .create(spec) + .await + .expect("Failed to create execution"); + + // Let it start, then terminate + tokio::time::sleep(tokio::time::Duration::from_secs(1)).await; + handle.terminate().await.expect("Failed to terminate"); + + // Cleanup + 
handle.cleanup().await.expect("Failed to cleanup"); + + // Verify this execution's temp directory was cleaned up + assert!( + !uv_temp_dir_exists(execution_id), + "UV temp directory should be cleaned up after termination" + ); +} + +#[tokio::test] +async fn test_multiple_executions_no_accumulation() { + // Run multiple executions + for i in 0..3 { + let execution_id = format!("test-multi-cleanup-{}", i); + let app_dir = get_example_app_dir("01-hello-world"); + let package = build_package_from_dir(&app_dir).await; + let backend = SubprocessBackend::new(None); + let spec = create_execution_spec(execution_id.clone(), package).await; + + let mut handle = backend + .create(spec) + .await + .expect("Failed to create execution"); + handle + .wait_for_completion() + .await + .expect("Failed to wait for completion"); + handle.cleanup().await.expect("Failed to cleanup"); + + // Verify each execution's temp directory is cleaned up + assert!( + !uv_temp_dir_exists(&execution_id), + "UV temp directory {} should be cleaned up", + execution_id + ); + } +} diff --git a/crates/tower-uv/src/lib.rs b/crates/tower-uv/src/lib.rs index b334bc76..526cfb23 100644 --- a/crates/tower-uv/src/lib.rs +++ b/crates/tower-uv/src/lib.rs @@ -67,14 +67,18 @@ fn normalize_env_vars(env_vars: &HashMap) -> HashMap) -> HashMap Date: Thu, 29 Jan 2026 15:32:49 +0100 Subject: [PATCH 8/8] Update version to v0.3.44 --- Cargo.lock | 22 +++++++++++----------- Cargo.toml | 2 +- pyproject.toml | 2 +- uv.lock | 4 ++-- 4 files changed, 15 insertions(+), 15 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index cb0eb160..b261146a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -491,7 +491,7 @@ dependencies = [ [[package]] name = "config" -version = "0.3.44-rc.1" +version = "0.3.44" dependencies = [ "base64", "chrono", @@ -598,7 +598,7 @@ checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] name = "crypto" -version = "0.3.44-rc.1" +version = "0.3.44" dependencies = [ "aes-gcm", 
"base64", @@ -3252,7 +3252,7 @@ dependencies = [ [[package]] name = "testutils" -version = "0.3.44-rc.1" +version = "0.3.44" dependencies = [ "pem", "rsa", @@ -3522,7 +3522,7 @@ checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" [[package]] name = "tower" -version = "0.3.44-rc.1" +version = "0.3.44" dependencies = [ "tokio", "tower-api", @@ -3547,7 +3547,7 @@ dependencies = [ [[package]] name = "tower-api" -version = "0.3.44-rc.1" +version = "0.3.44" dependencies = [ "reqwest", "serde", @@ -3559,7 +3559,7 @@ dependencies = [ [[package]] name = "tower-cmd" -version = "0.3.44-rc.1" +version = "0.3.44" dependencies = [ "axum", "bytes", @@ -3629,7 +3629,7 @@ checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" [[package]] name = "tower-package" -version = "0.3.44-rc.1" +version = "0.3.44" dependencies = [ "async-compression", "config", @@ -3647,7 +3647,7 @@ dependencies = [ [[package]] name = "tower-runtime" -version = "0.3.44-rc.1" +version = "0.3.44" dependencies = [ "async-trait", "chrono", @@ -3670,7 +3670,7 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tower-telemetry" -version = "0.3.44-rc.1" +version = "0.3.44" dependencies = [ "tracing", "tracing-appender", @@ -3679,7 +3679,7 @@ dependencies = [ [[package]] name = "tower-uv" -version = "0.3.44-rc.1" +version = "0.3.44" dependencies = [ "async-compression", "async_zip", @@ -3697,7 +3697,7 @@ dependencies = [ [[package]] name = "tower-version" -version = "0.3.44-rc.1" +version = "0.3.44" dependencies = [ "anyhow", "chrono", diff --git a/Cargo.toml b/Cargo.toml index a2a20f99..0dddbc2a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,7 +4,7 @@ resolver = "2" [workspace.package] edition = "2021" -version = "0.3.44-rc.1" +version = "0.3.44" description = "Tower is the best way to host Python data apps in production" rust-version = "1.81" authors = ["Brad Heller "] diff --git a/pyproject.toml b/pyproject.toml 
index 1f2d9a49..a257d228 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "maturin" [project] name = "tower" -version = "0.3.44rc1" +version = "0.3.44" description = "Tower CLI and runtime environment for Tower." authors = [{ name = "Tower Computing Inc.", email = "brad@tower.dev" }] readme = "README.md" diff --git a/uv.lock b/uv.lock index 84bec35c..85b478a2 100644 --- a/uv.lock +++ b/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 2 +revision = 3 requires-python = ">=3.9" resolution-markers = [ "python_full_version >= '3.11'", @@ -2744,7 +2744,7 @@ wheels = [ [[package]] name = "tower" -version = "0.3.44rc1" +version = "0.3.44" source = { editable = "." } dependencies = [ { name = "attrs" },