diff --git a/.github/workflows/test-zsh-setup.yml b/.github/workflows/test-zsh-setup.yml new file mode 100644 index 0000000000..6c7435b6a9 --- /dev/null +++ b/.github/workflows/test-zsh-setup.yml @@ -0,0 +1,274 @@ +# ------------------------------------------------------------------- +# ------------------------------- WARNING --------------------------- +# ------------------------------------------------------------------- +# +# This file was automatically generated by gh-workflows using the +# gh-workflow-gen bin. You should add and commit this file to your +# git repository. **DO NOT EDIT THIS FILE BY HAND!** Any manual changes +# will be lost if the file is regenerated. +# +# To make modifications, update your `build.rs` configuration to adjust +# the workflow description as needed, then regenerate this file to apply +# those changes. +# +# ------------------------------------------------------------------- +# ----------------------------- END WARNING ------------------------- +# ------------------------------------------------------------------- + +name: Test ZSH Setup +'on': + pull_request: + types: + - opened + - synchronize + - reopened + paths: + - crates/forge_main/src/zsh/** + - crates/forge_main/src/ui.rs + - crates/forge_ci/tests/scripts/test-zsh-setup.sh + - crates/forge_ci/tests/scripts/test-zsh-setup-macos.sh + - crates/forge_ci/tests/scripts/test-zsh-setup-windows.sh + - '.github/workflows/test-zsh-setup.yml' + push: + branches: + - main + workflow_dispatch: {} +jobs: + test_zsh_setup_amd64: + name: Test ZSH Setup (amd64) + runs-on: ubuntu-latest + permissions: + contents: read + steps: + - name: Checkout Code + uses: actions/checkout@v6 + - name: Cache Cargo registry and git + uses: actions/cache@v4 + with: + path: |- + ~/.cargo/registry + ~/.cargo/git + key: cargo-registry-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/Cargo.lock') }} + restore-keys: |- + cargo-registry-${{ runner.os }}-${{ runner.arch }}- + cargo-registry-${{ runner.os }}- 
+ - name: Cache Rust toolchains + uses: actions/cache@v4 + with: + path: ~/.rustup + key: rustup-${{ runner.os }}-${{ runner.arch }} + - name: Cache build artifacts + uses: actions/cache@v4 + with: + path: target + key: build-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('**/*.rs') }} + restore-keys: |- + build-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/Cargo.lock') }}- + build-${{ runner.os }}-${{ runner.arch }}- + - name: Setup Protobuf Compiler + uses: arduino/setup-protoc@v3 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + - name: Setup Cross Toolchain + uses: taiki-e/setup-cross-toolchain-action@v1 + with: + target: x86_64-unknown-linux-musl + - name: Run ZSH setup test suite + run: bash crates/forge_ci/tests/scripts/test-zsh-setup.sh --native-build --no-cleanup --jobs 4 + - name: Upload test results + if: always() + uses: actions/upload-artifact@v4 + with: + name: zsh-setup-results-linux-amd64 + path: test-results-linux/ + retention-days: 7 + if-no-files-found: ignore + test_zsh_setup_arm64: + name: Test ZSH Setup (arm64) + runs-on: ubuntu-24.04-arm + permissions: + contents: read + steps: + - name: Checkout Code + uses: actions/checkout@v6 + - name: Cache Cargo registry and git + uses: actions/cache@v4 + with: + path: |- + ~/.cargo/registry + ~/.cargo/git + key: cargo-registry-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/Cargo.lock') }} + restore-keys: |- + cargo-registry-${{ runner.os }}-${{ runner.arch }}- + cargo-registry-${{ runner.os }}- + - name: Cache Rust toolchains + uses: actions/cache@v4 + with: + path: ~/.rustup + key: rustup-${{ runner.os }}-${{ runner.arch }} + - name: Cache build artifacts + uses: actions/cache@v4 + with: + path: target + key: build-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('**/*.rs') }} + restore-keys: |- + build-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/Cargo.lock') }}- + build-${{ runner.os }}-${{ runner.arch 
}}- + - name: Setup Protobuf Compiler + uses: arduino/setup-protoc@v3 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + - name: Setup Cross Toolchain + uses: taiki-e/setup-cross-toolchain-action@v1 + with: + target: aarch64-unknown-linux-musl + - name: Run ZSH setup test suite (exclude Arch) + run: bash crates/forge_ci/tests/scripts/test-zsh-setup.sh --native-build --no-cleanup --exclude "Arch Linux" --jobs 4 + - name: Upload test results + if: always() + uses: actions/upload-artifact@v4 + with: + name: zsh-setup-results-linux-arm64 + path: test-results-linux/ + retention-days: 7 + if-no-files-found: ignore + test_zsh_setup_macos_arm64: + name: Test ZSH Setup (macOS arm64) + runs-on: macos-latest + permissions: + contents: read + steps: + - name: Checkout Code + uses: actions/checkout@v6 + - name: Cache Cargo registry and git + uses: actions/cache@v4 + with: + path: |- + ~/.cargo/registry + ~/.cargo/git + key: cargo-registry-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/Cargo.lock') }} + restore-keys: |- + cargo-registry-${{ runner.os }}-${{ runner.arch }}- + cargo-registry-${{ runner.os }}- + - name: Cache Rust toolchains + uses: actions/cache@v4 + with: + path: ~/.rustup + key: rustup-${{ runner.os }}-${{ runner.arch }} + - name: Cache build artifacts + uses: actions/cache@v4 + with: + path: target + key: build-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('**/*.rs') }} + restore-keys: |- + build-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/Cargo.lock') }}- + build-${{ runner.os }}-${{ runner.arch }}- + - name: Setup Protobuf Compiler + uses: arduino/setup-protoc@v3 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + - name: Install shellcheck + run: brew install shellcheck + - name: Run macOS ZSH setup test suite + run: bash crates/forge_ci/tests/scripts/test-zsh-setup-macos.sh --no-cleanup + - name: Upload test results + if: always() + uses: actions/upload-artifact@v4 + with: + name: 
zsh-setup-results-macos-arm64 + path: test-results-macos/ + retention-days: 7 + if-no-files-found: ignore + test_zsh_setup_windows: + name: Test ZSH Setup (Windows x86_64) + runs-on: windows-latest + permissions: + contents: read + steps: + - name: Checkout Code + uses: actions/checkout@v6 + - name: Cache Cargo registry and git + uses: actions/cache@v4 + with: + path: |- + ~/.cargo/registry + ~/.cargo/git + key: cargo-registry-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/Cargo.lock') }} + restore-keys: |- + cargo-registry-${{ runner.os }}-${{ runner.arch }}- + cargo-registry-${{ runner.os }}- + - name: Cache Rust toolchains + uses: actions/cache@v4 + with: + path: ~/.rustup + key: rustup-${{ runner.os }}-${{ runner.arch }} + - name: Cache build artifacts + uses: actions/cache@v4 + with: + path: target + key: build-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('**/*.rs') }} + restore-keys: |- + build-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/Cargo.lock') }}- + build-${{ runner.os }}-${{ runner.arch }}- + - name: Setup Protobuf Compiler + uses: arduino/setup-protoc@v3 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + - name: Run Windows ZSH setup test suite + run: bash crates/forge_ci/tests/scripts/test-zsh-setup-windows.sh --no-cleanup + - name: Upload test results + if: always() + uses: actions/upload-artifact@v4 + with: + name: zsh-setup-results-windows + path: test-results-windows/ + retention-days: 7 + if-no-files-found: ignore + test_zsh_setup_windows_arm64: + name: Test ZSH Setup (Windows arm64) + runs-on: windows-11-arm + permissions: + contents: read + steps: + - name: Checkout Code + uses: actions/checkout@v6 + - name: Cache Cargo registry and git + uses: actions/cache@v4 + with: + path: |- + ~/.cargo/registry + ~/.cargo/git + key: cargo-registry-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/Cargo.lock') }} + restore-keys: |- + cargo-registry-${{ runner.os }}-${{ runner.arch }}- + 
cargo-registry-${{ runner.os }}- + - name: Cache Rust toolchains + uses: actions/cache@v4 + with: + path: ~/.rustup + key: rustup-${{ runner.os }}-${{ runner.arch }} + - name: Cache build artifacts + uses: actions/cache@v4 + with: + path: target + key: build-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('**/*.rs') }} + restore-keys: |- + build-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/Cargo.lock') }}- + build-${{ runner.os }}-${{ runner.arch }}- + - name: Setup Protobuf Compiler + uses: arduino/setup-protoc@v3 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + - name: Run Windows ARM64 ZSH setup test suite + run: bash crates/forge_ci/tests/scripts/test-zsh-setup-windows.sh --no-cleanup + - name: Upload test results + if: always() + uses: actions/upload-artifact@v4 + with: + name: zsh-setup-results-windows-arm64 + path: test-results-windows/ + retention-days: 7 + if-no-files-found: ignore +concurrency: + group: test-zsh-setup-${{ github.ref }} + cancel-in-progress: true diff --git a/Cargo.lock b/Cargo.lock index cd8cdea4d6..fc24231981 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1922,6 +1922,7 @@ dependencies = [ "serde_yml", "strum 0.28.0", "strum_macros 0.28.0", + "tempfile", "thiserror 2.0.18", "tokio", "tokio-stream", @@ -2018,6 +2019,7 @@ dependencies = [ "anyhow", "arboard", "async-recursion", + "async-trait", "atty", "chrono", "clap", @@ -2051,6 +2053,8 @@ dependencies = [ "open", "pretty_assertions", "reedline", + "regex", + "reqwest 0.12.28", "rustls 0.23.36", "serde", "serde_json", @@ -2205,6 +2209,7 @@ dependencies = [ "strip-ansi-escapes", "strum 0.28.0", "strum_macros 0.28.0", + "sysinfo 0.38.2", "tempfile", "thiserror 2.0.18", "tokio", diff --git a/crates/forge_api/src/api.rs b/crates/forge_api/src/api.rs index c254e6c5ef..0878e1f173 100644 --- a/crates/forge_api/src/api.rs +++ b/crates/forge_api/src/api.rs @@ -253,4 +253,10 @@ pub trait API: Sync + Send { &self, data_parameters: 
DataGenerationParameters, ) -> Result>>; + + /// Returns all tracked background processes with their alive status. + fn list_background_processes(&self) -> Result>; + + /// Kills a background process by PID and optionally deletes its log file. + fn kill_background_process(&self, pid: u32, delete_log: bool) -> Result<()>; } diff --git a/crates/forge_api/src/forge_api.rs b/crates/forge_api/src/forge_api.rs index 1aa57c769a..587272e282 100644 --- a/crates/forge_api/src/forge_api.rs +++ b/crates/forge_api/src/forge_api.rs @@ -8,8 +8,8 @@ use forge_app::{ AgentProviderResolver, AgentRegistry, AppConfigService, AuthService, CommandInfra, CommandLoaderService, ConversationService, DataGenerationApp, EnvironmentInfra, EnvironmentService, FileDiscoveryService, ForgeApp, GitApp, GrpcInfra, McpConfigManager, - McpService, ProviderAuthService, ProviderService, Services, User, UserUsage, Walker, - WorkspaceService, + McpService, ProviderAuthService, ProviderService, Services, ShellService, User, UserUsage, + Walker, WorkspaceService, }; use forge_domain::{Agent, ConsoleWriter, InitAuth, LoginInfo, *}; use forge_infra::ForgeInfra; @@ -416,6 +416,14 @@ impl< app.execute(data_parameters).await } + fn list_background_processes(&self) -> Result> { + self.services.shell_service().list_background_processes() + } + + fn kill_background_process(&self, pid: u32, delete_log: bool) -> Result<()> { + self.services.shell_service().kill_background_process(pid, delete_log) + } + async fn get_default_provider(&self) -> Result> { let provider_id = self.services.get_default_provider().await?; self.services.get_provider(provider_id).await diff --git a/crates/forge_app/src/fmt/fmt_input.rs b/crates/forge_app/src/fmt/fmt_input.rs index 931bcdcbfd..61701dadb2 100644 --- a/crates/forge_app/src/fmt/fmt_input.rs +++ b/crates/forge_app/src/fmt/fmt_input.rs @@ -100,11 +100,14 @@ impl FormatContent for ToolCatalog { let display_path = display_path_for(&input.path); 
Some(TitleFormat::debug("Undo").sub_title(display_path).into()) } - ToolCatalog::Shell(input) => Some( - TitleFormat::debug(format!("Execute [{}]", env.shell)) - .sub_title(&input.command) - .into(), - ), + ToolCatalog::Shell(input) => { + let label = if input.background { + format!("Spawned [{}]", env.shell) + } else { + format!("Execute [{}]", env.shell) + }; + Some(TitleFormat::debug(label).sub_title(&input.command).into()) + } ToolCatalog::Fetch(input) => { Some(TitleFormat::debug("GET").sub_title(&input.url).into()) } diff --git a/crates/forge_app/src/fmt/fmt_output.rs b/crates/forge_app/src/fmt/fmt_output.rs index 1e2944dd5c..70f4ad67c2 100644 --- a/crates/forge_app/src/fmt/fmt_output.rs +++ b/crates/forge_app/src/fmt/fmt_output.rs @@ -65,7 +65,7 @@ mod tests { use crate::operation::ToolOperation; use crate::{ Content, FsRemoveOutput, FsUndoOutput, FsWriteOutput, HttpResponse, Match, MatchResult, - PatchOutput, ReadOutput, ResponseContext, SearchResult, ShellOutput, + PatchOutput, ReadOutput, ResponseContext, SearchResult, ShellOutput, ShellOutputKind, }; // ContentFormat methods are now implemented in ChatResponseContent @@ -427,12 +427,12 @@ mod tests { fn test_shell_success() { let fixture = ToolOperation::Shell { output: ShellOutput { - output: forge_domain::CommandOutput { + kind: ShellOutputKind::Foreground(forge_domain::CommandOutput { command: "ls -la".to_string(), stdout: "file1.txt\nfile2.txt".to_string(), stderr: "".to_string(), exit_code: Some(0), - }, + }), shell: "/bin/bash".to_string(), description: None, }, @@ -449,12 +449,12 @@ mod tests { fn test_shell_success_with_stderr() { let fixture = ToolOperation::Shell { output: ShellOutput { - output: forge_domain::CommandOutput { + kind: ShellOutputKind::Foreground(forge_domain::CommandOutput { command: "command_with_warnings".to_string(), stdout: "output line".to_string(), stderr: "warning line".to_string(), exit_code: Some(0), - }, + }), shell: "/bin/bash".to_string(), description: None, }, @@ 
-471,12 +471,12 @@ mod tests { fn test_shell_failure() { let fixture = ToolOperation::Shell { output: ShellOutput { - output: forge_domain::CommandOutput { + kind: ShellOutputKind::Foreground(forge_domain::CommandOutput { command: "failing_command".to_string(), stdout: "".to_string(), stderr: "Error: command not found".to_string(), exit_code: Some(127), - }, + }), shell: "/bin/bash".to_string(), description: None, }, diff --git a/crates/forge_app/src/git_app.rs b/crates/forge_app/src/git_app.rs index c798aa463f..dbc9ead852 100644 --- a/crates/forge_app/src/git_app.rs +++ b/crates/forge_app/src/git_app.rs @@ -162,23 +162,25 @@ where let commit_result = self .services - .execute(commit_command, cwd, false, true, None, None) + .execute(commit_command, cwd, false, true, false, None, None) .await .context("Failed to commit changes")?; - if !commit_result.output.success() { - anyhow::bail!("Git commit failed: {}", commit_result.output.stderr); + let output = commit_result.foreground().expect("git commit runs in foreground"); + + if !output.success() { + anyhow::bail!("Git commit failed: {}", output.stderr); } // Combine stdout and stderr for logging - let git_output = if commit_result.output.stdout.is_empty() { - commit_result.output.stderr.clone() - } else if commit_result.output.stderr.is_empty() { - commit_result.output.stdout.clone() + let git_output = if output.stdout.is_empty() { + output.stderr.clone() + } else if output.stderr.is_empty() { + output.stdout.clone() } else { format!( "{}\n{}", - commit_result.output.stdout, commit_result.output.stderr + output.stdout, output.stderr ) }; @@ -230,6 +232,7 @@ where cwd.to_path_buf(), false, true, + false, None, None, ), @@ -238,6 +241,7 @@ where cwd.to_path_buf(), false, true, + false, None, None, ), @@ -246,7 +250,10 @@ where let recent_commits = recent_commits.context("Failed to get recent commits")?; let branch_name = branch_name.context("Failed to get branch name")?; - Ok((recent_commits.output.stdout, 
branch_name.output.stdout)) + Ok(( + recent_commits.foreground().expect("git log runs in foreground").stdout.clone(), + branch_name.foreground().expect("git rev-parse runs in foreground").stdout.clone(), + )) } /// Fetches diff from git (staged or unstaged) @@ -257,6 +264,7 @@ where cwd.to_path_buf(), false, true, + false, None, None, ), @@ -265,6 +273,7 @@ where cwd.to_path_buf(), false, true, + false, None, None, ) @@ -274,17 +283,18 @@ where let unstaged_diff = unstaged_diff.context("Failed to get unstaged changes")?; // Use staged changes if available, otherwise fall back to unstaged changes - let has_staged_files = !staged_diff.output.stdout.trim().is_empty(); + let has_staged_files = !staged_diff.foreground().expect("git diff runs in foreground").stdout.trim().is_empty(); let diff_output = if has_staged_files { staged_diff - } else if !unstaged_diff.output.stdout.trim().is_empty() { + } else if !unstaged_diff.foreground().expect("git diff runs in foreground").stdout.trim().is_empty() { unstaged_diff } else { return Err(GitAppError::NoChangesToCommit.into()); }; - let size = diff_output.output.stdout.len(); - Ok((diff_output.output.stdout, size, has_staged_files)) + let fg = diff_output.foreground().expect("git diff runs in foreground"); + let size = fg.stdout.len(); + Ok((fg.stdout.clone(), size, has_staged_files)) } /// Resolves the provider and model from the active agent's configuration. 
diff --git a/crates/forge_app/src/infra.rs b/crates/forge_app/src/infra.rs index 4a7eb87550..2fa59a0a60 100644 --- a/crates/forge_app/src/infra.rs +++ b/crates/forge_app/src/infra.rs @@ -5,8 +5,8 @@ use std::path::{Path, PathBuf}; use anyhow::Result; use bytes::Bytes; use forge_domain::{ - AuthCodeParams, CommandOutput, Environment, FileInfo, McpServerConfig, OAuthConfig, - OAuthTokenResponse, ToolDefinition, ToolName, ToolOutput, + AuthCodeParams, BackgroundCommandOutput, CommandOutput, Environment, FileInfo, McpServerConfig, + OAuthConfig, OAuthTokenResponse, ToolDefinition, ToolName, ToolOutput, }; use reqwest::Response; use reqwest::header::HeaderMap; @@ -140,6 +140,18 @@ pub trait CommandInfra: Send + Sync { working_dir: PathBuf, env_vars: Option>, ) -> anyhow::Result; + + /// Spawns a command as a detached background process. + /// + /// The process's stdout/stderr are redirected to a temporary log file. + /// Returns a `BackgroundCommandOutput` with the PID, log path, and the + /// temp-file handle that owns the log file on disk. 
+ async fn execute_command_background( + &self, + command: String, + working_dir: PathBuf, + env_vars: Option>, + ) -> anyhow::Result; } #[async_trait::async_trait] diff --git a/crates/forge_app/src/operation.rs b/crates/forge_app/src/operation.rs index 50c885c702..8b207de9ae 100644 --- a/crates/forge_app/src/operation.rs +++ b/crates/forge_app/src/operation.rs @@ -18,7 +18,7 @@ use crate::truncation::{ use crate::utils::{compute_hash, format_display_path}; use crate::{ FsRemoveOutput, FsUndoOutput, FsWriteOutput, HttpResponse, PatchOutput, PlanCreateOutput, - ReadOutput, ResponseContext, SearchResult, ShellOutput, + ReadOutput, ResponseContext, SearchResult, ShellOutput, ShellOutputKind, }; #[derive(Debug, Default, Setters)] @@ -548,38 +548,59 @@ impl ToolOperation { forge_domain::ToolOutput::text(elm) } ToolOperation::Shell { output } => { - let mut parent_elem = Element::new("shell_output") - .attr("command", &output.output.command) - .attr("shell", &output.shell); + let mut parent_elem = Element::new("shell_output"); - if let Some(description) = &output.description { - parent_elem = parent_elem.attr("description", description); - } + match &output.kind { + ShellOutputKind::Background { command, pid, log_file } => { + parent_elem = parent_elem + .attr("command", command) + .attr("shell", &output.shell) + .attr("mode", "background"); - if let Some(exit_code) = output.output.exit_code { - parent_elem = parent_elem.attr("exit_code", exit_code); - } + if let Some(description) = &output.description { + parent_elem = parent_elem.attr("description", description); + } - let truncated_output = truncate_shell_output( - &output.output.stdout, - &output.output.stderr, - env.stdout_max_prefix_length, - env.stdout_max_suffix_length, - env.stdout_max_line_length, - ); + let bg_elem = Element::new("background") + .attr("pid", *pid) + .attr("log_file", log_file.display().to_string()); + parent_elem = parent_elem.append(bg_elem); + } + ShellOutputKind::Foreground(cmd_output) => { 
+ parent_elem = parent_elem + .attr("command", &cmd_output.command) + .attr("shell", &output.shell); - let stdout_elem = create_stream_element( - &truncated_output.stdout, - content_files.stdout.as_deref(), - ); + if let Some(description) = &output.description { + parent_elem = parent_elem.attr("description", description); + } - let stderr_elem = create_stream_element( - &truncated_output.stderr, - content_files.stderr.as_deref(), - ); + if let Some(exit_code) = cmd_output.exit_code { + parent_elem = parent_elem.attr("exit_code", exit_code); + } - parent_elem = parent_elem.append(stdout_elem); - parent_elem = parent_elem.append(stderr_elem); + let truncated_output = truncate_shell_output( + &cmd_output.stdout, + &cmd_output.stderr, + env.stdout_max_prefix_length, + env.stdout_max_suffix_length, + env.stdout_max_line_length, + ); + + let stdout_elem = create_stream_element( + &truncated_output.stdout, + content_files.stdout.as_deref(), + ); + + let stderr_elem = create_stream_element( + &truncated_output.stderr, + content_files.stderr.as_deref(), + ); + + parent_elem = parent_elem.append(stdout_elem); + parent_elem = parent_elem.append(stderr_elem); + } + } forge_domain::ToolOutput::text(parent_elem) } @@ -1005,12 +1026,12 @@ mod tests { fn test_shell_output_no_truncation() { let fixture = ToolOperation::Shell { output: ShellOutput { - output: forge_domain::CommandOutput { + kind: ShellOutputKind::Foreground(forge_domain::CommandOutput { command: "echo hello".to_string(), stdout: "hello\nworld".to_string(), stderr: "".to_string(), exit_code: Some(0), - }, + }), shell: "/bin/bash".to_string(), description: None, }, @@ -1038,12 +1059,12 @@ mod tests { let fixture = ToolOperation::Shell { output: ShellOutput { - output: forge_domain::CommandOutput { + kind: ShellOutputKind::Foreground(forge_domain::CommandOutput { command: "long_command".to_string(), stdout, stderr: "".to_string(), exit_code: Some(0), - }, + }), shell: "/bin/bash".to_string(), description: None, }, @@ 
-1073,12 +1094,12 @@ mod tests { let fixture = ToolOperation::Shell { output: ShellOutput { - output: forge_domain::CommandOutput { + kind: ShellOutputKind::Foreground(forge_domain::CommandOutput { command: "error_command".to_string(), stdout: "".to_string(), stderr, exit_code: Some(1), - }, + }), shell: "/bin/bash".to_string(), description: None, }, @@ -1114,12 +1135,12 @@ mod tests { let fixture = ToolOperation::Shell { output: ShellOutput { - output: forge_domain::CommandOutput { + kind: ShellOutputKind::Foreground(forge_domain::CommandOutput { command: "complex_command".to_string(), stdout, stderr, exit_code: Some(0), - }, + }), shell: "/bin/bash".to_string(), description: None, }, @@ -1150,12 +1171,12 @@ mod tests { let fixture = ToolOperation::Shell { output: ShellOutput { - output: forge_domain::CommandOutput { + kind: ShellOutputKind::Foreground(forge_domain::CommandOutput { command: "boundary_command".to_string(), stdout, stderr: "".to_string(), exit_code: Some(0), - }, + }), shell: "/bin/bash".to_string(), description: None, }, @@ -1176,12 +1197,12 @@ mod tests { fn test_shell_output_single_line_each() { let fixture = ToolOperation::Shell { output: ShellOutput { - output: forge_domain::CommandOutput { + kind: ShellOutputKind::Foreground(forge_domain::CommandOutput { command: "simple_command".to_string(), stdout: "single stdout line".to_string(), stderr: "single stderr line".to_string(), exit_code: Some(0), - }, + }), shell: "/bin/bash".to_string(), description: None, }, @@ -1202,12 +1223,12 @@ mod tests { fn test_shell_output_empty_streams() { let fixture = ToolOperation::Shell { output: ShellOutput { - output: forge_domain::CommandOutput { + kind: ShellOutputKind::Foreground(forge_domain::CommandOutput { command: "silent_command".to_string(), stdout: "".to_string(), stderr: "".to_string(), exit_code: Some(0), - }, + }), shell: "/bin/bash".to_string(), description: None, }, @@ -1241,12 +1262,12 @@ mod tests { let fixture = ToolOperation::Shell { output: 
ShellOutput { - output: forge_domain::CommandOutput { + kind: ShellOutputKind::Foreground(forge_domain::CommandOutput { command: "line_test_command".to_string(), stdout, stderr, exit_code: Some(0), - }, + }), shell: "/bin/bash".to_string(), description: None, }, @@ -2267,12 +2288,12 @@ mod tests { fn test_shell_success() { let fixture = ToolOperation::Shell { output: ShellOutput { - output: forge_domain::CommandOutput { + kind: ShellOutputKind::Foreground(forge_domain::CommandOutput { command: "ls -la".to_string(), stdout: "total 8\ndrwxr-xr-x 2 user user 4096 Jan 1 12:00 .\ndrwxr-xr-x 10 user user 4096 Jan 1 12:00 ..".to_string(), stderr: "".to_string(), exit_code: Some(0), - }, + }), shell: "/bin/bash".to_string(), description: None, }, @@ -2294,12 +2315,12 @@ mod tests { fn test_shell_with_description() { let fixture = ToolOperation::Shell { output: ShellOutput { - output: forge_domain::CommandOutput { + kind: ShellOutputKind::Foreground(forge_domain::CommandOutput { command: "git status".to_string(), stdout: "On branch main\nnothing to commit, working tree clean".to_string(), stderr: "".to_string(), exit_code: Some(0), - }, + }), shell: "/bin/bash".to_string(), description: Some("Shows working tree status".to_string()), }, diff --git a/crates/forge_app/src/orch_spec/orch_runner.rs b/crates/forge_app/src/orch_spec/orch_runner.rs index 402232cf07..8cf75354aa 100644 --- a/crates/forge_app/src/orch_spec/orch_runner.rs +++ b/crates/forge_app/src/orch_spec/orch_runner.rs @@ -18,7 +18,8 @@ use crate::set_conversation_id::SetConversationId; use crate::system_prompt::SystemPrompt; use crate::user_prompt::UserPromptGenerator; use crate::{ - AgentService, AttachmentService, ShellOutput, ShellService, SkillFetchService, TemplateService, + AgentService, AttachmentService, ShellOutput, ShellOutputKind, ShellService, + SkillFetchService, TemplateService, }; static TEMPLATE_DIR: Dir<'static> = include_dir!("$CARGO_MANIFEST_DIR/../../templates"); @@ -224,6 +225,7 @@ impl 
ShellService for Runner { _cwd: std::path::PathBuf, _keep_ansi: bool, _silent: bool, + _background: bool, _env_vars: Option>, _description: Option, ) -> anyhow::Result { @@ -232,15 +234,23 @@ impl ShellService for Runner { Ok(output) } else { Ok(ShellOutput { - output: forge_domain::CommandOutput { + kind: ShellOutputKind::Foreground(forge_domain::CommandOutput { stdout: String::new(), stderr: String::new(), command: String::new(), exit_code: Some(1), - }, + }), shell: "/bin/bash".to_string(), description: None, }) } } + + fn list_background_processes(&self) -> anyhow::Result> { + Ok(Vec::new()) + } + + fn kill_background_process(&self, _pid: u32, _delete_log: bool) -> anyhow::Result<()> { + Ok(()) + } } diff --git a/crates/forge_app/src/orch_spec/orch_system_spec.rs b/crates/forge_app/src/orch_spec/orch_system_spec.rs index c704f3f520..08b7997c27 100644 --- a/crates/forge_app/src/orch_spec/orch_system_spec.rs +++ b/crates/forge_app/src/orch_spec/orch_system_spec.rs @@ -1,7 +1,7 @@ use forge_domain::{ChatCompletionMessage, CommandOutput, Content, FinishReason, Workflow}; use insta::assert_snapshot; -use crate::ShellOutput; +use crate::{ShellOutput, ShellOutputKind}; use crate::orch_spec::orch_runner::TestContext; #[tokio::test] @@ -44,12 +44,12 @@ async fn test_system_prompt_tool_supported() { #[tokio::test] async fn test_system_prompt_with_extensions() { let shell_output = ShellOutput { - output: CommandOutput { + kind: ShellOutputKind::Foreground(CommandOutput { stdout: include_str!("../fixtures/git_ls_files_mixed.txt").to_string(), stderr: String::new(), command: "git ls-files".to_string(), exit_code: Some(0), - }, + }), shell: "/bin/bash".to_string(), description: None, }; @@ -81,12 +81,12 @@ async fn test_system_prompt_with_extensions_truncated() { let stdout = files.join("\n"); let shell_output = ShellOutput { - output: CommandOutput { + kind: ShellOutputKind::Foreground(CommandOutput { stdout, stderr: String::new(), command: "git ls-files".to_string(), 
exit_code: Some(0), - }, + }), shell: "/bin/bash".to_string(), description: None, }; diff --git a/crates/forge_app/src/services.rs b/crates/forge_app/src/services.rs index 7b5a6dd3c3..5cdd366831 100644 --- a/crates/forge_app/src/services.rs +++ b/crates/forge_app/src/services.rs @@ -5,10 +5,11 @@ use bytes::Bytes; use derive_setters::Setters; use forge_domain::{ AgentId, AnyProvider, Attachment, AuthContextRequest, AuthContextResponse, AuthMethod, - ChatCompletionMessage, CommandOutput, Context, Conversation, ConversationId, Environment, File, - FileStatus, Image, InitAuth, LoginInfo, McpConfig, McpServers, Model, ModelId, Node, Provider, - ProviderId, ResultStream, Scope, SearchParams, SyncProgress, SyntaxError, Template, - ToolCallFull, ToolOutput, Workflow, WorkspaceAuth, WorkspaceId, WorkspaceInfo, + ChatCompletionMessage, CommandOutput, Context, Conversation, + ConversationId, Environment, File, FileStatus, Image, InitAuth, LoginInfo, McpConfig, + McpServers, Model, ModelId, Node, Provider, ProviderId, ResultStream, Scope, SearchParams, + SyncProgress, SyntaxError, Template, ToolCallFull, ToolOutput, Workflow, WorkspaceAuth, + WorkspaceId, WorkspaceInfo, }; use merge::Merge; use reqwest::Response; @@ -19,13 +20,44 @@ use url::Url; use crate::Walker; use crate::user::{User, UserUsage}; +/// Distinguishes foreground (ran to completion) from background (spawned and +/// still running) shell execution results. +#[derive(Debug, Clone)] +pub enum ShellOutputKind { + /// Command ran to completion (or timed out). Contains the full output. + Foreground(CommandOutput), + /// Command was spawned in the background. Contains process metadata. + Background { + /// The command string that was executed. + command: String, + /// OS process ID of the background process. + pid: u32, + /// Absolute path to the log file capturing stdout/stderr. 
+ log_file: PathBuf, + }, +} + #[derive(Debug, Clone)] pub struct ShellOutput { - pub output: CommandOutput, + /// The execution result -- either foreground output or background metadata. + pub kind: ShellOutputKind, + /// Shell used to execute the command (e.g. "bash", "zsh"). pub shell: String, + /// Optional human-readable description of the command. pub description: Option, } +impl ShellOutput { + /// Returns a reference to the foreground `CommandOutput` if this is a + /// foreground result, or `None` if this is a background result. + pub fn foreground(&self) -> Option<&CommandOutput> { + match &self.kind { + ShellOutputKind::Foreground(output) => Some(output), + ShellOutputKind::Background { .. } => None, + } + } +} + #[derive(Debug)] pub struct PatchOutput { pub errors: Vec, @@ -484,9 +516,16 @@ pub trait ShellService: Send + Sync { cwd: PathBuf, keep_ansi: bool, silent: bool, + background: bool, env_vars: Option>, description: Option, ) -> anyhow::Result; + + /// Returns all tracked background processes with their alive status. + fn list_background_processes(&self) -> anyhow::Result>; + + /// Kills a background process by PID and removes it from tracking. 
+ fn kill_background_process(&self, pid: u32, delete_log: bool) -> anyhow::Result<()>; } #[async_trait::async_trait] @@ -910,13 +949,22 @@ impl ShellService for I { cwd: PathBuf, keep_ansi: bool, silent: bool, + background: bool, env_vars: Option>, description: Option, ) -> anyhow::Result { self.shell_service() - .execute(command, cwd, keep_ansi, silent, env_vars, description) + .execute(command, cwd, keep_ansi, silent, background, env_vars, description) .await } + + fn list_background_processes(&self) -> anyhow::Result> { + self.shell_service().list_background_processes() + } + + fn kill_background_process(&self, pid: u32, delete_log: bool) -> anyhow::Result<()> { + self.shell_service().kill_background_process(pid, delete_log) + } } impl EnvironmentService for I { diff --git a/crates/forge_app/src/snapshots/forge_app__tool_registry__all_rendered_tool_descriptions.snap b/crates/forge_app/src/snapshots/forge_app__tool_registry__all_rendered_tool_descriptions.snap index b0bc30ec22..6eb7434ca5 100644 --- a/crates/forge_app/src/snapshots/forge_app__tool_registry__all_rendered_tool_descriptions.snap +++ b/crates/forge_app/src/snapshots/forge_app__tool_registry__all_rendered_tool_descriptions.snap @@ -151,6 +151,17 @@ Good examples: Bad example: cd /foo/bar && pytest tests +Background execution: + - Set `background: true` to run long-lived processes (web servers, file watchers, dev servers) as detached background jobs. + - The command returns immediately with a **log file path** and **process ID (PID)** instead of waiting for completion. + - The process continues running independently even after the session ends. + - CRITICAL: Always remember the log file path returned by background commands. You will need it to check output, diagnose errors, or verify the process is working. After compaction the log file path will still be available in the summary. + - Use `read` on the log file path to inspect process output at any time. 
+ - Examples of when to use background: + - Starting a web server: `npm start`, `python manage.py runserver`, `cargo run --bin server` + - Starting a file watcher: `npm run watch`, `cargo watch` + - Starting any process that runs indefinitely and should not block your workflow + Returns complete output including stdout, stderr, and exit code for diagnostic purposes. --- diff --git a/crates/forge_app/src/system_prompt.rs b/crates/forge_app/src/system_prompt.rs index b8131ed2d8..38ef7226ae 100644 --- a/crates/forge_app/src/system_prompt.rs +++ b/crates/forge_app/src/system_prompt.rs @@ -45,6 +45,7 @@ impl SystemPrompt { self.environment.cwd.clone(), false, true, + false, None, None, ) @@ -52,11 +53,12 @@ impl SystemPrompt { .ok()?; // If git command fails (e.g., not in a git repo), return None - if output.output.exit_code != Some(0) { + let fg = output.foreground()?; + if fg.exit_code != Some(0) { return None; } - parse_extensions(&output.output.stdout, max_extensions) + parse_extensions(&fg.stdout, max_extensions) } pub async fn add_system_message( diff --git a/crates/forge_app/src/tool_executor.rs b/crates/forge_app/src/tool_executor.rs index b96bf2c708..b6e9c9f54f 100644 --- a/crates/forge_app/src/tool_executor.rs +++ b/crates/forge_app/src/tool_executor.rs @@ -6,7 +6,7 @@ use forge_domain::{CodebaseQueryResult, ToolCallContext, ToolCatalog, ToolOutput use crate::fmt::content::FormatContent; use crate::operation::{TempContentFiles, ToolOperation}; -use crate::services::{Services, ShellService}; +use crate::services::{Services, ShellOutputKind, ShellService}; use crate::{ AgentRegistry, ConversationService, EnvironmentService, FollowUpService, FsPatchService, FsReadService, FsRemoveService, FsSearchService, FsUndoService, FsWriteService, @@ -83,30 +83,34 @@ impl< Ok(files) } ToolOperation::Shell { output } => { - let env = self.services.get_environment(); - let stdout_lines = output.output.stdout.lines().count(); - let stderr_lines = 
output.output.stderr.lines().count(); - let stdout_truncated = - stdout_lines > env.stdout_max_prefix_length + env.stdout_max_suffix_length; - let stderr_truncated = - stderr_lines > env.stdout_max_prefix_length + env.stdout_max_suffix_length; + if let ShellOutputKind::Foreground(ref cmd_output) = output.kind { + let env = self.services.get_environment(); + let stdout_lines = cmd_output.stdout.lines().count(); + let stderr_lines = cmd_output.stderr.lines().count(); + let stdout_truncated = + stdout_lines > env.stdout_max_prefix_length + env.stdout_max_suffix_length; + let stderr_truncated = + stderr_lines > env.stdout_max_prefix_length + env.stdout_max_suffix_length; - let mut files = TempContentFiles::default(); + let mut files = TempContentFiles::default(); - if stdout_truncated { - files = files.stdout( - self.create_temp_file("forge_shell_stdout_", ".txt", &output.output.stdout) - .await?, - ); - } - if stderr_truncated { - files = files.stderr( - self.create_temp_file("forge_shell_stderr_", ".txt", &output.output.stderr) - .await?, - ); - } + if stdout_truncated { + files = files.stdout( + self.create_temp_file("forge_shell_stdout_", ".txt", &cmd_output.stdout) + .await?, + ); + } + if stderr_truncated { + files = files.stderr( + self.create_temp_file("forge_shell_stderr_", ".txt", &cmd_output.stderr) + .await?, + ); + } - Ok(files) + Ok(files) + } else { + Ok(TempContentFiles::default()) + } } _ => Ok(TempContentFiles::default()), } @@ -261,6 +265,7 @@ impl< PathBuf::from(normalized_cwd), input.keep_ansi, false, + input.background, input.env.clone(), input.description.clone(), ) diff --git a/crates/forge_app/src/transformers/trim_context_summary.rs b/crates/forge_app/src/transformers/trim_context_summary.rs index 333a14b9fb..ef64a1473d 100644 --- a/crates/forge_app/src/transformers/trim_context_summary.rs +++ b/crates/forge_app/src/transformers/trim_context_summary.rs @@ -49,7 +49,7 @@ fn to_op(tool: &SummaryTool) -> Operation<'_> { SummaryTool::FileUpdate { 
path } => Operation::File(path), SummaryTool::FileRemove { path } => Operation::File(path), SummaryTool::Undo { path } => Operation::File(path), - SummaryTool::Shell { command } => Operation::Shell(command), + SummaryTool::Shell { command, .. } => Operation::Shell(command), SummaryTool::Search { pattern } => Operation::Search(pattern), SummaryTool::SemSearch { queries } => Operation::CodebaseSearch { queries }, SummaryTool::Fetch { url } => Operation::Fetch(url), diff --git a/crates/forge_ci/src/workflows/mod.rs b/crates/forge_ci/src/workflows/mod.rs index d6ec89505b..347e3ce564 100644 --- a/crates/forge_ci/src/workflows/mod.rs +++ b/crates/forge_ci/src/workflows/mod.rs @@ -6,6 +6,7 @@ mod labels; mod release_drafter; mod release_publish; mod stale; +mod test_zsh_setup; pub use autofix::*; pub use ci::*; @@ -13,3 +14,4 @@ pub use labels::*; pub use release_drafter::*; pub use release_publish::*; pub use stale::*; +pub use test_zsh_setup::*; diff --git a/crates/forge_ci/src/workflows/test_zsh_setup.rs b/crates/forge_ci/src/workflows/test_zsh_setup.rs new file mode 100644 index 0000000000..3b193dd522 --- /dev/null +++ b/crates/forge_ci/src/workflows/test_zsh_setup.rs @@ -0,0 +1,206 @@ +use gh_workflow::generate::Generate; +use gh_workflow::*; +use indexmap::indexmap; +use serde_json::json; + +/// Creates the common cache + protoc steps shared by all jobs. +fn common_setup_steps() -> Vec> { + vec![ + Step::new("Cache Cargo registry and git") + .uses("actions", "cache", "v4") + .with(Input::from(indexmap! { + "path".to_string() => json!("~/.cargo/registry\n~/.cargo/git"), + "key".to_string() => json!("cargo-registry-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/Cargo.lock') }}"), + "restore-keys".to_string() => json!("cargo-registry-${{ runner.os }}-${{ runner.arch }}-\ncargo-registry-${{ runner.os }}-"), + })), + Step::new("Cache Rust toolchains") + .uses("actions", "cache", "v4") + .with(Input::from(indexmap! 
{ + "path".to_string() => json!("~/.rustup"), + "key".to_string() => json!("rustup-${{ runner.os }}-${{ runner.arch }}"), + })), + Step::new("Cache build artifacts") + .uses("actions", "cache", "v4") + .with(Input::from(indexmap! { + "path".to_string() => json!("target"), + "key".to_string() => json!("build-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('**/*.rs') }}"), + "restore-keys".to_string() => json!("build-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/Cargo.lock') }}-\nbuild-${{ runner.os }}-${{ runner.arch }}-"), + })), + Step::new("Setup Protobuf Compiler") + .uses("arduino", "setup-protoc", "v3") + .with(Input::from(indexmap! { + "repo-token".to_string() => json!("${{ secrets.GITHUB_TOKEN }}"), + })), + ] +} + +/// Creates an upload-artifact step that always runs. +fn upload_results_step(artifact_name: &str, results_path: &str) -> Step { + Step::new("Upload test results") + .uses("actions", "upload-artifact", "v4") + .if_condition(Expression::new("always()")) + .with(Input::from(indexmap! { + "name".to_string() => json!(artifact_name), + "path".to_string() => json!(results_path), + "retention-days".to_string() => json!(7), + "if-no-files-found".to_string() => json!("ignore"), + })) +} + +/// Generate the ZSH setup E2E test workflow +pub fn generate_test_zsh_setup_workflow() { + // Job for amd64 runner - tests all distros including Arch Linux + let mut test_amd64 = Job::new("Test ZSH Setup (amd64)") + .permissions(Permissions::default().contents(Level::Read)) + .runs_on("ubuntu-latest") + .add_step(Step::new("Checkout Code").uses("actions", "checkout", "v6")); + + for step in common_setup_steps() { + test_amd64 = test_amd64.add_step(step); + } + + test_amd64 = test_amd64 + .add_step( + Step::new("Setup Cross Toolchain") + .uses("taiki-e", "setup-cross-toolchain-action", "v1") + .with(Input::from(indexmap! 
{ + "target".to_string() => json!("x86_64-unknown-linux-musl"), + })), + ) + .add_step( + Step::new("Run ZSH setup test suite") + .run("bash crates/forge_ci/tests/scripts/test-zsh-setup.sh --native-build --no-cleanup --jobs 4"), + ) + .add_step(upload_results_step( + "zsh-setup-results-linux-amd64", + "test-results-linux/", + )); + + // Job for arm64 runner - excludes Arch Linux (no arm64 image available) + let mut test_arm64 = Job::new("Test ZSH Setup (arm64)") + .permissions(Permissions::default().contents(Level::Read)) + .runs_on("ubuntu-24.04-arm") + .add_step(Step::new("Checkout Code").uses("actions", "checkout", "v6")); + + for step in common_setup_steps() { + test_arm64 = test_arm64.add_step(step); + } + + test_arm64 = test_arm64 + .add_step( + Step::new("Setup Cross Toolchain") + .uses("taiki-e", "setup-cross-toolchain-action", "v1") + .with(Input::from(indexmap! { + "target".to_string() => json!("aarch64-unknown-linux-musl"), + })), + ) + .add_step( + Step::new("Run ZSH setup test suite (exclude Arch)") + .run(r#"bash crates/forge_ci/tests/scripts/test-zsh-setup.sh --native-build --no-cleanup --exclude "Arch Linux" --jobs 4"#), + ) + .add_step(upload_results_step( + "zsh-setup-results-linux-arm64", + "test-results-linux/", + )); + + // macOS Apple Silicon (arm64) job - runs natively on macos-latest + let mut test_macos_arm64 = Job::new("Test ZSH Setup (macOS arm64)") + .permissions(Permissions::default().contents(Level::Read)) + .runs_on("macos-latest") + .add_step(Step::new("Checkout Code").uses("actions", "checkout", "v6")); + + for step in common_setup_steps() { + test_macos_arm64 = test_macos_arm64.add_step(step); + } + + test_macos_arm64 = test_macos_arm64 + .add_step(Step::new("Install shellcheck").run("brew install shellcheck")) + .add_step( + Step::new("Run macOS ZSH setup test suite") + .run("bash crates/forge_ci/tests/scripts/test-zsh-setup-macos.sh --no-cleanup"), + ) + .add_step(upload_results_step( + "zsh-setup-results-macos-arm64", + 
"test-results-macos/", + )); + + // Windows x86_64 job - runs natively in Git Bash on windows-latest + let mut test_windows = Job::new("Test ZSH Setup (Windows x86_64)") + .permissions(Permissions::default().contents(Level::Read)) + .runs_on("windows-latest") + .add_step(Step::new("Checkout Code").uses("actions", "checkout", "v6")); + + for step in common_setup_steps() { + test_windows = test_windows.add_step(step); + } + + test_windows = test_windows + .add_step( + Step::new("Run Windows ZSH setup test suite") + .run("bash crates/forge_ci/tests/scripts/test-zsh-setup-windows.sh --no-cleanup"), + ) + .add_step(upload_results_step( + "zsh-setup-results-windows", + "test-results-windows/", + )); + + // Windows ARM64 job - runs natively in Git Bash on windows-11-arm + let mut test_windows_arm64 = Job::new("Test ZSH Setup (Windows arm64)") + .permissions(Permissions::default().contents(Level::Read)) + .runs_on("windows-11-arm") + .add_step(Step::new("Checkout Code").uses("actions", "checkout", "v6")); + + for step in common_setup_steps() { + test_windows_arm64 = test_windows_arm64.add_step(step); + } + + test_windows_arm64 = test_windows_arm64 + .add_step( + Step::new("Run Windows ARM64 ZSH setup test suite") + .run("bash crates/forge_ci/tests/scripts/test-zsh-setup-windows.sh --no-cleanup"), + ) + .add_step(upload_results_step( + "zsh-setup-results-windows-arm64", + "test-results-windows/", + )); + + // Event triggers: + // 1. Push to main + // 2. PR with path changes to zsh files, ui.rs, test script, or workflow + // 3. 
Manual workflow_dispatch + // Note: "test: zsh-setup" in PR body/commit is handled via workflow_dispatch + let events = Event::default() + .push(Push::default().add_branch("main")) + .pull_request( + PullRequest::default() + .add_type(PullRequestType::Opened) + .add_type(PullRequestType::Synchronize) + .add_type(PullRequestType::Reopened) + .add_path("crates/forge_main/src/zsh/**") + .add_path("crates/forge_main/src/ui.rs") + .add_path("crates/forge_ci/tests/scripts/test-zsh-setup.sh") + .add_path("crates/forge_ci/tests/scripts/test-zsh-setup-macos.sh") + .add_path("crates/forge_ci/tests/scripts/test-zsh-setup-windows.sh") + .add_path(".github/workflows/test-zsh-setup.yml"), + ) + .workflow_dispatch(WorkflowDispatch::default()); + + let workflow = Workflow::default() + .name("Test ZSH Setup") + .on(events) + .concurrency( + Concurrency::default() + .group("test-zsh-setup-${{ github.ref }}") + .cancel_in_progress(true), + ) + .add_job("test_zsh_setup_amd64", test_amd64) + .add_job("test_zsh_setup_arm64", test_arm64) + .add_job("test_zsh_setup_macos_arm64", test_macos_arm64) + .add_job("test_zsh_setup_windows", test_windows) + .add_job("test_zsh_setup_windows_arm64", test_windows_arm64); + + Generate::new(workflow) + .name("test-zsh-setup.yml") + .generate() + .unwrap(); +} diff --git a/crates/forge_ci/tests/ci.rs b/crates/forge_ci/tests/ci.rs index 469fd968aa..9915e4bb88 100644 --- a/crates/forge_ci/tests/ci.rs +++ b/crates/forge_ci/tests/ci.rs @@ -29,3 +29,8 @@ fn test_stale_workflow() { fn test_autofix_workflow() { workflow::generate_autofix_workflow(); } + +#[test] +fn test_zsh_setup_workflow() { + workflow::generate_test_zsh_setup_workflow(); +} diff --git a/crates/forge_ci/tests/scripts/test-zsh-setup-macos.sh b/crates/forge_ci/tests/scripts/test-zsh-setup-macos.sh new file mode 100755 index 0000000000..081a16be8a --- /dev/null +++ b/crates/forge_ci/tests/scripts/test-zsh-setup-macos.sh @@ -0,0 +1,1187 @@ +#!/bin/bash +# 
============================================================================= +# macOS-native E2E test suite for `forge zsh setup` +# +# Tests the complete zsh setup flow natively on macOS using temp HOME directory +# isolation. Covers both "with Homebrew" and "without Homebrew" scenarios, +# verifying dependency detection, installation (zsh, Oh My Zsh, plugins, tools), +# .zshrc configuration, and doctor diagnostics. +# +# Unlike the Linux test suite (test-zsh-setup.sh) which uses Docker containers, +# this script runs directly on the macOS host with HOME directory isolation. +# Each test scenario gets a fresh temp HOME to prevent state leakage. +# +# Build targets (from CI): +# - x86_64-apple-darwin (Intel Macs) +# - aarch64-apple-darwin (Apple Silicon) +# +# Prerequisites: +# - macOS (Darwin) host +# - Rust toolchain +# - git (Xcode CLT or Homebrew) +# +# Usage: +# bash crates/forge_ci/tests/scripts/test-zsh-setup-macos.sh # build + test all +# bash crates/forge_ci/tests/scripts/test-zsh-setup-macos.sh --quick # shellcheck only +# bash crates/forge_ci/tests/scripts/test-zsh-setup-macos.sh --filter "brew" # run only matching +# bash crates/forge_ci/tests/scripts/test-zsh-setup-macos.sh --skip-build # skip build, use existing +# bash crates/forge_ci/tests/scripts/test-zsh-setup-macos.sh --no-cleanup # keep temp dirs +# bash crates/forge_ci/tests/scripts/test-zsh-setup-macos.sh --dry-run # show plan, don't run +# bash crates/forge_ci/tests/scripts/test-zsh-setup-macos.sh --list # list scenarios and exit +# bash crates/forge_ci/tests/scripts/test-zsh-setup-macos.sh --help # show usage +# +# Relationship to test-zsh-setup.sh: +# test-zsh-setup.sh tests `forge zsh setup` inside Docker (Linux distros). +# This script tests `forge zsh setup` natively on macOS. +# Both use the same CHECK_* line protocol for verification. 
+# ============================================================================= + +set -euo pipefail + +# ============================================================================= +# Platform guard +# ============================================================================= + +if [ "$(uname -s)" != "Darwin" ]; then + echo "Error: This script must be run on macOS (Darwin)." >&2 + echo "For Linux testing, use test-zsh-setup.sh (Docker-based)." >&2 + exit 1 +fi + +# ============================================================================= +# Constants +# ============================================================================= + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +readonly SCRIPT_DIR + +PROJECT_ROOT="$(cd "$SCRIPT_DIR/../../../.." && pwd)" +readonly PROJECT_ROOT + +readonly RED='\033[0;31m' +readonly GREEN='\033[0;32m' +readonly YELLOW='\033[1;33m' +readonly BLUE='\033[0;34m' +readonly BOLD='\033[1m' +readonly DIM='\033[2m' +readonly NC='\033[0m' + +readonly SHELLCHECK_EXCLUSIONS="SC2155,SC2086,SC1090,SC2034,SC2181,SC2016,SC2162" + +# Detect host architecture and set build target +HOST_ARCH="$(uname -m)" +readonly HOST_ARCH + +if [ "$HOST_ARCH" = "arm64" ] || [ "$HOST_ARCH" = "aarch64" ]; then + BUILD_TARGET="aarch64-apple-darwin" +elif [ "$HOST_ARCH" = "x86_64" ]; then + BUILD_TARGET="x86_64-apple-darwin" +else + echo "Error: Unsupported host architecture: $HOST_ARCH" >&2 + echo "Supported: arm64, aarch64, x86_64" >&2 + exit 1 +fi +readonly BUILD_TARGET + +# Detect Homebrew prefix (differs between Apple Silicon and Intel) +if [ -d "/opt/homebrew" ]; then + BREW_PREFIX="/opt/homebrew" +elif [ -d "/usr/local/Homebrew" ]; then + BREW_PREFIX="/usr/local" +else + BREW_PREFIX="" +fi +readonly BREW_PREFIX + +# ============================================================================= +# Test scenarios +# ============================================================================= + +# Format: 
"scenario_id|label|brew_mode|test_type" +# scenario_id - unique identifier +# label - human-readable name +# brew_mode - "with_brew" or "no_brew" +# test_type - "standard", "preinstalled_all", "rerun", "partial", +# "no_git", "no_zsh" +readonly SCENARIOS=( + # --- With Homebrew --- + "BREW_BARE|Fresh install (with brew)|with_brew|standard" + "BREW_PREINSTALLED_ALL|Pre-installed everything (with brew)|with_brew|preinstalled_all" + "BREW_RERUN|Re-run idempotency (with brew)|with_brew|rerun" + "BREW_PARTIAL|Partial install - only plugins missing (with brew)|with_brew|partial" + "BREW_NO_GIT|No git (with brew)|with_brew|no_git" + + # --- Without Homebrew --- + "NOBREW_BARE|Fresh install (no brew, GitHub releases)|no_brew|standard" + "NOBREW_RERUN|Re-run idempotency (no brew)|no_brew|rerun" + + # --- No zsh in PATH (brew available to install it) --- + "BREW_NO_ZSH|No zsh in PATH (brew installs it)|with_brew|no_zsh" +) + +# ============================================================================= +# Runtime state +# ============================================================================= + +PASS=0 +FAIL=0 +SKIP=0 +FAILURES=() + +# CLI options +MODE="full" +FILTER_PATTERN="" +EXCLUDE_PATTERN="" +NO_CLEANUP=false +SKIP_BUILD=false +DRY_RUN=false + +# Shared temp paths +RESULTS_DIR="" +REAL_HOME="$HOME" + +# ============================================================================= +# Logging helpers +# ============================================================================= + +log_header() { echo -e "\n${BOLD}${BLUE}$1${NC}"; } +log_pass() { echo -e " ${GREEN}PASS${NC} $1"; PASS=$((PASS + 1)); } +log_fail() { echo -e " ${RED}FAIL${NC} $1"; FAIL=$((FAIL + 1)); FAILURES+=("$1"); } +log_skip() { echo -e " ${YELLOW}SKIP${NC} $1"; SKIP=$((SKIP + 1)); } +log_info() { echo -e " ${DIM}$1${NC}"; } + +# ============================================================================= +# Argument parsing +# 
============================================================================= + +print_usage() { + cat < Run only scenarios whose label matches (grep -iE) + --exclude Skip scenarios whose label matches (grep -iE) + --skip-build Skip binary build, use existing binary + --no-cleanup Keep temp directories and results after tests + --dry-run Show what would be tested without running anything + --list List all test scenarios and exit + --help Show this help message + +Notes: + - This script runs natively on macOS (no Docker). + - "With brew" tests may install packages via Homebrew. + On CI runners (ephemeral VMs), this is safe. + For local development, use --dry-run to review first. + - "Without brew" tests hide Homebrew from PATH and verify + GitHub release fallback for tools (fzf, bat, fd). +EOF +} + +parse_args() { + while [ $# -gt 0 ]; do + case "$1" in + --quick) + MODE="quick" + shift + ;; + --filter) + FILTER_PATTERN="${2:?--filter requires a pattern}" + shift 2 + ;; + --exclude) + EXCLUDE_PATTERN="${2:?--exclude requires a pattern}" + shift 2 + ;; + --skip-build) + SKIP_BUILD=true + shift + ;; + --no-cleanup) + NO_CLEANUP=true + shift + ;; + --dry-run) + DRY_RUN=true + shift + ;; + --list) + list_scenarios + exit 0 + ;; + --help|-h) + print_usage + exit 0 + ;; + *) + echo "Unknown option: $1" >&2 + print_usage >&2 + exit 1 + ;; + esac + done +} + +list_scenarios() { + echo -e "${BOLD}Build Target:${NC}" + printf " %-55s %s\n" "$BUILD_TARGET" "$HOST_ARCH" + + echo -e "\n${BOLD}Test Scenarios:${NC}" + local idx=0 + for entry in "${SCENARIOS[@]}"; do + idx=$((idx + 1)) + IFS='|' read -r _id label brew_mode test_type <<< "$entry" + printf " %2d. 
%-55s [%s] %s\n" "$idx" "$label" "$brew_mode" "$test_type" + done + + echo "" + echo -e "${BOLD}Homebrew:${NC}" + if [ -n "$BREW_PREFIX" ]; then + echo " Found at: $BREW_PREFIX" + else + echo " Not found (no-brew scenarios only)" + fi +} + +# ============================================================================= +# Build binary +# ============================================================================= + +build_binary() { + local binary_path="$PROJECT_ROOT/target/${BUILD_TARGET}/debug/forge" + + if [ "$SKIP_BUILD" = true ] && [ -f "$binary_path" ]; then + log_info "Skipping build for ${BUILD_TARGET} (binary exists)" + return 0 + fi + + # Ensure target is installed + if ! rustup target list --installed 2>/dev/null | grep -q "$BUILD_TARGET"; then + log_info "Adding Rust target ${BUILD_TARGET}..." + rustup target add "$BUILD_TARGET" 2>/dev/null || true + fi + + log_info "Building ${BUILD_TARGET} with cargo (debug)..." + if ! cargo build --target "$BUILD_TARGET" 2>"$RESULTS_DIR/build-${BUILD_TARGET}.log"; then + log_fail "Build failed for ${BUILD_TARGET}" + log_info "Build log: $RESULTS_DIR/build-${BUILD_TARGET}.log" + echo "" + echo "===== Full build log =====" + cat "$RESULTS_DIR/build-${BUILD_TARGET}.log" 2>/dev/null || echo "Log file not found" + echo "==========================" + echo "" + return 1 + fi + + if [ -f "$binary_path" ]; then + log_pass "Built ${BUILD_TARGET} -> $(du -h "$binary_path" | cut -f1)" + return 0 + else + log_fail "Binary not found after build: ${binary_path}" + return 1 + fi +} + +# ============================================================================= +# Static analysis +# ============================================================================= + +run_static_checks() { + log_header "Phase 1: Static Analysis" + + if bash -n "${BASH_SOURCE[0]}" 2>/dev/null; then + log_pass "bash -n syntax check" + else + log_fail "bash -n syntax check" + fi + + if command -v shellcheck > /dev/null 2>&1; then + if shellcheck -x -e 
"$SHELLCHECK_EXCLUSIONS" "${BASH_SOURCE[0]}" 2>/dev/null; then + log_pass "shellcheck (excluding $SHELLCHECK_EXCLUSIONS)" + else + log_fail "shellcheck (excluding $SHELLCHECK_EXCLUSIONS)" + fi + else + log_skip "shellcheck (not installed)" + fi +} + +# ============================================================================= +# PATH filtering helpers +# ============================================================================= + +# Build a PATH that excludes Homebrew directories. +# The forge binary must be placed in $1 (a temp bin dir) which is prepended. +filter_path_no_brew() { + local temp_bin="$1" + local filtered="" + local IFS=':' + + for dir in $PATH; do + # Skip Homebrew directories + case "$dir" in + /opt/homebrew/bin|/opt/homebrew/sbin) continue ;; + /usr/local/bin|/usr/local/sbin) + # On Intel Macs, /usr/local/bin is Homebrew. On Apple Silicon it's not. + # Check if this is actually a Homebrew path + if [ -d "/usr/local/Homebrew" ]; then + continue + fi + ;; + esac + if [ -n "$filtered" ]; then + filtered="${filtered}:${dir}" + else + filtered="${dir}" + fi + done + + # Prepend the temp bin directory + echo "${temp_bin}:${filtered}" +} + +# Build a PATH that hides git by creating a symlink directory. +# On macOS, /usr/bin/git is an Xcode CLT shim — we can't just remove /usr/bin. +# Instead, create a temp dir with symlinks to everything in /usr/bin except git. 
+filter_path_no_git() { + local temp_bin="$1" + local no_git_dir="$2" + + mkdir -p "$no_git_dir" + + # Symlink everything from /usr/bin except git + for f in /usr/bin/*; do + local base + base=$(basename "$f") + if [ "$base" = "git" ]; then + continue + fi + ln -sf "$f" "$no_git_dir/$base" 2>/dev/null || true + done + + # Build new PATH replacing /usr/bin with our filtered dir + local filtered="" + local IFS=':' + for dir in $PATH; do + case "$dir" in + /usr/bin) + dir="$no_git_dir" + ;; + esac + # Also skip brew git paths + case "$dir" in + /opt/homebrew/bin|/usr/local/bin) + # These might contain git too; skip them for no-git test + continue + ;; + esac + if [ -n "$filtered" ]; then + filtered="${filtered}:${dir}" + else + filtered="${dir}" + fi + done + + echo "${temp_bin}:${filtered}" +} + +# Build a PATH that hides zsh but keeps brew available. +# For the BREW_NO_ZSH scenario: create filtered copies of /usr/bin and /bin +# that exclude zsh, so forge must install zsh via brew. +filter_path_no_zsh() { + local temp_bin="$1" + local no_zsh_dir="$2" + local no_zsh_bin_dir="${no_zsh_dir}-bin" + + mkdir -p "$no_zsh_dir" + mkdir -p "$no_zsh_bin_dir" + + # Symlink everything from /usr/bin except zsh + for f in /usr/bin/*; do + local base + base=$(basename "$f") + if [ "$base" = "zsh" ]; then + continue + fi + ln -sf "$f" "$no_zsh_dir/$base" 2>/dev/null || true + done + + # Symlink everything from /bin except zsh (macOS has zsh at /bin/zsh too) + for f in /bin/*; do + local base + base=$(basename "$f") + if [ "$base" = "zsh" ]; then + continue + fi + ln -sf "$f" "$no_zsh_bin_dir/$base" 2>/dev/null || true + done + + # Build new PATH: keep brew dirs, replace /usr/bin and /bin with filtered dirs + local filtered="" + local IFS=':' + for dir in $PATH; do + case "$dir" in + /usr/bin) + dir="$no_zsh_dir" + ;; + /bin) + dir="$no_zsh_bin_dir" + ;; + esac + if [ -n "$filtered" ]; then + filtered="${filtered}:${dir}" + else + filtered="${dir}" + fi + done + + echo 
"${temp_bin}:${filtered}" +} + +# ============================================================================= +# Verification function +# ============================================================================= + +# Run verification checks against the current HOME and emit CHECK_* lines. +# Arguments: +# $1 - test_type: "standard" | "no_git" | "preinstalled_all" | "rerun" | +# "partial" | "no_zsh" +# $2 - setup_output: the captured output from forge zsh setup +# $3 - setup_exit: the exit code from forge zsh setup +run_verify_checks() { + local test_type="$1" + local setup_output="$2" + local setup_exit="$3" + + echo "SETUP_EXIT=${setup_exit}" + + # --- Verify zsh binary --- + if command -v zsh > /dev/null 2>&1; then + local zsh_ver + zsh_ver=$(zsh --version 2>&1 | head -1) || zsh_ver="(failed)" + if zsh -c "zmodload zsh/zle && zmodload zsh/datetime && zmodload zsh/stat" > /dev/null 2>&1; then + echo "CHECK_ZSH=PASS ${zsh_ver} (modules OK)" + else + echo "CHECK_ZSH=FAIL ${zsh_ver} (modules broken)" + fi + else + if [ "$test_type" = "no_git" ]; then + echo "CHECK_ZSH=PASS (expected: zsh not needed in ${test_type} test)" + else + echo "CHECK_ZSH=FAIL zsh not found in PATH" + fi + fi + + # --- Verify Oh My Zsh --- + if [ "$test_type" = "no_git" ]; then + echo "CHECK_OMZ_DIR=PASS (expected: partial OMZ in ${test_type} test)" + elif [ -d "$HOME/.oh-my-zsh" ]; then + local omz_ok=true + local omz_detail="dir=OK" + for subdir in custom/plugins themes lib; do + if [ ! 
-d "$HOME/.oh-my-zsh/$subdir" ]; then + omz_ok=false + omz_detail="${omz_detail}, ${subdir}=MISSING" + fi + done + if [ "$omz_ok" = true ]; then + echo "CHECK_OMZ_DIR=PASS ${omz_detail}" + else + echo "CHECK_OMZ_DIR=FAIL ${omz_detail}" + fi + else + if [ "$test_type" = "no_git" ]; then + echo "CHECK_OMZ_DIR=PASS (expected: no OMZ in ${test_type} test)" + else + echo "CHECK_OMZ_DIR=FAIL ~/.oh-my-zsh not found" + fi + fi + + # --- Verify Oh My Zsh defaults in .zshrc --- + if [ -f "$HOME/.zshrc" ]; then + local omz_defaults_ok=true + local omz_defaults_detail="" + if grep -q 'ZSH_THEME=' "$HOME/.zshrc" 2>/dev/null; then + omz_defaults_detail="theme=OK" + else + omz_defaults_ok=false + omz_defaults_detail="theme=MISSING" + fi + if grep -q '^plugins=' "$HOME/.zshrc" 2>/dev/null; then + omz_defaults_detail="${omz_defaults_detail}, plugins=OK" + else + omz_defaults_ok=false + omz_defaults_detail="${omz_defaults_detail}, plugins=MISSING" + fi + if [ "$omz_defaults_ok" = true ]; then + echo "CHECK_OMZ_DEFAULTS=PASS ${omz_defaults_detail}" + else + echo "CHECK_OMZ_DEFAULTS=FAIL ${omz_defaults_detail}" + fi + else + if [ "$test_type" = "no_git" ]; then + echo "CHECK_OMZ_DEFAULTS=PASS (expected: no .zshrc in ${test_type} test)" + else + echo "CHECK_OMZ_DEFAULTS=FAIL ~/.zshrc not found" + fi + fi + + # --- Verify plugins --- + local zsh_custom="${ZSH_CUSTOM:-$HOME/.oh-my-zsh/custom}" + if [ -d "$zsh_custom/plugins/zsh-autosuggestions" ]; then + if ls "$zsh_custom/plugins/zsh-autosuggestions/"*.zsh 1>/dev/null 2>&1; then + echo "CHECK_AUTOSUGGESTIONS=PASS" + else + echo "CHECK_AUTOSUGGESTIONS=FAIL (dir exists but no .zsh files)" + fi + else + if [ "$test_type" = "no_git" ]; then + echo "CHECK_AUTOSUGGESTIONS=PASS (expected: no plugins in ${test_type} test)" + else + echo "CHECK_AUTOSUGGESTIONS=FAIL not installed" + fi + fi + + if [ -d "$zsh_custom/plugins/zsh-syntax-highlighting" ]; then + if ls "$zsh_custom/plugins/zsh-syntax-highlighting/"*.zsh 1>/dev/null 2>&1; then + echo 
"CHECK_SYNTAX_HIGHLIGHTING=PASS" + else + echo "CHECK_SYNTAX_HIGHLIGHTING=FAIL (dir exists but no .zsh files)" + fi + else + if [ "$test_type" = "no_git" ]; then + echo "CHECK_SYNTAX_HIGHLIGHTING=PASS (expected: no plugins in ${test_type} test)" + else + echo "CHECK_SYNTAX_HIGHLIGHTING=FAIL not installed" + fi + fi + + # --- Verify .zshrc forge markers and content --- + if [ -f "$HOME/.zshrc" ]; then + if grep -q '# >>> forge initialize >>>' "$HOME/.zshrc" && \ + grep -q '# <<< forge initialize <<<' "$HOME/.zshrc"; then + echo "CHECK_ZSHRC_MARKERS=PASS" + else + echo "CHECK_ZSHRC_MARKERS=FAIL markers not found" + fi + + if grep -q 'eval "\$(forge zsh plugin)"' "$HOME/.zshrc"; then + echo "CHECK_ZSHRC_PLUGIN=PASS" + else + echo "CHECK_ZSHRC_PLUGIN=FAIL plugin eval not found" + fi + + if grep -q 'eval "\$(forge zsh theme)"' "$HOME/.zshrc"; then + echo "CHECK_ZSHRC_THEME=PASS" + else + echo "CHECK_ZSHRC_THEME=FAIL theme eval not found" + fi + + if grep -q 'NERD_FONT=0' "$HOME/.zshrc"; then + echo "CHECK_NO_NERD_FONT_DISABLE=FAIL (NERD_FONT=0 found in non-interactive mode)" + else + echo "CHECK_NO_NERD_FONT_DISABLE=PASS" + fi + + if grep -q 'FORGE_EDITOR' "$HOME/.zshrc"; then + echo "CHECK_NO_FORGE_EDITOR=FAIL (FORGE_EDITOR found in non-interactive mode)" + else + echo "CHECK_NO_FORGE_EDITOR=PASS" + fi + + # Check marker uniqueness (idempotency) + local start_count + local end_count + start_count=$(grep -c '# >>> forge initialize >>>' "$HOME/.zshrc" 2>/dev/null || echo "0") + end_count=$(grep -c '# <<< forge initialize <<<' "$HOME/.zshrc" 2>/dev/null || echo "0") + if [ "$start_count" -eq 1 ] && [ "$end_count" -eq 1 ]; then + echo "CHECK_MARKER_UNIQUE=PASS" + else + echo "CHECK_MARKER_UNIQUE=FAIL (start=${start_count}, end=${end_count})" + fi + else + if [ "$test_type" = "no_git" ]; then + echo "CHECK_ZSHRC_MARKERS=PASS (expected: no .zshrc in ${test_type} test)" + echo "CHECK_ZSHRC_PLUGIN=PASS (expected: no .zshrc in ${test_type} test)" + echo "CHECK_ZSHRC_THEME=PASS 
(expected: no .zshrc in ${test_type} test)" + echo "CHECK_NO_NERD_FONT_DISABLE=PASS (expected: no .zshrc in ${test_type} test)" + echo "CHECK_NO_FORGE_EDITOR=PASS (expected: no .zshrc in ${test_type} test)" + echo "CHECK_MARKER_UNIQUE=PASS (expected: no .zshrc in ${test_type} test)" + else + echo "CHECK_ZSHRC_MARKERS=FAIL no .zshrc" + echo "CHECK_ZSHRC_PLUGIN=FAIL no .zshrc" + echo "CHECK_ZSHRC_THEME=FAIL no .zshrc" + echo "CHECK_NO_NERD_FONT_DISABLE=FAIL no .zshrc" + echo "CHECK_NO_FORGE_EDITOR=FAIL no .zshrc" + echo "CHECK_MARKER_UNIQUE=FAIL no .zshrc" + fi + fi + + # --- Check if forge zsh setup's own doctor run failed --- + # forge zsh setup runs doctor internally. Even if our independent doctor call + # succeeds (different environment), we must detect if setup's doctor failed. + if [ "$test_type" != "no_git" ]; then + if echo "$setup_output" | grep -qi "forge zsh doctor failed"; then + echo "CHECK_SETUP_DOCTOR=FAIL (setup reported doctor failure)" + else + echo "CHECK_SETUP_DOCTOR=PASS" + fi + fi + + # --- Run forge zsh doctor --- + local doctor_output + local doctor_exit=0 + doctor_output=$(forge zsh doctor 2>&1) || doctor_exit=$? 
+ if [ "$test_type" = "no_git" ]; then + echo "CHECK_DOCTOR_EXIT=PASS (skipped for ${test_type} test)" + else + if [ $doctor_exit -eq 0 ]; then + echo "CHECK_DOCTOR_EXIT=PASS (exit=0)" + else + echo "CHECK_DOCTOR_EXIT=FAIL (exit=${doctor_exit})" + fi + fi + + # --- Verify output format --- + local output_ok=true + local output_detail="" + + if echo "$setup_output" | grep -qi "found\|not found\|installed\|Detecting"; then + output_detail="detect=OK" + else + output_ok=false + output_detail="detect=MISSING" + fi + + if [ "$test_type" = "no_git" ]; then + if echo "$setup_output" | grep -qi "git is required"; then + output_detail="${output_detail}, git_error=OK" + else + output_ok=false + output_detail="${output_detail}, git_error=MISSING" + fi + echo "CHECK_OUTPUT_FORMAT=PASS ${output_detail}" + elif [ "$test_type" = "no_zsh" ]; then + if echo "$setup_output" | grep -qi "zsh not found\|zsh.*not found"; then + output_detail="${output_detail}, zsh_detect=OK" + else + output_ok=false + output_detail="${output_detail}, zsh_detect=MISSING" + fi + echo "CHECK_OUTPUT_FORMAT=PASS ${output_detail}" + else + if echo "$setup_output" | grep -qi "Setup complete\|complete"; then + output_detail="${output_detail}, complete=OK" + else + output_ok=false + output_detail="${output_detail}, complete=MISSING" + fi + + if echo "$setup_output" | grep -qi "Configuring\|configured\|forge plugins"; then + output_detail="${output_detail}, configure=OK" + else + output_ok=false + output_detail="${output_detail}, configure=MISSING" + fi + + if [ "$output_ok" = true ]; then + echo "CHECK_OUTPUT_FORMAT=PASS ${output_detail}" + else + echo "CHECK_OUTPUT_FORMAT=FAIL ${output_detail}" + fi + fi + + # --- Edge-case-specific checks --- + case "$test_type" in + preinstalled_all) + if echo "$setup_output" | grep -qi "All dependencies already installed"; then + echo "CHECK_EDGE_ALL_PRESENT=PASS" + else + echo "CHECK_EDGE_ALL_PRESENT=FAIL (should show all deps installed)" + fi + if echo "$setup_output" | 
grep -qi "The following will be installed"; then + echo "CHECK_EDGE_NO_INSTALL=FAIL (should not install anything)" + else + echo "CHECK_EDGE_NO_INSTALL=PASS (correctly skipped installation)" + fi + ;; + no_git) + if echo "$setup_output" | grep -qi "git is required"; then + echo "CHECK_EDGE_NO_GIT=PASS" + else + echo "CHECK_EDGE_NO_GIT=FAIL (should show git required error)" + fi + if [ "$setup_exit" -eq 0 ]; then + echo "CHECK_EDGE_NO_GIT_EXIT=PASS (exit=0, graceful)" + else + echo "CHECK_EDGE_NO_GIT_EXIT=FAIL (exit=${setup_exit}, should be 0)" + fi + ;; + no_zsh) + # When zsh is hidden from PATH but brew is available, forge should install zsh via brew + if echo "$setup_output" | grep -qi "zsh not found\|zsh.*not found"; then + echo "CHECK_EDGE_NO_ZSH=PASS (correctly detects zsh missing and installs via brew)" + else + echo "CHECK_EDGE_NO_ZSH=FAIL (should detect zsh not found)" + fi + ;; + rerun) + # Already verified marker uniqueness above. Check second-run specifics later. + ;; + partial) + if echo "$setup_output" | grep -qi "zsh-autosuggestions\|zsh-syntax-highlighting"; then + echo "CHECK_EDGE_PARTIAL_PLUGINS=PASS (plugins in install plan)" + else + echo "CHECK_EDGE_PARTIAL_PLUGINS=FAIL (plugins not mentioned)" + fi + local install_plan + install_plan=$(echo "$setup_output" | sed -n '/The following will be installed/,/^$/p' 2>/dev/null || echo "") + if [ -n "$install_plan" ]; then + if echo "$install_plan" | grep -qi "zsh (shell)\|Oh My Zsh"; then + echo "CHECK_EDGE_PARTIAL_NO_ZSH=FAIL (should not install zsh/OMZ)" + else + echo "CHECK_EDGE_PARTIAL_NO_ZSH=PASS (correctly skips zsh/OMZ)" + fi + else + echo "CHECK_EDGE_PARTIAL_NO_ZSH=PASS (no install plan = nothing to install)" + fi + ;; + esac + + # --- Emit raw output for debugging --- + echo "OUTPUT_BEGIN" + echo "$setup_output" + echo "OUTPUT_END" +} + +# ============================================================================= +# Result evaluation +# 
# =============================================================================

# Parse CHECK_* protocol lines from a verification run and summarize them.
#
# Arguments:
#   $1 - captured output containing CHECK_<NAME>=PASS/FAIL lines
# Output:
#   First line is "PASS" when every CHECK_* line passed, otherwise "FAIL"
#   followed by one indented line per failing CHECK_* entry.
# Notes:
#   Failures are accumulated with literal newlines and emitted via
#   printf '%s' rather than echo -e, so backslash sequences inside check
#   detail text (e.g. Windows paths) are passed through verbatim and no
#   spurious trailing blank line is produced.
parse_check_lines() {
  local output="$1"
  local all_pass=true
  local fail_details=""

  while IFS= read -r line; do
    case "$line" in
      CHECK_*=PASS*)
        ;;
      CHECK_*=FAIL*)
        all_pass=false
        # $'\n' appends a real newline; see printf note above.
        fail_details="${fail_details}  ${line}"$'\n'
        ;;
    esac
  done <<< "$output"

  if [ "$all_pass" = true ]; then
    echo "PASS"
  else
    echo "FAIL"
    printf '%s' "$fail_details"
  fi
}

# =============================================================================
# Pre-setup helpers for edge cases
# =============================================================================

# Pre-install Oh My Zsh into the current HOME (for preinstalled_all and
# partial tests). Best-effort: network or installer failures are ignored so
# the scenario itself can still report what forge does with a partial state.
preinstall_omz() {
  local script_url="https://raw.githubusercontent.com/ohmyzsh/ohmyzsh/master/tools/install.sh"
  sh -c "$(curl -fsSL "$script_url")" "" --unattended > /dev/null 2>&1 || true
}

# Pre-install zsh plugins into the current HOME. Honors $ZSH_CUSTOM like
# Oh My Zsh itself does; clone failures (offline, already present) are ignored.
preinstall_plugins() {
  local zsh_custom="${ZSH_CUSTOM:-$HOME/.oh-my-zsh/custom}"
  git clone --quiet https://github.com/zsh-users/zsh-autosuggestions.git \
    "$zsh_custom/plugins/zsh-autosuggestions" 2>/dev/null || true
  git clone --quiet https://github.com/zsh-users/zsh-syntax-highlighting.git \
    "$zsh_custom/plugins/zsh-syntax-highlighting" 2>/dev/null || true
}

# =============================================================================
# Test execution
# =============================================================================

# Run a single test scenario.
+# Arguments: +# $1 - scenario entry string ("id|label|brew_mode|test_type") +run_single_test() { + local entry="$1" + IFS='|' read -r scenario_id label brew_mode test_type <<< "$entry" + + local safe_label + safe_label=$(echo "$label" | tr '[:upper:]' '[:lower:]' | tr ' /' '_-' | tr -cd '[:alnum:]_-') + local result_file="$RESULTS_DIR/${safe_label}.result" + local output_file="$RESULTS_DIR/${safe_label}.output" + + local binary_path="$PROJECT_ROOT/target/${BUILD_TARGET}/debug/forge" + + # Check binary exists + if [ ! -f "$binary_path" ]; then + cat > "$result_file" </dev/null || true + fi + ;; + partial) + # Pre-install OMZ only (no plugins) + preinstall_omz + ;; + esac + + # Run forge zsh setup + local setup_output="" + local setup_exit=0 + setup_output=$(PATH="$test_path" HOME="$temp_home" NO_COLOR=1 FORGE_EDITOR=vi forge zsh setup --non-interactive 2>&1) || setup_exit=$? + + # Run verification + local verify_output + verify_output=$(PATH="$test_path" HOME="$temp_home" FORGE_EDITOR=vi run_verify_checks "$test_type" "$setup_output" "$setup_exit" 2>&1) || true + + # Handle rerun scenario: run forge a second time + if [ "$test_type" = "rerun" ]; then + # Update PATH to include ~/.local/bin for GitHub-installed tools + local rerun_path="${temp_home}/.local/bin:${test_path}" + local rerun_output="" + local rerun_exit=0 + rerun_output=$(PATH="$rerun_path" HOME="$temp_home" NO_COLOR=1 FORGE_EDITOR=vi forge zsh setup --non-interactive 2>&1) || rerun_exit=$? + + if [ "$rerun_exit" -eq 0 ]; then + verify_output="${verify_output} +CHECK_EDGE_RERUN_EXIT=PASS" + else + verify_output="${verify_output} +CHECK_EDGE_RERUN_EXIT=FAIL (exit=${rerun_exit})" + fi + + if echo "$rerun_output" | grep -qi "All dependencies already installed"; then + verify_output="${verify_output} +CHECK_EDGE_RERUN_SKIP=PASS" + elif [ "$brew_mode" = "no_brew" ]; then + # Without brew, fzf/bat/fd can't install, so forge will still try to + # install them on re-run. 
Verify the core components (OMZ + plugins) are + # detected as already present — that's the idempotency we care about. + if echo "$rerun_output" | grep -qi "Oh My Zsh installed" && \ + echo "$rerun_output" | grep -qi "zsh-autosuggestions installed" && \ + echo "$rerun_output" | grep -qi "zsh-syntax-highlighting installed"; then + verify_output="${verify_output} +CHECK_EDGE_RERUN_SKIP=PASS (core deps detected; tools skipped due to no brew)" + else + verify_output="${verify_output} +CHECK_EDGE_RERUN_SKIP=FAIL (core deps not detected on re-run without brew)" + fi + else + verify_output="${verify_output} +CHECK_EDGE_RERUN_SKIP=FAIL (second run should skip installs)" + fi + + # Check marker uniqueness after re-run + if [ -f "$temp_home/.zshrc" ]; then + local start_count + start_count=$(grep -c '# >>> forge initialize >>>' "$temp_home/.zshrc" 2>/dev/null || echo "0") + if [ "$start_count" -eq 1 ]; then + verify_output="${verify_output} +CHECK_EDGE_RERUN_MARKERS=PASS (still exactly 1 marker set)" + else + verify_output="${verify_output} +CHECK_EDGE_RERUN_MARKERS=FAIL (found ${start_count} marker sets)" + fi + else + verify_output="${verify_output} +CHECK_EDGE_RERUN_MARKERS=FAIL (no .zshrc after re-run)" + fi + + # Append second run output for debugging + verify_output="${verify_output} +OUTPUT_BEGIN +===== SECOND RUN (idempotency check) ===== +${rerun_output} +========================================== +OUTPUT_END" + fi + + # Restore HOME + export HOME="$saved_home" + + # Parse SETUP_EXIT + local parsed_setup_exit + parsed_setup_exit=$(grep '^SETUP_EXIT=' <<< "$verify_output" | head -1 | cut -d= -f2) + + # Evaluate CHECK lines + local eval_result + eval_result=$(parse_check_lines "$verify_output") + local status + local details + status=$(head -1 <<< "$eval_result") + details=$(tail -n +2 <<< "$eval_result") + + # Check setup exit code + if [ -n "$parsed_setup_exit" ] && [ "$parsed_setup_exit" != "0" ] && \ + [ "$test_type" != "no_git" ]; then + status="FAIL" + 
details="${details} SETUP_EXIT=${parsed_setup_exit} (expected 0)\n" + fi + + # Write result + cat > "$result_file" < "$output_file" + + # Cleanup temp HOME unless --no-cleanup + if [ "$NO_CLEANUP" = false ]; then + rm -rf "$temp_home" + else + # Copy diagnostic files into RESULTS_DIR for artifact upload + local diag_dir="$RESULTS_DIR/${safe_label}-home" + mkdir -p "$diag_dir" + # Copy key files that help debug failures + cp "$temp_home/.zshrc" "$diag_dir/zshrc" 2>/dev/null || true + cp -r "$temp_home/.oh-my-zsh/custom/plugins" "$diag_dir/omz-plugins" 2>/dev/null || true + ls -la "$temp_home/" > "$diag_dir/home-listing.txt" 2>/dev/null || true + ls -la "$temp_home/.oh-my-zsh/" > "$diag_dir/omz-listing.txt" 2>/dev/null || true + ls -la "$temp_home/.local/bin/" > "$diag_dir/local-bin-listing.txt" 2>/dev/null || true + # Save the PATH that was used + echo "$test_path" > "$diag_dir/test-path.txt" 2>/dev/null || true + log_info "Diagnostics saved to: ${diag_dir}" + # Still remove the temp HOME itself (diagnostics are in RESULTS_DIR now) + rm -rf "$temp_home" + fi +} + +# ============================================================================= +# Result collection and reporting +# ============================================================================= + +collect_test_results() { + log_header "Results" + + local has_results=false + if [ -d "$RESULTS_DIR" ]; then + for f in "$RESULTS_DIR"/*.result; do + if [ -f "$f" ]; then + has_results=true + break + fi + done + fi + + if [ "$has_results" = false ]; then + log_skip "No test results found" + return + fi + + for result_file in "$RESULTS_DIR"/*.result; do + [ -f "$result_file" ] || continue + local status + status=$(grep '^STATUS:' "$result_file" | head -1 | awk '{print $2}' || echo "UNKNOWN") + local label + label=$(grep '^LABEL:' "$result_file" | head -1 | sed 's/^LABEL: //' || echo "(unknown test)") + + case "$status" in + PASS) + log_pass "$label" + ;; + FAIL) + log_fail "$label" + local details + 
details=$(grep '^DETAILS:' "$result_file" | head -1 | sed 's/^DETAILS: //' || true) + if [ -n "$details" ] && [ "$details" != " " ]; then + echo -e " ${DIM}${details}${NC}" + fi + # Show failing CHECK lines from output file + local output_file="${result_file%.result}.output" + if [ -f "$output_file" ]; then + grep 'CHECK_.*=FAIL' "$output_file" 2>/dev/null | while read -r line; do + echo -e " ${RED}${line}${NC}" + done || true + fi + ;; + *) + log_skip "$label" + ;; + esac + done +} + +print_report() { + echo "" + echo -e "${BOLD}================================================================${NC}" + local total=$((PASS + FAIL + SKIP)) + if [ "$FAIL" -eq 0 ]; then + echo -e "${GREEN}${BOLD} RESULTS: ${PASS} passed, ${FAIL} failed, ${SKIP} skipped (${total} total)${NC}" + else + echo -e "${RED}${BOLD} RESULTS: ${PASS} passed, ${FAIL} failed, ${SKIP} skipped (${total} total)${NC}" + fi + echo -e "${BOLD}================================================================${NC}" + + if [ ${#FAILURES[@]} -gt 0 ]; then + echo "" + echo -e "${RED}${BOLD}Failed tests:${NC}" + for f in "${FAILURES[@]}"; do + echo -e " ${RED}* ${f}${NC}" + done + fi + + if [ "$NO_CLEANUP" = true ] && [ -n "$RESULTS_DIR" ] && [ -d "$RESULTS_DIR" ]; then + echo "" + echo -e " ${DIM}Results preserved: ${RESULTS_DIR}${NC}" + fi +} + +# ============================================================================= +# Test orchestrator +# ============================================================================= + +run_tests() { + # Create results directory — use a known path for CI artifact upload + if [ "$NO_CLEANUP" = true ]; then + RESULTS_DIR="$PROJECT_ROOT/test-results-macos" + rm -rf "$RESULTS_DIR" + mkdir -p "$RESULTS_DIR" + else + RESULTS_DIR=$(mktemp -d) + fi + + # Build binary + log_header "Phase 2: Build Binary" + if ! build_binary; then + echo "Error: Build failed. Cannot continue without binary." 
>&2 + exit 1 + fi + + log_header "Phase 3: macOS E2E Tests" + log_info "Results dir: ${RESULTS_DIR}" + log_info "Build target: ${BUILD_TARGET}" + log_info "Homebrew: ${BREW_PREFIX:-not found}" + echo "" + + # Run each scenario sequentially + for entry in "${SCENARIOS[@]}"; do + IFS='|' read -r _id label brew_mode _test_type <<< "$entry" + + # Apply filter + if [ -n "$FILTER_PATTERN" ] && ! echo "$label" | grep -qiE "$FILTER_PATTERN"; then + continue + fi + if [ -n "$EXCLUDE_PATTERN" ] && echo "$label" | grep -qiE "$EXCLUDE_PATTERN"; then + continue + fi + + # Skip brew tests if brew is not installed + if [ "$brew_mode" = "with_brew" ] && [ -z "$BREW_PREFIX" ]; then + log_skip "${label} (Homebrew not installed)" + continue + fi + + if [ "$DRY_RUN" = true ]; then + log_info "[dry-run] Would run: ${label}" + continue + fi + + log_info "Running: ${label}..." + run_single_test "$entry" + done + + # Collect and display results + if [ "$DRY_RUN" = false ]; then + collect_test_results + fi +} + +# ============================================================================= +# Main +# ============================================================================= + +main() { + parse_args "$@" + + echo -e "${BOLD}${BLUE}Forge ZSH Setup - macOS E2E Test Suite${NC}" + echo "" + + run_static_checks + + if [ "$MODE" = "quick" ]; then + echo "" + print_report + if [ "$FAIL" -gt 0 ]; then + exit 1 + fi + exit 0 + fi + + run_tests + + echo "" + print_report + + # Cleanup results dir unless --no-cleanup + if [ "$NO_CLEANUP" = false ] && [ -n "$RESULTS_DIR" ] && [ -d "$RESULTS_DIR" ]; then + rm -rf "$RESULTS_DIR" + fi + + if [ "$FAIL" -gt 0 ]; then + exit 1 + fi + exit 0 +} + +main "$@" diff --git a/crates/forge_ci/tests/scripts/test-zsh-setup-windows.sh b/crates/forge_ci/tests/scripts/test-zsh-setup-windows.sh new file mode 100644 index 0000000000..5cdcc2355c --- /dev/null +++ b/crates/forge_ci/tests/scripts/test-zsh-setup-windows.sh @@ -0,0 +1,1042 @@ +#!/bin/bash +# 
============================================================================= +# Windows/Git Bash-native E2E test suite for `forge zsh setup` +# +# Tests the complete zsh setup flow natively on Windows using Git Bash with +# temp HOME directory isolation. Covers dependency detection, MSYS2 package +# download + zsh installation, Oh My Zsh + plugin installation, .bashrc +# auto-start configuration (Windows-specific), .zshrc forge marker +# configuration, and doctor diagnostics. +# +# Unlike the Linux test suite (test-zsh-setup.sh) which uses Docker containers, +# and the macOS suite (test-zsh-setup-macos.sh) which runs natively on macOS, +# this script runs directly on Windows inside Git Bash with HOME directory +# isolation. Each test scenario gets a fresh temp HOME to prevent state leakage. +# +# Build targets (auto-detected from architecture): +# - x86_64-pc-windows-msvc (x86_64 runners) +# - aarch64-pc-windows-msvc (ARM64 runners) +# +# Prerequisites: +# - Windows with Git Bash (Git for Windows) +# - Rust toolchain +# - Network access (MSYS2 repo, GitHub for Oh My Zsh + plugins) +# +# Usage: +# bash crates/forge_ci/tests/scripts/test-zsh-setup-windows.sh # build + test all +# bash crates/forge_ci/tests/scripts/test-zsh-setup-windows.sh --quick # shellcheck only +# bash crates/forge_ci/tests/scripts/test-zsh-setup-windows.sh --filter "fresh" # run only matching +# bash crates/forge_ci/tests/scripts/test-zsh-setup-windows.sh --skip-build # skip build, use existing +# bash crates/forge_ci/tests/scripts/test-zsh-setup-windows.sh --no-cleanup # keep temp dirs +# bash crates/forge_ci/tests/scripts/test-zsh-setup-windows.sh --dry-run # show plan, don't run +# bash crates/forge_ci/tests/scripts/test-zsh-setup-windows.sh --list # list scenarios and exit +# bash crates/forge_ci/tests/scripts/test-zsh-setup-windows.sh --help # show usage +# +# Relationship to sibling test suites: +# test-zsh-setup.sh — Docker-based E2E tests for Linux distros +# test-zsh-setup-macos.sh — 
Native E2E tests for macOS +# test-zsh-setup-windows.sh — Native E2E tests for Windows/Git Bash (this file) +# All three use the same CHECK_* line protocol for verification. +# ============================================================================= + +set -euo pipefail + +# ============================================================================= +# Platform guard +# ============================================================================= + +case "$(uname -s)" in + MINGW*|MSYS*) ;; # OK — Git Bash / MSYS2 + *) + echo "Error: This script must be run in Git Bash on Windows." >&2 + echo "For Linux testing, use test-zsh-setup.sh (Docker-based)." >&2 + echo "For macOS testing, use test-zsh-setup-macos.sh." >&2 + exit 1 + ;; +esac + +# ============================================================================= +# Constants +# ============================================================================= + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +readonly SCRIPT_DIR + +PROJECT_ROOT="$(cd "$SCRIPT_DIR/../../../.." 
&& pwd)" +readonly PROJECT_ROOT + +readonly RED='\033[0;31m' +readonly GREEN='\033[0;32m' +readonly YELLOW='\033[1;33m' +readonly BLUE='\033[0;34m' +readonly BOLD='\033[1m' +readonly DIM='\033[2m' +readonly NC='\033[0m' + +readonly SHELLCHECK_EXCLUSIONS="SC2155,SC2086,SC1090,SC2034,SC2181,SC2016,SC2162" + +# Detect architecture and select build target +case "$(uname -m)" in + x86_64|AMD64) + BUILD_TARGET="x86_64-pc-windows-msvc" + ;; + aarch64|arm64|ARM64) + BUILD_TARGET="aarch64-pc-windows-msvc" + ;; + *) + echo "Error: Unsupported architecture: $(uname -m)" >&2 + exit 1 + ;; +esac +readonly BUILD_TARGET + +# ============================================================================= +# Test scenarios +# ============================================================================= + +# Format: "scenario_id|label|test_type" +# scenario_id - unique identifier +# label - human-readable name +# test_type - "standard", "preinstalled_all", "rerun", "partial" +# +# NOTE: Unlike the Linux/macOS test suites, there is NO "no_git" scenario here. +# On Windows, forge.exe is a native MSVC binary that resolves git through Windows +# PATH resolution (CreateProcessW, where.exe, etc.), not bash PATH. Hiding git +# by filtering the bash PATH or renaming binaries is fundamentally unreliable +# because Git for Windows installs in multiple locations (/usr/bin, /mingw64/bin, +# C:\Program Files\Git\cmd, etc.) and Windows system PATH entries bypass bash. +# The no-git early-exit logic is platform-independent and tested on Linux/macOS. 
+readonly SCENARIOS=( + # Standard fresh install — the primary happy path + "FRESH|Fresh install (Git Bash)|standard" + + # Pre-installed everything — verify fast path (two-pass approach) + "PREINSTALLED_ALL|Pre-installed everything (fast path)|preinstalled_all" + + # Re-run idempotency — verify no duplicate markers + "RERUN|Re-run idempotency|rerun" + + # Partial install — only plugins missing + "PARTIAL|Partial install (only plugins missing)|partial" +) + +# ============================================================================= +# Runtime state +# ============================================================================= + +PASS=0 +FAIL=0 +SKIP=0 +FAILURES=() + +# CLI options +MODE="full" +FILTER_PATTERN="" +EXCLUDE_PATTERN="" +NO_CLEANUP=false +SKIP_BUILD=false +DRY_RUN=false + +# Shared temp paths +RESULTS_DIR="" +REAL_HOME="$HOME" + +# ============================================================================= +# Logging helpers +# ============================================================================= + +log_header() { echo -e "\n${BOLD}${BLUE}$1${NC}"; } +log_pass() { echo -e " ${GREEN}PASS${NC} $1"; PASS=$((PASS + 1)); } +log_fail() { echo -e " ${RED}FAIL${NC} $1"; FAIL=$((FAIL + 1)); FAILURES+=("$1"); } +log_skip() { echo -e " ${YELLOW}SKIP${NC} $1"; SKIP=$((SKIP + 1)); } +log_info() { echo -e " ${DIM}$1${NC}"; } + +# ============================================================================= +# Argument parsing +# ============================================================================= + +print_usage() { + cat < Run only scenarios whose label matches (grep -iE) + --exclude Skip scenarios whose label matches (grep -iE) + --skip-build Skip binary build, use existing binary + --no-cleanup Keep temp directories and results after tests + --dry-run Show what would be tested without running anything + --list List all test scenarios and exit + --help Show this help message + +Notes: + - This script runs natively in Git Bash on Windows (no 
Docker). + - The FRESH scenario downloads MSYS2 packages (zsh, ncurses, etc.) and + installs zsh into the Git Bash /usr tree. This requires network access + and may need administrator privileges. + - Each test scenario uses an isolated temp HOME directory. + - On CI runners (GitHub Actions windows-latest), administrator access is + typically available by default. +EOF +} + +parse_args() { + while [ $# -gt 0 ]; do + case "$1" in + --quick) + MODE="quick" + shift + ;; + --filter) + FILTER_PATTERN="${2:?--filter requires a pattern}" + shift 2 + ;; + --exclude) + EXCLUDE_PATTERN="${2:?--exclude requires a pattern}" + shift 2 + ;; + --skip-build) + SKIP_BUILD=true + shift + ;; + --no-cleanup) + NO_CLEANUP=true + shift + ;; + --dry-run) + DRY_RUN=true + shift + ;; + --list) + list_scenarios + exit 0 + ;; + --help|-h) + print_usage + exit 0 + ;; + *) + echo "Unknown option: $1" >&2 + print_usage >&2 + exit 1 + ;; + esac + done +} + +list_scenarios() { + echo -e "${BOLD}Build Target:${NC}" + printf " %-55s %s\n" "$BUILD_TARGET" "$(uname -m)" + + echo -e "\n${BOLD}Test Scenarios:${NC}" + local idx=0 + for entry in "${SCENARIOS[@]}"; do + idx=$((idx + 1)) + IFS='|' read -r _id label test_type <<< "$entry" + printf " %2d. %-55s %s\n" "$idx" "$label" "$test_type" + done +} + +# ============================================================================= +# Build binary +# ============================================================================= + +build_binary() { + local binary_path="$PROJECT_ROOT/target/${BUILD_TARGET}/debug/forge.exe" + + if [ "$SKIP_BUILD" = true ] && [ -f "$binary_path" ]; then + log_info "Skipping build for ${BUILD_TARGET} (binary exists)" + return 0 + fi + + # Ensure target is installed + if ! rustup target list --installed 2>/dev/null | grep -q "$BUILD_TARGET"; then + log_info "$(uname -m)" + log_info "Adding Rust target ${BUILD_TARGET}..." 
+ rustup target add "$BUILD_TARGET" 2>/dev/null || true + fi + + log_info "Building ${BUILD_TARGET} with cargo (debug)..." + if ! cargo build --target "$BUILD_TARGET" 2>"$RESULTS_DIR/build-${BUILD_TARGET}.log"; then + log_fail "Build failed for ${BUILD_TARGET}" + log_info "Build log: $RESULTS_DIR/build-${BUILD_TARGET}.log" + echo "" + echo "===== Full build log =====" + cat "$RESULTS_DIR/build-${BUILD_TARGET}.log" 2>/dev/null || echo "Log file not found" + echo "==========================" + echo "" + return 1 + fi + + if [ -f "$binary_path" ]; then + log_pass "Built ${BUILD_TARGET} -> $(du -h "$binary_path" | cut -f1)" + return 0 + else + log_fail "Binary not found after build: ${binary_path}" + return 1 + fi +} + +# ============================================================================= +# Static analysis +# ============================================================================= + +run_static_checks() { + log_header "Phase 1: Static Analysis" + + if bash -n "${BASH_SOURCE[0]}" 2>/dev/null; then + log_pass "bash -n syntax check" + else + log_fail "bash -n syntax check" + fi + + if command -v shellcheck > /dev/null 2>&1; then + if shellcheck -x -e "$SHELLCHECK_EXCLUSIONS" "${BASH_SOURCE[0]}" 2>/dev/null; then + log_pass "shellcheck (excluding $SHELLCHECK_EXCLUSIONS)" + else + log_fail "shellcheck (excluding $SHELLCHECK_EXCLUSIONS)" + fi + else + log_skip "shellcheck (not installed)" + fi +} + +# ============================================================================= +# Verification function +# ============================================================================= + +# Run verification checks against the current HOME and emit CHECK_* lines. 
+# Arguments: +# $1 - test_type: "standard" | "preinstalled_all" | "rerun" | "partial" +# $2 - setup_output: the captured output from forge zsh setup +# $3 - setup_exit: the exit code from forge zsh setup +run_verify_checks() { + local test_type="$1" + local setup_output="$2" + local setup_exit="$3" + + echo "SETUP_EXIT=${setup_exit}" + + # --- Verify zsh binary --- + if [ -f "/usr/bin/zsh.exe" ] || command -v zsh > /dev/null 2>&1; then + local zsh_ver + zsh_ver=$(zsh --version 2>&1 | head -1) || zsh_ver="(failed)" + if zsh -c "zmodload zsh/zle && zmodload zsh/datetime && zmodload zsh/stat" > /dev/null 2>&1; then + echo "CHECK_ZSH=PASS ${zsh_ver} (modules OK)" + else + echo "CHECK_ZSH=FAIL ${zsh_ver} (modules broken)" + fi + else + echo "CHECK_ZSH=FAIL zsh not found in PATH or /usr/bin/zsh.exe" + fi + + # --- Verify zsh.exe is in /usr/bin (Windows-specific) --- + if [ -f "/usr/bin/zsh.exe" ]; then + echo "CHECK_ZSH_EXE_LOCATION=PASS" + else + echo "CHECK_ZSH_EXE_LOCATION=FAIL (/usr/bin/zsh.exe not found)" + fi + + # --- Verify Oh My Zsh --- + if [ -d "$HOME/.oh-my-zsh" ]; then + local omz_ok=true + local omz_detail="dir=OK" + for subdir in custom/plugins themes lib; do + if [ ! 
-d "$HOME/.oh-my-zsh/$subdir" ]; then + omz_ok=false + omz_detail="${omz_detail}, ${subdir}=MISSING" + fi + done + if [ "$omz_ok" = true ]; then + echo "CHECK_OMZ_DIR=PASS ${omz_detail}" + else + echo "CHECK_OMZ_DIR=FAIL ${omz_detail}" + fi + else + echo "CHECK_OMZ_DIR=FAIL ~/.oh-my-zsh not found" + fi + + # --- Verify Oh My Zsh defaults in .zshrc --- + if [ -f "$HOME/.zshrc" ]; then + local omz_defaults_ok=true + local omz_defaults_detail="" + if grep -q 'ZSH_THEME=' "$HOME/.zshrc" 2>/dev/null; then + omz_defaults_detail="theme=OK" + else + omz_defaults_ok=false + omz_defaults_detail="theme=MISSING" + fi + if grep -q '^plugins=' "$HOME/.zshrc" 2>/dev/null; then + omz_defaults_detail="${omz_defaults_detail}, plugins=OK" + else + omz_defaults_ok=false + omz_defaults_detail="${omz_defaults_detail}, plugins=MISSING" + fi + if [ "$omz_defaults_ok" = true ]; then + echo "CHECK_OMZ_DEFAULTS=PASS ${omz_defaults_detail}" + else + echo "CHECK_OMZ_DEFAULTS=FAIL ${omz_defaults_detail}" + fi + else + echo "CHECK_OMZ_DEFAULTS=FAIL ~/.zshrc not found" + fi + + # --- Verify plugins --- + local zsh_custom="${ZSH_CUSTOM:-$HOME/.oh-my-zsh/custom}" + if [ -d "$zsh_custom/plugins/zsh-autosuggestions" ]; then + if ls "$zsh_custom/plugins/zsh-autosuggestions/"*.zsh 1>/dev/null 2>&1; then + echo "CHECK_AUTOSUGGESTIONS=PASS" + else + echo "CHECK_AUTOSUGGESTIONS=FAIL (dir exists but no .zsh files)" + fi + else + echo "CHECK_AUTOSUGGESTIONS=FAIL not installed" + fi + + if [ -d "$zsh_custom/plugins/zsh-syntax-highlighting" ]; then + if ls "$zsh_custom/plugins/zsh-syntax-highlighting/"*.zsh 1>/dev/null 2>&1; then + echo "CHECK_SYNTAX_HIGHLIGHTING=PASS" + else + echo "CHECK_SYNTAX_HIGHLIGHTING=FAIL (dir exists but no .zsh files)" + fi + else + echo "CHECK_SYNTAX_HIGHLIGHTING=FAIL not installed" + fi + + # --- Verify .zshrc forge markers and content --- + if [ -f "$HOME/.zshrc" ]; then + if grep -q '# >>> forge initialize >>>' "$HOME/.zshrc" && \ + grep -q '# <<< forge initialize <<<' 
"$HOME/.zshrc"; then + echo "CHECK_ZSHRC_MARKERS=PASS" + else + echo "CHECK_ZSHRC_MARKERS=FAIL markers not found" + fi + + if grep -q 'eval "\$(forge zsh plugin)"' "$HOME/.zshrc"; then + echo "CHECK_ZSHRC_PLUGIN=PASS" + else + echo "CHECK_ZSHRC_PLUGIN=FAIL plugin eval not found" + fi + + if grep -q 'eval "\$(forge zsh theme)"' "$HOME/.zshrc"; then + echo "CHECK_ZSHRC_THEME=PASS" + else + echo "CHECK_ZSHRC_THEME=FAIL theme eval not found" + fi + + if grep -q 'NERD_FONT=0' "$HOME/.zshrc"; then + echo "CHECK_NO_NERD_FONT_DISABLE=FAIL (NERD_FONT=0 found in non-interactive mode)" + else + echo "CHECK_NO_NERD_FONT_DISABLE=PASS" + fi + + if grep -q 'FORGE_EDITOR' "$HOME/.zshrc"; then + echo "CHECK_NO_FORGE_EDITOR=FAIL (FORGE_EDITOR found in non-interactive mode)" + else + echo "CHECK_NO_FORGE_EDITOR=PASS" + fi + + # Check marker uniqueness (idempotency) + local start_count + local end_count + start_count=$(grep -c '# >>> forge initialize >>>' "$HOME/.zshrc" 2>/dev/null || echo "0") + end_count=$(grep -c '# <<< forge initialize <<<' "$HOME/.zshrc" 2>/dev/null || echo "0") + if [ "$start_count" -eq 1 ] && [ "$end_count" -eq 1 ]; then + echo "CHECK_MARKER_UNIQUE=PASS" + else + echo "CHECK_MARKER_UNIQUE=FAIL (start=${start_count}, end=${end_count})" + fi + else + echo "CHECK_ZSHRC_MARKERS=FAIL no .zshrc" + echo "CHECK_ZSHRC_PLUGIN=FAIL no .zshrc" + echo "CHECK_ZSHRC_THEME=FAIL no .zshrc" + echo "CHECK_NO_NERD_FONT_DISABLE=FAIL no .zshrc" + echo "CHECK_NO_FORGE_EDITOR=FAIL no .zshrc" + echo "CHECK_MARKER_UNIQUE=FAIL no .zshrc" + fi + + # --- Windows-specific: Verify .bash_profile auto-start configuration --- + if [ -f "$HOME/.bash_profile" ]; then + if grep -q '# >>> forge initialize >>>' "$HOME/.bash_profile" && \ + grep -q '# <<< forge initialize <<<' "$HOME/.bash_profile" && \ + grep -q 'exec.*zsh' "$HOME/.bash_profile"; then + echo "CHECK_BASHRC_AUTOSTART=PASS" + else + echo "CHECK_BASHRC_AUTOSTART=FAIL (auto-start block not found in .bash_profile)" + fi + + # Check 
uniqueness of auto-start block + local autostart_count + autostart_count=$(grep -c '# >>> forge initialize >>>' "$HOME/.bash_profile" 2>/dev/null || echo "0") + if [ "$autostart_count" -eq 1 ]; then + echo "CHECK_BASHRC_MARKER_UNIQUE=PASS" + else + echo "CHECK_BASHRC_MARKER_UNIQUE=FAIL (found ${autostart_count} auto-start blocks)" + fi + else + echo "CHECK_BASHRC_AUTOSTART=FAIL (.bash_profile not found)" + echo "CHECK_BASHRC_MARKER_UNIQUE=FAIL (.bash_profile not found)" + fi + + # Check suppression files created by forge + if [ -f "$HOME/.bash_profile" ]; then + echo "CHECK_BASH_PROFILE_EXISTS=PASS" + else + echo "CHECK_BASH_PROFILE_EXISTS=FAIL" + fi + + if [ -f "$HOME/.bash_login" ]; then + echo "CHECK_BASH_LOGIN_EXISTS=PASS" + else + echo "CHECK_BASH_LOGIN_EXISTS=FAIL" + fi + + if [ -f "$HOME/.profile" ]; then + echo "CHECK_PROFILE_EXISTS=PASS" + else + echo "CHECK_PROFILE_EXISTS=FAIL" + fi + + + # --- Check if forge zsh setup's own doctor run failed --- + # forge zsh setup runs doctor internally. Even if our independent doctor call + # succeeds (different environment), we must detect if setup's doctor failed. + if echo "$setup_output" | grep -qi "forge zsh doctor failed"; then + echo "CHECK_SETUP_DOCTOR=FAIL (setup reported doctor failure)" + else + echo "CHECK_SETUP_DOCTOR=PASS" + fi + + # --- Run forge zsh doctor --- + local doctor_output + local doctor_exit=0 + doctor_output=$(forge zsh doctor 2>&1) || doctor_exit=$? 
+ if [ $doctor_exit -eq 0 ]; then + echo "CHECK_DOCTOR_EXIT=PASS (exit=0)" + else + echo "CHECK_DOCTOR_EXIT=FAIL (exit=${doctor_exit})" + fi + + # --- Verify output format --- + local output_ok=true + local output_detail="" + + if echo "$setup_output" | grep -qi "found\|not found\|installed\|Detecting"; then + output_detail="detect=OK" + else + output_ok=false + output_detail="detect=MISSING" + fi + + if echo "$setup_output" | grep -qi "Setup complete\|complete"; then + output_detail="${output_detail}, complete=OK" + else + output_ok=false + output_detail="${output_detail}, complete=MISSING" + fi + + if echo "$setup_output" | grep -qi "Configuring\|configured\|forge plugins"; then + output_detail="${output_detail}, configure=OK" + else + output_ok=false + output_detail="${output_detail}, configure=MISSING" + fi + + # Windows-specific: check for Git Bash summary message. + # When setup_fully_successful is true, the output contains "Git Bash" and + # "source ~/.bash_profile". When tools (fzf/bat/fd) fail to install (common on + # Windows CI — "No package manager on Windows"), the warning message + # "Setup completed with some errors" is shown instead. Accept either. 
+ if echo "$setup_output" | grep -qi "Git Bash\|source.*bashrc"; then + output_detail="${output_detail}, gitbash_summary=OK" + echo "CHECK_SUMMARY_GITBASH=PASS" + elif echo "$setup_output" | grep -qi "Setup completed with some errors\|completed with some errors"; then + output_detail="${output_detail}, gitbash_summary=OK(warning)" + echo "CHECK_SUMMARY_GITBASH=PASS (warning path: tools install failed but setup completed)" + else + output_detail="${output_detail}, gitbash_summary=MISSING" + echo "CHECK_SUMMARY_GITBASH=FAIL (expected Git Bash summary or warning message)" + fi + + if [ "$output_ok" = true ]; then + echo "CHECK_OUTPUT_FORMAT=PASS ${output_detail}" + else + echo "CHECK_OUTPUT_FORMAT=FAIL ${output_detail}" + fi + + # --- Edge-case-specific checks --- + case "$test_type" in + preinstalled_all) + # On Windows CI, fzf/bat/fd are never available ("No package manager on + # Windows"), so "All dependencies already installed" is never shown — forge + # still lists fzf/bat/fd in the install plan. Accept the case where only + # tools (not core deps) are listed for installation. 
+ if echo "$setup_output" | grep -qi "All dependencies already installed"; then + echo "CHECK_EDGE_ALL_PRESENT=PASS" + else + # Check that core deps (zsh, OMZ, plugins) are NOT in the install plan + # but only tools (fzf, bat, fd) are listed + local install_section + install_section=$(echo "$setup_output" | sed -n '/The following will be installed/,/^$/p' 2>/dev/null || echo "") + if [ -n "$install_section" ]; then + if echo "$install_section" | grep -qi "zsh (shell)\|Oh My Zsh\|autosuggestions\|syntax-highlighting"; then + echo "CHECK_EDGE_ALL_PRESENT=FAIL (core deps should not be in install plan)" + else + echo "CHECK_EDGE_ALL_PRESENT=PASS (core deps pre-installed; only tools remain)" + fi + else + echo "CHECK_EDGE_ALL_PRESENT=PASS (no install plan shown)" + fi + fi + if echo "$setup_output" | grep -qi "The following will be installed"; then + # On Windows, this is expected because fzf/bat/fd are always missing. + # Verify only tools are in the list, not core deps. + local install_items + install_items=$(echo "$setup_output" | sed -n '/The following will be installed/,/^$/p' 2>/dev/null || echo "") + if echo "$install_items" | grep -qi "zsh (shell)\|Oh My Zsh\|autosuggestions\|syntax-highlighting"; then + echo "CHECK_EDGE_NO_INSTALL=FAIL (core deps should not be reinstalled)" + else + echo "CHECK_EDGE_NO_INSTALL=PASS (only tools listed — core deps correctly skipped)" + fi + else + echo "CHECK_EDGE_NO_INSTALL=PASS (correctly skipped installation)" + fi + ;; + partial) + if echo "$setup_output" | grep -qi "zsh-autosuggestions\|zsh-syntax-highlighting"; then + echo "CHECK_EDGE_PARTIAL_PLUGINS=PASS (plugins in install plan)" + else + echo "CHECK_EDGE_PARTIAL_PLUGINS=FAIL (plugins not mentioned)" + fi + local install_plan + install_plan=$(echo "$setup_output" | sed -n '/The following will be installed/,/^$/p' 2>/dev/null || echo "") + if [ -n "$install_plan" ]; then + if echo "$install_plan" | grep -qi "zsh (shell)\|Oh My Zsh"; then + echo 
# =============================================================================
# Result evaluation
# =============================================================================

# Scan a verification transcript for CHECK_*=PASS / CHECK_*=FAIL lines and
# summarize the overall outcome.
# Prints "PASS" when no CHECK line failed; otherwise prints "FAIL" followed
# by every failing CHECK line (one per line via echo -e).
# Arguments:
#   $1 - full verification output to scan
parse_check_lines() {
  local scan_input="$1"
  local failures=""
  local ok=true

  # Only FAIL lines matter: PASS lines and everything else are ignored.
  while IFS= read -r check_line; do
    if [[ "$check_line" == CHECK_*=FAIL* ]]; then
      ok=false
      failures="${failures} ${check_line}\n"
    fi
  done <<< "$scan_input"

  if [ "$ok" = true ]; then
    echo "PASS"
  else
    echo "FAIL"
    echo -e "$failures"
  fi
}
-f "$binary_path" ]; then + cat > "$result_file" < /dev/null 2>&1 || true + ;; + partial) + # Run forge once to get a full install, then remove plugins + PATH="$test_path" HOME="$temp_home" NO_COLOR=1 FORGE_EDITOR=vi forge.exe zsh setup --non-interactive > /dev/null 2>&1 || true + # Remove plugins to simulate partial install + local zsh_custom_dir="${temp_home}/.oh-my-zsh/custom/plugins" + rm -rf "${zsh_custom_dir}/zsh-autosuggestions" 2>/dev/null || true + rm -rf "${zsh_custom_dir}/zsh-syntax-highlighting" 2>/dev/null || true + ;; + esac + + # Run forge zsh setup + local setup_output="" + local setup_exit=0 + setup_output=$(PATH="$test_path" HOME="$temp_home" NO_COLOR=1 FORGE_EDITOR=vi forge.exe zsh setup --non-interactive 2>&1) || setup_exit=$? + + # Strip ANSI escape codes for reliable grep matching + setup_output=$(printf '%s' "$setup_output" | sed 's/\x1b\[[0-9;]*m//g') + + # Run verification + local verify_output + verify_output=$(PATH="$test_path" HOME="$temp_home" FORGE_EDITOR=vi run_verify_checks "$test_type" "$setup_output" "$setup_exit" 2>&1) || true + + # Handle rerun scenario: run forge a second time + if [ "$test_type" = "rerun" ]; then + # Update PATH to include ~/.local/bin for GitHub-installed tools + local rerun_path="${temp_home}/.local/bin:${test_path}" + local rerun_output="" + local rerun_exit=0 + rerun_output=$(PATH="$rerun_path" HOME="$temp_home" NO_COLOR=1 FORGE_EDITOR=vi forge.exe zsh setup --non-interactive 2>&1) || rerun_exit=$? 
+ rerun_output=$(printf '%s' "$rerun_output" | sed 's/\x1b\[[0-9;]*m//g') + + if [ "$rerun_exit" -eq 0 ]; then + verify_output="${verify_output} +CHECK_EDGE_RERUN_EXIT=PASS" + else + verify_output="${verify_output} +CHECK_EDGE_RERUN_EXIT=FAIL (exit=${rerun_exit})" + fi + + if echo "$rerun_output" | grep -qi "All dependencies already installed"; then + verify_output="${verify_output} +CHECK_EDGE_RERUN_SKIP=PASS" + else + # On Windows, fzf/bat/fd are never installable, so "All dependencies + # already installed" never appears. Instead, check that core deps + # (zsh, OMZ, plugins) are not in the install plan on the second run. + local rerun_install_section + rerun_install_section=$(echo "$rerun_output" | sed -n '/The following will be installed/,/^$/p' 2>/dev/null || echo "") + if [ -n "$rerun_install_section" ]; then + if echo "$rerun_install_section" | grep -qi "zsh (shell)\|Oh My Zsh\|autosuggestions\|syntax-highlighting"; then + verify_output="${verify_output} +CHECK_EDGE_RERUN_SKIP=FAIL (core deps should not be reinstalled on re-run)" + else + verify_output="${verify_output} +CHECK_EDGE_RERUN_SKIP=PASS (core deps skipped on re-run; only tools remain)" + fi + else + verify_output="${verify_output} +CHECK_EDGE_RERUN_SKIP=PASS (no install plan on re-run)" + fi + fi + + # Check marker uniqueness after re-run + if [ -f "$temp_home/.zshrc" ]; then + local start_count + start_count=$(grep -c '# >>> forge initialize >>>' "$temp_home/.zshrc" 2>/dev/null || echo "0") + if [ "$start_count" -eq 1 ]; then + verify_output="${verify_output} +CHECK_EDGE_RERUN_MARKERS=PASS (still exactly 1 marker set)" + else + verify_output="${verify_output} +CHECK_EDGE_RERUN_MARKERS=FAIL (found ${start_count} marker sets)" + fi + else + verify_output="${verify_output} +CHECK_EDGE_RERUN_MARKERS=FAIL (no .zshrc after re-run)" + fi + + # Check bashrc auto-start block uniqueness after re-run (Windows-specific) + if [ -f "$temp_home/.bash_profile" ]; then + local autostart_count + 
autostart_count=$(grep -c '# >>> forge initialize >>>' "$temp_home/.bash_profile" 2>/dev/null || echo "0") + if [ "$autostart_count" -eq 1 ]; then + verify_output="${verify_output} +CHECK_EDGE_RERUN_BASHRC=PASS (still exactly 1 auto-start block)" + else + verify_output="${verify_output} +CHECK_EDGE_RERUN_BASHRC=FAIL (found ${autostart_count} auto-start blocks)" + fi + else + verify_output="${verify_output} +CHECK_EDGE_RERUN_BASHRC=FAIL (no .bash_profile after re-run)" + fi + + # Append second run output for debugging + verify_output="${verify_output} +OUTPUT_BEGIN +===== SECOND RUN (idempotency check) ===== +${rerun_output} +========================================== +OUTPUT_END" + fi + + # Restore HOME + export HOME="$saved_home" + + # Parse SETUP_EXIT + local parsed_setup_exit + parsed_setup_exit=$(grep '^SETUP_EXIT=' <<< "$verify_output" | head -1 | cut -d= -f2) + + # Evaluate CHECK lines + local eval_result + eval_result=$(parse_check_lines "$verify_output") + local status + local details + status=$(head -1 <<< "$eval_result") + details=$(tail -n +2 <<< "$eval_result") + + # Check setup exit code + if [ -n "$parsed_setup_exit" ] && [ "$parsed_setup_exit" != "0" ]; then + status="FAIL" + details="${details} SETUP_EXIT=${parsed_setup_exit} (expected 0)\n" + fi + + # Write result + cat > "$result_file" < "$output_file" + + # Cleanup temp HOME unless --no-cleanup + if [ "$NO_CLEANUP" = false ]; then + rm -rf "$temp_home" + else + # Copy diagnostic files into RESULTS_DIR for artifact upload + local diag_dir="$RESULTS_DIR/${safe_label}-home" + mkdir -p "$diag_dir" + # Copy key files that help debug failures + cp "$temp_home/.zshrc" "$diag_dir/zshrc" 2>/dev/null || true + cp "$temp_home/.bashrc" "$diag_dir/bashrc" 2>/dev/null || true + cp "$temp_home/.zshenv" "$diag_dir/zshenv" 2>/dev/null || true + cp "$temp_home/.bash_profile" "$diag_dir/bash_profile" 2>/dev/null || true + cp "$temp_home/.bash_login" "$diag_dir/bash_login" 2>/dev/null || true + cp 
"$temp_home/.profile" "$diag_dir/profile" 2>/dev/null || true + cp -r "$temp_home/.oh-my-zsh/custom/plugins" "$diag_dir/omz-plugins" 2>/dev/null || true + ls -la "$temp_home/" > "$diag_dir/home-listing.txt" 2>/dev/null || true + ls -la "$temp_home/.oh-my-zsh/" > "$diag_dir/omz-listing.txt" 2>/dev/null || true + ls -la "$temp_home/.local/bin/" > "$diag_dir/local-bin-listing.txt" 2>/dev/null || true + # Save the PATH that was used + echo "$test_path" > "$diag_dir/test-path.txt" 2>/dev/null || true + log_info "Diagnostics saved to: ${diag_dir}" + # Still remove the temp HOME itself (diagnostics are in RESULTS_DIR now) + rm -rf "$temp_home" + fi +} + +# ============================================================================= +# Result collection and reporting +# ============================================================================= + +collect_test_results() { + log_header "Results" + + local has_results=false + if [ -d "$RESULTS_DIR" ]; then + for f in "$RESULTS_DIR"/*.result; do + if [ -f "$f" ]; then + has_results=true + break + fi + done + fi + + if [ "$has_results" = false ]; then + log_skip "No test results found" + return + fi + + for result_file in "$RESULTS_DIR"/*.result; do + [ -f "$result_file" ] || continue + local status + status=$(grep '^STATUS:' "$result_file" | head -1 | awk '{print $2}' || echo "UNKNOWN") + local label + label=$(grep '^LABEL:' "$result_file" | head -1 | sed 's/^LABEL: //' || echo "(unknown test)") + + case "$status" in + PASS) + log_pass "$label" + ;; + FAIL) + log_fail "$label" + local details + details=$(grep '^DETAILS:' "$result_file" | head -1 | sed 's/^DETAILS: //' || true) + if [ -n "$details" ] && [ "$details" != " " ]; then + echo -e " ${DIM}${details}${NC}" + fi + # Show failing CHECK lines from output file + local output_file="${result_file%.result}.output" + if [ -f "$output_file" ]; then + grep 'CHECK_.*=FAIL' "$output_file" 2>/dev/null | while read -r line; do + echo -e " ${RED}${line}${NC}" + done || true + fi 
# =============================================================================
# Reporting and orchestration
# =============================================================================

# Print the final summary banner: pass/fail/skip counts, the list of failed
# scenarios, and (with --no-cleanup) where the results were preserved.
# Reads globals: PASS, FAIL, SKIP, FAILURES, NO_CLEANUP, RESULTS_DIR.
print_report() {
  echo ""
  echo -e "${BOLD}================================================================${NC}"
  local total
  total=$((PASS + FAIL + SKIP))
  # Green banner when nothing failed, red otherwise.
  local banner_color="$GREEN"
  [ "$FAIL" -eq 0 ] || banner_color="$RED"
  echo -e "${banner_color}${BOLD} RESULTS: ${PASS} passed, ${FAIL} failed, ${SKIP} skipped (${total} total)${NC}"
  echo -e "${BOLD}================================================================${NC}"

  if [ ${#FAILURES[@]} -gt 0 ]; then
    echo ""
    echo -e "${RED}${BOLD}Failed tests:${NC}"
    local failed_label
    for failed_label in "${FAILURES[@]}"; do
      echo -e " ${RED}* ${failed_label}${NC}"
    done
  fi

  if [ "$NO_CLEANUP" = true ] && [ -n "$RESULTS_DIR" ] && [ -d "$RESULTS_DIR" ]; then
    echo ""
    echo -e " ${DIM}Results preserved: ${RESULTS_DIR}${NC}"
  fi
}

# =============================================================================
# Test orchestrator
# =============================================================================

# Build the forge binary, then run every non-filtered scenario sequentially
# and collect the results. Exits the whole script if the build fails.
# Reads globals: NO_CLEANUP, PROJECT_ROOT, BUILD_TARGET, SCENARIOS,
# FILTER_PATTERN, EXCLUDE_PATTERN, DRY_RUN. Writes: RESULTS_DIR.
run_tests() {
  # Results directory: a stable repo-local path for CI artifact upload when
  # --no-cleanup is given, otherwise a throwaway temp dir.
  if [ "$NO_CLEANUP" = true ]; then
    RESULTS_DIR="$PROJECT_ROOT/test-results-windows"
    rm -rf "$RESULTS_DIR"
    mkdir -p "$RESULTS_DIR"
  else
    RESULTS_DIR=$(mktemp -d)
  fi

  log_header "Phase 2: Build Binary"
  if ! build_binary; then
    echo "Error: Build failed. Cannot continue without binary." >&2
    exit 1
  fi

  log_header "Phase 3: Windows/Git Bash E2E Tests"
  log_info "Results dir: ${RESULTS_DIR}"
  log_info "Build target: ${BUILD_TARGET}"
  log_info "Git Bash: $(uname -s) $(uname -r)"
  echo ""

  # Scenarios are "id|label|test_type" strings; filtering matches the label.
  local scenario scenario_label
  for scenario in "${SCENARIOS[@]}"; do
    IFS='|' read -r _ scenario_label _ <<< "$scenario"

    if [ -n "$FILTER_PATTERN" ]; then
      echo "$scenario_label" | grep -qiE "$FILTER_PATTERN" || continue
    fi
    if [ -n "$EXCLUDE_PATTERN" ]; then
      echo "$scenario_label" | grep -qiE "$EXCLUDE_PATTERN" && continue
    fi

    if [ "$DRY_RUN" = true ]; then
      log_info "[dry-run] Would run: ${scenario_label}"
      continue
    fi

    log_info "Running: ${scenario_label}..."
    run_single_test "$scenario"
  done

  if [ "$DRY_RUN" = false ]; then
    collect_test_results
  fi
}

# =============================================================================
# Main
# =============================================================================

# Entry point: parse CLI flags, run static analysis, then (unless --quick)
# run the full scenario suite, report, clean up, and exit 1 on any failure.
main() {
  parse_args "$@"

  echo -e "${BOLD}${BLUE}Forge ZSH Setup - Windows/Git Bash E2E Test Suite${NC}"
  echo ""

  run_static_checks

  # --quick stops after static analysis.
  if [ "$MODE" = "quick" ]; then
    echo ""
    print_report
    [ "$FAIL" -gt 0 ] && exit 1
    exit 0
  fi

  run_tests

  echo ""
  print_report

  # Remove the results dir unless the user asked to keep it.
  if [ "$NO_CLEANUP" = false ] && [ -n "$RESULTS_DIR" ] && [ -d "$RESULTS_DIR" ]; then
    rm -rf "$RESULTS_DIR"
  fi

  [ "$FAIL" -gt 0 ] && exit 1
  exit 0
}
+# +# Build targets (from CI): +# - x86_64-unknown-linux-musl (cross=true, static) +# - x86_64-unknown-linux-gnu (cross=false, dynamic) +# +# Prerequisites: +# - Docker installed and running +# - Rust toolchain with cross (cargo install cross) +# - protoc (for non-cross builds) +# +# Usage: +# bash crates/forge_ci/tests/scripts/test-zsh-setup.sh # build + test all +# bash crates/forge_ci/tests/scripts/test-zsh-setup.sh --quick # shellcheck only +# bash crates/forge_ci/tests/scripts/test-zsh-setup.sh --filter "alpine" # run only matching +# bash crates/forge_ci/tests/scripts/test-zsh-setup.sh --jobs 4 # limit parallelism +# bash crates/forge_ci/tests/scripts/test-zsh-setup.sh --skip-build # skip build, use existing +# bash crates/forge_ci/tests/scripts/test-zsh-setup.sh --targets musl # only test musl target +# bash crates/forge_ci/tests/scripts/test-zsh-setup.sh --list # list images and exit +# bash crates/forge_ci/tests/scripts/test-zsh-setup.sh --help # show usage +# +# Adding new test images: +# Append entries to the IMAGES array using the format: +# "docker_image|Human Label|extra_pre_install_packages" +# The third field is for packages to pre-install BEFORE forge runs (e.g., zsh +# for the pre-installed-zsh edge case). Leave empty for bare images. +# +# Relationship to test-cli.sh: +# test-cli.sh tests the CLI installer script (static/cli). +# This script tests `forge zsh setup` — the Rust-native zsh setup command. +# Both use the same Docker/FIFO parallel execution patterns. +# ============================================================================= + +set -euo pipefail + +# ============================================================================= +# Constants +# ============================================================================= + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +readonly SCRIPT_DIR + +PROJECT_ROOT="$(cd "$SCRIPT_DIR/../../../.." 
&& pwd)" +readonly PROJECT_ROOT + +readonly RED='\033[0;31m' +readonly GREEN='\033[0;32m' +readonly YELLOW='\033[1;33m' +readonly BLUE='\033[0;34m' +readonly BOLD='\033[1m' +readonly DIM='\033[2m' +readonly NC='\033[0m' + +readonly SHELLCHECK_EXCLUSIONS="SC2155,SC2086,SC1090,SC2034,SC2181,SC2016,SC2162" +readonly DOCKER_TAG_PREFIX="forge-zsh-test" +readonly DEFAULT_MAX_JOBS=8 + +# Detect host architecture +HOST_ARCH="$(uname -m)" +readonly HOST_ARCH + +# Build targets — matches CI release.yml for Linux +# Only include targets that match the host architecture +# Format: "target|cross_flag|label" +# target - Rust target triple +# cross_flag - "true" to build with cross, "false" for cargo +# label - human-readable name +if [ "$HOST_ARCH" = "aarch64" ] || [ "$HOST_ARCH" = "arm64" ]; then + # ARM64 runner: only build arm64 targets + readonly BUILD_TARGETS=( + "aarch64-unknown-linux-musl|true|musl (static)" + "aarch64-unknown-linux-gnu|false|gnu (dynamic)" + ) +elif [ "$HOST_ARCH" = "x86_64" ] || [ "$HOST_ARCH" = "amd64" ]; then + # x86_64 runner: only build x86_64 targets + readonly BUILD_TARGETS=( + "x86_64-unknown-linux-musl|true|musl (static)" + "x86_64-unknown-linux-gnu|false|gnu (dynamic)" + ) +else + echo "Error: Unsupported host architecture: $HOST_ARCH" >&2 + echo "Supported: x86_64, amd64, aarch64, arm64" >&2 + exit 1 +fi + +# Docker images — one entry per supported Linux variant +# +# Format: "image|label|extra_packages" +# image - Docker Hub image reference +# label - human-readable name for the test report +# extra_packages - packages to pre-install before forge runs (empty = bare) +readonly IMAGES=( + # --- Tier 1: apt-get (Debian/Ubuntu) --- + "ubuntu:24.04|Ubuntu 24.04 (apt-get)|" + "ubuntu:22.04|Ubuntu 22.04 (apt-get)|" + "debian:bookworm-slim|Debian 12 Slim (apt-get)|" + + # --- Tier 2: dnf (Fedora/RHEL) --- + "fedora:41|Fedora 41 (dnf)|" + "rockylinux:9|Rocky Linux 9 (dnf)|" + + # --- Tier 3: apk (Alpine) --- + "alpine:3.20|Alpine 3.20 (apk)|" + + # 
--- Tier 4: pacman (Arch) --- + "archlinux:latest|Arch Linux (pacman)|" + + # --- Tier 5: zypper (openSUSE) --- + "opensuse/tumbleweed:latest|openSUSE Tumbleweed (zypper)|" + + # --- Tier 6: xbps (Void) --- + "ghcr.io/void-linux/void-glibc:latest|Void Linux glibc (xbps)|" +) + +# Edge case images — special test scenarios +readonly EDGE_CASES=( + # Pre-installed zsh: verify setup skips zsh install + "PREINSTALLED_ZSH|ubuntu:24.04|Pre-installed zsh (skip zsh install)|zsh" + + # Pre-installed everything: verify fast path + "PREINSTALLED_ALL|ubuntu:24.04|Pre-installed everything (fast path)|FULL_PREINSTALL" + + # No git: verify graceful failure + "NO_GIT|ubuntu:24.04|No git (graceful failure)|NO_GIT" + + # Broken zsh: verify reinstall + "BROKEN_ZSH|ubuntu:24.04|Broken zsh (modules removed)|BROKEN_ZSH" + + # Re-run idempotency: verify no duplicates + "RERUN|ubuntu:24.04|Re-run idempotency|RERUN" + + # Partial install: only plugins missing + "PARTIAL|ubuntu:24.04|Partial install (only plugins missing)|PARTIAL" +) + +# ============================================================================= +# Runtime state +# ============================================================================= + +PASS=0 +FAIL=0 +SKIP=0 +FAILURES=() + +# CLI options +MODE="full" +MAX_JOBS="" +FILTER_PATTERN="" +EXCLUDE_PATTERN="" +NO_CLEANUP=false +SKIP_BUILD=false +TARGET_FILTER="" # empty = all, "musl" or "gnu" to filter +NATIVE_BUILD=false # if true, use cargo instead of cross + +# Shared temp paths +RESULTS_DIR="" + +# ============================================================================= +# Logging helpers +# ============================================================================= + +log_header() { echo -e "\n${BOLD}${BLUE}$1${NC}"; } +log_pass() { echo -e " ${GREEN}PASS${NC} $1"; PASS=$((PASS + 1)); } +log_fail() { echo -e " ${RED}FAIL${NC} $1"; FAIL=$((FAIL + 1)); FAILURES+=("$1"); } +log_skip() { echo -e " ${YELLOW}SKIP${NC} $1"; SKIP=$((SKIP + 1)); } +log_info() { echo 
-e " ${DIM}$1${NC}"; } + +# ============================================================================= +# Argument parsing +# ============================================================================= + +print_usage() { + cat < Max parallel Docker jobs (default: nproc, cap $DEFAULT_MAX_JOBS) + --filter Run only images whose label matches (grep -iE) + --exclude Skip images whose label matches (grep -iE) + --skip-build Skip binary build, use existing binaries + --targets Only test matching targets: "musl", "gnu", or "all" (default: all) + --native-build Use cargo instead of cross for building (for CI runners) + --no-cleanup Keep Docker images and results dir after tests + --list List all test images and exit + --help Show this help message + +Environment variables: + PARALLEL_JOBS Fallback for --jobs +EOF +} + +parse_args() { + while [ $# -gt 0 ]; do + case "$1" in + --quick) + MODE="quick" + shift + ;; + --jobs) + MAX_JOBS="${2:?--jobs requires a number}" + shift 2 + ;; + --filter) + FILTER_PATTERN="${2:?--filter requires a pattern}" + shift 2 + ;; + --exclude) + EXCLUDE_PATTERN="${2:?--exclude requires a pattern}" + shift 2 + ;; + --skip-build) + SKIP_BUILD=true + shift + ;; + --targets) + TARGET_FILTER="${2:?--targets requires a value (musl, gnu, or all)}" + shift 2 + ;; + --native-build) + NATIVE_BUILD=true + shift + ;; + --no-cleanup) + NO_CLEANUP=true + shift + ;; + --list) + list_images + exit 0 + ;; + --help|-h) + print_usage + exit 0 + ;; + *) + echo "Unknown option: $1" >&2 + print_usage >&2 + exit 1 + ;; + esac + done + + if [ -z "$MAX_JOBS" ] && [ -n "${PARALLEL_JOBS:-}" ]; then + MAX_JOBS="$PARALLEL_JOBS" + fi +} + +list_images() { + echo -e "${BOLD}Build Targets:${NC}" + local idx=0 + for entry in "${BUILD_TARGETS[@]}"; do + idx=$((idx + 1)) + IFS='|' read -r target _cross label <<< "$entry" + printf " %2d. 
%-55s %s\n" "$idx" "$label" "$target" + done + + echo -e "\n${BOLD}Base Images:${NC}" + for entry in "${IMAGES[@]}"; do + idx=$((idx + 1)) + IFS='|' read -r image label _packages <<< "$entry" + printf " %2d. %-55s %s\n" "$idx" "$label" "$image" + done + + echo -e "\n${BOLD}Edge Cases:${NC}" + for entry in "${EDGE_CASES[@]}"; do + idx=$((idx + 1)) + IFS='|' read -r _type image label _packages <<< "$entry" + printf " %2d. %-55s %s\n" "$idx" "$label" "$image" + done +} + +# ============================================================================= +# Build binaries +# ============================================================================= + +# Build a binary for a given target, matching CI release.yml logic. +# Uses cross for cross-compiled targets, cargo for native targets. +# If NATIVE_BUILD is true, always uses cargo regardless of use_cross flag. +build_binary() { + local target="$1" + local use_cross="$2" + local binary_path="$PROJECT_ROOT/target/${target}/debug/forge" + + if [ "$SKIP_BUILD" = true ] && [ -f "$binary_path" ]; then + log_info "Skipping build for ${target} (binary exists)" + return 0 + fi + + # Override use_cross if --native-build flag is set + if [ "$NATIVE_BUILD" = true ]; then + use_cross="false" + fi + + if [ "$use_cross" = "true" ]; then + if ! command -v cross > /dev/null 2>&1; then + log_fail "cross not installed (needed for ${target}). Install with: cargo install cross" + return 1 + fi + log_info "Building ${target} with cross (debug)..." + if ! cross build --target "$target" 2>"$RESULTS_DIR/build-${target}.log"; then + log_fail "Build failed for ${target}" + log_info "Build log: $RESULTS_DIR/build-${target}.log" + echo "" + echo "===== Full build log =====" + cat "$RESULTS_DIR/build-${target}.log" 2>/dev/null || echo "Log file not found" + echo "==========================" + echo "" + return 1 + fi + else + # Native build with cargo — mirrors CI: no cross, uses setup-cross-toolchain + if ! 
rustup target list --installed 2>/dev/null | grep -q "$target"; then + log_info "Adding Rust target ${target}..." + rustup target add "$target" 2>/dev/null || true + fi + log_info "Building ${target} with cargo (debug)..." + if ! cargo build --target "$target" 2>"$RESULTS_DIR/build-${target}.log"; then + log_fail "Build failed for ${target}" + log_info "Build log: $RESULTS_DIR/build-${target}.log" + echo "" + echo "===== Full build log =====" + cat "$RESULTS_DIR/build-${target}.log" 2>/dev/null || echo "Log file not found" + echo "==========================" + echo "" + return 1 + fi + fi + + if [ -f "$binary_path" ]; then + log_pass "Built ${target} -> $(du -h "$binary_path" | cut -f1)" + return 0 + else + log_fail "Binary not found after build: ${binary_path}" + return 1 + fi +} + +# Build all selected targets. Exits immediately if any build fails. +build_all_targets() { + log_header "Phase 2: Build Binaries" + + for entry in "${BUILD_TARGETS[@]}"; do + IFS='|' read -r target use_cross label <<< "$entry" + + # Apply target filter + if [ -n "$TARGET_FILTER" ] && [ "$TARGET_FILTER" != "all" ]; then + if ! echo "$target" | grep -qi "$TARGET_FILTER"; then + log_skip "${label} (filtered out by --targets ${TARGET_FILTER})" + continue + fi + fi + + # Build and exit immediately on failure + if ! build_binary "$target" "$use_cross"; then + echo "Error: Build failed for ${target}. Cannot continue without binaries." >&2 + exit 1 + fi + done +} + +# Return the relative path (from PROJECT_ROOT) to the binary for a target. 
# Return the relative path (from PROJECT_ROOT) to the debug binary built for
# a given Rust target triple.
# Arguments:
#   $1 - target triple (e.g. x86_64-unknown-linux-musl)
binary_rel_path() {
  printf 'target/%s/debug/forge\n' "$1"
}

# =============================================================================
# Static analysis
# =============================================================================

# Phase 1: syntax-check and (when available) shellcheck this script itself.
# Reads global: SHELLCHECK_EXCLUSIONS. Records results via log_pass/log_fail/
# log_skip.
run_static_checks() {
  log_header "Phase 1: Static Analysis"

  # bash -n parses the script without executing it.
  if bash -n "${BASH_SOURCE[0]}" 2>/dev/null; then
    log_pass "bash -n syntax check"
  else
    log_fail "bash -n syntax check"
  fi

  # shellcheck is optional on the host; skip rather than fail when absent.
  if ! command -v shellcheck > /dev/null 2>&1; then
    log_skip "shellcheck (not installed)"
    return
  fi
  if shellcheck -x -e "$SHELLCHECK_EXCLUSIONS" "${BASH_SOURCE[0]}" 2>/dev/null; then
    log_pass "shellcheck (excluding $SHELLCHECK_EXCLUSIONS)"
  else
    log_fail "shellcheck (excluding $SHELLCHECK_EXCLUSIONS)"
  fi
}

# =============================================================================
# Docker helpers
# =============================================================================

# Print the distro-appropriate package-manager command that installs git
# (plus bash/curl where the base image lacks them) and any extra packages.
# Arguments:
#   $1 - Docker image reference (used to pick the package manager)
#   $2 - extra packages to pre-install, or a scenario sentinel
#        (NO_GIT / FULL_PREINSTALL / BROKEN_ZSH / RERUN / PARTIAL / empty)
pkg_install_cmd() {
  local image="$1"
  local extra="$2"

  # Sentinels mark edge-case scenarios; they are not real package names and
  # must never be appended to the install command.
  local extra_pkgs=""
  case "$extra" in
    NO_GIT|FULL_PREINSTALL|BROKEN_ZSH|RERUN|PARTIAL|"") ;;
    *) extra_pkgs=" $extra" ;;
  esac

  case "$image" in
    alpine*)
      echo "apk add --no-cache git bash curl${extra_pkgs}"
      ;;
    fedora*|rockylinux*|almalinux*|centos*)
      echo "dnf install -y git${extra_pkgs}"
      ;;
    archlinux*)
      echo "pacman -Sy --noconfirm git${extra_pkgs}"
      ;;
    opensuse*|suse*)
      echo "zypper -n install git curl${extra_pkgs}"
      ;;
    *void*)
      echo "xbps-install -Sy git bash curl${extra_pkgs}"
      ;;
    *)
      echo "apt-get update -qq && apt-get install -y -qq git curl${extra_pkgs}"
      ;;
  esac
}

# Print Dockerfile RUN commands that create a non-root "testuser" account
# with passwordless sudo, using the image's native package manager.
# Arguments:
#   $1 - Docker image reference (used to pick the package manager)
user_setup_cmd() {
  local image="$1"
  local grant_sudo="echo 'testuser ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers"
  local add_user="useradd -m -s /bin/bash testuser"

  case "$image" in
    alpine*)
      # Alpine has adduser (busybox) instead of useradd.
      echo "apk add --no-cache sudo && adduser -D -s /bin/sh testuser && ${grant_sudo}"
      ;;
    fedora*|rockylinux*|almalinux*|centos*)
      echo "dnf install -y sudo && ${add_user} && ${grant_sudo}"
      ;;
    archlinux*)
      echo "pacman -Sy --noconfirm sudo && ${add_user} && ${grant_sudo}"
      ;;
    opensuse*|suse*)
      echo "zypper -n install sudo && ${add_user} && ${grant_sudo}"
      ;;
    *void*)
      # Void needs the shadow package for useradd.
      echo "xbps-install -Sy sudo shadow && ${add_user} && ${grant_sudo}"
      ;;
    *)
      echo "apt-get update -qq && apt-get install -y -qq sudo && ${add_user} && ${grant_sudo}"
      ;;
  esac
}
docker build --quiet -t "$tag" -f - "$PROJECT_ROOT" <"$build_log" 2>&1 +FROM ${image} +ENV DEBIAN_FRONTEND=noninteractive +ENV TERM=dumb +ENV NO_COLOR=1 +ENV FORGE_EDITOR=vi +RUN ${install_cmd} +COPY ${bin_rel} /usr/local/bin/forge +RUN chmod +x /usr/local/bin/forge +${extra_lines} +${user_lines} +DOCKERFILE + then + return 1 + fi + return 0 +} + +# ============================================================================= +# Verification script +# ============================================================================= + +# Output the in-container verification script. +# Uses a single-quoted heredoc so no host-side variable expansion occurs. +# Arguments: +# $1 - test type: "standard" | "no_git" | "preinstalled_zsh" | +# "preinstalled_all" | "broken_zsh" | "rerun" | "partial" +generate_verify_script() { + local test_type="${1:-standard}" + + cat <<'VERIFY_SCRIPT_HEADER' +#!/bin/bash +set -o pipefail + +VERIFY_SCRIPT_HEADER + + # Emit the test type as a variable + echo "TEST_TYPE=\"${test_type}\"" + + cat <<'VERIFY_SCRIPT_BODY' + +# Add ~/.local/bin to PATH so tools installed via GitHub releases are found +export PATH="$HOME/.local/bin:$PATH" + +# --- Run forge zsh setup and capture output --- +setup_output_raw=$(forge zsh setup --non-interactive 2>&1) +setup_exit=$? 
+# Strip ANSI escape codes so grep matching works reliably +setup_output=$(printf '%s' "$setup_output_raw" | sed 's/\x1b\[[0-9;]*m//g') +echo "SETUP_EXIT=${setup_exit}" + +# --- Verify zsh binary --- +if command -v zsh > /dev/null 2>&1; then + zsh_ver=$(zsh --version 2>&1 | head -1) || zsh_ver="(failed)" + if zsh -c "zmodload zsh/zle && zmodload zsh/datetime && zmodload zsh/stat" > /dev/null 2>&1; then + echo "CHECK_ZSH=PASS ${zsh_ver} (modules OK)" + else + echo "CHECK_ZSH=FAIL ${zsh_ver} (modules broken)" + fi +else + if [ "$TEST_TYPE" = "no_git" ]; then + echo "CHECK_ZSH=PASS (expected: no zsh in no-git test)" + else + echo "CHECK_ZSH=FAIL zsh not found in PATH" + fi +fi + +# --- Verify Oh My Zsh --- +if [ -d "$HOME/.oh-my-zsh" ]; then + omz_ok=true + omz_detail="dir=OK" + for subdir in custom/plugins themes lib; do + if [ ! -d "$HOME/.oh-my-zsh/$subdir" ]; then + omz_ok=false + omz_detail="${omz_detail}, ${subdir}=MISSING" + fi + done + if [ "$omz_ok" = true ]; then + echo "CHECK_OMZ_DIR=PASS ${omz_detail}" + else + echo "CHECK_OMZ_DIR=FAIL ${omz_detail}" + fi +else + if [ "$TEST_TYPE" = "no_git" ]; then + echo "CHECK_OMZ_DIR=PASS (expected: no OMZ in no-git test)" + else + echo "CHECK_OMZ_DIR=FAIL ~/.oh-my-zsh not found" + fi +fi + +# --- Verify Oh My Zsh defaults in .zshrc --- +if [ -f "$HOME/.zshrc" ]; then + omz_defaults_ok=true + omz_defaults_detail="" + if grep -q 'ZSH_THEME=' "$HOME/.zshrc" 2>/dev/null; then + omz_defaults_detail="theme=OK" + else + omz_defaults_ok=false + omz_defaults_detail="theme=MISSING" + fi + if grep -q '^plugins=' "$HOME/.zshrc" 2>/dev/null; then + omz_defaults_detail="${omz_defaults_detail}, plugins=OK" + else + omz_defaults_ok=false + omz_defaults_detail="${omz_defaults_detail}, plugins=MISSING" + fi + if [ "$omz_defaults_ok" = true ]; then + echo "CHECK_OMZ_DEFAULTS=PASS ${omz_defaults_detail}" + else + echo "CHECK_OMZ_DEFAULTS=FAIL ${omz_defaults_detail}" + fi +else + if [ "$TEST_TYPE" = "no_git" ]; then + echo 
"CHECK_OMZ_DEFAULTS=PASS (expected: no .zshrc in no-git test)" + else + echo "CHECK_OMZ_DEFAULTS=FAIL ~/.zshrc not found" + fi +fi + +# --- Verify plugins --- +zsh_custom="${ZSH_CUSTOM:-$HOME/.oh-my-zsh/custom}" +if [ -d "$zsh_custom/plugins/zsh-autosuggestions" ]; then + # Check for .zsh files using ls (find may not be available on minimal images) + if ls "$zsh_custom/plugins/zsh-autosuggestions/"*.zsh 1>/dev/null 2>&1; then + echo "CHECK_AUTOSUGGESTIONS=PASS" + else + echo "CHECK_AUTOSUGGESTIONS=FAIL (dir exists but no .zsh files)" + fi +else + if [ "$TEST_TYPE" = "no_git" ]; then + echo "CHECK_AUTOSUGGESTIONS=PASS (expected: no plugins in no-git test)" + else + echo "CHECK_AUTOSUGGESTIONS=FAIL not installed" + fi +fi + +if [ -d "$zsh_custom/plugins/zsh-syntax-highlighting" ]; then + if ls "$zsh_custom/plugins/zsh-syntax-highlighting/"*.zsh 1>/dev/null 2>&1; then + echo "CHECK_SYNTAX_HIGHLIGHTING=PASS" + else + echo "CHECK_SYNTAX_HIGHLIGHTING=FAIL (dir exists but no .zsh files)" + fi +else + if [ "$TEST_TYPE" = "no_git" ]; then + echo "CHECK_SYNTAX_HIGHLIGHTING=PASS (expected: no plugins in no-git test)" + else + echo "CHECK_SYNTAX_HIGHLIGHTING=FAIL not installed" + fi +fi + +# --- Verify .zshrc forge markers and content --- +if [ -f "$HOME/.zshrc" ]; then + if grep -q '# >>> forge initialize >>>' "$HOME/.zshrc" && \ + grep -q '# <<< forge initialize <<<' "$HOME/.zshrc"; then + echo "CHECK_ZSHRC_MARKERS=PASS" + else + echo "CHECK_ZSHRC_MARKERS=FAIL markers not found" + fi + + if grep -q 'eval "\$(forge zsh plugin)"' "$HOME/.zshrc"; then + echo "CHECK_ZSHRC_PLUGIN=PASS" + else + echo "CHECK_ZSHRC_PLUGIN=FAIL plugin eval not found" + fi + + if grep -q 'eval "\$(forge zsh theme)"' "$HOME/.zshrc"; then + echo "CHECK_ZSHRC_THEME=PASS" + else + echo "CHECK_ZSHRC_THEME=FAIL theme eval not found" + fi + + if grep -q 'NERD_FONT=0' "$HOME/.zshrc"; then + echo "CHECK_NO_NERD_FONT_DISABLE=FAIL (NERD_FONT=0 found in non-interactive mode)" + else + echo 
"CHECK_NO_NERD_FONT_DISABLE=PASS" + fi + + if grep -q 'FORGE_EDITOR' "$HOME/.zshrc"; then + echo "CHECK_NO_FORGE_EDITOR=FAIL (FORGE_EDITOR found in non-interactive mode)" + else + echo "CHECK_NO_FORGE_EDITOR=PASS" + fi + + # Check marker uniqueness (idempotency) + start_count=$(grep -c '# >>> forge initialize >>>' "$HOME/.zshrc" 2>/dev/null || echo "0") + end_count=$(grep -c '# <<< forge initialize <<<' "$HOME/.zshrc" 2>/dev/null || echo "0") + if [ "$start_count" -eq 1 ] && [ "$end_count" -eq 1 ]; then + echo "CHECK_MARKER_UNIQUE=PASS" + else + echo "CHECK_MARKER_UNIQUE=FAIL (start=${start_count}, end=${end_count})" + fi +else + if [ "$TEST_TYPE" = "no_git" ]; then + echo "CHECK_ZSHRC_MARKERS=PASS (expected: no .zshrc in no-git test)" + echo "CHECK_ZSHRC_PLUGIN=PASS (expected: no .zshrc in no-git test)" + echo "CHECK_ZSHRC_THEME=PASS (expected: no .zshrc in no-git test)" + echo "CHECK_NO_NERD_FONT_DISABLE=PASS (expected: no .zshrc in no-git test)" + echo "CHECK_NO_FORGE_EDITOR=PASS (expected: no .zshrc in no-git test)" + echo "CHECK_MARKER_UNIQUE=PASS (expected: no .zshrc in no-git test)" + else + echo "CHECK_ZSHRC_MARKERS=FAIL no .zshrc" + echo "CHECK_ZSHRC_PLUGIN=FAIL no .zshrc" + echo "CHECK_ZSHRC_THEME=FAIL no .zshrc" + echo "CHECK_NO_NERD_FONT_DISABLE=FAIL no .zshrc" + echo "CHECK_NO_FORGE_EDITOR=FAIL no .zshrc" + echo "CHECK_MARKER_UNIQUE=FAIL no .zshrc" + fi +fi + +# --- Check if forge zsh setup's own doctor run failed --- +# forge zsh setup runs doctor internally. Even if our independent doctor call +# succeeds (different environment), we must detect if setup's doctor failed. +if [ "$TEST_TYPE" != "no_git" ]; then + if echo "$setup_output" | grep -qi "forge zsh doctor failed"; then + echo "CHECK_SETUP_DOCTOR=FAIL (setup reported doctor failure)" + else + echo "CHECK_SETUP_DOCTOR=PASS" + fi +fi + +# --- Run forge zsh doctor --- +doctor_exit=0 +doctor_output=$(forge zsh doctor 2>&1) || doctor_exit=$? 
+if [ "$TEST_TYPE" = "no_git" ]; then + # Doctor may fail or not run at all in no-git scenario + echo "CHECK_DOCTOR_EXIT=PASS (skipped for no-git test)" +else + # Doctor must exit 0 — any non-zero exit means it found problems + if [ $doctor_exit -eq 0 ]; then + echo "CHECK_DOCTOR_EXIT=PASS (exit=0)" + else + echo "CHECK_DOCTOR_EXIT=FAIL (exit=${doctor_exit})" + fi +fi + +# --- Verify output format --- +output_ok=true +output_detail="" + +# Check for environment detection output +if echo "$setup_output" | grep -qi "found\|not found\|installed\|Detecting"; then + output_detail="detect=OK" +else + output_ok=false + output_detail="detect=MISSING" +fi + +if [ "$TEST_TYPE" = "no_git" ]; then + # For no-git test, check for the error message + if echo "$setup_output" | grep -qi "git is required"; then + output_detail="${output_detail}, git_error=OK" + else + output_ok=false + output_detail="${output_detail}, git_error=MISSING" + fi + echo "CHECK_OUTPUT_FORMAT=PASS ${output_detail}" +else + # Check for setup complete message + if echo "$setup_output" | grep -qi "Setup complete\|complete"; then + output_detail="${output_detail}, complete=OK" + else + output_ok=false + output_detail="${output_detail}, complete=MISSING" + fi + + # Check for configure step + if echo "$setup_output" | grep -qi "Configuring\|configured\|forge plugins"; then + output_detail="${output_detail}, configure=OK" + else + output_ok=false + output_detail="${output_detail}, configure=MISSING" + fi + + if [ "$output_ok" = true ]; then + echo "CHECK_OUTPUT_FORMAT=PASS ${output_detail}" + else + echo "CHECK_OUTPUT_FORMAT=FAIL ${output_detail}" + fi +fi + +# --- Edge-case-specific checks --- +case "$TEST_TYPE" in + preinstalled_zsh) + if echo "$setup_output" | grep -qi "Installing zsh"; then + echo "CHECK_EDGE_SKIP_ZSH=FAIL (should not install zsh when pre-installed)" + else + echo "CHECK_EDGE_SKIP_ZSH=PASS (correctly skipped zsh install)" + fi + # Should still show the detected version + if echo 
"$setup_output" | grep -qi "zsh.*found"; then + echo "CHECK_EDGE_ZSH_DETECTED=PASS" + else + echo "CHECK_EDGE_ZSH_DETECTED=FAIL (should report detected zsh)" + fi + ;; + preinstalled_all) + if echo "$setup_output" | grep -qi "All dependencies already installed"; then + echo "CHECK_EDGE_ALL_PRESENT=PASS" + else + echo "CHECK_EDGE_ALL_PRESENT=FAIL (should show all deps installed)" + fi + if echo "$setup_output" | grep -qi "The following will be installed"; then + echo "CHECK_EDGE_NO_INSTALL=FAIL (should not install anything)" + else + echo "CHECK_EDGE_NO_INSTALL=PASS (correctly skipped installation)" + fi + ;; + no_git) + if echo "$setup_output" | grep -qi "git is required"; then + echo "CHECK_EDGE_NO_GIT=PASS" + else + echo "CHECK_EDGE_NO_GIT=FAIL (should show git required error)" + fi + if [ "$setup_exit" -eq 0 ]; then + echo "CHECK_EDGE_NO_GIT_EXIT=PASS (exit=0, graceful)" + else + echo "CHECK_EDGE_NO_GIT_EXIT=FAIL (exit=${setup_exit}, should be 0)" + fi + ;; + broken_zsh) + if echo "$setup_output" | grep -qi "modules are broken\|broken"; then + echo "CHECK_EDGE_BROKEN_DETECTED=PASS" + else + echo "CHECK_EDGE_BROKEN_DETECTED=FAIL (should detect broken zsh)" + fi + ;; + rerun) + # Run forge zsh setup a second time + # Update PATH to include ~/.local/bin (where GitHub-installed tools are located) + # This simulates the PATH that would be set after sourcing ~/.zshrc + export PATH="$HOME/.local/bin:/usr/local/bin:$PATH" + hash -r # Clear bash's command cache + rerun_output_raw=$(forge zsh setup --non-interactive 2>&1) + rerun_exit=$? 
+ rerun_output=$(printf '%s' "$rerun_output_raw" | sed 's/\x1b\[[0-9;]*m//g') + if [ "$rerun_exit" -eq 0 ]; then + echo "CHECK_EDGE_RERUN_EXIT=PASS" + else + echo "CHECK_EDGE_RERUN_EXIT=FAIL (exit=${rerun_exit})" + fi + if echo "$rerun_output" | grep -qi "All dependencies already installed"; then + echo "CHECK_EDGE_RERUN_SKIP=PASS" + else + echo "CHECK_EDGE_RERUN_SKIP=FAIL (second run should skip installs)" + fi + # Check marker uniqueness after re-run + if [ -f "$HOME/.zshrc" ]; then + start_count=$(grep -c '# >>> forge initialize >>>' "$HOME/.zshrc" 2>/dev/null || echo "0") + if [ "$start_count" -eq 1 ]; then + echo "CHECK_EDGE_RERUN_MARKERS=PASS (still exactly 1 marker set)" + else + echo "CHECK_EDGE_RERUN_MARKERS=FAIL (found ${start_count} marker sets)" + fi + else + echo "CHECK_EDGE_RERUN_MARKERS=FAIL (no .zshrc after re-run)" + fi + ;; + partial) + # Should only install plugins, not zsh or OMZ + if echo "$setup_output" | grep -qi "zsh-autosuggestions\|zsh-syntax-highlighting"; then + echo "CHECK_EDGE_PARTIAL_PLUGINS=PASS (plugins in install plan)" + else + echo "CHECK_EDGE_PARTIAL_PLUGINS=FAIL (plugins not mentioned)" + fi + # The install plan should NOT mention zsh or Oh My Zsh + # Extract only the install plan block (stop at first blank line after header) + install_plan=$(echo "$setup_output" | sed -n '/The following will be installed/,/^$/p' 2>/dev/null || echo "") + if [ -n "$install_plan" ]; then + if echo "$install_plan" | grep -qi "zsh (shell)\|Oh My Zsh"; then + echo "CHECK_EDGE_PARTIAL_NO_ZSH=FAIL (should not install zsh/OMZ)" + else + echo "CHECK_EDGE_PARTIAL_NO_ZSH=PASS (correctly skips zsh/OMZ)" + fi + else + # If all deps including plugins are installed, that's also OK + echo "CHECK_EDGE_PARTIAL_NO_ZSH=PASS (no install plan = nothing to install)" + fi + ;; +esac + +# --- Emit raw output for debugging --- +echo "OUTPUT_BEGIN" +echo "$setup_output_raw" +# If this is a re-run test, also show the second run output +if [ -n "$rerun_output_raw" ]; then 
+ echo "" + echo "===== SECOND RUN (idempotency check) =====" + echo "$rerun_output_raw" + echo "==========================================" +fi +echo "OUTPUT_END" +VERIFY_SCRIPT_BODY +} + +# ============================================================================= +# Container execution +# ============================================================================= + +# Run the verify script inside a Docker container. +# Outputs: exit_code on line 1, then combined stdout+stderr. +run_container() { + local tag="$1" + local run_shell="$2" + local test_type="$3" + local exit_code=0 + local output + output=$(docker run --rm "$tag" "$run_shell" -c "$(generate_verify_script "$test_type")" 2>&1) || exit_code=$? + echo "$exit_code" + echo "$output" +} + +# ============================================================================= +# Result evaluation +# ============================================================================= + +# Parse CHECK_* lines from container output and determine pass/fail. +parse_check_lines() { + local output="$1" + local label="$2" + local all_pass=true + local fail_details="" + + while IFS= read -r line; do + case "$line" in + CHECK_*=PASS*) + ;; + CHECK_*=FAIL*) + all_pass=false + fail_details="${fail_details} ${line}\n" + ;; + esac + done <<< "$output" + + if [ "$all_pass" = true ]; then + echo "PASS" + else + echo "FAIL" + echo -e "$fail_details" + fi +} + +# Run a single Docker test for a base image with a specific binary. +# Writes result file to $RESULTS_DIR. 
+run_single_test() { + local entry="$1" + local variant="$2" # "root" or "user" + local target="$3" # rust target triple + local test_type="${4:-standard}" + + IFS='|' read -r image label packages <<< "$entry" + local safe_label + safe_label=$(echo "$label" | tr '[:upper:]' '[:lower:]' | tr ' /' '_-' | tr -cd '[:alnum:]_-') + local target_short="${target##*-}" # musl or gnu + local tag="${DOCKER_TAG_PREFIX}-${safe_label}-${variant}-${target_short}" + local result_file="$RESULTS_DIR/${safe_label}-${variant}-${target_short}.result" + + local bin_rel + bin_rel=$(binary_rel_path "$target") + + # Check binary exists + if [ ! -f "$PROJECT_ROOT/$bin_rel" ]; then + cat > "$result_file" </dev/null || echo "(no log)") + fi + cat > "$result_file" <&1) || true + + # Parse exit code (first line) and output (rest) without broken pipe + local container_exit + local container_output + container_exit=$(head -1 <<< "$raw_output") + container_output=$(tail -n +2 <<< "$raw_output") + + # Parse SETUP_EXIT + local setup_exit + setup_exit=$(grep '^SETUP_EXIT=' <<< "$container_output" | head -1 | cut -d= -f2) + + # Evaluate CHECK lines + local eval_result + eval_result=$(parse_check_lines "$container_output" "$label ($variant) [$target_short]") + local status + local details + status=$(head -1 <<< "$eval_result") + details=$(tail -n +2 <<< "$eval_result") + + # Check setup exit code (should be 0) + if [ -n "$setup_exit" ] && [ "$setup_exit" != "0" ] && [ "$test_type" != "no_git" ]; then + status="FAIL" + details="${details} SETUP_EXIT=${setup_exit} (expected 0)\n" + fi + + # Write result + cat > "$result_file" < "$output_file" + + # Cleanup Docker image unless --no-cleanup + if [ "$NO_CLEANUP" = false ]; then + docker rmi -f "$tag" > /dev/null 2>&1 || true + fi +} + +# Run a single edge case test with a specific binary. 
+run_edge_case_test() { + local entry="$1" + local target="$2" + + IFS='|' read -r edge_type image label packages <<< "$entry" + + local safe_label + safe_label=$(echo "$label" | tr '[:upper:]' '[:lower:]' | tr ' /' '_-' | tr -cd '[:alnum:]_-') + local target_short="${target##*-}" + local tag="${DOCKER_TAG_PREFIX}-edge-${safe_label}-${target_short}" + local result_file="$RESULTS_DIR/edge-${safe_label}-${target_short}.result" + + local bin_rel + bin_rel=$(binary_rel_path "$target") + + if [ ! -f "$PROJECT_ROOT/$bin_rel" ]; then + cat > "$result_file" </dev/null || echo "(no log)") + fi + cat > "$result_file" <&1) || true + + local container_exit + local container_output + container_exit=$(head -1 <<< "$raw_output") + container_output=$(tail -n +2 <<< "$raw_output") + + local setup_exit + setup_exit=$(grep '^SETUP_EXIT=' <<< "$container_output" | head -1 | cut -d= -f2) + + local eval_result + eval_result=$(parse_check_lines "$container_output" "$label [$target_short]") + local status + local details + status=$(head -1 <<< "$eval_result") + details=$(tail -n +2 <<< "$eval_result") + + # For no_git test, exit code 0 is expected even though things "fail" + if [ "$edge_type" != "NO_GIT" ] && [ -n "$setup_exit" ] && [ "$setup_exit" != "0" ]; then + status="FAIL" + details="${details} SETUP_EXIT=${setup_exit} (expected 0)\n" + fi + + cat > "$result_file" < "$output_file" + + if [ "$NO_CLEANUP" = false ]; then + docker rmi -f "$tag" > /dev/null 2>&1 || true + fi +} + +# ============================================================================= +# Parallel execution +# ============================================================================= + +# Determine which targets are compatible with a given image. +# Returns space-separated list of compatible targets. +# +# The gnu binary (x86_64-unknown-linux-gnu) requires glibc 2.38+ and won't +# run on Alpine (musl), Debian 12 (glibc 2.36), Ubuntu 22.04 (glibc 2.35), +# or Rocky 9 (glibc 2.34). 
The musl binary is statically linked and runs +# everywhere. +get_compatible_targets() { + local image="$1" + local all_targets="$2" # space-separated list of available targets + + # Extract base image name (before colon) + local base_image="${image%%:*}" + + # Images that ONLY support musl (old glibc or musl-based) + case "$base_image" in + alpine) + # Alpine uses musl libc, not glibc + echo "$all_targets" | tr ' ' '\n' | grep -E 'musl$' + ;; + debian) + # Debian 12 has glibc 2.36 (too old for gnu binary built on glibc 2.43) + echo "$all_targets" | tr ' ' '\n' | grep -E 'musl$' + ;; + ubuntu) + # Check version: 22.04 has glibc 2.35 (musl only), 24.04 has glibc 2.39 (both) + local version="${image#*:}" + if [[ "$version" == "22.04" ]]; then + echo "$all_targets" | tr ' ' '\n' | grep -E 'musl$' + else + # Ubuntu 24.04+ supports both + echo "$all_targets" + fi + ;; + rockylinux) + # Rocky 9 has glibc 2.34 (too old) + echo "$all_targets" | tr ' ' '\n' | grep -E 'musl$' + ;; + *) + # All other images (Arch, Fedora, openSUSE, Void) have recent glibc and support both + echo "$all_targets" + ;; + esac +} + +launch_parallel_tests() { + local max_jobs="${MAX_JOBS:-}" + if [ -z "$max_jobs" ]; then + max_jobs=$(nproc 2>/dev/null || sysctl -n hw.ncpu 2>/dev/null || echo 4) + if [ "$max_jobs" -gt "$DEFAULT_MAX_JOBS" ]; then + max_jobs=$DEFAULT_MAX_JOBS + fi + fi + + log_info "Running with up to ${max_jobs} parallel jobs" + + # Collect active targets + local active_targets=() + for entry in "${BUILD_TARGETS[@]}"; do + IFS='|' read -r target _cross _label <<< "$entry" + if [ -n "$TARGET_FILTER" ] && [ "$TARGET_FILTER" != "all" ]; then + if ! 
echo "$target" | grep -qi "$TARGET_FILTER"; then + continue + fi + fi + local bin="$PROJECT_ROOT/$(binary_rel_path "$target")" + if [ -f "$bin" ]; then + active_targets+=("$target") + fi + done + + if [ ${#active_targets[@]} -eq 0 ]; then + log_fail "No built binaries found for any target" + return + fi + + log_info "Testing ${#active_targets[@]} target(s): ${active_targets[*]}" + + # FIFO-based semaphore for concurrency control + local fifo + fifo=$(mktemp -u) + mkfifo "$fifo" + exec 3<>"$fifo" + rm "$fifo" + + # Fill semaphore with tokens + for ((i = 0; i < max_jobs; i++)); do + echo >&3 + done + + # Launch base image tests for each target + for target in "${active_targets[@]}"; do + for entry in "${IMAGES[@]}"; do + IFS='|' read -r image label _packages <<< "$entry" + + # Apply filter + if [ -n "$FILTER_PATTERN" ] && ! echo "$label" | grep -qiE "$FILTER_PATTERN"; then + continue + fi + if [ -n "$EXCLUDE_PATTERN" ] && echo "$label" | grep -qiE "$EXCLUDE_PATTERN"; then + continue + fi + + # Check if this image is compatible with this target + local compatible_targets + compatible_targets=$(get_compatible_targets "$image" "${active_targets[*]}") + if ! echo "$compatible_targets" | grep -qw "$target"; then + continue + fi + + # Root variant + read -u 3 + ( + run_single_test "$entry" "root" "$target" "standard" + echo >&3 + ) & + + # User+sudo variant + read -u 3 + ( + run_single_test "$entry" "user" "$target" "standard" + echo >&3 + ) & + done + + # Launch edge case tests for each target + for entry in "${EDGE_CASES[@]}"; do + IFS='|' read -r _type image label _packages <<< "$entry" + + if [ -n "$FILTER_PATTERN" ] && ! echo "$label" | grep -qiE "$FILTER_PATTERN"; then + continue + fi + if [ -n "$EXCLUDE_PATTERN" ] && echo "$label" | grep -qiE "$EXCLUDE_PATTERN"; then + continue + fi + + # Check compatibility for edge cases too + local compatible_targets + compatible_targets=$(get_compatible_targets "$image" "${active_targets[*]}") + if ! 
echo "$compatible_targets" | grep -qw "$target"; then + continue + fi + + read -u 3 + ( + run_edge_case_test "$entry" "$target" + echo >&3 + ) & + done + done + + # Wait for all jobs to complete + wait + + # Close semaphore FD + exec 3>&- +} + +# ============================================================================= +# Result collection and reporting +# ============================================================================= + +collect_test_results() { + log_header "Results" + + local has_results=false + if [ -d "$RESULTS_DIR" ]; then + for f in "$RESULTS_DIR"/*.result; do + if [ -f "$f" ]; then + has_results=true + break + fi + done + fi + + if [ "$has_results" = false ]; then + log_skip "No test results found" + return + fi + + for result_file in "$RESULTS_DIR"/*.result; do + [ -f "$result_file" ] || continue + local status + status=$(grep '^STATUS:' "$result_file" | head -1 | awk '{print $2}' || echo "UNKNOWN") + local label + label=$(grep '^LABEL:' "$result_file" | head -1 | sed 's/^LABEL: //' || echo "(unknown test)") + + case "$status" in + PASS) + log_pass "$label" + ;; + FAIL) + log_fail "$label" + local details + details=$(grep '^DETAILS:' "$result_file" | head -1 | sed 's/^DETAILS: //' || true) + if [ -n "$details" ] && [ "$details" != " " ]; then + echo -e " ${DIM}${details}${NC}" + fi + # Show build log if present + local build_log_content + build_log_content=$(grep '^BUILD_LOG:' "$result_file" | head -1 | sed 's/^BUILD_LOG: //' || true) + if [ -n "$build_log_content" ] && [ "$build_log_content" != " " ]; then + echo -e " ${DIM}Build: ${build_log_content}${NC}" + fi + # Show failing CHECK lines from output file + local output_file="${result_file%.result}.output" + if [ -f "$output_file" ]; then + grep 'CHECK_.*=FAIL' "$output_file" 2>/dev/null | while read -r line; do + echo -e " ${RED}${line}${NC}" + done || true + fi + ;; + *) + log_skip "$label" + ;; + esac + done +} + +print_report() { + echo "" + echo -e 
"${BOLD}════════════════════════════════════════════════════════${NC}" + local total=$((PASS + FAIL + SKIP)) + if [ "$FAIL" -eq 0 ]; then + echo -e "${GREEN}${BOLD} RESULTS: ${PASS} passed, ${FAIL} failed, ${SKIP} skipped (${total} total)${NC}" + else + echo -e "${RED}${BOLD} RESULTS: ${PASS} passed, ${FAIL} failed, ${SKIP} skipped (${total} total)${NC}" + fi + echo -e "${BOLD}════════════════════════════════════════════════════════${NC}" + + if [ ${#FAILURES[@]} -gt 0 ]; then + echo "" + echo -e "${RED}${BOLD}Failed tests:${NC}" + for f in "${FAILURES[@]}"; do + echo -e " ${RED}• ${f}${NC}" + done + fi + + if [ "$NO_CLEANUP" = true ] && [ -n "$RESULTS_DIR" ] && [ -d "$RESULTS_DIR" ]; then + echo "" + echo -e " ${DIM}Results preserved: ${RESULTS_DIR}${NC}" + fi +} + +# ============================================================================= +# Docker tests orchestrator +# ============================================================================= + +run_docker_tests() { + # Check Docker is available + if ! command -v docker > /dev/null 2>&1; then + log_skip "Docker not installed" + return + fi + + if ! 
docker info > /dev/null 2>&1; then + log_skip "Docker daemon not running" + return + fi + + # Create results directory (needed by build phase for logs) + # Use a known path for CI artifact upload when --no-cleanup + if [ "$NO_CLEANUP" = true ]; then + RESULTS_DIR="$PROJECT_ROOT/test-results-linux" + rm -rf "$RESULTS_DIR" + mkdir -p "$RESULTS_DIR" + else + RESULTS_DIR=$(mktemp -d) + fi + + # Build binaries + build_all_targets + + log_header "Phase 3: Docker E2E Tests" + log_info "Results dir: ${RESULTS_DIR}" + + # Run tests in parallel + launch_parallel_tests + + # Collect and display results + collect_test_results +} + +# ============================================================================= +# Main +# ============================================================================= + +main() { + parse_args "$@" + + echo -e "${BOLD}${BLUE}Forge ZSH Setup — E2E Test Suite${NC}" + echo "" + + run_static_checks + + if [ "$MODE" = "quick" ]; then + echo "" + print_report + if [ "$FAIL" -gt 0 ]; then + exit 1 + fi + exit 0 + fi + + run_docker_tests + + echo "" + print_report + + # Cleanup results dir unless --no-cleanup + if [ "$NO_CLEANUP" = false ] && [ -n "$RESULTS_DIR" ] && [ -d "$RESULTS_DIR" ]; then + rm -rf "$RESULTS_DIR" + fi + + if [ "$FAIL" -gt 0 ]; then + exit 1 + fi + exit 0 +} + +main "$@" diff --git a/crates/forge_domain/Cargo.toml b/crates/forge_domain/Cargo.toml index 367ed9e0d6..6d16ef0fbc 100644 --- a/crates/forge_domain/Cargo.toml +++ b/crates/forge_domain/Cargo.toml @@ -25,6 +25,7 @@ tokio-stream.workspace = true uuid.workspace = true tracing.workspace = true url.workspace = true +tempfile.workspace = true merge.workspace = true serde_yml.workspace = true forge_template.workspace = true diff --git a/crates/forge_domain/src/background_process.rs b/crates/forge_domain/src/background_process.rs new file mode 100644 index 0000000000..9251facad4 --- /dev/null +++ b/crates/forge_domain/src/background_process.rs @@ -0,0 +1,19 @@ +use std::path::PathBuf; 
+ +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; + +/// Metadata for a single background process spawned by the shell tool. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct BackgroundProcess { + /// OS process ID. + pub pid: u32, + /// The original command string that was executed. + pub command: String, + /// Working directory where the command was spawned. + pub cwd: PathBuf, + /// Absolute path to the log file capturing stdout/stderr. + pub log_file: PathBuf, + /// When the process was spawned. + pub started_at: DateTime, +} diff --git a/crates/forge_domain/src/lib.rs b/crates/forge_domain/src/lib.rs index 70c543ed8b..78b2eff736 100644 --- a/crates/forge_domain/src/lib.rs +++ b/crates/forge_domain/src/lib.rs @@ -3,6 +3,7 @@ mod agent_definition; mod app_config; mod attachment; mod auth; +mod background_process; mod chat_request; mod chat_response; mod commit_config; @@ -61,6 +62,7 @@ mod xml; pub use agent::*; pub use agent_definition::*; pub use attachment::*; +pub use background_process::*; pub use chat_request::*; pub use chat_response::*; pub use commit_config::*; diff --git a/crates/forge_domain/src/shell.rs b/crates/forge_domain/src/shell.rs index 84df33a2cc..9657f074b4 100644 --- a/crates/forge_domain/src/shell.rs +++ b/crates/forge_domain/src/shell.rs @@ -1,3 +1,5 @@ +use std::path::PathBuf; + /// Output from a command execution #[derive(Debug, Clone)] pub struct CommandOutput { @@ -12,3 +14,20 @@ impl CommandOutput { self.exit_code.is_none_or(|code| code >= 0) } } + +/// Output from a background (detached) command execution. +/// +/// Wraps a `CommandOutput` with the process ID and the `NamedTempFile` handle +/// that owns the log file on disk. Keeping the handle alive prevents the temp +/// file from being deleted. +#[derive(Debug)] +pub struct BackgroundCommandOutput { + /// The original command string that was executed. + pub command: String, + /// OS process ID of the spawned background process. 
+ pub pid: u32, + /// Absolute path to the log file capturing stdout/stderr. + pub log_file: PathBuf, + /// The temp-file handle; dropping it deletes the log from disk. + pub log_handle: tempfile::NamedTempFile, +} diff --git a/crates/forge_domain/src/tools/catalog.rs b/crates/forge_domain/src/tools/catalog.rs index fcfa3ddcf1..d41dc6fedb 100644 --- a/crates/forge_domain/src/tools/catalog.rs +++ b/crates/forge_domain/src/tools/catalog.rs @@ -562,6 +562,15 @@ pub struct Shell { #[serde(default)] #[serde(skip_serializing_if = "Option::is_none")] pub description: Option, + + /// If true, runs the command in the background as a detached process. + /// The command's stdout/stderr are redirected to a temporary log file. + /// The tool returns immediately with the log file path and process ID + /// instead of waiting for the command to complete. + /// Use this for long-running processes like web servers or file watchers. + #[serde(default)] + #[serde(skip_serializing_if = "is_default")] + pub background: bool, } /// Input type for the net fetch tool @@ -1682,6 +1691,7 @@ mod tests { keep_ansi: false, env: None, description: Some("Shows working tree status".to_string()), + background: false, }; let actual = serde_json::to_value(&fixture).unwrap(); @@ -1705,6 +1715,7 @@ mod tests { keep_ansi: false, env: None, description: None, + background: false, }; let actual = serde_json::to_value(&fixture).unwrap(); @@ -1727,6 +1738,7 @@ mod tests { keep_ansi: false, env: None, description: None, + background: false, }; let actual = serde_json::to_value(&fixture).unwrap(); diff --git a/crates/forge_domain/src/tools/definition/snapshots/forge_domain__tools__definition__usage__tests__tool_usage.snap b/crates/forge_domain/src/tools/definition/snapshots/forge_domain__tools__definition__usage__tests__tool_usage.snap index 4c9761e7e6..5e568f0d57 100644 --- a/crates/forge_domain/src/tools/definition/snapshots/forge_domain__tools__definition__usage__tests__tool_usage.snap +++ 
b/crates/forge_domain/src/tools/definition/snapshots/forge_domain__tools__definition__usage__tests__tool_usage.snap @@ -9,7 +9,7 @@ expression: prompt {"name":"remove","description":"Request to remove a file at the specified path. Use when you need to delete an existing file. The path must be absolute. This operation can be undone using the `{{tool_names.undo}}` tool.","arguments":{"path":{"description":"The path of the file to remove (absolute path required)","type":"string","is_required":true}}} {"name":"patch","description":"Performs exact string replacements in files.\nUsage:\n- You must use your `{{tool_names.read}}` tool at least once in the conversation before editing. This tool will error if you attempt an edit without reading the file. \n- When editing text from `{{tool_names.read}}` tool output, ensure you preserve the exact indentation (tabs/spaces) as it appears AFTER the line number prefix. The line number prefix format is: 'line_number:'. Everything after that line_number: is the actual file content to match. Never include any part of the line number prefix in the old_string or new_string.\n- ALWAYS prefer editing existing files in the codebase. NEVER write new files unless explicitly required.\n- Only use emojis if the user explicitly requests it. Avoid adding emojis to files unless asked.\n- The edit will FAIL if `old_string` is not unique in the file. Either provide a larger string with more surrounding context to make it unique or use `replace_all` to change every instance of `old_string`. \n- Use `replace_all` for replacing and renaming strings across the file. 
This parameter is useful if you want to rename a variable for instance.","arguments":{"file_path":{"description":"The absolute path to the file to modify","type":"string","is_required":true},"new_string":{"description":"The text to replace it with (must be different from old_string)","type":"string","is_required":true},"old_string":{"description":"The text to replace","type":"string","is_required":true},"replace_all":{"description":"Replace all occurrences of old_string (default false)","type":"boolean","is_required":false}}} {"name":"undo","description":"Reverts the most recent file operation (create/modify/delete) on a specific file. Use this tool when you need to recover from incorrect file changes or if a revert is requested by the user.","arguments":{"path":{"description":"The absolute path of the file to revert to its previous state.","type":"string","is_required":true}}} -{"name":"shell","description":"Executes shell commands. The `cwd` parameter sets the working directory for command execution. If not specified, defaults to `{{env.cwd}}`.\n\nCRITICAL: Do NOT use `cd` commands in the command string. This is FORBIDDEN. Always use the `cwd` parameter to set the working directory instead. Any use of `cd` in the command is redundant, incorrect, and violates the tool contract.\n\nIMPORTANT: This tool is for terminal operations like git, npm, docker, etc. DO NOT use it for file operations (reading, writing, editing, searching, finding files) - use the specialized tools for this instead.\n\nBefore executing the command, please follow these steps:\n\n1. Directory Verification:\n - If the command will create new directories or files, first use `shell` with `ls` to verify the parent directory exists and is the correct location\n - For example, before running \"mkdir foo/bar\", first use `ls foo` to check that \"foo\" exists and is the intended parent directory\n\n2. 
Command Execution:\n - Always quote file paths that contain spaces with double quotes (e.g., python \"path with spaces/script.py\")\n - Examples of proper quoting:\n - mkdir \"/Users/name/My Documents\" (correct)\n - mkdir /Users/name/My Documents (incorrect - will fail)\n - python \"/path/with spaces/script.py\" (correct)\n - python /path/with spaces/script.py (incorrect - will fail)\n - After ensuring proper quoting, execute the command.\n - Capture the output of the command.\n\nUsage notes:\n - The command argument is required.\n - It is very helpful if you write a clear, concise description of what this command does in 5-10 words.\n - If the output exceeds {{env.stdoutMaxPrefixLength}} prefix lines or {{env.stdoutMaxSuffixLength}} suffix lines, or if a line exceeds {{env.stdoutMaxLineLength}} characters, it will be truncated and the full output will be written to a temporary file. You can use read with start_line/end_line to read specific sections or fs_search to search the full content. Because of this, you do NOT need to use `head`, `tail`, or other truncation commands to limit output - just run the command directly.\n - Avoid using {{tool_names.shell}} with the `find`, `grep`, `cat`, `head`, `tail`, `sed`, `awk`, or `echo` commands, unless explicitly instructed or when these commands are truly necessary for the task. Instead, always prefer using the dedicated tools for these commands:\n - File search: Use `{{tool_names.fs_search}}` (NOT find or ls)\n - Content search: Use `{{tool_names.fs_search}}` with regex (NOT grep or rg)\n - Read files: Use `{{tool_names.read}}` (NOT cat/head/tail)\n - Edit files: Use `{{tool_names.patch}}`(NOT sed/awk)\n - Write files: Use `{{tool_names.write}}` (NOT echo >/cat < && `. 
Use the `cwd` parameter to change directories instead.\n\nGood examples:\n - With explicit cwd: cwd=\"/foo/bar\" with command: pytest tests\n\nBad example:\n cd /foo/bar && pytest tests\n\nReturns complete output including stdout, stderr, and exit code for diagnostic purposes.","arguments":{"command":{"description":"The shell command to execute.","type":"string","is_required":true},"cwd":{"description":"The working directory where the command should be executed.\nIf not specified, defaults to the current working directory from the\nenvironment.","type":"string","is_required":false},"description":{"description":"Clear, concise description of what this command does. Recommended to be\n5-10 words for simple commands. For complex commands with pipes or\nmultiple operations, provide more context. Examples: \"Lists files in\ncurrent directory\", \"Installs package dependencies\", \"Compiles Rust\nproject with release optimizations\".","type":"string","is_required":false},"env":{"description":"Environment variable names to pass to command execution (e.g., [\"PATH\",\n\"HOME\", \"USER\"]). The system automatically reads the specified\nvalues and applies them during command execution.","type":"array","is_required":false},"keep_ansi":{"description":"Whether to preserve ANSI escape codes in the output.\nIf true, ANSI escape codes will be preserved in the output.\nIf false (default), ANSI escape codes will be stripped from the output.","type":"boolean","is_required":false}}} +{"name":"shell","description":"Executes shell commands. The `cwd` parameter sets the working directory for command execution. If not specified, defaults to `{{env.cwd}}`.\n\nCRITICAL: Do NOT use `cd` commands in the command string. This is FORBIDDEN. Always use the `cwd` parameter to set the working directory instead. Any use of `cd` in the command is redundant, incorrect, and violates the tool contract.\n\nIMPORTANT: This tool is for terminal operations like git, npm, docker, etc. 
DO NOT use it for file operations (reading, writing, editing, searching, finding files) - use the specialized tools for this instead.\n\nBefore executing the command, please follow these steps:\n\n1. Directory Verification:\n - If the command will create new directories or files, first use `shell` with `ls` to verify the parent directory exists and is the correct location\n - For example, before running \"mkdir foo/bar\", first use `ls foo` to check that \"foo\" exists and is the intended parent directory\n\n2. Command Execution:\n - Always quote file paths that contain spaces with double quotes (e.g., python \"path with spaces/script.py\")\n - Examples of proper quoting:\n - mkdir \"/Users/name/My Documents\" (correct)\n - mkdir /Users/name/My Documents (incorrect - will fail)\n - python \"/path/with spaces/script.py\" (correct)\n - python /path/with spaces/script.py (incorrect - will fail)\n - After ensuring proper quoting, execute the command.\n - Capture the output of the command.\n\nUsage notes:\n - The command argument is required.\n - It is very helpful if you write a clear, concise description of what this command does in 5-10 words.\n - If the output exceeds {{env.stdoutMaxPrefixLength}} prefix lines or {{env.stdoutMaxSuffixLength}} suffix lines, or if a line exceeds {{env.stdoutMaxLineLength}} characters, it will be truncated and the full output will be written to a temporary file. You can use read with start_line/end_line to read specific sections or fs_search to search the full content. Because of this, you do NOT need to use `head`, `tail`, or other truncation commands to limit output - just run the command directly.\n - Avoid using {{tool_names.shell}} with the `find`, `grep`, `cat`, `head`, `tail`, `sed`, `awk`, or `echo` commands, unless explicitly instructed or when these commands are truly necessary for the task. 
Instead, always prefer using the dedicated tools for these commands:\n - File search: Use `{{tool_names.fs_search}}` (NOT find or ls)\n - Content search: Use `{{tool_names.fs_search}}` with regex (NOT grep or rg)\n - Read files: Use `{{tool_names.read}}` (NOT cat/head/tail)\n - Edit files: Use `{{tool_names.patch}}`(NOT sed/awk)\n - Write files: Use `{{tool_names.write}}` (NOT echo >/cat < && `. Use the `cwd` parameter to change directories instead.\n\nGood examples:\n - With explicit cwd: cwd=\"/foo/bar\" with command: pytest tests\n\nBad example:\n cd /foo/bar && pytest tests\n\nBackground execution:\n - Set `background: true` to run long-lived processes (web servers, file watchers, dev servers) as detached background jobs.\n - The command returns immediately with a **log file path** and **process ID (PID)** instead of waiting for completion.\n - The process continues running independently even after the session ends.\n - CRITICAL: Always remember the log file path returned by background commands. You will need it to check output, diagnose errors, or verify the process is working. 
After compaction the log file path will still be available in the summary.\n - Use `read` on the log file path to inspect process output at any time.\n - Examples of when to use background:\n - Starting a web server: `npm start`, `python manage.py runserver`, `cargo run --bin server`\n - Starting a file watcher: `npm run watch`, `cargo watch`\n - Starting any process that runs indefinitely and should not block your workflow\n\nReturns complete output including stdout, stderr, and exit code for diagnostic purposes.","arguments":{"background":{"description":"If true, runs the command in the background as a detached process.\nThe command's stdout/stderr are redirected to a temporary log file.\nThe tool returns immediately with the log file path and process ID\ninstead of waiting for the command to complete.\nUse this for long-running processes like web servers or file watchers.","type":"boolean","is_required":false},"command":{"description":"The shell command to execute.","type":"string","is_required":true},"cwd":{"description":"The working directory where the command should be executed.\nIf not specified, defaults to the current working directory from the\nenvironment.","type":"string","is_required":false},"description":{"description":"Clear, concise description of what this command does. Recommended to be\n5-10 words for simple commands. For complex commands with pipes or\nmultiple operations, provide more context. Examples: \"Lists files in\ncurrent directory\", \"Installs package dependencies\", \"Compiles Rust\nproject with release optimizations\".","type":"string","is_required":false},"env":{"description":"Environment variable names to pass to command execution (e.g., [\"PATH\",\n\"HOME\", \"USER\"]). 
The system automatically reads the specified\nvalues and applies them during command execution.","type":"array","is_required":false},"keep_ansi":{"description":"Whether to preserve ANSI escape codes in the output.\nIf true, ANSI escape codes will be preserved in the output.\nIf false (default), ANSI escape codes will be stripped from the output.","type":"boolean","is_required":false}}} {"name":"fetch","description":"Retrieves content from URLs as markdown or raw text. Enables access to current online information including websites, APIs and documentation. Use for obtaining up-to-date information beyond training data, verifying facts, or retrieving specific online content. Handles HTTP/HTTPS and converts HTML to readable markdown by default. Cannot access private/restricted resources requiring authentication. Respects robots.txt and may be blocked by anti-scraping measures. For large pages, returns the first 40,000 characters and stores the complete content in a temporary file for subsequent access.","arguments":{"raw":{"description":"Get raw content without any markdown conversion (default: false)","type":"boolean","is_required":false},"url":{"description":"URL to fetch","type":"string","is_required":true}}} {"name":"followup","description":"Use this tool when you encounter ambiguities, need clarification, or require more details to proceed effectively. 
Use this tool judiciously to maintain a balance between gathering necessary information and avoiding excessive back-and-forth.","arguments":{"multiple":{"description":"If true, allows selecting multiple options; if false (default), only one\noption can be selected","type":"boolean","is_required":false},"option1":{"description":"First option to choose from","type":"string","is_required":false},"option2":{"description":"Second option to choose from","type":"string","is_required":false},"option3":{"description":"Third option to choose from","type":"string","is_required":false},"option4":{"description":"Fourth option to choose from","type":"string","is_required":false},"option5":{"description":"Fifth option to choose from","type":"string","is_required":false},"question":{"description":"Question to ask the user","type":"string","is_required":true}}} {"name":"plan","description":"Creates a new plan file with the specified name, version, and content. Use this tool to create structured project plans, task breakdowns, or implementation strategies that can be tracked and referenced throughout development sessions.","arguments":{"content":{"description":"The content to write to the plan file. 
This should be the complete\nplan content in markdown format.","type":"string","is_required":true},"plan_name":{"description":"The name of the plan (will be used in the filename)","type":"string","is_required":true},"version":{"description":"The version of the plan (e.g., \"v1\", \"v2\", \"1.0\")","type":"string","is_required":true}}} diff --git a/crates/forge_domain/src/tools/descriptions/shell.md b/crates/forge_domain/src/tools/descriptions/shell.md index 24d62c33af..e508437cf8 100644 --- a/crates/forge_domain/src/tools/descriptions/shell.md +++ b/crates/forge_domain/src/tools/descriptions/shell.md @@ -44,4 +44,15 @@ Good examples: Bad example: cd /foo/bar && pytest tests +Background execution: + - Set `background: true` to run long-lived processes (web servers, file watchers, dev servers) as detached background jobs. + - The command returns immediately with a **log file path** and **process ID (PID)** instead of waiting for completion. + - The process continues running independently even after the session ends. + - CRITICAL: Always remember the log file path returned by background commands. You will need it to check output, diagnose errors, or verify the process is working. After compaction the log file path will still be available in the summary. + - Use `read` on the log file path to inspect process output at any time. + - Examples of when to use background: + - Starting a web server: `npm start`, `python manage.py runserver`, `cargo run --bin server` + - Starting a file watcher: `npm run watch`, `cargo watch` + - Starting any process that runs indefinitely and should not block your workflow + Returns complete output including stdout, stderr, and exit code for diagnostic purposes. 
\ No newline at end of file diff --git a/crates/forge_domain/src/tools/snapshots/forge_domain__tools__catalog__tests__tool_definition_json.snap b/crates/forge_domain/src/tools/snapshots/forge_domain__tools__catalog__tests__tool_definition_json.snap index eb4edfe04c..e313b4fd00 100644 --- a/crates/forge_domain/src/tools/snapshots/forge_domain__tools__catalog__tests__tool_definition_json.snap +++ b/crates/forge_domain/src/tools/snapshots/forge_domain__tools__catalog__tests__tool_definition_json.snap @@ -232,6 +232,10 @@ expression: tools "title": "Shell", "type": "object", "properties": { + "background": { + "description": "If true, runs the command in the background as a detached process.\nThe command's stdout/stderr are redirected to a temporary log file.\nThe tool returns immediately with the log file path and process ID\ninstead of waiting for the command to complete.\nUse this for long-running processes like web servers or file watchers.", + "type": "boolean" + }, "command": { "description": "The shell command to execute.", "type": "string" diff --git a/crates/forge_infra/src/executor.rs b/crates/forge_infra/src/executor.rs index 61da083493..f14a22b333 100644 --- a/crates/forge_infra/src/executor.rs +++ b/crates/forge_infra/src/executor.rs @@ -3,7 +3,9 @@ use std::path::{Path, PathBuf}; use std::sync::Arc; use forge_app::CommandInfra; -use forge_domain::{CommandOutput, ConsoleWriter as OutputPrinterTrait, Environment}; +use forge_domain::{ + BackgroundCommandOutput, CommandOutput, ConsoleWriter as OutputPrinterTrait, Environment, +}; use tokio::io::AsyncReadExt; use tokio::process::Command; use tokio::sync::Mutex; @@ -234,6 +236,114 @@ impl CommandInfra for ForgeCommandExecutorService { Ok(prepared_command.spawn()?.wait().await?) 
} + + async fn execute_command_background( + &self, + command: String, + working_dir: PathBuf, + env_vars: Option>, + ) -> anyhow::Result { + // Create a temp log file that will capture stdout/stderr + let log_file = tempfile::Builder::new() + .prefix("forge-bg-") + .suffix(".log") + .tempfile() + .map_err(|e| anyhow::anyhow!("Failed to create background log file: {e}"))?; + let log_path = log_file.path().to_path_buf(); + let log_path_str = log_path.display().to_string(); + + tracing::info!( + command = %command, + log_path = %log_path_str, + "Spawning background process" + ); + + // NOTE: We intentionally do NOT acquire self.ready here. + // Background spawns should not block foreground commands. + + let pid = spawn_background_process( + &command, + &working_dir, + &log_path_str, + &self.env.shell, + self.restricted, + env_vars, + ) + .await?; + + tracing::info!(pid = pid, log_path = %log_path_str, "Background process spawned"); + + Ok(BackgroundCommandOutput { command, pid, log_file: log_path, log_handle: log_file }) + } +} + +/// Spawn a background process, returning its PID. 
+async fn spawn_background_process( + command: &str, + working_dir: &Path, + log_path: &str, + shell: &str, + restricted: bool, + env_vars: Option>, +) -> anyhow::Result { + let is_windows = cfg!(target_os = "windows"); + + let mut cmd = if is_windows { + let bg_command = format!("{command} > \"{log_path}\" 2>&1"); + let mut cmd = Command::new("cmd"); + cmd.args(["/C", "start", "/b", "cmd", "/C", &bg_command]); + cmd + } else { + let shell_bin = if restricted { "rbash" } else { shell }; + let bg_command = format!("nohup {command} > {log_path} 2>&1 & echo $!"); + let mut cmd = Command::new(shell_bin); + cmd.arg("-c").arg(&bg_command); + cmd + }; + + cmd.current_dir(working_dir) + .stdin(std::process::Stdio::null()) + .stdout(std::process::Stdio::piped()) + .stderr(std::process::Stdio::piped()) + .kill_on_drop(false); + + #[cfg(windows)] + { + use std::os::windows::process::CommandExt; + cmd.creation_flags(0x00000010); // DETACHED_PROCESS + } + + if let Some(vars) = env_vars { + for var in vars { + if let Ok(value) = std::env::var(&var) { + cmd.env(&var, value); + } + } + } + + if is_windows { + let child = cmd.spawn()?; + child + .id() + .ok_or_else(|| anyhow::anyhow!("Failed to get PID of background process on Windows")) + } else { + let output = cmd.output().await?; + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + anyhow::bail!("Failed to spawn background process: {stderr}"); + } + let stdout = String::from_utf8_lossy(&output.stdout); + stdout + .trim() + .lines() + .last() + .unwrap_or("") + .trim() + .parse() + .map_err(|e| { + anyhow::anyhow!("Failed to parse PID from shell output '{stdout}': {e}") + }) + } } #[cfg(test)] diff --git a/crates/forge_infra/src/forge_infra.rs b/crates/forge_infra/src/forge_infra.rs index 70c2ef62d1..60fe2beb3f 100644 --- a/crates/forge_infra/src/forge_infra.rs +++ b/crates/forge_infra/src/forge_infra.rs @@ -10,8 +10,8 @@ use forge_app::{ StrategyFactory, UserInfra, WalkerInfra, }; use 
forge_domain::{ - AuthMethod, CommandOutput, Environment, FileInfo as FileInfoData, McpServerConfig, ProviderId, - URLParam, + AuthMethod, BackgroundCommandOutput, CommandOutput, Environment, + FileInfo as FileInfoData, McpServerConfig, ProviderId, URLParam, }; use reqwest::header::HeaderMap; use reqwest::{Response, Url}; @@ -210,6 +210,17 @@ impl CommandInfra for ForgeInfra { .execute_command_raw(command, working_dir, env_vars) .await } + + async fn execute_command_background( + &self, + command: String, + working_dir: PathBuf, + env_vars: Option>, + ) -> anyhow::Result { + self.command_executor_service + .execute_command_background(command, working_dir, env_vars) + .await + } } #[async_trait::async_trait] diff --git a/crates/forge_main/Cargo.toml b/crates/forge_main/Cargo.toml index 4e5aa0e052..31a2d868a2 100644 --- a/crates/forge_main/Cargo.toml +++ b/crates/forge_main/Cargo.toml @@ -64,6 +64,10 @@ forge_markdown_stream.workspace = true strip-ansi-escapes.workspace = true terminal_size = "0.4" rustls.workspace = true +reqwest.workspace = true +regex.workspace = true +async-trait.workspace = true +tempfile.workspace = true [target.'cfg(not(target_os = "android"))'.dependencies] arboard = "3.4" @@ -72,7 +76,6 @@ arboard = "3.4" tokio = { workspace = true, features = ["macros", "rt", "time", "test-util"] } insta.workspace = true pretty_assertions.workspace = true -tempfile.workspace = true serial_test = "3.4" fake = { version = "4.4.0", features = ["derive"] } forge_domain = { path = "../forge_domain" } diff --git a/crates/forge_main/src/built_in_commands.json b/crates/forge_main/src/built_in_commands.json index e6cd3c3ca3..efc4cda46d 100644 --- a/crates/forge_main/src/built_in_commands.json +++ b/crates/forge_main/src/built_in_commands.json @@ -114,5 +114,9 @@ { "command": "setup", "description": "Setup zsh integration by updating .zshrc" + }, + { + "command": "processes", + "description": "List and manage background processes [alias: ps]" } ] diff --git 
a/crates/forge_main/src/cli.rs b/crates/forge_main/src/cli.rs index 5f095e86d3..9144aea0a5 100644 --- a/crates/forge_main/src/cli.rs +++ b/crates/forge_main/src/cli.rs @@ -175,6 +175,21 @@ pub enum TopLevelCommand { /// Run diagnostics on shell environment (alias for `zsh doctor`). Doctor, + + /// List and manage background processes spawned during shell sessions. + Processes { + /// Output in machine-readable format (tab-separated). + #[arg(long)] + porcelain: bool, + + /// Kill the process with the given PID. + #[arg(long)] + kill: Option, + + /// When used with --kill, also delete the log file. + #[arg(long)] + delete_log: bool, + }, } /// Command group for custom command management. @@ -405,7 +420,12 @@ pub enum ZshCommandGroup { Rprompt, /// Setup zsh integration by updating .zshrc with plugin and theme - Setup, + Setup { + /// Skip interactive prompts (Nerd Font check, editor selection) and use + /// defaults. Useful for scripted installations and CI. + #[arg(long, short = 'y')] + non_interactive: bool, + }, /// Show keyboard shortcuts for ZSH line editor Keyboard, @@ -1716,7 +1736,7 @@ mod tests { let fixture = Cli::parse_from(["forge", "zsh", "setup"]); let actual = match fixture.subcommands { Some(TopLevelCommand::Zsh(terminal)) => { - matches!(terminal, ZshCommandGroup::Setup) + matches!(terminal, ZshCommandGroup::Setup { .. 
}) } _ => false, }; diff --git a/crates/forge_main/src/model.rs b/crates/forge_main/src/model.rs index 62025782aa..2f48cac41d 100644 --- a/crates/forge_main/src/model.rs +++ b/crates/forge_main/src/model.rs @@ -282,6 +282,7 @@ impl ForgeCommandManager { Ok(SlashCommand::Commit { max_diff_size }) } "/index" => Ok(SlashCommand::Index), + "/processes" | "/ps" => Ok(SlashCommand::Processes), text => { let parts = text.split_ascii_whitespace().collect::>(); @@ -437,6 +438,10 @@ pub enum SlashCommand { /// Index the current workspace for semantic code search #[strum(props(usage = "Index the current workspace for semantic search"))] Index, + + /// List and manage background processes spawned during this session + #[strum(props(usage = "List and manage background processes [alias: ps]"))] + Processes, } impl SlashCommand { @@ -469,6 +474,7 @@ impl SlashCommand { SlashCommand::Delete => "delete", SlashCommand::AgentSwitch(agent_id) => agent_id, SlashCommand::Index => "index", + SlashCommand::Processes => "processes", } } diff --git a/crates/forge_main/src/ui.rs b/crates/forge_main/src/ui.rs index baa66826e6..49821d593b 100644 --- a/crates/forge_main/src/ui.rs +++ b/crates/forge_main/src/ui.rs @@ -45,8 +45,8 @@ use crate::title_display::TitleDisplayExt; use crate::tools_display::format_tools; use crate::update::on_update; use crate::utils::humanize_time; -use crate::zsh::ZshRPrompt; -use crate::{TRACKER, banner, tracker}; +use crate::zsh::{FzfStatus, Group, Installation, OmzStatus, Platform, ZshRPrompt, ZshStatus}; +use crate::{TRACKER, banner, tracker, zsh}; // File-specific constants const MISSING_AGENT_TITLE: &str = ""; @@ -434,8 +434,8 @@ impl A + Send + Sync> UI { } return Ok(()); } - crate::cli::ZshCommandGroup::Setup => { - self.on_zsh_setup().await?; + crate::cli::ZshCommandGroup::Setup { non_interactive } => { + self.on_zsh_setup(non_interactive).await?; } crate::cli::ZshCommandGroup::Keyboard => { self.on_zsh_keyboard().await?; @@ -666,13 +666,17 @@ impl A + Send + 
Sync> UI { return Ok(()); } TopLevelCommand::Setup => { - self.on_zsh_setup().await?; + self.on_zsh_setup(false).await?; return Ok(()); } TopLevelCommand::Doctor => { self.on_zsh_doctor().await?; return Ok(()); } + TopLevelCommand::Processes { porcelain, kill, delete_log } => { + self.on_processes_cli(porcelain, kill, delete_log).await?; + return Ok(()); + } } Ok(()) } @@ -1591,86 +1595,204 @@ impl A + Send + Sync> UI { } /// Setup ZSH integration by updating .zshrc - async fn on_zsh_setup(&mut self) -> anyhow::Result<()> { - // Check nerd font support - println!(); - println!( - "{} {} {}", - "󱙺".bold(), - "FORGE 33.0k".bold(), - " tonic-1.0".cyan() - ); + /// Sets up ZSH integration including dependency installation and `.zshrc` + /// configuration. + /// + /// Orchestrates the full setup flow: + /// 1. Prerequisite check (git) + /// 2. Parallel dependency detection (zsh, Oh My Zsh, plugins, fzf) + /// 3. Installation of missing dependencies (respecting dependency order) + /// 4. Windows bashrc auto-start configuration + /// 5. Nerd Font check and editor selection (interactive, skipped if + /// `non_interactive`) + /// 6. `.zshrc` configuration via `setup_zsh_integration()` + /// 7. Doctor verification and summary + /// + /// # Arguments + /// + /// * `non_interactive` - When true, skips Nerd Font and editor prompts, + /// using defaults (nerd fonts enabled, no editor override). + async fn on_zsh_setup(&mut self, non_interactive: bool) -> anyhow::Result<()> { + // Track whether setup completed without any errors + let mut setup_fully_successful = true; + + // Step A: Prerequisite check + self.spinner.start(Some("Checking prerequisites"))?; + let git_ok = crate::zsh::detect_git().await; + self.spinner.stop(None)?; - let can_see_nerd_fonts = - ForgeWidget::confirm("Can you see all the icons clearly without any overlap?") - .with_default(true) - .prompt()?; + if !git_ok { + self.writeln_title(TitleFormat::error( + "git is required but not found. 
Install git and re-run forge zsh setup", + ))?; + return Ok(()); + } - let disable_nerd_font = match can_see_nerd_fonts { - Some(true) => { - println!(); - false - } - Some(false) => { - println!(); - println!(" {} Nerd Fonts will be disabled", "⚠".yellow()); - println!(); - println!(" You can enable them later by:"); - println!( - " 1. Installing a Nerd Font from: {}", - "https://www.nerdfonts.com/".dimmed() - ); - println!(" 2. Configuring your terminal to use a Nerd Font"); - println!( - " 3. Removing {} from your ~/.zshrc", - "NERD_FONT=0".dimmed() - ); - println!(); - true - } - None => { - // User interrupted, default to not disabling - println!(); - false - } - }; + // Step B: Detect all dependencies in parallel + self.spinner.start(Some("Detecting environment"))?; + let platform = zsh::detect_platform(); + let deps = zsh::detect_all_dependencies().await; + let sudo = zsh::detect_sudo(platform).await; + self.spinner.stop(None)?; - // Ask about editor preference - let editor_options = vec![ - "Use system default ($EDITOR)", - "VS Code (code --wait)", - "Vim", - "Neovim (nvim)", - "Nano", - "Emacs", - "Sublime Text (subl --wait)", - "Skip - I'll configure it later", - ]; - - let selected_editor = ForgeWidget::select( - "Which editor would you like to use for editing prompts?", - editor_options, + // Display detection results + self.log_dependency_status(&deps)?; + println!(); + + // Step C–E: Install missing dependencies + Windows bash_profile + let needs_install = !deps.all_installed() || deps.needs_tools(); + if needs_install { + let missing = deps.missing_items(); + self.writeln_title(TitleFormat::info("The following will be installed:"))?; + missing.into_iter().for_each(|item| { + println!(" {} ({})", item.to_string().dimmed(), item.kind()); + }); + println!(); + } else { + self.writeln_title(TitleFormat::info("All dependencies already installed"))?; + println!(); + } + + let install_failed = Arc::new(std::sync::atomic::AtomicBool::new(false)); + let 
fail_flag = install_failed.clone(); + + let bp_ok_sp = self.spinner.clone(); + let bp_err_sp = self.spinner.clone(); + let bp_success = Arc::new(std::sync::atomic::AtomicBool::new(true)); + let bp_flag = bp_success.clone(); + + Group::when( + needs_install, + self.setup_install_zsh(&deps, platform, sudo) + .then(self.setup_install_omz(&deps)) + .then(self.setup_install_plugins(&deps)) + .then(self.setup_install_tools(&deps, platform, sudo)) + .notify_err(move |e| { + tracing::error!(error = ?e, "Installation failed"); + fail_flag.store(true, std::sync::atomic::Ordering::Relaxed); + Ok(()) + }), ) - .prompt()?; + .then(Group::when( + platform == Platform::Windows + && !install_failed.load(std::sync::atomic::Ordering::Relaxed), + self.setup_bash_profile() + .notify_ok(move || { + bp_ok_sp.stop(None)?; + bp_ok_sp.write_ln(format!( + " {} Configured ~/.bash_profile to auto-start zsh", + "[OK]".green() + )) + }) + .notify_err(move |e| { + let _ = bp_err_sp.stop(None); + let _ = bp_err_sp.write_ln( + TitleFormat::error(format!("Failed to configure bash_profile: {}", e)) + .display(), + ); + bp_flag.store(false, std::sync::atomic::Ordering::Relaxed); + Ok(()) + }), + )) + .install() + .await?; - let forge_editor = match selected_editor { - Some("Use system default ($EDITOR)") => None, - Some("VS Code (code --wait)") => Some("code --wait"), - Some("Vim") => Some("vim"), - Some("Neovim (nvim)") => Some("nvim"), - Some("Nano") => Some("nano"), - Some("Emacs") => Some("emacs"), - Some("Sublime Text (subl --wait)") => Some("subl --wait"), - Some("Skip - I'll configure it later") => None, - _ => None, + if install_failed.load(std::sync::atomic::Ordering::Relaxed) { + return Ok(()); + } + if !bp_success.load(std::sync::atomic::Ordering::Relaxed) { + setup_fully_successful = false; + } + if needs_install { + println!(); + } + + // Step F & G: Nerd Font check and Editor selection + let (disable_nerd_font, forge_editor) = if non_interactive { + // Non-interactive mode: use safe 
defaults + (false, None) + } else { + // Step F: Nerd Font check + println!(); + println!( + "{} {} {}", + "󱙺".bold(), + "FORGE 33.0k".bold(), + " tonic-1.0".cyan() + ); + + let can_see_nerd_fonts = + ForgeWidget::confirm("Can you see all the icons clearly without any overlap?") + .with_default(true) + .prompt()?; + + let disable_nerd_font = match can_see_nerd_fonts { + Some(true) => { + println!(); + false + } + Some(false) => { + println!(); + println!(" {} Nerd Fonts will be disabled", "⚠".yellow()); + println!(); + println!(" You can enable them later by:"); + println!( + " 1. Installing a Nerd Font from: {}", + "https://www.nerdfonts.com/".dimmed() + ); + println!(" 2. Configuring your terminal to use a Nerd Font"); + println!( + " 3. Removing {} from your ~/.zshrc", + "NERD_FONT=0".dimmed() + ); + println!(); + true + } + None => { + // User interrupted, default to not disabling + println!(); + false + } + }; + + // Step G: Editor selection + let editor_options = vec![ + "Use system default ($EDITOR)", + "VS Code (code --wait)", + "Vim", + "Neovim (nvim)", + "Nano", + "Emacs", + "Sublime Text (subl --wait)", + "Skip - I'll configure it later", + ]; + + let selected_editor = ForgeWidget::select( + "Which editor would you like to use for editing prompts?", + editor_options, + ) + .prompt()?; + + let forge_editor = match selected_editor { + Some("Use system default ($EDITOR)") => None, + Some("VS Code (code --wait)") => Some("code --wait"), + Some("Vim") => Some("vim"), + Some("Neovim (nvim)") => Some("nvim"), + Some("Nano") => Some("nano"), + Some("Emacs") => Some("emacs"), + Some("Sublime Text (subl --wait)") => Some("subl --wait"), + Some("Skip - I'll configure it later") => None, + _ => None, + }; + + (disable_nerd_font, forge_editor) }; - // Setup ZSH integration with nerd font and editor configuration + // Step H: Configure .zshrc via setup_zsh_integration() (always runs) self.spinner.start(Some("Configuring ZSH"))?; let result = 
crate::zsh::setup_zsh_integration(disable_nerd_font, forge_editor)?; self.spinner.stop(None)?; - // Log backup creation if one was made if let Some(backup_path) = result.backup_path { self.writeln_title(TitleFormat::debug(format!( "backup created at {}", @@ -1680,20 +1802,381 @@ impl A + Send + Sync> UI { self.writeln_title(TitleFormat::info(result.message))?; + // Step I: Run doctor (don't bail on failure — still show summary) self.writeln_title(TitleFormat::debug("running forge zsh doctor"))?; println!(); - let doctor_result = self.on_zsh_doctor().await; + match self.on_zsh_doctor().await { + Ok(()) => { + self.writeln_title(TitleFormat::warning( + "run `exec zsh` now (or open a new terminal window) to load the updated shell config", + ))?; + self.writeln_title(TitleFormat::warning( + "run `: Hi` after restarting your shell to confirm everything works", + ))?; + } + Err(e) => { + setup_fully_successful = false; + self.writeln_title(TitleFormat::error(format!("forge zsh doctor failed: {e}")))?; + } + } - if doctor_result.is_ok() { - self.writeln_title(TitleFormat::warning( - "run `exec zsh` now (or open a new terminal window) to load the updated shell config", + // Step J: Change default shell (if not already zsh) + if platform != Platform::Windows { + let current_shell = std::env::var("SHELL").unwrap_or_default(); + if !current_shell.contains("zsh") { + // Check if chsh is available (use POSIX command -v, not which) + let chsh_available = zsh::resolve_command_path("chsh").await.is_some(); + + if chsh_available { + let should_change_shell = if non_interactive { + // In non-interactive mode, default to yes + true + } else { + // Interactive prompt + println!(); + ForgeWidget::confirm("Would you like to make zsh your default shell?") + .with_default(true) + .prompt()? 
+ .unwrap_or(false) + }; + + if should_change_shell { + // Find zsh path using POSIX command -v + if let Some(zsh_path) = zsh::resolve_command_path("zsh").await { + // Check if we're running as root (chsh won't need password) + let is_root = std::env::var("USER").unwrap_or_default() == "root" + || std::env::var("EUID").unwrap_or_default() == "0"; + + // Only try chsh if we're root or in an interactive terminal + // (non-root users need password which requires TTY) + let can_run_chsh = is_root || !non_interactive; + + if can_run_chsh { + // Try to run chsh + self.spinner.start(Some("Setting zsh as default shell"))?; + let chsh_result = tokio::process::Command::new("chsh") + .args(["-s", &zsh_path]) + .status() + .await; + self.spinner.stop(None)?; + + match chsh_result { + Ok(status) if status.success() => { + self.writeln_title(TitleFormat::info( + "zsh is now your default shell", + ))?; + } + Ok(_) => { + setup_fully_successful = false; + self.writeln_title(TitleFormat::warning( + "Failed to set default shell. You may need to run: chsh -s $(command -v zsh)", + ))?; + } + Err(e) => { + setup_fully_successful = false; + self.writeln_title(TitleFormat::warning(format!( + "Failed to set default shell: {}", + e + )))?; + self.writeln_title(TitleFormat::info( + "Run manually: chsh -s $(command -v zsh)", + ))?; + } + } + } else { + // Skip chsh in non-interactive mode for non-root users + self.writeln_title(TitleFormat::info( + "To make zsh your default shell, run: chsh -s $(command -v zsh)", + ))?; + } + } else { + self.writeln_title(TitleFormat::warning( + "Could not find zsh path. Run manually: chsh -s $(command -v zsh)", + ))?; + } + } + } + } + } + + // Step K: Summary + println!(); + if setup_fully_successful { + self.writeln_title(TitleFormat::info( + "Setup complete! 
Open a new Git Bash window to start zsh.", ))?; + } else { self.writeln_title(TitleFormat::warning( - "run `: Hi` after restarting your shell to confirm everything works", + "Setup completed with some errors. Please review the messages above.", ))?; } - doctor_result + Ok(()) + } + + /// Logs the detected status of each zsh setup dependency to the UI. + fn log_dependency_status(&mut self, deps: &zsh::DependencyStatus) -> anyhow::Result<()> { + match &deps.zsh { + ZshStatus::Functional { version, path } => { + self.writeln_title(TitleFormat::info(format!( + "zsh {} found at {}", + version, path + )))?; + } + ZshStatus::Broken { path } => { + self.writeln_title(TitleFormat::info(format!( + "zsh found at {} but modules are broken", + path + )))?; + } + ZshStatus::NotFound => { + self.writeln_title(TitleFormat::info("zsh not found"))?; + } + } + + match &deps.oh_my_zsh { + OmzStatus::Installed => { + self.writeln_title(TitleFormat::info("Oh My Zsh installed"))?; + } + OmzStatus::NotInstalled => { + self.writeln_title(TitleFormat::info("Oh My Zsh not found"))?; + } + } + + if deps.autosuggestions == crate::zsh::PluginStatus::Installed { + self.writeln_title(TitleFormat::info("zsh-autosuggestions installed"))?; + } else { + self.writeln_title(TitleFormat::info("zsh-autosuggestions not found"))?; + } + + if deps.syntax_highlighting == crate::zsh::PluginStatus::Installed { + self.writeln_title(TitleFormat::info("zsh-syntax-highlighting installed"))?; + } else { + self.writeln_title(TitleFormat::info("zsh-syntax-highlighting not found"))?; + } + + match &deps.fzf { + FzfStatus::Found { version, meets_minimum } => { + if *meets_minimum { + self.writeln_title(TitleFormat::info(format!("fzf {} found", version)))?; + } else { + self.writeln_title(TitleFormat::info(format!( + "fzf {} found (outdated, need >= 0.36.0)", + version + )))?; + } + } + FzfStatus::NotFound => { + self.writeln_title(TitleFormat::info("fzf not found"))?; + } + } + + match &deps.bat { + 
crate::zsh::BatStatus::Installed { version, meets_minimum } => { + let status_msg = if *meets_minimum { + format!("bat {} found", version) + } else { + format!("bat {} found (outdated, need >= 0.20.0)", version) + }; + self.writeln_title(TitleFormat::info(status_msg))?; + } + crate::zsh::BatStatus::NotFound => { + self.writeln_title(TitleFormat::info("bat not found"))?; + } + } + + match &deps.fd { + crate::zsh::FdStatus::Installed { version, meets_minimum } => { + let status_msg = if *meets_minimum { + format!("fd {} found", version) + } else { + format!("fd {} found (outdated, need >= 10.0.0)", version) + }; + self.writeln_title(TitleFormat::info(status_msg))?; + } + crate::zsh::FdStatus::NotFound => { + self.writeln_title(TitleFormat::info("fd not found"))?; + } + } + + Ok(()) + } + + /// Builds a group that installs zsh if it is missing or broken. + fn setup_install_zsh( + &self, + deps: &zsh::DependencyStatus, + platform: Platform, + sudo: zsh::SudoCapability, + ) -> Group { + if !deps.needs_zsh() { + return Group::unit(zsh::Noop); + } + let reinstall = matches!(deps.zsh, zsh::ZshStatus::Broken { .. }); + let mut install_zsh = zsh::InstallZsh::new(platform, sudo); + if reinstall { + install_zsh = install_zsh.reinstall(); + } + let sp = self.spinner.clone(); + let sp2 = self.spinner.clone(); + Group::unit(install_zsh) + .notify_ok(move || { + sp.stop(None)?; + sp.write_ln(format!(" {} zsh installed", "[OK]".green())) + }) + .notify_err(move |e| { + let _ = sp2.stop(None); + let _ = sp2.write_ln( + TitleFormat::error(format!( + "Failed to install zsh: {e}. Setup cannot continue." + )) + .display(), + ); + Err(e) + }) + } + + /// Builds a group that installs Oh My Zsh if it is missing. 
+ fn setup_install_omz(&self, deps: &zsh::DependencyStatus) -> Group { + if !deps.needs_omz() { + return Group::unit(zsh::Noop); + } + let sp = self.spinner.clone(); + let sp2 = self.spinner.clone(); + Group::unit(zsh::InstallOhMyZsh::new()) + .notify_ok(move || sp.write_ln(format!(" {} Oh My Zsh installed", "[OK]".green()))) + .notify_err(move |e| { + let _ = sp2.write_ln( + TitleFormat::error(format!( + "Failed to install Oh My Zsh: {e}. Setup cannot continue." + )) + .display(), + ); + Err(e) + }) + } + + /// Builds a group that installs plugins (autosuggestions + + /// syntax-highlighting) in parallel. + fn setup_install_plugins(&self, deps: &zsh::DependencyStatus) -> Group { + if !deps.needs_plugins() { + return Group::unit(zsh::Noop); + } + + let mut group: Option = None; + + if deps.autosuggestions == crate::zsh::PluginStatus::NotInstalled { + let sp = self.spinner.clone(); + let task = Group::unit(zsh::InstallAutosuggestions::new()).notify_err(move |e| { + let _ = sp.write_ln( + TitleFormat::error(format!( + "Failed to install zsh-autosuggestions: {e}. Setup cannot continue." + )) + .display(), + ); + Err(e) + }); + group = Some(task); + } + + if deps.syntax_highlighting == crate::zsh::PluginStatus::NotInstalled { + let sp = self.spinner.clone(); + let task = Group::unit(zsh::InstallSyntaxHighlighting::new()).notify_err(move |e| { + let _ = sp.write_ln( + TitleFormat::error(format!( + "Failed to install zsh-syntax-highlighting: {e}. Setup cannot continue." + )) + .display(), + ); + Err(e) + }); + group = Some(match group { + Some(g) => g.alongside(task), + None => task, + }); + } + + match group { + Some(group) => { + let sp = self.spinner.clone(); + group.notify_ok(move || { + sp.write_ln(format!(" {} Plugins installed", "[OK]".green())) + }) + } + None => Group::unit(zsh::Noop), + } + } + + /// Builds a group that installs tools (fzf, bat, fd) in parallel. 
+ fn setup_install_tools( + &self, + deps: &zsh::DependencyStatus, + platform: Platform, + sudo: zsh::SudoCapability, + ) -> Group { + if !deps.needs_tools() { + return Group::unit(zsh::Noop); + } + + let mut group: Option = None; + + if matches!(deps.fzf, FzfStatus::NotFound) { + let sp = self.spinner.clone(); + let sp2 = sp.clone(); + let task = Group::unit(zsh::InstallFzf::new(platform, sudo)) + .notify_ok(move || sp.write_ln(format!(" {} fzf installed", "[OK]".green()))) + .notify_err(move |e| { + let _ = sp2.write_ln( + TitleFormat::error(format!("Failed to install fzf: {e}")).display(), + ); + Err(e) + }); + group = Some(task); + } + + if matches!(deps.bat, crate::zsh::BatStatus::NotFound) { + let sp = self.spinner.clone(); + let sp2 = sp.clone(); + let task = Group::unit(zsh::InstallBat::new(platform, sudo)) + .notify_ok(move || sp.write_ln(format!(" {} bat installed", "[OK]".green()))) + .notify_err(move |e| { + let _ = sp2.write_ln( + TitleFormat::error(format!("Failed to install bat: {e}")).display(), + ); + Err(e) + }); + group = Some(match group { + Some(g) => g.alongside(task), + None => task, + }); + } + + if matches!(deps.fd, crate::zsh::FdStatus::NotFound) { + let sp = self.spinner.clone(); + let sp2 = sp.clone(); + let task = Group::unit(zsh::InstallFd::new(platform, sudo)) + .notify_ok(move || sp.write_ln(format!(" {} fd installed", "[OK]".green()))) + .notify_err(move |e| { + let _ = sp2.write_ln( + TitleFormat::error(format!("Failed to install fd: {e}")).display(), + ); + Err(e) + }); + group = Some(match group { + Some(g) => g.alongside(task), + None => task, + }); + } + + match group { + Some(group) => group, + None => Group::unit(zsh::Noop), + } + } + + /// Builds a group that configures `~/.bash_profile` for zsh auto-start. 
+ fn setup_bash_profile(&self) -> Group { + Group::unit(zsh::ConfigureBashProfile::new()) } /// Handle the cmd command - generates shell command from natural language @@ -1986,6 +2469,9 @@ impl A + Send + Sync> UI { )); } } + SlashCommand::Processes => { + self.on_processes().await?; + } } Ok(false) @@ -2008,6 +2494,121 @@ impl A + Send + Sync> UI { Ok(()) } + /// Shows all tracked background processes and lets the user select one to + /// kill. After killing, asks whether to also delete the log file. + async fn on_processes(&mut self) -> anyhow::Result<()> { + let processes = self.api.list_background_processes()?; + if processes.is_empty() { + self.writeln_title(TitleFormat::debug("No background processes running"))?; + return Ok(()); + } + + // Build display strings for the picker. + // Format: "command | dir | elapsed | status" + let display_items: Vec = processes + .iter() + .map(|(p, alive)| { + let status = if *alive { "running" } else { "stopped" }; + let elapsed = humanize_time(p.started_at); + let dir = p.cwd.file_name() + .map(|n| n.to_string_lossy().to_string()) + .unwrap_or_else(|| p.cwd.display().to_string()); + format!( + "{} | {} | {} | {}", + p.command, + dir, + elapsed, + status, + ) + }) + .collect(); + + let selected = + ForgeWidget::select("Select a background process to kill", display_items.clone()) + .prompt()?; + + let Some(selected_str) = selected else { + return Ok(()); + }; + + // Find the PID by matching the selected display string back to the + // processes list (same order as display_items). 
+ let idx = display_items + .iter() + .position(|s| s == &selected_str) + .ok_or_else(|| anyhow::anyhow!("Failed to match selection"))?; + let pid = processes[idx].0.pid; + let log_file = processes[idx].0.log_file.clone(); + + // Kill the process and remove from manager, keeping log file for now + self.api.kill_background_process(pid, false)?; + self.writeln_title(TitleFormat::action(format!("Killed process {pid}")))?; + + // Ask about log file deletion + let delete_log = ForgeWidget::confirm("Delete the log file?") + .with_default(false) + .prompt()?; + + if delete_log == Some(true) { + let _ = std::fs::remove_file(&log_file); + self.writeln_title(TitleFormat::debug(format!( + "Deleted log file: {}", + log_file.display() + )))?; + } + + Ok(()) + } + + /// CLI handler for `forge processes`. Supports porcelain output and + /// --kill/--delete-log flags. + async fn on_processes_cli( + &mut self, + porcelain: bool, + kill_pid: Option, + delete_log: bool, + ) -> anyhow::Result<()> { + if let Some(pid) = kill_pid { + self.api.kill_background_process(pid, delete_log)?; + if !porcelain { + self.writeln_title(TitleFormat::action(format!("Killed process {pid}")))?; + } + return Ok(()); + } + + let processes = self.api.list_background_processes()?; + if porcelain { + // Tab-separated output for machine consumption + for (p, alive) in &processes { + let status = if *alive { "running" } else { "stopped" }; + println!( + "{}\t{}\t{}\t{}\t{}", + p.pid, + status, + p.command, + p.started_at.to_rfc3339(), + p.log_file.display() + ); + } + } else if processes.is_empty() { + self.writeln_title(TitleFormat::debug("No background processes running"))?; + } else { + for (p, alive) in &processes { + let status = if *alive { "running" } else { "stopped" }; + let elapsed = humanize_time(p.started_at); + self.writeln_title(TitleFormat::debug(format!( + "PID {} | {} | {} | {} | log: {}", + p.pid, + status, + p.command, + elapsed, + p.log_file.display() + )))?; + } + } + Ok(()) + } + /// 
Select a model from all configured providers using porcelain-style /// tabular display matching the shell plugin's `:model` UI. /// diff --git a/crates/forge_main/src/zsh/fixtures/bashrc_clean.sh b/crates/forge_main/src/zsh/fixtures/bashrc_clean.sh new file mode 100644 index 0000000000..5d081c14c5 --- /dev/null +++ b/crates/forge_main/src/zsh/fixtures/bashrc_clean.sh @@ -0,0 +1,2 @@ +# My bashrc +export PATH=$PATH:/usr/local/bin diff --git a/crates/forge_main/src/zsh/fixtures/bashrc_incomplete_block_no_fi.sh b/crates/forge_main/src/zsh/fixtures/bashrc_incomplete_block_no_fi.sh new file mode 100644 index 0000000000..1969cf5499 --- /dev/null +++ b/crates/forge_main/src/zsh/fixtures/bashrc_incomplete_block_no_fi.sh @@ -0,0 +1,7 @@ +# My bashrc +export PATH=$PATH:/usr/local/bin + +# >>> forge initialize >>> +if [ -t 0 ] && [ -x "/usr/bin/zsh" ]; then + export SHELL="/usr/bin/zsh" + exec "/usr/bin/zsh" diff --git a/crates/forge_main/src/zsh/fixtures/bashrc_malformed_block_missing_fi.sh b/crates/forge_main/src/zsh/fixtures/bashrc_malformed_block_missing_fi.sh new file mode 100644 index 0000000000..9be98760b9 --- /dev/null +++ b/crates/forge_main/src/zsh/fixtures/bashrc_malformed_block_missing_fi.sh @@ -0,0 +1,9 @@ +# My bashrc +export PATH=$PATH:/usr/local/bin + +# Added by zsh installer +if [ -t 0 ] && [ -x "/usr/bin/zsh" ]; then + export SHELL="/usr/bin/zsh" + +# Content after incomplete block (will be lost) +alias ll='ls -la' diff --git a/crates/forge_main/src/zsh/fixtures/bashrc_multiple_incomplete_blocks.sh b/crates/forge_main/src/zsh/fixtures/bashrc_multiple_incomplete_blocks.sh new file mode 100644 index 0000000000..ec7407c361 --- /dev/null +++ b/crates/forge_main/src/zsh/fixtures/bashrc_multiple_incomplete_blocks.sh @@ -0,0 +1,11 @@ +# My bashrc +export PATH=$PATH:/usr/local/bin + +# Added by zsh installer +if [ -t 0 ] && [ -x "/usr/bin/zsh" ]; then + export SHELL="/usr/bin/zsh" + +# >>> forge initialize >>> +if [ -t 0 ] && [ -x "/usr/bin/zsh" ]; then + export 
SHELL="/usr/bin/zsh" + exec "/usr/bin/zsh" diff --git a/crates/forge_main/src/zsh/fixtures/bashrc_with_forge_block.sh b/crates/forge_main/src/zsh/fixtures/bashrc_with_forge_block.sh new file mode 100644 index 0000000000..8f1f3426c5 --- /dev/null +++ b/crates/forge_main/src/zsh/fixtures/bashrc_with_forge_block.sh @@ -0,0 +1,12 @@ +# My bashrc +export PATH=$PATH:/usr/local/bin + +# >>> forge initialize >>> +if [ -t 0 ] && [ -x "/usr/bin/zsh" ]; then + export SHELL="/usr/bin/zsh" + exec "/usr/bin/zsh" +fi +# <<< forge initialize <<< + +# More config +alias ll='ls -la' diff --git a/crates/forge_main/src/zsh/fixtures/bashrc_with_old_installer_block.sh b/crates/forge_main/src/zsh/fixtures/bashrc_with_old_installer_block.sh new file mode 100644 index 0000000000..4d4dca68e1 --- /dev/null +++ b/crates/forge_main/src/zsh/fixtures/bashrc_with_old_installer_block.sh @@ -0,0 +1,11 @@ +# My bashrc +export PATH=$PATH:/usr/local/bin + +# Added by zsh installer +if [ -t 0 ] && [ -x "/usr/bin/zsh" ]; then + export SHELL="/usr/bin/zsh" + exec "/usr/bin/zsh" +fi + +# More config +alias ll='ls -la' diff --git a/crates/forge_main/src/zsh/mod.rs b/crates/forge_main/src/zsh/mod.rs index 5bf5325f67..7f47236839 100644 --- a/crates/forge_main/src/zsh/mod.rs +++ b/crates/forge_main/src/zsh/mod.rs @@ -6,13 +6,25 @@ //! - Shell diagnostics //! - Right prompt (rprompt) display //! - Prompt styling utilities +//! - Full setup orchestration (zsh, Oh My Zsh, plugins) mod plugin; mod rprompt; +mod setup; mod style; +/// Normalizes shell script content for cross-platform compatibility. +/// +/// Strips carriage returns (`\r`) that appear when `include_str!` or +/// `include_dir!` embed files on Windows (where `git core.autocrlf=true` +/// converts LF to CRLF on checkout). Zsh cannot parse `\r` in scripts. 
+pub(crate) fn normalize_script(content: &str) -> String { + content.replace("\r\n", "\n").replace('\r', "\n") +} + pub use plugin::{ generate_zsh_plugin, generate_zsh_theme, run_zsh_doctor, run_zsh_keyboard, setup_zsh_integration, }; pub use rprompt::ZshRPrompt; +pub use setup::*; diff --git a/crates/forge_main/src/zsh/plugin.rs b/crates/forge_main/src/zsh/plugin.rs index 363f2afeaf..a3d2cf23b6 100644 --- a/crates/forge_main/src/zsh/plugin.rs +++ b/crates/forge_main/src/zsh/plugin.rs @@ -22,7 +22,8 @@ pub fn generate_zsh_plugin() -> Result { // Iterate through all embedded files in shell-plugin/lib, stripping comments // and empty lines. All files in this directory are .zsh files. for file in forge_embed::files(&ZSH_PLUGIN_LIB) { - let content = std::str::from_utf8(file.contents())?; + let raw = std::str::from_utf8(file.contents())?; + let content = super::normalize_script(raw); for line in content.lines() { let trimmed = line.trim(); // Skip empty lines and comment lines @@ -51,7 +52,8 @@ pub fn generate_zsh_plugin() -> Result { /// Generates the ZSH theme for Forge pub fn generate_zsh_theme() -> Result { - let mut content = include_str!("../../../../shell-plugin/forge.theme.zsh").to_string(); + let mut content = + super::normalize_script(include_str!("../../../../shell-plugin/forge.theme.zsh")); // Set environment variable to indicate theme is loaded (with timestamp) content.push_str("\n_FORGE_THEME_LOADED=$(date +%s)\n"); @@ -59,6 +61,19 @@ pub fn generate_zsh_theme() -> Result { Ok(content) } +/// Creates a temporary zsh script file for Windows execution +fn create_temp_zsh_script(script_content: &str) -> Result<(tempfile::TempDir, PathBuf)> { + use std::io::Write; + + let temp_dir = tempfile::tempdir().context("Failed to create temp directory")?; + let script_path = temp_dir.path().join("forge_script.zsh"); + let mut file = fs::File::create(&script_path).context("Failed to create temp script file")?; + file.write_all(script_content.as_bytes()) + 
.context("Failed to write temp script")?; + + Ok((temp_dir, script_path)) +} + /// Executes a ZSH script with streaming output /// /// # Arguments @@ -71,14 +86,35 @@ pub fn generate_zsh_theme() -> Result { /// Returns error if the script cannot be executed, if output streaming fails, /// or if the script exits with a non-zero status code fn execute_zsh_script_with_streaming(script_content: &str, script_name: &str) -> Result<()> { - // Execute the script in a zsh subprocess with piped output - let mut child = std::process::Command::new("zsh") - .arg("-c") - .arg(script_content) - .stdout(Stdio::piped()) - .stderr(Stdio::piped()) - .spawn() - .context(format!("Failed to execute zsh {} script", script_name))?; + let script_content = super::normalize_script(script_content); + + // On Unix, pass script via `zsh -c` -- Command::arg() uses execve which + // passes arguments directly without shell interpretation, so embedded + // quotes are safe. + // On Windows, write script to temp file and execute it with -f (no rc files) + // This avoids CreateProcess quote mangling AND prevents ~/.zshrc loading + let (_temp_dir, mut child) = if cfg!(windows) { + let (temp_dir, script_path) = create_temp_zsh_script(&script_content)?; + let child = std::process::Command::new("zsh") + // -f: don't load ~/.zshrc (prevents theme loading during doctor) + .arg("-f") + .arg(script_path.to_string_lossy().as_ref()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .spawn() + .context(format!("Failed to execute zsh {} script", script_name))?; + // Keep temp_dir alive by boxing it in the tuple + (Some(temp_dir), child) + } else { + let child = std::process::Command::new("zsh") + .arg("-c") + .arg(&script_content) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .spawn() + .context(format!("Failed to execute zsh {} script", script_name))?; + (None, child) + }; // Get stdout and stderr handles let stdout = child.stdout.take().context("Failed to capture stdout")?; @@ -209,7 +245,8 @@ pub 
fn setup_zsh_integration( ) -> Result { const START_MARKER: &str = "# >>> forge initialize >>>"; const END_MARKER: &str = "# <<< forge initialize <<<"; - const FORGE_INIT_CONFIG: &str = include_str!("../../../../shell-plugin/forge.setup.zsh"); + const FORGE_INIT_CONFIG_RAW: &str = include_str!("../../../../shell-plugin/forge.setup.zsh"); + let forge_init_config = super::normalize_script(FORGE_INIT_CONFIG_RAW); let home = std::env::var("HOME").context("HOME environment variable not set")?; let zdotdir = std::env::var("ZDOTDIR").unwrap_or_else(|_| home.clone()); @@ -230,7 +267,7 @@ pub fn setup_zsh_integration( // Build the forge config block with markers let mut forge_config: Vec = vec![START_MARKER.to_string()]; - forge_config.extend(FORGE_INIT_CONFIG.lines().map(String::from)); + forge_config.extend(forge_init_config.lines().map(String::from)); // Add nerd font configuration if requested if disable_nerd_font { @@ -369,6 +406,7 @@ mod tests { } #[test] + #[serial_test::serial] fn test_setup_zsh_integration_without_nerd_font_config() { use tempfile::TempDir; @@ -426,6 +464,7 @@ mod tests { } #[test] + #[serial_test::serial] fn test_setup_zsh_integration_with_nerd_font_disabled() { use tempfile::TempDir; @@ -485,6 +524,7 @@ mod tests { } #[test] + #[serial_test::serial] fn test_setup_zsh_integration_with_editor() { use tempfile::TempDir; @@ -550,6 +590,7 @@ mod tests { } #[test] + #[serial_test::serial] fn test_setup_zsh_integration_with_both_configs() { use tempfile::TempDir; @@ -607,6 +648,7 @@ mod tests { } #[test] + #[serial_test::serial] fn test_setup_zsh_integration_updates_existing_markers() { use tempfile::TempDir; diff --git a/crates/forge_main/src/zsh/scripts/bash_profile_autostart_block.sh b/crates/forge_main/src/zsh/scripts/bash_profile_autostart_block.sh new file mode 100644 index 0000000000..88845c7cb2 --- /dev/null +++ b/crates/forge_main/src/zsh/scripts/bash_profile_autostart_block.sh @@ -0,0 +1,12 @@ + +# >>> forge initialize >>> +# Source ~/.bashrc 
for user customizations (aliases, functions, etc.) +if [ -f "$HOME/.bashrc" ]; then + source "$HOME/.bashrc" +fi +# Auto-start zsh for interactive sessions +if [ -t 0 ] && [ -x "{{zsh}}" ]; then + export SHELL="{{zsh}}" + exec "{{zsh}}" +fi +# <<< forge initialize <<< diff --git a/crates/forge_main/src/zsh/scripts/zshenv_fpath_block.sh b/crates/forge_main/src/zsh/scripts/zshenv_fpath_block.sh new file mode 100644 index 0000000000..e02fb8879f --- /dev/null +++ b/crates/forge_main/src/zsh/scripts/zshenv_fpath_block.sh @@ -0,0 +1,11 @@ + +# --- zsh installer fpath (added by forge zsh setup) --- +_zsh_fn_base="/usr/share/zsh/functions" +if [ -d "$_zsh_fn_base" ]; then + fpath=("$_zsh_fn_base" $fpath) + for _zsh_fn_sub in "$_zsh_fn_base"/*/; do + [ -d "$_zsh_fn_sub" ] && fpath=("${_zsh_fn_sub%/}" $fpath) + done +fi +unset _zsh_fn_base _zsh_fn_sub +# --- end zsh installer fpath --- diff --git a/crates/forge_main/src/zsh/setup/detect.rs b/crates/forge_main/src/zsh/setup/detect.rs new file mode 100644 index 0000000000..dd94f869de --- /dev/null +++ b/crates/forge_main/src/zsh/setup/detect.rs @@ -0,0 +1,387 @@ +//! Dependency detection functions for the ZSH setup orchestrator. +//! +//! Detects the installation status of all dependencies: zsh, Oh My Zsh, +//! plugins, fzf, bat, fd, git, and sudo capability. + +use std::path::PathBuf; + +use tokio::process::Command; + +use super::platform::Platform; +use super::types::*; +use super::util::{command_exists, version_gte}; +use super::{BAT_MIN_VERSION, FD_MIN_VERSION, FZF_MIN_VERSION}; + +/// Detects whether git is available on the system. +/// +/// # Returns +/// +/// `true` if `git --version` succeeds, `false` otherwise. +pub async fn detect_git() -> bool { + Command::new("git") + .arg("--version") + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .status() + .await + .map(|s| s.success()) + .unwrap_or(false) +} + +/// Detects the current zsh installation status. 
+/// +/// Checks for zsh binary presence, then verifies that critical modules +/// (zle, datetime, stat) load correctly. +pub async fn detect_zsh() -> ZshStatus { + // Find zsh binary + let which_cmd = if cfg!(target_os = "windows") { + "where" + } else { + "which" + }; + + let output = match Command::new(which_cmd) + .arg("zsh") + .stdout(std::process::Stdio::piped()) + .stderr(std::process::Stdio::null()) + .output() + .await + { + Ok(o) if o.status.success() => o, + _ => return ZshStatus::NotFound, + }; + + let path = String::from_utf8_lossy(&output.stdout).trim().to_string(); + if path.is_empty() { + return ZshStatus::NotFound; + } + + // Smoke test critical modules + let modules_ok = Command::new("zsh") + .args([ + "-c", + "zmodload zsh/zle && zmodload zsh/datetime && zmodload zsh/stat", + ]) + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .status() + .await + .map(|s| s.success()) + .unwrap_or(false); + + if !modules_ok { + return ZshStatus::Broken { path: path.lines().next().unwrap_or(&path).to_string() }; + } + + // Get version + let version = match Command::new("zsh") + .arg("--version") + .stdout(std::process::Stdio::piped()) + .stderr(std::process::Stdio::null()) + .output() + .await + { + Ok(o) if o.status.success() => { + let out = String::from_utf8_lossy(&o.stdout); + // "zsh 5.9 (x86_64-pc-linux-gnu)" -> "5.9" + out.split_whitespace() + .nth(1) + .unwrap_or("unknown") + .to_string() + } + _ => "unknown".to_string(), + }; + + ZshStatus::Functional { + version, + path: path.lines().next().unwrap_or(&path).to_string(), + } +} + +/// Detects whether Oh My Zsh is installed. 
+pub async fn detect_oh_my_zsh() -> OmzStatus { + let home = match std::env::var("HOME") { + Ok(h) => h, + Err(_) => return OmzStatus::NotInstalled, + }; + let omz_path = PathBuf::from(&home).join(".oh-my-zsh"); + if omz_path.is_dir() { + OmzStatus::Installed + } else { + OmzStatus::NotInstalled + } +} + +/// Returns the `$ZSH_CUSTOM` plugins directory path. +/// +/// Falls back to `$HOME/.oh-my-zsh/custom` if the environment variable is not +/// set. +pub(super) fn zsh_custom_dir() -> Option { + if let Ok(custom) = std::env::var("ZSH_CUSTOM") { + return Some(PathBuf::from(custom)); + } + std::env::var("HOME") + .ok() + .map(|h| PathBuf::from(h).join(".oh-my-zsh").join("custom")) +} + +/// Detects whether the zsh-autosuggestions plugin is installed. +pub async fn detect_autosuggestions() -> PluginStatus { + match zsh_custom_dir() { + Some(dir) if dir.join("plugins").join("zsh-autosuggestions").is_dir() => { + PluginStatus::Installed + } + _ => PluginStatus::NotInstalled, + } +} + +/// Detects whether the zsh-syntax-highlighting plugin is installed. +pub async fn detect_syntax_highlighting() -> PluginStatus { + match zsh_custom_dir() { + Some(dir) if dir.join("plugins").join("zsh-syntax-highlighting").is_dir() => { + PluginStatus::Installed + } + _ => PluginStatus::NotInstalled, + } +} + +/// Detects fzf installation and checks version against minimum requirement. 
+pub async fn detect_fzf() -> FzfStatus { + // Check if fzf exists + if !command_exists("fzf").await { + return FzfStatus::NotFound; + } + + let output = match Command::new("fzf") + .arg("--version") + .stdout(std::process::Stdio::piped()) + .stderr(std::process::Stdio::null()) + .output() + .await + { + Ok(o) if o.status.success() => o, + _ => return FzfStatus::NotFound, + }; + + let out = String::from_utf8_lossy(&output.stdout); + // fzf --version outputs something like "0.54.0 (d4e6f0c)" or just "0.54.0" + let version = out + .split_whitespace() + .next() + .unwrap_or("unknown") + .to_string(); + + let meets_minimum = version_gte(&version, FZF_MIN_VERSION); + + FzfStatus::Found { version, meets_minimum } +} + +/// Detects bat installation (checks both "bat" and "batcat" on Debian/Ubuntu). +pub async fn detect_bat() -> BatStatus { + match detect_tool_with_aliases(&["bat", "batcat"], 1, BAT_MIN_VERSION).await { + Some((version, meets_minimum)) => BatStatus::Installed { version, meets_minimum }, + None => BatStatus::NotFound, + } +} + +/// Detects fd installation (checks both "fd" and "fdfind" on Debian/Ubuntu). +pub async fn detect_fd() -> FdStatus { + match detect_tool_with_aliases(&["fd", "fdfind"], 1, FD_MIN_VERSION).await { + Some((version, meets_minimum)) => FdStatus::Installed { version, meets_minimum }, + None => FdStatus::NotFound, + } +} + +/// Detects a tool by trying multiple command aliases, parsing the version +/// from `--version` output, and checking against a minimum version. +/// +/// # Arguments +/// * `aliases` - Command names to try (e.g., `["bat", "batcat"]`) +/// * `version_word_index` - Which whitespace-delimited word in the output +/// contains the version (e.g., `"bat 0.24.0"` -> index 1) +/// * `min_version` - Minimum acceptable version string +/// +/// Returns `Some((version, meets_minimum))` if any alias is found. 
+async fn detect_tool_with_aliases( + aliases: &[&str], + version_word_index: usize, + min_version: &str, +) -> Option<(String, bool)> { + for cmd in aliases { + if command_exists(cmd).await + && let Ok(output) = Command::new(cmd) + .arg("--version") + .stdout(std::process::Stdio::piped()) + .stderr(std::process::Stdio::null()) + .output() + .await + && output.status.success() + { + let out = String::from_utf8_lossy(&output.stdout); + let version = out + .split_whitespace() + .nth(version_word_index) + .unwrap_or("unknown") + .to_string(); + let meets_minimum = version_gte(&version, min_version); + return Some((version, meets_minimum)); + } + } + None +} + +/// Runs all dependency detection functions in parallel and returns aggregated +/// results. +/// +/// # Returns +/// +/// A `DependencyStatus` containing the status of all dependencies. +pub async fn detect_all_dependencies() -> DependencyStatus { + let (git, zsh, oh_my_zsh, autosuggestions, syntax_highlighting, fzf, bat, fd) = tokio::join!( + detect_git(), + detect_zsh(), + detect_oh_my_zsh(), + detect_autosuggestions(), + detect_syntax_highlighting(), + detect_fzf(), + detect_bat(), + detect_fd(), + ); + + DependencyStatus { + zsh, + oh_my_zsh, + autosuggestions, + syntax_highlighting, + fzf, + bat, + fd, + git, + } +} + +/// Detects sudo capability for the current platform. 
+pub async fn detect_sudo(platform: Platform) -> SudoCapability { + match platform { + Platform::Windows | Platform::Android => SudoCapability::NoneNeeded, + Platform::MacOS | Platform::Linux => { + // Check if already root via `id -u` + let is_root = Command::new("id") + .arg("-u") + .stdout(std::process::Stdio::piped()) + .stderr(std::process::Stdio::null()) + .output() + .await + .map(|o| String::from_utf8_lossy(&o.stdout).trim() == "0") + .unwrap_or(false); + + if is_root { + return SudoCapability::Root; + } + + // Check if sudo is available + let has_sudo = command_exists("sudo").await; + + if has_sudo { + SudoCapability::SudoAvailable + } else { + SudoCapability::NoneAvailable + } + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[tokio::test] + async fn test_detect_oh_my_zsh_installed() { + let temp = tempfile::TempDir::new().unwrap(); + let omz_dir = temp.path().join(".oh-my-zsh"); + std::fs::create_dir(&omz_dir).unwrap(); + + // Temporarily set HOME + let original_home = std::env::var("HOME").ok(); + unsafe { + std::env::set_var("HOME", temp.path()); + } + + let actual = detect_oh_my_zsh().await; + + // Restore + unsafe { + if let Some(h) = original_home { + std::env::set_var("HOME", h); + } + } + + assert!(matches!(actual, OmzStatus::Installed)); + } + + #[tokio::test] + async fn test_detect_oh_my_zsh_not_installed() { + let temp = tempfile::TempDir::new().unwrap(); + + let original_home = std::env::var("HOME").ok(); + unsafe { + std::env::set_var("HOME", temp.path()); + } + + let actual = detect_oh_my_zsh().await; + + unsafe { + if let Some(h) = original_home { + std::env::set_var("HOME", h); + } + } + + assert!(matches!(actual, OmzStatus::NotInstalled)); + } + + #[tokio::test] + async fn test_detect_autosuggestions_installed() { + let temp = tempfile::TempDir::new().unwrap(); + let plugin_dir = temp.path().join("plugins").join("zsh-autosuggestions"); + std::fs::create_dir_all(&plugin_dir).unwrap(); + + let original_custom = 
std::env::var("ZSH_CUSTOM").ok(); + unsafe { + std::env::set_var("ZSH_CUSTOM", temp.path()); + } + + let actual = detect_autosuggestions().await; + + unsafe { + if let Some(c) = original_custom { + std::env::set_var("ZSH_CUSTOM", c); + } else { + std::env::remove_var("ZSH_CUSTOM"); + } + } + + assert_eq!(actual, PluginStatus::Installed); + } + + #[tokio::test] + async fn test_detect_autosuggestions_not_installed() { + let temp = tempfile::TempDir::new().unwrap(); + + let original_custom = std::env::var("ZSH_CUSTOM").ok(); + unsafe { + std::env::set_var("ZSH_CUSTOM", temp.path()); + } + + let actual = detect_autosuggestions().await; + + unsafe { + if let Some(c) = original_custom { + std::env::set_var("ZSH_CUSTOM", c); + } else { + std::env::remove_var("ZSH_CUSTOM"); + } + } + + assert_eq!(actual, PluginStatus::NotInstalled); + } +} diff --git a/crates/forge_main/src/zsh/setup/install_plugins.rs b/crates/forge_main/src/zsh/setup/install_plugins.rs new file mode 100644 index 0000000000..5d9fc9ef27 --- /dev/null +++ b/crates/forge_main/src/zsh/setup/install_plugins.rs @@ -0,0 +1,562 @@ +//! Plugin and Oh My Zsh installation functions. +//! +//! Handles installation of Oh My Zsh, zsh-autosuggestions, +//! zsh-syntax-highlighting, and bashrc auto-start configuration. + +use std::path::PathBuf; + +use anyhow::{Context, Result, bail}; +use tokio::process::Command; + +use super::OMZ_INSTALL_URL; +use super::detect::zsh_custom_dir; +use super::util::{path_str, resolve_zsh_path}; + +/// Installs Oh My Zsh by downloading and running the official install script. +pub struct InstallOhMyZsh; + +impl InstallOhMyZsh { + /// Creates a new `InstallOhMyZsh`. + pub fn new() -> Self { + Self + } +} + +#[async_trait::async_trait] +impl super::installer::Installation for InstallOhMyZsh { + async fn install(self) -> anyhow::Result<()> { + install_oh_my_zsh().await + } +} + +/// Installs the zsh-autosuggestions plugin via git clone. 
+pub struct InstallAutosuggestions; + +impl InstallAutosuggestions { + /// Creates a new `InstallAutosuggestions`. + pub fn new() -> Self { + Self + } +} + +#[async_trait::async_trait] +impl super::installer::Installation for InstallAutosuggestions { + async fn install(self) -> anyhow::Result<()> { + install_autosuggestions().await + } +} + +/// Installs the zsh-syntax-highlighting plugin via git clone. +pub struct InstallSyntaxHighlighting; + +impl InstallSyntaxHighlighting { + /// Creates a new `InstallSyntaxHighlighting`. + pub fn new() -> Self { + Self + } +} + +#[async_trait::async_trait] +impl super::installer::Installation for InstallSyntaxHighlighting { + async fn install(self) -> anyhow::Result<()> { + install_syntax_highlighting().await + } +} + +/// Configures `~/.bash_profile` to auto-start zsh on Windows (Git Bash). +pub struct ConfigureBashProfile; + +impl ConfigureBashProfile { + /// Creates a new `ConfigureBashProfile`. + pub fn new() -> Self { + Self + } +} + +#[async_trait::async_trait] +impl super::installer::Installation for ConfigureBashProfile { + async fn install(self) -> anyhow::Result<()> { + configure_bash_profile_autostart().await + } +} + +/// Installs Oh My Zsh by downloading and executing the official install script. +/// +/// Sets `RUNZSH=no` and `CHSH=no` to prevent the script from switching shells +/// or starting zsh automatically (we handle that ourselves). +/// +/// # Errors +/// +/// Returns error if the download fails or the install script exits with +/// non-zero. +pub(super) async fn install_oh_my_zsh() -> Result<()> { + let client = reqwest::Client::builder() + .timeout(std::time::Duration::from_secs(60)) + .build() + .context("Failed to create HTTP client")?; + + let script = client + .get(OMZ_INSTALL_URL) + .send() + .await + .context("Failed to download Oh My Zsh install script")? 
+ .bytes() + .await + .context("Failed to read Oh My Zsh install script")?; + + // Pipe the script directly to `sh -s` (like curl | sh) instead of writing + // a temp file. The `-s` flag tells sh to read commands from stdin. + let mut child = Command::new("sh") + .arg("-s") + .env("RUNZSH", "no") + .env("CHSH", "no") + .stdin(std::process::Stdio::piped()) + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .spawn() + .context("Failed to spawn sh for Oh My Zsh install")?; + + // Write the script to the child's stdin, then drop to close the pipe + if let Some(mut stdin) = child.stdin.take() { + tokio::io::AsyncWriteExt::write_all(&mut stdin, &script) + .await + .context("Failed to pipe Oh My Zsh install script to sh")?; + } + + let status = child + .wait() + .await + .context("Failed to wait for Oh My Zsh install script")?; + + if !status.success() { + bail!("Oh My Zsh installation failed. Install manually: https://ohmyz.sh/#install"); + } + + // Configure Oh My Zsh defaults in .zshrc + configure_omz_defaults().await?; + + Ok(()) +} + +/// Configures Oh My Zsh defaults in `.zshrc` (theme and plugins). 
+async fn configure_omz_defaults() -> Result<()> { + let home = std::env::var("HOME").context("HOME not set")?; + let zshrc_path = PathBuf::from(&home).join(".zshrc"); + + if !zshrc_path.exists() { + return Ok(()); + } + + let content = tokio::fs::read_to_string(&zshrc_path) + .await + .context("Failed to read .zshrc")?; + + // Create backup before modifying + let timestamp = chrono::Local::now().format("%Y-%m-%d_%H-%M-%S"); + let backup_path = zshrc_path.with_file_name(format!(".zshrc.bak.{}", timestamp)); + tokio::fs::copy(&zshrc_path, &backup_path) + .await + .context("Failed to create .zshrc backup")?; + + let mut new_content = content.clone(); + + // Set theme to robbyrussell + let theme_re = regex::Regex::new(r#"(?m)^ZSH_THEME=.*$"#).unwrap(); + new_content = theme_re + .replace(&new_content, r#"ZSH_THEME="robbyrussell""#) + .to_string(); + + // Set plugins + let plugins_re = regex::Regex::new(r#"(?m)^plugins=\(.*\)$"#).unwrap(); + new_content = plugins_re + .replace( + &new_content, + "plugins=(git command-not-found colored-man-pages extract z)", + ) + .to_string(); + + tokio::fs::write(&zshrc_path, &new_content) + .await + .context("Failed to write .zshrc")?; + + Ok(()) +} + +/// Installs the zsh-autosuggestions plugin via git clone into the Oh My Zsh +/// custom plugins directory. +/// +/// # Errors +/// +/// Returns error if git clone fails. +pub(super) async fn install_autosuggestions() -> Result<()> { + let dest = zsh_custom_dir() + .context("Could not determine ZSH_CUSTOM directory")? 
+ .join("plugins") + .join("zsh-autosuggestions"); + + if dest.exists() { + return Ok(()); + } + + let status = Command::new("git") + .args([ + "clone", + "https://github.com/zsh-users/zsh-autosuggestions.git", + &path_str(&dest), + ]) + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .status() + .await + .context("Failed to clone zsh-autosuggestions")?; + + if !status.success() { + bail!("Failed to install zsh-autosuggestions"); + } + + Ok(()) +} + +/// Installs the zsh-syntax-highlighting plugin via git clone into the Oh My Zsh +/// custom plugins directory. +/// +/// # Errors +/// +/// Returns error if git clone fails. +pub(super) async fn install_syntax_highlighting() -> Result<()> { + let dest = zsh_custom_dir() + .context("Could not determine ZSH_CUSTOM directory")? + .join("plugins") + .join("zsh-syntax-highlighting"); + + if dest.exists() { + return Ok(()); + } + + let status = Command::new("git") + .args([ + "clone", + "https://github.com/zsh-users/zsh-syntax-highlighting.git", + &path_str(&dest), + ]) + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .status() + .await + .context("Failed to clone zsh-syntax-highlighting")?; + + if !status.success() { + bail!("Failed to install zsh-syntax-highlighting"); + } + + Ok(()) +} + +/// Configures `~/.bashrc` to auto-start zsh on Windows (Git Bash). +pub(super) async fn configure_bash_profile_autostart() -> Result<()> { + let home = std::env::var("HOME").context("HOME not set")?; + let home_path = PathBuf::from(&home); + + // Create empty sentinel files to suppress Git Bash "no such file" warnings. + // We skip .bash_profile since we're about to write real content to it. 
+ for file in &[".bash_login", ".profile"] { + let path = home_path.join(file); + if !path.exists() { + let _ = tokio::fs::write(&path, "").await; + } + } + + // --- Clean legacy auto-start blocks from ~/.bashrc --- + let bashrc_path = home_path.join(".bashrc"); + if bashrc_path.exists() + && let Ok(mut bashrc) = tokio::fs::read_to_string(&bashrc_path).await + { + let original = bashrc.clone(); + remove_autostart_blocks(&mut bashrc); + if bashrc != original { + let _ = tokio::fs::write(&bashrc_path, &bashrc).await; + } + } + + // --- Write auto-start block to ~/.bash_profile --- + let bash_profile_path = home_path.join(".bash_profile"); + + let mut content = if bash_profile_path.exists() { + tokio::fs::read_to_string(&bash_profile_path) + .await + .unwrap_or_default() + } else { + String::new() + }; + + // Remove any previous auto-start blocks + remove_autostart_blocks(&mut content); + + // Resolve zsh path + let zsh_path = resolve_zsh_path().await; + + let autostart_block = + crate::zsh::normalize_script(include_str!("../scripts/bash_profile_autostart_block.sh")) + .replace("{{zsh}}", &zsh_path); + + content.push_str(&autostart_block); + + tokio::fs::write(&bash_profile_path, &content) + .await + .context("Failed to write ~/.bash_profile")?; + + Ok(()) +} + +/// End-of-block sentinel used by the new multi-line block format. +const END_MARKER: &str = "# <<< forge initialize <<<"; + +/// Removes all auto-start blocks (old and new markers) from the given content. 
+fn remove_autostart_blocks(content: &mut String) { + loop { + let mut found = false; + for marker in &[ + "# >>> forge initialize >>>", + "# Added by zsh installer", + "# Added by forge zsh setup", + ] { + if let Some(start) = content.find(marker) { + found = true; + // Check if there's a newline before the marker (added by our block format) + // If so, include it in the removal to prevent accumulating blank lines + let actual_start = if start > 0 && content.as_bytes()[start - 1] == b'\n' { + start - 1 + } else { + start + }; + + // Prefer the explicit end sentinel (new format with two if/fi blocks) + if let Some(end_offset) = content[start..].find(END_MARKER) { + let end = start + end_offset + END_MARKER.len(); + // Consume trailing newline if present + let end = if end < content.len() && content.as_bytes()[end] == b'\n' { + end + 1 + } else { + end + }; + content.replace_range(actual_start..end, ""); + } + // Fall back to legacy single-fi format + else if let Some(fi_offset) = content[start..].find("\nfi\n") { + let end = start + fi_offset + 4; // +4 for "\nfi\n" + content.replace_range(actual_start..end, ""); + } else if let Some(fi_offset) = content[start..].find("\nfi") { + let end = start + fi_offset + 3; + content.replace_range(actual_start..end, ""); + } else { + content.truncate(actual_start); + } + break; // Process one marker at a time, then restart search + } + } + if !found { + break; + } + } +} +#[cfg(test)] +mod tests { + use super::*; + + /// Runs `configure_bash_profile_autostart()` with HOME set to the given + /// temp directory, then restores the original HOME. 
+ async fn run_with_home(temp: &tempfile::TempDir) -> Result<()> { + let original_home = std::env::var("HOME").ok(); + unsafe { std::env::set_var("HOME", temp.path()) }; + let result = configure_bash_profile_autostart().await; + unsafe { + match original_home { + Some(home) => std::env::set_var("HOME", home), + None => std::env::remove_var("HOME"), + } + } + result + } + + #[tokio::test] + #[serial_test::serial] + async fn test_writes_to_bash_profile_not_bashrc() { + let temp = tempfile::TempDir::new().unwrap(); + + let actual = run_with_home(&temp).await; + assert!(actual.is_ok(), "Should succeed: {:?}", actual); + + let bash_profile = temp.path().join(".bash_profile"); + let content = tokio::fs::read_to_string(&bash_profile).await.unwrap(); + + // Should contain the auto-start block in .bash_profile + assert!(content.contains("# >>> forge initialize >>>")); + assert!(content.contains("# <<< forge initialize <<<")); + assert!(content.contains("source \"$HOME/.bashrc\"")); + assert!(content.contains("if [ -t 0 ] && [ -x")); + assert!(content.contains("export SHELL=")); + assert!(content.contains("exec")); + } + + #[tokio::test] + #[serial_test::serial] + async fn test_replaces_existing_block_in_bash_profile() { + let temp = tempfile::TempDir::new().unwrap(); + let bash_profile_path = temp.path().join(".bash_profile"); + + // Seed .bash_profile with an existing forge block + let initial = include_str!("../fixtures/bashrc_with_forge_block.sh"); + tokio::fs::write(&bash_profile_path, initial).await.unwrap(); + + let actual = run_with_home(&temp).await; + assert!(actual.is_ok(), "Should succeed: {:?}", actual); + + let content = tokio::fs::read_to_string(&bash_profile_path).await.unwrap(); + + // Original non-block content preserved + assert!(content.contains("# My bashrc")); + assert!(content.contains("export PATH=$PATH:/usr/local/bin")); + assert!(content.contains("# More config")); + assert!(content.contains("alias ll='ls -la'")); + + // Exactly one auto-start block 
+ assert_eq!(content.matches("# >>> forge initialize >>>").count(), 1); + assert_eq!(content.matches("# <<< forge initialize <<<").count(), 1); + } + + #[tokio::test] + #[serial_test::serial] + async fn test_removes_old_installer_block_from_bash_profile() { + let temp = tempfile::TempDir::new().unwrap(); + let bash_profile_path = temp.path().join(".bash_profile"); + + let initial = include_str!("../fixtures/bashrc_with_old_installer_block.sh"); + tokio::fs::write(&bash_profile_path, initial).await.unwrap(); + + let actual = run_with_home(&temp).await; + assert!(actual.is_ok(), "Should succeed: {:?}", actual); + + let content = tokio::fs::read_to_string(&bash_profile_path).await.unwrap(); + + assert!(!content.contains("# Added by zsh installer")); + assert!(content.contains("# >>> forge initialize >>>")); + assert_eq!(content.matches("# >>> forge initialize >>>").count(), 1); + } + + #[tokio::test] + #[serial_test::serial] + async fn test_cleans_legacy_block_from_bashrc() { + let temp = tempfile::TempDir::new().unwrap(); + let bashrc_path = temp.path().join(".bashrc"); + + // Seed .bashrc with a legacy forge block (from previous installer version) + let initial = include_str!("../fixtures/bashrc_with_forge_block.sh"); + tokio::fs::write(&bashrc_path, initial).await.unwrap(); + + let actual = run_with_home(&temp).await; + assert!(actual.is_ok(), "Should succeed: {:?}", actual); + + // .bashrc should have the forge block removed + let bashrc = tokio::fs::read_to_string(&bashrc_path).await.unwrap(); + assert!(!bashrc.contains("# >>> forge initialize >>>")); + assert!(bashrc.contains("# My bashrc")); + assert!(bashrc.contains("alias ll='ls -la'")); + + // .bash_profile should have the new block + let bash_profile = tokio::fs::read_to_string(temp.path().join(".bash_profile")) + .await + .unwrap(); + assert!(bash_profile.contains("# >>> forge initialize >>>")); + } + + #[tokio::test] + #[serial_test::serial] + async fn test_handles_incomplete_block_no_fi() { + let temp = 
tempfile::TempDir::new().unwrap(); + let bash_profile_path = temp.path().join(".bash_profile"); + + let initial = include_str!("../fixtures/bashrc_incomplete_block_no_fi.sh"); + tokio::fs::write(&bash_profile_path, initial).await.unwrap(); + + let actual = run_with_home(&temp).await; + assert!(actual.is_ok(), "Should succeed: {:?}", actual); + + let content = tokio::fs::read_to_string(&bash_profile_path).await.unwrap(); + + // Original content before the incomplete block preserved + assert!(content.contains("# My bashrc")); + assert!(content.contains("export PATH=$PATH:/usr/local/bin")); + + // Exactly one complete block + assert_eq!(content.matches("# >>> forge initialize >>>").count(), 1); + assert_eq!(content.matches("# <<< forge initialize <<<").count(), 1); + assert!(content.contains("if [ -t 0 ] && [ -x")); + assert!(content.contains("export SHELL=")); + assert!(content.contains("exec")); + } + + #[tokio::test] + #[serial_test::serial] + async fn test_handles_malformed_block_missing_closing_fi() { + let temp = tempfile::TempDir::new().unwrap(); + let bash_profile_path = temp.path().join(".bash_profile"); + + // Content after the incomplete block will be lost + let initial = include_str!("../fixtures/bashrc_malformed_block_missing_fi.sh"); + tokio::fs::write(&bash_profile_path, initial).await.unwrap(); + + let actual = run_with_home(&temp).await; + assert!(actual.is_ok(), "Should succeed: {:?}", actual); + + let content = tokio::fs::read_to_string(&bash_profile_path).await.unwrap(); + + assert!(content.contains("# My bashrc")); + assert!(content.contains("export PATH=$PATH:/usr/local/bin")); + assert!(!content.contains("alias ll='ls -la'")); // lost after truncation + + assert!(content.contains("# >>> forge initialize >>>")); + assert_eq!(content.matches("# >>> forge initialize >>>").count(), 1); + assert_eq!(content.matches("# <<< forge initialize <<<").count(), 1); + } + + #[tokio::test] + #[serial_test::serial] + async fn test_idempotent() { + let temp = 
tempfile::TempDir::new().unwrap(); + let bash_profile_path = temp.path().join(".bash_profile"); + + let actual = run_with_home(&temp).await; + assert!(actual.is_ok(), "First run failed: {:?}", actual); + let content_first = tokio::fs::read_to_string(&bash_profile_path).await.unwrap(); + + let actual = run_with_home(&temp).await; + assert!(actual.is_ok(), "Second run failed: {:?}", actual); + let content_second = tokio::fs::read_to_string(&bash_profile_path).await.unwrap(); + + assert_eq!(content_first, content_second); + assert_eq!( + content_second.matches("# >>> forge initialize >>>").count(), + 1 + ); + } + + #[tokio::test] + #[serial_test::serial] + async fn test_handles_multiple_incomplete_blocks() { + let temp = tempfile::TempDir::new().unwrap(); + let bash_profile_path = temp.path().join(".bash_profile"); + + let initial = include_str!("../fixtures/bashrc_multiple_incomplete_blocks.sh"); + tokio::fs::write(&bash_profile_path, initial).await.unwrap(); + + let actual = run_with_home(&temp).await; + assert!(actual.is_ok(), "Should succeed: {:?}", actual); + + let content = tokio::fs::read_to_string(&bash_profile_path).await.unwrap(); + + assert!(content.contains("# My bashrc")); + assert!(content.contains("export PATH=$PATH:/usr/local/bin")); + assert_eq!(content.matches("# >>> forge initialize >>>").count(), 1); + assert_eq!(content.matches("# <<< forge initialize <<<").count(), 1); + } +} diff --git a/crates/forge_main/src/zsh/setup/install_tools.rs b/crates/forge_main/src/zsh/setup/install_tools.rs new file mode 100644 index 0000000000..e02b2c5eb7 --- /dev/null +++ b/crates/forge_main/src/zsh/setup/install_tools.rs @@ -0,0 +1,574 @@ +//! Tool installation functions (fzf, bat, fd). +//! +//! Handles installation of CLI tools via package managers or GitHub releases, +//! including version checking, archive extraction, and binary deployment. 
+ +use std::path::{Path, PathBuf}; + +use anyhow::{Context, Result, bail}; +use tokio::process::Command; + +use super::detect::{detect_bat, detect_fd, detect_fzf}; +use super::install_zsh::LinuxPackageManager; +use super::libc::{LibcType, detect_libc_type}; +use super::platform::{Arch, Platform}; +use super::types::*; +use super::util::*; +use super::{BAT_MIN_VERSION, FD_MIN_VERSION, FZF_MIN_VERSION}; + +/// Installs fzf using the platform's package manager or GitHub releases. +pub struct InstallFzf { + /// Target platform. + pub platform: Platform, + /// Available privilege level. + pub sudo: SudoCapability, +} + +impl InstallFzf { + /// Creates a new `InstallFzf`. + pub fn new(platform: Platform, sudo: SudoCapability) -> Self { + Self { platform, sudo } + } +} + +#[async_trait::async_trait] +impl super::installer::Installation for InstallFzf { + async fn install(self) -> anyhow::Result<()> { + install_fzf(self.platform, &self.sudo).await + } +} + +/// Installs bat using the platform's package manager or GitHub releases. +pub struct InstallBat { + /// Target platform. + pub platform: Platform, + /// Available privilege level. + pub sudo: SudoCapability, +} + +impl InstallBat { + /// Creates a new `InstallBat`. + pub fn new(platform: Platform, sudo: SudoCapability) -> Self { + Self { platform, sudo } + } +} + +#[async_trait::async_trait] +impl super::installer::Installation for InstallBat { + async fn install(self) -> anyhow::Result<()> { + install_bat(self.platform, &self.sudo).await + } +} + +/// Installs fd using the platform's package manager or GitHub releases. +pub struct InstallFd { + /// Target platform. + pub platform: Platform, + /// Available privilege level. + pub sudo: SudoCapability, +} + +impl InstallFd { + /// Creates a new `InstallFd`. 
+ pub fn new(platform: Platform, sudo: SudoCapability) -> Self { + Self { platform, sudo } + } +} + +#[async_trait::async_trait] +impl super::installer::Installation for InstallFd { + async fn install(self) -> anyhow::Result<()> { + install_fd(self.platform, &self.sudo).await + } +} + +/// Installs fzf (fuzzy finder) using package manager or GitHub releases. +/// +/// Tries package manager first (which checks version requirements before +/// installing). Falls back to GitHub releases if package manager unavailable or +/// version too old. +pub(super) async fn install_fzf(platform: Platform, sudo: &SudoCapability) -> Result<()> { + // Try package manager first (version is checked before installing) + // NOTE: Use Err() not bail!() — bail! returns from the function immediately, + // preventing the GitHub release fallback below from running. + let pkg_mgr_result = try_install_via_package_manager("fzf", platform, sudo).await; + + // If package manager succeeded, verify installation and version + if pkg_mgr_result.is_ok() { + let status = detect_fzf().await; + if matches!(status, FzfStatus::Found { meets_minimum: true, .. }) { + return Ok(()); + } + } + + // Fall back to GitHub releases (pkg mgr unavailable or version too old) + install_fzf_from_github(platform).await +} + +/// Installs bat (file viewer) using package manager or GitHub releases. +/// +/// Tries package manager first (which checks version requirements before +/// installing). Falls back to GitHub releases if package manager unavailable or +/// version too old. +pub(super) async fn install_bat(platform: Platform, sudo: &SudoCapability) -> Result<()> { + // Try package manager first (version is checked before installing) + // NOTE: Use Err() not bail!() — bail! returns from the function immediately, + // preventing the GitHub release fallback below from running. 
+ let pkg_mgr_result = try_install_via_package_manager("bat", platform, sudo).await; + + // If package manager succeeded, verify installation and version + if pkg_mgr_result.is_ok() { + let status = detect_bat().await; + if matches!(status, BatStatus::Installed { meets_minimum: true, .. }) { + return Ok(()); + } + } + + // Fall back to GitHub releases (pkg mgr unavailable or version too old) + install_sharkdp_tool_from_github("bat", "sharkdp/bat", "0.25.0", platform).await +} + +/// Installs fd (file finder) using package manager or GitHub releases. +/// +/// Tries package manager first (which checks version requirements before +/// installing). Falls back to GitHub releases if package manager unavailable or +/// version too old. +pub(super) async fn install_fd(platform: Platform, sudo: &SudoCapability) -> Result<()> { + // Try package manager first (version is checked before installing) + // NOTE: Use Err() not bail!() — bail! returns from the function immediately, + // preventing the GitHub release fallback below from running. + let pkg_mgr_result = try_install_via_package_manager("fd", platform, sudo).await; + + // If package manager succeeded, verify installation and version + if pkg_mgr_result.is_ok() { + let status = detect_fd().await; + if matches!(status, FdStatus::Installed { meets_minimum: true, .. }) { + return Ok(()); + } + } + + // Fall back to GitHub releases (pkg mgr unavailable or version too old) + install_sharkdp_tool_from_github("fd", "sharkdp/fd", "10.1.0", platform).await +} + +/// Tries to install a tool using the platform's native package manager. +/// +/// Returns `Ok(())` if the package manager ran successfully (the caller should +/// still verify the installed version). Returns `Err` if no package manager is +/// available or the install command failed -- the caller should fall back to +/// GitHub releases. 
+async fn try_install_via_package_manager( + tool: &str, + platform: Platform, + sudo: &SudoCapability, +) -> Result<()> { + match platform { + Platform::Linux => install_via_package_manager_linux(tool, sudo).await, + Platform::MacOS => install_via_brew(tool).await, + Platform::Android => install_via_pkg(tool).await, + Platform::Windows => Err(anyhow::anyhow!("No package manager on Windows")), + } +} + +/// Installs a tool via Homebrew on macOS. +async fn install_via_brew(tool: &str) -> Result<()> { + if !command_exists("brew").await { + bail!("brew not found"); + } + let status = Command::new("brew") + .args(["install", tool]) + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .status() + .await?; + if status.success() { + Ok(()) + } else { + bail!("brew install {} failed", tool) + } +} + +/// Installs a tool via pkg on Android (Termux). +async fn install_via_pkg(tool: &str) -> Result<()> { + if !command_exists("pkg").await { + bail!("pkg not found"); + } + let status = Command::new("pkg") + .args(["install", "-y", tool]) + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .status() + .await?; + if status.success() { + Ok(()) + } else { + bail!("pkg install {} failed", tool) + } +} + +/// Installs a tool via Linux package manager. +/// +/// Detects available package manager, checks if available version meets minimum +/// requirements, and only installs if version is sufficient. Returns error if +/// package manager version is too old (caller should fall back to GitHub). 
+async fn install_via_package_manager_linux(tool: &str, sudo: &SudoCapability) -> Result<()> { + for mgr in LinuxPackageManager::all() { + let binary = mgr.to_string(); + if command_exists(&binary).await { + // apt-get requires index refresh + if *mgr == LinuxPackageManager::AptGet { + let _ = run_maybe_sudo(&binary, &["update", "-qq"], sudo).await; + } + + let package_name = match tool { + "fzf" => mgr.fzf_package_name(), + "bat" => mgr.bat_package_name(), + "fd" => mgr.fd_package_name(), + _ => bail!("Unknown tool: {}", tool), + }; + + // Check available version before installing + let min_version = match tool { + "fzf" => FZF_MIN_VERSION, + "bat" => BAT_MIN_VERSION, + "fd" => FD_MIN_VERSION, + _ => bail!("Unknown tool: {}", tool), + }; + + if let Some(available_version) = mgr.query_available_version(package_name).await + && !version_gte(&available_version, min_version) + { + bail!( + "Package manager has {} {} but {} or higher required", + tool, + available_version, + min_version + ); + } + // Version is good, proceed with installation + + let args = mgr.install_args(&[package_name]); + return run_maybe_sudo( + &binary, + &args.iter().map(String::as_str).collect::>(), + sudo, + ) + .await; + } + } + bail!("No supported package manager found") +} + +/// Installs fzf from GitHub releases. +async fn install_fzf_from_github(platform: Platform) -> Result<()> { + let asset_pattern = platform.fzf_asset_pattern(); + + let version = get_latest_release_with_binary("junegunn/fzf", asset_pattern, "0.56.3").await; + + let url = construct_fzf_url(&version, platform)?; + let archive_type = match platform.archive_ext() { + "zip" => ArchiveType::Zip, + _ => ArchiveType::TarGz, + }; + + download_extract_and_install(&url, "fzf", archive_type, false).await +} + +/// Installs a sharkdp tool (bat, fd) from GitHub releases. +/// +/// Both bat and fd follow the same naming convention: +/// `{tool}-v{version}-{target}.{ext}` with nested archive layout. 
+///
+/// # Arguments
+/// * `tool` - Tool name (e.g., "bat", "fd")
+/// * `repo` - GitHub repository (e.g., "sharkdp/bat")
+/// * `fallback_version` - Version to use if GitHub API is unavailable
+/// * `platform` - Target platform
+async fn install_sharkdp_tool_from_github(
+    tool: &str,
+    repo: &str,
+    fallback_version: &str,
+    platform: Platform,
+) -> Result<()> {
+    let target = construct_rust_target(platform).await?;
+
+    let version = get_latest_release_with_binary(repo, &target, fallback_version).await;
+    let ext = platform.archive_ext();
+    let archive_type = match ext {
+        "zip" => ArchiveType::Zip,
+        _ => ArchiveType::TarGz,
+    };
+    let url = format!(
+        "https://github.com/{}/releases/download/v{}/{}-v{}-{}.{}",
+        repo, version, tool, version, target, ext
+    );
+
+    download_extract_and_install(&url, tool, archive_type, true).await
+}
+
+/// Minimal struct for parsing GitHub release API response.
+#[derive(serde::Deserialize)]
+struct GitHubRelease {
+    tag_name: String,
+    assets: Vec<GitHubAsset>,
+}
+
+/// Minimal struct for parsing GitHub asset info.
+#[derive(serde::Deserialize)]
+struct GitHubAsset {
+    name: String,
+}
+
+/// Finds the latest GitHub release that has the required binary asset.
+///
+/// Checks recent releases (up to 10) and returns the first one that has
+/// a binary matching the pattern. This handles cases where the latest release
+/// exists but binaries haven't been built yet (CI delays).
+///
+/// # Arguments
+/// * `repo` - Repository in format "owner/name"
+/// * `asset_pattern` - Pattern to match in asset names (e.g.,
+///   "x86_64-unknown-linux-musl")
+///
+/// Returns the version string (without 'v' prefix) or fallback if all fail.
+async fn get_latest_release_with_binary(repo: &str, asset_pattern: &str, fallback: &str) -> String { + // Try to get list of recent releases + let releases_url = format!("https://api.github.com/repos/{}/releases?per_page=10", repo); + let response = match reqwest::Client::new() + .get(&releases_url) + .header("User-Agent", "forge-cli") + .send() + .await + { + Ok(resp) if resp.status().is_success() => resp, + _ => return fallback.to_string(), + }; + + // Parse releases + let releases: Vec = match response.json().await { + Ok(r) => r, + Err(_) => return fallback.to_string(), + }; + + // Find the first release that has the required binary + for release in releases { + // Check if this release has a binary matching our pattern + let has_binary = release + .assets + .iter() + .any(|asset| asset.name.contains(asset_pattern)); + + if has_binary { + // Strip 'v' prefix if present + let version = release + .tag_name + .strip_prefix('v') + .unwrap_or(&release.tag_name) + .to_string(); + return version; + } + } + + // No release with binaries found, use fallback + fallback.to_string() +} + +/// Archive type for tool downloads. +#[derive(Debug, Clone, Copy)] +enum ArchiveType { + TarGz, + Zip, +} + +/// Downloads, extracts, and installs a tool binary to `~/.local/bin`. +/// +/// Creates a temporary directory for the download, extracts the archive, +/// copies the binary to `~/.local/bin`, and cleans up the temp directory. 
+/// +/// # Arguments +/// * `url` - Download URL for the archive +/// * `tool_name` - Name of the binary to find in the archive +/// * `archive_type` - Whether the archive is tar.gz or zip +/// * `nested` - If true, searches subdirectories for the binary (e.g., bat/fd +/// archives) +async fn download_extract_and_install( + url: &str, + tool_name: &str, + archive_type: ArchiveType, + nested: bool, +) -> Result<()> { + let temp_dir = std::env::temp_dir().join(format!("forge-{}-download", tool_name)); + tokio::fs::create_dir_all(&temp_dir).await?; + let _cleanup = TempDirCleanup(temp_dir.clone()); + + // Download archive + let response = reqwest::get(url).await.context("Failed to download tool")?; + if !response.status().is_success() { + bail!( + "Failed to download {}: HTTP {} - {}", + tool_name, + response.status(), + response.text().await.unwrap_or_default() + ); + } + let bytes = response.bytes().await?; + + let archive_ext = match archive_type { + ArchiveType::TarGz => "tar.gz", + ArchiveType::Zip => "zip", + }; + let archive_path = temp_dir.join(format!("{}.{}", tool_name, archive_ext)); + tokio::fs::write(&archive_path, &bytes).await?; + + // Extract archive + extract_archive(&archive_path, &temp_dir, archive_type).await?; + + // Find binary in extracted files + let binary_path = find_binary_in_dir(&temp_dir, tool_name, nested).await?; + + // Install to ~/.local/bin + install_binary_to_local_bin(&binary_path, tool_name).await?; + + Ok(()) +} + +/// Extracts an archive to the given destination directory. 
+async fn extract_archive( + archive_path: &Path, + dest_dir: &Path, + archive_type: ArchiveType, +) -> Result<()> { + match archive_type { + ArchiveType::TarGz => { + let status = Command::new("tar") + .args(["-xzf", &path_str(archive_path), "-C", &path_str(dest_dir)]) + .status() + .await?; + if !status.success() { + bail!("Failed to extract tar.gz archive"); + } + } + ArchiveType::Zip => { + #[cfg(target_os = "windows")] + { + let status = Command::new("powershell") + .args([ + "-Command", + &format!( + "Expand-Archive -Path '{}' -DestinationPath '{}'", + archive_path.display(), + dest_dir.display() + ), + ]) + .status() + .await?; + if !status.success() { + bail!("Failed to extract zip archive"); + } + } + #[cfg(not(target_os = "windows"))] + { + let status = Command::new("unzip") + .args(["-q", &path_str(archive_path), "-d", &path_str(dest_dir)]) + .status() + .await?; + if !status.success() { + bail!("Failed to extract zip archive"); + } + } + } + } + Ok(()) +} + +/// Locates the tool binary inside an extracted archive directory. +/// +/// If `nested` is true, searches one level of subdirectories (for archives +/// like bat/fd that wrap contents in a folder). Otherwise looks at the top +/// level. +async fn find_binary_in_dir(dir: &Path, tool_name: &str, nested: bool) -> Result { + let binary_name = if cfg!(target_os = "windows") { + format!("{}.exe", tool_name) + } else { + tool_name.to_string() + }; + + if nested { + let mut entries = tokio::fs::read_dir(dir).await?; + while let Some(entry) = entries.next_entry().await? 
{ + if entry.file_type().await?.is_dir() { + let candidate = entry.path().join(&binary_name); + if candidate.exists() { + return Ok(candidate); + } + } + } + bail!( + "Binary '{}' not found in nested archive structure", + tool_name + ); + } else { + let candidate = dir.join(&binary_name); + if candidate.exists() { + Ok(candidate) + } else { + bail!("Binary '{}' not found in flat archive structure", tool_name); + } + } +} + +/// Installs a binary to `~/.local/bin` with executable permissions. +async fn install_binary_to_local_bin(binary_path: &Path, name: &str) -> Result<()> { + let home = std::env::var("HOME").context("HOME not set")?; + let local_bin = PathBuf::from(home).join(".local").join("bin"); + tokio::fs::create_dir_all(&local_bin).await?; + + let dest_name = if cfg!(target_os = "windows") { + format!("{}.exe", name) + } else { + name.to_string() + }; + let dest = local_bin.join(dest_name); + tokio::fs::copy(binary_path, &dest).await?; + + #[cfg(not(target_os = "windows"))] + { + use std::os::unix::fs::PermissionsExt; + let mut perms = tokio::fs::metadata(&dest).await?.permissions(); + perms.set_mode(0o755); + tokio::fs::set_permissions(&dest, perms).await?; + } + + Ok(()) +} + +/// Constructs the download URL for fzf based on platform and architecture. +fn construct_fzf_url(version: &str, platform: Platform) -> Result { + let arch = Arch::detect()?; + Ok(format!( + "https://github.com/junegunn/fzf/releases/download/v{}/fzf-{}-{}_{}.{}", + version, + version, + platform.fzf_os(), + arch.as_go(), + platform.archive_ext() + )) +} + +/// Constructs a Rust target triple for bat/fd downloads. 
+async fn construct_rust_target(platform: Platform) -> Result { + let arch = Arch::detect()?; + match platform { + Platform::Linux => { + let libc = detect_libc_type().await.unwrap_or(LibcType::Musl); + let libc_suffix = match libc { + LibcType::Musl => "musl", + LibcType::Gnu => "gnu", + }; + Ok(format!("{}-unknown-linux-{}", arch.as_rust(), libc_suffix)) + } + Platform::MacOS => Ok(format!("{}-apple-darwin", arch.as_rust())), + Platform::Windows => Ok(format!("{}-pc-windows-msvc", arch.as_rust())), + Platform::Android => Ok("aarch64-unknown-linux-musl".to_string()), + } +} diff --git a/crates/forge_main/src/zsh/setup/install_zsh.rs b/crates/forge_main/src/zsh/setup/install_zsh.rs new file mode 100644 index 0000000000..6c839d3720 --- /dev/null +++ b/crates/forge_main/src/zsh/setup/install_zsh.rs @@ -0,0 +1,858 @@ +//! ZSH installation functions. +//! +//! Handles platform-specific zsh installation (Linux, macOS, Android, +//! Windows/Git Bash) including MSYS2 package management, extraction methods, +//! and shell configuration. + +use std::path::{Path, PathBuf}; + +use anyhow::{Context, Result, bail}; +use tokio::process::Command; + +use super::platform::Platform; +use super::types::SudoCapability; +use super::util::*; +use super::{MSYS2_BASE, MSYS2_PKGS}; + +/// Installs zsh using the platform-appropriate method. +/// +/// Set `reinstall` to `true` to force re-extraction of package files when zsh +/// is present but its modules are broken. +pub struct InstallZsh { + /// Target platform. + pub platform: Platform, + /// Available privilege level. + pub sudo: SudoCapability, + /// When `true`, forces a full reinstall (e.g., to repair broken modules). + pub reinstall: bool, +} + +impl InstallZsh { + /// Creates a new `InstallZsh` for a fresh installation (not a reinstall). + pub fn new(platform: Platform, sudo: SudoCapability) -> Self { + Self { platform, sudo, reinstall: false } + } + + /// Marks this as a reinstall, forcing re-extraction of package files. 
+    pub fn reinstall(mut self) -> Self {
+        self.reinstall = true;
+        self
+    }
+}
+
+#[async_trait::async_trait]
+impl super::installer::Installation for InstallZsh {
+    // Dispatch to the platform-specific installer. The `reinstall` flag is
+    // only consumed by the Linux path; the other platforms ignore it.
+    async fn install(self) -> anyhow::Result<()> {
+        match self.platform {
+            Platform::MacOS => install_zsh_macos(&self.sudo).await,
+            Platform::Linux => install_zsh_linux(&self.sudo, self.reinstall).await,
+            Platform::Android => install_zsh_android().await,
+            Platform::Windows => install_zsh_windows().await,
+        }
+    }
+}
+
+/// Installs zsh on macOS via Homebrew.
+///
+/// # Errors
+/// Fails when Homebrew is missing, when running as root without a usable
+/// `SUDO_USER`, or when `brew install zsh` exits unsuccessfully.
+async fn install_zsh_macos(sudo: &SudoCapability) -> Result<()> {
+    if !command_exists("brew").await {
+        bail!("Homebrew not found. Install from https://brew.sh then re-run forge zsh setup");
+    }
+
+    // Homebrew refuses to run as root
+    if *sudo == SudoCapability::Root {
+        // If we got here via `sudo`, re-run brew as the invoking user that
+        // sudo recorded in SUDO_USER.
+        if let Ok(brew_user) = std::env::var("SUDO_USER") {
+            let status = Command::new("sudo")
+                .args(["-u", &brew_user, "brew", "install", "zsh"])
+                .stdout(std::process::Stdio::null())
+                .stderr(std::process::Stdio::null())
+                .status()
+                .await
+                .context("Failed to run brew as non-root user")?;
+
+            if !status.success() {
+                bail!("brew install zsh failed");
+            }
+            return Ok(());
+        }
+        bail!(
+            "Homebrew cannot run as root. Please run without sudo, or install zsh manually: brew install zsh"
+        );
+    }
+
+    let status = Command::new("brew")
+        .args(["install", "zsh"])
+        .stdout(std::process::Stdio::null())
+        .stderr(std::process::Stdio::null())
+        .status()
+        .await
+        .context("Failed to run brew install zsh")?;
+
+    if !status.success() {
+        bail!("brew install zsh failed");
+    }
+
+    Ok(())
+}
+
+/// A Linux package manager with knowledge of how to install and reinstall
+/// packages.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, strum_macros::Display)]
+#[strum(serialize_all = "kebab-case")]
+pub(super) enum LinuxPackageManager {
+    /// Debian / Ubuntu family.
+    AptGet,
+    /// Fedora / RHEL 8+ family.
+    Dnf,
+    /// RHEL 7 / CentOS 7 family (legacy).
+    Yum,
+    /// Arch Linux family.
+ Pacman, + /// Alpine Linux. + Apk, + /// openSUSE family. + Zypper, + /// Void Linux. + #[strum(serialize = "xbps-install")] + XbpsInstall, +} + +impl LinuxPackageManager { + /// Returns the argument list for a standard package installation. + pub(super) fn install_args>(&self, packages: &[S]) -> Vec { + let mut args = match self { + Self::AptGet => vec!["install".to_string(), "-y".to_string()], + Self::Dnf | Self::Yum => vec!["install".to_string(), "-y".to_string()], + Self::Pacman => vec!["-S".to_string(), "--noconfirm".to_string()], + Self::Apk => vec!["add".to_string(), "--no-cache".to_string()], + Self::Zypper => vec!["install".to_string(), "-y".to_string()], + Self::XbpsInstall => vec!["-Sy".to_string()], + }; + args.extend(packages.iter().map(|p| p.as_ref().to_string())); + args + } + + /// Returns the argument list that forces a full reinstall, restoring any + /// deleted files (e.g., broken zsh module `.so` files). + fn reinstall_args>(&self, packages: &[S]) -> Vec { + let mut args = match self { + Self::AptGet => vec![ + "install".to_string(), + "-y".to_string(), + "--reinstall".to_string(), + ], + Self::Dnf | Self::Yum => vec!["reinstall".to_string(), "-y".to_string()], + Self::Pacman => vec![ + "-S".to_string(), + "--noconfirm".to_string(), + "--overwrite".to_string(), + "*".to_string(), + ], + Self::Apk => vec![ + "add".to_string(), + "--no-cache".to_string(), + "--force-overwrite".to_string(), + ], + Self::Zypper => vec![ + "install".to_string(), + "-y".to_string(), + "--force".to_string(), + ], + Self::XbpsInstall => vec!["-Sfy".to_string()], + }; + args.extend(packages.iter().map(|p| p.as_ref().to_string())); + args + } + + /// Returns all supported package managers in detection-priority order. + pub(super) fn all() -> &'static [Self] { + &[ + Self::AptGet, + Self::Dnf, + Self::Yum, + Self::Pacman, + Self::Apk, + Self::Zypper, + Self::XbpsInstall, + ] + } + + /// Returns the package name for fzf. 
+ pub(super) fn fzf_package_name(&self) -> &'static str { + "fzf" + } + + /// Returns the package name for bat. + /// + /// On Debian/Ubuntu, the package is named "bat" (not "batcat"). + /// The binary is installed as "batcat" to avoid conflicts. + pub(super) fn bat_package_name(&self) -> &'static str { + "bat" + } + + /// Returns the package name for fd. + /// + /// On Debian/Ubuntu, the package is named "fd-find" due to naming + /// conflicts. + pub(super) fn fd_package_name(&self) -> &'static str { + match self { + Self::AptGet => "fd-find", + _ => "fd", + } + } + + /// Queries the available version of a package from the package manager. + /// + /// Returns None if the package is not available or version cannot be + /// determined. + pub(super) async fn query_available_version(&self, package: &str) -> Option { + let binary = self.to_string(); + + let output = match self { + Self::AptGet => { + // apt-cache policy shows available versions + Command::new("apt-cache") + .args(["policy", package]) + .stdout(std::process::Stdio::piped()) + .stderr(std::process::Stdio::null()) + .output() + .await + .ok()? + } + Self::Dnf | Self::Yum => { + // dnf/yum info shows available version + Command::new(&binary) + .args(["info", package]) + .stdout(std::process::Stdio::piped()) + .stderr(std::process::Stdio::null()) + .output() + .await + .ok()? + } + Self::Pacman => { + // pacman -Si shows sync db info + Command::new(&binary) + .args(["-Si", package]) + .stdout(std::process::Stdio::piped()) + .stderr(std::process::Stdio::null()) + .output() + .await + .ok()? + } + Self::Apk => { + // apk info shows version + Command::new(&binary) + .args(["info", package]) + .stdout(std::process::Stdio::piped()) + .stderr(std::process::Stdio::null()) + .output() + .await + .ok()? 
+ } + Self::Zypper => { + // zypper info shows available version + Command::new(&binary) + .args(["info", package]) + .stdout(std::process::Stdio::piped()) + .stderr(std::process::Stdio::null()) + .output() + .await + .ok()? + } + Self::XbpsInstall => { + // xbps-query -R shows remote package info + Command::new("xbps-query") + .args(["-R", package]) + .stdout(std::process::Stdio::piped()) + .stderr(std::process::Stdio::null()) + .output() + .await + .ok()? + } + }; + + if !output.status.success() { + return None; + } + + let out = String::from_utf8_lossy(&output.stdout); + + // Parse version from output based on package manager + match self { + Self::AptGet => { + // apt-cache policy output: " Candidate: 0.24.0-1" + for line in out.lines() { + if line.trim().starts_with("Candidate:") { + let version = line.split(':').nth(1)?.trim(); + if version != "(none)" { + // Extract version number (strip debian revision) + let version = version.split('-').next()?.to_string(); + return Some(version); + } + } + } + } + Self::Dnf | Self::Yum => { + // dnf info output: "Version : 0.24.0" + for line in out.lines() { + if line.starts_with("Version") { + let version = line.split(':').nth(1)?.trim().to_string(); + return Some(version); + } + } + } + Self::Pacman => { + // pacman -Si output: "Version : 0.24.0-1" + for line in out.lines() { + if line.starts_with("Version") { + let version = line.split(':').nth(1)?.trim(); + // Strip package revision + let version = version.split('-').next()?.to_string(); + return Some(version); + } + } + } + Self::Apk => { + // apk info output: "bat-0.24.0-r0 description:" + let first_line = out.lines().next()?; + if first_line.contains(package) { + // Extract version between package name and description + let parts: Vec<&str> = first_line.split('-').collect(); + if parts.len() >= 2 { + // Get version (skip package name, take version parts before -r0) + let version_parts: Vec<&str> = parts[1..] 
+ .iter() + .take_while(|p| !p.starts_with('r')) + .copied() + .collect(); + if !version_parts.is_empty() { + return Some(version_parts.join("-")); + } + } + } + } + Self::Zypper => { + // zypper info output: "Version: 0.24.0-1.1" + for line in out.lines() { + if line.starts_with("Version") { + let version = line.split(':').nth(1)?.trim(); + // Strip package revision + let version = version.split('-').next()?.to_string(); + return Some(version); + } + } + } + Self::XbpsInstall => { + // xbps-query output: "pkgver: bat-0.24.0_1" + for line in out.lines() { + if line.starts_with("pkgver:") { + let pkgver = line.split(':').nth(1)?.trim(); + // Extract version (format: package-version_revision) + let version = pkgver.split('-').nth(1)?; + let version = version.split('_').next()?.to_string(); + return Some(version); + } + } + } + } + + None + } +} + +/// Installs zsh on Linux using the first available package manager. +/// +/// When `reinstall` is true, uses reinstall flags to force re-extraction +/// of package files (e.g., when modules are broken but the package is +/// "already the newest version"). +async fn install_zsh_linux(sudo: &SudoCapability, reinstall: bool) -> Result<()> { + for mgr in LinuxPackageManager::all() { + let binary = mgr.to_string(); + if command_exists(&binary).await { + // apt-get requires a prior index refresh to avoid stale metadata + if *mgr == LinuxPackageManager::AptGet { + let _ = run_maybe_sudo(&binary, &["update", "-qq"], sudo).await; + } + let args = if reinstall { + mgr.reinstall_args(&["zsh"]) + } else { + mgr.install_args(&["zsh"]) + }; + return run_maybe_sudo( + &binary, + &args.iter().map(String::as_str).collect::>(), + sudo, + ) + .await; + } + } + + bail!( + "No supported package manager found. Install zsh manually using your system's package manager." + ); +} + +/// Installs zsh on Android via pkg. +async fn install_zsh_android() -> Result<()> { + if !command_exists("pkg").await { + bail!("pkg not found on Android. 
Install Termux's package manager first."); + } + + let status = Command::new("pkg") + .args(["install", "-y", "zsh"]) + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .status() + .await + .context("Failed to run pkg install zsh")?; + + if !status.success() { + bail!("pkg install zsh failed"); + } + + Ok(()) +} + +/// Installs zsh on Windows by downloading MSYS2 packages into Git Bash's /usr +/// tree. +/// +/// Downloads zsh and its runtime dependencies (ncurses, libpcre2_8, libiconv, +/// libgdbm, gcc-libs) from the MSYS2 repository, extracts them, and copies +/// the files into the Git Bash `/usr` directory. +async fn install_zsh_windows() -> Result<()> { + let home = std::env::var("HOME").context("HOME environment variable not set")?; + let temp_dir = PathBuf::from(&home).join(".forge-zsh-install-temp"); + + // Clean up any previous temp directory + if temp_dir.exists() { + let _ = tokio::fs::remove_dir_all(&temp_dir).await; + } + tokio::fs::create_dir_all(&temp_dir) + .await + .context("Failed to create temp directory")?; + + // Ensure cleanup on exit + let _cleanup = TempDirCleanup(temp_dir.clone()); + + // Step 1: Resolve and download all packages in parallel + let client = reqwest::Client::builder() + .timeout(std::time::Duration::from_secs(120)) + .build() + .context("Failed to create HTTP client")?; + + let repo_index = client + .get(format!("{}/", MSYS2_BASE)) + .send() + .await + .context("Failed to fetch MSYS2 repo index")? 
+ .text() + .await + .context("Failed to read MSYS2 repo index")?; + + // Download all packages in parallel + let download_futures: Vec<_> = MSYS2_PKGS + .iter() + .map(|pkg| { + let client = client.clone(); + let temp_dir = temp_dir.clone(); + let repo_index = repo_index.clone(); + async move { + let pkg_file = resolve_msys2_package(pkg, &repo_index); + let url = format!("{}/{}", MSYS2_BASE, pkg_file); + let dest = temp_dir.join(format!("{}.pkg.tar.zst", pkg)); + + let response = client + .get(&url) + .send() + .await + .context(format!("Failed to download {}", pkg))?; + + if !response.status().is_success() { + bail!("Failed to download {}: HTTP {}", pkg, response.status()); + } + + let bytes = response + .bytes() + .await + .context(format!("Failed to read {} response", pkg))?; + + tokio::fs::write(&dest, &bytes) + .await + .context(format!("Failed to write {}", pkg))?; + + Ok::<_, anyhow::Error>(()) + } + }) + .collect(); + + let results = futures::future::join_all(download_futures).await; + for result in results { + result?; + } + + // Step 2: Detect extraction method and extract + let extract_method = detect_extract_method(&temp_dir).await?; + extract_all_packages(&temp_dir, &extract_method).await?; + + // Step 3: Verify zsh.exe was extracted + if !temp_dir.join("usr").join("bin").join("zsh.exe").exists() { + bail!("zsh.exe not found after extraction. The package may be corrupt."); + } + + // Step 4: Copy into Git Bash /usr tree + install_to_git_bash(&temp_dir).await?; + + // Step 5: Configure ~/.zshenv with fpath entries + configure_zshenv().await?; + + Ok(()) +} + +/// Resolves the latest MSYS2 package filename for a given package name by +/// parsing the repository index HTML. +/// +/// Falls back to hardcoded package names if parsing fails. 
+fn resolve_msys2_package(pkg_name: &str, repo_index: &str) -> String { + // Try to find the latest package in the repo index + let pattern = format!( + r#"{}-[0-9][^\s"]*x86_64\.pkg\.tar\.zst"#, + regex::escape(pkg_name) + ); + if let Ok(re) = regex::Regex::new(&pattern) { + let mut matches: Vec<&str> = re + .find_iter(repo_index) + .map(|m| m.as_str()) + // Exclude development packages + .filter(|s| !s.contains("-devel-")) + .collect(); + + matches.sort(); + + if let Some(latest) = matches.last() { + return (*latest).to_string(); + } + } + + // Fallback to hardcoded names + match pkg_name { + "zsh" => "zsh-5.9-5-x86_64.pkg.tar.zst", + "ncurses" => "ncurses-6.6-1-x86_64.pkg.tar.zst", + "libpcre2_8" => "libpcre2_8-10.47-1-x86_64.pkg.tar.zst", + "libiconv" => "libiconv-1.18-2-x86_64.pkg.tar.zst", + "libgdbm" => "libgdbm-1.26-1-x86_64.pkg.tar.zst", + "gcc-libs" => "gcc-libs-15.2.0-1-x86_64.pkg.tar.zst", + _ => "unknown", + } + .to_string() +} + +/// Extraction methods available on Windows. +#[derive(Debug)] +enum ExtractMethod { + /// zstd + tar are both available natively + ZstdTar, + /// 7-Zip (7z command) + SevenZip, + /// 7-Zip standalone (7za command) + SevenZipA, + /// PowerShell with a downloaded zstd.exe + PowerShell { + /// Path to the downloaded zstd.exe + zstd_exe: PathBuf, + }, +} + +/// Detects the best available extraction method on the system. 
+async fn detect_extract_method(temp_dir: &Path) -> Result { + // Check zstd + tar + let has_zstd = command_exists("zstd").await; + let has_tar = command_exists("tar").await; + if has_zstd && has_tar { + return Ok(ExtractMethod::ZstdTar); + } + + // Check 7z + if command_exists("7z").await { + return Ok(ExtractMethod::SevenZip); + } + + // Check 7za + if command_exists("7za").await { + return Ok(ExtractMethod::SevenZipA); + } + + // Fall back to PowerShell + downloaded zstd.exe + if command_exists("powershell.exe").await { + let zstd_dir = temp_dir.join("zstd-tool"); + tokio::fs::create_dir_all(&zstd_dir) + .await + .context("Failed to create zstd tool directory")?; + + let zstd_zip_url = + "https://github.com/facebook/zstd/releases/download/v1.5.5/zstd-v1.5.5-win64.zip"; + + let client = reqwest::Client::new(); + let bytes = client + .get(zstd_zip_url) + .send() + .await + .context("Failed to download zstd")? + .bytes() + .await + .context("Failed to read zstd download")?; + + let zip_path = zstd_dir.join("zstd.zip"); + tokio::fs::write(&zip_path, &bytes) + .await + .context("Failed to write zstd.zip")?; + + // Extract using PowerShell + let zip_win = to_win_path(&zip_path); + let dir_win = to_win_path(&zstd_dir); + let ps_cmd = format!( + "Expand-Archive -Path '{}' -DestinationPath '{}' -Force", + zip_win, dir_win + ); + + let status = Command::new("powershell.exe") + .args(["-NoProfile", "-Command", &ps_cmd]) + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .status() + .await + .context("Failed to extract zstd.zip")?; + + if !status.success() { + bail!("Failed to extract zstd.zip via PowerShell"); + } + + // Find zstd.exe recursively + let zstd_exe = find_file_recursive(&zstd_dir, "zstd.exe").await; + match zstd_exe { + Some(path) => return Ok(ExtractMethod::PowerShell { zstd_exe: path }), + None => bail!("Could not find zstd.exe after extraction"), + } + } + + bail!( + "No extraction tool found (need zstd+tar, 7-Zip, or 
PowerShell). Install 7-Zip from https://www.7-zip.org/ and re-run." + ) +} + +/// Extracts all downloaded MSYS2 packages in the temp directory. +async fn extract_all_packages(temp_dir: &Path, method: &ExtractMethod) -> Result<()> { + for pkg in MSYS2_PKGS { + let zst_file = temp_dir.join(format!("{}.pkg.tar.zst", pkg)); + let tar_file = temp_dir.join(format!("{}.pkg.tar", pkg)); + + match method { + ExtractMethod::ZstdTar => { + run_cmd( + "zstd", + &[ + "-d", + &path_str(&zst_file), + "-o", + &path_str(&tar_file), + "--quiet", + ], + temp_dir, + ) + .await?; + run_cmd("tar", &["-xf", &path_str(&tar_file)], temp_dir).await?; + let _ = tokio::fs::remove_file(&tar_file).await; + } + ExtractMethod::SevenZip => { + run_cmd("7z", &["x", "-y", &path_str(&zst_file)], temp_dir).await?; + run_cmd("7z", &["x", "-y", &path_str(&tar_file)], temp_dir).await?; + let _ = tokio::fs::remove_file(&tar_file).await; + } + ExtractMethod::SevenZipA => { + run_cmd("7za", &["x", "-y", &path_str(&zst_file)], temp_dir).await?; + run_cmd("7za", &["x", "-y", &path_str(&tar_file)], temp_dir).await?; + let _ = tokio::fs::remove_file(&tar_file).await; + } + ExtractMethod::PowerShell { zstd_exe } => { + let zst_win = to_win_path(&zst_file); + let tar_win = to_win_path(&tar_file); + let zstd_win = to_win_path(zstd_exe); + let ps_cmd = format!("& '{}' -d '{}' -o '{}' --quiet", zstd_win, zst_win, tar_win); + let status = Command::new("powershell.exe") + .args(["-NoProfile", "-Command", &ps_cmd]) + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .status() + .await + .context(format!("Failed to decompress {}", pkg))?; + + if !status.success() { + bail!("Failed to decompress {}", pkg); + } + + run_cmd("tar", &["-xf", &path_str(&tar_file)], temp_dir).await?; + let _ = tokio::fs::remove_file(&tar_file).await; + } + } + } + + Ok(()) +} + +/// Copies extracted zsh files into Git Bash's /usr tree. +/// +/// Attempts UAC elevation via PowerShell if needed. 
+async fn install_to_git_bash(temp_dir: &Path) -> Result<()> { + let git_usr = if command_exists("cygpath").await { + let output = Command::new("cygpath") + .args(["-w", "/usr"]) + .stdout(std::process::Stdio::piped()) + .output() + .await?; + String::from_utf8_lossy(&output.stdout).trim().to_string() + } else { + r"C:\Program Files\Git\usr".to_string() + }; + + let temp_win = to_win_path(temp_dir); + + // Generate PowerShell install script + let ps_script = format!( + r#"$src = '{}' +$usr = '{}' +Get-ChildItem -Path "$src\usr\bin" -Filter "*.exe" | ForEach-Object {{ + Copy-Item -Force $_.FullName "$usr\bin\" +}} +Get-ChildItem -Path "$src\usr\bin" -Filter "*.dll" | ForEach-Object {{ + Copy-Item -Force $_.FullName "$usr\bin\" +}} +if (Test-Path "$src\usr\lib\zsh") {{ + Copy-Item -Recurse -Force "$src\usr\lib\zsh" "$usr\lib\" +}} +if (Test-Path "$src\usr\share\zsh") {{ + Copy-Item -Recurse -Force "$src\usr\share\zsh" "$usr\share\" +}} +Write-Host "ZSH_INSTALL_OK""#, + temp_win, git_usr + ); + + let ps_file = temp_dir.join("install.ps1"); + tokio::fs::write(&ps_file, &ps_script) + .await + .context("Failed to write install script")?; + + let ps_file_win = to_win_path(&ps_file); + + let zsh_exe = PathBuf::from(&git_usr).join("bin").join("zsh.exe"); + + // Try elevated install via UAC + let uac_cmd = format!( + "Start-Process powershell -Verb RunAs -Wait -ArgumentList \"-NoProfile -ExecutionPolicy Bypass -File `\"{}`\"\"", + ps_file_win + ); + + let _ = Command::new("powershell.exe") + .args(["-NoProfile", "-Command", &uac_cmd]) + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .status() + .await; + + // Fallback: direct execution if already admin + if !zsh_exe.exists() { + let _ = Command::new("powershell.exe") + .args([ + "-NoProfile", + "-ExecutionPolicy", + "Bypass", + "-File", + &ps_file_win, + ]) + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .status() + .await; + } + + if !zsh_exe.exists() && 
!command_exists("zsh").await { + bail!( + "zsh.exe not found at {} after installation. Try re-running from an Administrator Git Bash.", + zsh_exe.display() + ); + } + + Ok(()) +} + +/// Configures `~/.zshenv` with fpath entries for MSYS2 zsh function +/// subdirectories. +async fn configure_zshenv() -> Result<()> { + let home = std::env::var("HOME").context("HOME not set")?; + let zshenv_path = PathBuf::from(&home).join(".zshenv"); + + let mut content = if zshenv_path.exists() { + tokio::fs::read_to_string(&zshenv_path) + .await + .unwrap_or_default() + } else { + String::new() + }; + + // Remove any previous installer block + if let (Some(start), Some(end)) = ( + content.find("# --- zsh installer fpath"), + content.find("# --- end zsh installer fpath ---"), + ) && start < end + { + let end_of_line = content[end..] + .find('\n') + .map(|i| end + i + 1) + .unwrap_or(content.len()); + content.replace_range(start..end_of_line, ""); + } + + let fpath_block = include_str!("../scripts/zshenv_fpath_block.sh"); + + content.push_str(fpath_block); + tokio::fs::write(&zshenv_path, &content) + .await + .context("Failed to write ~/.zshenv")?; + + Ok(()) +} + +#[cfg(test)] +mod tests { + use pretty_assertions::assert_eq; + + use super::*; + + #[test] + fn test_resolve_msys2_package_fallback() { + // Empty repo index should fall back to hardcoded names + let actual = resolve_msys2_package("zsh", ""); + let expected = "zsh-5.9-5-x86_64.pkg.tar.zst"; + assert_eq!(actual, expected); + } + + #[test] + fn test_resolve_msys2_package_from_index() { + let fake_index = r#" + zsh-5.9-3-x86_64.pkg.tar.zst + zsh-5.9-5-x86_64.pkg.tar.zst + zsh-5.8-1-x86_64.pkg.tar.zst + "#; + let actual = resolve_msys2_package("zsh", fake_index); + let expected = "zsh-5.9-5-x86_64.pkg.tar.zst"; + assert_eq!(actual, expected); + } + + #[test] + fn test_resolve_msys2_package_excludes_devel() { + let fake_index = r#" + ncurses-devel-6.6-1-x86_64.pkg.tar.zst + ncurses-6.6-1-x86_64.pkg.tar.zst + "#; + let actual = 
resolve_msys2_package("ncurses", fake_index);
+        let expected = "ncurses-6.6-1-x86_64.pkg.tar.zst";
+        assert_eq!(actual, expected);
+    }
+}
diff --git a/crates/forge_main/src/zsh/setup/installer.rs b/crates/forge_main/src/zsh/setup/installer.rs
new file mode 100644
index 0000000000..d1ff96ab15
--- /dev/null
+++ b/crates/forge_main/src/zsh/setup/installer.rs
@@ -0,0 +1,145 @@
+use std::future::Future;
+use std::pin::Pin;
+
+/// A unit of installation work.
+#[async_trait::async_trait]
+pub trait Installation: Send {
+    async fn install(self) -> anyhow::Result<()>;
+}
+
+/// A no-op installation that always succeeds.
+///
+/// Useful as a placeholder when you need a `Group` that does nothing
+/// (e.g., as the seed for a builder chain).
+pub struct Noop;
+
+#[async_trait::async_trait]
+impl Installation for Noop {
+    async fn install(self) -> anyhow::Result<()> {
+        Ok(())
+    }
+}
+
+/// Type alias for a type-erased, boxed installation closure.
+type BoxedInstall =
+    Box<dyn FnOnce() -> Pin<Box<dyn Future<Output = anyhow::Result<()>> + Send>> + Send>;
+
+/// Type alias for the success callback.
+type OnOk = Box<dyn FnOnce() -> anyhow::Result<()> + Send>;
+
+/// Type alias for the failure callback.
+type OnErr = Box<dyn FnOnce(anyhow::Error) -> anyhow::Result<()> + Send>;
+
+/// A composable group of installation tasks that can be executed
+/// sequentially, in parallel, conditionally, or with result callbacks.
+///
+/// ```ignore
+/// Group::unit(install_a)
+///     .notify_ok(|| println!("a done"))
+///     .notify_err(|e| { eprintln!("a failed: {e}"); Err(e) })
+///     .then(Group::unit(install_b))
+///     .alongside(Group::unit(install_c))
+/// ```
+pub enum Group {
+    /// A single type-erased installation.
+    Unit(BoxedInstall),
+    /// Run the left group first, then run the right group.
+    Sequential(Box<Group>, Box<Group>),
+    /// Run both groups concurrently.
+    Parallel(Box<Group>, Box<Group>),
+    /// Run the inner group only if the condition is true; otherwise no-op.
+    When(bool, Box<Group>),
+    /// Run the inner group, then dispatch to callbacks based on the result.
+    Notify {
+        inner: Box<Group>,
+        on_ok: Option<OnOk>,
+        on_err: Option<OnErr>,
+    },
+}
+
+impl Group {
+    /// Creates a `Group::Unit` from any `Installation` implementor.
+    pub fn unit(installation: impl Installation + 'static) -> Self {
+        Group::Unit(Box::new(|| Box::pin(installation.install())))
+    }
+
+    /// Creates a conditional group that only runs if `condition` is true.
+    pub fn when(condition: bool, group: Group) -> Self {
+        Group::When(condition, Box::new(group))
+    }
+
+    /// Appends another group to run after this one completes.
+    pub fn then(self, next: Group) -> Self {
+        Group::Sequential(Box::new(self), Box::new(next))
+    }
+
+    /// Appends another group to run concurrently with this one.
+    pub fn alongside(self, other: Group) -> Self {
+        Group::Parallel(Box::new(self), Box::new(other))
+    }
+
+    /// Attaches a success callback to this group.
+    ///
+    /// Calling this on an existing `Notify` replaces only the success
+    /// callback, keeping any failure callback attached earlier.
+    pub fn notify_ok(self, on_ok: impl FnOnce() -> anyhow::Result<()> + Send + 'static) -> Self {
+        match self {
+            Group::Notify { inner, on_ok: _, on_err } => {
+                Group::Notify { inner, on_ok: Some(Box::new(on_ok)), on_err }
+            }
+            other => Group::Notify {
+                inner: Box::new(other),
+                on_ok: Some(Box::new(on_ok)),
+                on_err: None,
+            },
+        }
+    }
+
+    /// Attaches a failure callback to this group.
+    ///
+    /// The callback receives the error and may swallow it (return `Ok`) or
+    /// propagate it. On an existing `Notify`, only the failure callback is
+    /// replaced.
+    pub fn notify_err(
+        self,
+        on_err: impl FnOnce(anyhow::Error) -> anyhow::Result<()> + Send + 'static,
+    ) -> Self {
+        match self {
+            Group::Notify { inner, on_ok, on_err: _ } => {
+                Group::Notify { inner, on_ok, on_err: Some(Box::new(on_err)) }
+            }
+            other => Group::Notify {
+                inner: Box::new(other),
+                on_ok: None,
+                on_err: Some(Box::new(on_err)),
+            },
+        }
+    }
+}
+
+#[async_trait::async_trait]
+impl Installation for Group {
+    async fn install(self) -> anyhow::Result<()> {
+        match self {
+            Group::Unit(f) => f().await,
+            Group::Sequential(left, right) => {
+                left.install().await?;
+                right.install().await
+            }
+            Group::Parallel(left, right) => {
+                // Both sides always run to completion; if the left fails its
+                // error wins, otherwise the right's result is returned.
+                let (l, r) = tokio::join!(left.install(), right.install());
+                l.and(r)
+            }
+            Group::When(condition, inner) => {
+                if condition {
+                    inner.install().await
+                } else {
+                    Ok(())
+                }
+            }
+            Group::Notify { inner, on_ok, on_err } => match inner.install().await {
+                Ok(()) => match on_ok {
+                    Some(f) => f(),
+                    None => Ok(()),
+                },
+                Err(e) => match on_err {
+                    Some(f) => f(e),
+                    None => Err(e),
+                },
+            },
+        }
+    }
+}
diff --git a/crates/forge_main/src/zsh/setup/libc.rs b/crates/forge_main/src/zsh/setup/libc.rs
new file mode 100644
index 0000000000..3022795371
--- /dev/null
+++ b/crates/forge_main/src/zsh/setup/libc.rs
@@ -0,0 +1,188 @@
+//! Libc detection for Linux systems.
+//!
+//! Determines whether the system uses musl or GNU libc, which affects
+//! which binary variants to download for CLI tools (fzf, bat, fd).
+
+use std::path::Path;
+
+use anyhow::{Result, bail};
+use tokio::process::Command;
+
+use super::platform::{Platform, detect_platform};
+
+/// Type of C standard library (libc) on Linux systems.
+///
+/// Used to determine which binary variant to download for CLI tools
+/// (fzf, bat, fd) that provide both musl and GNU builds.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum LibcType { + /// musl libc (statically linked, works everywhere) + Musl, + /// GNU libc / glibc (dynamically linked, requires compatible version) + Gnu, +} + +impl std::fmt::Display for LibcType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + LibcType::Musl => write!(f, "musl"), + LibcType::Gnu => write!(f, "GNU"), + } + } +} + +/// Detects the libc type on Linux systems. +/// +/// Uses multiple detection methods in order: +/// 1. Check for musl library files in `/lib/libc.musl-{arch}.so.1` +/// 2. Run `ldd /bin/ls` and check for "musl" in output +/// 3. Extract glibc version from `ldd --version` and verify >= 2.39 +/// 4. Verify all required shared libraries exist +/// +/// Returns `LibcType::Musl` as safe fallback if detection fails or +/// if glibc version is too old. +/// +/// # Errors +/// +/// Returns error only if running on non-Linux platform (should not be called). +pub async fn detect_libc_type() -> Result { + let platform = detect_platform(); + if platform != Platform::Linux { + bail!( + "detect_libc_type() called on non-Linux platform: {}", + platform + ); + } + + // Method 1: Check for musl library files + let arch = std::env::consts::ARCH; + let musl_paths = [ + format!("/lib/libc.musl-{}.so.1", arch), + format!("/usr/lib/libc.musl-{}.so.1", arch), + ]; + for path in &musl_paths { + if Path::new(path).exists() { + return Ok(LibcType::Musl); + } + } + + // Method 2: Check ldd output for "musl" + if let Ok(output) = Command::new("ldd").arg("/bin/ls").output().await + && output.status.success() + { + let stdout = String::from_utf8_lossy(&output.stdout); + if stdout.to_lowercase().contains("musl") { + return Ok(LibcType::Musl); + } + } + + // Method 3: Check glibc version + let glibc_version = extract_glibc_version().await; + if let Some(version) = glibc_version { + // Require glibc >= 2.39 for GNU binaries + if version >= (2, 39) { + // Method 4: Verify all 
required shared libraries exist + if check_gnu_runtime_deps() { + return Ok(LibcType::Gnu); + } + } + } + + // Safe fallback: use musl (works everywhere) + Ok(LibcType::Musl) +} + +/// Extracts glibc version from `ldd --version` or `getconf GNU_LIBC_VERSION`. +/// +/// Returns `Some((major, minor))` if version found, `None` otherwise. +async fn extract_glibc_version() -> Option<(u32, u32)> { + // Try ldd --version first + if let Ok(output) = Command::new("ldd").arg("--version").output().await + && output.status.success() + { + let stdout = String::from_utf8_lossy(&output.stdout); + if let Some(version) = parse_version_from_text(&stdout) { + return Some(version); + } + } + + // Fall back to getconf + if let Ok(output) = Command::new("getconf") + .arg("GNU_LIBC_VERSION") + .output() + .await + && output.status.success() + { + let stdout = String::from_utf8_lossy(&output.stdout); + if let Some(version) = parse_version_from_text(&stdout) { + return Some(version); + } + } + + None +} + +/// Parses version string like "2.39" or "glibc 2.39" from text. +/// +/// Returns `Some((major, minor))` if found, `None` otherwise. +fn parse_version_from_text(text: &str) -> Option<(u32, u32)> { + use regex::Regex; + let re = Regex::new(r"(\d+)\.(\d+)").ok()?; + let caps = re.captures(text)?; + let major = caps.get(1)?.as_str().parse().ok()?; + let minor = caps.get(2)?.as_str().parse().ok()?; + Some((major, minor)) +} + +/// Checks if all required GNU runtime dependencies are available. +/// +/// Verifies existence of: +/// - `libgcc_s.so.1` (GCC runtime) +/// - `libm.so.6` (math library) +/// - `libc.so.6` (C standard library) +/// +/// Returns `true` only if ALL libraries found. 
+fn check_gnu_runtime_deps() -> bool { + let required_libs = ["libgcc_s.so.1", "libm.so.6", "libc.so.6"]; + let arch = std::env::consts::ARCH; + let search_paths = [ + "/lib", + "/lib64", + "/usr/lib", + "/usr/lib64", + &format!("/lib/{}-linux-gnu", arch), + &format!("/usr/lib/{}-linux-gnu", arch), + ]; + + for lib in &required_libs { + let mut found = false; + for path in &search_paths { + let lib_path = Path::new(path).join(lib); + if lib_path.exists() { + found = true; + break; + } + } + if !found { + // Fall back to ldconfig -p + if !check_lib_with_ldconfig(lib) { + return false; + } + } + } + + true +} + +/// Checks if a library exists using `ldconfig -p`. +/// +/// Returns `true` if library found, `false` otherwise. +fn check_lib_with_ldconfig(lib_name: &str) -> bool { + if let Ok(output) = std::process::Command::new("ldconfig").arg("-p").output() + && output.status.success() + { + let stdout = String::from_utf8_lossy(&output.stdout); + return stdout.contains(lib_name); + } + false +} diff --git a/crates/forge_main/src/zsh/setup/mod.rs b/crates/forge_main/src/zsh/setup/mod.rs new file mode 100644 index 0000000000..c15a5c402a --- /dev/null +++ b/crates/forge_main/src/zsh/setup/mod.rs @@ -0,0 +1,75 @@ +//! ZSH setup orchestrator for `forge zsh setup`. +//! +//! Detects and installs all dependencies required for forge's shell +//! integration: zsh, Oh My Zsh, zsh-autosuggestions, zsh-syntax-highlighting. +//! Handles platform-specific installation (Linux, macOS, Android, Windows/Git +//! Bash) with parallel dependency detection and installation where possible. +//! +//! # Module layout +//! +//! | Module | Responsibility | +//! |--------------------|----------------| +//! | `platform` | OS detection (`Platform`, `detect_platform`) | +//! | `libc` | C-library detection (`LibcType`, `detect_libc_type`) | +//! | `types` | Status enums (`ZshStatus`, `FzfStatus`, …, `DependencyStatus`) | +//! | `util` | Path / command helpers, `version_gte`, sudo runner | +//! 
| `detect` | Dependency detection (`detect_all_dependencies`, per-tool) | +//! | `install_zsh` | ZSH + zshenv installation (per platform) | +//! | `install_plugins` | Oh My Zsh, zsh-autosuggestions, zsh-syntax-highlighting, bash_profile | +//! | `install_tools` | fzf / bat / fd (package manager + GitHub fallback) | + +mod detect; +mod install_plugins; +mod install_tools; +mod install_zsh; +mod installer; +mod libc; +mod platform; +mod types; +mod util; +// ── Constants (shared across submodules) ───────────────────────────────────── + +/// Base URL for MSYS2 package repository. +pub(super) const MSYS2_BASE: &str = "https://repo.msys2.org/msys/x86_64"; + +/// Package names required for ZSH on MSYS2/Windows. +pub(super) const MSYS2_PKGS: &[&str] = &[ + "zsh", + "ncurses", + "libpcre2_8", + "libiconv", + "libgdbm", + "gcc-libs", +]; + +/// URL for the Oh My Zsh install script. +pub(super) const OMZ_INSTALL_URL: &str = + "https://raw.githubusercontent.com/ohmyzsh/ohmyzsh/master/tools/install.sh"; + +/// Minimum acceptable fzf version. +pub(super) const FZF_MIN_VERSION: &str = "0.36.0"; + +/// Minimum acceptable bat version. +pub(super) const BAT_MIN_VERSION: &str = "0.20.0"; + +/// Minimum acceptable fd version. +pub(super) const FD_MIN_VERSION: &str = "10.0.0"; + +// ── Public re-exports ──────────────────────────────────────────────────────── +// +// These items are the **only** public surface of the `setup` module and must +// match exactly what `zsh/mod.rs` imports via `pub use setup::{…}`. 
+ +pub use detect::{detect_all_dependencies, detect_git, detect_sudo}; +pub use install_plugins::{ + ConfigureBashProfile, InstallAutosuggestions, InstallOhMyZsh, InstallSyntaxHighlighting, +}; +pub use install_tools::{InstallBat, InstallFd, InstallFzf}; +pub use install_zsh::InstallZsh; +pub use installer::{Group, Installation, Noop}; +pub use platform::{Platform, detect_platform}; +pub use types::{ + BatStatus, DependencyStatus, FdStatus, FzfStatus, OmzStatus, PluginStatus, SudoCapability, + ZshStatus, +}; +pub use util::resolve_command_path; diff --git a/crates/forge_main/src/zsh/setup/platform.rs b/crates/forge_main/src/zsh/setup/platform.rs new file mode 100644 index 0000000000..b3e5e3089a --- /dev/null +++ b/crates/forge_main/src/zsh/setup/platform.rs @@ -0,0 +1,169 @@ +//! Platform and architecture detection for the ZSH setup orchestrator. +//! +//! Detects the current operating system platform at runtime, distinguishing +//! between Linux, macOS, Windows (Git Bash/MSYS2/Cygwin), and Android (Termux). +//! Also detects the CPU architecture for download URL construction. + +use std::path::Path; + +use anyhow::{Result, bail}; + +/// Represents the detected operating system platform. +#[derive(Debug, Clone, Copy, PartialEq, Eq, strum_macros::Display)] +pub enum Platform { + /// Linux (excluding Android) + Linux, + /// macOS / Darwin + #[strum(to_string = "macOS")] + MacOS, + /// Windows (Git Bash, MSYS2, Cygwin) + Windows, + /// Android (Termux or similar) + Android, +} + +impl Platform { + /// Returns the OS identifier used in fzf release asset names. + pub fn fzf_os(&self) -> &'static str { + match self { + Platform::Linux => "linux", + Platform::MacOS => "darwin", + Platform::Windows => "windows", + Platform::Android => "android", + } + } + + /// Returns the OS pattern used to search for matching fzf release assets. + /// + /// Android falls back to `"linux"` because fzf does not ship + /// android-specific binaries. 
+ pub fn fzf_asset_pattern(&self) -> &'static str { + match self { + Platform::Android => "linux", + other => other.fzf_os(), + } + } + + /// Returns the default archive extension for tool downloads on this + /// platform. + pub fn archive_ext(&self) -> &'static str { + match self { + Platform::Windows => "zip", + _ => "tar.gz", + } + } +} + +/// Detected CPU architecture. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum Arch { + /// 64-bit x86 (Intel / AMD) + X86_64, + /// 64-bit ARM (Apple Silicon, Graviton, etc.) + Aarch64, +} + +impl Arch { + /// Detects the architecture from `std::env::consts::ARCH`. + pub fn detect() -> Result { + match std::env::consts::ARCH { + "x86_64" => Ok(Arch::X86_64), + "aarch64" => Ok(Arch::Aarch64), + other => bail!("Unsupported architecture: {}", other), + } + } + + /// Returns the Go-style architecture name used in fzf release URLs. + pub fn as_go(&self) -> &'static str { + match self { + Arch::X86_64 => "amd64", + Arch::Aarch64 => "arm64", + } + } + + /// Returns the Rust target-triple architecture prefix used in bat/fd + /// release URLs. + pub fn as_rust(&self) -> &'static str { + match self { + Arch::X86_64 => "x86_64", + Arch::Aarch64 => "aarch64", + } + } +} + +/// Detects the current operating system platform at runtime. +/// +/// On Linux, further distinguishes Android from regular Linux by checking +/// for Termux environment variables and system files. 
+pub fn detect_platform() -> Platform { + if cfg!(target_os = "windows") { + return Platform::Windows; + } + if cfg!(target_os = "macos") { + return Platform::MacOS; + } + if cfg!(target_os = "android") { + return Platform::Android; + } + + // On Linux, check for Android environment + if cfg!(target_os = "linux") && is_android() { + return Platform::Android; + } + + // Also check the OS string at runtime for MSYS2/Cygwin environments + let os = std::env::consts::OS; + if os.starts_with("windows") || os.starts_with("msys") || os.starts_with("cygwin") { + return Platform::Windows; + } + + Platform::Linux +} + +/// Checks if running on Android (Termux or similar). +fn is_android() -> bool { + // Check Termux PREFIX + if let Ok(prefix) = std::env::var("PREFIX") + && prefix.contains("com.termux") + { + return true; + } + // Check Android-specific env vars + if std::env::var("ANDROID_ROOT").is_ok() || std::env::var("ANDROID_DATA").is_ok() { + return true; + } + // Check for Android build.prop + Path::new("/system/build.prop").exists() +} + +#[cfg(test)] +mod tests { + use pretty_assertions::assert_eq; + + use super::*; + + #[test] + fn test_detect_platform_returns_valid() { + let actual = detect_platform(); + // On the test runner OS, we should get a valid platform + let is_valid = matches!( + actual, + Platform::Linux | Platform::MacOS | Platform::Windows | Platform::Android + ); + assert!(is_valid, "Expected valid platform, got {:?}", actual); + } + + #[test] + fn test_platform_display() { + assert_eq!(format!("{}", Platform::Linux), "Linux"); + assert_eq!(format!("{}", Platform::MacOS), "macOS"); + assert_eq!(format!("{}", Platform::Windows), "Windows"); + assert_eq!(format!("{}", Platform::Android), "Android"); + } + + #[test] + fn test_arch_detect() { + let actual = Arch::detect(); + assert!(actual.is_ok(), "Arch::detect() should succeed on CI"); + } +} diff --git a/crates/forge_main/src/zsh/setup/types.rs b/crates/forge_main/src/zsh/setup/types.rs new file mode 
100644 index 0000000000..a52e87f804 --- /dev/null +++ b/crates/forge_main/src/zsh/setup/types.rs @@ -0,0 +1,395 @@ +//! Dependency status types for the ZSH setup orchestrator. +//! +//! Pure data types representing the installation status of each dependency +//! (zsh, Oh My Zsh, plugins, fzf, bat, fd) and related capability enums. + +/// Status of the zsh shell installation. +#[derive(Debug, Clone)] +pub enum ZshStatus { + /// zsh was not found on the system. + NotFound, + /// zsh was found but modules are broken (needs reinstall). + Broken { + /// Path to the zsh binary + path: String, + }, + /// zsh is installed and fully functional. + Functional { + /// Detected version string (e.g., "5.9") + version: String, + /// Path to the zsh binary + path: String, + }, +} + +/// Status of Oh My Zsh installation. +#[derive(Debug, Clone)] +pub enum OmzStatus { + /// Oh My Zsh is not installed. + NotInstalled, + /// Oh My Zsh is installed at the given path. + Installed, +} + +/// Status of a zsh plugin (autosuggestions or syntax-highlighting). +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum PluginStatus { + /// Plugin is not installed. + NotInstalled, + /// Plugin is installed. + Installed, +} + +/// Status of fzf installation. +#[derive(Debug, Clone)] +pub enum FzfStatus { + /// fzf was not found. + NotFound, + /// fzf was found with the given version. `meets_minimum` indicates whether + /// it meets the minimum required version. + Found { + /// Detected version string + version: String, + /// Whether the version meets the minimum requirement + meets_minimum: bool, + }, +} + +/// Status of bat installation. +#[derive(Debug, Clone)] +pub enum BatStatus { + /// bat was not found. + NotFound, + /// bat is installed. + Installed { + /// Detected version string + version: String, + /// Whether the version meets the minimum requirement (0.20.0+) + meets_minimum: bool, + }, +} + +/// Status of fd installation. 
+#[derive(Debug, Clone)] +pub enum FdStatus { + /// fd was not found. + NotFound, + /// fd is installed. + Installed { + /// Detected version string + version: String, + /// Whether the version meets the minimum requirement (10.0.0+) + meets_minimum: bool, + }, +} + +/// Reason a dependency appears in the missing list. +#[derive(Debug, Clone, Copy, PartialEq, Eq, strum_macros::Display)] +pub enum ItemReason { + /// The tool is not installed at all. + #[strum(to_string = "missing")] + Missing, + /// The tool is installed but below the minimum required version. + #[strum(to_string = "outdated")] + Outdated, +} + +/// Identifies a dependency managed by the ZSH setup orchestrator. +#[derive(Debug, Clone, Copy, PartialEq, Eq, strum_macros::Display)] +pub enum Dependency { + /// zsh shell + #[strum(to_string = "zsh")] + Zsh, + /// Oh My Zsh plugin framework + #[strum(to_string = "Oh My Zsh")] + OhMyZsh, + /// zsh-autosuggestions plugin + #[strum(to_string = "zsh-autosuggestions")] + Autosuggestions, + /// zsh-syntax-highlighting plugin + #[strum(to_string = "zsh-syntax-highlighting")] + SyntaxHighlighting, + /// fzf fuzzy finder + #[strum(to_string = "fzf")] + Fzf, + /// bat file viewer + #[strum(to_string = "bat")] + Bat, + /// fd file finder + #[strum(to_string = "fd")] + Fd, +} + +impl Dependency { + /// Returns the human-readable category/kind of this dependency. + pub fn kind(&self) -> &'static str { + match self { + Dependency::Zsh => "shell", + Dependency::OhMyZsh => "plugin framework", + Dependency::Autosuggestions | Dependency::SyntaxHighlighting => "plugin", + Dependency::Fzf => "fuzzy finder", + Dependency::Bat => "file viewer", + Dependency::Fd => "file finder", + } + } +} + +/// A dependency that needs to be installed or upgraded. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct MissingItem { + /// Which dependency is missing or outdated. + pub dep: Dependency, + /// Why it appears in the missing list. 
+ pub reason: ItemReason, +} + +impl MissingItem { + /// Creates a new missing item. + pub fn new(dep: Dependency, reason: ItemReason) -> Self { + Self { dep, reason } + } + + /// Returns the human-readable category/kind of this dependency. + pub fn kind(&self) -> &'static str { + self.dep.kind() + } +} + +impl std::fmt::Display for MissingItem { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self.reason { + ItemReason::Missing => write!(f, "{}", self.dep), + ItemReason::Outdated => write!(f, "{} ({})", self.dep, self.reason), + } + } +} + +/// Aggregated dependency detection results. +#[derive(Debug, Clone)] +pub struct DependencyStatus { + /// Status of zsh installation + pub zsh: ZshStatus, + /// Status of Oh My Zsh installation + pub oh_my_zsh: OmzStatus, + /// Status of zsh-autosuggestions plugin + pub autosuggestions: PluginStatus, + /// Status of zsh-syntax-highlighting plugin + pub syntax_highlighting: PluginStatus, + /// Status of fzf installation + pub fzf: FzfStatus, + /// Status of bat installation + pub bat: BatStatus, + /// Status of fd installation + pub fd: FdStatus, + /// Whether git is available (hard prerequisite) + #[allow(dead_code)] + pub git: bool, +} + +impl DependencyStatus { + /// Returns true if all required dependencies are installed and functional. + pub fn all_installed(&self) -> bool { + matches!(self.zsh, ZshStatus::Functional { .. }) + && matches!(self.oh_my_zsh, OmzStatus::Installed) + && self.autosuggestions == PluginStatus::Installed + && self.syntax_highlighting == PluginStatus::Installed + } + + /// Returns a list of dependencies that need to be installed or upgraded. + pub fn missing_items(&self) -> Vec { + let mut items = Vec::new(); + if !matches!(self.zsh, ZshStatus::Functional { .. 
}) { + items.push(MissingItem::new(Dependency::Zsh, ItemReason::Missing)); + } + if !matches!(self.oh_my_zsh, OmzStatus::Installed) { + items.push(MissingItem::new(Dependency::OhMyZsh, ItemReason::Missing)); + } + if self.autosuggestions == PluginStatus::NotInstalled { + items.push(MissingItem::new( + Dependency::Autosuggestions, + ItemReason::Missing, + )); + } + if self.syntax_highlighting == PluginStatus::NotInstalled { + items.push(MissingItem::new( + Dependency::SyntaxHighlighting, + ItemReason::Missing, + )); + } + match &self.fzf { + FzfStatus::NotFound => { + items.push(MissingItem::new(Dependency::Fzf, ItemReason::Missing)) + } + FzfStatus::Found { meets_minimum: false, .. } => { + items.push(MissingItem::new(Dependency::Fzf, ItemReason::Outdated)) + } + _ => {} + } + match &self.bat { + BatStatus::NotFound => { + items.push(MissingItem::new(Dependency::Bat, ItemReason::Missing)) + } + BatStatus::Installed { meets_minimum: false, .. } => { + items.push(MissingItem::new(Dependency::Bat, ItemReason::Outdated)) + } + _ => {} + } + match &self.fd { + FdStatus::NotFound => items.push(MissingItem::new(Dependency::Fd, ItemReason::Missing)), + FdStatus::Installed { meets_minimum: false, .. } => { + items.push(MissingItem::new(Dependency::Fd, ItemReason::Outdated)) + } + _ => {} + } + items + } + + /// Returns true if zsh needs to be installed. + pub fn needs_zsh(&self) -> bool { + !matches!(self.zsh, ZshStatus::Functional { .. }) + } + + /// Returns true if Oh My Zsh needs to be installed. + pub fn needs_omz(&self) -> bool { + !matches!(self.oh_my_zsh, OmzStatus::Installed) + } + + /// Returns true if any plugins need to be installed. + pub fn needs_plugins(&self) -> bool { + self.autosuggestions == PluginStatus::NotInstalled + || self.syntax_highlighting == PluginStatus::NotInstalled + } + + /// Returns true if any tools (fzf, bat, fd) need to be installed. 
+ pub fn needs_tools(&self) -> bool { + matches!(self.fzf, FzfStatus::NotFound) + || matches!(self.bat, BatStatus::NotFound) + || matches!(self.fd, FdStatus::NotFound) + } +} + +/// Represents the privilege level available for package installation. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum SudoCapability { + /// Already running as root (no sudo needed). + Root, + /// Not root but sudo is available. + SudoAvailable, + /// No elevated privileges needed (macOS brew, Android pkg, Windows). + NoneNeeded, + /// Elevated privileges are needed but not available. + NoneAvailable, +} + +#[cfg(test)] +mod tests { + use pretty_assertions::assert_eq; + + use super::*; + + #[test] + fn test_all_installed_when_everything_present() { + let fixture = DependencyStatus { + zsh: ZshStatus::Functional { version: "5.9".into(), path: "/usr/bin/zsh".into() }, + oh_my_zsh: OmzStatus::Installed, + autosuggestions: PluginStatus::Installed, + syntax_highlighting: PluginStatus::Installed, + fzf: FzfStatus::Found { version: "0.54.0".into(), meets_minimum: true }, + bat: BatStatus::Installed { version: "0.24.0".into(), meets_minimum: true }, + fd: FdStatus::Installed { version: "10.2.0".into(), meets_minimum: true }, + git: true, + }; + + assert!(fixture.all_installed()); + assert!(fixture.missing_items().is_empty()); + } + + #[test] + fn test_all_installed_false_when_zsh_missing() { + let fixture = DependencyStatus { + zsh: ZshStatus::NotFound, + oh_my_zsh: OmzStatus::Installed, + autosuggestions: PluginStatus::Installed, + syntax_highlighting: PluginStatus::Installed, + fzf: FzfStatus::NotFound, + bat: BatStatus::NotFound, + fd: FdStatus::NotFound, + git: true, + }; + + assert!(!fixture.all_installed()); + + let actual = fixture.missing_items(); + let expected = vec![ + MissingItem::new(Dependency::Zsh, ItemReason::Missing), + MissingItem::new(Dependency::Fzf, ItemReason::Missing), + MissingItem::new(Dependency::Bat, ItemReason::Missing), + MissingItem::new(Dependency::Fd, 
ItemReason::Missing), + ]; + assert_eq!(actual, expected); + } + + #[test] + fn test_missing_items_all_missing() { + let fixture = DependencyStatus { + zsh: ZshStatus::NotFound, + oh_my_zsh: OmzStatus::NotInstalled, + autosuggestions: PluginStatus::NotInstalled, + syntax_highlighting: PluginStatus::NotInstalled, + fzf: FzfStatus::NotFound, + bat: BatStatus::NotFound, + fd: FdStatus::NotFound, + git: true, + }; + + let actual = fixture.missing_items(); + let expected = vec![ + MissingItem::new(Dependency::Zsh, ItemReason::Missing), + MissingItem::new(Dependency::OhMyZsh, ItemReason::Missing), + MissingItem::new(Dependency::Autosuggestions, ItemReason::Missing), + MissingItem::new(Dependency::SyntaxHighlighting, ItemReason::Missing), + MissingItem::new(Dependency::Fzf, ItemReason::Missing), + MissingItem::new(Dependency::Bat, ItemReason::Missing), + MissingItem::new(Dependency::Fd, ItemReason::Missing), + ]; + assert_eq!(actual, expected); + } + + #[test] + fn test_missing_items_partial() { + let fixture = DependencyStatus { + zsh: ZshStatus::Functional { version: "5.9".into(), path: "/usr/bin/zsh".into() }, + oh_my_zsh: OmzStatus::Installed, + autosuggestions: PluginStatus::NotInstalled, + syntax_highlighting: PluginStatus::Installed, + fzf: FzfStatus::NotFound, + bat: BatStatus::Installed { version: "0.24.0".into(), meets_minimum: true }, + fd: FdStatus::NotFound, + git: true, + }; + + let actual = fixture.missing_items(); + let expected = vec![ + MissingItem::new(Dependency::Autosuggestions, ItemReason::Missing), + MissingItem::new(Dependency::Fzf, ItemReason::Missing), + MissingItem::new(Dependency::Fd, ItemReason::Missing), + ]; + assert_eq!(actual, expected); + } + + #[test] + fn test_needs_zsh_when_broken() { + let fixture = DependencyStatus { + zsh: ZshStatus::Broken { path: "/usr/bin/zsh".into() }, + oh_my_zsh: OmzStatus::NotInstalled, + autosuggestions: PluginStatus::NotInstalled, + syntax_highlighting: PluginStatus::NotInstalled, + fzf: 
FzfStatus::NotFound, + bat: BatStatus::NotFound, + fd: FdStatus::NotFound, + git: true, + }; + + assert!(fixture.needs_zsh()); + } +} diff --git a/crates/forge_main/src/zsh/setup/util.rs b/crates/forge_main/src/zsh/setup/util.rs new file mode 100644 index 0000000000..67049b5b85 --- /dev/null +++ b/crates/forge_main/src/zsh/setup/util.rs @@ -0,0 +1,260 @@ +//! Utility functions for the ZSH setup orchestrator. +//! +//! Provides command execution helpers, path conversion utilities, +//! version comparison, and other shared infrastructure used across +//! the setup submodules. + +use std::path::{Path, PathBuf}; + +use anyhow::{Context, Result, bail}; +use tokio::process::Command; + +use super::types::SudoCapability; + +/// Checks if a command exists on the system using POSIX-compliant +/// `command -v` (available on all Unix shells) or `where` on Windows. +/// +/// Returns the resolved path if the command is found, `None` otherwise. +pub async fn resolve_command_path(cmd: &str) -> Option { + let output = if cfg!(target_os = "windows") { + Command::new("where") + .arg(cmd) + .stdout(std::process::Stdio::piped()) + .stderr(std::process::Stdio::null()) + .output() + .await + .ok()? + } else { + Command::new("sh") + .args(["-c", &format!("command -v {cmd}")]) + .stdout(std::process::Stdio::piped()) + .stderr(std::process::Stdio::null()) + .output() + .await + .ok()? + }; + + if output.status.success() { + let path = String::from_utf8_lossy(&output.stdout).trim().to_string(); + if path.is_empty() { None } else { Some(path) } + } else { + None + } +} + +/// Returns `true` if the given command is available on the system. +pub(super) async fn command_exists(cmd: &str) -> bool { + resolve_command_path(cmd).await.is_some() +} + +/// Runs a command, optionally prepending `sudo`, and returns the result. 
+/// +/// # Arguments +/// +/// * `program` - The program to run +/// * `args` - Arguments to pass +/// * `sudo` - The sudo capability level +/// +/// # Errors +/// +/// Returns error if: +/// - Sudo is needed but not available +/// - The command fails to spawn or exits with non-zero status +pub(super) async fn run_maybe_sudo( + program: &str, + args: &[&str], + sudo: &SudoCapability, +) -> Result<()> { + let mut cmd = match sudo { + SudoCapability::Root | SudoCapability::NoneNeeded => { + let mut c = Command::new(program); + c.args(args); + c + } + SudoCapability::SudoAvailable => { + let mut c = Command::new("sudo"); + c.arg(program); + c.args(args); + c + } + SudoCapability::NoneAvailable => { + bail!("Root privileges required to install zsh. Either run as root or install sudo."); + } + }; + + cmd.stdout(std::process::Stdio::inherit()) + .stderr(std::process::Stdio::inherit()) + .stdin(std::process::Stdio::inherit()); + + let status = cmd + .status() + .await + .context(format!("Failed to execute {}", program))?; + + if !status.success() { + bail!("{} exited with code {:?}", program, status.code()); + } + + Ok(()) +} + +/// Runs a command in a given working directory, suppressing stdout/stderr. +pub(super) async fn run_cmd(program: &str, args: &[&str], cwd: &Path) -> Result<()> { + let status = Command::new(program) + .args(args) + .current_dir(cwd) + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .status() + .await + .context(format!("Failed to run {}", program))?; + + if !status.success() { + bail!("{} failed with exit code {:?}", program, status.code()); + } + Ok(()) +} + +/// Converts a path to a string, using lossy conversion. +pub(super) fn path_str(p: &Path) -> String { + p.to_string_lossy().to_string() +} + +/// Converts a Unix-style path to a Windows path. +/// +/// Performs manual `/c/...` -> `C:\...` conversion for Git Bash environments. 
+pub(super) fn to_win_path(p: &Path) -> String { + let s = p.to_string_lossy().to_string(); + // Simple conversion: /c/Users/... -> C:\Users\... + if s.len() >= 3 && s.starts_with('/') && s.chars().nth(2) == Some('/') { + let drive = s.chars().nth(1).unwrap().to_uppercase().to_string(); + let rest = &s[2..]; + format!("{}:{}", drive, rest.replace('/', "\\")) + } else { + s.replace('/', "\\") + } +} + +/// Recursively searches for a file by name in a directory. +pub(super) async fn find_file_recursive(dir: &Path, name: &str) -> Option { + let mut entries = match tokio::fs::read_dir(dir).await { + Ok(e) => e, + Err(_) => return None, + }; + + while let Ok(Some(entry)) = entries.next_entry().await { + let path = entry.path(); + if path.is_file() && path.file_name().map(|n| n == name).unwrap_or(false) { + return Some(path); + } + if path.is_dir() + && let Some(found) = Box::pin(find_file_recursive(&path, name)).await + { + return Some(found); + } + } + + None +} + +/// Resolves the path to the zsh binary. +/// +/// Delegates to [`resolve_command_path`] and falls back to `"zsh"` if +/// the binary cannot be located. +pub(super) async fn resolve_zsh_path() -> String { + resolve_command_path("zsh") + .await + .unwrap_or_else(|| "zsh".to_string()) +} + +/// Compares two version strings (dotted numeric). +/// +/// Returns `true` if `version >= minimum`. 
+pub(super) fn version_gte(version: &str, minimum: &str) -> bool { + let parse = |v: &str| -> Vec { + v.trim_start_matches('v') + .split('.') + .map(|p| { + // Remove non-numeric suffixes like "0-rc1" + let numeric: String = p.chars().take_while(|c| c.is_ascii_digit()).collect(); + numeric.parse().unwrap_or(0) + }) + .collect() + }; + + let ver = parse(version); + let min = parse(minimum); + + for i in 0..std::cmp::max(ver.len(), min.len()) { + let v = ver.get(i).copied().unwrap_or(0); + let m = min.get(i).copied().unwrap_or(0); + if v > m { + return true; + } + if v < m { + return false; + } + } + true // versions are equal +} + +/// RAII guard that cleans up a temporary directory on drop. +pub(super) struct TempDirCleanup(pub PathBuf); + +impl Drop for TempDirCleanup { + fn drop(&mut self) { + // Best effort cleanup — don't block on async in drop + let _ = std::fs::remove_dir_all(&self.0); + } +} + +#[cfg(test)] +mod tests { + use pretty_assertions::assert_eq; + + use super::*; + + #[test] + fn test_version_gte_equal() { + assert!(version_gte("0.36.0", "0.36.0")); + } + + #[test] + fn test_version_gte_greater_major() { + assert!(version_gte("1.0.0", "0.36.0")); + } + + #[test] + fn test_version_gte_greater_minor() { + assert!(version_gte("0.54.0", "0.36.0")); + } + + #[test] + fn test_version_gte_less() { + assert!(!version_gte("0.35.0", "0.36.0")); + } + + #[test] + fn test_version_gte_with_v_prefix() { + assert!(version_gte("v0.54.0", "0.36.0")); + } + + #[test] + fn test_version_gte_with_rc_suffix() { + assert!(version_gte("0.54.0-rc1", "0.36.0")); + } + + #[test] + fn test_to_win_path_drive() { + let actual = to_win_path(Path::new("/c/Users/test")); + let expected = r"C:\Users\test"; + assert_eq!(actual, expected); + } + + #[test] + fn test_to_win_path_no_drive() { + let actual = to_win_path(Path::new("/usr/bin/zsh")); + let expected = r"\usr\bin\zsh"; + assert_eq!(actual, expected); + } +} diff --git a/crates/forge_repo/src/forge_repo.rs 
b/crates/forge_repo/src/forge_repo.rs index dde20149e0..1d1c3c300a 100644 --- a/crates/forge_repo/src/forge_repo.rs +++ b/crates/forge_repo/src/forge_repo.rs @@ -475,6 +475,17 @@ where .execute_command_raw(command, working_dir, env_vars) .await } + + async fn execute_command_background( + &self, + command: String, + working_dir: PathBuf, + env_vars: Option>, + ) -> anyhow::Result { + self.infra + .execute_command_background(command, working_dir, env_vars) + .await + } } #[async_trait::async_trait] diff --git a/crates/forge_repo/src/provider/openai_responses/snapshots/forge_repo__provider__openai_responses__request__tests__openai_responses_all_catalog_tools.snap b/crates/forge_repo/src/provider/openai_responses/snapshots/forge_repo__provider__openai_responses__request__tests__openai_responses_all_catalog_tools.snap index 750b03e650..575994f601 100644 --- a/crates/forge_repo/src/provider/openai_responses/snapshots/forge_repo__provider__openai_responses__request__tests__openai_responses_all_catalog_tools.snap +++ b/crates/forge_repo/src/provider/openai_responses/snapshots/forge_repo__provider__openai_responses__request__tests__openai_responses_all_catalog_tools.snap @@ -385,6 +385,10 @@ expression: actual.tools "parameters": { "additionalProperties": false, "properties": { + "background": { + "description": "If true, runs the command in the background as a detached process.\nThe command's stdout/stderr are redirected to a temporary log file.\nThe tool returns immediately with the log file path and process ID\ninstead of waiting for the command to complete.\nUse this for long-running processes like web servers or file watchers.", + "type": "boolean" + }, "command": { "description": "The shell command to execute.", "type": "string" @@ -431,6 +435,7 @@ expression: actual.tools } }, "required": [ + "background", "command", "cwd", "description", @@ -441,7 +446,7 @@ expression: actual.tools "type": "object" }, "strict": true, - "description": "Executes shell commands. 
The `cwd` parameter sets the working directory for command execution. If not specified, defaults to `{{env.cwd}}`.\n\nCRITICAL: Do NOT use `cd` commands in the command string. This is FORBIDDEN. Always use the `cwd` parameter to set the working directory instead. Any use of `cd` in the command is redundant, incorrect, and violates the tool contract.\n\nIMPORTANT: This tool is for terminal operations like git, npm, docker, etc. DO NOT use it for file operations (reading, writing, editing, searching, finding files) - use the specialized tools for this instead.\n\nBefore executing the command, please follow these steps:\n\n1. Directory Verification:\n - If the command will create new directories or files, first use `shell` with `ls` to verify the parent directory exists and is the correct location\n - For example, before running \"mkdir foo/bar\", first use `ls foo` to check that \"foo\" exists and is the intended parent directory\n\n2. Command Execution:\n - Always quote file paths that contain spaces with double quotes (e.g., python \"path with spaces/script.py\")\n - Examples of proper quoting:\n - mkdir \"/Users/name/My Documents\" (correct)\n - mkdir /Users/name/My Documents (incorrect - will fail)\n - python \"/path/with spaces/script.py\" (correct)\n - python /path/with spaces/script.py (incorrect - will fail)\n - After ensuring proper quoting, execute the command.\n - Capture the output of the command.\n\nUsage notes:\n - The command argument is required.\n - It is very helpful if you write a clear, concise description of what this command does in 5-10 words.\n - If the output exceeds {{env.stdoutMaxPrefixLength}} prefix lines or {{env.stdoutMaxSuffixLength}} suffix lines, or if a line exceeds {{env.stdoutMaxLineLength}} characters, it will be truncated and the full output will be written to a temporary file. You can use read with start_line/end_line to read specific sections or fs_search to search the full content. 
Because of this, you do NOT need to use `head`, `tail`, or other truncation commands to limit output - just run the command directly.\n - Avoid using {{tool_names.shell}} with the `find`, `grep`, `cat`, `head`, `tail`, `sed`, `awk`, or `echo` commands, unless explicitly instructed or when these commands are truly necessary for the task. Instead, always prefer using the dedicated tools for these commands:\n - File search: Use `{{tool_names.fs_search}}` (NOT find or ls)\n - Content search: Use `{{tool_names.fs_search}}` with regex (NOT grep or rg)\n - Read files: Use `{{tool_names.read}}` (NOT cat/head/tail)\n - Edit files: Use `{{tool_names.patch}}`(NOT sed/awk)\n - Write files: Use `{{tool_names.write}}` (NOT echo >/cat < && `. Use the `cwd` parameter to change directories instead.\n\nGood examples:\n - With explicit cwd: cwd=\"/foo/bar\" with command: pytest tests\n\nBad example:\n cd /foo/bar && pytest tests\n\nReturns complete output including stdout, stderr, and exit code for diagnostic purposes." + "description": "Executes shell commands. The `cwd` parameter sets the working directory for command execution. If not specified, defaults to `{{env.cwd}}`.\n\nCRITICAL: Do NOT use `cd` commands in the command string. This is FORBIDDEN. Always use the `cwd` parameter to set the working directory instead. Any use of `cd` in the command is redundant, incorrect, and violates the tool contract.\n\nIMPORTANT: This tool is for terminal operations like git, npm, docker, etc. DO NOT use it for file operations (reading, writing, editing, searching, finding files) - use the specialized tools for this instead.\n\nBefore executing the command, please follow these steps:\n\n1. 
Directory Verification:\n - If the command will create new directories or files, first use `shell` with `ls` to verify the parent directory exists and is the correct location\n - For example, before running \"mkdir foo/bar\", first use `ls foo` to check that \"foo\" exists and is the intended parent directory\n\n2. Command Execution:\n - Always quote file paths that contain spaces with double quotes (e.g., python \"path with spaces/script.py\")\n - Examples of proper quoting:\n - mkdir \"/Users/name/My Documents\" (correct)\n - mkdir /Users/name/My Documents (incorrect - will fail)\n - python \"/path/with spaces/script.py\" (correct)\n - python /path/with spaces/script.py (incorrect - will fail)\n - After ensuring proper quoting, execute the command.\n - Capture the output of the command.\n\nUsage notes:\n - The command argument is required.\n - It is very helpful if you write a clear, concise description of what this command does in 5-10 words.\n - If the output exceeds {{env.stdoutMaxPrefixLength}} prefix lines or {{env.stdoutMaxSuffixLength}} suffix lines, or if a line exceeds {{env.stdoutMaxLineLength}} characters, it will be truncated and the full output will be written to a temporary file. You can use read with start_line/end_line to read specific sections or fs_search to search the full content. Because of this, you do NOT need to use `head`, `tail`, or other truncation commands to limit output - just run the command directly.\n - Avoid using {{tool_names.shell}} with the `find`, `grep`, `cat`, `head`, `tail`, `sed`, `awk`, or `echo` commands, unless explicitly instructed or when these commands are truly necessary for the task. 
Instead, always prefer using the dedicated tools for these commands:\n - File search: Use `{{tool_names.fs_search}}` (NOT find or ls)\n - Content search: Use `{{tool_names.fs_search}}` with regex (NOT grep or rg)\n - Read files: Use `{{tool_names.read}}` (NOT cat/head/tail)\n - Edit files: Use `{{tool_names.patch}}`(NOT sed/awk)\n - Write files: Use `{{tool_names.write}}` (NOT echo >/cat < && `. Use the `cwd` parameter to change directories instead.\n\nGood examples:\n - With explicit cwd: cwd=\"/foo/bar\" with command: pytest tests\n\nBad example:\n cd /foo/bar && pytest tests\n\nBackground execution:\n - Set `background: true` to run long-lived processes (web servers, file watchers, dev servers) as detached background jobs.\n - The command returns immediately with a **log file path** and **process ID (PID)** instead of waiting for completion.\n - The process continues running independently even after the session ends.\n - CRITICAL: Always remember the log file path returned by background commands. You will need it to check output, diagnose errors, or verify the process is working. After compaction the log file path will still be available in the summary.\n - Use `read` on the log file path to inspect process output at any time.\n - Examples of when to use background:\n - Starting a web server: `npm start`, `python manage.py runserver`, `cargo run --bin server`\n - Starting a file watcher: `npm run watch`, `cargo watch`\n - Starting any process that runs indefinitely and should not block your workflow\n\nReturns complete output including stdout, stderr, and exit code for diagnostic purposes." 
}, { "type": "function", diff --git a/crates/forge_services/Cargo.toml b/crates/forge_services/Cargo.toml index ccee3506bd..ce3a8f416f 100644 --- a/crates/forge_services/Cargo.toml +++ b/crates/forge_services/Cargo.toml @@ -52,6 +52,8 @@ http.workspace = true infer.workspace = true uuid.workspace = true tonic.workspace = true +tempfile.workspace = true +sysinfo.workspace = true [dev-dependencies] tokio = { workspace = true, features = ["macros", "rt", "time", "test-util"] } diff --git a/crates/forge_services/src/tool_services/background_process.rs b/crates/forge_services/src/tool_services/background_process.rs new file mode 100644 index 0000000000..d63465f865 --- /dev/null +++ b/crates/forge_services/src/tool_services/background_process.rs @@ -0,0 +1,285 @@ +use std::path::PathBuf; +use std::sync::Mutex; + +use anyhow::{Context, Result}; +use chrono::Utc; +use forge_domain::BackgroundProcess; + +/// Owns the temp-file handles for background process log files so that they +/// are automatically cleaned up when the manager is dropped. +struct OwnedLogFile { + /// Keeping the `NamedTempFile` alive prevents cleanup; when dropped the + /// file is deleted. + _handle: tempfile::NamedTempFile, + /// Associated PID so we can remove the handle when the process is killed. + pid: u32, +} + +impl std::fmt::Debug for OwnedLogFile { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("OwnedLogFile") + .field("pid", &self.pid) + .finish() + } +} + +/// Thread-safe registry of background processes spawned during the current +/// session. +/// +/// When the manager is dropped all owned temp-file handles are released, +/// causing the underlying log files to be deleted automatically. +#[derive(Default, Debug)] +pub struct BackgroundProcessManager { + processes: Mutex>, + log_handles: Mutex>, +} + +impl BackgroundProcessManager { + /// Creates a new, empty manager. 
+ pub fn new() -> Self { + Self::default() + } + + /// Acquires the processes lock, returning an error if poisoned. + fn lock_processes( + &self, + ) -> Result>> { + self.processes + .lock() + .map_err(|e| anyhow::anyhow!("processes lock poisoned: {e}")) + } + + /// Acquires the log handles lock, returning an error if poisoned. + fn lock_log_handles( + &self, + ) -> Result>> { + self.log_handles + .lock() + .map_err(|e| anyhow::anyhow!("log handles lock poisoned: {e}")) + } + + /// Register a newly spawned background process. + /// + /// # Arguments + /// + /// * `pid` - OS process id of the spawned process. + /// * `command` - The command string that was executed. + /// * `cwd` - Working directory where the command was spawned. + /// * `log_file` - Absolute path to the log file. + /// * `log_handle` - The `NamedTempFile` handle that owns the log file on + /// disk. Kept alive until the process is removed or the manager is + /// dropped. + /// + /// # Errors + /// + /// Returns an error if the internal lock is poisoned. + pub fn register( + &self, + pid: u32, + command: String, + cwd: PathBuf, + log_file: PathBuf, + log_handle: tempfile::NamedTempFile, + ) -> Result { + let process = BackgroundProcess { pid, command, cwd, log_file, started_at: Utc::now() }; + self.lock_processes()?.push(process.clone()); + self.lock_log_handles()? + .push(OwnedLogFile { _handle: log_handle, pid }); + Ok(process) + } + + /// Remove a background process by PID. + /// + /// This also drops the associated log-file handle. If `delete_log` is + /// `false` the handle is persisted (leaked) so the file survives on disk. + /// + /// # Errors + /// + /// Returns an error if the internal lock is poisoned. 
+ fn remove(&self, pid: u32, delete_log: bool) -> Result<()> { + self.lock_processes()?.retain(|p| p.pid != pid); + + if delete_log { + self.lock_log_handles()?.retain(|h| h.pid != pid); + } else { + let mut handles = self.lock_log_handles()?; + if let Some(pos) = handles.iter().position(|h| h.pid == pid) { + let owned = handles.remove(pos); + let _ = owned._handle.keep(); + } + } + Ok(()) + } + + /// Kills a background process by PID and removes it from tracking. + /// + /// Returns `Ok(())` if the process was killed or was already dead. + /// The `delete_log` flag controls whether the log file is deleted. + /// + /// # Errors + /// + /// Returns an error if the process could not be killed or the lock is + /// poisoned. + pub fn kill(&self, pid: u32, delete_log: bool) -> Result<()> { + kill_process(pid).context("failed to kill background process")?; + self.remove(pid, delete_log)?; + Ok(()) + } + + /// Returns a snapshot of all tracked processes with their alive status. + /// + /// # Errors + /// + /// Returns an error if the internal lock is poisoned. + pub fn list_with_status(&self) -> Result> { + Ok(self + .lock_processes()? + .iter() + .map(|p| { + let alive = is_process_alive(p.pid); + (p.clone(), alive) + }) + .collect()) + } +} + +/// Cross-platform check whether a process is still running. +fn is_process_alive(pid: u32) -> bool { + let s = sysinfo::System::new_with_specifics( + sysinfo::RefreshKind::nothing() + .with_processes(sysinfo::ProcessRefreshKind::nothing()), + ); + s.process(sysinfo::Pid::from_u32(pid)).is_some() +} + +/// Cross-platform process termination. +fn kill_process(pid: u32) -> anyhow::Result<()> { + let s = sysinfo::System::new_with_specifics( + sysinfo::RefreshKind::nothing() + .with_processes(sysinfo::ProcessRefreshKind::nothing()), + ); + match s.process(sysinfo::Pid::from_u32(pid)) { + Some(process) => { + process.kill(); + Ok(()) + } + // Process already gone -- nothing to kill. 
+ None => Ok(()), + } +} + +#[cfg(test)] +mod tests { + use std::io::Write; + + use pretty_assertions::assert_eq; + + use super::*; + + fn create_temp_log() -> tempfile::NamedTempFile { + let mut f = tempfile::Builder::new() + .prefix("forge-bg-test-") + .suffix(".log") + .tempfile() + .unwrap(); + writeln!(f, "test log content").unwrap(); + f + } + + #[test] + fn test_register_and_list_with_status() { + let fixture = BackgroundProcessManager::new(); + let log = create_temp_log(); + let log_path = log.path().to_path_buf(); + + fixture.register(1234, "npm start".to_string(), PathBuf::from("/test"), log_path.clone(), log).unwrap(); + + let actual = fixture.list_with_status().unwrap(); + + assert_eq!(actual.len(), 1); + assert_eq!(actual[0].0.pid, 1234); + assert_eq!(actual[0].0.command, "npm start"); + assert_eq!(actual[0].0.log_file, log_path); + } + + #[test] + fn test_remove_with_log_deletion() { + let fixture = BackgroundProcessManager::new(); + let log = create_temp_log(); + let log_path = log.path().to_path_buf(); + + fixture.register(100, "node app.js".to_string(), PathBuf::from("/test"), log_path.clone(), log).unwrap(); + assert_eq!(fixture.list_with_status().unwrap().len(), 1); + + fixture.remove(100, true).unwrap(); + + assert_eq!(fixture.list_with_status().unwrap().len(), 0); + assert!(!log_path.exists()); + } + + #[test] + fn test_remove_without_log_deletion() { + let fixture = BackgroundProcessManager::new(); + let log = create_temp_log(); + let log_path = log.path().to_path_buf(); + + fixture.register(200, "cargo watch".to_string(), PathBuf::from("/test"), log_path.clone(), log).unwrap(); + + fixture.remove(200, false).unwrap(); + + assert_eq!(fixture.list_with_status().unwrap().len(), 0); + assert!(log_path.exists()); + + let _ = std::fs::remove_file(&log_path); + } + + #[test] + fn test_multiple_processes() { + let fixture = BackgroundProcessManager::new(); + + let log1 = create_temp_log(); + let path1 = log1.path().to_path_buf(); + let log2 = 
create_temp_log(); + let path2 = log2.path().to_path_buf(); + + fixture.register(10, "server1".to_string(), PathBuf::from("/proj1"), path1, log1).unwrap(); + fixture.register(20, "server2".to_string(), PathBuf::from("/proj2"), path2, log2).unwrap(); + + assert_eq!(fixture.list_with_status().unwrap().len(), 2); + + fixture.remove(10, true).unwrap(); + + let actual = fixture.list_with_status().unwrap(); + assert_eq!(actual.len(), 1); + assert_eq!(actual[0].0.pid, 20); + } + + #[test] + fn test_drop_cleans_up_temp_files() { + let log = create_temp_log(); + let log_path = log.path().to_path_buf(); + + { + let manager = BackgroundProcessManager::new(); + manager.register(300, "temp cmd".to_string(), PathBuf::from("/test"), log_path.clone(), log).unwrap(); + assert!(log_path.exists()); + } + + assert!(!log_path.exists()); + } + + #[test] + fn test_list_with_status_shows_dead_process() { + let fixture = BackgroundProcessManager::new(); + let log = create_temp_log(); + let path = log.path().to_path_buf(); + + fixture.register(99999, "ghost".to_string(), PathBuf::from("/test"), path, log).unwrap(); + + let actual = fixture.list_with_status().unwrap(); + + assert_eq!(actual.len(), 1); + assert_eq!(actual[0].0.pid, 99999); + assert!(!actual[0].1); + } +} diff --git a/crates/forge_services/src/tool_services/mod.rs b/crates/forge_services/src/tool_services/mod.rs index 64a5c6f3c0..3f67bf25fe 100644 --- a/crates/forge_services/src/tool_services/mod.rs +++ b/crates/forge_services/src/tool_services/mod.rs @@ -1,3 +1,4 @@ +mod background_process; mod fetch; mod followup; mod fs_patch; @@ -11,6 +12,7 @@ mod plan_create; mod shell; mod skill; +pub use background_process::*; pub use fetch::*; pub use followup::*; pub use fs_patch::*; diff --git a/crates/forge_services/src/tool_services/shell.rs b/crates/forge_services/src/tool_services/shell.rs index bb2cc07328..abd63b2b10 100644 --- a/crates/forge_services/src/tool_services/shell.rs +++ 
b/crates/forge_services/src/tool_services/shell.rs @@ -3,7 +3,9 @@ use std::sync::Arc; use anyhow::bail; use forge_app::domain::Environment; -use forge_app::{CommandInfra, EnvironmentInfra, ShellOutput, ShellService}; +use forge_app::{CommandInfra, EnvironmentInfra, ShellOutput, ShellOutputKind, ShellService}; + +use super::BackgroundProcessManager; use strip_ansi_escapes::strip; // Strips out the ansi codes from content. @@ -21,13 +23,16 @@ fn strip_ansi(content: String) -> String { pub struct ForgeShell { env: Environment, infra: Arc, + bg_manager: Arc, } impl ForgeShell { - /// Create a new Shell with environment configuration + /// Create a new Shell with environment configuration and a background + /// process manager for tracking long-running detached processes. pub fn new(infra: Arc) -> Self { let env = infra.get_environment(); - Self { env, infra } + let bg_manager = Arc::new(BackgroundProcessManager::new()); + Self { env, infra, bg_manager } } fn validate_command(command: &str) -> anyhow::Result<()> { @@ -46,11 +51,39 @@ impl ShellService for ForgeShell { cwd: PathBuf, keep_ansi: bool, silent: bool, + background: bool, env_vars: Option>, description: Option, ) -> anyhow::Result { Self::validate_command(&command)?; + if background { + let bg_output = self + .infra + .execute_command_background(command, cwd.clone(), env_vars) + .await?; + + // Register with the background process manager which takes + // ownership of the temp-file handle (keeps the log file alive). 
+ self.bg_manager.register( + bg_output.pid, + bg_output.command.clone(), + cwd, + bg_output.log_file.clone(), + bg_output.log_handle, + )?; + + return Ok(ShellOutput { + kind: ShellOutputKind::Background { + command: bg_output.command, + pid: bg_output.pid, + log_file: bg_output.log_file, + }, + shell: self.env.shell.clone(), + description, + }); + } + let mut output = self .infra .execute_command(command, cwd, silent, env_vars) @@ -61,7 +94,21 @@ impl ShellService for ForgeShell { output.stderr = strip_ansi(output.stderr); } - Ok(ShellOutput { output, shell: self.env.shell.clone(), description }) + Ok(ShellOutput { + kind: ShellOutputKind::Foreground(output), + shell: self.env.shell.clone(), + description, + }) + } + + fn list_background_processes( + &self, + ) -> anyhow::Result> { + self.bg_manager.list_with_status() + } + + fn kill_background_process(&self, pid: u32, delete_log: bool) -> anyhow::Result<()> { + self.bg_manager.kill(pid, delete_log) } } #[cfg(test)] @@ -109,6 +156,26 @@ mod tests { ) -> anyhow::Result { unimplemented!() } + + async fn execute_command_background( + &self, + command: String, + _working_dir: PathBuf, + _env_vars: Option>, + ) -> anyhow::Result { + let log_file = tempfile::Builder::new() + .prefix("forge-bg-test-") + .suffix(".log") + .tempfile() + .unwrap(); + let log_path = log_file.path().to_path_buf(); + Ok(forge_domain::BackgroundCommandOutput { + command, + pid: 9999, + log_file: log_path, + log_handle: log_file, + }) + } } impl EnvironmentInfra for MockCommandInfra { @@ -130,11 +197,19 @@ mod tests { } } + fn make_shell(expected_env_vars: Option>) -> ForgeShell { + ForgeShell::new(Arc::new(MockCommandInfra { expected_env_vars })) + } + + /// Extracts the foreground CommandOutput from a ShellOutput, panicking if + /// the variant is Background. 
+ fn unwrap_foreground(output: &ShellOutput) -> &forge_domain::CommandOutput { + output.foreground().expect("Expected Foreground variant") + } + #[tokio::test] async fn test_shell_service_forwards_env_vars() { - let fixture = ForgeShell::new(Arc::new(MockCommandInfra { - expected_env_vars: Some(vec!["PATH".to_string(), "HOME".to_string()]), - })); + let fixture = make_shell(Some(vec!["PATH".to_string(), "HOME".to_string()])); let actual = fixture .execute( @@ -142,19 +217,21 @@ mod tests { PathBuf::from("."), false, false, + false, Some(vec!["PATH".to_string(), "HOME".to_string()]), None, ) .await .unwrap(); - assert_eq!(actual.output.stdout, "Mock output"); - assert_eq!(actual.output.exit_code, Some(0)); + let fg = unwrap_foreground(&actual); + assert_eq!(fg.stdout, "Mock output"); + assert_eq!(fg.exit_code, Some(0)); } #[tokio::test] async fn test_shell_service_forwards_no_env_vars() { - let fixture = ForgeShell::new(Arc::new(MockCommandInfra { expected_env_vars: None })); + let fixture = make_shell(None); let actual = fixture .execute( @@ -162,21 +239,21 @@ mod tests { PathBuf::from("."), false, false, + false, None, None, ) .await .unwrap(); - assert_eq!(actual.output.stdout, "Mock output"); - assert_eq!(actual.output.exit_code, Some(0)); + let fg = unwrap_foreground(&actual); + assert_eq!(fg.stdout, "Mock output"); + assert_eq!(fg.exit_code, Some(0)); } #[tokio::test] async fn test_shell_service_forwards_empty_env_vars() { - let fixture = ForgeShell::new(Arc::new(MockCommandInfra { - expected_env_vars: Some(vec![]), - })); + let fixture = make_shell(Some(vec![])); let actual = fixture .execute( @@ -184,19 +261,21 @@ mod tests { PathBuf::from("."), false, false, + false, Some(vec![]), None, ) .await .unwrap(); - assert_eq!(actual.output.stdout, "Mock output"); - assert_eq!(actual.output.exit_code, Some(0)); + let fg = unwrap_foreground(&actual); + assert_eq!(fg.stdout, "Mock output"); + assert_eq!(fg.exit_code, Some(0)); } #[tokio::test] async fn 
test_shell_service_with_description() { - let fixture = ForgeShell::new(Arc::new(MockCommandInfra { expected_env_vars: None })); + let fixture = make_shell(None); let actual = fixture .execute( @@ -204,14 +283,20 @@ mod tests { PathBuf::from("."), false, false, + false, None, Some("Prints hello to stdout".to_string()), ) .await .unwrap(); - assert_eq!(actual.output.stdout, "Mock output"); - assert_eq!(actual.output.exit_code, Some(0)); + match &actual.kind { + ShellOutputKind::Foreground(output) => { + assert_eq!(output.stdout, "Mock output"); + assert_eq!(output.exit_code, Some(0)); + } + _ => panic!("Expected Foreground"), + } assert_eq!( actual.description, Some("Prints hello to stdout".to_string()) @@ -220,7 +305,7 @@ mod tests { #[tokio::test] async fn test_shell_service_without_description() { - let fixture = ForgeShell::new(Arc::new(MockCommandInfra { expected_env_vars: None })); + let fixture = make_shell(None); let actual = fixture .execute( @@ -228,14 +313,49 @@ mod tests { PathBuf::from("."), false, false, + false, None, None, ) .await .unwrap(); - assert_eq!(actual.output.stdout, "Mock output"); - assert_eq!(actual.output.exit_code, Some(0)); + match &actual.kind { + ShellOutputKind::Foreground(output) => { + assert_eq!(output.stdout, "Mock output"); + assert_eq!(output.exit_code, Some(0)); + } + _ => panic!("Expected Foreground"), + } assert_eq!(actual.description, None); } + + #[tokio::test] + async fn test_shell_service_background_execution() { + let fixture = make_shell(None); + + let actual = fixture + .execute( + "npm start".to_string(), + PathBuf::from("."), + false, + false, + true, + None, + Some("Start dev server".to_string()), + ) + .await + .unwrap(); + + match &actual.kind { + ShellOutputKind::Background { pid, .. 
} => { + assert_eq!(*pid, 9999); + } + _ => panic!("Expected Background"), + } + + let tracked = fixture.list_background_processes().unwrap(); + assert_eq!(tracked.len(), 1); + assert_eq!(tracked[0].0.pid, 9999); + } } diff --git a/shell-plugin/doctor.zsh b/shell-plugin/doctor.zsh index 54de54c63d..52b6ca1550 100755 --- a/shell-plugin/doctor.zsh +++ b/shell-plugin/doctor.zsh @@ -295,6 +295,17 @@ if command -v bat &> /dev/null; then else print_result pass "bat: installed" fi +elif command -v batcat &> /dev/null; then + local bat_version=$(batcat --version 2>&1 | awk '{print $2}') + if [[ -n "$bat_version" ]]; then + if version_gte "$bat_version" "0.20.0"; then + print_result pass "batcat: ${bat_version}" + else + print_result fail "batcat: ${bat_version}" "Version 0.20.0 or higher required. Update: https://github.com/sharkdp/bat#installation" + fi + else + print_result pass "batcat: installed" + fi else print_result warn "bat not found" "Enhanced preview. See installation: https://github.com/sharkdp/bat#installation" fi diff --git a/shell-plugin/forge.setup.zsh b/shell-plugin/forge.setup.zsh index 76e2039905..53840110a0 100644 --- a/shell-plugin/forge.setup.zsh +++ b/shell-plugin/forge.setup.zsh @@ -1,6 +1,11 @@ # !! Contents within this block are managed by 'forge zsh setup' !! # !! Do not edit manually - changes will be overwritten !! +# Add ~/.local/bin to PATH if it exists and isn't already in PATH +if [[ -d "$HOME/.local/bin" ]] && [[ ":$PATH:" != *":$HOME/.local/bin:"* ]]; then + export PATH="$HOME/.local/bin:$PATH" +fi + # Add required zsh plugins if not already present if [[ ! 
" ${plugins[@]} " =~ " zsh-autosuggestions " ]]; then plugins+=(zsh-autosuggestions) diff --git a/shell-plugin/lib/dispatcher.zsh b/shell-plugin/lib/dispatcher.zsh index 1f0d2d9b7d..658ffbc0d0 100644 --- a/shell-plugin/lib/dispatcher.zsh +++ b/shell-plugin/lib/dispatcher.zsh @@ -226,6 +226,9 @@ function forge-accept-line() { keyboard-shortcuts|kb) _forge_action_keyboard ;; + processes|ps) + _forge_action_processes + ;; *) _forge_action_default "$user_action" "$input_text" ;;