diff --git a/.claude/CLAUDE.md b/.claude/CLAUDE.md index 1131589..d467fb0 100644 --- a/.claude/CLAUDE.md +++ b/.claude/CLAUDE.md @@ -33,13 +33,13 @@ Git-Same is a Rust CLI + TUI tool that discovers GitHub org/repo structures and ### Core modules - **`auth/`** — GitHub CLI (`gh`) authentication only (`gh auth token`), with SSH clone support -- **`config/`** — TOML config parser. Default location: `~/.config/git-same/config.toml`. Sections: `[clone]`, `[filters]`, `[[providers]]` +- **`config/`** — TOML config parser. Default: `~/.config/git-same/config.toml`. Top-level keys: `workspaces`, `default_workspace`, plus `[clone]` and `[filters]` sections - **`discovery/`** — `DiscoveryOrchestrator` coordinates repo discovery via providers, applies filters, builds `ActionPlan` (what to clone vs sync) - **`operations/clone/`** — `CloneManager` handles concurrent cloning (configurable 1–32, default 4) - **`operations/sync/`** — `SyncManager` handles fetch/pull with concurrency. Detects repos with uncommitted changes and optionally skips them - **`provider/`** — Trait-based provider abstraction (`Provider` trait in `traits.rs`). GitHub implementation in `github/client.rs` with pagination. 
Mock provider in `mock.rs` for testing - **`git/`** — `GitOperations` trait (`traits.rs`) with `ShellGit` implementation (`shell.rs`) that shells out to `git` commands -- **`cache/`** — `DiscoveryCache` with TTL-based validity at `~/.cache/git-same/` +- **`cache/`** — `DiscoveryCache` with TTL-based validity, persisted per workspace at `<workspace>/.git-same/cache.json` - **`errors/`** — Custom error hierarchy: `AppError`, `GitError`, `ProviderError` with `suggested_action()` methods - **`output/`** — Verbosity levels and `indicatif` progress bars (`CloneProgressBar`, `SyncProgressBar`, `DiscoveryProgressBar`) - **`types/repo.rs`** — Core data types: `Repo`, `Org`, `ActionPlan`, `OpResult`, `OpSummary` diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..5ace460 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,6 @@ +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" diff --git a/.github/workflows/S1-Test-CI.yml b/.github/workflows/S1-Test-CI.yml index 3f489a9..d71c490 100644 --- a/.github/workflows/S1-Test-CI.yml +++ b/.github/workflows/S1-Test-CI.yml @@ -7,6 +7,9 @@ env: CARGO_TERM_COLOR: always RUST_BACKTRACE: 1 +permissions: + contents: read + jobs: test: name: Test (${{ matrix.os }}) @@ -22,6 +25,8 @@ jobs: steps: - uses: actions/checkout@v6 + with: + persist-credentials: false - name: Install Rust uses: dtolnay/rust-toolchain@stable @@ -32,13 +37,13 @@ jobs: - uses: Swatinem/rust-cache@v2 - name: Check formatting - run: cargo fmt --all -- --check + run: cargo +stable fmt --all -- --check - name: Clippy - run: cargo clippy --all-targets --all-features -- -D warnings + run: cargo +stable clippy --all-targets --all-features -- -D warnings - name: Run tests - run: cargo test --all-features + run: cargo +stable test --all-features build: name: Build (${{ matrix.target }}) @@ -63,6 +68,8 @@ jobs: steps: - uses: actions/checkout@v6 + 
with: + persist-credentials: false - name: Install Rust uses: dtolnay/rust-toolchain@stable @@ -76,15 +83,20 @@ jobs: - uses: Swatinem/rust-cache@v2 - name: Build release - run: cargo build --release --target ${{ matrix.target }} + run: cargo +stable build --release --target ${{ matrix.target }} env: CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: ${{ matrix.target == 'aarch64-unknown-linux-gnu' && 'aarch64-linux-gnu-gcc' || '' }} coverage: name: Code Coverage runs-on: ubuntu-latest + permissions: + contents: read + id-token: write steps: - uses: actions/checkout@v6 + with: + persist-credentials: false - name: Install Rust uses: dtolnay/rust-toolchain@stable @@ -97,18 +109,111 @@ jobs: tool: cargo-tarpaulin - name: Generate coverage - run: cargo tarpaulin --all-features --workspace --timeout 120 --out xml + run: cargo +stable tarpaulin --all-features --workspace --timeout 120 --out xml - name: Upload coverage to Codecov uses: codecov/codecov-action@v5 with: + use_oidc: true fail_ci_if_error: false + alias-drift-check: + name: Alias Drift Check + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6 + with: + persist-credentials: false + + - name: Verify alias manifest and consumers + run: | + set -e + + # Manifest exists + if [ ! 
-f toolkit/packaging/binary-aliases.txt ]; then + echo "ERROR: toolkit/packaging/binary-aliases.txt not found"; exit 1 + fi + + # Only 1 [[bin]] in Cargo.toml + BIN_COUNT=$(grep -c '^[[:space:]]*\[\[bin\]\]' Cargo.toml || true) + if [ "$BIN_COUNT" -ne 1 ]; then + echo "ERROR: Cargo.toml has $BIN_COUNT [[bin]] entries, expected 1"; exit 1 + fi + + # Primary matches Cargo.toml bin name + MANIFEST_PRIMARY=$(head -n1 toolkit/packaging/binary-aliases.txt) + CARGO_BIN=$(awk ' + /^\[\[bin\]\]/ { in_bin=1; next } + in_bin && /^\[/ { exit } + in_bin && $1 == "name" { + line = $0 + sub(/^[^"]*"/, "", line) + sub(/".*$/, "", line) + if (line != "") { print line; exit } + } + ' Cargo.toml) + if [ -z "$CARGO_BIN" ]; then + echo "ERROR: Could not resolve bin name from Cargo.toml"; exit 1 + fi + if [ "$MANIFEST_PRIMARY" != "$CARGO_BIN" ]; then + echo "ERROR: Primary mismatch: manifest=$MANIFEST_PRIMARY cargo=$CARGO_BIN"; exit 1 + fi + + # Homebrew workflow references all aliases + while IFS= read -r alias; do + [ -z "$alias" ] && continue + if ! grep -Fq -- "$alias" .github/workflows/S3-Publish-Homebrew.yml; then + echo "ERROR: '$alias' missing from S3-Publish-Homebrew.yml"; exit 1 + fi + done < toolkit/packaging/binary-aliases.txt + + # Conductor script paths exist with exact casing (important on Linux) + [ -f toolkit/conductor/run.sh ] || { echo "ERROR: toolkit/conductor/run.sh not found"; exit 1; } + [ -f toolkit/conductor/archive.sh ] || { echo "ERROR: toolkit/conductor/archive.sh not found"; exit 1; } + + # Scripts reference the manifest + grep -q 'binary-aliases.txt' toolkit/conductor/run.sh || { echo "ERROR: run.sh missing manifest ref"; exit 1; } + grep -q 'binary-aliases.txt' toolkit/conductor/archive.sh || { echo "ERROR: archive.sh missing manifest ref"; exit 1; } + + echo "All alias drift checks passed." 
+ + workflow-secret-safety: + name: Workflow Secret Safety + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6 + with: + persist-credentials: false + + - name: Block known secret-leak patterns in workflows + run: | + set -euo pipefail + + # Prevent credentials embedded in URLs (high leak risk in logs/config). + if grep -RInE 'https://[^/@[:space:]]+@github[.]com' .github/workflows; then + echo "ERROR: Credential-in-URL pattern found in workflow files." + exit 1 + fi + + # Prevent direct printing of expressions that evaluate from secrets. + if grep -RInE 'echo[[:space:]].*\$\{\{[[:space:]]*secrets\.' .github/workflows; then + echo "ERROR: Direct echo of secrets expression found in workflow files." + exit 1 + fi + + # Prevent shell xtrace in workflow scripts. + if grep -RInE '(^|[[:space:];])set[[:space:]]+-[a-wyzA-WYZ]*x|bash[[:space:]]+-[a-wyzA-WYZ]*x' .github/workflows; then + echo "ERROR: Shell xtrace detected in workflow files." + exit 1 + fi + audit: name: Security Audit runs-on: ubuntu-latest steps: - uses: actions/checkout@v6 + with: + persist-credentials: false - uses: rustsec/audit-check@v2 with: token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/S2-Release-GitHub.yml b/.github/workflows/S2-Release-GitHub.yml index bb5930b..80d5aff 100644 --- a/.github/workflows/S2-Release-GitHub.yml +++ b/.github/workflows/S2-Release-GitHub.yml @@ -25,6 +25,8 @@ jobs: steps: - uses: actions/checkout@v6 + with: + persist-credentials: false - name: Install Rust uses: dtolnay/rust-toolchain@stable @@ -35,19 +37,24 @@ jobs: - uses: Swatinem/rust-cache@v2 - name: Check formatting - run: cargo fmt --all -- --check + run: cargo +stable fmt --all -- --check - name: Clippy - run: cargo clippy --all-targets --all-features -- -D warnings + run: cargo +stable clippy --all-targets --all-features -- -D warnings - name: Run tests - run: cargo test --all-features + run: cargo +stable test --all-features coverage: name: Code 
Coverage runs-on: ubuntu-latest + permissions: + contents: read + id-token: write steps: - uses: actions/checkout@v6 + with: + persist-credentials: false - name: Install Rust uses: dtolnay/rust-toolchain@stable @@ -60,25 +67,118 @@ jobs: tool: cargo-tarpaulin - name: Generate coverage - run: cargo tarpaulin --all-features --workspace --timeout 120 --out xml + run: cargo +stable tarpaulin --all-features --workspace --timeout 120 --out xml - name: Upload coverage to Codecov uses: codecov/codecov-action@v5 with: + use_oidc: true fail_ci_if_error: false + alias-drift-check: + name: Alias Drift Check + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6 + with: + persist-credentials: false + + - name: Verify alias manifest and consumers + run: | + set -e + + # Manifest exists + if [ ! -f toolkit/packaging/binary-aliases.txt ]; then + echo "ERROR: toolkit/packaging/binary-aliases.txt not found"; exit 1 + fi + + # Only 1 [[bin]] in Cargo.toml + BIN_COUNT=$(grep -c '^[[:space:]]*\[\[bin\]\]' Cargo.toml || true) + if [ "$BIN_COUNT" -ne 1 ]; then + echo "ERROR: Cargo.toml has $BIN_COUNT [[bin]] entries, expected 1"; exit 1 + fi + + # Primary matches Cargo.toml bin name + MANIFEST_PRIMARY=$(head -n1 toolkit/packaging/binary-aliases.txt) + CARGO_BIN=$(awk ' + /^\[\[bin\]\]/ { in_bin=1; next } + in_bin && /^\[/ { exit } + in_bin && $1 == "name" { + line = $0 + sub(/^[^"]*"/, "", line) + sub(/".*$/, "", line) + if (line != "") { print line; exit } + } + ' Cargo.toml) + if [ -z "$CARGO_BIN" ]; then + echo "ERROR: Could not resolve bin name from Cargo.toml"; exit 1 + fi + if [ "$MANIFEST_PRIMARY" != "$CARGO_BIN" ]; then + echo "ERROR: Primary mismatch: manifest=$MANIFEST_PRIMARY cargo=$CARGO_BIN"; exit 1 + fi + + # Homebrew workflow references all aliases + while IFS= read -r alias; do + [ -z "$alias" ] && continue + if ! 
grep -Fq -- "$alias" .github/workflows/S3-Publish-Homebrew.yml; then + echo "ERROR: '$alias' missing from S3-Publish-Homebrew.yml"; exit 1 + fi + done < toolkit/packaging/binary-aliases.txt + + # Conductor script paths exist with exact casing (important on Linux) + [ -f toolkit/conductor/run.sh ] || { echo "ERROR: toolkit/conductor/run.sh not found"; exit 1; } + [ -f toolkit/conductor/archive.sh ] || { echo "ERROR: toolkit/conductor/archive.sh not found"; exit 1; } + + # Scripts reference the manifest + grep -q 'binary-aliases.txt' toolkit/conductor/run.sh || { echo "ERROR: run.sh missing manifest ref"; exit 1; } + grep -q 'binary-aliases.txt' toolkit/conductor/archive.sh || { echo "ERROR: archive.sh missing manifest ref"; exit 1; } + + echo "All alias drift checks passed." + audit: name: Security Audit runs-on: ubuntu-latest steps: - uses: actions/checkout@v6 + with: + persist-credentials: false - uses: rustsec/audit-check@v2 with: token: ${{ secrets.GITHUB_TOKEN }} + workflow-secret-safety: + name: Workflow Secret Safety + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6 + with: + persist-credentials: false + + - name: Block known secret-leak patterns in workflows + run: | + set -euo pipefail + + # Prevent credentials embedded in URLs (high leak risk in logs/config). + if grep -RInE 'https://[^/@[:space:]]+@github[.]com' .github/workflows; then + echo "ERROR: Credential-in-URL pattern found in workflow files." + exit 1 + fi + + # Prevent direct printing of expressions that evaluate from secrets. + if grep -RInE 'echo[[:space:]].*\$\{\{[[:space:]]*secrets\.' .github/workflows; then + echo "ERROR: Direct echo of secrets expression found in workflow files." + exit 1 + fi + + # Prevent shell xtrace in workflow scripts. + if grep -RInE '(^|[[:space:];])set[[:space:]]+-[a-wyzA-WYZ]*x|bash[[:space:]]+-[a-wyzA-WYZ]*x' .github/workflows; then + echo "ERROR: Shell xtrace detected in workflow files." 
+ exit 1 + fi + build-release-assets: name: Build Release Asset (${{ matrix.target }}) - needs: [test, coverage, audit] + needs: [test, coverage, alias-drift-check, audit, workflow-secret-safety] runs-on: ${{ matrix.os }} strategy: matrix: @@ -110,6 +210,8 @@ jobs: steps: - uses: actions/checkout@v6 + with: + persist-credentials: false - name: Install Rust uses: dtolnay/rust-toolchain@stable @@ -123,7 +225,7 @@ jobs: - uses: Swatinem/rust-cache@v2 - name: Build - run: cargo build --release --target ${{ matrix.target }} + run: cargo +stable build --release --target ${{ matrix.target }} env: CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: ${{ matrix.target == 'aarch64-unknown-linux-gnu' && 'aarch64-linux-gnu-gcc' || '' }} diff --git a/.github/workflows/S3-Publish-Homebrew.yml b/.github/workflows/S3-Publish-Homebrew.yml index ecad50e..6f53f29 100644 --- a/.github/workflows/S3-Publish-Homebrew.yml +++ b/.github/workflows/S3-Publish-Homebrew.yml @@ -12,12 +12,17 @@ env: TAP_REPO: zaai-com/homebrew-tap FORMULA_NAME: git-same +permissions: + contents: read + jobs: homebrew: name: Update Homebrew Formula runs-on: ubuntu-latest steps: - uses: actions/checkout@v6 + with: + persist-credentials: false - name: Get version from tag id: version @@ -94,10 +99,16 @@ jobs: elsif OS.linux? bin.install "git-same-linux-#{Hardware::CPU.arm? ? 
"aarch64" : "x86_64"}" => "git-same" end + bin.install_symlink "git-same" => "gitsame" + bin.install_symlink "git-same" => "gitsa" + bin.install_symlink "git-same" => "gisa" end test do assert_match version.to_s, shell_output("#{bin}/git-same --version") + assert_match version.to_s, shell_output("#{bin}/gitsame --version") + assert_match version.to_s, shell_output("#{bin}/gitsa --version") + assert_match version.to_s, shell_output("#{bin}/gisa --version") end end FORMULA @@ -109,11 +120,16 @@ jobs: sed -i "s|SHA_LINUX_AARCH64_PLACEHOLDER|${{ steps.sha.outputs.linux_aarch64 }}|g" formula.rb sed -i "s|SHA_MACOS_AARCH64_PLACEHOLDER|${{ steps.sha.outputs.macos_aarch64 }}|g" formula.rb + - name: Checkout tap repository + uses: actions/checkout@v6 + with: + repository: ${{ env.TAP_REPO }} + token: ${{ secrets.HOMEBREW_TAP_REPO_COMMIT_TOKEN }} + path: tap + fetch-depth: 0 + - name: Push formula to tap - env: - TAP_TOKEN: ${{ secrets.HOMEBREW_TAP_TOKEN }} run: | - git clone "https://x-access-token:${TAP_TOKEN}@github.com/${TAP_REPO}.git" tap mkdir -p tap/Formula cp formula.rb "tap/Formula/${FORMULA_NAME}.rb" cd tap @@ -124,5 +140,5 @@ jobs: echo "Formula unchanged, skipping commit" else git commit -m "Update ${FORMULA_NAME} to ${{ steps.version.outputs.version }}" - git push + git push origin HEAD fi diff --git a/.github/workflows/S4-Publish-Crates.yml b/.github/workflows/S4-Publish-Crates.yml index 6120a0a..ee76e36 100644 --- a/.github/workflows/S4-Publish-Crates.yml +++ b/.github/workflows/S4-Publish-Crates.yml @@ -6,12 +6,17 @@ on: env: CARGO_TERM_COLOR: always +permissions: + contents: read + jobs: publish: name: Publish to crates.io runs-on: ubuntu-latest steps: - uses: actions/checkout@v6 + with: + persist-credentials: false - name: Install Rust uses: dtolnay/rust-toolchain@stable @@ -19,6 +24,6 @@ jobs: - uses: Swatinem/rust-cache@v2 - name: Publish - run: cargo publish + run: cargo +stable publish env: CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} diff 
--git a/AGENTS.md b/AGENTS.md index 2995166..fbe0dbb 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -33,13 +33,13 @@ Git-Same is a Rust CLI + TUI tool that discovers GitHub org/repo structures and ### Core modules - **`auth/`** — GitHub CLI (`gh`) authentication only (`gh auth token`), with SSH clone support -- **`config/`** — TOML config parser. Default location: `~/.config/git-same/config.toml`. Sections: `[clone]`, `[filters]`, `[[providers]]` +- **`config/`** — TOML config parser. Global config location: `~/.config/git-same/config.toml` with `[clone]`, `[filters]`, and workspace registry fields (`workspaces`, `default_workspace`). Per-workspace config lives at `/.git-same/config.toml` and contains provider/auth-related workspace settings. - **`discovery/`** — `DiscoveryOrchestrator` coordinates repo discovery via providers, applies filters, builds `ActionPlan` (what to clone vs sync) - **`operations/clone/`** — `CloneManager` handles concurrent cloning (configurable 1–32, default 4) - **`operations/sync/`** — `SyncManager` handles fetch/pull with concurrency. Detects repos with uncommitted changes and optionally skips them - **`provider/`** — Trait-based provider abstraction (`Provider` trait in `traits.rs`). GitHub implementation in `github/client.rs` with pagination. 
Mock provider in `mock.rs` for testing - **`git/`** — `GitOperations` trait (`traits.rs`) with `ShellGit` implementation (`shell.rs`) that shells out to `git` commands -- **`cache/`** — `DiscoveryCache` with TTL-based validity at `~/.cache/git-same/` +- **`cache/`** — `DiscoveryCache` with TTL-based validity, persisted per workspace at `/.git-same/cache.json` - **`errors/`** — Custom error hierarchy: `AppError`, `GitError`, `ProviderError` with `suggested_action()` methods - **`output/`** — Verbosity levels and `indicatif` progress bars (`CloneProgressBar`, `SyncProgressBar`, `DiscoveryProgressBar`) - **`types/repo.rs`** — Core data types: `Repo`, `Org`, `ActionPlan`, `OpResult`, `OpSummary` diff --git a/Cargo.lock b/Cargo.lock index 37b441e..3f5dfea 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -852,7 +852,7 @@ dependencies = [ [[package]] name = "git-same" -version = "1.0.0" +version = "1.1.0" dependencies = [ "anyhow", "async-trait", @@ -1294,9 +1294,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.90" +version = "0.3.91" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14dc6f6450b3f6d4ed5b16327f38fed626d375a886159ca555bd7822c0c3a5a6" +checksum = "b49715b7073f385ba4bc528e5747d02e66cb39c6146efb66b781f131f0fb399c" dependencies = [ "once_cell", "wasm-bindgen", @@ -1719,9 +1719,9 @@ dependencies = [ [[package]] name = "pin-project-lite" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" +checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd" [[package]] name = "pin-utils" @@ -3077,9 +3077,9 @@ dependencies = [ [[package]] name = "wasm-bindgen" -version = "0.2.113" +version = "0.2.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60722a937f594b7fde9adb894d7c092fc1bb6612897c46368d18e7a20208eff2" +checksum = 
"6532f9a5c1ece3798cb1c2cfdba640b9b3ba884f5db45973a6f442510a87d38e" dependencies = [ "cfg-if", "once_cell", @@ -3090,9 +3090,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -version = "0.4.63" +version = "0.4.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a89f4650b770e4521aa6573724e2aed4704372151bd0de9d16a3bbabb87441a" +checksum = "e9c5522b3a28661442748e09d40924dfb9ca614b21c00d3fd135720e48b67db8" dependencies = [ "cfg-if", "futures-util", @@ -3104,9 +3104,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.113" +version = "0.2.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fac8c6395094b6b91c4af293f4c79371c163f9a6f56184d2c9a85f5a95f3950" +checksum = "18a2d50fcf105fb33bb15f00e7a77b772945a2ee45dcf454961fd843e74c18e6" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -3114,9 +3114,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.113" +version = "0.2.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab3fabce6159dc20728033842636887e4877688ae94382766e00b180abac9d60" +checksum = "03ce4caeaac547cdf713d280eda22a730824dd11e6b8c3ca9e42247b25c631e3" dependencies = [ "bumpalo", "proc-macro2", @@ -3127,9 +3127,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.113" +version = "0.2.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de0e091bdb824da87dc01d967388880d017a0a9bc4f3bdc0d86ee9f9336e3bb5" +checksum = "75a326b8c223ee17883a4251907455a2431acc2791c98c26279376490c378c16" dependencies = [ "unicode-ident", ] @@ -3170,9 +3170,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.90" +version = "0.3.91" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "705eceb4ce901230f8625bd1d665128056ccbe4b7408faa625eec1ba80f59a97" +checksum = 
"854ba17bb104abfb26ba36da9729addc7ce7f06f5c0f90f3c391f8461cca21f9" dependencies = [ "js-sys", "wasm-bindgen", @@ -3717,18 +3717,18 @@ dependencies = [ [[package]] name = "zerocopy" -version = "0.8.39" +version = "0.8.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db6d35d663eadb6c932438e763b262fe1a70987f9ae936e60158176d710cae4a" +checksum = "a789c6e490b576db9f7e6b6d661bcc9799f7c0ac8352f56ea20193b2681532e5" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.8.39" +version = "0.8.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4122cd3169e94605190e77839c9a40d40ed048d305bfdc146e7df40ab0f3e517" +checksum = "f65c489a7071a749c849713807783f70672b28094011623e200cb86dcb835953" dependencies = [ "proc-macro2", "quote", diff --git a/Cargo.toml b/Cargo.toml index c36cdb3..1810f9a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "git-same" -version = "1.0.0" +version = "1.1.0" edition = "2021" authors = ["Git-Same Contributors"] description = "Mirror GitHub structure /orgs/repos/ to local file system." @@ -9,23 +9,12 @@ repository = "https://github.com/zaai-com/git-same" keywords = ["git", "github", "cli", "clone", "sync"] categories = ["command-line-utilities", "development-tools"] -# All binaries are installed by default +# Aliases (gitsame, gitsa, gisa) are created as symlinks by installers. +# See toolkit/packaging/binary-aliases.txt for the full list. 
[[bin]] name = "git-same" path = "src/main.rs" -[[bin]] -name = "gitsame" -path = "src/main.rs" - -[[bin]] -name = "gitsa" -path = "src/main.rs" - -[[bin]] -name = "gisa" -path = "src/main.rs" - [features] default = ["tui"] tui = ["dep:ratatui", "dep:crossterm"] diff --git a/conductor.json b/conductor.json index 98c0a1f..6fd997f 100644 --- a/conductor.json +++ b/conductor.json @@ -2,9 +2,9 @@ "name": "Git-Same", "description": "Mirror GitHub org/repo structure locally - supports multiple providers", "scripts": { - "setup": "./toolkit/Conductor/setup.sh", - "run": "./toolkit/Conductor/run.sh", - "archive": "./toolkit/Conductor/archive.sh" + "setup": "./toolkit/conductor/setup.sh", + "run": "./toolkit/conductor/run.sh", + "archive": "./toolkit/conductor/archive.sh" }, "stack": { "language": "Rust", diff --git a/docs/README.md b/docs/README.md index 3611171..c86a762 100644 --- a/docs/README.md +++ b/docs/README.md @@ -44,13 +44,17 @@ brew install zaai-com/tap/git-same ## Available Commands -The tool can be invoked using any of these names (all installed by default): +The tool can be invoked using any of these names: -- `git-same` - Main command -- `gitsame` - No hyphen variant -- `gitsa` - Short form -- `gisa` - Shortest variant -- `git same` - Git subcommand (requires git-same in PATH) +| Command | Description | +|------------|--------------------------------| +| `git-same` | Primary binary | +| `gitsame` | No-hyphen alias (symlink) | +| `gitsa` | Short alias (symlink) | +| `gisa` | Shortest alias (symlink) | +| `git same` | Git subcommand (requires git-same in PATH) | + +> **Install method differences:** Homebrew (`brew install zaai-com/tap/git-same`) installs all aliases automatically. `cargo install git-same` installs only the primary binary — run `toolkit/conductor/run.sh` to create alias symlinks. The canonical alias list lives in `toolkit/packaging/binary-aliases.txt`. 
## Quick Start @@ -100,7 +104,7 @@ gisa sync ## Configuration -Edit `~/.config/git-same/config.toml` to customize behavior: +Global behavior is configured in `~/.config/git-same/config.toml`: ```toml # Directory structure: {org}/{repo} or {provider}/{org}/{repo} @@ -112,6 +116,12 @@ concurrency = 4 # Default sync mode: fetch or pull sync_mode = "fetch" +# Optional default workspace root path +# default_workspace = "~/Git-Same/GitHub" + +# Registered workspace root paths +# workspaces = ["~/Git-Same/GitHub"] + [clone] # Clone depth (0 = full history) depth = 0 @@ -131,37 +141,27 @@ include_forks = false # Filter by organizations (empty = all) orgs = [] - -# Default provider (GitHub.com) -[[providers]] -kind = "github" -auth = "gh-cli" -prefer_ssh = true -enabled = true ``` -`base_path` is workspace-specific (`WorkspaceConfig.base_path`) and is set during -`gisa setup` (or via workspace config files), not in the global `Config`. - -### Multi-Provider Setup +Provider and workspace-specific settings are stored inside each workspace at +`/.git-same/config.toml`: ```toml -# GitHub.com -[[providers]] +username = "my-user" +orgs = ["my-org"] + +[provider] kind = "github" -auth = "gh-cli" prefer_ssh = true -enabled = true +``` -# GitHub Enterprise -[[providers]] +For GitHub Enterprise, configure the workspace provider: + +```toml +[provider] kind = "github-enterprise" -name = "Work GitHub" api_url = "https://github.company.com/api/v3" -auth = "gh-cli" prefer_ssh = true -enabled = true -base_path = "~/work/code" ``` Authenticate GitHub Enterprise once with: @@ -200,7 +200,7 @@ Sync repositories — discover, clone new, fetch/pull existing: gisa sync [OPTIONS] Options: - -w, --workspace Workspace to sync + -w, --workspace Workspace to sync (path or unique folder name) --pull Use pull instead of fetch for existing repos -n, --dry-run Show what would be done -c, --concurrency Number of parallel operations (1-32) @@ -216,7 +216,7 @@ Show status of local repositories: gisa status 
[OPTIONS] Options: - -w, --workspace Workspace to check + -w, --workspace Workspace to check (path or unique folder name) -o, --org ... Filter by organization (repeatable) -d, --uncommitted Show only repositories with uncommitted changes -b, --behind Show only repositories behind upstream @@ -229,7 +229,7 @@ Manage workspaces: ```bash gisa workspace list # List configured workspaces -gisa workspace default [NAME] # Set default workspace +gisa workspace default [WORKSPACE] # Set default workspace (path or unique folder name) gisa workspace default --clear # Clear default workspace ``` @@ -300,7 +300,7 @@ cargo build cargo build --release ``` -Binaries are output to `target/release/` (or `target/debug/`): `git-same`, `gitsame`, `gitsa`, `gisa`. +The binary is output to `target/release/git-same` (or `target/debug/git-same`). Alias symlinks are created by the install scripts, not by Cargo. ### Running tests @@ -339,7 +339,7 @@ cargo fmt --all -- --check cargo install --path . ``` -This installs all 4 binary aliases (`git-same`, `gitsame`, `gitsa`, `gisa`). Make sure `~/.cargo/bin` is in your `$PATH`. +This installs the `git-same` binary. To also create the alias symlinks (`gitsame`, `gitsa`, `gisa`), run `toolkit/conductor/run.sh` or install via Homebrew. Make sure `~/.cargo/bin` is in your `$PATH`. ### Rebuilding @@ -359,7 +359,10 @@ cargo uninstall git-same # Remove config and cache rm -rf ~/.config/git-same/ -rm -rf ~/.cache/git-same/ + +# Workspace-local cache/history live under each workspace: +# /.git-same/cache.json +# /.git-same/sync-history.json ``` ## License diff --git a/docs/plans/apple-code-signing.md b/docs/plans/apple-code-signing.md new file mode 100644 index 0000000..ce57e4b --- /dev/null +++ b/docs/plans/apple-code-signing.md @@ -0,0 +1,101 @@ +# Apple Code Signing & Notarization for macOS Releases + +## Context + +The macOS binaries (`git-same-macos-x86_64`, `git-same-macos-aarch64`) built by the S2 release workflow are currently unsigned. 
Since we have an Apple Developer account, we can sign and notarize the binaries for users who download them directly. + +### Who is affected by unsigned binaries? + +| Distribution channel | Gatekeeper warning? | Signing needed? | +|---|---|---| +| `brew install zaai-com/tap/git-same` | **No** — Homebrew strips quarantine | No | +| Direct download from GitHub Releases (browser) | **Yes** — browser sets quarantine xattr | Yes | +| `curl` / `wget` from GitHub Releases | **No** — no quarantine xattr set | No | + +Our S3 pipeline publishes a Formula (bare CLI binary + SHA256), not a Cask (.app). Homebrew's `curl`-based download does not set `com.apple.quarantine`, so **Homebrew users will not see Gatekeeper warnings regardless of signing**. Signing benefits users who download binaries directly from the GitHub Releases page via a browser. + +## Workflow Changes + +**Single file modified:** `.github/workflows/S2-Release-GitHub.yml` + +Add 4 new steps to the `build-release-assets` job, conditional on `runner.os == 'macOS'`, inserted between the existing "Rename binary" and "Upload artifact" steps: + +### Step 1: Import signing certificate +- Decode the base64 `.p12` certificate from secrets +- Create a temporary keychain (`build.keychain`), import the cert +- Use `security set-key-partition-list` to allow non-interactive `codesign` access + +### Step 2: Sign the binary +```bash +codesign \ + --sign "Developer ID Application: $APPLE_SIGNING_IDENTITY" \ + --options runtime \ + --timestamp \ + --force \ + ${{ matrix.asset_name }} +codesign --verify --verbose=4 ${{ matrix.asset_name }} +``` +- `--options runtime` enables Hardened Runtime (required for notarization) +- `--timestamp` embeds a secure timestamp (required for notarization) +- No entitlements file needed — a plain CLI tool has no special capability requirements + +### Step 3: Notarize the binary +```bash +zip ${{ matrix.asset_name }}.zip ${{ matrix.asset_name }} +xcrun notarytool submit ${{ matrix.asset_name }}.zip 
\ + --apple-id "$APPLE_ID" \ + --team-id "$APPLE_TEAM_ID" \ + --password "$APPLE_APP_SPECIFIC_PASSWORD" \ + --wait --timeout 300 +rm ${{ matrix.asset_name }}.zip +``` +- Stapling is not possible for bare binaries (only `.app`/`.pkg`/`.dmg`), but macOS does an online Gatekeeper check on first run which resolves the notarization ticket automatically + +### Step 4: Cleanup keychain +- Delete the temporary keychain and restore defaults +- Runs even if previous steps fail (uses `if: always()`) + +## Pre-requisite: Create the Developer ID Application Certificate + +Before configuring the workflow, you need a "Developer ID Application" certificate (this is the type used for signing software distributed outside the App Store). + +1. **Open Xcode** → Settings → Accounts → select your Apple ID → Manage Certificates +2. Click **"+"** → select **"Developer ID Application"** → Create +3. Alternatively via [developer.apple.com/account/resources/certificates](https://developer.apple.com/account/resources/certificates): create a new certificate of type "Developer ID Application", upload a CSR generated via Keychain Access +4. **Export the `.p12`:** Open Keychain Access → find "Developer ID Application: Your Name" → right-click → Export → save as `.p12` with a strong password +5. **Base64-encode it:** + ```bash + base64 -i DeveloperIDApplication.p12 | pbcopy + ``` +6. **Create an app-specific password:** Go to [appleid.apple.com](https://appleid.apple.com) → Sign-In and Security → App-Specific Passwords → Generate one labeled "git-same notarization" +7. 
**Find your Team ID:** Go to [developer.apple.com/account](https://developer.apple.com/account) → Membership Details → Team ID (10-character alphanumeric) + +## Required GitHub Secrets + +Configure these in your repo settings (Settings → Secrets and variables → Actions): + +| Secret | Description | +|--------|-------------| +| `APPLE_DEVELOPER_CERTIFICATE_P12` | Base64-encoded `.p12` from step 5 above | +| `APPLE_DEVELOPER_CERTIFICATE_PASSWORD` | Password used when exporting the `.p12` in step 4 | +| `APPLE_SIGNING_IDENTITY` | Name + Team ID only (e.g. `Your Name (TEAMID)`), **without** the `Developer ID Application:` prefix because the workflow command adds that prefix | +| `APPLE_ID` | Your Apple ID email | +| `APPLE_TEAM_ID` | 10-character Team ID from step 7 | +| `APPLE_APP_SPECIFIC_PASSWORD` | App-specific password from step 6 | + +## What Does NOT Change + +- Linux and Windows build matrix entries are unaffected (steps are gated on `runner.os == 'macOS'`) +- No new files created (no entitlements plist needed) +- S3 Homebrew workflow unchanged — it already downloads the release binaries, which will now be signed +- `Cargo.toml` release profile (`strip = true`, `lto = true`) is fully compatible with signing + +## Verification + +1. After implementation, trigger S2 on a tag push +2. Download the macOS artifacts and verify locally: + ```bash + codesign --verify --verbose=4 git-same-macos-aarch64 + spctl --assess --type execute --verbose git-same-macos-aarch64 + ``` +3. Install via Homebrew tap and confirm no Gatekeeper warnings appear diff --git a/docs/plans/github-settings-links.md b/docs/plans/github-settings-links.md new file mode 100644 index 0000000..ea480ef --- /dev/null +++ b/docs/plans/github-settings-links.md @@ -0,0 +1,237 @@ +# GitHub Security Hardening Runbook (ZAAI-com) + +This runbook gives direct links plus step-by-step actions and why each action matters for `ZAAI-com/git-same`. 
+
+Note: GitHub may return `404` on these settings URLs if you are not logged in or your account lacks sufficient permissions.
+
+## Goal
+
+1. Prevent secrets from being exposed in logs, artifacts, or workflow configuration.
+2. Enforce least privilege for workflow tokens and repository permissions.
+3. Add guardrails so insecure workflow changes are blocked automatically.
+
+## Phase 1: Organization Baseline (ZAAI-com)
+
+### 1) Organization Actions policy
+Link: https://github.com/orgs/ZAAI-com/settings/actions
+
+Why: Central policy prevents unsafe workflow behavior across all repos.
+
+How:
+1. Open the link as org owner/security admin.
+2. Restrict allowed actions to GitHub-owned and verified creators, or an allowlist.
+3. If available in your plan, enable the setting that requires actions to be pinned to a full-length commit SHA.
+4. Set default workflow permissions to read-only where possible.
+5. Save settings.
+
+Verify:
+1. In a test workflow in another repo, try referencing an unapproved action and confirm the run is blocked.
+
+### 2) Organization Actions secrets
+Link: https://github.com/orgs/ZAAI-com/settings/secrets/actions
+
+Why: Centralized secret policy reduces sprawl and accidental over-sharing.
+
+How:
+1. Review existing org secrets and repository access scopes.
+2. Remove broad secrets that are not reused.
+3. Keep only shared low-risk secrets at org level.
+4. For publish tokens, prefer repo environment secrets instead of org-wide scope.
+
+Verify:
+1. Confirm only intended repositories have access to each org secret.
+
+### 3) Organization Actions variables
+Link: https://github.com/orgs/ZAAI-com/settings/variables/actions
+
+Why: Non-secret constants belong in variables, reducing secret misuse.
+
+How:
+1. Move non-sensitive values from secrets to variables.
+2. Use clear names such as `HOMEBREW_TAP_REPO` or `RELEASE_REPO`.
+
+Verify:
+1. Workflows still resolve variables and no secret is used where a variable is enough.
+ +### 4) Organization rulesets +Link: https://github.com/orgs/ZAAI-com/settings/rules + +Why: Rulesets standardize protections so repos cannot drift to weaker settings. + +How: +1. Create or update a ruleset for production repositories. +2. Require pull requests for default branch changes. +3. Require status checks before merge. +4. Require code-owner reviews for `.github/workflows/**` changes. +5. Apply ruleset to `ZAAI-com/git-same`. + +Verify: +1. Open a test PR touching `.github/workflows/` and confirm approvals/checks are required. + +### 5) Organization security analysis +Link: https://github.com/orgs/ZAAI-com/settings/security_analysis + +Why: Secret scanning and dependency security catch leaks and known vulnerabilities early. + +How: +1. Enable secret scanning for supported repositories. +2. Enable push protection for supported repositories. +3. Enable Dependabot alerts and security updates. + +Verify: +1. Security features show enabled for `git-same` in repo security settings. + +### 6) Organization audit log +Link: https://github.com/orgs/ZAAI-com/settings/audit-log + +Why: Audit trails support incident response and change accountability. + +How: +1. Filter by `action:org.update_actions_secret` and repo name `git-same`. +2. Review recent changes to secrets, rules, and actions policy. +3. Export events if you need compliance records. + +Verify: +1. Confirm all recent critical setting changes are attributable to expected admins. + +## Phase 2: Repository Hardening (ZAAI-com/git-same) + +### 1) Repository Actions settings +Link: https://github.com/ZAAI-com/git-same/settings/actions + +Why: Repo-level workflow controls reduce blast radius from `GITHUB_TOKEN`. + +How: +1. Open the link as repo admin. +2. Set `Workflow permissions` to read repository contents by default. +3. Disable broad write permissions unless a workflow explicitly requires it. +4. Keep approval requirements for external contributors enabled if available. + +Verify: +1. Workflows still run. 
+2. Only publish jobs have explicit write permissions in YAML. + +### 2) Repository Actions secrets +Link: https://github.com/ZAAI-com/git-same/settings/secrets/actions + +Why: Secret scope should match job scope. + +How: +1. Review existing repo secrets. +2. Keep only secrets that cannot be moved to environment scope. +3. Remove stale tokens and rotate long-lived tokens. + +Verify: +1. No unused secret names remain. +2. Secret update timestamps are current after rotation. + +### 3) Repository Actions variables +Link: https://github.com/ZAAI-com/git-same/settings/variables/actions + +Why: Avoid storing non-sensitive constants as secrets. + +How: +1. Add repository variables for non-sensitive values used by workflows. +2. Update workflows to reference variables where appropriate. + +Verify: +1. Workflows pass with variables. +2. Secret count is reduced. + +### 4) Environments (`release`, `homebrew`, `crates`) +Link: https://github.com/ZAAI-com/git-same/settings/environments + +Why: Environment secrets and reviewer gates protect high-risk publish operations. + +How: +1. Create environment `release`. +2. Add required reviewers for `release`. +3. Create environment `homebrew`. +4. Add `HOMEBREW_TAP_REPO_COMMIT_TOKEN` to `homebrew`. +5. Add required reviewers for `homebrew`. +6. Create environment `crates`. +7. Add `CARGO_REGISTRY_TOKEN` to `crates`. +8. Add required reviewers for `crates`. +9. Update workflows so publish jobs declare `environment: homebrew` or `environment: crates`. + +Verify: +1. Trigger S3 and S4 with `workflow_dispatch`. +2. Confirm each run pauses for required reviewer approval before secret access. + +### 5) Repository rulesets +Link: https://github.com/ZAAI-com/git-same/settings/rules + +Why: Prevent bypass of CI checks and workflow guardrails. + +How: +1. Create a `main` branch ruleset. +2. Require pull requests and at least one reviewer. +3. Require status checks from CI workflows before merge. +4. Include checks that enforce workflow safety. 
+5. Restrict force-push and branch deletion. + +Verify: +1. PR to `main` cannot merge without required checks and review. + +### 6) Branch protection (legacy, if needed) +Link: https://github.com/ZAAI-com/git-same/settings/branches + +Why: Some teams still use branch protection rules instead of rulesets. + +How: +1. If rulesets are active, keep this page minimal to avoid conflicting policy. +2. If not using rulesets, configure equivalent protections on `main`. + +Verify: +1. Only one protection mechanism is authoritative to avoid confusion. + +### 7) Repository security analysis +Link: https://github.com/ZAAI-com/git-same/settings/security_analysis + +Why: Repo-level toggle confirms scanning is active where supported. + +How: +1. Enable secret scanning and push protection if available. +2. Enable Dependabot alerts and updates. +3. Enable dependency graph if disabled. + +Verify: +1. Security tab reports all intended features as enabled. + +### 8) Repository access +Link: https://github.com/ZAAI-com/git-same/settings/access + +Why: Least-privilege human access is as important as token security. + +How: +1. Remove direct admin access where unnecessary. +2. Grant access via teams with defined roles. +3. Limit write/admin to maintainers responsible for releases. + +Verify: +1. Access list matches expected team ownership model. + +## Phase 3: Workflow Validation + +### 1) Run required workflows +Why: Validates that security controls are enforced without breaking delivery. + +How: +1. Trigger S1 manually. +2. Trigger S2 via `workflow_dispatch` and confirm all S1 gate jobs pass before artifact build starts. +3. Trigger S3 via `workflow_dispatch` and verify environment approval for Homebrew publish. +4. Trigger S4 via `workflow_dispatch` and verify environment approval for crates publish. + +Verify: +1. No secret value appears in logs. +2. No publish job runs without reviewer gate. +3. Required checks block merges when failing. + +## Quick Acceptance Checklist + +1. 
Org Actions policy restricted and saved. +2. Org security analysis enabled where supported. +3. Repo environments `release`, `homebrew`, `crates` configured with reviewers. +4. Publish secrets moved to environment scope. +5. Repo ruleset for `main` requires reviews and checks. +6. Workflow runs verified with no secret leakage. diff --git a/docs/plans/move-config-to-sync-folder.md b/docs/plans/move-config-to-sync-folder.md deleted file mode 100644 index 39d7966..0000000 --- a/docs/plans/move-config-to-sync-folder.md +++ /dev/null @@ -1,98 +0,0 @@ -# Move Workspace Config to Sync Folder - -## Status: Proposal (not yet implemented) - -## Problem - -Workspace configs live in `~/.config/git-same//` — a location disconnected from the repos they manage. This creates several friction points: - -- **Requires auto-generated names** — The `` directory (e.g., `github-repos`) is an artifact of this storage model. Users never chose it and gain nothing from it. -- **Not portable** — Moving `~/repos` to another machine loses the config. You'd need to re-run setup. -- **Two locations to back up** — Config in `~/.config`, data in `~/repos`. -- **Not self-describing** — No way to tell a folder is a gisa sync target by looking at it. - -## Proposed Design - -Move workspace config into the sync folder itself: - -```text -~/repos/ ~/.config/git-same/ -├── .git-same/ └── config.toml (global only) -│ ├── config.toml ├── structure = "{org}/{repo}" -│ └── cache.json ├── concurrency = 8 -├── org1/repo1/.git/ └── sync_mode = "fetch" -└── org2/repo3/.git/ -``` - -### Key changes - -1. **Workspace config moves to `{base_path}/.git-same/config.toml`** -2. **Cache moves to `{base_path}/.git-same/cache.json`** -3. **Global config stays at `~/.config/git-same/config.toml`** — holds defaults + a registry of known workspace paths -4. **`default_workspace` becomes a path** — e.g., `default_workspace = "~/repos"` instead of `default_workspace = "github-repos"` -5. 
**Workspace discovery** — scan registered paths in global config, verify `.git-same/config.toml` exists - -### Global config changes - -```toml -# ~/.config/git-same/config.toml -structure = "{org}/{repo}" -concurrency = 8 -sync_mode = "fetch" - -# Default workspace (by path) -default_workspace = "~/repos" - -# Known workspace paths (for discovery) -workspaces = [ - "~/repos", - "~/work/code", -] -``` - -### Migration strategy - -1. On first run after update, detect old-format configs in `~/.config/git-same//` -2. Move each workspace config into its `base_path/.git-same/` -3. Update global config with `workspaces = [...]` array -4. Remove old workspace directories from `~/.config/git-same/` -5. Print a migration summary - -### What this eliminates - -- `WorkspaceConfig.name` field (no longer needed — path IS the identity) -- `WorkspaceManager::name_from_path()` / `unique_name()` -- The entire `~/.config/git-same//` directory structure -- `SetupState.workspace_name` / `name_editing` - -### What this enables - -- `gisa setup` in any directory drops config right there -- Moving a sync folder to another machine preserves the config -- `base_path` becomes the sole workspace identifier everywhere - -## Edge Cases to Handle - -| Case | Resolution | -|------|-----------| -| Sync folder on read-only mount | Fall back to `~/.config` location, warn | -| Sync folder is itself a git repo | Add `.git-same/` to `.gitignore` automatically | -| Org or repo named `.git-same` | Use a more unique name like `.gisa/` | -| User deletes `.git-same/` | Workspace disappears from registry; `gisa setup` re-creates | -| Two workspaces with same base_path | Not possible — path is unique identity | - -## Files to Modify - -- `src/config/workspace_manager.rs` — Complete rewrite of discovery/save/load -- `src/config/workspace.rs` — Remove `name` field, update constructors -- `src/config/parser.rs` — Add `workspaces` array, change `default_workspace` to path -- `src/setup/handler.rs` — Save to 
`base_path/.git-same/` instead of `~/.config` -- `src/setup/state.rs` — Remove `workspace_name` / `name_editing` -- `src/cache.rs` — Update cache path resolution -- `src/commands/workspace.rs` — Rewrite for path-based operations -- `src/tui/handler.rs` — Update workspace matching -- `src/tui/app.rs` — Update workspace loading - -## Estimated Scope - -Medium-large refactor. Migration logic is the riskiest part — must handle partial migrations, permission errors, and rollback. Consider feature-flagging the new storage format during development. diff --git a/docs/plans/optimize-binary-aliases.md b/docs/plans/optimize-binary-aliases.md deleted file mode 100644 index d01b160..0000000 --- a/docs/plans/optimize-binary-aliases.md +++ /dev/null @@ -1,91 +0,0 @@ -# Optimize Binary Aliases - -**Status:** Proposed -**Impact:** ~4x faster release link stage - -## Problem - -The release build produces 4 identical binaries (`git-same`, `gitsame`, `gitsa`, `gisa`), all compiled from `src/main.rs` with no behavioral differences. Combined with the release profile (`lto = true`, `codegen-units = 1`), each binary triggers a full LTO link pass — the most expensive build step. This roughly quadruples link time. - -## Current State - -- `Cargo.toml` defines 4 `[[bin]]` entries all pointing to `src/main.rs` -- `src/main.rs` does not inspect `argv[0]` — all binaries behave identically -- Integration tests only reference `git-same` -- Homebrew formula already installs only `git-same` -- GitHub Release artifacts are single binaries per platform - -## Proposed Solution - -Replace the 4 `[[bin]]` entries with a single `git-same` binary and create aliases via symlinks or documentation depending on the install method. 
- -### Cargo.toml - -Remove 3 duplicate `[[bin]]` sections, keeping only: - -```toml -[[bin]] -name = "git-same" -path = "src/main.rs" -``` - -### Homebrew (S3-Publish-Homebrew.yml) - -Add symlinks in the formula's `install` method: - -```ruby -bin.install_symlink "git-same" => "gitsame" -bin.install_symlink "git-same" => "gitsa" -bin.install_symlink "git-same" => "gisa" -``` - -### cargo install / GitHub Releases - -Document that users can create shell aliases: - -```bash -alias gitsame="git-same" -alias gitsa="git-same" -alias gisa="git-same" -``` - -Or symlinks: - -```bash -for alias in gitsame gitsa gisa; do - ln -sf "$(which git-same)" "$(dirname $(which git-same))/$alias" -done -``` - -### toolkit/Conductor/run.sh - -Add symlink creation after `cargo install --path .`: - -```bash -for alias in gitsame gitsa gisa; do - ln -sf "$HOME/.cargo/bin/git-same" "$HOME/.cargo/bin/$alias" -done -``` - -## Files to Modify - -| File | Change | -|------|--------| -| `Cargo.toml` | Remove 3 duplicate `[[bin]]` entries | -| `toolkit/Conductor/run.sh` | Add symlink creation after install | -| `.github/workflows/S3-Publish-Homebrew.yml` | Add `bin.install_symlink` lines | -| `docs/README.md` | Document alias setup for manual installs | - -## No Changes Needed - -- `src/main.rs` — no binary-name awareness -- `src/cli.rs` — display name hardcoded to `git-same`, completions generate as `gisa` (works via symlink) -- `tests/integration_test.rs` — already only references `git-same` -- `.github/workflows/S2-Release-GitHub.yml` — already builds single artifact per platform - -## Trade-offs - -- **Pro:** ~4x faster link stage in release builds -- **Pro:** Smaller build output (1 binary instead of 4) -- **Con:** `cargo install git-same` no longer auto-installs all 4 aliases -- **Con:** Users need to manually set up aliases or symlinks (unless using Homebrew) diff --git a/docs/plans/path-selector-ux.md b/docs/plans/path-selector-ux.md deleted file mode 100644 index 907ec06..0000000 --- 
a/docs/plans/path-selector-ux.md +++ /dev/null @@ -1,109 +0,0 @@ -# Setup Path Selector UX Ideas - -**Status:** Proposed -**Scope:** Setup wizard (`SelectPath` screen) - -## Goal - -Reduce friction when choosing a base path during setup, especially for users who do not remember exact directory names. - -## Current Friction - -- Path entry depends on free typing + tab completion. -- Suggestions are helpful, but users cannot visually browse real folders. -- New users can get stuck on path syntax (`~`, trailing `/`, nested folders). - -## Option A (Recommended): Inline Folder Navigator Mode - -Add a toggleable browse mode inside the existing `SelectPath` screen. - -### Interaction - -- `b` opens navigator mode -- `Up`/`Down` selects folder -- `Right` enters selected folder -- `Left` goes to parent folder -- `Enter` selects current folder as base path -- `Esc` exits navigator mode back to typed path mode - -### Mockup - -```text - Where should repositories be cloned? - Repos will be organized as: // - - Base Path: ~/Developer - - Browse Folders (Navigator) - Current: ~/Developer - - > projects/ - clients/ - playground/ - archives/ - .. (parent) - - [Enter] Use Folder [Left/Right] Open/Back [Esc] Close -``` - -### Why this fits now - -- Reuses current key model (arrow navigation already standard). -- Keeps existing typed mode and tab completion for power users. -- Minimal architecture impact: can live inside `setup/screens/path.rs` + `setup/handler.rs`. - -## Option B: Two-Pane Explorer - -Split path screen into left tree (folders) + right preview/details. - -### Mockup - -```text - Base Path Picker - - ~/Developer Preview - > projects/ Final path: - clients/ ~/Developer/projects - playground/ Clone layout: - archives/ ~/Developer// -``` - -### Trade-off - -Clearer context, but more rendering complexity and harder to support narrow terminals. 
- -## Option C: Guided Presets + "Browse from here" - -Keep suggestions first, but add one action: "Browse from selected suggestion". - -### Mockup - -```text - Suggestions: - > ~/Git-Same/GitHub (current directory) - ~/Developer - ~/Projects - ~ - - [Enter] Use Suggestion [b] Browse From Suggestion [Tab] Edit -``` - -### Trade-off - -Very small change, but less flexible than full navigator mode. - -## Recommended Rollout - -1. Ship Option C first (fast, low risk). -2. Add Option A navigator in next iteration. -3. Keep typed + completion mode permanently for advanced users. - -## Implementation Notes - -- New `PathInputMode` enum (e.g., `Suggestions | Typing | Browsing`). -- Navigator state fields: - - `browse_current_dir: String` - - `browse_entries: Vec` - - `browse_index: usize` -- Hide dot-folders by default; allow toggle later. -- Always show resulting normalized path in a preview line. diff --git a/docs/plans/remove-global-providers-config-opus.md b/docs/plans/remove-global-providers-config-opus.md deleted file mode 100644 index f052add..0000000 --- a/docs/plans/remove-global-providers-config-opus.md +++ /dev/null @@ -1,64 +0,0 @@ -# Plan: Remove [[providers]] from Global Config - -## Context - -Previous work simplified auth to gh-cli only and restructured providers (Steps 1–5, all done). The remaining task: remove `[[providers]]` from the global user config entirely. Confirmed that `config.providers: Vec` is never used at runtime — all sync/clone operations use `WorkspaceConfig.provider` (workspace-level). The global config should contain only: `concurrency`, `sync_mode`, `structure`, `default_workspace`, `[clone]`, `[filters]`. `ProviderEntry` / `AuthMethod` remain as internal types used by workspace config and the provider factory. 
- ---- - -## Step 1: `src/config/parser.rs` - -- Remove `providers: Vec` field and `#[serde(default = "default_providers")]` annotation from `Config` -- Remove `default_providers()` function -- Remove `use super::provider_config::ProviderEntry;` import (no longer needed here) -- Remove provider validation block from `Config::validate()` (the `for (i, provider)` loop and the empty-providers check) -- Remove `enabled_providers()` method -- Remove the `[[providers]]` section from `Config::default_toml()` (lines ~260–265) -- Remove `ProviderEntry` from `Config::default()` (it's in the providers field) - ---- - -## Step 2: `src/config/mod.rs` - -- Update the doc comment example to remove `[[providers]]` -- Keep `AuthMethod` and `ProviderEntry` in `pub use provider_config::{...}` — required because `WorkspaceProvider.auth: AuthMethod` is a `pub` field and `to_provider_entry()` returns `ProviderEntry`. Removing them from `mod.rs` while `provider_config` is a private module would cause a `E0446` compile error (public field/method using a type that is unreachable outside the module). 
- ---- - -## Step 3: `src/lib.rs` prelude - -- Remove `AuthMethod` and `ProviderEntry` from the prelude re-exports in `src/lib.rs:73` — they remain accessible as `crate::config::AuthMethod` / `crate::config::ProviderEntry` but are no longer advertised at the top-level API surface - ---- - -## Step 4: `src/config/parser_tests.rs` - -- `test_default_config` (line 12): remove `assert_eq!(config.providers.len(), 1)` -- `test_load_full_config` (lines 41–43): remove `[[providers]]` section from the test TOML string (TOML parses fine without it) -- Remove `test_load_multi_provider_config` entirely (lines 58–75) -- Remove `test_validation_rejects_empty_providers` entirely (lines 104–113) -- Remove `test_enabled_providers_filter` entirely (lines 131–152) -- `test_parse_config_with_default_workspace` (lines 165–167): remove `[[providers]]` from content -- `test_parse_config_without_default_workspace` (lines 175–177): remove `[[providers]]` from content -- `test_save_default_workspace_to_replace_without_sync_mode` (lines 241–244): remove `[[providers]]` from content - -> Note: serde ignores unknown TOML keys by default, so existing user config files with `[[providers]]` will continue to load without error — the section is silently ignored. - ---- - -## Files Summary - -| File | Change | -|------|--------| -| `src/config/parser.rs` | Remove `providers` field, `default_providers()`, empty-providers validation, `enabled_providers()`, `[[providers]]` from default TOML | -| `src/config/mod.rs` | Keep `AuthMethod`/`ProviderEntry` in public exports (required for public API); update doc example | -| `src/lib.rs` | Remove `AuthMethod`/`ProviderEntry` from prelude | -| `src/config/parser_tests.rs` | Remove provider-related assertions and tests | - ---- - -## Verification - -1. `cargo fmt -- --check` -2. `cargo clippy -- -D warnings` -3. 
`cargo test` diff --git a/docs/plans/remove-global-providers-config.md b/docs/plans/remove-global-providers-config.md deleted file mode 100644 index a72f4d3..0000000 --- a/docs/plans/remove-global-providers-config.md +++ /dev/null @@ -1,114 +0,0 @@ -# Plan: Remove `[[providers]]` From Global Config - -## Goal -Remove provider definitions from the global config file (`~/.config/git-same/config.toml`) and keep provider configuration workspace-scoped. -After this change, global config should only contain: -- `concurrency` -- `sync_mode` -- `structure` -- `default_workspace` -- `[clone]` -- `[filters]` - -## Scope Decision -- Treat this as a **breaking library API change** (CLI behavior remains aligned with current workspace-based flow). -- Runtime already uses workspace provider config for setup/sync operations. - -## Implementation Steps - -### 1. Confirm release/API scope -- Mark this work as breaking for crate consumers because `AuthMethod`/`ProviderEntry` are currently part of public interfaces. -- Audit affected surfaces: - - `src/lib.rs` - - `src/config/mod.rs` - - `src/config/workspace.rs` - - `src/auth/mod.rs` - - `src/provider/mod.rs` - -### 2. Remove global `[[providers]]` schema from parser -Update `src/config/parser.rs`: -- Remove `providers: Vec` from `Config`. -- Remove `default_providers()` helper. -- Remove provider-specific validation (empty-check and per-provider loop). -- Remove `enabled_providers()` method. -- Remove `[[providers]]` block from `Config::default_toml()`. -- Remove unused import of `ProviderEntry`. - -### 3. Redesign workspace/provider bridge API -Update `src/config/workspace.rs`: -- Remove `to_provider_entry()` adapter from `WorkspaceProvider`. -- Add direct helpers required by auth/provider code paths (for example, API URL/name helpers), so runtime no longer depends on `ProviderEntry`. - -### 4. 
Remove `AuthMethod` from public workspace model -- Since auth is gh-cli-only, remove `auth` from `WorkspaceProvider` and related serialization/tests. -- Keep workspace provider fields that are still user-specific (`kind`, `api_url`, `prefer_ssh`). -- Update `src/config/provider_config.rs` as needed so legacy type usage is minimized or internalized. - -### 5. Update auth/provider entrypoints to use workspace provider type -Update: -- `src/auth/mod.rs` -- `src/provider/mod.rs` - -Actions: -- Replace function signatures that currently accept `ProviderEntry`. -- Preserve existing behavior for host extraction and enterprise URL handling. - -### 6. Migrate all runtime call sites -Update: -- `src/workflows/sync_workspace.rs` -- `src/setup/handler.rs` - -Actions: -- Pass `WorkspaceProvider` directly to auth/provider layers. -- Remove intermediate conversion calls. - -### 7. Remove public re-exports for legacy provider config types -Update: -- `src/config/mod.rs` -- `src/lib.rs` -- `src/lib_tests.rs` - -Actions: -- Remove prelude/config re-exports of `AuthMethod` and `ProviderEntry`. -- Adjust prelude tests to validate remaining public API. - -### 8. Update parser tests for new global schema -Update `src/config/parser_tests.rs`: -- Remove provider-related assertions/tests. -- Remove `[[providers]]` snippets where no longer necessary. -- Add backward-compat test: config containing legacy `[[providers]]` still parses and is ignored. - -### 9. Update workspace/auth/provider tests -Update relevant tests to new interfaces and structs: -- `src/config/workspace_tests.rs` -- `src/provider/mod_tests.rs` -- `src/auth/mod_tests.rs` -- `src/workflows/sync_workspace_tests.rs` - -### 10. Update docs and examples -Update: -- `docs/README.md` -- `.context/GIT-SAME-DOCUMENTATION.md` (if maintained in parallel) - -Actions: -- Remove global `[[providers]]` examples. -- Document provider configuration as workspace-scoped. - -### 11. 
Validation -Run: -- `cargo fmt -- --check` -- `cargo clippy -- -D warnings` -- `cargo test` - -Manual smoke checks: -- `gisa init` -- `gisa setup` -- `gisa sync --dry-run` -- Verify legacy config with `[[providers]]` still loads without failure. - -### 12. Delivery strategy (recommended) -Split into 3 commits: -1. Parser/schema cleanup + parser tests -2. API redesign + runtime call-site migration -3. Docs + remaining test updates - diff --git a/src/auth/mod.rs b/src/auth/mod.rs index 86c5f71..4b01265 100644 --- a/src/auth/mod.rs +++ b/src/auth/mod.rs @@ -2,22 +2,11 @@ //! //! This module handles authentication with Git hosting providers //! using the GitHub CLI (`gh auth token`). -//! -//! # Example -//! -//! ```no_run -//! use git_same::auth::{get_auth_for_provider, AuthResult}; -//! use git_same::config::ProviderEntry; -//! -//! let provider = ProviderEntry::github(); -//! let auth = get_auth_for_provider(&provider).expect("Failed to authenticate"); -//! println!("Authenticated as {:?} via {}", auth.username, auth.method); -//! ``` pub mod gh_cli; pub mod ssh; -use crate::config::ProviderEntry; +use crate::config::WorkspaceProvider; use crate::errors::AppError; use tracing::{debug, warn}; @@ -53,7 +42,6 @@ impl std::fmt::Display for ResolvedAuthMethod { pub fn get_auth() -> Result { debug!("Resolving authentication via gh CLI"); - // Try gh CLI let gh_installed = gh_cli::is_installed(); let gh_authenticated = gh_installed && gh_cli::is_authenticated(); debug!(gh_installed, gh_authenticated, "Checking GitHub CLI status"); @@ -78,7 +66,6 @@ pub fn get_auth() -> Result { } } - // No authentication found - provide helpful error message let ssh_note = if ssh::has_ssh_keys() { "\n\nNote: SSH keys detected. While SSH keys work for git clone/push,\n\ you still need a provider API token for repository discovery.\n\ @@ -98,8 +85,8 @@ pub fn get_auth() -> Result { ))) } -/// Get authentication for a specific provider configuration. 
-pub fn get_auth_for_provider(provider: &ProviderEntry) -> Result { +/// Get authentication for a specific workspace provider configuration. +pub fn get_auth_for_provider(provider: &WorkspaceProvider) -> Result { debug!( api_url = provider.api_url.as_deref().unwrap_or("default"), "Resolving authentication for provider" @@ -122,7 +109,6 @@ pub fn get_auth_for_provider(provider: &ProviderEntry) -> Result &'static str { /// Prints the gisa ASCII art banner to stdout (CLI mode). pub fn print_banner() { - // Build full art from shared constants let version = env!("CARGO_PKG_VERSION"); let version_display = format!("{:^6}", version); - let line5 = format!("{LINE5_PREFIX}{version_display}{LINE5_SUFFIX}"); - let art = format!( - "\n{}\n{}\n{}\n{}\n{}\n{}", - LINES[0], LINES[1], LINES[2], LINES[3], line5, LAST_LINE - ); - println!("{}", style(art).cyan().bold()); + println!(); + for text in &LINES { + println!("{}", cli_gradient_line(text, &GRADIENT_STOPS)); + } + println!("{}", cli_line5(&version_display, &GRADIENT_STOPS)); + println!("{}", cli_gradient_line(LAST_LINE, &GRADIENT_STOPS)); + let subtitle = subheadline(); let visible_len = subtitle.chars().count(); let pad = if visible_len < ART_WIDTH { @@ -62,7 +62,93 @@ pub fn print_banner() { } else { 0 }; - println!("{}{}\n", " ".repeat(pad + 1), style(subtitle).dim()); + println!( + "{}{}\n", + " ".repeat(pad.saturating_sub(1)), + style(subtitle).dim() + ); +} + +fn styled_gradient_chunk(text: &str, r: u8, g: u8, b: u8, force_styling: bool) -> String { + let styled = style(text).true_color(r, g, b).bold(); + if force_styling { + format!("{}", styled.force_styling(true)) + } else { + format!("{}", styled) + } +} + +fn styled_version_badge(text: &str, r: u8, g: u8, b: u8, force_styling: bool) -> String { + let styled = style(text).black().on_true_color(r, g, b).bold(); + if force_styling { + format!("{}", styled.force_styling(true)) + } else { + format!("{}", styled) + } +} + +fn cli_gradient_line(text: &str, stops: &[(u8, 
u8, u8)]) -> String { + cli_gradient_line_with_force(text, stops, false) +} + +fn cli_gradient_line_with_force(text: &str, stops: &[(u8, u8, u8)], force_styling: bool) -> String { + let chars: Vec = text.chars().collect(); + let len = chars.len().max(1); + + chars + .into_iter() + .enumerate() + .map(|(i, ch)| { + let t = i as f64 / (len - 1).max(1) as f64; + let (r, g, b) = interpolate_stops(stops, t); + styled_gradient_chunk(&ch.to_string(), r, g, b, force_styling) + }) + .collect() +} + +fn cli_line5(version_display: &str, stops: &[(u8, u8, u8)]) -> String { + cli_line5_with_force(version_display, stops, false) +} + +fn cli_line5_with_force( + version_display: &str, + stops: &[(u8, u8, u8)], + force_styling: bool, +) -> String { + let prefix_len = LINE5_PREFIX.chars().count(); + let version_len = version_display.chars().count(); + let full_len = prefix_len + version_len + LINE5_SUFFIX.chars().count(); + let denom = (full_len - 1).max(1) as f64; + + let mut out = String::new(); + for (i, ch) in LINE5_PREFIX.chars().enumerate() { + let t = i as f64 / denom; + let (r, g, b) = interpolate_stops(stops, t); + out.push_str(&styled_gradient_chunk( + &ch.to_string(), + r, + g, + b, + force_styling, + )); + } + + let ver_t = prefix_len as f64 / denom; + let (vr, vg, vb) = interpolate_stops(stops, ver_t); + out.push_str(&styled_version_badge( + version_display, + vr, + vg, + vb, + force_styling, + )); + + let suffix_pos = prefix_len + version_len; + let suffix_t = suffix_pos as f64 / denom; + let (r, g, b) = interpolate_stops(stops, suffix_t); + out.push_str(&styled_gradient_chunk(LINE5_SUFFIX, r, g, b, force_styling)); + + out } // --------------------------------------------------------------------------- @@ -79,7 +165,6 @@ use ratatui::{ }; /// Linearly interpolate between RGB color stops. 
-#[cfg(feature = "tui")] pub(crate) fn interpolate_stops(stops: &[(u8, u8, u8)], t: f64) -> (u8, u8, u8) { let t = t.clamp(0.0, 1.0); let segments = stops.len() - 1; diff --git a/src/banner_tests.rs b/src/banner_tests.rs index 05ac418..f675150 100644 --- a/src/banner_tests.rs +++ b/src/banner_tests.rs @@ -10,7 +10,6 @@ fn print_banner_executes_without_panicking() { print_banner(); } -#[cfg(feature = "tui")] #[test] fn interpolate_stops_clamps_to_bounds() { let start = interpolate_stops(&[(0, 0, 0), (255, 255, 255)], -1.0); @@ -20,6 +19,20 @@ fn interpolate_stops_clamps_to_bounds() { assert_eq!(end, (255, 255, 255)); } +#[test] +fn cli_gradient_line_uses_truecolor_sequences() { + let rendered = cli_gradient_line_with_force("AB", &GRADIENT_STOPS, true); + assert!(rendered.contains("\u{1b}[38;2;59;130;246")); + assert!(rendered.contains("\u{1b}[38;2;34;197;94")); +} + +#[test] +fn cli_line5_styles_version_as_badge() { + let version_display = format!("{:^6}", "1.1.0"); + let rendered = cli_line5_with_force(&version_display, &GRADIENT_STOPS, true); + assert!(rendered.contains("\u{1b}[48;2;")); +} + #[cfg(feature = "tui")] #[test] fn render_banner_handles_multiple_widths() { diff --git a/src/cache/discovery.rs b/src/cache/discovery.rs index 588e30b..f60ea5d 100644 --- a/src/cache/discovery.rs +++ b/src/cache/discovery.rs @@ -110,10 +110,11 @@ pub struct CacheManager { } impl CacheManager { - /// Create a cache manager for a specific workspace. - pub fn for_workspace(workspace_name: &str) -> Result { - let cache_path = crate::config::WorkspaceManager::cache_path(workspace_name) - .map_err(|e| anyhow::anyhow!("{}", e))?; + /// Create a cache manager for a specific workspace root path. + /// + /// Cache is persisted at `/.git-same/cache.json`. 
+ pub fn for_workspace(root: &Path) -> Result { + let cache_path = crate::config::WorkspaceStore::cache_path(root); Ok(Self { cache_path, ttl: DEFAULT_CACHE_TTL, diff --git a/src/cache/sync_history.rs b/src/cache/sync_history.rs index f565263..6f2140c 100644 --- a/src/cache/sync_history.rs +++ b/src/cache/sync_history.rs @@ -1,7 +1,7 @@ use anyhow::{Context, Result}; use serde::{Deserialize, Serialize}; use std::fs; -use std::path::PathBuf; +use std::path::{Path, PathBuf}; use tracing::debug; use crate::tui::app::SyncHistoryEntry; @@ -17,19 +17,16 @@ struct SyncHistoryFile { /// Manages per-workspace sync history persistence. /// -/// History is stored at `~/.config/git-same//sync-history.json`. +/// History is stored at `/.git-same/sync-history.json`. pub struct SyncHistoryManager { path: PathBuf, } impl SyncHistoryManager { - /// Create a history manager for a specific workspace. - pub fn for_workspace(workspace_name: &str) -> Result { - let dir = crate::config::WorkspaceManager::workspace_dir(workspace_name) - .map_err(|e| anyhow::anyhow!("{}", e))?; - Ok(Self { - path: dir.join("sync-history.json"), - }) + /// Create a history manager for a specific workspace root path. + pub fn for_workspace(root: &Path) -> Result { + let path = crate::config::WorkspaceStore::sync_history_path(root); + Ok(Self { path }) } /// Load sync history from disk. Returns empty vec if file doesn't exist. 
diff --git a/src/cli.rs b/src/cli.rs index 7d4be29..69824dc 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -8,7 +8,8 @@ use std::path::PathBuf; /// Git-Same - Mirror GitHub structure /orgs/repos/ to local file system /// -/// Available as: git-same, gitsame, gitsa, gisa +/// Available as: git-same (primary), gitsame, gitsa, gisa (symlink aliases) +/// Alias list: see toolkit/packaging/binary-aliases.txt /// Also works as: git same (git subcommand) #[derive(Parser, Debug)] #[command(name = "git-same")] @@ -55,6 +56,9 @@ pub enum Command { /// Reset gisa — remove all config, workspaces, and cache Reset(ResetArgs), + + /// Scan a directory tree for unregistered workspaces (.git-same/ folders) + Scan(ScanArgs), } /// Arguments for the init command @@ -80,7 +84,7 @@ pub struct SetupArgs { /// Arguments for the sync command #[derive(Args, Debug)] pub struct SyncCmdArgs { - /// Workspace path or name to sync + /// Workspace path or folder name to sync #[arg(short, long)] pub workspace: Option, @@ -108,7 +112,7 @@ pub struct SyncCmdArgs { /// Arguments for the status command #[derive(Args, Debug)] pub struct StatusArgs { - /// Workspace path or name + /// Workspace path or folder name #[arg(short, long)] pub workspace: Option, @@ -148,7 +152,8 @@ pub enum WorkspaceCommand { /// Arguments for the workspace default subcommand #[derive(Args, Debug)] pub struct WorkspaceDefaultArgs { - /// Workspace path or name to set as default (omit to show current) + /// Workspace path or folder name to set as default (omit to show current) + #[arg(value_name = "WORKSPACE")] pub name: Option, /// Clear the default workspace @@ -164,6 +169,21 @@ pub struct ResetArgs { pub force: bool, } +/// Arguments for the scan command +#[derive(Args, Debug)] +pub struct ScanArgs { + /// Root directory to scan (default: current directory) + pub path: Option, + + /// Maximum directory depth to search (default: 5) + #[arg(short, long, default_value = "5")] + pub depth: usize, + + /// Register discovered 
workspaces automatically + #[arg(long)] + pub register: bool, +} + impl Cli { /// Parse command line arguments. pub fn parse_args() -> Self { diff --git a/src/commands/mod.rs b/src/commands/mod.rs index b4a1ea4..1ae8e43 100644 --- a/src/commands/mod.rs +++ b/src/commands/mod.rs @@ -5,6 +5,7 @@ pub mod init; pub mod reset; +pub mod scan; #[cfg(feature = "tui")] pub mod setup; pub mod status; @@ -29,7 +30,13 @@ pub async fn run_command( command: &Command, config_path: Option<&Path>, output: &Output, + quiet: bool, ) -> Result<()> { + // Print the canonical CLI banner for all subcommands. + if !output.is_json() && !quiet { + crate::banner::print_banner(); + } + // Commands that don't need config if let Command::Init(args) = command { return run_init(args, output).await; @@ -37,6 +44,9 @@ pub async fn run_command( if let Command::Reset(args) = command { return reset::run(args, output).await; } + if let Command::Scan(args) = command { + return scan::run(args, config_path, output); + } #[cfg(feature = "tui")] if let Command::Setup(args) = command { return setup::run(args, output).await; @@ -46,7 +56,7 @@ pub async fn run_command( let config = load_config(config_path)?; match command { - Command::Init(_) | Command::Reset(_) => unreachable!(), + Command::Init(_) | Command::Reset(_) | Command::Scan(_) => unreachable!(), #[cfg(feature = "tui")] Command::Setup(_) => unreachable!(), Command::Sync(args) => run_sync_cmd(args, &config, output).await, diff --git a/src/commands/reset.rs b/src/commands/reset.rs index deb0d3e..b478d9b 100644 --- a/src/commands/reset.rs +++ b/src/commands/reset.rs @@ -16,16 +16,15 @@ enum ResetScope { Everything, ConfigOnly, AllWorkspaces, - Workspace(String), + Workspace(PathBuf), } /// Rich detail about a single workspace for display. 
struct WorkspaceDetail { - name: String, - base_path: String, + root_path: PathBuf, orgs: Vec, last_synced: Option, - dir: PathBuf, + dot_dir: PathBuf, cache_size: Option, } @@ -103,8 +102,8 @@ fn discover_targets() -> Result { /// Build rich detail for a workspace. fn build_workspace_detail(ws: &WorkspaceConfig) -> Result { - let dir = WorkspaceManager::workspace_dir(&ws.name)?; - let cache_file = WorkspaceManager::cache_path(&ws.name)?; + let dot_dir = WorkspaceManager::dot_dir(&ws.root_path); + let cache_file = WorkspaceManager::cache_path(&ws.root_path); let cache_size = if cache_file.exists() { std::fs::metadata(&cache_file).map(|m| m.len()).ok() @@ -113,11 +112,10 @@ fn build_workspace_detail(ws: &WorkspaceConfig) -> Result { }; Ok(WorkspaceDetail { - name: ws.name.clone(), - base_path: ws.base_path.clone(), + root_path: ws.root_path.clone(), orgs: ws.orgs.clone(), last_synced: ws.last_synced.clone(), - dir, + dot_dir, cache_size, }) } @@ -145,8 +143,8 @@ fn display_detailed_targets(scope: &ResetScope, target: &ResetTarget, output: &O display_workspace_detail(ws, output); } } - ResetScope::Workspace(name) => { - if let Some(ws) = target.workspaces.iter().find(|w| w.name == *name) { + ResetScope::Workspace(path) => { + if let Some(ws) = target.workspaces.iter().find(|w| w.root_path == *path) { display_workspace_detail(ws, output); } } @@ -155,7 +153,8 @@ fn display_detailed_targets(scope: &ResetScope, target: &ResetTarget, output: &O /// Display detail for a single workspace. 
fn display_workspace_detail(ws: &WorkspaceDetail, output: &Output) { - output.info(&format!(" Workspace at {}:", ws.base_path)); + let path_display = crate::config::workspace::tilde_collapse_path(&ws.root_path); + output.info(&format!(" Workspace at {}:", path_display)); if ws.orgs.is_empty() { output.info(" Orgs: (all)"); @@ -178,7 +177,7 @@ fn display_workspace_detail(ws: &WorkspaceDetail, output: &Output) { output.info(&format!(" Cache: {}", format_bytes(size))); } - output.info(&format!(" Directory: {}", ws.dir.display())); + output.info(&format!(" Config dir: {}", ws.dot_dir.display())); } /// Execute the reset based on scope. @@ -205,11 +204,11 @@ fn execute_reset(scope: &ResetScope, target: &ResetTarget, output: &Output) -> R had_errors |= !remove_workspace_dir(ws, output); } } - ResetScope::Workspace(name) => { - if let Some(ws) = target.workspaces.iter().find(|w| w.name == *name) { + ResetScope::Workspace(path) => { + if let Some(ws) = target.workspaces.iter().find(|w| w.root_path == *path) { had_errors |= !remove_workspace_dir(ws, output); } else { - output.warn(&format!("Workspace '{}' not found.", name)); + output.warn(&format!("Workspace '{}' not found.", path.display())); had_errors = true; } } @@ -230,8 +229,8 @@ fn execute_reset(scope: &ResetScope, target: &ResetTarget, output: &Output) -> R ResetScope::AllWorkspaces => { output.success("All workspaces removed."); } - ResetScope::Workspace(name) => { - output.success(&format!("Workspace '{}' removed.", name)); + ResetScope::Workspace(path) => { + output.success(&format!("Workspace '{}' removed.", path.display())); } } Ok(()) @@ -239,15 +238,18 @@ fn execute_reset(scope: &ResetScope, target: &ResetTarget, output: &Output) -> R } fn remove_workspace_dir(ws: &WorkspaceDetail, output: &Output) -> bool { - match std::fs::remove_dir_all(&ws.dir) { + let path_display = crate::config::workspace::tilde_collapse_path(&ws.root_path); + match std::fs::remove_dir_all(&ws.dot_dir) { Ok(()) => { - 
output.success(&format!("Removed workspace at {}", ws.base_path)); + // Also unregister from global config + let _ = Config::remove_from_registry(&path_display); + output.success(&format!("Removed workspace config at {}", path_display)); true } Err(e) => { output.warn(&format!( - "Failed to remove workspace at {}: {}", - ws.base_path, e + "Failed to remove workspace config at {}: {}", + path_display, e )); false } @@ -303,7 +305,10 @@ fn prompt_scope(target: &ResetTarget) -> Result { } if target.has_workspaces() { - options.push(("A specific workspace", ResetScope::Workspace(String::new()))); + options.push(( + "A specific workspace", + ResetScope::Workspace(PathBuf::new()), + )); } // If only one option, skip the menu @@ -332,6 +337,7 @@ fn prompt_scope(target: &ResetTarget) -> Result { fn prompt_workspace(workspaces: &[WorkspaceDetail]) -> Result { eprintln!("\nSelect a workspace to delete:"); for (i, ws) in workspaces.iter().enumerate() { + let path_display = crate::config::workspace::tilde_collapse_path(&ws.root_path); let orgs = if ws.orgs.is_empty() { "all orgs".to_string() } else { @@ -342,11 +348,13 @@ fn prompt_workspace(workspaces: &[WorkspaceDetail]) -> Result { .as_deref() .map(humanize_timestamp) .unwrap_or_else(|| "never synced".to_string()); - eprintln!(" {}. {} ({}, {})", i + 1, ws.base_path, orgs, synced); + eprintln!(" {}. {} ({}, {})", i + 1, path_display, orgs, synced); } let choice = prompt_number("> ", workspaces.len())?; - Ok(ResetScope::Workspace(workspaces[choice - 1].name.clone())) + Ok(ResetScope::Workspace( + workspaces[choice - 1].root_path.clone(), + )) } /// Read a number from stdin (1-based, within max). 
diff --git a/src/commands/reset_tests.rs b/src/commands/reset_tests.rs index 0934651..a789aeb 100644 --- a/src/commands/reset_tests.rs +++ b/src/commands/reset_tests.rs @@ -26,11 +26,10 @@ fn test_reset_target_not_empty_with_workspaces() { config_dir: PathBuf::from("/some/dir"), config_file: None, workspaces: vec![WorkspaceDetail { - name: "ws1".to_string(), - base_path: "~/github".to_string(), + root_path: PathBuf::from("/tmp/ws1"), orgs: vec!["org1".to_string()], last_synced: None, - dir: PathBuf::from("/some/dir/ws1"), + dot_dir: PathBuf::from("/tmp/ws1/.git-same"), cache_size: None, }], }; @@ -71,11 +70,10 @@ fn test_format_bytes() { #[test] fn test_display_workspace_detail_no_panic() { let ws = WorkspaceDetail { - name: "test".to_string(), - base_path: "~/github".to_string(), + root_path: PathBuf::from("/tmp/test"), orgs: vec!["org1".to_string(), "org2".to_string()], last_synced: Some("2026-02-24T10:00:00Z".to_string()), - dir: PathBuf::from("/tmp/test"), + dot_dir: PathBuf::from("/tmp/test/.git-same"), cache_size: Some(12345), }; let output = Output::new(crate::output::Verbosity::Quiet, false); @@ -88,11 +86,10 @@ fn test_display_detailed_targets_everything() { config_dir: PathBuf::from("/tmp/test"), config_file: Some(PathBuf::from("/tmp/test/config.toml")), workspaces: vec![WorkspaceDetail { - name: "ws1".to_string(), - base_path: "~/github".to_string(), + root_path: PathBuf::from("/tmp/ws1"), orgs: Vec::new(), last_synced: None, - dir: PathBuf::from("/tmp/test/ws1"), + dot_dir: PathBuf::from("/tmp/ws1/.git-same"), cache_size: None, }], }; diff --git a/src/commands/scan.rs b/src/commands/scan.rs new file mode 100644 index 0000000..62738ba --- /dev/null +++ b/src/commands/scan.rs @@ -0,0 +1,178 @@ +//! Scan command — find unregistered .git-same/ workspace folders. 
+ +use crate::cli::ScanArgs; +use crate::config::{Config, WorkspaceStore}; +use crate::errors::{AppError, Result}; +use crate::output::Output; +use std::collections::HashSet; +use std::path::{Path, PathBuf}; + +/// Run the scan command. +pub fn run(args: &ScanArgs, config_path: Option<&Path>, output: &Output) -> Result<()> { + let root = match &args.path { + Some(p) => p.clone(), + None => std::env::current_dir() + .map_err(|e| AppError::config(format!("Failed to resolve current directory: {}", e)))?, + }; + + let root = std::fs::canonicalize(&root).map_err(|e| { + AppError::config(format!( + "Failed to access scan root {}: {}", + root.display(), + e + )) + })?; + output.info(&format!( + "Scanning {} (depth {})", + root.display(), + args.depth + )); + + let found = scan_for_workspaces(&root, args.depth); + + if found.is_empty() { + output.info("No .git-same/ workspaces found."); + return Ok(()); + } + + // Load existing registry to flag already-registered workspaces + let global = match config_path { + Some(path) => Config::load_from(path), + None => Config::load(), + }?; + let registered: std::collections::HashSet = global + .workspaces + .iter() + .map(|p| { + let expanded = shellexpand::tilde(p); + std::fs::canonicalize(expanded.as_ref()) + .unwrap_or_else(|_| PathBuf::from(expanded.as_ref())) + }) + .collect(); + + let mut unregistered_count = 0usize; + let mut register_failures = Vec::new(); + for ws_root in &found { + let is_registered = registered.contains(ws_root); + let tilde = crate::config::workspace::tilde_collapse_path(ws_root); + if is_registered { + output.plain(&format!(" [registered] {}", tilde)); + } else { + output.plain(&format!(" [unregistered] {}", tilde)); + unregistered_count += 1; + + if args.register { + match WorkspaceStore::load(ws_root) { + Ok(ws) => { + let save_result = match config_path { + Some(path) => WorkspaceStore::save_with_registry_config_path(&ws, path), + None => WorkspaceStore::save(&ws), + }; + match save_result { + Ok(()) 
=> { + output.success(&format!(" Registered: {}", tilde)); + unregistered_count = unregistered_count.saturating_sub(1); + } + Err(e) => { + output.warn(&format!(" Failed to register {}: {}", tilde, e)); + register_failures.push(format!("{}: {}", tilde, e)); + } + } + } + Err(e) => { + output.warn(&format!(" Skipping {}: {}", tilde, e)); + register_failures.push(format!("{}: {}", tilde, e)); + } + } + } + } + } + + output.plain(""); + output.info(&format!( + "Found {} workspace(s): {} registered, {} unregistered{}", + found.len(), + found.len() - unregistered_count, + unregistered_count, + if unregistered_count > 0 && !args.register { + " (use --register to add them)" + } else { + "" + } + )); + + if !register_failures.is_empty() { + let first = register_failures + .first() + .map(String::as_str) + .unwrap_or("unknown error"); + return Err(AppError::config(format!( + "Failed to register {} workspace(s). First error: {}", + register_failures.len(), + first + ))); + } + + Ok(()) +} + +/// Recursively scan for directories containing `.git-same/config.toml`. 
+fn scan_for_workspaces(root: &Path, max_depth: usize) -> Vec { + let mut results = Vec::new(); + let mut visited = HashSet::new(); + scan_recursive(root, 0, max_depth, &mut results, &mut visited); + results.sort(); + results.dedup(); + results +} + +fn scan_recursive( + dir: &Path, + depth: usize, + max_depth: usize, + results: &mut Vec, + visited: &mut HashSet, +) { + if depth > max_depth { + return; + } + + let canonical_dir = std::fs::canonicalize(dir).unwrap_or_else(|_| dir.to_path_buf()); + if !visited.insert(canonical_dir.clone()) { + return; + } + + // Check if this directory is a workspace root + let config_path = WorkspaceStore::config_path(&canonical_dir); + if config_path.exists() { + results.push(canonical_dir); + // Don't recurse into workspace directories + return; + } + + let Ok(entries) = std::fs::read_dir(&canonical_dir) else { + return; + }; + + for entry in entries.flatten() { + // Avoid traversing symlinks to directories. + let Ok(file_type) = entry.file_type() else { + continue; + }; + if !file_type.is_dir() { + continue; + } + + let path = entry.path(); + let name = entry.file_name().to_string_lossy().to_string(); + // Skip hidden dirs (except .git-same itself is already handled above) + if name.starts_with('.') { + continue; + } + scan_recursive(&path, depth + 1, max_depth, results, visited); + } +} + +#[cfg(test)] +#[path = "scan_tests.rs"] +mod tests; diff --git a/src/commands/scan_tests.rs b/src/commands/scan_tests.rs new file mode 100644 index 0000000..e7eab71 --- /dev/null +++ b/src/commands/scan_tests.rs @@ -0,0 +1,136 @@ +use super::*; + +#[test] +fn scan_empty_directory_finds_nothing() { + let temp = tempfile::tempdir().unwrap(); + let found = scan_for_workspaces(temp.path(), 3); + assert!(found.is_empty()); +} + +#[test] +fn scan_finds_git_same_workspace() { + let temp = tempfile::tempdir().unwrap(); + let ws_root = temp.path().join("my-workspace"); + let dot_dir = ws_root.join(".git-same"); + 
std::fs::create_dir_all(&dot_dir).unwrap(); + std::fs::write( + dot_dir.join("config.toml"), + "[provider]\nkind = \"github\"\n", + ) + .unwrap(); + + let found = scan_for_workspaces(temp.path(), 3); + assert_eq!(found.len(), 1); + assert_eq!(found[0], std::fs::canonicalize(&ws_root).unwrap()); +} + +#[test] +fn scan_does_not_recurse_into_workspace() { + let temp = tempfile::tempdir().unwrap(); + let outer = temp.path().join("outer"); + let outer_dot = outer.join(".git-same"); + std::fs::create_dir_all(&outer_dot).unwrap(); + std::fs::write( + outer_dot.join("config.toml"), + "[provider]\nkind = \"github\"\n", + ) + .unwrap(); + + // Inner workspace — should NOT appear because we stop recursing at outer + let inner = outer.join("inner"); + let inner_dot = inner.join(".git-same"); + std::fs::create_dir_all(&inner_dot).unwrap(); + std::fs::write( + inner_dot.join("config.toml"), + "[provider]\nkind = \"github\"\n", + ) + .unwrap(); + + let found = scan_for_workspaces(temp.path(), 5); + assert_eq!(found.len(), 1); + assert_eq!(found[0], std::fs::canonicalize(&outer).unwrap()); +} + +#[cfg(unix)] +#[test] +fn scan_ignores_symlinked_directories() { + use std::os::unix::fs::symlink; + + let temp = tempfile::tempdir().unwrap(); + let ws_root = temp.path().join("my-workspace"); + let dot_dir = ws_root.join(".git-same"); + std::fs::create_dir_all(&dot_dir).unwrap(); + std::fs::write( + dot_dir.join("config.toml"), + "[provider]\nkind = \"github\"\n", + ) + .unwrap(); + + symlink(&ws_root, temp.path().join("workspace-link")).unwrap(); + + let found = scan_for_workspaces(temp.path(), 3); + assert_eq!(found.len(), 1); + assert_eq!(found[0], std::fs::canonicalize(&ws_root).unwrap()); +} + +#[test] +fn run_register_with_custom_config_path_updates_registry() { + let temp = tempfile::tempdir().unwrap(); + let scan_root = temp.path().join("scan-root"); + let ws_root = scan_root.join("team").join("project"); + let dot_dir = ws_root.join(".git-same"); + 
std::fs::create_dir_all(&dot_dir).unwrap(); + std::fs::write( + dot_dir.join("config.toml"), + "[provider]\nkind = \"github\"\n", + ) + .unwrap(); + + let custom_config_path = temp.path().join("custom-config.toml"); + std::fs::write(&custom_config_path, crate::config::Config::default_toml()).unwrap(); + + let args = crate::cli::ScanArgs { + path: Some(scan_root), + depth: 5, + register: true, + }; + let output = crate::output::Output::quiet(); + run(&args, Some(&custom_config_path), &output).unwrap(); + + let cfg = crate::config::Config::load_from(&custom_config_path).unwrap(); + assert_eq!(cfg.workspaces.len(), 1); + let expected_suffix = std::path::Path::new("scan-root") + .join("team") + .join("project"); + assert!( + std::path::Path::new(&cfg.workspaces[0]).ends_with(&expected_suffix), + "Unexpected registered workspace path: {}", + cfg.workspaces[0] + ); +} + +#[test] +fn run_returns_error_when_custom_config_is_invalid() { + let temp = tempfile::tempdir().unwrap(); + let scan_root = temp.path().join("scan-root"); + let ws_root = scan_root.join("team").join("project"); + let dot_dir = ws_root.join(".git-same"); + std::fs::create_dir_all(&dot_dir).unwrap(); + std::fs::write( + dot_dir.join("config.toml"), + "[provider]\nkind = \"github\"\n", + ) + .unwrap(); + + let invalid_config_path = temp.path().join("invalid-config.toml"); + std::fs::write(&invalid_config_path, "invalid = [").unwrap(); + + let args = crate::cli::ScanArgs { + path: Some(scan_root), + depth: 5, + register: false, + }; + let output = crate::output::Output::quiet(); + let err = run(&args, Some(&invalid_config_path), &output).unwrap_err(); + assert!(err.to_string().contains("Failed to parse config")); +} diff --git a/src/commands/status.rs b/src/commands/status.rs index e1d4b90..2c19026 100644 --- a/src/commands/status.rs +++ b/src/commands/status.rs @@ -9,10 +9,10 @@ use crate::output::{format_count, Output}; /// Show status of repositories. 
pub async fn run(args: &StatusArgs, config: &Config, output: &Output) -> Result<()> { - let mut workspace = WorkspaceManager::resolve(args.workspace.as_deref(), config)?; + let workspace = WorkspaceManager::resolve(args.workspace.as_deref(), config)?; // Ensure base path exists (offer to fix if user moved it) - super::ensure_base_path(&mut workspace, output)?; + super::ensure_base_path(&workspace, output)?; let base_path = workspace.expanded_base_path(); let structure = workspace.structure.as_deref().unwrap_or(&config.structure); diff --git a/src/commands/status_tests.rs b/src/commands/status_tests.rs index 4500d0d..41a1b3f 100644 --- a/src/commands/status_tests.rs +++ b/src/commands/status_tests.rs @@ -21,7 +21,8 @@ async fn test_status_no_workspaces() { let err = result.expect_err("nonexistent workspace should return an error"); assert!( err.to_string().contains("not found") - || err.to_string().contains("No workspace configured for path"), + || err.to_string().contains("No workspace config found") + || err.to_string().contains("Configuration error"), "unexpected error: {}", err ); diff --git a/src/commands/support/workspace.rs b/src/commands/support/workspace.rs index 2c5b881..e0b34d4 100644 --- a/src/commands/support/workspace.rs +++ b/src/commands/support/workspace.rs @@ -1,14 +1,12 @@ -use crate::config::{WorkspaceConfig, WorkspaceManager}; +use crate::config::WorkspaceConfig; use crate::errors::{AppError, Result}; use crate::output::Output; -use std::io::{self, BufRead, Write}; -/// Ensure the workspace base_path exists. +/// Ensure the workspace root path exists. /// -/// If the configured path is missing, checks whether the current directory -/// could be the new location and offers to update the workspace config. -/// Returns an error if the path cannot be resolved. 
-pub(crate) fn ensure_base_path(workspace: &mut WorkspaceConfig, output: &Output) -> Result<()> { +/// If the configured path is missing, returns an error advising the user +/// to re-run `gisa setup`. +pub(crate) fn ensure_base_path(workspace: &WorkspaceConfig, output: &Output) -> Result<()> { let base_path = workspace.expanded_base_path(); if base_path.is_dir() { return Ok(()); @@ -20,47 +18,14 @@ pub(crate) fn ensure_base_path(workspace: &mut WorkspaceConfig, output: &Output) ))); } - let cwd = std::env::current_dir() - .map_err(|e| AppError::path(format!("Cannot determine current directory: {}", e)))?; - output.warn(&format!( - "Base path '{}' does not exist.", - workspace.base_path + "Base path '{}' does not exist. Run 'gisa setup' to reconfigure.", + base_path.display() )); - output.info(&format!("Current directory: {}", cwd.display())); - - let prompt = format!( - "Update workspace at '{}' to use '{}'? [y/N] ", - workspace.base_path, - cwd.display() - ); - - if confirm_stderr(&prompt)? { - workspace.base_path = cwd.to_string_lossy().to_string(); - WorkspaceManager::save(workspace)?; - output.success(&format!("Updated base path to '{}'", workspace.base_path)); - Ok(()) - } else { - Err(AppError::config(format!( - "Base path '{}' does not exist. \ - Move to the correct directory and retry, \ - or update manually with 'gisa setup'.", - base_path.display() - ))) - } -} - -/// Prompt on stderr and return true if the user answers y/yes. 
-fn confirm_stderr(prompt: &str) -> Result { - eprint!("{}", prompt); - io::stderr().flush()?; - - let stdin = io::stdin(); - let mut line = String::new(); - stdin.lock().read_line(&mut line)?; - - let answer = line.trim().to_lowercase(); - Ok(answer == "y" || answer == "yes") + Err(AppError::config(format!( + "Base path '{}' does not exist.", + base_path.display() + ))) } #[cfg(test)] diff --git a/src/commands/support/workspace_tests.rs b/src/commands/support/workspace_tests.rs index 5190f31..19ab951 100644 --- a/src/commands/support/workspace_tests.rs +++ b/src/commands/support/workspace_tests.rs @@ -4,14 +4,10 @@ use crate::output::{Output, Verbosity}; #[test] fn ensure_base_path_is_noop_when_path_exists() { let temp = tempfile::tempdir().unwrap(); - let mut workspace = WorkspaceConfig::new("ws", temp.path().to_string_lossy().to_string()); + let workspace = WorkspaceConfig::new_from_root(temp.path()); let output = Output::new(Verbosity::Quiet, false); - ensure_base_path(&mut workspace, &output).unwrap(); - assert_eq!( - workspace.base_path, - temp.path().to_string_lossy().to_string() - ); + ensure_base_path(&workspace, &output).unwrap(); } #[test] @@ -20,14 +16,18 @@ fn ensure_base_path_rejects_existing_file_path() { let file_path = temp.path().join("not-a-directory"); std::fs::write(&file_path, "x").unwrap(); - let mut workspace = WorkspaceConfig::new("ws", file_path.to_string_lossy().to_string()); + let workspace = WorkspaceConfig::new_from_root(&file_path); let output = Output::new(Verbosity::Quiet, false); - let err = ensure_base_path(&mut workspace, &output).unwrap_err(); + let err = ensure_base_path(&workspace, &output).unwrap_err(); assert!(err.to_string().contains("not a directory")); } #[test] -fn confirm_stderr_function_signature_is_stable() { - let _fn_ptr: fn(&str) -> Result = confirm_stderr; +fn ensure_base_path_errors_on_missing_path() { + let workspace = WorkspaceConfig::new_from_root(std::path::Path::new("/nonexistent/path/xyz")); + let output = 
Output::new(Verbosity::Quiet, false); + + let err = ensure_base_path(&workspace, &output).unwrap_err(); + assert!(err.to_string().contains("does not exist")); } diff --git a/src/commands/sync_cmd.rs b/src/commands/sync_cmd.rs index 0a806ed..1abc60e 100644 --- a/src/commands/sync_cmd.rs +++ b/src/commands/sync_cmd.rs @@ -26,7 +26,7 @@ pub async fn run(args: &SyncCmdArgs, config: &Config, output: &Output) -> Result // Resolve workspace and ensure base path exists (offer to fix if user moved it) let mut workspace = WorkspaceManager::resolve(args.workspace.as_deref(), config)?; - super::ensure_base_path(&mut workspace, output)?; + super::ensure_base_path(&workspace, output)?; output.info("Discovering repositories..."); let discovery_progress = DiscoveryProgressBar::new(verbosity); diff --git a/src/commands/sync_cmd_tests.rs b/src/commands/sync_cmd_tests.rs index 0af5801..447285c 100644 --- a/src/commands/sync_cmd_tests.rs +++ b/src/commands/sync_cmd_tests.rs @@ -60,7 +60,13 @@ async fn run_returns_error_for_unknown_workspace_name() { } let err = result.unwrap_err(); - assert!(err.to_string().contains("No workspace configured")); + assert!( + err.to_string().contains("No workspace configured") + || err.to_string().contains("No workspace config found") + || err.to_string().contains("Configuration error"), + "unexpected error: {}", + err + ); } #[test] diff --git a/src/commands/workspace.rs b/src/commands/workspace.rs index b7e5e36..c9a3cc9 100644 --- a/src/commands/workspace.rs +++ b/src/commands/workspace.rs @@ -12,8 +12,8 @@ pub fn run(args: &WorkspaceArgs, config: &Config, output: &Output) -> Result<()> WorkspaceCommand::Default(default_args) => { if default_args.clear { clear_default(output) - } else if let Some(ref name) = default_args.name { - set_default(name, output) + } else if let Some(ref selector) = default_args.name { + set_default(selector, config, output) } else { show_default(config, output) } @@ -29,10 +29,11 @@ fn list(config: &Config, output: &Output) 
-> Result<()> { return Ok(()); } - let default_name = config.default_workspace.as_deref().unwrap_or(""); + let default_path = config.default_workspace.as_deref().unwrap_or(""); for ws in &workspaces { - let marker = if ws.name == default_name { "*" } else { " " }; + let ws_path = crate::config::workspace::tilde_collapse_path(&ws.root_path); + let marker = if ws_path == default_path { "*" } else { " " }; let last_synced = ws.last_synced.as_deref().unwrap_or("never"); let org_info = if ws.orgs.is_empty() { "all orgs".to_string() @@ -43,12 +44,14 @@ fn list(config: &Config, output: &Output) -> Result<()> { output.plain(&format!( " {} {} ({}, {}, last synced: {})", - marker, ws.base_path, provider_label, org_info, last_synced + marker, ws_path, provider_label, org_info, last_synced )); } - if !default_name.is_empty() { - if let Ok(default_ws) = WorkspaceManager::load(default_name) { + if !default_path.is_empty() { + let expanded = shellexpand::tilde(default_path); + let root = std::path::Path::new(expanded.as_ref()); + if let Ok(default_ws) = WorkspaceManager::load(root) { output.plain(""); output.info(&format!("Default: {}", default_ws.display_label())); } @@ -59,26 +62,25 @@ fn list(config: &Config, output: &Output) -> Result<()> { fn show_default(config: &Config, output: &Output) -> Result<()> { match &config.default_workspace { - Some(name) => { - if let Ok(ws) = WorkspaceManager::load(name) { + Some(path_str) => { + let expanded = shellexpand::tilde(path_str); + let root = std::path::Path::new(expanded.as_ref()); + if let Ok(ws) = WorkspaceManager::load(root) { output.info(&format!("Default workspace: {}", ws.display_label())); } else { - output.info(&format!("Default workspace: {} (not found)", name)); + output.info(&format!("Default workspace: {} (not found)", path_str)); } } - None => output.info("No default workspace set. Use 'gisa workspace default '."), + None => output.info("No default workspace set. 
Use 'gisa workspace default '."), } Ok(()) } -fn set_default(name_or_path: &str, output: &Output) -> Result<()> { - // Try name first (backward compat), then path - let ws = match WorkspaceManager::load(name_or_path) { - Ok(ws) => ws, - Err(_) => WorkspaceManager::load_by_path(name_or_path)?, - }; +fn set_default(selector: &str, config: &Config, output: &Output) -> Result<()> { + let ws = WorkspaceManager::resolve(Some(selector), config)?; - Config::save_default_workspace(Some(&ws.name))?; + let tilde_path = crate::config::workspace::tilde_collapse_path(&ws.root_path); + Config::save_default_workspace(Some(&tilde_path))?; output.success(&format!( "Default workspace set to '{}'", ws.display_label() diff --git a/src/config/mod.rs b/src/config/mod.rs index f7c8f4f..f5128c7 100644 --- a/src/config/mod.rs +++ b/src/config/mod.rs @@ -6,12 +6,8 @@ //! # Example Configuration //! //! ```toml -//! base_path = "~/github" +//! structure = "{org}/{repo}" //! concurrency = 4 -//! -//! [[providers]] -//! kind = "github" -//! auth = "gh-cli" //! ``` mod parser; @@ -22,7 +18,6 @@ pub mod workspace_policy; pub mod workspace_store; pub use parser::{Config, ConfigCloneOptions, FilterOptions, SyncMode}; -pub use provider_config::{AuthMethod, ProviderEntry}; pub use workspace::{WorkspaceConfig, WorkspaceProvider}; pub use workspace_manager::WorkspaceManager; pub use workspace_policy::WorkspacePolicy; diff --git a/src/config/parser.rs b/src/config/parser.rs index a088592..723d902 100644 --- a/src/config/parser.rs +++ b/src/config/parser.rs @@ -2,7 +2,6 @@ //! //! Handles loading and parsing of config.toml files. 
-use super::provider_config::ProviderEntry; use crate::errors::AppError; use crate::operations::clone::{DEFAULT_CONCURRENCY, MAX_CONCURRENCY}; use serde::{Deserialize, Serialize}; @@ -86,7 +85,7 @@ pub struct Config { #[serde(default)] pub sync_mode: SyncMode, - /// Default workspace name (used when --workspace is not specified and multiple exist) + /// Default workspace path (used when --workspace is not specified and multiple exist) #[serde(default)] pub default_workspace: Option, @@ -103,9 +102,9 @@ pub struct Config { #[serde(default)] pub filters: FilterOptions, - /// Provider configurations - #[serde(default = "default_providers")] - pub providers: Vec, + /// Registry of known workspace root paths (tilde-collapsed). + #[serde(default)] + pub workspaces: Vec, } fn default_structure() -> String { @@ -120,10 +119,6 @@ fn default_refresh_interval() -> u64 { 30 } -fn default_providers() -> Vec { - vec![ProviderEntry::github()] -} - impl Default for Config { fn default() -> Self { Self { @@ -134,7 +129,7 @@ impl Default for Config { refresh_interval: default_refresh_interval(), clone: ConfigCloneOptions::default(), filters: FilterOptions::default(), - providers: default_providers(), + workspaces: Vec::new(), } } } @@ -204,17 +199,6 @@ impl Config { )); } - // Validate providers - if self.providers.is_empty() { - return Err(AppError::config("At least one provider must be configured")); - } - - for (i, provider) in self.providers.iter().enumerate() { - provider - .validate() - .map_err(|e| AppError::config(format!("Provider {} error: {}", i + 1, e)))?; - } - Ok(()) } @@ -256,13 +240,6 @@ include_forks = false # Exclude specific repos # exclude_repos = ["org/repo-to-skip"] - -# Provider configuration (default: GitHub.com with gh CLI auth) -[[providers]] -kind = "github" -auth = "gh-cli" -prefer_ssh = true -# base_path = "~/github" "# } @@ -349,9 +326,82 @@ prefer_ssh = true Ok(()) } - /// Returns enabled providers only. 
- pub fn enabled_providers(&self) -> impl Iterator { - self.providers.iter().filter(|p| p.enabled) + /// Add a workspace path to the global registry. + pub fn add_to_registry(path: &str) -> Result<(), AppError> { + Self::add_to_registry_at(&Self::default_path()?, path) + } + + /// Add a workspace path to the registry in a specific config file. + pub fn add_to_registry_at(config_path: &Path, path: &str) -> Result<(), AppError> { + if !config_path.exists() { + return Err(AppError::config( + "Config file not found. Run 'gisa init' first.", + )); + } + Self::modify_registry_at(config_path, Some(path), None) + } + + /// Remove a workspace path from the global registry. + pub fn remove_from_registry(path: &str) -> Result<(), AppError> { + Self::remove_from_registry_at(&Self::default_path()?, path) + } + + /// Remove a workspace path from the registry in a specific config file. + pub fn remove_from_registry_at(config_path: &Path, path: &str) -> Result<(), AppError> { + if !config_path.exists() { + return Ok(()); + } + Self::modify_registry_at(config_path, None, Some(path)) + } + + /// Add or remove a path from the workspaces registry in the config file. 
+ fn modify_registry_at( + config_path: &Path, + add: Option<&str>, + remove: Option<&str>, + ) -> Result<(), AppError> { + let content = std::fs::read_to_string(config_path) + .map_err(|e| AppError::config(format!("Failed to read config: {}", e)))?; + + let mut doc: toml::Value = toml::from_str(&content) + .map_err(|e| AppError::config(format!("Failed to parse config: {}", e)))?; + + let table = doc + .as_table_mut() + .ok_or_else(|| AppError::config("Invalid config: expected root table"))?; + + if let Some(existing) = table.get("workspaces") { + if !existing.is_array() { + return Err(AppError::config( + "Invalid config: 'workspaces' must be an array", + )); + } + } + + let workspaces = table + .entry("workspaces") + .or_insert_with(|| toml::Value::Array(Vec::new())); + let arr = workspaces + .as_array_mut() + .ok_or_else(|| AppError::config("Invalid config: 'workspaces' must be an array"))?; + + if let Some(path_to_add) = add { + let val = toml::Value::String(path_to_add.to_string()); + if !arr.contains(&val) { + arr.push(val); + } + } + if let Some(path_to_remove) = remove { + arr.retain(|v| v.as_str().map(|s| s != path_to_remove).unwrap_or(true)); + } + + let new_content = toml::to_string_pretty(&doc) + .map_err(|e| AppError::config(format!("Failed to serialize config: {}", e)))?; + + std::fs::write(config_path, new_content) + .map_err(|e| AppError::config(format!("Failed to write config: {}", e)))?; + + Ok(()) } } diff --git a/src/config/parser_tests.rs b/src/config/parser_tests.rs index 99a622f..1b82064 100644 --- a/src/config/parser_tests.rs +++ b/src/config/parser_tests.rs @@ -9,7 +9,6 @@ fn test_default_config() { assert_eq!(config.sync_mode, SyncMode::Fetch); assert!(!config.filters.include_archived); assert!(!config.filters.include_forks); - assert_eq!(config.providers.len(), 1); } #[test] @@ -37,10 +36,6 @@ include_archived = true include_forks = true orgs = ["my-org"] exclude_repos = ["my-org/skip-this"] - -[[providers]] -kind = "github" -auth = 
"gh-cli" "#; let config = Config::parse(content).unwrap(); @@ -55,25 +50,6 @@ auth = "gh-cli" assert_eq!(config.filters.exclude_repos, vec!["my-org/skip-this"]); } -#[test] -fn test_load_multi_provider_config() { - let content = r#" -[[providers]] -kind = "github" -auth = "gh-cli" - -[[providers]] -kind = "github" -name = "Work" -auth = "gh-cli" -"#; - - let config = Config::parse(content).unwrap(); - assert_eq!(config.providers.len(), 2); - assert_eq!(config.providers[0].kind, crate::types::ProviderKind::GitHub); - assert_eq!(config.providers[1].name, Some("Work".to_string())); -} - #[test] fn test_missing_file_returns_defaults() { let config = Config::load_from(Path::new("/nonexistent/config.toml")).unwrap(); @@ -101,17 +77,6 @@ fn test_validation_rejects_high_concurrency() { assert!(result.is_err()); } -#[test] -fn test_validation_rejects_empty_providers() { - let config = Config { - providers: vec![], - ..Config::default() - }; - let result = config.validate(); - assert!(result.is_err()); - assert!(result.unwrap_err().to_string().contains("provider")); -} - #[test] fn test_sync_mode_from_str() { assert_eq!("fetch".parse::().unwrap(), SyncMode::Fetch); @@ -127,30 +92,6 @@ fn test_default_toml_is_valid() { assert!(result.is_ok(), "Default TOML should be valid: {:?}", result); } -#[test] -fn test_enabled_providers_filter() { - let config = Config { - providers: vec![ - ProviderEntry { - enabled: true, - ..ProviderEntry::github() - }, - ProviderEntry { - enabled: false, - ..ProviderEntry::github() - }, - ProviderEntry { - enabled: true, - ..ProviderEntry::github() - }, - ], - ..Config::default() - }; - - let enabled: Vec<_> = config.enabled_providers().collect(); - assert_eq!(enabled.len(), 2); -} - #[test] fn test_default_config_has_no_default_workspace() { let config = Config::default(); @@ -160,22 +101,16 @@ fn test_default_config_has_no_default_workspace() { #[test] fn test_parse_config_with_default_workspace() { let content = r#" -default_workspace = "my-ws" - 
-[[providers]] -kind = "github" -auth = "gh-cli" +default_workspace = "~/repos" "#; let config = Config::parse(content).unwrap(); - assert_eq!(config.default_workspace, Some("my-ws".to_string())); + assert_eq!(config.default_workspace, Some("~/repos".to_string())); } #[test] fn test_parse_config_without_default_workspace() { let content = r#" -[[providers]] -kind = "github" -auth = "gh-cli" +concurrency = 4 "#; let config = Config::parse(content).unwrap(); assert!(config.default_workspace.is_none()); @@ -239,10 +174,6 @@ fn test_save_default_workspace_to_replace_without_sync_mode() { structure = "{org}/{repo}" concurrency = 8 default_workspace = "ws-old" - -[[providers]] -kind = "github" -auth = "gh-cli" "#; std::fs::write(&path, content).unwrap(); @@ -261,3 +192,46 @@ fn test_save_default_workspace_to_nonexistent_file() { Config::save_default_workspace_to(Path::new("/nonexistent/config.toml"), Some("ws")); assert!(result.is_err()); } + +#[test] +fn test_add_to_registry_at_returns_error_when_config_missing() { + let temp = tempfile::TempDir::new().unwrap(); + let path = temp.path().join("missing-config.toml"); + + let err = Config::add_to_registry_at(&path, "~/repos").unwrap_err(); + assert!(err.to_string().contains("Run 'gisa init' first")); +} + +#[test] +fn test_add_to_registry_at_adds_path_without_duplicates() { + let temp = tempfile::TempDir::new().unwrap(); + let path = temp.path().join("config.toml"); + std::fs::write(&path, Config::default_toml()).unwrap(); + + Config::add_to_registry_at(&path, "~/repos").unwrap(); + Config::add_to_registry_at(&path, "~/repos").unwrap(); + + let config = Config::load_from(&path).unwrap(); + assert_eq!(config.workspaces, vec!["~/repos".to_string()]); +} + +#[test] +fn test_add_to_registry_at_errors_when_workspaces_is_not_array() { + let temp = tempfile::TempDir::new().unwrap(); + let path = temp.path().join("config.toml"); + std::fs::write( + &path, + r#" +concurrency = 4 +workspaces = "invalid" +"#, + ) + .unwrap(); + + let 
err = Config::add_to_registry_at(&path, "~/repos").unwrap_err(); + assert!(err.to_string().contains("workspaces")); + + // Ensure malformed field was not silently rewritten. + let content = std::fs::read_to_string(&path).unwrap(); + assert!(content.contains(r#"workspaces = "invalid""#)); +} diff --git a/src/config/provider_config.rs b/src/config/provider_config.rs index eb84896..a485941 100644 --- a/src/config/provider_config.rs +++ b/src/config/provider_config.rs @@ -1,101 +1,12 @@ //! Provider-specific configuration. //! -//! Defines how individual Git hosting providers are configured, -//! including authentication and API endpoints. +//! This module is kept minimal — provider configuration is now handled +//! directly by `WorkspaceProvider` in the workspace config. The `AuthMethod` +//! enum has been removed since gh-cli is the only supported auth method and +//! is hardcoded in the auth module. -use crate::types::ProviderKind; -use serde::{Deserialize, Serialize}; - -/// How to authenticate with a provider. -/// -/// Currently only GitHub CLI is supported. -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)] -#[serde(rename_all = "kebab-case")] -pub enum AuthMethod { - /// Use GitHub CLI (`gh auth token`) - #[default] - GhCli, -} - -/// Configuration for a single Git hosting provider. -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ProviderEntry { - /// The type of provider (github, gitlab, etc.) 
- #[serde(default)] - pub kind: ProviderKind, - - /// Display name for this provider instance - #[serde(default)] - pub name: Option, - - /// API base URL (required for GitHub Enterprise, optional for others) - #[serde(default)] - pub api_url: Option, - - /// How to authenticate - #[serde(default)] - pub auth: AuthMethod, - - /// Whether to prefer SSH for cloning (default: true) - #[serde(default = "default_true")] - pub prefer_ssh: bool, - - /// Base directory override for this provider's repos - #[serde(default)] - pub base_path: Option, - - /// Whether this provider is enabled - #[serde(default = "default_true")] - pub enabled: bool, -} - -fn default_true() -> bool { - true -} - -impl Default for ProviderEntry { - fn default() -> Self { - Self { - kind: ProviderKind::GitHub, - name: None, - api_url: None, - auth: AuthMethod::GhCli, - prefer_ssh: true, - base_path: None, - enabled: true, - } - } -} - -impl ProviderEntry { - /// Creates a default GitHub.com provider entry. - pub fn github() -> Self { - Self { - kind: ProviderKind::GitHub, - name: Some("GitHub".to_string()), - ..Default::default() - } - } - - /// Returns the effective API URL for this provider. - pub fn effective_api_url(&self) -> String { - self.api_url - .clone() - .unwrap_or_else(|| self.kind.default_api_url().to_string()) - } - - /// Returns the display name for this provider. - pub fn display_name(&self) -> String { - self.name - .clone() - .unwrap_or_else(|| self.kind.display_name().to_string()) - } - - /// Validates the provider configuration. - pub fn validate(&self) -> Result<(), String> { - Ok(()) - } -} +// This module is intentionally kept as a placeholder. All provider +// configuration is now in workspace.rs (WorkspaceProvider). 
#[cfg(test)] #[path = "provider_config_tests.rs"] diff --git a/src/config/provider_config_tests.rs b/src/config/provider_config_tests.rs index 7e93ba8..278138a 100644 --- a/src/config/provider_config_tests.rs +++ b/src/config/provider_config_tests.rs @@ -1,63 +1,54 @@ -use super::*; +// Provider configuration is now handled by WorkspaceProvider in workspace.rs. +// These tests verify the WorkspaceProvider API used throughout the codebase. -#[test] -fn test_default_provider_entry() { - let entry = ProviderEntry::default(); - assert_eq!(entry.kind, ProviderKind::GitHub); - assert_eq!(entry.auth, AuthMethod::GhCli); - assert!(entry.prefer_ssh); - assert!(entry.enabled); -} +use crate::config::WorkspaceProvider; +use crate::types::ProviderKind; #[test] -fn test_github_factory() { - let entry = ProviderEntry::github(); - assert_eq!(entry.kind, ProviderKind::GitHub); - assert_eq!(entry.display_name(), "GitHub"); +fn test_default_workspace_provider() { + let provider = WorkspaceProvider::default(); + assert_eq!(provider.kind, ProviderKind::GitHub); + assert!(provider.prefer_ssh); + assert!(provider.api_url.is_none()); } #[test] -fn test_effective_api_url_with_override() { - let mut entry = ProviderEntry::github(); - entry.api_url = Some("https://custom-api.example.com".to_string()); - assert_eq!(entry.effective_api_url(), "https://custom-api.example.com"); +fn test_workspace_provider_effective_api_url_default() { + let provider = WorkspaceProvider::default(); + assert_eq!(provider.effective_api_url(), "https://api.github.com"); } #[test] -fn test_effective_api_url_default() { - let entry = ProviderEntry::github(); - assert_eq!(entry.effective_api_url(), "https://api.github.com"); +fn test_workspace_provider_effective_api_url_override() { + let provider = WorkspaceProvider { + kind: ProviderKind::GitHub, + api_url: Some("https://github.example.com/api/v3".to_string()), + prefer_ssh: true, + }; + assert_eq!( + provider.effective_api_url(), + 
"https://github.example.com/api/v3" + ); } #[test] -fn test_validate_valid_config() { - let entry = ProviderEntry::github(); - assert!(entry.validate().is_ok()); +fn test_workspace_provider_display_name() { + let provider = WorkspaceProvider::default(); + assert_eq!(provider.display_name(), "GitHub"); } #[test] -fn test_serde_roundtrip() { - let entry = ProviderEntry { +fn test_workspace_provider_serde_roundtrip() { + let provider = WorkspaceProvider { kind: ProviderKind::GitHub, - name: Some("My GitHub".to_string()), - auth: AuthMethod::GhCli, + api_url: None, prefer_ssh: false, - ..Default::default() }; - let toml = toml::to_string(&entry).unwrap(); - let parsed: ProviderEntry = toml::from_str(&toml).unwrap(); + let toml = toml::to_string(&provider).unwrap(); + let parsed: WorkspaceProvider = toml::from_str(&toml).unwrap(); - assert_eq!(parsed.kind, entry.kind); - assert_eq!(parsed.name, entry.name); - assert_eq!(parsed.auth, entry.auth); - assert_eq!(parsed.prefer_ssh, entry.prefer_ssh); -} - -#[test] -fn test_auth_method_serde() { - assert_eq!( - serde_json::to_string(&AuthMethod::GhCli).unwrap(), - "\"gh-cli\"" - ); + assert_eq!(parsed.kind, provider.kind); + assert_eq!(parsed.api_url, provider.api_url); + assert_eq!(parsed.prefer_ssh, provider.prefer_ssh); } diff --git a/src/config/workspace.rs b/src/config/workspace.rs index 79a8eb7..323dd7e 100644 --- a/src/config/workspace.rs +++ b/src/config/workspace.rs @@ -1,13 +1,14 @@ //! Workspace configuration. //! //! Each workspace represents a sync target folder with its own provider, -//! selected organizations, and repository filters. Each workspace is a -//! subdirectory of `~/.config/git-same//` containing `workspace-config.toml`. +//! selected organizations, and repository filters. Workspace config lives +//! inside the sync folder itself at `/.git-same/config.toml`, making +//! workspaces portable and self-describing. 
-use super::provider_config::AuthMethod; use super::{ConfigCloneOptions, FilterOptions, SyncMode}; use crate::types::ProviderKind; use serde::{Deserialize, Serialize}; +use std::path::{Path, PathBuf}; /// Provider configuration scoped to a single workspace. #[derive(Debug, Clone, Serialize, Deserialize)] @@ -16,10 +17,6 @@ pub struct WorkspaceProvider { #[serde(default)] pub kind: ProviderKind, - /// How to authenticate - #[serde(default)] - pub auth: AuthMethod, - /// API base URL (required for GitHub Enterprise) #[serde(default, skip_serializing_if = "Option::is_none")] pub api_url: Option, @@ -37,7 +34,6 @@ impl Default for WorkspaceProvider { fn default() -> Self { Self { kind: ProviderKind::GitHub, - auth: AuthMethod::GhCli, api_url: None, prefer_ssh: true, } @@ -45,31 +41,30 @@ impl Default for WorkspaceProvider { } impl WorkspaceProvider { - /// Convert to a `ProviderEntry` for use with existing provider/auth infrastructure. - pub fn to_provider_entry(&self) -> super::ProviderEntry { - super::ProviderEntry { - kind: self.kind, - name: Some(self.kind.display_name().to_string()), - api_url: self.api_url.clone(), - auth: self.auth.clone(), - prefer_ssh: self.prefer_ssh, - base_path: None, - enabled: true, - } + /// Returns the effective API URL for this provider. + pub fn effective_api_url(&self) -> String { + self.api_url + .clone() + .unwrap_or_else(|| self.kind.default_api_url().to_string()) + } + + /// Returns the display name for this provider. + pub fn display_name(&self) -> &str { + self.kind.display_name() } } /// Configuration for a single workspace (sync target folder). +/// +/// Stored at `/.git-same/config.toml`. The `root_path` field is not +/// serialized — it is populated at load time from the `.git-same/` parent. #[derive(Debug, Clone, Serialize, Deserialize)] pub struct WorkspaceConfig { - /// Workspace name, derived from the config folder name at load time. + /// Absolute path to the workspace root (parent of `.git-same/`). 
/// - /// Not stored in `workspace-config.toml` — the folder name is the source of truth. - #[serde(skip_serializing, default)] - pub name: String, - - /// Absolute path to the folder where repos are cloned. - pub base_path: String, + /// Not stored in config.toml — derived from the file's location at load time. + #[serde(skip)] + pub root_path: PathBuf, /// Provider configuration for this workspace. pub provider: WorkspaceProvider, @@ -121,11 +116,10 @@ pub struct WorkspaceConfig { } impl WorkspaceConfig { - /// Create a new workspace config with minimal required fields. - pub fn new(name: impl Into, base_path: impl Into) -> Self { + /// Create a new workspace config for the given root directory. + pub fn new_from_root(root: &Path) -> Self { Self { - name: name.into(), - base_path: base_path.into(), + root_path: root.to_path_buf(), provider: WorkspaceProvider::default(), username: String::new(), orgs: Vec::new(), @@ -141,18 +135,15 @@ impl WorkspaceConfig { } } - /// Expand ~ in base_path to the actual home directory. - pub fn expanded_base_path(&self) -> std::path::PathBuf { - let expanded = shellexpand::tilde(&self.base_path); - std::path::PathBuf::from(expanded.as_ref()) + /// Returns the workspace root path (equivalent of old `expanded_base_path()`). + pub fn expanded_base_path(&self) -> PathBuf { + self.root_path.clone() } /// Returns a user-friendly label: `"~/repos (GitHub)"`. - /// - /// This is the primary user-facing workspace identity. The internal `name` - /// field is a filesystem key and should never be shown to users. pub fn display_label(&self) -> String { - format!("{} ({})", self.base_path, self.provider.kind.display_name()) + let path_str = tilde_collapse_path(&self.root_path); + format!("{} ({})", path_str, self.provider.kind.display_name()) } /// Returns a short display summary for selectors. @@ -181,6 +172,25 @@ impl WorkspaceConfig { } } +/// Collapse the home directory prefix to `~` for display. 
+pub fn tilde_collapse_path(path: &Path) -> String { + let home = std::env::var("HOME") + .or_else(|_| std::env::var("USERPROFILE")) + .ok(); + + if let Some(home) = home { + let home_path = Path::new(&home); + if let Ok(suffix) = path.strip_prefix(home_path) { + if suffix.as_os_str().is_empty() { + return "~".to_string(); + } + return format!("~{}{}", std::path::MAIN_SEPARATOR, suffix.to_string_lossy()); + } + } + + path.to_string_lossy().to_string() +} + #[cfg(test)] #[path = "workspace_tests.rs"] mod tests; diff --git a/src/config/workspace_manager.rs b/src/config/workspace_manager.rs index 4d392d9..c47eabd 100644 --- a/src/config/workspace_manager.rs +++ b/src/config/workspace_manager.rs @@ -1,31 +1,24 @@ //! Workspace manager facade. //! -//! This compatibility layer keeps the existing `WorkspaceManager` API stable -//! while delegating storage and policy responsibilities to dedicated modules. +//! Keeps a stable API while delegating to `WorkspaceStore` and `WorkspacePolicy`. use super::workspace::WorkspaceConfig; use super::{workspace_policy::WorkspacePolicy, workspace_store::WorkspaceStore}; use crate::errors::AppError; -use crate::types::ProviderKind; use std::path::{Path, PathBuf}; /// Compatibility facade for workspace operations. pub struct WorkspaceManager; impl WorkspaceManager { - /// Returns the config directory: `~/.config/git-same/`. - pub fn config_dir() -> Result { - WorkspaceStore::config_dir() - } - /// List all workspace configs. pub fn list() -> Result, AppError> { WorkspaceStore::list() } - /// Load a specific workspace by name. - pub fn load(name: &str) -> Result { - WorkspaceStore::load(name) + /// Load a specific workspace by root path. + pub fn load(root: &Path) -> Result { + WorkspaceStore::load(root) } /// Save a workspace config (create or update). @@ -33,29 +26,29 @@ impl WorkspaceManager { WorkspaceStore::save(workspace) } - /// Delete a workspace by name. 
- pub fn delete(name: &str) -> Result<(), AppError> { - WorkspaceStore::delete(name) + /// Delete a workspace by root path. + pub fn delete(root: &Path) -> Result<(), AppError> { + WorkspaceStore::delete(root) } - /// Find a workspace whose base_path matches the given directory. - pub fn find_by_path(path: &Path) -> Result, AppError> { - WorkspaceStore::find_by_path(path) + /// Returns the `.git-same/` directory for a workspace root. + pub fn dot_dir(root: &Path) -> PathBuf { + WorkspaceStore::dot_dir(root) } - /// Load a workspace by its base_path string. - pub fn load_by_path(path_str: &str) -> Result { - WorkspaceStore::load_by_path(path_str) + /// Returns the cache file path for a workspace root. + pub fn cache_path(root: &Path) -> PathBuf { + WorkspaceStore::cache_path(root) } - /// Derive a workspace name from a base path and provider. - pub fn name_from_path(path: &Path, provider: ProviderKind) -> String { - WorkspacePolicy::name_from_path(path, provider) + /// Returns the sync history file path for a workspace root. + pub fn sync_history_path(root: &Path) -> PathBuf { + WorkspaceStore::sync_history_path(root) } - /// Return a unique workspace name, appending `-2`, `-3`, etc. on collision. - pub fn unique_name(base: &str) -> Result { - WorkspacePolicy::unique_name(base) + /// Walk up from `start` to find the nearest `.git-same/config.toml`. + pub fn detect_from_cwd(start: &Path) -> Option { + WorkspacePolicy::detect_from_cwd(start) } /// Resolve which workspace to use. @@ -72,16 +65,6 @@ impl WorkspaceManager { ) -> Result { WorkspacePolicy::resolve_from_list(workspaces) } - - /// Returns the directory path for a workspace: `~/.config/git-same//`. - pub fn workspace_dir(name: &str) -> Result { - WorkspaceStore::workspace_dir(name) - } - - /// Returns the cache file path for a workspace. 
- pub fn cache_path(name: &str) -> Result { - WorkspaceStore::cache_path(name) - } } #[cfg(test)] diff --git a/src/config/workspace_manager_tests.rs b/src/config/workspace_manager_tests.rs index 6481b59..1275c72 100644 --- a/src/config/workspace_manager_tests.rs +++ b/src/config/workspace_manager_tests.rs @@ -1,8 +1,31 @@ use super::*; #[test] -fn test_name_from_path_simple() { - let name = - WorkspaceManager::name_from_path(Path::new("/home/user/github"), ProviderKind::GitHub); - assert_eq!(name, "github-github"); +fn dot_dir_is_derived_from_workspace_root() { + let root = Path::new("/tmp/my-workspace"); + let dot_dir = WorkspaceManager::dot_dir(root); + assert_eq!( + dot_dir, + std::path::PathBuf::from("/tmp/my-workspace/.git-same") + ); +} + +#[test] +fn cache_path_is_inside_dot_dir() { + let root = Path::new("/tmp/my-workspace"); + let cache = WorkspaceManager::cache_path(root); + assert_eq!( + cache, + std::path::PathBuf::from("/tmp/my-workspace/.git-same/cache.json") + ); +} + +#[test] +fn sync_history_path_is_inside_dot_dir() { + let root = Path::new("/tmp/my-workspace"); + let hist = WorkspaceManager::sync_history_path(root); + assert_eq!( + hist, + std::path::PathBuf::from("/tmp/my-workspace/.git-same/sync-history.json") + ); } diff --git a/src/config/workspace_policy.rs b/src/config/workspace_policy.rs index 44d4cf9..fdb8ccd 100644 --- a/src/config/workspace_policy.rs +++ b/src/config/workspace_policy.rs @@ -1,78 +1,86 @@ -//! Workspace resolution and naming rules (policy concern only). +//! Workspace resolution rules (policy concern only). use super::parser::Config; +use super::workspace::tilde_collapse_path; use super::workspace::WorkspaceConfig; use super::workspace_store::WorkspaceStore; use crate::errors::AppError; -use crate::types::ProviderKind; use std::path::Path; /// Workspace policy helpers. pub struct WorkspacePolicy; impl WorkspacePolicy { - /// Derive a workspace name from a base path and provider. 
- pub fn name_from_path(path: &Path, provider: ProviderKind) -> String { - let lossy = path.to_string_lossy(); - let expanded = shellexpand::tilde(&lossy); - let path = Path::new(expanded.as_ref()); - - let last_component = path - .components() - .filter_map(|c| { - if let std::path::Component::Normal(s) = c { - s.to_str() - } else { - None - } - }) - .next_back() - .unwrap_or("workspace"); - - let prefix = match provider { - ProviderKind::GitHub => "github", - ProviderKind::GitHubEnterprise => "ghe", - ProviderKind::GitLab => "gitlab", - ProviderKind::GitLabSelfManaged => "glsm", - ProviderKind::Codeberg => "codeberg", - ProviderKind::Bitbucket => "bitbucket", - }; - format!("{}-{}", prefix, last_component) - .to_lowercase() - .replace([' ', '_'], "-") - } - - /// Return a unique workspace name, appending `-2`, `-3`, etc. on collision. - pub fn unique_name(base: &str) -> Result { - let dir = WorkspaceStore::workspace_dir(base)?; - if !dir.exists() { - return Ok(base.to_string()); - } - - for suffix in 2..=100 { - let candidate = format!("{}-{}", base, suffix); - let candidate_dir = WorkspaceStore::workspace_dir(&candidate)?; - if !candidate_dir.exists() { - return Ok(candidate); + /// Walk up from `start` to find the nearest `.git-same/config.toml`. + /// + /// Returns the workspace root (parent of `.git-same/`) if found. + pub fn detect_from_cwd(start: &Path) -> Option { + let mut current = start.to_path_buf(); + loop { + let config = WorkspaceStore::config_path(¤t); + if config.exists() { + return Some(current); + } + if !current.pop() { + break; } } - - Err(AppError::config(format!( - "Could not find a unique workspace name based on '{}'", - base - ))) + None } /// Resolve which workspace to use. + /// + /// Priority: + /// 1. Explicit `--workspace ` argument + /// 2. CWD auto-detection (walk up looking for `.git-same/`) + /// 3. Global `default_workspace` path + /// 4. Single-workspace auto-select + /// 5. 
Error pub fn resolve(name: Option<&str>, config: &Config) -> Result { + // 1. Explicit selector (path or unique folder name) if let Some(value) = name { - return WorkspaceStore::load(value).or_else(|_| WorkspaceStore::load_by_path(value)); + let expanded = shellexpand::tilde(value); + let root = Path::new(expanded.as_ref()); + match WorkspaceStore::load(root) { + Ok(ws) => return Ok(ws), + Err(path_err) => { + let workspaces = WorkspaceStore::list()?; + match Self::resolve_selector_from_list(value, workspaces) { + Ok(ws) => return Ok(ws), + Err(selector_err) => { + let is_ambiguous = selector_err.to_string().contains("ambiguous"); + if is_ambiguous { + return Err(selector_err); + } + if Self::looks_like_path(value) { + return Err(path_err); + } + return Err(AppError::config(format!( + "No workspace matched selector '{}'. Use 'gisa workspace list' and \ + pass a workspace folder name or path.", + value + ))); + } + } + } + } } - if let Some(ref default) = config.default_workspace { - return WorkspaceStore::load(default); + // 2. CWD auto-detection + if let Ok(cwd) = std::env::current_dir() { + if let Some(root) = Self::detect_from_cwd(&cwd) { + return WorkspaceStore::load(&root); + } + } + + // 3. Global default_workspace + if let Some(ref default_path) = config.default_workspace { + let expanded = shellexpand::tilde(default_path); + let root = Path::new(expanded.as_ref()); + return WorkspaceStore::load(root); } + // 4. Single-workspace auto-select (or error) let workspaces = WorkspaceStore::list()?; Self::resolve_from_list(workspaces) } @@ -92,13 +100,55 @@ impl WorkspacePolicy { _ => { let labels: Vec = workspaces.iter().map(|w| w.display_label()).collect(); Err(AppError::config(format!( - "Multiple workspaces configured. Use --workspace to select one, \ - or set a default with 'gisa workspace default ': {}", + "Multiple workspaces configured. 
Use --workspace to select one, \ + or set a default with 'gisa workspace default ': {}", labels.join(", ") ))) } } } + + fn resolve_selector_from_list( + selector: &str, + workspaces: Vec, + ) -> Result { + let matches: Vec = workspaces + .into_iter() + .filter(|ws| { + let folder_name_matches = ws + .root_path + .file_name() + .and_then(|name| name.to_str()) + .map(|name| name == selector) + .unwrap_or(false); + let path_matches = ws.root_path.to_string_lossy() == selector; + let tilde_path_matches = tilde_collapse_path(&ws.root_path) == selector; + folder_name_matches || path_matches || tilde_path_matches + }) + .collect(); + + match matches.len() { + 1 => Ok(matches + .into_iter() + .next() + .expect("single selector match exists")), + 0 => Err(AppError::config("No workspace matched selector")), + _ => { + let labels: Vec = matches.iter().map(|ws| ws.display_label()).collect(); + Err(AppError::config(format!( + "Workspace selector '{}' is ambiguous. Use an explicit path instead: {}", + selector, + labels.join(", ") + ))) + } + } + } + + fn looks_like_path(value: &str) -> bool { + value.contains(std::path::MAIN_SEPARATOR) + || value.starts_with('.') + || value.starts_with('~') + } } #[cfg(test)] diff --git a/src/config/workspace_policy_tests.rs b/src/config/workspace_policy_tests.rs index efb38b4..0a3b568 100644 --- a/src/config/workspace_policy_tests.rs +++ b/src/config/workspace_policy_tests.rs @@ -1,20 +1,5 @@ use super::*; -#[test] -fn name_from_path_uses_provider_prefix_and_normalizes() { - let name = WorkspacePolicy::name_from_path( - std::path::Path::new("~/Developer/My_Project"), - ProviderKind::GitHubEnterprise, - ); - assert_eq!(name, "ghe-my-project"); - - let github = WorkspacePolicy::name_from_path( - std::path::Path::new("~/repos/Personal"), - ProviderKind::GitHub, - ); - assert_eq!(github, "github-personal"); -} - #[test] fn resolve_from_list_errors_when_no_workspaces() { let err = WorkspacePolicy::resolve_from_list(Vec::new()).unwrap_err(); @@ 
-23,18 +8,66 @@ fn resolve_from_list_errors_when_no_workspaces() { #[test] fn resolve_from_list_returns_single_workspace() { - let ws = WorkspaceConfig::new("solo", "/tmp/solo"); + let ws = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/solo")); let resolved = WorkspacePolicy::resolve_from_list(vec![ws.clone()]).unwrap(); - assert_eq!(resolved.name, "solo"); - assert_eq!(resolved.base_path, "/tmp/solo"); + assert_eq!(resolved.root_path, std::path::PathBuf::from("/tmp/solo")); } #[test] fn resolve_from_list_errors_when_multiple_workspaces() { - let ws1 = WorkspaceConfig::new("a", "/tmp/a"); - let ws2 = WorkspaceConfig::new("b", "/tmp/b"); + let ws1 = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/a")); + let ws2 = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/b")); let err = WorkspacePolicy::resolve_from_list(vec![ws1, ws2]).unwrap_err(); assert!(err.to_string().contains("Multiple workspaces configured")); assert!(err.to_string().contains("--workspace")); } + +#[test] +fn resolve_selector_from_list_matches_unique_folder_name() { + let ws1 = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/alpha")); + let ws2 = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/bravo")); + + let resolved = WorkspacePolicy::resolve_selector_from_list("bravo", vec![ws1, ws2]).unwrap(); + assert_eq!(resolved.root_path, std::path::PathBuf::from("/tmp/bravo")); +} + +#[test] +fn resolve_selector_from_list_errors_when_folder_name_is_ambiguous() { + let ws1 = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/team-a/work")); + let ws2 = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/team-b/work")); + + let err = WorkspacePolicy::resolve_selector_from_list("work", vec![ws1, ws2]).unwrap_err(); + assert!(err.to_string().contains("ambiguous")); + assert!(err.to_string().contains("explicit path")); +} + +#[test] +fn looks_like_path_identifies_path_like_selectors() { + assert!(WorkspacePolicy::looks_like_path("~/repos")); + 
assert!(WorkspacePolicy::looks_like_path("./repos")); + assert!(WorkspacePolicy::looks_like_path("/tmp/repos")); + assert!(!WorkspacePolicy::looks_like_path("work")); +} + +#[test] +fn detect_from_cwd_returns_none_for_plain_tmp_dir() { + let temp = tempfile::tempdir().unwrap(); + // No .git-same directory present, so detection should return None + let result = WorkspacePolicy::detect_from_cwd(temp.path()); + assert!(result.is_none()); +} + +#[test] +fn detect_from_cwd_finds_workspace_root() { + let temp = tempfile::tempdir().unwrap(); + let dot_dir = temp.path().join(".git-same"); + let config_path = dot_dir.join("config.toml"); + std::fs::create_dir_all(&dot_dir).unwrap(); + // Write a minimal workspace config + let ws = WorkspaceConfig::new_from_root(temp.path()); + std::fs::write(&config_path, ws.to_toml().unwrap()).unwrap(); + + let found = WorkspacePolicy::detect_from_cwd(temp.path()); + assert!(found.is_some()); +} diff --git a/src/config/workspace_store.rs b/src/config/workspace_store.rs index c9abcbc..0b96ec0 100644 --- a/src/config/workspace_store.rs +++ b/src/config/workspace_store.rs @@ -1,219 +1,214 @@ -//! Workspace persistence (storage concern only). +//! Workspace persistence — stores workspace config inside the sync folder. +//! +//! Each workspace has a `.git-same/` directory inside its root that contains: +//! - `config.toml` — workspace configuration +//! - `cache.json` — discovery cache +//! - `sync-history.json` — sync history -use super::workspace::WorkspaceConfig; +use super::parser::Config; +use super::workspace::{tilde_collapse_path, WorkspaceConfig}; use crate::errors::AppError; -use std::path::{Component, Path, PathBuf}; +use std::path::{Path, PathBuf}; + +/// Name of the hidden workspace metadata directory. +pub const DOT_DIR: &str = ".git-same"; +/// Config file name inside the `.git-same/` directory. +pub const CONFIG_FILE: &str = "config.toml"; +/// Cache file name inside the `.git-same/` directory. 
+pub const CACHE_FILE: &str = "cache.json"; +/// Sync history file name inside the `.git-same/` directory. +pub const SYNC_HISTORY_FILE: &str = "sync-history.json"; /// Filesystem-backed workspace store. pub struct WorkspaceStore; impl WorkspaceStore { - /// Returns the config directory: `~/.config/git-same/`. - pub fn config_dir() -> Result { - let config_path = crate::config::Config::default_path()?; - config_path - .parent() - .map(|p| p.to_path_buf()) - .ok_or_else(|| AppError::config("Cannot determine config directory")) + /// Returns the `.git-same/` directory for a workspace root. + pub fn dot_dir(root: &Path) -> PathBuf { + root.join(DOT_DIR) } - /// List all workspace configs. - pub fn list() -> Result, AppError> { - let dir = Self::config_dir()?; - if !dir.exists() { - return Ok(Vec::new()); - } + /// Returns the config file path for a workspace root. + pub fn config_path(root: &Path) -> PathBuf { + Self::dot_dir(root).join(CONFIG_FILE) + } - let mut workspaces = Vec::new(); - let entries = std::fs::read_dir(&dir) - .map_err(|e| AppError::config(format!("Failed to read config directory: {}", e)))?; - - for entry in entries { - let entry = entry - .map_err(|e| AppError::config(format!("Failed to read directory entry: {}", e)))?; - let path = entry.path(); - if path.is_dir() { - let config_file = path.join("workspace-config.toml"); - if config_file.exists() { - match Self::load_from_path(&config_file) { - Ok(ws) => workspaces.push(ws), - Err(e) => { - tracing::warn!( - path = %config_file.display(), - error = %e, - "Skipping invalid workspace config" - ); - } - } - } - } - } + /// Returns the cache file path for a workspace root. + pub fn cache_path(root: &Path) -> PathBuf { + Self::dot_dir(root).join(CACHE_FILE) + } - workspaces.sort_by(|a, b| a.name.cmp(&b.name)); - Ok(workspaces) + /// Returns the sync history file path for a workspace root. 
+ pub fn sync_history_path(root: &Path) -> PathBuf { + Self::dot_dir(root).join(SYNC_HISTORY_FILE) } - /// Load a specific workspace by name. - pub fn load(name: &str) -> Result { - let path = Self::config_path(name)?; - if !path.exists() { + /// Load a workspace config from the given root directory. + /// + /// Reads `/.git-same/config.toml` and sets `root_path` from the directory. + pub fn load(root: &Path) -> Result { + let expanded = expand_path(root); + let config_path = Self::config_path(&expanded); + if !config_path.exists() { return Err(AppError::config(format!( - "Workspace '{}' not found at {}", - name, - path.display() + "No workspace config found at '{}'", + config_path.display() ))); } - Self::load_from_path(&path) + Self::load_from_path(&config_path) } - /// Save a workspace config (create or update). + /// Save a workspace config to `/.git-same/config.toml`. + /// + /// Creates the `.git-same/` directory if necessary and registers the workspace + /// in the global config registry. pub fn save(workspace: &WorkspaceConfig) -> Result<(), AppError> { - let path = Self::config_path(&workspace.name)?; - if let Some(parent) = path.parent() { - std::fs::create_dir_all(parent).map_err(|e| { - AppError::config(format!("Failed to create workspace directory: {}", e)) - })?; + let global_config_path = Config::default_path()?; + Self::save_with_registry_config_path(workspace, &global_config_path) + } + + /// Save a workspace config and register it in a specific global config file. + pub fn save_with_registry_config_path( + workspace: &WorkspaceConfig, + global_config_path: &Path, + ) -> Result<(), AppError> { + // Preflight: avoid partial workspace writes when global config is missing. + if !global_config_path.exists() { + return Err(AppError::config( + "Config file not found. 
Run 'gisa init' first.", + )); } - let content = workspace.to_toml()?; - std::fs::write(&path, content).map_err(|e| { + + let dot_dir = Self::dot_dir(&workspace.root_path); + let dot_dir_existed = dot_dir.exists(); + std::fs::create_dir_all(&dot_dir).map_err(|e| { AppError::config(format!( - "Failed to write workspace config at {}: {}", - path.display(), + "Failed to create workspace directory '{}': {}", + dot_dir.display(), e )) })?; - Ok(()) - } - /// Delete a workspace by name (removes the entire workspace directory). - pub fn delete(name: &str) -> Result<(), AppError> { - let dir = Self::workspace_dir(name)?; - if !dir.exists() { - return Err(AppError::config(format!("Workspace '{}' not found", name))); - } - std::fs::remove_dir_all(&dir).map_err(|e| { - AppError::config(format!("Failed to delete workspace '{}': {}", name, e)) - })?; - Ok(()) - } + let config_path = dot_dir.join(CONFIG_FILE); + let previous_config_content = if config_path.exists() { + Some(std::fs::read_to_string(&config_path).map_err(|e| { + AppError::config(format!( + "Failed to read existing workspace config at '{}': {}", + config_path.display(), + e + )) + })?) + } else { + None + }; - /// Find a workspace whose base_path matches the given directory. 
- pub fn find_by_path(path: &Path) -> Result, AppError> { - let workspaces = Self::list()?; - let canonical = std::fs::canonicalize(path).unwrap_or_else(|_| path.to_path_buf()); + let content = workspace.to_toml()?; + std::fs::write(&config_path, content).map_err(|e| { + AppError::config(format!( + "Failed to write workspace config at '{}': {}", + config_path.display(), + e + )) + })?; - for ws in workspaces { - let ws_path = ws.expanded_base_path(); - let ws_canonical = std::fs::canonicalize(&ws_path).unwrap_or_else(|_| ws_path.clone()); - if ws_canonical == canonical { - return Ok(Some(ws)); - } + // Register in global config + let tilde_path = tilde_collapse_path(&workspace.root_path); + if let Err(err) = Config::add_to_registry_at(global_config_path, &tilde_path) { + rollback_workspace_write( + &config_path, + previous_config_content.as_deref(), + &dot_dir, + !dot_dir_existed, + ); + return Err(err); } - Ok(None) + + Ok(()) } - /// Load a workspace by its base_path string. - pub fn load_by_path(path_str: &str) -> Result { - let workspaces = Self::list()?; + /// List all registered workspace configs. + /// + /// Reads the global `workspaces` registry and loads each entry. + /// Stale entries (where the config file no longer exists) are silently skipped. 
+ pub fn list() -> Result, AppError> { + let global = Config::load()?; + let mut workspaces = Vec::new(); - // Exact string match on base_path - for ws in &workspaces { - if ws.base_path == path_str { - return Ok(ws.clone()); + for path_str in &global.workspaces { + let expanded = shellexpand::tilde(path_str); + let root = Path::new(expanded.as_ref()); + let config_path = Self::config_path(root); + if !config_path.exists() { + tracing::debug!( + path = %path_str, + "Skipping stale workspace registry entry" + ); + continue; } - } - - // Canonical path comparison - let expanded = shellexpand::tilde(path_str); - let target = Path::new(expanded.as_ref()); - let target_canonical = - std::fs::canonicalize(target).unwrap_or_else(|_| target.to_path_buf()); - - for ws in workspaces { - let ws_expanded = ws.expanded_base_path(); - let ws_canonical = std::fs::canonicalize(&ws_expanded).unwrap_or(ws_expanded); - if ws_canonical == target_canonical { - return Ok(ws); + match Self::load_from_path(&config_path) { + Ok(ws) => workspaces.push(ws), + Err(e) => { + tracing::warn!( + path = %config_path.display(), + error = %e, + "Skipping invalid workspace config" + ); + } } } - Err(AppError::config(format!( - "No workspace configured for path '{}'", - path_str - ))) - } - - /// Returns the directory path for a workspace: `~/.config/git-same//`. - pub fn workspace_dir(name: &str) -> Result { - Self::validate_workspace_name(name)?; - Ok(Self::config_dir()?.join(name)) - } - - /// Returns the cache file path for a workspace: `~/.config/git-same//workspace-cache.json`. - pub fn cache_path(name: &str) -> Result { - Ok(Self::workspace_dir(name)?.join("workspace-cache.json")) - } - - /// Returns the file path for a workspace config. - fn config_path(name: &str) -> Result { - Ok(Self::workspace_dir(name)?.join("workspace-config.toml")) + Ok(workspaces) } - /// Validate workspace names to prevent path traversal. 
- fn validate_workspace_name(name: &str) -> Result<(), AppError> { - if name.trim().is_empty() { - return Err(AppError::config("Workspace name cannot be empty")); - } - - let path = Path::new(name); - if path.is_absolute() - || path.components().any(|c| { - matches!( - c, - Component::ParentDir | Component::RootDir | Component::Prefix(_) - ) - }) - { + /// Delete a workspace by removing its `.git-same/` directory. + /// + /// Also removes the workspace from the global registry. + pub fn delete(root: &Path) -> Result<(), AppError> { + let expanded_root = expand_path(root); + let dot_dir = Self::dot_dir(&expanded_root); + if !dot_dir.exists() { return Err(AppError::config(format!( - "Invalid workspace name '{}'", - name + "No workspace config found at '{}'", + dot_dir.display() ))); } - if name.contains('/') || name.contains('\\') { - return Err(AppError::config(format!( - "Invalid workspace name '{}'", - name - ))); - } + // Unregister from global config first so we don't leave stale registry entries + // when registry writes fail. + let tilde_path = tilde_collapse_path(&expanded_root); + Config::remove_from_registry(&tilde_path)?; - if !name - .chars() - .all(|c| c.is_ascii_alphanumeric() || matches!(c, '-' | '_' | '.')) - { - return Err(AppError::config( - "Workspace name may only contain letters, numbers, '-', '_' and '.'", - )); - } + std::fs::remove_dir_all(&dot_dir).map_err(|e| { + AppError::config(format!( + "Failed to remove workspace at '{}': {}", + dot_dir.display(), + e + )) + })?; Ok(()) } - /// Load a workspace config from a specific file path. - fn load_from_path(path: &Path) -> Result { - let content = std::fs::read_to_string(path).map_err(|e| { + /// Load a workspace config from a specific `.git-same/config.toml` path. + /// + /// Sets `root_path` from the parent of the `.git-same/` directory. 
+ pub fn load_from_path(config_path: &Path) -> Result { + let content = std::fs::read_to_string(config_path).map_err(|e| { AppError::config(format!( - "Failed to read workspace config at {}: {}", - path.display(), + "Failed to read workspace config at '{}': {}", + config_path.display(), e )) })?; let mut ws = WorkspaceConfig::from_toml(&content)?; - // Derive name from the parent folder - if let Some(parent) = path.parent() { - if let Some(folder_name) = parent.file_name().and_then(|n| n.to_str()) { - ws.name = folder_name.to_string(); + // Derive root_path: parent of `.git-same/` directory + // config_path = /.git-same/config.toml + // parent = /.git-same/ + // parent.parent = / + if let Some(dot_dir) = config_path.parent() { + if let Some(root) = dot_dir.parent() { + ws.root_path = std::fs::canonicalize(root).unwrap_or_else(|_| root.to_path_buf()); } } @@ -221,6 +216,56 @@ impl WorkspaceStore { } } +fn rollback_workspace_write( + config_path: &Path, + previous_config_content: Option<&str>, + dot_dir: &Path, + remove_dot_dir: bool, +) { + match previous_config_content { + Some(previous) => { + if let Err(e) = std::fs::write(config_path, previous) { + tracing::warn!( + path = %config_path.display(), + error = %e, + "Failed to restore previous workspace config during rollback" + ); + } + } + None => { + if let Err(e) = std::fs::remove_file(config_path) { + if e.kind() != std::io::ErrorKind::NotFound { + tracing::warn!( + path = %config_path.display(), + error = %e, + "Failed to remove workspace config during rollback" + ); + } + } + } + } + + if remove_dot_dir { + if let Err(e) = std::fs::remove_dir(dot_dir) { + if e.kind() != std::io::ErrorKind::NotFound { + tracing::warn!( + path = %dot_dir.display(), + error = %e, + "Failed to remove workspace directory during rollback" + ); + } + } + } +} + +/// Expand a path: resolve `~` and make absolute. 
+fn expand_path(path: &Path) -> PathBuf { + let s = path.to_string_lossy(); + let expanded = shellexpand::tilde(&s); + let p = Path::new(expanded.as_ref()); + std::fs::canonicalize(p).unwrap_or_else(|_| p.to_path_buf()) +} + #[cfg(test)] #[path = "workspace_store_tests.rs"] mod tests; diff --git a/src/config/workspace_store_tests.rs b/src/config/workspace_store_tests.rs index 1b0a155..4dedf93 100644 --- a/src/config/workspace_store_tests.rs +++ b/src/config/workspace_store_tests.rs @@ -7,102 +7,302 @@ static HOME_LOCK: Mutex<()> = Mutex::new(()); fn with_temp_home(home: &Path, f: impl FnOnce() -> T) -> T { let _lock = HOME_LOCK.lock().expect("HOME lock poisoned"); let original_home = std::env::var("HOME").ok(); + let original_userprofile = std::env::var("USERPROFILE").ok(); + + struct HomeRestore { + home: Option, + userprofile: Option, + } - struct HomeRestore(Option); impl Drop for HomeRestore { fn drop(&mut self) { - if let Some(value) = self.0.take() { + if let Some(value) = self.home.take() { std::env::set_var("HOME", value); } else { std::env::remove_var("HOME"); } + + if let Some(value) = self.userprofile.take() { + std::env::set_var("USERPROFILE", value); + } else { + std::env::remove_var("USERPROFILE"); + } } } - let _restore = HomeRestore(original_home); + let _restore = HomeRestore { + home: original_home, + userprofile: original_userprofile, + }; std::env::set_var("HOME", home); + std::env::set_var("USERPROFILE", home); f() } #[test] -fn workspace_and_cache_paths_are_derived_from_workspace_name() { +fn dot_dir_cache_and_config_paths_are_derived_from_root() { + let root = Path::new("/tmp/my-workspace"); + + let dot_dir = WorkspaceStore::dot_dir(root); + let config = WorkspaceStore::config_path(root); + let cache = WorkspaceStore::cache_path(root); + let history = WorkspaceStore::sync_history_path(root); + + assert_eq!( + dot_dir, + std::path::PathBuf::from("/tmp/my-workspace/.git-same") + ); + assert_eq!( + config, + 
std::path::PathBuf::from("/tmp/my-workspace/.git-same/config.toml") + ); + assert_eq!( + cache, + std::path::PathBuf::from("/tmp/my-workspace/.git-same/cache.json") + ); + assert_eq!( + history, + std::path::PathBuf::from("/tmp/my-workspace/.git-same/sync-history.json") + ); +} + +#[test] +fn load_returns_error_when_no_config_exists() { let temp = tempfile::tempdir().unwrap(); + let err = WorkspaceStore::load(temp.path()).unwrap_err(); + assert!(err.to_string().contains("No workspace config found")); +} - with_temp_home(temp.path(), || { - let workspace_dir = WorkspaceStore::workspace_dir("alpha").unwrap(); - let cache_path = WorkspaceStore::cache_path("alpha").unwrap(); +#[test] +fn save_creates_dot_dir_and_config_file() { + let temp = tempfile::tempdir().unwrap(); + let home = temp.path().join("home"); + std::fs::create_dir_all(&home).unwrap(); + let root = temp.path().join("my-ws"); + std::fs::create_dir_all(&root).unwrap(); - assert_eq!(workspace_dir, temp.path().join(".config/git-same/alpha")); - assert_eq!( - cache_path, - temp.path() - .join(".config/git-same/alpha/workspace-cache.json") - ); + with_temp_home(&home, || { + let config_path = crate::config::Config::default_path().unwrap(); + std::fs::create_dir_all(config_path.parent().unwrap()).unwrap(); + std::fs::write(&config_path, crate::config::Config::default_toml()).unwrap(); + + let ws = WorkspaceConfig::new_from_root(&root); + WorkspaceStore::save(&ws).unwrap(); + + assert!(WorkspaceStore::config_path(&root).exists()); }); } #[test] -fn load_from_path_roundtrip_sets_name_from_parent_directory() { +fn load_from_path_roundtrip_sets_root_path_from_parent() { let temp = tempfile::tempdir().unwrap(); - let workspace_dir = temp.path().join("roundtrip"); - std::fs::create_dir_all(&workspace_dir).unwrap(); + let root = temp.path().join("roundtrip"); + let dot_dir = root.join(".git-same"); + std::fs::create_dir_all(&dot_dir).unwrap(); - let config_path = workspace_dir.join("workspace-config.toml"); - let 
workspace = WorkspaceConfig::new("ignored-name", "/tmp/roundtrip"); - std::fs::write(&config_path, workspace.to_toml().unwrap()).unwrap(); + let ws = WorkspaceConfig::new_from_root(&root); + let config_path = dot_dir.join("config.toml"); + std::fs::write(&config_path, ws.to_toml().unwrap()).unwrap(); let loaded = WorkspaceStore::load_from_path(&config_path).unwrap(); - assert_eq!(loaded.name, "roundtrip"); - assert_eq!(loaded.base_path, "/tmp/roundtrip"); + // root_path is canonicalized, so compare the file name component + assert_eq!(loaded.root_path.file_name(), root.file_name()); +} + +#[test] +fn delete_returns_error_when_dot_dir_missing() { + let temp = tempfile::tempdir().unwrap(); + // No .git-same/ directory inside temp, so delete should fail + let err = WorkspaceStore::delete(temp.path()).unwrap_err(); + assert!(err.to_string().contains("No workspace config found")); +} + +#[test] +fn delete_keeps_workspace_files_when_registry_update_fails() { + let temp = tempfile::tempdir().unwrap(); + let home = temp.path().join("home"); + std::fs::create_dir_all(&home).unwrap(); + + with_temp_home(&home, || { + let global_config_path = crate::config::Config::default_path().unwrap(); + std::fs::create_dir_all(global_config_path.parent().unwrap()).unwrap(); + std::fs::write(&global_config_path, "invalid = [").unwrap(); + + let root = temp.path().join("my-ws"); + let dot_dir = WorkspaceStore::dot_dir(&root); + std::fs::create_dir_all(&dot_dir).unwrap(); + std::fs::write( + dot_dir.join("config.toml"), + "[provider]\nkind = \"github\"\n", + ) + .unwrap(); + + let err = WorkspaceStore::delete(&root).unwrap_err(); + assert!(err.to_string().contains("Failed to parse config")); + assert!( + dot_dir.exists(), + ".git-same should remain when unregister fails" + ); + }); } #[test] -fn save_load_and_list_roundtrip_in_empty_config_root() { +fn delete_with_relative_root_removes_registered_workspace() { let temp = tempfile::tempdir().unwrap(); + let home = temp.path().join("home"); + 
std::fs::create_dir_all(&home).unwrap(); - with_temp_home(temp.path(), || { - let listed_before = WorkspaceStore::list().unwrap(); - assert!(listed_before.is_empty()); + with_temp_home(&home, || { + let global_config_path = crate::config::Config::default_path().unwrap(); + std::fs::create_dir_all(global_config_path.parent().unwrap()).unwrap(); + std::fs::write(&global_config_path, crate::config::Config::default_toml()).unwrap(); - let workspace = WorkspaceConfig::new("team-alpha", "/tmp/team-alpha"); - WorkspaceStore::save(&workspace).unwrap(); + let root = temp.path().join("my-ws"); + let dot_dir = WorkspaceStore::dot_dir(&root); + std::fs::create_dir_all(&dot_dir).unwrap(); + std::fs::write( + dot_dir.join("config.toml"), + "[provider]\nkind = \"github\"\n", + ) + .unwrap(); - let loaded = WorkspaceStore::load("team-alpha").unwrap(); - assert_eq!(loaded.name, "team-alpha"); - assert_eq!(loaded.base_path, "/tmp/team-alpha"); + let canonical_root = std::fs::canonicalize(&root).unwrap(); + let registry_path = crate::config::workspace::tilde_collapse_path(&canonical_root); + crate::config::Config::add_to_registry_at(&global_config_path, ®istry_path).unwrap(); - let listed_after = WorkspaceStore::list().unwrap(); - assert_eq!(listed_after.len(), 1); - assert_eq!(listed_after[0].name, "team-alpha"); + let original_cwd = std::env::current_dir().unwrap(); + std::env::set_current_dir(temp.path()).unwrap(); + struct CwdRestore(std::path::PathBuf); + impl Drop for CwdRestore { + fn drop(&mut self) { + let _ = std::env::set_current_dir(&self.0); + } + } + let _cwd_restore = CwdRestore(original_cwd); + + WorkspaceStore::delete(std::path::Path::new("my-ws")).unwrap(); + + assert!(!dot_dir.exists(), ".git-same should be deleted"); + let cfg = crate::config::Config::load_from(&global_config_path).unwrap(); + assert!( + cfg.workspaces.is_empty(), + "workspace registry should be empty after delete" + ); }); } #[test] -fn delete_nonexistent_workspace_returns_error() { +fn 
save_returns_error_when_global_config_is_missing() { let temp = tempfile::tempdir().unwrap(); + let home = temp.path().join("home"); + std::fs::create_dir_all(&home).unwrap(); + + with_temp_home(&home, || { + let root = temp.path().join("my-ws"); + std::fs::create_dir_all(&root).unwrap(); - with_temp_home(temp.path(), || { - let err = WorkspaceStore::delete("ghost-workspace").unwrap_err(); - assert!(err.to_string().contains("not found")); + let ws = WorkspaceConfig::new_from_root(&root); + let err = WorkspaceStore::save(&ws).unwrap_err(); + assert!(err.to_string().contains("Run 'gisa init' first")); + assert!(!WorkspaceStore::config_path(&root).exists()); + assert!(!WorkspaceStore::dot_dir(&root).exists()); }); } #[test] -fn workspace_name_rejects_path_traversal() { +fn save_updates_registry_when_global_config_exists() { let temp = tempfile::tempdir().unwrap(); + let home = temp.path().join("home"); + std::fs::create_dir_all(&home).unwrap(); + + with_temp_home(&home, || { + let config_path = crate::config::Config::default_path().unwrap(); + std::fs::create_dir_all(config_path.parent().unwrap()).unwrap(); + std::fs::write(&config_path, crate::config::Config::default_toml()).unwrap(); + + let root = temp.path().join("my-ws"); + std::fs::create_dir_all(&root).unwrap(); - with_temp_home(temp.path(), || { - let err = WorkspaceStore::workspace_dir("../escape").unwrap_err(); - assert!(err.to_string().contains("Invalid workspace name")); + let ws = WorkspaceConfig::new_from_root(&root); + WorkspaceStore::save(&ws).unwrap(); + + let cfg = crate::config::Config::load_from(&config_path).unwrap(); + assert_eq!(cfg.workspaces.len(), 1); }); } #[test] -fn workspace_name_allows_safe_characters() { +fn save_with_registry_config_path_uses_explicit_config_file() { let temp = tempfile::tempdir().unwrap(); + let root = temp.path().join("my-ws"); + std::fs::create_dir_all(&root).unwrap(); + + let custom_config_path = temp.path().join("custom-config.toml"); + 
std::fs::write(&custom_config_path, crate::config::Config::default_toml()).unwrap(); + + let ws = WorkspaceConfig::new_from_root(&root); + WorkspaceStore::save_with_registry_config_path(&ws, &custom_config_path).unwrap(); + + assert!(WorkspaceStore::config_path(&root).exists()); + + let cfg = crate::config::Config::load_from(&custom_config_path).unwrap(); + assert_eq!(cfg.workspaces.len(), 1); + assert_eq!( + cfg.workspaces[0], + crate::config::workspace::tilde_collapse_path(&root) + ); +} + +#[test] +fn save_rolls_back_new_workspace_write_when_registry_update_fails() { + let temp = tempfile::tempdir().unwrap(); + let home = temp.path().join("home"); + std::fs::create_dir_all(&home).unwrap(); + + with_temp_home(&home, || { + let global_config_path = crate::config::Config::default_path().unwrap(); + std::fs::create_dir_all(global_config_path.parent().unwrap()).unwrap(); + std::fs::write(&global_config_path, "invalid = [").unwrap(); + + let root = temp.path().join("my-ws"); + std::fs::create_dir_all(&root).unwrap(); + + let ws = WorkspaceConfig::new_from_root(&root); + let err = WorkspaceStore::save(&ws).unwrap_err(); + assert!(err.to_string().contains("Failed to parse config")); + assert!(!WorkspaceStore::config_path(&root).exists()); + assert!(!WorkspaceStore::dot_dir(&root).exists()); + }); +} + +#[test] +fn save_restores_existing_workspace_config_when_registry_update_fails() { + let temp = tempfile::tempdir().unwrap(); + let home = temp.path().join("home"); + std::fs::create_dir_all(&home).unwrap(); + + with_temp_home(&home, || { + let global_config_path = crate::config::Config::default_path().unwrap(); + std::fs::create_dir_all(global_config_path.parent().unwrap()).unwrap(); + std::fs::write(&global_config_path, "invalid = [").unwrap(); + + let root = temp.path().join("my-ws"); + let dot_dir = WorkspaceStore::dot_dir(&root); + std::fs::create_dir_all(&dot_dir).unwrap(); + + let mut previous = WorkspaceConfig::new_from_root(&root); + previous.username = 
"before".to_string(); + let previous_content = previous.to_toml().unwrap(); + let config_path = WorkspaceStore::config_path(&root); + std::fs::write(&config_path, &previous_content).unwrap(); + + let mut ws = WorkspaceConfig::new_from_root(&root); + ws.username = "after".to_string(); + let err = WorkspaceStore::save(&ws).unwrap_err(); + assert!(err.to_string().contains("Failed to parse config")); - with_temp_home(temp.path(), || { - let path = WorkspaceStore::workspace_dir("team.alpha-1_repo").unwrap(); - assert_eq!(path, temp.path().join(".config/git-same/team.alpha-1_repo")); + let restored = std::fs::read_to_string(&config_path).unwrap(); + assert_eq!(restored, previous_content); + assert!(dot_dir.exists()); }); } diff --git a/src/config/workspace_tests.rs b/src/config/workspace_tests.rs index 8c6a126..ae6f696 100644 --- a/src/config/workspace_tests.rs +++ b/src/config/workspace_tests.rs @@ -1,10 +1,9 @@ use super::*; #[test] -fn test_new_workspace_config() { - let ws = WorkspaceConfig::new("github", "~/github"); - assert_eq!(ws.name, "github"); - assert_eq!(ws.base_path, "~/github"); +fn test_new_from_root_workspace_config() { + let ws = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/github")); + assert_eq!(ws.root_path, std::path::PathBuf::from("/tmp/github")); assert_eq!(ws.provider.kind, ProviderKind::GitHub); assert!(ws.orgs.is_empty()); assert!(ws.last_synced.is_none()); @@ -14,35 +13,35 @@ fn test_new_workspace_config() { fn test_workspace_provider_default() { let provider = WorkspaceProvider::default(); assert_eq!(provider.kind, ProviderKind::GitHub); - assert_eq!(provider.auth, AuthMethod::GhCli); assert!(provider.prefer_ssh); assert!(provider.api_url.is_none()); } #[test] -fn test_workspace_provider_to_provider_entry() { +fn test_workspace_provider_effective_api_url() { let provider = WorkspaceProvider { kind: ProviderKind::GitHub, - auth: AuthMethod::GhCli, - api_url: None, - prefer_ssh: false, + api_url: 
Some("https://custom-api.example.com".to_string()), + prefer_ssh: true, }; - let entry = provider.to_provider_entry(); - assert_eq!(entry.kind, ProviderKind::GitHub); - assert_eq!(entry.auth, AuthMethod::GhCli); - assert!(entry.api_url.is_none()); - assert!(!entry.prefer_ssh); - assert!(entry.enabled); + assert_eq!( + provider.effective_api_url(), + "https://custom-api.example.com" + ); +} + +#[test] +fn test_workspace_provider_display_name() { + let provider = WorkspaceProvider::default(); + assert_eq!(provider.display_name(), "GitHub"); } #[test] fn test_serde_roundtrip() { let ws = WorkspaceConfig { - name: "my-workspace".to_string(), - base_path: "~/code/repos".to_string(), + root_path: std::path::PathBuf::from("/tmp/repos"), provider: WorkspaceProvider { kind: ProviderKind::GitHub, - auth: AuthMethod::GhCli, api_url: None, prefer_ssh: true, }, @@ -67,9 +66,8 @@ fn test_serde_roundtrip() { let toml_str = ws.to_toml().unwrap(); let parsed = WorkspaceConfig::from_toml(&toml_str).unwrap(); - // name is skip_serializing — it's derived from the folder, not the TOML - assert!(parsed.name.is_empty()); - assert_eq!(parsed.base_path, ws.base_path); + // root_path is skip — not written to TOML, so it's empty after parse + assert_eq!(parsed.root_path, std::path::PathBuf::new()); assert_eq!(parsed.username, ws.username); assert_eq!(parsed.orgs, ws.orgs); assert_eq!(parsed.exclude_repos, ws.exclude_repos); @@ -78,54 +76,86 @@ fn test_serde_roundtrip() { assert_eq!(parsed.concurrency, ws.concurrency); assert_eq!(parsed.last_synced, ws.last_synced); assert_eq!(parsed.provider.kind, ws.provider.kind); - assert_eq!(parsed.provider.auth, ws.provider.auth); assert!(parsed.filters.include_forks); } #[test] -fn test_expanded_base_path() { - let ws = WorkspaceConfig::new("test", "~/github"); +fn test_expanded_base_path_returns_root_path() { + let ws = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/github")); let expanded = ws.expanded_base_path(); - 
assert!(!expanded.to_string_lossy().contains('~')); + assert_eq!(expanded, std::path::PathBuf::from("/tmp/github")); } #[test] -fn test_summary() { +fn test_summary_with_orgs() { let ws = WorkspaceConfig { orgs: vec!["org1".to_string(), "org2".to_string()], last_synced: None, - ..WorkspaceConfig::new("github", "~/github") + ..WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/github")) }; let summary = ws.summary(); - assert!(summary.contains("github")); assert!(summary.contains("2 org(s)")); assert!(summary.contains("never synced")); } #[test] fn test_display_label() { - let ws = WorkspaceConfig::new("github-repos", "~/repos"); - assert_eq!(ws.display_label(), "~/repos (GitHub)"); + let ws = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/repos")); + let label = ws.display_label(); + assert!(label.contains("GitHub")); + assert!(label.contains("/tmp/repos") || label.contains("~/")); } #[test] fn test_summary_all_orgs() { - let ws = WorkspaceConfig::new("work", "~/work"); + let ws = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/work")); let summary = ws.summary(); assert!(summary.contains("all orgs")); } #[test] fn test_optional_fields_not_serialized_when_none() { - let ws = WorkspaceConfig::new("minimal", "~/minimal"); + let ws = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/minimal")); let toml_str = ws.to_toml().unwrap(); - // name is derived from folder, never written to TOML as its own key + // root_path is skip_serializing — never written to TOML assert!( - !toml_str.lines().any(|l| l.starts_with("name ")), - "TOML should not contain a 'name' key" + !toml_str.lines().any(|l| l.starts_with("root_path")), + "TOML should not contain a 'root_path' key" ); assert!(!toml_str.contains("structure")); assert!(!toml_str.contains("sync_mode")); assert!(!toml_str.contains("concurrency")); assert!(!toml_str.contains("last_synced")); } + +#[test] +fn test_tilde_collapse_path_replaces_home() { + let home = 
std::env::var("HOME").unwrap_or_default(); + if home.is_empty() { + return; // Skip if HOME not set + } + let path = std::path::Path::new(&home).join("repos"); + let collapsed = tilde_collapse_path(&path); + assert!(collapsed.starts_with('~')); +} + +#[test] +fn test_tilde_collapse_path_home_exactly() { + let home = std::env::var("HOME").unwrap_or_default(); + if home.is_empty() { + return; + } + let collapsed = tilde_collapse_path(std::path::Path::new(&home)); + assert_eq!(collapsed, "~"); +} + +#[test] +fn test_tilde_collapse_path_does_not_match_string_prefix_only() { + let home = std::env::var("HOME").unwrap_or_default(); + if home.is_empty() { + return; + } + let fake = std::path::PathBuf::from(format!("{}-other/repos", home)); + let collapsed = tilde_collapse_path(&fake); + assert_eq!(collapsed, fake.to_string_lossy()); +} diff --git a/src/discovery.rs b/src/discovery.rs index 3192954..125d0a8 100644 --- a/src/discovery.rs +++ b/src/discovery.rs @@ -133,13 +133,24 @@ impl DiscoveryOrchestrator { git: &G, ) -> Vec<(PathBuf, String, String)> { let mut repos = Vec::new(); + let mut visited_dirs = HashSet::new(); + let mut seen_repos = HashSet::new(); // Determine scan depth based on structure // {org}/{repo} -> 2 levels // {provider}/{org}/{repo} -> 3 levels let depth = RepoPathTemplate::new(self.structure.clone()).scan_depth(); - self.scan_dir(base_path, base_path, git, &mut repos, 0, depth); + self.scan_dir( + base_path, + base_path, + git, + &mut repos, + &mut visited_dirs, + &mut seen_repos, + 0, + depth, + ); repos } @@ -151,6 +162,8 @@ impl DiscoveryOrchestrator { path: &Path, git: &G, repos: &mut Vec<(PathBuf, String, String)>, + visited_dirs: &mut HashSet, + seen_repos: &mut HashSet, current_depth: usize, max_depth: usize, ) { @@ -158,25 +171,43 @@ impl DiscoveryOrchestrator { return; } - let entries = match std::fs::read_dir(path) { + let canonical_path = std::fs::canonicalize(path).unwrap_or_else(|_| path.to_path_buf()); + if 
!visited_dirs.insert(canonical_path.clone()) { + return; + } + + let entries = match std::fs::read_dir(&canonical_path) { Ok(e) => e, Err(_) => return, }; for entry in entries.flatten() { - let entry_path = entry.path(); - if !entry_path.is_dir() { + // Avoid traversing symlinks to directories. + let Ok(file_type) = entry.file_type() else { + continue; + }; + if !file_type.is_dir() { continue; } + let entry_path = entry.path(); + // Skip hidden directories if entry.file_name().to_string_lossy().starts_with('.') { continue; } if current_depth + 1 == max_depth && git.is_repo(&entry_path) { + let canonical_repo = + std::fs::canonicalize(&entry_path).unwrap_or(entry_path.clone()); + if !seen_repos.insert(canonical_repo.clone()) { + continue; + } + // This is a repo at the expected depth - let rel_path = entry_path.strip_prefix(base_path).unwrap_or(&entry_path); + let rel_path = canonical_repo + .strip_prefix(base_path) + .unwrap_or(&canonical_repo); let parts: Vec<_> = rel_path.components().collect(); if parts.len() >= 2 { @@ -188,7 +219,7 @@ impl DiscoveryOrchestrator { .as_os_str() .to_string_lossy() .to_string(); - repos.push((entry_path.clone(), org, repo)); + repos.push((canonical_repo, org, repo)); } } else { // Recurse into subdirectory @@ -197,6 +228,8 @@ impl DiscoveryOrchestrator { &entry_path, git, repos, + visited_dirs, + seen_repos, current_depth + 1, max_depth, ); diff --git a/src/discovery_tests.rs b/src/discovery_tests.rs index 503a00c..14fb682 100644 --- a/src/discovery_tests.rs +++ b/src/discovery_tests.rs @@ -124,6 +124,32 @@ fn test_plan_sync_not_cloned() { assert!(skipped[0].1.contains("not cloned")); } +#[cfg(unix)] +#[test] +fn test_scan_local_ignores_symlinked_directories() { + use std::os::unix::fs::symlink; + + let temp = TempDir::new().unwrap(); + let repo_path = temp.path().join("org/repo"); + std::fs::create_dir_all(&repo_path).unwrap(); + symlink(temp.path().join("org"), temp.path().join("org-link")).unwrap(); + + let mut git = 
MockGit::new(); + git.add_repo( + std::fs::canonicalize(&repo_path) + .unwrap() + .to_string_lossy() + .to_string(), + ); + + let orchestrator = DiscoveryOrchestrator::new(FilterOptions::default(), "{org}/{repo}".into()); + let repos = orchestrator.scan_local(temp.path(), &git); + + assert_eq!(repos.len(), 1); + assert_eq!(repos[0].1, "org"); + assert_eq!(repos[0].2, "repo"); +} + #[test] fn test_merge_repos() { let repos1 = vec![test_repo("repo1", "org1")]; diff --git a/src/lib.rs b/src/lib.rs index 5378da4..c2da655 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -70,8 +70,8 @@ pub mod prelude { pub use crate::cache::{CacheManager, DiscoveryCache, CACHE_VERSION}; pub use crate::cli::{Cli, Command, InitArgs, ResetArgs, StatusArgs, SyncCmdArgs}; pub use crate::config::{ - AuthMethod, Config, ConfigCloneOptions, FilterOptions, ProviderEntry, - SyncMode as ConfigSyncMode, + Config, ConfigCloneOptions, FilterOptions, SyncMode as ConfigSyncMode, WorkspaceConfig, + WorkspaceProvider, }; pub use crate::discovery::DiscoveryOrchestrator; pub use crate::domain::RepoPathTemplate; diff --git a/src/lib_tests.rs b/src/lib_tests.rs index 6b76ccb..3d96e46 100644 --- a/src/lib_tests.rs +++ b/src/lib_tests.rs @@ -8,7 +8,7 @@ fn prelude_reexports_core_types() { assert_eq!(options.depth, 1); assert_eq!(options.branch.as_deref(), Some("main")); - let provider = ProviderEntry::github(); + let provider = WorkspaceProvider::default(); assert_eq!(provider.kind, ProviderKind::GitHub); let repo = Repo::test("rocket", "acme"); diff --git a/src/main.rs b/src/main.rs index 09dca1e..612b8b3 100644 --- a/src/main.rs +++ b/src/main.rs @@ -40,12 +40,7 @@ async fn main() -> ExitCode { let verbosity = Verbosity::from(cli.verbosity()); let output = Output::new(verbosity, cli.is_json()); - // Print banner unless quiet or JSON output - if !output.is_json() && !cli.is_quiet() { - git_same::banner::print_banner(); - } - - let result = run_command(command, cli.config.as_deref(), &output).await; + let result = 
run_command(command, cli.config.as_deref(), &output, cli.is_quiet()).await; match result { Ok(()) => ExitCode::SUCCESS, @@ -65,6 +60,7 @@ async fn main() -> ExitCode { use git_same::config::Config; // Auto-create default config if it doesn't exist + let mut config_was_created = false; if cli.config.is_none() { let default_path = match Config::default_path() { Ok(path) => path, @@ -93,6 +89,7 @@ async fn main() -> ExitCode { ); return ExitCode::from(2); } + config_was_created = true; } } @@ -102,13 +99,15 @@ async fn main() -> ExitCode { }; match config { - Ok(config) => match git_same::app::tui::run_tui(config).await { - Ok(()) => ExitCode::SUCCESS, - Err(e) => { - eprintln!("TUI error: {}", e); - ExitCode::from(1) + Ok(config) => { + match git_same::app::tui::run_tui(config, config_was_created).await { + Ok(()) => ExitCode::SUCCESS, + Err(e) => { + eprintln!("TUI error: {}", e); + ExitCode::from(1) + } } - }, + } Err(e) => { eprintln!("Failed to load config: {}", e); eprintln!("Run 'gisa init' to create a configuration file."); diff --git a/src/provider/github/pagination.rs b/src/provider/github/pagination.rs index f12ffff..52a831a 100644 --- a/src/provider/github/pagination.rs +++ b/src/provider/github/pagination.rs @@ -110,7 +110,7 @@ pub async fn fetch_all_pages( .await { Ok(response) => response, - Err(e) if retry_count < MAX_RETRIES => { + Err(_) if retry_count < MAX_RETRIES => { retry_count += 1; tokio::time::sleep(Duration::from_millis(backoff_ms)).await; backoff_ms *= 2; diff --git a/src/provider/mod.rs b/src/provider/mod.rs index 63731a0..5797c4a 100644 --- a/src/provider/mod.rs +++ b/src/provider/mod.rs @@ -6,7 +6,6 @@ //! - **GitHub** - github.com (active) //! - **GitHub Enterprise** - coming soon //! - **GitLab** - coming soon -//! - **GitLab Self-Managed** - coming soon //! - **Codeberg** - coming soon //! - **Bitbucket** - coming soon //! @@ -14,15 +13,15 @@ //! //! ```no_run //! 
use git_same::provider::{create_provider, DiscoveryOptions, NoProgress}; -//! use git_same::config::ProviderEntry; +//! use git_same::config::WorkspaceProvider; //! //! # async fn example() -> Result<(), git_same::errors::AppError> { -//! let entry = ProviderEntry::github(); -//! let provider = create_provider(&entry, "ghp_token123")?; +//! let provider = WorkspaceProvider::default(); +//! let p = create_provider(&provider, "ghp_token123")?; //! //! let options = DiscoveryOptions::new(); //! let progress = NoProgress; -//! let repos = provider.discover_repos(&options, &progress).await?; +//! let repos = p.discover_repos(&options, &progress).await?; //! # Ok(()) //! # } //! ``` @@ -37,19 +36,23 @@ pub use traits::{ Credentials, DiscoveryOptions, DiscoveryProgress, NoProgress, Provider, RateLimitInfo, }; -use crate::config::ProviderEntry; +use crate::config::WorkspaceProvider; use crate::errors::{AppError, ProviderError}; use crate::types::ProviderKind; -/// Creates a provider instance based on configuration. -pub fn create_provider(entry: &ProviderEntry, token: &str) -> Result, AppError> { - let api_url = entry.effective_api_url(); +/// Creates a provider instance based on workspace provider configuration. 
+pub fn create_provider( + ws_provider: &WorkspaceProvider, + token: &str, +) -> Result, AppError> { + let api_url = ws_provider.effective_api_url(); - match entry.kind { + match ws_provider.kind { ProviderKind::GitHub => { let credentials = Credentials::new(token, api_url); - let provider = github::GitHubProvider::new(credentials, entry.display_name()) - .map_err(AppError::Provider)?; + let provider = + github::GitHubProvider::new(credentials, ws_provider.display_name().to_string()) + .map_err(AppError::Provider)?; Ok(Box::new(provider)) } other => Err(AppError::Provider(ProviderError::NotImplemented(format!( diff --git a/src/provider/mod_tests.rs b/src/provider/mod_tests.rs index b13b257..8f68605 100644 --- a/src/provider/mod_tests.rs +++ b/src/provider/mod_tests.rs @@ -1,11 +1,12 @@ use super::*; +use crate::config::WorkspaceProvider; use crate::types::ProviderKind; #[test] fn create_provider_supports_github() { - let github = ProviderEntry::github(); - let provider = create_provider(&github, "ghp_test_token").unwrap(); - assert_eq!(provider.kind(), ProviderKind::GitHub); + let provider = WorkspaceProvider::default(); + let result = create_provider(&provider, "ghp_test_token").unwrap(); + assert_eq!(result.kind(), ProviderKind::GitHub); } #[test] @@ -19,10 +20,13 @@ fn create_provider_returns_not_implemented_for_unsupported() { ]; for (kind, expected_name) in unsupported { - let mut entry = ProviderEntry::github(); - entry.kind = kind; + let ws_provider = WorkspaceProvider { + kind, + api_url: None, + prefer_ssh: true, + }; - match create_provider(&entry, "token") { + match create_provider(&ws_provider, "token") { Ok(_) => panic!("expected {} to be unsupported", expected_name), Err(err) => assert!( err.to_string().contains("coming soon"), diff --git a/src/setup/handler.rs b/src/setup/handler.rs index 31426fb..3b5139f 100644 --- a/src/setup/handler.rs +++ b/src/setup/handler.rs @@ -5,7 +5,7 @@ use super::state::{ }; use crate::auth::{get_auth_for_provider, 
gh_cli}; use crate::config::{WorkspaceConfig, WorkspaceManager}; -use crate::provider::{create_provider, Credentials}; +use crate::provider::create_provider; use crossterm::event::{KeyCode, KeyEvent, KeyModifiers}; /// Handle a key event in the setup wizard. @@ -35,7 +35,7 @@ pub async fn handle_key(state: &mut SetupState, key: KeyEvent) { } if key.modifiers == KeyModifiers::NONE && key.code == KeyCode::Char('q') - && !matches!(state.step, SetupStep::SelectPath) + && !matches!(state.step, SetupStep::SelectPath | SetupStep::Requirements) { state.outcome = Some(SetupOutcome::Cancelled); state.should_quit = true; @@ -43,6 +43,7 @@ pub async fn handle_key(state: &mut SetupState, key: KeyEvent) { } if !path_popup_active && state.step != SetupStep::SelectPath + && state.step != SetupStep::Requirements && key.modifiers == KeyModifiers::NONE && key.code == KeyCode::Esc { @@ -68,7 +69,7 @@ pub async fn handle_key(state: &mut SetupState, key: KeyEvent) { } match state.step { - SetupStep::Welcome => handle_welcome(state, key), + SetupStep::Requirements => handle_requirements(state, key), SetupStep::SelectProvider => handle_provider(state, key), SetupStep::Authenticate => handle_auth(state, key).await, SetupStep::SelectPath => handle_path(state, key), @@ -80,8 +81,10 @@ pub async fn handle_key(state: &mut SetupState, key: KeyEvent) { async fn handle_step_forward(state: &mut SetupState) { match state.step { - SetupStep::Welcome => { - state.next_step(); + SetupStep::Requirements => { + if !state.checks_loading && state.requirements_passed() { + state.next_step(); + } } SetupStep::SelectProvider => { if state.provider_choices[state.provider_index].available { @@ -101,9 +104,10 @@ async fn handle_step_forward(state: &mut SetupState) { }, SetupStep::SelectOrgs => { if state.org_loading { - do_discover_orgs(state).await; + // Discovery is driven externally while loading. 
} else if state.org_error.is_some() { state.org_loading = true; + state.org_discovery_in_progress = false; state.org_error = None; } else { state.next_step(); @@ -116,11 +120,6 @@ async fn handle_step_forward(state: &mut SetupState) { state.path_cursor = state.base_path.len(); } close_path_browse_to_input(state); - } else if state.path_suggestions_mode { - if let Some(s) = state.path_suggestions.get(state.path_suggestion_index) { - state.base_path = s.path.clone(); - state.path_cursor = state.base_path.len(); - } } confirm_path(state); } @@ -138,10 +137,12 @@ async fn handle_step_forward(state: &mut SetupState) { } } -fn handle_welcome(state: &mut SetupState, key: KeyEvent) { +fn handle_requirements(state: &mut SetupState, key: KeyEvent) { match key.code { KeyCode::Enter => { - state.next_step(); + if !state.checks_loading && state.requirements_passed() { + state.next_step(); + } } KeyCode::Esc => { state.prev_step(); @@ -198,8 +199,8 @@ async fn handle_auth(state: &mut SetupState, key: KeyEvent) { } async fn do_authenticate(state: &mut SetupState) { - let provider_entry = state.build_workspace_provider().to_provider_entry(); - match get_auth_for_provider(&provider_entry) { + let ws_provider = state.build_workspace_provider(); + match get_auth_for_provider(&ws_provider) { Ok(auth) => { let username = auth.username.or_else(|| gh_cli::get_username().ok()); state.username = username; @@ -346,7 +347,7 @@ fn close_path_browse_to_input(state: &mut SetupState) { state.path_browse_error = None; state.path_browse_info = None; state.path_cursor = state.base_path.len(); - state.path_completions = compute_completions(&state.base_path); + state.path_completions.clear(); state.path_completion_index = 0; } @@ -450,14 +451,25 @@ fn move_to_parent_or_collapse_selected_entry(state: &mut SetupState) { else { return; }; + if selected.depth == 0 { + let root_dir = std::path::PathBuf::from(shellexpand::tilde(&selected.path).as_ref()); + if let Some(parent) = root_dir.parent() { + let 
parent = parent.to_path_buf(); + set_browse_root(state, parent.clone()); + state.path_browse_info = Some(format!( + "Moved to parent: {}", + tilde_collapse(&parent.to_string_lossy()) + )); + } else { + state.path_browse_info = Some("Already at filesystem root".to_string()); + } + return; + } if selected.expanded { collapse_selected_entry(state); sync_browse_current_dir(state); return; } - if selected.depth == 0 { - return; - } for idx in (0..state.path_browse_index).rev() { if state.path_browse_entries[idx].depth + 1 == selected.depth { state.path_browse_index = idx; @@ -507,55 +519,23 @@ fn handle_path_browse(state: &mut SetupState, key: KeyEvent) { fn handle_path_suggestions(state: &mut SetupState, key: KeyEvent) { match key.code { - KeyCode::Up => { - if state.path_suggestion_index > 0 { - state.path_suggestion_index -= 1; - } - } - KeyCode::Down => { - if state.path_suggestion_index + 1 < state.path_suggestions.len() { - state.path_suggestion_index += 1; - } + KeyCode::Left => { + state.prev_step(); } KeyCode::Enter => { - if let Some(s) = state.path_suggestions.get(state.path_suggestion_index) { - state.base_path = s.path.clone(); - state.path_cursor = state.base_path.len(); - } confirm_path(state); } - KeyCode::Tab => { - if let Some(s) = state.path_suggestions.get(state.path_suggestion_index) { - state.base_path = s.path.clone(); - state.path_cursor = state.base_path.len(); - } - state.path_suggestions_mode = false; - state.path_completions = compute_completions(&state.base_path); - state.path_completion_index = 0; - } KeyCode::Char('b') => { open_path_browse_mode(state); } KeyCode::Esc => { state.prev_step(); } - KeyCode::Backspace => { - state.path_suggestions_mode = false; - if state.path_cursor > 0 { - state.path_cursor -= 1; - state.base_path.remove(state.path_cursor); - } - state.path_completions = compute_completions(&state.base_path); - state.path_completion_index = 0; - } - KeyCode::Char(c) => { - state.path_suggestions_mode = false; - 
state.base_path.clear(); - state.base_path.push(c); - state.path_cursor = 1; - state.path_completions = compute_completions(&state.base_path); - state.path_completion_index = 0; - } + KeyCode::Tab => open_path_browse_mode(state), + KeyCode::Up | KeyCode::Down | KeyCode::Right => {} + KeyCode::Backspace | KeyCode::Delete => {} + KeyCode::Home | KeyCode::End => {} + KeyCode::Char(_) => {} _ => {} } } @@ -567,153 +547,33 @@ fn handle_path_input(state: &mut SetupState, key: KeyEvent) { } match key.code { - KeyCode::Tab => { - apply_tab_completion(state); + KeyCode::Left => { + state.prev_step(); } KeyCode::Enter => { confirm_path(state); } + KeyCode::Char('b') => { + open_path_browse_mode(state); + } KeyCode::Esc => { state.prev_step(); } - KeyCode::Backspace => { - if state.path_cursor > 0 { - state.path_cursor -= 1; - state.base_path.remove(state.path_cursor); - state.path_completions = compute_completions(&state.base_path); - state.path_completion_index = 0; - } - } - KeyCode::Delete => { - if state.path_cursor < state.base_path.len() { - state.base_path.remove(state.path_cursor); - state.path_completions = compute_completions(&state.base_path); - state.path_completion_index = 0; - } - } - KeyCode::Left => { - if state.path_cursor > 0 { - state.path_cursor -= 1; - } - } - KeyCode::Right => { - if state.path_cursor < state.base_path.len() { - state.path_cursor += 1; - } - } - KeyCode::Home => { - state.path_cursor = 0; - } - KeyCode::End => { - state.path_cursor = state.base_path.len(); - } - KeyCode::Char(c) => { - state.base_path.insert(state.path_cursor, c); - state.path_cursor += 1; - state.path_completions = compute_completions(&state.base_path); - state.path_completion_index = 0; - } + KeyCode::Up | KeyCode::Down | KeyCode::Right => {} + KeyCode::Tab => {} + KeyCode::Backspace | KeyCode::Delete => {} + KeyCode::Home | KeyCode::End => {} + KeyCode::Char(_) => {} _ => {} } } -/// Compute directory completions for the current input path. 
-fn compute_completions(input: &str) -> Vec { - if input.is_empty() { - return Vec::new(); - } - let expanded = shellexpand::tilde(input); - let path = std::path::Path::new(expanded.as_ref()); - - let (parent, prefix) = if expanded.ends_with('/') { - (path.to_path_buf(), String::new()) - } else { - let parent = path - .parent() - .filter(|p| !p.as_os_str().is_empty()) - .map(std::path::Path::to_path_buf) - .unwrap_or_else(|| { - std::env::current_dir().unwrap_or_else(|_| std::path::PathBuf::from(".")) - }); - let prefix = path - .file_name() - .map(|f| f.to_string_lossy().to_string()) - .unwrap_or_default(); - (parent, prefix) - }; - - let mut results = Vec::new(); - if let Ok(entries) = std::fs::read_dir(&parent) { - for entry in entries.flatten() { - if !entry.path().is_dir() { - continue; - } - let name = entry.file_name().to_string_lossy().to_string(); - if name.starts_with('.') { - continue; - } - if prefix.is_empty() || name.starts_with(&prefix) { - let full = parent.join(&name); - let display = tilde_collapse(&full.to_string_lossy()); - results.push(format!("{}/", display)); - } - } - } - results.sort(); - results -} - -fn apply_tab_completion(state: &mut SetupState) { - if state.path_completions.is_empty() { - return; - } - if state.path_completions.len() == 1 { - state.base_path = state.path_completions[0].clone(); - state.path_cursor = state.base_path.len(); - state.path_completions = compute_completions(&state.base_path); - state.path_completion_index = 0; - } else { - let common = longest_common_prefix(&state.path_completions); - if common.len() > state.base_path.len() { - state.base_path = common; - state.path_cursor = state.base_path.len(); - state.path_completions = compute_completions(&state.base_path); - state.path_completion_index = 0; - } else { - // Already at common prefix, cycle through completions - state.base_path = state.path_completions[state.path_completion_index].clone(); - state.path_cursor = state.base_path.len(); - 
state.path_completion_index = - (state.path_completion_index + 1) % state.path_completions.len(); - } - } -} - -fn longest_common_prefix(strings: &[String]) -> String { - if strings.is_empty() { - return String::new(); - } - let mut prefix: Vec = strings[0].chars().collect(); - for s in &strings[1..] { - let mut matched = 0usize; - for (a, b) in prefix.iter().copied().zip(s.chars()) { - if a != b { - break; - } - matched += 1; - } - prefix.truncate(matched); - if prefix.is_empty() { - break; - } - } - prefix.into_iter().collect() -} - async fn handle_orgs(state: &mut SetupState, key: KeyEvent) { if state.org_loading { - // Trigger org discovery - do_discover_orgs(state).await; + // Discovery is triggered by a synthetic Null key in setup CLI mode. + if key.code == KeyCode::Null { + do_discover_orgs(state).await; + } return; } @@ -747,6 +607,7 @@ async fn handle_orgs(state: &mut SetupState, key: KeyEvent) { if state.org_error.is_some() { // Retry state.org_loading = true; + state.org_discovery_in_progress = false; state.org_error = None; } else { state.next_step(); @@ -763,19 +624,31 @@ async fn do_discover_orgs(state: &mut SetupState) { let Some(ref token) = state.auth_token else { state.org_error = Some("Not authenticated".to_string()); state.org_loading = false; + state.org_discovery_in_progress = false; return; }; - let provider_entry = state.build_workspace_provider().to_provider_entry(); - let api_url = provider_entry.effective_api_url(); - - let credentials = Credentials { - token: token.clone(), - api_base_url: api_url, - username: state.username.clone(), - }; + let ws_provider = state.build_workspace_provider(); + match discover_org_entries(ws_provider, token.clone()).await { + Ok(org_entries) => { + state.orgs = org_entries; + state.org_index = 0; + state.org_loading = false; + state.org_discovery_in_progress = false; + } + Err(e) => { + state.org_error = Some(e); + state.org_loading = false; + state.org_discovery_in_progress = false; + } + } +} - match 
create_provider(&provider_entry, &credentials.token) { +pub(crate) async fn discover_org_entries( + ws_provider: crate::config::WorkspaceProvider, + token: String, +) -> Result, String> { + match create_provider(&ws_provider, &token) { Ok(provider) => match provider.get_organizations().await { Ok(orgs) => { let mut org_entries: Vec = Vec::new(); @@ -792,19 +665,11 @@ async fn do_discover_orgs(state: &mut SetupState) { }); } org_entries.sort_by(|a, b| a.name.cmp(&b.name)); - state.orgs = org_entries; - state.org_index = 0; - state.org_loading = false; - } - Err(e) => { - state.org_error = Some(e.to_string()); - state.org_loading = false; + Ok(org_entries) } + Err(e) => Err(e.to_string()), }, - Err(e) => { - state.org_error = Some(e.to_string()); - state.org_loading = false; - } + Err(e) => Err(e.to_string()), } } @@ -841,7 +706,18 @@ fn handle_complete(state: &mut SetupState, key: KeyEvent) { } fn save_workspace(state: &SetupState) -> Result<(), crate::errors::AppError> { - let mut ws = WorkspaceConfig::new(&state.workspace_name, &state.base_path); + let expanded = shellexpand::tilde(&state.base_path); + let root = std::path::Path::new(expanded.as_ref()); + std::fs::create_dir_all(root).map_err(|e| { + crate::errors::AppError::config(format!( + "Failed to create workspace directory '{}': {}", + root.display(), + e + )) + })?; + let root = std::fs::canonicalize(root).unwrap_or_else(|_| root.to_path_buf()); + + let mut ws = WorkspaceConfig::new_from_root(&root); ws.provider = state.build_workspace_provider(); ws.username = state.username.clone().unwrap_or_default(); ws.orgs = state.selected_orgs(); diff --git a/src/setup/handler_tests.rs b/src/setup/handler_tests.rs index cac8407..6bfb28a 100644 --- a/src/setup/handler_tests.rs +++ b/src/setup/handler_tests.rs @@ -1,5 +1,5 @@ use super::*; -use crate::setup::state::SetupStep; +use crate::setup::state::{PathSuggestion, SetupStep}; fn cwd_collapsed() -> String { 
super::tilde_collapse(&std::env::current_dir().unwrap().to_string_lossy()) @@ -25,6 +25,7 @@ fn find_entry_index(state: &SetupState, path: &std::path::Path) -> usize { #[tokio::test] async fn q_quits_setup_wizard() { let mut state = SetupState::new("~/Git-Same/GitHub"); + state.step = SetupStep::SelectProvider; handle_key( &mut state, @@ -59,9 +60,24 @@ async fn left_moves_to_previous_step() { assert_eq!(state.step, SetupStep::Authenticate); } +#[tokio::test] +async fn org_loading_ignores_non_null_keys() { + let mut state = SetupState::new("~/Git-Same/GitHub"); + state.step = SetupStep::SelectOrgs; + state.org_loading = true; + state.org_error = None; + state.auth_token = None; + + handle_key(&mut state, KeyEvent::new(KeyCode::Down, KeyModifiers::NONE)).await; + + assert!(state.org_loading); + assert!(state.org_error.is_none()); +} + #[tokio::test] async fn right_advances_from_provider_step() { let mut state = SetupState::new("~/Git-Same/GitHub"); + state.step = SetupStep::SelectProvider; assert_eq!(state.step, SetupStep::SelectProvider); handle_key( @@ -73,6 +89,68 @@ async fn right_advances_from_provider_step() { assert_eq!(state.step, SetupStep::Authenticate); } +#[tokio::test] +async fn left_in_select_path_returns_to_orgs_step() { + let mut state = SetupState::new("~/Git-Same/GitHub"); + state.step = SetupStep::SelectPath; + state.path_browse_mode = false; + state.path_suggestions_mode = false; + + handle_key(&mut state, KeyEvent::new(KeyCode::Left, KeyModifiers::NONE)).await; + + assert_eq!(state.step, SetupStep::SelectOrgs); + assert!(!state.path_browse_mode); +} + +#[tokio::test] +async fn typing_does_not_edit_base_path_in_select_path_step() { + let mut state = SetupState::new("~/Git-Same/GitHub"); + state.step = SetupStep::SelectPath; + let original = state.base_path.clone(); + + for key in [ + KeyEvent::new(KeyCode::Char('x'), KeyModifiers::NONE), + KeyEvent::new(KeyCode::Backspace, KeyModifiers::NONE), + KeyEvent::new(KeyCode::Delete, KeyModifiers::NONE), 
+ KeyEvent::new(KeyCode::Tab, KeyModifiers::NONE), + KeyEvent::new(KeyCode::Left, KeyModifiers::NONE), + KeyEvent::new(KeyCode::Right, KeyModifiers::NONE), + ] { + handle_key(&mut state, key).await; + } + + assert_eq!(state.base_path, original); + assert_eq!(state.step, SetupStep::SelectPath); +} + +#[tokio::test] +async fn enter_in_suggestions_mode_does_not_change_base_path() { + let mut state = SetupState::new("~/Git-Same/GitHub"); + state.step = SetupStep::SelectPath; + state.path_suggestions_mode = true; + state.path_suggestions = vec![ + PathSuggestion { + path: "~/Git-Same/GitHub".to_string(), + label: "terminal folder".to_string(), + }, + PathSuggestion { + path: "~/Developer".to_string(), + label: "other".to_string(), + }, + ]; + state.path_suggestion_index = 1; + let original = state.base_path.clone(); + + handle_key( + &mut state, + KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE), + ) + .await; + + assert_eq!(state.base_path, original); + assert_eq!(state.step, SetupStep::Confirm); +} + #[tokio::test] async fn b_opens_path_browser_from_suggestions_mode() { let temp = tempdir_in_cwd("gisa-path-browse-"); @@ -100,6 +178,33 @@ async fn b_opens_path_browser_from_suggestions_mode() { && entry.depth == 1)); } +#[tokio::test] +async fn left_on_root_moves_popup_to_parent_directory() { + let mut state = SetupState::new("~/Git-Same/GitHub"); + state.step = SetupStep::SelectPath; + + handle_key( + &mut state, + KeyEvent::new(KeyCode::Char('b'), KeyModifiers::CONTROL), + ) + .await; + assert!(state.path_browse_mode); + assert_eq!(state.path_browse_index, 0); + + let root_before = + std::path::PathBuf::from(shellexpand::tilde(&state.path_browse_entries[0].path).as_ref()); + let Some(parent_before) = root_before.parent().map(std::path::Path::to_path_buf) else { + // Nothing above `/` on this platform. 
+ return; + }; + + handle_key(&mut state, KeyEvent::new(KeyCode::Left, KeyModifiers::NONE)).await; + + let expected = super::tilde_collapse(&parent_before.to_string_lossy()); + assert_eq!(state.path_browse_index, 0); + assert_eq!(state.path_browse_current_dir, expected); +} + #[tokio::test] async fn right_in_path_browse_mode_navigates_tree_without_advancing_step() { let temp = tempdir_in_cwd("gisa-path-nav-"); diff --git a/src/setup/mod.rs b/src/setup/mod.rs index b04b6d8..3fcbbe2 100644 --- a/src/setup/mod.rs +++ b/src/setup/mod.rs @@ -85,6 +85,14 @@ async fn run_wizard( loop { terminal.draw(|frame| ui::render(state, frame))?; + // If we're on the requirements step and checks have not run yet, + // run checks before waiting for key input. + if maybe_start_requirements_checks(state) { + terminal.draw(|frame| ui::render(state, frame))?; + run_requirements_checks(state).await; + continue; + } + // If we're on the orgs step and loading, trigger discovery before waiting for input if state.step == SetupStep::SelectOrgs && state.org_loading { // Render loading state first, then do discovery @@ -124,3 +132,33 @@ async fn run_wizard( } Ok(()) } + +pub(crate) fn maybe_start_requirements_checks(state: &mut SetupState) -> bool { + if state.step != SetupStep::Requirements || state.checks_triggered { + return false; + } + + state.checks_triggered = true; + state.checks_loading = true; + state.config_path_display = crate::config::Config::default_path() + .ok() + .map(|p| p.display().to_string()); + true +} + +pub(crate) fn apply_requirements_check_results( + state: &mut SetupState, + results: Vec, +) { + state.check_results = results; + state.checks_loading = false; +} + +pub(crate) async fn run_requirements_checks(state: &mut SetupState) { + let results = crate::checks::check_requirements().await; + apply_requirements_check_results(state, results); +} + +#[cfg(test)] +#[path = "mod_tests.rs"] +mod tests; diff --git a/src/setup/mod_tests.rs b/src/setup/mod_tests.rs new file mode 
100644 index 0000000..9a7cc65 --- /dev/null +++ b/src/setup/mod_tests.rs @@ -0,0 +1,79 @@ +use super::*; + +fn sample_check(name: &str, passed: bool, critical: bool) -> crate::checks::CheckResult { + crate::checks::CheckResult { + name: name.to_string(), + passed, + message: "ok".to_string(), + suggestion: None, + critical, + } +} + +#[test] +fn maybe_start_requirements_checks_sets_expected_state() { + let mut state = SetupState::new("~/Git-Same/GitHub"); + assert_eq!(state.step, SetupStep::Requirements); + assert!(!state.checks_triggered); + assert!(!state.checks_loading); + + assert!(maybe_start_requirements_checks(&mut state)); + assert!(state.checks_triggered); + assert!(state.checks_loading); + assert_eq!( + state.config_path_display, + crate::config::Config::default_path() + .ok() + .map(|p| p.display().to_string()) + ); +} + +#[test] +fn maybe_start_requirements_checks_noops_when_not_requirements_or_already_triggered() { + let mut state = SetupState::new("~/Git-Same/GitHub"); + state.step = SetupStep::SelectProvider; + assert!(!maybe_start_requirements_checks(&mut state)); + assert!(!state.checks_triggered); + assert!(!state.checks_loading); + + state.step = SetupStep::Requirements; + state.checks_triggered = true; + assert!(!maybe_start_requirements_checks(&mut state)); + assert!(state.checks_triggered); +} + +#[test] +fn apply_requirements_check_results_updates_state_and_clears_loading() { + let mut state = SetupState::new("~/Git-Same/GitHub"); + state.checks_loading = true; + state.check_results = vec![sample_check("old", false, true)]; + + let results = vec![ + sample_check("Git", true, true), + sample_check("SSH Keys", false, false), + ]; + apply_requirements_check_results(&mut state, results); + + assert!(!state.checks_loading); + assert_eq!(state.check_results.len(), 2); + assert_eq!(state.check_results[0].name, "Git"); + assert!(state.check_results[0].passed); + assert_eq!(state.check_results[1].name, "SSH Keys"); + 
assert!(!state.check_results[1].passed); +} + +#[tokio::test] +async fn standalone_requirements_step_auto_runs_checks_end_to_end() { + let mut state = SetupState::new("~/Git-Same/GitHub"); + assert_eq!(state.step, SetupStep::Requirements); + assert!(state.check_results.is_empty()); + assert!(!state.checks_loading); + + assert!(maybe_start_requirements_checks(&mut state)); + run_requirements_checks(&mut state).await; + + assert!(state.checks_triggered); + assert!(!state.checks_loading); + assert!(!state.check_results.is_empty()); + assert_eq!(state.step, SetupStep::Requirements); +} diff --git a/src/setup/screens/complete.rs b/src/setup/screens/complete.rs index b989900..fb784fb 100644 --- a/src/setup/screens/complete.rs +++ b/src/setup/screens/complete.rs @@ -55,11 +55,7 @@ pub fn render(state: &SetupState, frame: &mut Frame, area: Rect) { let lines = vec![ Line::raw(""), - Line::from(Span::styled( - format!(" {}", state.workspace_name), - value_style, - )), - Line::from(Span::styled(format!(" {}", state.base_path), dim)), + Line::from(Span::styled(format!(" {}", state.base_path), value_style)), Line::from(Span::styled( format!( " {} organization{} \u{00b7} {} repos", diff --git a/src/setup/screens/complete_tests.rs b/src/setup/screens/complete_tests.rs index db314e3..ce43e43 100644 --- a/src/setup/screens/complete_tests.rs +++ b/src/setup/screens/complete_tests.rs @@ -28,7 +28,6 @@ fn render_output(state: &SetupState) -> String { #[test] fn render_complete_first_setup_shows_workspace_created() { let mut state = SetupState::with_first_setup("~/Git-Same/GitHub", true); - state.workspace_name = "first-workspace".to_string(); state.base_path = "~/Git-Same/GitHub".to_string(); state.orgs = vec![OrgEntry { name: "acme".to_string(), @@ -38,7 +37,6 @@ fn render_complete_first_setup_shows_workspace_created() { let output = render_output(&state); assert!(output.contains("Workspace Created!")); - assert!(output.contains("first-workspace")); assert!(output.contains("1 
organization")); assert!(output.contains("12 repos")); } @@ -46,11 +44,9 @@ fn render_complete_first_setup_shows_workspace_created() { #[test] fn render_complete_additional_setup_shows_workspace_added() { let mut state = SetupState::with_first_setup("~/Git-Same/GitHub", false); - state.workspace_name = "second-workspace".to_string(); state.base_path = "~/Git-Same/GitHub".to_string(); let output = render_output(&state); assert!(output.contains("Workspace Added!")); - assert!(output.contains("second-workspace")); assert!(output.contains("Press Enter to continue")); } diff --git a/src/setup/screens/confirm.rs b/src/setup/screens/confirm.rs index 5afb97b..d85a11d 100644 --- a/src/setup/screens/confirm.rs +++ b/src/setup/screens/confirm.rs @@ -10,7 +10,7 @@ use ratatui::Frame; pub fn render(state: &SetupState, frame: &mut Frame, area: Rect) { let chunks = Layout::vertical([ Constraint::Length(3), // Title - Constraint::Length(9), // Summary card + Constraint::Length(8), // Summary card Constraint::Min(3), // Info + error ]) .split(area); @@ -64,10 +64,6 @@ pub fn render(state: &SetupState, frame: &mut Frame, area: Rect) { Span::styled(" Organizations ", label_style), Span::styled(&orgs_display, value_style), ]), - Line::from(vec![ - Span::styled(" Workspace ", label_style), - Span::styled(&state.workspace_name, value_style), - ]), ]; let summary = Paragraph::new(summary_lines).block( @@ -81,10 +77,7 @@ pub fn render(state: &SetupState, frame: &mut Frame, area: Rect) { let mut info_lines: Vec = Vec::new(); info_lines.push(Line::raw("")); info_lines.push(Line::from(Span::styled( - format!( - " Config will be saved to: ~/.config/git-same/workspaces/{}/", - state.workspace_name - ), + format!(" Config will be saved to: {}/.git-same/", state.base_path), Style::default().fg(Color::DarkGray), ))); info_lines.push(Line::raw("")); diff --git a/src/setup/screens/confirm_tests.rs b/src/setup/screens/confirm_tests.rs index 4822d84..a2bbb85 100644 --- 
a/src/setup/screens/confirm_tests.rs +++ b/src/setup/screens/confirm_tests.rs @@ -29,7 +29,7 @@ fn render_output(state: &SetupState) -> String { fn render_confirm_shows_workspace_summary() { let mut state = SetupState::new("~/Git-Same/GitHub"); state.username = Some("octocat".to_string()); - state.workspace_name = "personal-workspace".to_string(); + state.base_path = "~/Git-Same/GitHub".to_string(); state.orgs = vec![ OrgEntry { name: "acme".to_string(), @@ -46,14 +46,13 @@ fn render_confirm_shows_workspace_summary() { let output = render_output(&state); assert!(output.contains("Review Workspace Configuration")); assert!(output.contains("@octocat")); - assert!(output.contains("personal-workspace")); assert!(output.contains("acme, tools")); } #[test] fn render_confirm_shows_inline_error_when_present() { let mut state = SetupState::new("~/Git-Same/GitHub"); - state.workspace_name = "broken".to_string(); + state.base_path = "~/Git-Same/GitHub".to_string(); state.error_message = Some("Unable to write config".to_string()); let output = render_output(&state); diff --git a/src/setup/screens/mod.rs b/src/setup/screens/mod.rs index f590757..d6132d1 100644 --- a/src/setup/screens/mod.rs +++ b/src/setup/screens/mod.rs @@ -6,4 +6,8 @@ pub mod confirm; pub mod orgs; pub mod path; pub mod provider; -pub mod welcome; +pub mod requirements; + +#[cfg(test)] +#[path = "mod_tests.rs"] +mod tests; diff --git a/src/setup/screens/mod_tests.rs b/src/setup/screens/mod_tests.rs new file mode 100644 index 0000000..dc76419 --- /dev/null +++ b/src/setup/screens/mod_tests.rs @@ -0,0 +1,12 @@ +use super::*; + +#[test] +fn test_screens_exports_are_accessible() { + let _ = &auth::render; + let _ = &complete::render; + let _ = &confirm::render; + let _ = &orgs::render; + let _ = &path::render; + let _ = &provider::render; + let _ = &requirements::render; +} diff --git a/src/setup/screens/path.rs b/src/setup/screens/path.rs index 25ac598..6886abf 100644 --- a/src/setup/screens/path.rs +++ 
b/src/setup/screens/path.rs @@ -1,4 +1,4 @@ -//! Step 4: Base path input screen with suggestions, tab completion, and live preview. +//! Step 4: Base path screen with folder navigation and live preview. use crate::setup::state::SetupState; use ratatui::layout::{Alignment, Constraint, Layout, Rect}; @@ -36,10 +36,10 @@ pub fn render(state: &SetupState, frame: &mut Frame, area: Rect) { Color::Cyan }; let muted = Color::DarkGray; - let input_text_color = if popup_open || state.path_suggestions_mode { + let input_text_color = if popup_open { Color::DarkGray } else { - Color::Yellow + Color::White }; // Title and info (above input) @@ -52,12 +52,15 @@ pub fn render(state: &SetupState, frame: &mut Frame, area: Rect) { " Repos will be organized as: //", Style::default().fg(muted), )), + Line::from(Span::styled( + " Base path starts at terminal folder. Press [b] to change it.", + Style::default().fg(muted), + )), ]; frame.render_widget(Paragraph::new(title_lines), chunks[0]); // Path input with styled border let input_style = Style::default().fg(input_text_color); - let cursor_pos = state.path_cursor.min(state.base_path.len()); let input_line = Line::from(vec![ Span::styled(" ", Style::default()), @@ -84,13 +87,6 @@ pub fn render(state: &SetupState, frame: &mut Frame, area: Rect) { ); frame.render_widget(input, chunks[1]); - // Show cursor in input mode - if !state.path_suggestions_mode && !state.path_browse_mode { - let cursor_x = chunks[1].x + 1 + 2 + cursor_pos as u16; - let cursor_y = chunks[1].y + 1; - frame.set_cursor_position((cursor_x, cursor_y)); - } - // Suggestions or completions list if state.path_suggestions_mode && !state.path_suggestions.is_empty() { render_suggestions(state, frame, chunks[2]); @@ -131,7 +127,6 @@ fn render_browse_popup(state: &SetupState, frame: &mut Frame, area: Rect) { frame.render_widget(Clear, popup_area); let popup = Block::default() - .title(" Folder Navigator ") .borders(Borders::ALL) .border_type(BorderType::Thick) 
.border_style(Style::default().fg(Color::Cyan)); @@ -140,6 +135,7 @@ fn render_browse_popup(state: &SetupState, frame: &mut Frame, area: Rect) { let show_message = state.path_browse_error.is_some() || state.path_browse_info.is_some(); let rows = Layout::vertical([ + Constraint::Length(3), // header Constraint::Length(1), // path Constraint::Min(3), // tree Constraint::Length(if show_message { 1 } else { 0 }), @@ -147,6 +143,8 @@ fn render_browse_popup(state: &SetupState, frame: &mut Frame, area: Rect) { ]) .split(inner); + render_popup_header(frame, rows[0]); + let path_line = Line::from(vec![ Span::styled("Path: ", Style::default().fg(Color::DarkGray)), Span::styled( @@ -156,9 +154,9 @@ fn render_browse_popup(state: &SetupState, frame: &mut Frame, area: Rect) { .add_modifier(Modifier::BOLD), ), ]); - frame.render_widget(Paragraph::new(path_line), rows[0]); + frame.render_widget(Paragraph::new(path_line), rows[1]); - render_browse_tree(state, frame, rows[1]); + render_browse_tree(state, frame, rows[2]); if show_message { let message = state @@ -179,12 +177,29 @@ fn render_browse_popup(state: &SetupState, frame: &mut Frame, area: Rect) { if let Some((msg, style)) = message { frame.render_widget( Paragraph::new(Line::from(Span::styled(msg, style))), - rows[2], + rows[3], ); } } - render_popup_footer(frame, rows[3]); + render_popup_footer(frame, rows[4]); +} + +fn render_popup_header(frame: &mut Frame, area: Rect) { + let header = Paragraph::new("Local Folder Navigator") + .style( + Style::default() + .fg(Color::Cyan) + .add_modifier(Modifier::BOLD), + ) + .alignment(Alignment::Center) + .block( + Block::default() + .borders(Borders::ALL) + .border_type(BorderType::Rounded) + .border_style(Style::default().fg(Color::DarkGray)), + ); + frame.render_widget(header, area); } fn render_browse_tree(state: &SetupState, frame: &mut Frame, area: Rect) { diff --git a/src/setup/screens/path_tests.rs b/src/setup/screens/path_tests.rs index 1d7e757..27616b4 100644 --- 
a/src/setup/screens/path_tests.rs +++ b/src/setup/screens/path_tests.rs @@ -1,5 +1,5 @@ use super::*; -use crate::setup::state::{PathBrowseEntry, PathSuggestion, SetupState}; +use crate::setup::state::{PathBrowseEntry, SetupState}; use ratatui::backend::TestBackend; use ratatui::Terminal; @@ -26,25 +26,15 @@ fn render_output(state: &SetupState) -> String { } #[test] -fn render_suggestions_mode_shows_suggestions_block() { +fn render_path_input_shows_base_path_without_suggestions() { let mut state = SetupState::new("~/Git-Same/GitHub"); - state.path_suggestions_mode = true; - state.path_suggestions = vec![ - PathSuggestion { - path: "~/Git-Same/GitHub".to_string(), - label: "current directory".to_string(), - }, - PathSuggestion { - path: "~/Developer".to_string(), - label: "recommended".to_string(), - }, - ]; - state.path_suggestion_index = 1; + state.path_suggestions_mode = false; + state.path_browse_mode = false; let output = render_output(&state); - assert!(output.contains("Suggestions:")); - assert!(output.contains("~/Developer")); - assert!(output.contains("recommended")); + assert!(output.contains("Base Path")); + assert!(output.contains("~/Git-Same/GitHub")); + assert!(!output.contains("Suggestions:")); } #[test] @@ -72,7 +62,7 @@ fn render_browse_mode_shows_folder_navigator_context() { state.path_browse_index = 1; let output = render_output(&state); - assert!(output.contains("Folder Navigator")); + assert!(output.contains("Local Folder Navigator")); assert!(output.contains("Path:")); assert!(output.contains("~/Projects")); assert!(output.contains("client")); diff --git a/src/setup/screens/requirements.rs b/src/setup/screens/requirements.rs new file mode 100644 index 0000000..8ceb551 --- /dev/null +++ b/src/setup/screens/requirements.rs @@ -0,0 +1,133 @@ +//! Step 1: System requirements check. 
+ +use crate::setup::state::SetupState; +use ratatui::layout::{Constraint, Layout, Rect}; +use ratatui::style::{Color, Modifier, Style}; +use ratatui::text::{Line, Span}; +use ratatui::widgets::Paragraph; +use ratatui::Frame; + +pub fn render(state: &SetupState, frame: &mut Frame, area: Rect) { + let chunks = Layout::vertical([ + Constraint::Length(2), // Title + Constraint::Min(8), // Check results or spinner + Constraint::Length(3), // Config status + action hint + ]) + .split(area); + + // Title + let title_text = if state.is_first_setup { + "Welcome to Git-Same" + } else { + "System Requirements" + }; + let title = Paragraph::new(title_text).style( + Style::default() + .fg(Color::Cyan) + .add_modifier(Modifier::BOLD), + ); + frame.render_widget(title, chunks[0]); + + // Check list or spinner + if state.checks_loading { + let spinner_frames = ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"]; + let frame_idx = (state.tick_count as usize / 2) % spinner_frames.len(); + let spinner = spinner_frames[frame_idx]; + let loading = Paragraph::new(Line::from(vec![ + Span::styled( + format!(" {} ", spinner), + Style::default().fg(Color::Yellow), + ), + Span::styled( + "Checking requirements...", + Style::default().fg(Color::DarkGray), + ), + ])); + frame.render_widget(loading, chunks[1]); + } else if state.check_results.is_empty() { + let placeholder = Paragraph::new(Line::from(Span::styled( + " Preparing checks...", + Style::default().fg(Color::DarkGray), + ))); + frame.render_widget(placeholder, chunks[1]); + } else { + let lines: Vec = state + .check_results + .iter() + .map(|check| { + let (icon, icon_color) = if check.passed { + (" ✓ ", Color::Rgb(21, 128, 61)) + } else if check.critical { + (" ✗ ", Color::Red) + } else { + (" ! 
", Color::Yellow) + }; + let msg_color = if check.passed { + Color::DarkGray + } else if check.critical { + Color::Red + } else { + Color::Yellow + }; + Line::from(vec![ + Span::styled( + icon, + Style::default().fg(icon_color).add_modifier(Modifier::BOLD), + ), + Span::styled( + format!("{:<18}", &check.name), + Style::default().fg(Color::White), + ), + Span::styled(" — ", Style::default().fg(Color::DarkGray)), + Span::styled(&check.message, Style::default().fg(msg_color)), + ]) + }) + .collect(); + frame.render_widget(Paragraph::new(lines), chunks[1]); + } + + // Config status + action hint + let mut status_lines: Vec = Vec::new(); + + if let Some(ref path) = state.config_path_display { + let (label, color) = if state.config_was_created { + (" Config created at ", Color::Rgb(21, 128, 61)) + } else { + (" Config found at ", Color::DarkGray) + }; + status_lines.push(Line::from(vec![ + Span::styled(label, Style::default().fg(color)), + Span::styled(path, Style::default().fg(Color::Cyan)), + ])); + } + + if !state.check_results.is_empty() && !state.checks_loading { + let has_critical_fail = state.check_results.iter().any(|r| r.critical && !r.passed); + if has_critical_fail { + status_lines.push(Line::from(Span::styled( + " Fix critical requirements above to continue.", + Style::default().fg(Color::Red), + ))); + } else { + status_lines.push(Line::from(vec![ + Span::styled( + " All requirements met. 
Press ", + Style::default().fg(Color::DarkGray), + ), + Span::styled( + "[Enter]", + Style::default() + .fg(Color::Rgb(37, 99, 235)) + .add_modifier(Modifier::BOLD), + ), + Span::styled(" to continue.", Style::default().fg(Color::DarkGray)), + ])); + } + } + + frame.render_widget(Paragraph::new(status_lines), chunks[2]); +} + +#[cfg(test)] +#[path = "requirements_tests.rs"] +mod tests; diff --git a/src/setup/screens/requirements_tests.rs b/src/setup/screens/requirements_tests.rs new file mode 100644 index 0000000..25bf7f2 --- /dev/null +++ b/src/setup/screens/requirements_tests.rs @@ -0,0 +1,96 @@ +use super::*; +use crate::setup::state::SetupState; +use ratatui::backend::TestBackend; +use ratatui::Terminal; + +fn render_output(state: &SetupState) -> String { + let backend = TestBackend::new(100, 20); + let mut terminal = Terminal::new(backend).unwrap(); + + terminal + .draw(|frame| { + let area = frame.area(); + render(state, frame, area); + }) + .unwrap(); + + let buffer = terminal.backend().buffer(); + let mut text = String::new(); + for y in 0..buffer.area.height { + for x in 0..buffer.area.width { + text.push_str(buffer[(x, y)].symbol()); + } + text.push('\n'); + } + text +} + +#[test] +fn render_first_setup_shows_welcome_title() { + let state = SetupState::with_first_setup("~/Git-Same/GitHub", true); + let output = render_output(&state); + assert!(output.contains("Welcome to Git-Same")); +} + +#[test] +fn render_non_first_setup_shows_requirements_title() { + let state = SetupState::with_first_setup("~/Git-Same/GitHub", false); + let output = render_output(&state); + assert!(output.contains("System Requirements")); +} + +#[test] +fn render_loading_shows_spinner() { + let mut state = SetupState::new("~/Git-Same/GitHub"); + state.checks_loading = true; + let output = render_output(&state); + assert!(output.contains("Checking requirements")); +} + +#[test] +fn render_passed_checks_shows_continue_hint() { + let mut state = SetupState::new("~/Git-Same/GitHub"); + 
state.checks_loading = false; + state.check_results = vec![crate::checks::CheckResult { + name: "Git".to_string(), + passed: true, + message: "git 2.43.0".to_string(), + suggestion: None, + critical: true, + }]; + let output = render_output(&state); + assert!(output.contains("All requirements met")); +} + +#[test] +fn render_failed_critical_shows_fix_hint() { + let mut state = SetupState::new("~/Git-Same/GitHub"); + state.checks_loading = false; + state.check_results = vec![crate::checks::CheckResult { + name: "Git".to_string(), + passed: false, + message: "not found".to_string(), + suggestion: None, + critical: true, + }]; + let output = render_output(&state); + assert!(output.contains("Fix critical requirements")); +} + +#[test] +fn render_config_created_shows_created_label() { + let mut state = SetupState::new("~/Git-Same/GitHub"); + state.config_was_created = true; + state.config_path_display = Some("~/.config/git-same/config.toml".to_string()); + let output = render_output(&state); + assert!(output.contains("Config created at")); +} + +#[test] +fn render_config_found_shows_found_label() { + let mut state = SetupState::new("~/Git-Same/GitHub"); + state.config_was_created = false; + state.config_path_display = Some("~/.config/git-same/config.toml".to_string()); + let output = render_output(&state); + assert!(output.contains("Config found at")); +} diff --git a/src/setup/screens/welcome.rs b/src/setup/screens/welcome.rs deleted file mode 100644 index 7c24aae..0000000 --- a/src/setup/screens/welcome.rs +++ /dev/null @@ -1,79 +0,0 @@ -//! Step 0: Welcome screen (first-time setup only). 
- -use crate::setup::state::SetupState; -use ratatui::layout::{Constraint, Layout, Rect}; -use ratatui::style::{Color, Modifier, Style}; -use ratatui::text::{Line, Span}; -use ratatui::widgets::Paragraph; -use ratatui::Frame; - -pub fn render(_state: &SetupState, frame: &mut Frame, area: Rect) { - let chunks = Layout::vertical([ - Constraint::Length(3), // Title - Constraint::Min(10), // Content - Constraint::Length(2), // Help - ]) - .split(area); - - // Title - let title = Paragraph::new("Welcome to Git-Same").style( - Style::default() - .fg(Color::Cyan) - .add_modifier(Modifier::BOLD), - ); - frame.render_widget(title, chunks[0]); - - // Content - let cyan = Style::default().fg(Color::Cyan); - let dim = Style::default().fg(Color::DarkGray); - let white = Style::default().fg(Color::White); - - let lines = vec![ - Line::raw(""), - Line::from(Span::styled( - " Git-Same mirrors your GitHub organization structure", - white, - )), - Line::from(Span::styled( - " to your local file system with parallel cloning", - white, - )), - Line::from(Span::styled(" and syncing.", white)), - Line::raw(""), - Line::from(Span::styled(" This wizard will help you:", dim)), - Line::raw(""), - Line::from(vec![ - Span::styled(" 1. ", cyan), - Span::styled("Connect to your Git provider", white), - ]), - Line::from(vec![ - Span::styled(" 2. ", cyan), - Span::styled("Authenticate your account", white), - ]), - Line::from(vec![ - Span::styled(" 3. ", cyan), - Span::styled("Select which organizations to sync", white), - ]), - Line::from(vec![ - Span::styled(" 4. 
", cyan), - Span::styled("Choose where to store repos", white), - ]), - Line::raw(""), - Line::from(Span::styled( - " Press Enter to get started", - Style::default().fg(Color::Yellow), - )), - ]; - - let content = Paragraph::new(lines); - frame.render_widget(content, chunks[1]); - - // Help - let help = - Paragraph::new("Enter Start Esc Cancel").style(Style::default().fg(Color::DarkGray)); - frame.render_widget(help, chunks[2]); -} - -#[cfg(test)] -#[path = "welcome_tests.rs"] -mod tests; diff --git a/src/setup/screens/welcome_tests.rs b/src/setup/screens/welcome_tests.rs deleted file mode 100644 index ee30f63..0000000 --- a/src/setup/screens/welcome_tests.rs +++ /dev/null @@ -1,37 +0,0 @@ -use super::*; -use crate::setup::state::SetupState; -use ratatui::backend::TestBackend; -use ratatui::Terminal; - -fn render_output(state: &SetupState) -> String { - let backend = TestBackend::new(100, 20); - let mut terminal = Terminal::new(backend).unwrap(); - - terminal - .draw(|frame| { - let area = frame.area(); - render(state, frame, area); - }) - .unwrap(); - - let buffer = terminal.backend().buffer(); - let mut text = String::new(); - for y in 0..buffer.area.height { - for x in 0..buffer.area.width { - text.push_str(buffer[(x, y)].symbol()); - } - text.push('\n'); - } - text -} - -#[test] -fn render_welcome_shows_intro_and_steps() { - let state = SetupState::with_first_setup("~/Git-Same/GitHub", true); - - let output = render_output(&state); - assert!(output.contains("Welcome to Git-Same")); - assert!(output.contains("Connect to your Git provider")); - assert!(output.contains("Authenticate your account")); - assert!(output.contains("Press Enter to get started")); -} diff --git a/src/setup/state.rs b/src/setup/state.rs index 41210ce..24c18e0 100644 --- a/src/setup/state.rs +++ b/src/setup/state.rs @@ -6,19 +6,19 @@ use crate::types::ProviderKind; /// Which step of the wizard is active. 
#[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum SetupStep { - /// Step 0: Welcome screen (first-time only). - Welcome, - /// Step 1: Select a provider. + /// Step 1: System requirements check. + Requirements, + /// Step 2: Select a provider. SelectProvider, - /// Step 2: Authenticate and detect username. + /// Step 3: Authenticate and detect username. Authenticate, - /// Step 3: Discover and select organizations. + /// Step 4: Discover and select organizations. SelectOrgs, - /// Step 4: Enter the base path. + /// Step 5: Enter the base path. SelectPath, - /// Step 5: Review and save. + /// Step 6: Review and save. Confirm, - /// Step 6: Success / completion screen. + /// Success / completion screen. Complete, } @@ -39,7 +39,7 @@ pub enum SetupOutcome { Cancelled, } -/// Represents one of the provider choices shown in step 1. +/// Represents one of the provider choices shown in step 2. #[derive(Debug, Clone)] pub struct ProviderChoice { pub kind: ProviderKind, @@ -73,22 +73,30 @@ pub struct SetupState { /// Outcome when done. 
pub outcome: Option, - // Step 1: Provider selection + // Step 1: Requirements check + pub check_results: Vec, + pub checks_loading: bool, + pub checks_triggered: bool, + pub config_path_display: Option, + pub config_was_created: bool, + + // Step 2: Provider selection pub provider_choices: Vec, pub provider_index: usize, - // Step 2: Authentication + // Step 3: Authentication pub auth_status: AuthStatus, pub username: Option, pub auth_token: Option, - // Step 3: Org selection + // Step 4: Org selection pub orgs: Vec, pub org_index: usize, pub org_loading: bool, + pub org_discovery_in_progress: bool, pub org_error: Option, - // Step 4: Path + // Step 5: Path pub base_path: String, pub path_cursor: usize, pub path_suggestions_mode: bool, @@ -104,21 +112,17 @@ pub struct SetupState { pub path_browse_error: Option, pub path_browse_info: Option, - // Step 5: Confirm - pub workspace_name: String, - pub name_editing: bool, - // General pub error_message: Option, // Animation / UX /// Tick counter for spinner and animation effects. pub tick_count: u64, - /// Whether this is the first workspace setup (controls Welcome screen). + /// Whether this is the first workspace setup (controls UI text). pub is_first_setup: bool, } -/// Authentication status during step 2. +/// Authentication status during step 3. #[derive(Debug, Clone, PartialEq, Eq)] pub enum AuthStatus { /// Haven't checked yet. @@ -133,9 +137,15 @@ pub enum AuthStatus { /// Collapse an absolute path's home directory prefix into `~`. 
pub fn tilde_collapse(path: &str) -> String { - if let Ok(home) = std::env::var("HOME") { - if path.starts_with(&home) { - return format!("~{}", &path[home.len()..]); + let home_var = std::env::var("HOME").or_else(|_| std::env::var("USERPROFILE")); + if let Ok(home) = home_var { + let home_path = std::path::Path::new(&home); + let p = std::path::Path::new(path); + if let Ok(suffix) = p.strip_prefix(home_path) { + if suffix.as_os_str().is_empty() { + return "~".to_string(); + } + return format!("~{}{}", std::path::MAIN_SEPARATOR, suffix.to_string_lossy()); } } path.to_string() @@ -143,13 +153,11 @@ pub fn tilde_collapse(path: &str) -> String { impl SetupState { /// Create initial wizard state. - /// - /// If `is_first_setup` is true, the wizard starts with a Welcome screen. pub fn new(default_base_path: &str) -> Self { Self::with_first_setup(default_base_path, false) } - /// Create wizard state, optionally starting with the Welcome screen. + /// Create wizard state, optionally marking as first-time setup. 
pub fn with_first_setup(default_base_path: &str, is_first_setup: bool) -> Self { let provider_choices = vec![ ProviderChoice { @@ -187,16 +195,15 @@ impl SetupState { let base_path = default_base_path.to_string(); let path_cursor = base_path.len(); - let step = if is_first_setup { - SetupStep::Welcome - } else { - SetupStep::SelectProvider - }; - Self { - step, + step: SetupStep::Requirements, should_quit: false, outcome: None, + check_results: Vec::new(), + checks_loading: false, + checks_triggered: false, + config_path_display: None, + config_was_created: false, provider_choices, provider_index: 0, auth_status: AuthStatus::Pending, @@ -204,7 +211,7 @@ impl SetupState { auth_token: None, base_path, path_cursor, - path_suggestions_mode: true, + path_suggestions_mode: false, path_suggestions: Vec::new(), path_suggestion_index: 0, path_completions: Vec::new(), @@ -219,9 +226,8 @@ impl SetupState { orgs: Vec::new(), org_index: 0, org_loading: false, + org_discovery_in_progress: false, org_error: None, - workspace_name: String::new(), - name_editing: false, error_message: None, tick_count: 0, is_first_setup, @@ -254,49 +260,14 @@ impl SetupState { /// Populate the path suggestions list for the SelectPath step. pub fn populate_path_suggestions(&mut self) { - let mut suggestions = Vec::new(); - - // 1. Current path (always first — this is the default) - suggestions.push(PathSuggestion { + // Keep step 5 path fixed unless the user explicitly selects a folder + // from the folder navigator popup. + self.path_suggestions = vec![PathSuggestion { path: self.base_path.clone(), - label: "current directory".to_string(), - }); - - // 2. 
Common developer directories (only if they exist and differ) - for candidate in &[ - "~/Git-Same/GitHub", - "~/Developer", - "~/Projects", - "~/repos", - "~/code", - ] { - let expanded = shellexpand::tilde(candidate); - let path = std::path::Path::new(expanded.as_ref()); - let expanded_candidate = expanded.as_ref().to_string(); - if path.is_dir() - && !suggestions.iter().any(|s| { - s.path == *candidate - || shellexpand::tilde(&s.path).as_ref() == expanded_candidate - }) - { - suggestions.push(PathSuggestion { - path: candidate.to_string(), - label: String::new(), - }); - } - } - - // 3. Home directory (always last) - if !suggestions.iter().any(|s| s.path == "~") { - suggestions.push(PathSuggestion { - path: "~".to_string(), - label: "home".to_string(), - }); - } - - self.path_suggestions = suggestions; + label: "terminal folder".to_string(), + }]; self.path_suggestion_index = 0; - self.path_suggestions_mode = true; + self.path_suggestions_mode = false; self.path_browse_mode = false; self.path_browse_current_dir.clear(); self.path_browse_entries.clear(); @@ -304,32 +275,46 @@ impl SetupState { self.path_browse_show_hidden = false; self.path_browse_error = None; self.path_browse_info = None; + self.path_completions.clear(); + self.path_completion_index = 0; + self.path_cursor = self.base_path.len(); + } + + /// Whether all critical requirement checks have passed. + pub fn requirements_passed(&self) -> bool { + !self.check_results.is_empty() + && self + .check_results + .iter() + .filter(|r| r.critical) + .all(|r| r.passed) } - /// The 1-based step number for display (Welcome is not counted). + /// The 1-based step number for display. 
pub fn step_number(&self) -> usize { match self.step { - SetupStep::Welcome => 0, - SetupStep::SelectProvider => 1, - SetupStep::Authenticate => 2, - SetupStep::SelectOrgs => 3, - SetupStep::SelectPath => 4, - SetupStep::Confirm => 5, - SetupStep::Complete => 5, + SetupStep::Requirements => 1, + SetupStep::SelectProvider => 2, + SetupStep::Authenticate => 3, + SetupStep::SelectOrgs => 4, + SetupStep::SelectPath => 5, + SetupStep::Confirm => 6, + SetupStep::Complete => 6, } } - /// Total number of numbered steps (excluding Welcome and Complete). - pub const TOTAL_STEPS: usize = 5; + /// Total number of numbered steps (excluding Complete). + pub const TOTAL_STEPS: usize = 6; /// Move to the next step. pub fn next_step(&mut self) { self.error_message = None; self.step = match self.step { - SetupStep::Welcome => SetupStep::SelectProvider, + SetupStep::Requirements => SetupStep::SelectProvider, SetupStep::SelectProvider => SetupStep::Authenticate, SetupStep::Authenticate => { self.org_loading = true; + self.org_discovery_in_progress = false; self.orgs.clear(); self.org_index = 0; self.org_error = None; @@ -339,15 +324,7 @@ impl SetupState { self.populate_path_suggestions(); SetupStep::SelectPath } - SetupStep::SelectPath => { - // Derive workspace name from base_path + provider - let path = std::path::Path::new(&self.base_path); - let base = - crate::config::WorkspaceManager::name_from_path(path, self.selected_provider()); - self.workspace_name = - crate::config::WorkspaceManager::unique_name(&base).unwrap_or(base); - SetupStep::Confirm - } + SetupStep::SelectPath => SetupStep::Confirm, SetupStep::Confirm => SetupStep::Complete, SetupStep::Complete => { self.outcome = Some(SetupOutcome::Completed); @@ -361,16 +338,12 @@ impl SetupState { pub fn prev_step(&mut self) { self.error_message = None; self.step = match self.step { - SetupStep::Welcome => { - self.outcome = Some(SetupOutcome::Cancelled); - self.should_quit = true; - SetupStep::Welcome - } - 
SetupStep::SelectProvider => { + SetupStep::Requirements => { self.outcome = Some(SetupOutcome::Cancelled); self.should_quit = true; - SetupStep::SelectProvider + SetupStep::Requirements } + SetupStep::SelectProvider => SetupStep::Requirements, SetupStep::Authenticate => SetupStep::SelectProvider, SetupStep::SelectOrgs => SetupStep::Authenticate, SetupStep::SelectPath => SetupStep::SelectOrgs, diff --git a/src/setup/state_tests.rs b/src/setup/state_tests.rs index 09ce5e0..0a15f9e 100644 --- a/src/setup/state_tests.rs +++ b/src/setup/state_tests.rs @@ -3,13 +3,13 @@ use super::*; #[test] fn test_new_state() { let state = SetupState::new("~/Git-Same/GitHub"); - assert_eq!(state.step, SetupStep::SelectProvider); + assert_eq!(state.step, SetupStep::Requirements); assert!(!state.should_quit); assert_eq!(state.base_path, "~/Git-Same/GitHub"); assert_eq!(state.provider_choices.len(), 6); assert!(state.provider_choices[0].available); assert!(!state.provider_choices[2].available); // GitLab - assert!(state.path_suggestions_mode); + assert!(!state.path_suggestions_mode); assert!(!state.path_browse_mode); assert!(state.path_browse_entries.is_empty()); assert!(!state.path_browse_show_hidden); @@ -18,19 +18,22 @@ fn test_new_state() { assert!(state.path_suggestions.is_empty()); assert_eq!(state.tick_count, 0); assert!(!state.is_first_setup); + assert!(!state.checks_triggered); + assert!(!state.checks_loading); + assert!(!state.config_was_created); } #[test] -fn test_first_setup_starts_with_welcome() { +fn test_first_setup_starts_with_requirements() { let state = SetupState::with_first_setup("~/Git-Same/GitHub", true); - assert_eq!(state.step, SetupStep::Welcome); + assert_eq!(state.step, SetupStep::Requirements); assert!(state.is_first_setup); } #[test] -fn test_non_first_setup_starts_with_provider() { +fn test_non_first_setup_starts_with_requirements() { let state = SetupState::with_first_setup("~/Git-Same/GitHub", false); - assert_eq!(state.step, SetupStep::SelectProvider); + 
assert_eq!(state.step, SetupStep::Requirements); assert!(!state.is_first_setup); } @@ -38,14 +41,10 @@ fn test_non_first_setup_starts_with_provider() { fn test_populate_path_suggestions() { let mut state = SetupState::new("~/test-path"); state.populate_path_suggestions(); - // First suggestion is always the current directory (default) - assert!(!state.path_suggestions.is_empty()); + assert_eq!(state.path_suggestions.len(), 1); assert_eq!(state.path_suggestions[0].path, "~/test-path"); - assert_eq!(state.path_suggestions[0].label, "current directory"); - // Last suggestion is always home - let last = state.path_suggestions.last().unwrap(); - assert_eq!(last.path, "~"); - assert_eq!(last.label, "home"); + assert_eq!(state.path_suggestions[0].label, "terminal folder"); + assert!(!state.path_suggestions_mode); } #[test] @@ -60,28 +59,37 @@ fn test_tilde_collapse() { #[test] fn test_step_navigation() { let mut state = SetupState::new("~/Git-Same/GitHub"); - assert_eq!(state.step, SetupStep::SelectProvider); + assert_eq!(state.step, SetupStep::Requirements); state.next_step(); - assert_eq!(state.step, SetupStep::Authenticate); + assert_eq!(state.step, SetupStep::SelectProvider); state.next_step(); - assert_eq!(state.step, SetupStep::SelectOrgs); + assert_eq!(state.step, SetupStep::Authenticate); state.prev_step(); - assert_eq!(state.step, SetupStep::Authenticate); + assert_eq!(state.step, SetupStep::SelectProvider); } #[test] -fn test_welcome_navigation() { +fn test_requirements_to_provider() { let mut state = SetupState::with_first_setup("~/Git-Same/GitHub", true); - assert_eq!(state.step, SetupStep::Welcome); + assert_eq!(state.step, SetupStep::Requirements); state.next_step(); assert_eq!(state.step, SetupStep::SelectProvider); assert!(!state.should_quit); } +#[test] +fn test_provider_back_to_requirements() { + let mut state = SetupState::new("~/Git-Same/GitHub"); + state.step = SetupStep::SelectProvider; + state.prev_step(); + assert_eq!(state.step, 
SetupStep::Requirements); + assert!(!state.should_quit); +} + #[test] fn test_confirm_goes_to_complete() { let mut state = SetupState::new("~/Git-Same/GitHub"); @@ -125,7 +133,7 @@ fn test_selected_orgs() { } #[test] -fn test_cancel_from_first_step() { +fn test_cancel_from_requirements() { let mut state = SetupState::new("~/Git-Same/GitHub"); state.prev_step(); assert!(state.should_quit); @@ -133,27 +141,65 @@ fn test_cancel_from_first_step() { } #[test] -fn test_cancel_from_welcome() { - let mut state = SetupState::with_first_setup("~/Git-Same/GitHub", true); - state.prev_step(); - assert!(state.should_quit); - assert!(matches!(state.outcome, Some(SetupOutcome::Cancelled))); +fn test_requirements_passed_all_critical_pass() { + let mut state = SetupState::new("~/Git-Same/GitHub"); + state.check_results = vec![ + crate::checks::CheckResult { + name: "Git".to_string(), + passed: true, + message: "ok".to_string(), + suggestion: None, + critical: true, + }, + crate::checks::CheckResult { + name: "SSH".to_string(), + passed: false, + message: "not found".to_string(), + suggestion: None, + critical: false, // warning only + }, + ]; + assert!(state.requirements_passed()); +} + +#[test] +fn test_requirements_passed_critical_fail() { + let mut state = SetupState::new("~/Git-Same/GitHub"); + state.check_results = vec![crate::checks::CheckResult { + name: "Git".to_string(), + passed: false, + message: "not found".to_string(), + suggestion: None, + critical: true, + }]; + assert!(!state.requirements_passed()); +} + +#[test] +fn test_requirements_passed_empty_is_false() { + let state = SetupState::new("~/Git-Same/GitHub"); + assert!(!state.requirements_passed()); } #[test] fn test_step_number() { - let mut state = SetupState::with_first_setup("~/Git-Same/GitHub", true); - assert_eq!(state.step_number(), 0); + let mut state = SetupState::new("~/Git-Same/GitHub"); + assert_eq!(state.step_number(), 1); // Requirements state.step = SetupStep::SelectProvider; - 
assert_eq!(state.step_number(), 1); - state.step = SetupStep::Authenticate; assert_eq!(state.step_number(), 2); - state.step = SetupStep::SelectOrgs; + state.step = SetupStep::Authenticate; assert_eq!(state.step_number(), 3); - state.step = SetupStep::SelectPath; + state.step = SetupStep::SelectOrgs; assert_eq!(state.step_number(), 4); - state.step = SetupStep::Confirm; + state.step = SetupStep::SelectPath; assert_eq!(state.step_number(), 5); + state.step = SetupStep::Confirm; + assert_eq!(state.step_number(), 6); state.step = SetupStep::Complete; - assert_eq!(state.step_number(), 5); + assert_eq!(state.step_number(), 6); +} + +#[test] +fn test_total_steps_is_six() { + assert_eq!(SetupState::TOTAL_STEPS, 6); } diff --git a/src/setup/ui.rs b/src/setup/ui.rs index 2a2c5ba..6313a42 100644 --- a/src/setup/ui.rs +++ b/src/setup/ui.rs @@ -16,7 +16,7 @@ pub fn render(state: &SetupState, frame: &mut Frame) { let path_popup_active = state.step == SetupStep::SelectPath && state.path_browse_mode; // Graceful degradation for small terminals - let show_banner = height >= 30 && !path_popup_active; + let show_banner = height >= 30; let show_progress = height >= 20; let mut constraints = Vec::new(); @@ -46,27 +46,23 @@ pub fn render(state: &SetupState, frame: &mut Frame) { } // Title - let title_text = if state.step == SetupStep::Welcome { - "" - } else if state.is_first_setup { + let title_text = if state.is_first_setup { "Workspace Setup" } else { "New Workspace" }; - if !title_text.is_empty() { - let title = Paragraph::new(title_text) - .style( - Style::default() - .fg(if path_popup_active { - Color::DarkGray - } else { - Color::White - }) - .add_modifier(Modifier::BOLD), - ) - .alignment(Alignment::Center); - frame.render_widget(title, chunks[idx]); - } + let title = Paragraph::new(title_text) + .style( + Style::default() + .fg(if path_popup_active { + Color::DarkGray + } else { + Color::White + }) + .add_modifier(Modifier::BOLD), + ) + .alignment(Alignment::Center); + 
frame.render_widget(title, chunks[idx]); idx += 1; // Step progress indicator @@ -90,7 +86,7 @@ pub fn render(state: &SetupState, frame: &mut Frame) { idx += 1; match state.step { - SetupStep::Welcome => screens::welcome::render(state, frame, content_inner), + SetupStep::Requirements => screens::requirements::render(state, frame, content_inner), SetupStep::SelectProvider => screens::provider::render(state, frame, content_inner), SetupStep::Authenticate => screens::auth::render(state, frame, content_inner), SetupStep::SelectOrgs => screens::orgs::render(state, frame, content_inner), @@ -105,8 +101,8 @@ pub fn render(state: &SetupState, frame: &mut Frame) { /// Render the step progress indicator with nodes and connectors. fn render_step_progress(state: &SetupState, frame: &mut Frame, area: Rect, dimmed: bool) { - let steps = ["Provider", "Auth", "Orgs", "Path", "Save"]; - let current = state.step_number(); // 0 for Welcome, 1-5 for steps, 5 for Complete + let steps = ["Reqs", "Provider", "Auth", "Orgs", "Path", "Save"]; + let current = state.step_number(); // 1-6 for steps, 6 for Complete let green = if dimmed { Style::default().fg(Color::DarkGray) @@ -125,16 +121,19 @@ fn render_step_progress(state: &SetupState, frame: &mut Frame, area: Rect, dimme }; let dim = Style::default().fg(Color::DarkGray); + // 6 nodes + 5 connectors = 11 segments. 
Ratio: 3/28 per node, 2/28 per connector (6*3 + 5*2 = 28) let segments = Layout::horizontal([ - Constraint::Ratio(3, 23), - Constraint::Ratio(2, 23), - Constraint::Ratio(3, 23), - Constraint::Ratio(2, 23), - Constraint::Ratio(3, 23), - Constraint::Ratio(2, 23), - Constraint::Ratio(3, 23), - Constraint::Ratio(2, 23), - Constraint::Ratio(3, 23), + Constraint::Ratio(3, 28), + Constraint::Ratio(2, 28), + Constraint::Ratio(3, 28), + Constraint::Ratio(2, 28), + Constraint::Ratio(3, 28), + Constraint::Ratio(2, 28), + Constraint::Ratio(3, 28), + Constraint::Ratio(2, 28), + Constraint::Ratio(3, 28), + Constraint::Ratio(2, 28), + Constraint::Ratio(3, 28), ]) .split(area); @@ -244,11 +243,23 @@ fn render_status_bar(state: &SetupState, frame: &mut Frame, area: Rect) { }; let top_center = match state.step { - SetupStep::Welcome => vec![ - Span::styled("Press ", dim), - Span::styled("[Enter]", blue), - Span::styled(" to get started", dim), - ], + SetupStep::Requirements => { + if state.checks_loading { + vec![Span::styled("Checking system requirements...", yellow)] + } else if state.check_results.iter().any(|r| r.critical && !r.passed) { + vec![Span::styled( + "Fix critical requirements to continue", + yellow, + )] + } else if !state.check_results.is_empty() { + vec![ + Span::styled("[Enter]", blue), + Span::styled(" Continue to setup", dim), + ] + } else { + vec![Span::styled("Preparing...", dim)] + } + } SetupStep::SelectProvider => vec![ Span::styled("[↑] [↓]", blue), Span::styled(" Select provider", dim), @@ -270,19 +281,10 @@ fn render_status_bar(state: &SetupState, frame: &mut Frame, area: Rect) { SetupStep::SelectPath => { if state.path_browse_mode { vec![Span::styled("Folder popup active", dim)] - } else if state.path_suggestions_mode { - vec![ - Span::styled("[Tab]", blue), - Span::styled(" Edit ", dim), - Span::styled("[b]", blue), - Span::styled(" Browse", dim), - ] } else { vec![ - Span::styled("[Tab]", blue), - Span::styled(" Complete ", dim), - 
Span::styled("[Ctrl+b]", blue), - Span::styled(" Browse", dim), + Span::styled("[b]", blue), + Span::styled(" Open Folder Navigator", dim), ] } } @@ -338,21 +340,14 @@ fn render_status_bar(state: &SetupState, frame: &mut Frame, area: Rect) { SetupStep::SelectPath => { if state.path_browse_mode { vec![Span::styled("Use popup arrows and Enter", dim)] - } else if state.path_suggestions_mode { - vec![ - Span::styled("[↑] [↓]", blue), - Span::styled(" Move ", dim), - Span::styled("[←] [→]", blue), - Span::styled(" Step ", dim), - Span::styled("[Enter]", blue), - Span::styled(" Next Step", dim), - ] } else { vec![ - Span::styled("[←] [→]", blue), - Span::styled(" Step ", dim), + Span::styled("[←]", blue), + Span::styled(" Back Step ", dim), Span::styled("[Enter]", blue), - Span::styled(" Next Step", dim), + Span::styled(" Next Step ", dim), + Span::styled("[b]", blue), + Span::styled(" Browse folders", dim), ] } } diff --git a/src/setup/ui_tests.rs b/src/setup/ui_tests.rs index 2b6f4c7..78fa391 100644 --- a/src/setup/ui_tests.rs +++ b/src/setup/ui_tests.rs @@ -1,4 +1,24 @@ use super::*; +use crate::setup::state::{SetupState, SetupStep}; +use ratatui::backend::TestBackend; +use ratatui::Terminal; + +fn render_output(state: &SetupState, width: u16, height: u16) -> String { + let backend = TestBackend::new(width, height); + let mut terminal = Terminal::new(backend).unwrap(); + + terminal.draw(|frame| render(state, frame)).unwrap(); + + let buffer = terminal.backend().buffer(); + let mut text = String::new(); + for y in 0..buffer.area.height { + for x in 0..buffer.area.width { + text.push_str(buffer[(x, y)].symbol()); + } + text.push('\n'); + } + text +} #[test] fn center_cell_matches_width() { @@ -12,3 +32,14 @@ fn connector_cell_matches_width() { assert_eq!(connector_cell(7, true).chars().count(), 7); assert_eq!(connector_cell(7, false).chars().count(), 7); } + +#[test] +fn render_keeps_banner_visible_while_path_popup_is_open() { + let mut state = 
SetupState::new("~/Git-Same/GitHub"); + state.step = SetupStep::SelectPath; + state.path_browse_mode = true; + + let output = render_output(&state, 120, 40); + assert!(output.contains("Local Folder Navigator")); + assert!(output.contains("██████╗")); +} diff --git a/src/tui/app.rs b/src/tui/app.rs index cbffa8e..0a48ffb 100644 --- a/src/tui/app.rs +++ b/src/tui/app.rs @@ -12,7 +12,6 @@ use std::time::Instant; /// Which screen is active. #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum Screen { - SystemCheck, WorkspaceSetup, Workspaces, Dashboard, @@ -162,6 +161,7 @@ pub struct CheckEntry { pub name: String, pub passed: bool, pub message: String, + pub suggestion: Option, pub critical: bool, } @@ -243,12 +243,6 @@ pub struct App { /// Setup wizard state (active when on SetupWizard screen). pub setup_state: Option, - /// Whether the config file was successfully created by init. - pub config_created: bool, - - /// Path where config was written (for display). - pub config_path_display: Option, - /// Whether status scan is in progress. pub status_loading: bool, @@ -309,7 +303,7 @@ pub struct App { impl App { /// Create a new App with the given config and workspaces. 
- pub fn new(config: Config, workspaces: Vec) -> Self { + pub fn new(config: Config, workspaces: Vec, config_was_created: bool) -> Self { let (screen, active_workspace, base_path) = match workspaces.len() { 0 => (Screen::WorkspaceSetup, None, None), 1 => { @@ -319,8 +313,10 @@ impl App { } _ => { // Check for default workspace - if let Some(ref default_name) = config.default_workspace { - if let Some(ws) = workspaces.iter().find(|w| w.name == *default_name) { + if let Some(ref default_path) = config.default_workspace { + let expanded = shellexpand::tilde(default_path.as_str()); + let default_root = std::path::PathBuf::from(expanded.as_ref()); + if let Some(ws) = workspaces.iter().find(|w| w.root_path == default_root) { let bp = Some(ws.expanded_base_path()); (Screen::Dashboard, Some(ws.clone()), bp) } else { @@ -335,7 +331,7 @@ impl App { let sync_history = active_workspace .as_ref() .and_then(|ws| { - crate::cache::SyncHistoryManager::for_workspace(&ws.name) + crate::cache::SyncHistoryManager::for_workspace(&ws.root_path) .and_then(|m| m.load()) .ok() }) @@ -370,12 +366,12 @@ impl App { let default_path = std::env::current_dir() .map(|p| state::tilde_collapse(&p.to_string_lossy())) .unwrap_or_else(|_| "~/Git-Same/GitHub".to_string()); - Some(SetupState::with_first_setup(&default_path, true)) + let mut setup = SetupState::with_first_setup(&default_path, config_was_created); + setup.config_was_created = config_was_created; + Some(setup) } else { None }, - config_created: false, - config_path_display: None, status_loading: false, last_status_scan: None, stat_index: 0, @@ -403,7 +399,7 @@ impl App { if let Some(ws) = self.workspaces.get(index).cloned() { self.base_path = Some(ws.expanded_base_path()); // Load sync history for this workspace - self.sync_history = crate::cache::SyncHistoryManager::for_workspace(&ws.name) + self.sync_history = crate::cache::SyncHistoryManager::for_workspace(&ws.root_path) .and_then(|m| m.load()) .unwrap_or_default(); 
self.active_workspace = Some(ws); diff --git a/src/tui/app_tests.rs b/src/tui/app_tests.rs index af830cc..6cf1f2b 100644 --- a/src/tui/app_tests.rs +++ b/src/tui/app_tests.rs @@ -2,50 +2,73 @@ use super::*; #[test] fn test_new_no_workspaces_shows_setup_wizard() { - let app = App::new(Config::default(), vec![]); + let app = App::new(Config::default(), vec![], false); assert_eq!(app.screen, Screen::WorkspaceSetup); assert!(app.setup_state.is_some()); assert!(app.active_workspace.is_none()); assert!(app.base_path.is_none()); + assert!( + !app.setup_state + .as_ref() + .expect("setup state") + .is_first_setup + ); +} + +#[test] +fn test_new_no_workspaces_uses_config_created_for_first_setup_flag() { + let app = App::new(Config::default(), vec![], true); + let setup = app.setup_state.as_ref().expect("setup state"); + assert!(setup.is_first_setup); } #[test] fn test_new_single_workspace_auto_selects() { - let ws = WorkspaceConfig::new("test", "/tmp/test"); - let app = App::new(Config::default(), vec![ws]); + let ws = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/test")); + let app = App::new(Config::default(), vec![ws], false); assert_eq!(app.screen, Screen::Dashboard); assert!(app.active_workspace.is_some()); - assert_eq!(app.active_workspace.unwrap().name, "test"); + assert_eq!( + app.active_workspace.unwrap().root_path, + std::path::PathBuf::from("/tmp/test") + ); assert!(app.base_path.is_some()); } #[test] fn test_new_multiple_no_default_shows_selector() { - let ws1 = WorkspaceConfig::new("ws1", "/tmp/ws1"); - let ws2 = WorkspaceConfig::new("ws2", "/tmp/ws2"); - let app = App::new(Config::default(), vec![ws1, ws2]); + let ws1 = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/ws1")); + let ws2 = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/ws2")); + let app = App::new(Config::default(), vec![ws1, ws2], false); assert_eq!(app.screen, Screen::Workspaces); assert!(app.active_workspace.is_none()); } #[test] fn 
test_new_multiple_with_valid_default_auto_selects() { - let ws1 = WorkspaceConfig::new("ws1", "/tmp/ws1"); - let ws2 = WorkspaceConfig::new("ws2", "/tmp/ws2"); - let mut config = Config::default(); - config.default_workspace = Some("ws2".to_string()); - let app = App::new(config, vec![ws1, ws2]); + let ws1 = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/ws1")); + let ws2 = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/ws2")); + let config = Config { + default_workspace: Some("/tmp/ws2".to_string()), + ..Config::default() + }; + let app = App::new(config, vec![ws1, ws2], false); assert_eq!(app.screen, Screen::Dashboard); - assert_eq!(app.active_workspace.unwrap().name, "ws2"); + assert_eq!( + app.active_workspace.unwrap().root_path, + std::path::PathBuf::from("/tmp/ws2") + ); } #[test] fn test_new_multiple_with_invalid_default_shows_selector() { - let ws1 = WorkspaceConfig::new("ws1", "/tmp/ws1"); - let ws2 = WorkspaceConfig::new("ws2", "/tmp/ws2"); - let mut config = Config::default(); - config.default_workspace = Some("nonexistent".to_string()); - let app = App::new(config, vec![ws1, ws2]); + let ws1 = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/ws1")); + let ws2 = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/ws2")); + let config = Config { + default_workspace: Some("/tmp/nonexistent".to_string()), + ..Config::default() + }; + let app = App::new(config, vec![ws1, ws2], false); assert_eq!(app.screen, Screen::Workspaces); assert!(app.active_workspace.is_none()); } diff --git a/src/tui/backend.rs b/src/tui/backend.rs index 5e19853..f71c14a 100644 --- a/src/tui/backend.rs +++ b/src/tui/backend.rs @@ -6,7 +6,7 @@ use std::path::Path; use std::sync::Arc; use tokio::sync::mpsc::UnboundedSender; -use crate::config::{Config, WorkspaceConfig}; +use crate::config::{Config, WorkspaceConfig, WorkspaceProvider}; use crate::git::{FetchResult, GitOperations, PullResult, ShellGit}; use crate::operations::clone::CloneProgress; use 
crate::operations::sync::SyncProgress; @@ -256,6 +256,24 @@ pub fn spawn_changelog_fetch( } } +/// Spawn setup-wizard org discovery without blocking the TUI event loop. +pub fn spawn_setup_org_discovery( + ws_provider: WorkspaceProvider, + token: String, + tx: UnboundedSender, +) { + tokio::spawn(async move { + match crate::setup::handler::discover_org_entries(ws_provider, token).await { + Ok(orgs) => { + let _ = tx.send(AppEvent::Backend(BackendMessage::SetupOrgsDiscovered(orgs))); + } + Err(err) => { + let _ = tx.send(AppEvent::Backend(BackendMessage::SetupOrgsError(err))); + } + } + }); +} + /// Spawn a backend operation as a Tokio task. pub fn spawn_operation(operation: Operation, app: &App, tx: UnboundedSender) { let config = app.config.clone(); diff --git a/src/tui/backend_tests.rs b/src/tui/backend_tests.rs index ada8c42..2677074 100644 --- a/src/tui/backend_tests.rs +++ b/src/tui/backend_tests.rs @@ -230,7 +230,7 @@ fn sync_progress_emits_fetch_pull_error_and_skip() { #[tokio::test] async fn spawn_operation_sync_without_workspace_emits_operation_error() { - let mut app = App::new(Config::default(), Vec::new()); + let mut app = App::new(Config::default(), Vec::new(), false); app.active_workspace = None; let (tx, mut rx) = unbounded_channel(); @@ -251,7 +251,7 @@ async fn spawn_operation_sync_without_workspace_emits_operation_error() { #[tokio::test] async fn spawn_operation_status_without_workspace_emits_operation_error() { - let mut app = App::new(Config::default(), Vec::new()); + let mut app = App::new(Config::default(), Vec::new(), false); app.active_workspace = None; let (tx, mut rx) = unbounded_channel(); diff --git a/src/tui/event.rs b/src/tui/event.rs index 9c2e47f..b590ed0 100644 --- a/src/tui/event.rs +++ b/src/tui/event.rs @@ -5,6 +5,7 @@ use std::time::Duration; use tokio::sync::mpsc; use tracing::warn; +use crate::setup::state::OrgEntry; use crate::types::{OpSummary, OwnedRepo}; use super::app::{CheckEntry, Operation, RepoEntry}; @@ -35,6 +36,10 
@@ pub enum BackendMessage { DiscoveryComplete(Vec), /// Discovery failed. DiscoveryError(String), + /// Setup wizard org discovery complete. + SetupOrgsDiscovered(Vec), + /// Setup wizard org discovery failed. + SetupOrgsError(String), /// Operation phase started with total and per-phase breakdown. OperationStarted { operation: Operation, @@ -70,10 +75,8 @@ pub enum BackendMessage { OperationError(String), /// Status scan results. StatusResults(Vec), - /// Init: config file created successfully. - InitConfigCreated(String), - /// Init: config creation failed. - InitConfigError(String), + /// Setup wizard requirement check results. + SetupCheckResults(Vec), /// Default workspace was set/cleared successfully. DefaultWorkspaceUpdated(Option), /// Default workspace operation failed. diff --git a/src/tui/event_tests.rs b/src/tui/event_tests.rs index 9b323f9..84a8392 100644 --- a/src/tui/event_tests.rs +++ b/src/tui/event_tests.rs @@ -1,4 +1,5 @@ use super::*; +use crate::setup::state::OrgEntry; use crate::tui::app::{CheckEntry, Operation, RepoEntry}; use crate::types::OpSummary; use crossterm::event::{KeyCode, KeyEvent, KeyModifiers}; @@ -48,6 +49,7 @@ fn backend_message_variants_construct_and_clone() { name: "git".to_string(), passed: true, message: "installed".to_string(), + suggestion: None, critical: true, }]; @@ -57,6 +59,12 @@ fn backend_message_variants_construct_and_clone() { BackendMessage::OrgComplete("acme".to_string(), 2), BackendMessage::DiscoveryComplete(vec![repo.clone()]), BackendMessage::DiscoveryError("err".to_string()), + BackendMessage::SetupOrgsDiscovered(vec![OrgEntry { + name: "acme".to_string(), + repo_count: 2, + selected: true, + }]), + BackendMessage::SetupOrgsError("err".to_string()), BackendMessage::OperationStarted { operation: Operation::Sync, total: 3, @@ -87,8 +95,7 @@ fn backend_message_variants_construct_and_clone() { }), BackendMessage::OperationError("err".to_string()), BackendMessage::StatusResults(status_rows), - 
BackendMessage::InitConfigCreated("/tmp/config.toml".to_string()), - BackendMessage::InitConfigError("failed".to_string()), + BackendMessage::SetupCheckResults(checks.clone()), BackendMessage::DefaultWorkspaceUpdated(Some("ws".to_string())), BackendMessage::DefaultWorkspaceError("bad".to_string()), BackendMessage::CheckResults(checks), diff --git a/src/tui/handler.rs b/src/tui/handler.rs index ac645f1..1828c50 100644 --- a/src/tui/handler.rs +++ b/src/tui/handler.rs @@ -62,12 +62,43 @@ pub async fn handle_event(app: &mut App, event: AppEvent, backend_tx: &Unbounded if app.screen == Screen::WorkspaceSetup { if let Some(ref mut setup) = app.setup_state { setup.tick_count = setup.tick_count.wrapping_add(1); - if setup.step == SetupStep::SelectOrgs && setup.org_loading { - crate::setup::handler::handle_key( - setup, - KeyEvent::new(KeyCode::Null, KeyModifiers::NONE), - ) - .await; + // Auto-trigger requirement checks on first tick + if crate::setup::maybe_start_requirements_checks(setup) { + let tx = backend_tx.clone(); + tokio::spawn(async move { + let results = crate::checks::check_requirements().await; + let entries: Vec = results + .into_iter() + .map(|r| CheckEntry { + name: r.name, + passed: r.passed, + message: r.message, + suggestion: r.suggestion, + critical: r.critical, + }) + .collect(); + let _ = tx.send(AppEvent::Backend(BackendMessage::SetupCheckResults( + entries, + ))); + }); + } + if setup.step == SetupStep::SelectOrgs + && setup.org_loading + && !setup.org_discovery_in_progress + { + if let Some(token) = setup.auth_token.clone() { + setup.org_discovery_in_progress = true; + let ws_provider = setup.build_workspace_provider(); + super::backend::spawn_setup_org_discovery( + ws_provider, + token, + backend_tx.clone(), + ); + } else { + setup.org_error = Some("Not authenticated".to_string()); + setup.org_loading = false; + setup.org_discovery_in_progress = false; + } } } } @@ -86,6 +117,7 @@ pub async fn handle_event(app: &mut App, event: AppEvent, 
backend_tx: &Unbounded name: r.name, passed: r.passed, message: r.message, + suggestion: r.suggestion, critical: r.critical, }) .collect(); @@ -168,10 +200,6 @@ async fn handle_key(app: &mut App, key: KeyEvent, backend_tx: &UnboundedSender screens::system_check::handle_key(app, key, backend_tx).await, Screen::WorkspaceSetup => unreachable!(), // handled above Screen::Workspaces => screens::workspaces::handle_key(app, key, backend_tx).await, Screen::Dashboard => screens::dashboard::handle_key(app, key, backend_tx).await, @@ -206,7 +233,7 @@ async fn handle_setup_wizard_key(app: &mut App, key: KeyEvent) { app.workspaces = workspaces; if let Some(ws) = app.workspaces.first().cloned() { app.base_path = Some(ws.expanded_base_path()); - app.sync_history = SyncHistoryManager::for_workspace(&ws.name) + app.sync_history = SyncHistoryManager::for_workspace(&ws.root_path) .and_then(|m| m.load()) .unwrap_or_default(); app.active_workspace = Some(ws); @@ -224,10 +251,10 @@ async fn handle_setup_wizard_key(app: &mut App, key: KeyEvent) { app.screen = Screen::Dashboard; app.screen_stack.clear(); } else { - // Cancelled — return to previous screen when available. + // Cancelled — return to previous screen when available, else quit. 
app.setup_state = None; if app.screen_stack.is_empty() { - app.screen = Screen::SystemCheck; + app.should_quit = true; } else { app.go_back(); } @@ -291,6 +318,26 @@ fn handle_backend_message( app.operation_state = OperationState::Idle; app.error_message = Some(msg); } + BackendMessage::SetupOrgsDiscovered(orgs) => { + if let Some(setup) = app.setup_state.as_mut() { + setup.org_discovery_in_progress = false; + if setup.step == SetupStep::SelectOrgs { + setup.orgs = orgs; + setup.org_index = 0; + setup.org_loading = false; + setup.org_error = None; + } + } + } + BackendMessage::SetupOrgsError(msg) => { + if let Some(setup) = app.setup_state.as_mut() { + setup.org_discovery_in_progress = false; + if setup.step == SetupStep::SelectOrgs { + setup.org_loading = false; + setup.org_error = Some(msg); + } + } + } BackendMessage::OperationStarted { operation, total, @@ -466,7 +513,11 @@ fn handle_backend_message( if let Some(ref mut ws) = app.active_workspace { ws.last_synced = Some(now.clone()); let _ = WorkspaceManager::save(ws); - if let Some(entry) = app.workspaces.iter_mut().find(|w| w.name == ws.name) { + if let Some(entry) = app + .workspaces + .iter_mut() + .find(|w| w.root_path == ws.root_path) + { entry.last_synced = Some(now.clone()); } } @@ -489,7 +540,7 @@ fn handle_backend_message( // Persist history to disk if let Some(ref ws) = app.active_workspace { - if let Ok(manager) = SyncHistoryManager::for_workspace(&ws.name) { + if let Ok(manager) = SyncHistoryManager::for_workspace(&ws.root_path) { let _ = manager.save(&app.sync_history); } } @@ -545,12 +596,25 @@ fn handle_backend_message( app.changelog_loaded += 1; } } - BackendMessage::InitConfigCreated(path) => { - app.config_created = true; - app.config_path_display = Some(path); - } - BackendMessage::InitConfigError(msg) => { - app.error_message = Some(msg); + BackendMessage::SetupCheckResults(entries) => { + // Populate app-level check_results (for Settings screen) + app.check_results = entries.clone(); + 
app.checks_loading = false; + // Also populate setup state if on Requirements step + if let Some(ref mut setup) = app.setup_state { + // Map CheckEntry back to CheckResult for setup state storage + let results = entries + .iter() + .map(|e| crate::checks::CheckResult { + name: e.name.clone(), + passed: e.passed, + message: e.message.clone(), + suggestion: e.suggestion.clone(), + critical: e.critical, + }) + .collect(); + crate::setup::apply_requirements_check_results(setup, results); + } } BackendMessage::DefaultWorkspaceUpdated(name) => { app.config.default_workspace = name; diff --git a/src/tui/handler_tests.rs b/src/tui/handler_tests.rs index 77ef8e1..9df93e4 100644 --- a/src/tui/handler_tests.rs +++ b/src/tui/handler_tests.rs @@ -1,13 +1,14 @@ use super::*; use crate::config::{Config, WorkspaceConfig}; -use crate::setup::state::{SetupState, SetupStep}; +use crate::setup::state::{OrgEntry, SetupState, SetupStep}; +use crate::tui::event::{AppEvent, BackendMessage}; use crossterm::event::{KeyCode, KeyEvent, KeyModifiers}; use tokio::sync::mpsc::unbounded_channel; #[tokio::test] async fn q_quits_immediately() { - let ws = WorkspaceConfig::new("test-ws", "/tmp/test-ws"); - let mut app = App::new(Config::default(), vec![ws]); + let ws = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/test-ws")); + let mut app = App::new(Config::default(), vec![ws], false); let (tx, _rx) = unbounded_channel(); handle_key( @@ -22,40 +23,45 @@ async fn q_quits_immediately() { #[tokio::test] async fn setup_cancel_returns_to_previous_screen_when_present() { - let ws = WorkspaceConfig::new("test-ws", "/tmp/test-ws"); - let mut app = App::new(Config::default(), vec![ws]); + let ws = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/test-ws")); + let mut app = App::new(Config::default(), vec![ws], false); app.screen = Screen::WorkspaceSetup; - app.screen_stack = vec![Screen::SystemCheck, Screen::Workspaces]; - app.setup_state = Some(SetupState::new("~/Git-Same/GitHub")); + 
app.screen_stack = vec![Screen::Settings, Screen::Workspaces]; + let mut setup = SetupState::new("~/Git-Same/GitHub"); + setup.step = crate::setup::state::SetupStep::SelectProvider; + app.setup_state = Some(setup); handle_setup_wizard_key(&mut app, KeyEvent::new(KeyCode::Esc, KeyModifiers::NONE)).await; assert!(app.setup_state.is_none()); assert_eq!(app.screen, Screen::Workspaces); - assert_eq!(app.screen_stack, vec![Screen::SystemCheck]); + assert_eq!(app.screen_stack, vec![Screen::Settings]); } #[tokio::test] -async fn setup_cancel_without_history_falls_back_to_system_check() { - let ws = WorkspaceConfig::new("test-ws", "/tmp/test-ws"); - let mut app = App::new(Config::default(), vec![ws]); +async fn setup_cancel_without_history_quits() { + let ws = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/test-ws")); + let mut app = App::new(Config::default(), vec![ws], false); app.screen = Screen::WorkspaceSetup; app.screen_stack.clear(); - app.setup_state = Some(SetupState::new("~/Git-Same/GitHub")); + let mut setup = SetupState::new("~/Git-Same/GitHub"); + setup.step = crate::setup::state::SetupStep::SelectProvider; + app.setup_state = Some(setup); handle_setup_wizard_key(&mut app, KeyEvent::new(KeyCode::Esc, KeyModifiers::NONE)).await; assert!(app.setup_state.is_none()); - assert_eq!(app.screen, Screen::SystemCheck); - assert!(app.screen_stack.is_empty()); + assert!(app.should_quit); } #[tokio::test] async fn setup_right_moves_to_next_step() { - let ws = WorkspaceConfig::new("test-ws", "/tmp/test-ws"); - let mut app = App::new(Config::default(), vec![ws]); + let ws = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/test-ws")); + let mut app = App::new(Config::default(), vec![ws], false); app.screen = Screen::WorkspaceSetup; - app.setup_state = Some(SetupState::new("~/Git-Same/GitHub")); + let mut setup = SetupState::new("~/Git-Same/GitHub"); + setup.step = SetupStep::SelectProvider; + app.setup_state = Some(setup); handle_setup_wizard_key(&mut app, 
KeyEvent::new(KeyCode::Right, KeyModifiers::NONE)).await; @@ -64,3 +70,69 @@ async fn setup_right_moves_to_next_step() { Some(SetupStep::Authenticate) ); } + +#[tokio::test] +async fn setup_org_discovery_backend_message_populates_state() { + let ws = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/test-ws")); + let mut app = App::new(Config::default(), vec![ws], false); + let (tx, _rx) = unbounded_channel(); + app.screen = Screen::WorkspaceSetup; + app.setup_state = Some(SetupState::new("~/Git-Same/GitHub")); + let setup = app.setup_state.as_mut().expect("setup state"); + setup.step = SetupStep::SelectOrgs; + setup.org_loading = true; + setup.org_discovery_in_progress = true; + + handle_event( + &mut app, + AppEvent::Backend(BackendMessage::SetupOrgsDiscovered(vec![OrgEntry { + name: "acme".to_string(), + repo_count: 3, + selected: true, + }])), + &tx, + ) + .await; + + let setup = app.setup_state.as_ref().expect("setup state"); + assert!(!setup.org_loading); + assert!(!setup.org_discovery_in_progress); + assert!(setup.org_error.is_none()); + assert_eq!(setup.orgs.len(), 1); + assert_eq!(setup.orgs[0].name, "acme"); +} + +#[tokio::test] +async fn setup_check_results_preserve_suggestions() { + let ws = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/test-ws")); + let mut app = App::new(Config::default(), vec![ws], false); + let (tx, _rx) = unbounded_channel(); + app.screen = Screen::WorkspaceSetup; + app.setup_state = Some(SetupState::new("~/Git-Same/GitHub")); + + handle_event( + &mut app, + AppEvent::Backend(BackendMessage::SetupCheckResults(vec![CheckEntry { + name: "gh".to_string(), + passed: false, + message: "Not authenticated".to_string(), + suggestion: Some("Run: gh auth login".to_string()), + critical: true, + }])), + &tx, + ) + .await; + + assert_eq!(app.check_results.len(), 1); + assert_eq!( + app.check_results[0].suggestion.as_deref(), + Some("Run: gh auth login") + ); + + let setup = app.setup_state.as_ref().expect("setup state"); + 
assert_eq!(setup.check_results.len(), 1); + assert_eq!( + setup.check_results[0].suggestion.as_deref(), + Some("Run: gh auth login") + ); +} diff --git a/src/tui/mod.rs b/src/tui/mod.rs index 78aca15..43cbfe3 100644 --- a/src/tui/mod.rs +++ b/src/tui/mod.rs @@ -23,7 +23,7 @@ use std::io; use std::time::Duration; /// Run the TUI application. -pub async fn run_tui(config: Config) -> Result<()> { +pub async fn run_tui(config: Config, config_was_created: bool) -> Result<()> { // Setup terminal enable_raw_mode()?; let mut stdout = io::stdout(); @@ -47,7 +47,7 @@ pub async fn run_tui(config: Config) -> Result<()> { let workspaces = WorkspaceManager::list()?; // Create app state - let mut app = App::new(config, workspaces); + let mut app = App::new(config, workspaces, config_was_created); // Start event loop let tick_rate = Duration::from_millis(100); diff --git a/src/tui/screens/dashboard.rs b/src/tui/screens/dashboard.rs index 5053097..aed6008 100644 --- a/src/tui/screens/dashboard.rs +++ b/src/tui/screens/dashboard.rs @@ -15,7 +15,7 @@ use chrono::DateTime; use crossterm::event::{KeyCode, KeyEvent}; use tokio::sync::mpsc::UnboundedSender; -use crate::banner::render_banner; +use crate::banner::{render_animated_banner, render_banner}; use crate::tui::app::{App, Operation, OperationState, RepoEntry, Screen}; use crate::tui::event::AppEvent; @@ -66,7 +66,7 @@ pub async fn handle_key(app: &mut App, key: KeyEvent, backend_tx: &UnboundedSend app.navigate_to(Screen::Workspaces); } KeyCode::Char('i') => { - app.navigate_to(Screen::SystemCheck); + app.navigate_to(Screen::Settings); } KeyCode::Char('/') => { app.filter_active = true; @@ -246,6 +246,20 @@ pub(crate) fn format_timestamp(raw: &str) -> String { } } +fn sync_banner_phase(app: &App) -> Option { + match &app.operation_state { + OperationState::Discovering { + operation: Operation::Sync, + .. + } + | OperationState::Running { + operation: Operation::Sync, + .. 
+ } => Some((app.tick_count as f64 / 50.0).fract()), + _ => None, + } +} + pub fn render(app: &mut App, frame: &mut Frame) { let chunks = Layout::vertical([ Constraint::Length(6), // Banner @@ -258,7 +272,11 @@ pub fn render(app: &mut App, frame: &mut Frame) { ]) .split(frame.area()); - render_banner(frame, chunks[0]); + if let Some(phase) = sync_banner_phase(app) { + render_animated_banner(frame, chunks[0], phase); + } else { + render_banner(frame, chunks[0]); + } render_tagline(frame, chunks[1]); render_config_reqs(app, frame, chunks[2]); render_workspace_info(app, frame, chunks[3]); @@ -342,10 +360,11 @@ fn render_workspace_info(app: &App, frame: &mut Frame, area: Rect) { .add_modifier(Modifier::BOLD); match &app.active_workspace { Some(ws) => { - let folder_name = std::path::Path::new(&ws.base_path) + let folder_name = ws + .root_path .file_name() .and_then(|n| n.to_str()) - .unwrap_or(&ws.base_path) + .unwrap_or_else(|| ws.root_path.to_str().unwrap_or("workspace")) .to_string(); render_info_line( @@ -1077,10 +1096,13 @@ fn render_bottom_actions(app: &App, frame: &mut Frame, area: Rect) { } _ => app.active_workspace.as_ref().and_then(|ws| { ws.last_synced.as_ref().map(|ts| { - let folder_name = std::path::Path::new(&ws.base_path) + let folder_name_owned = ws + .root_path .file_name() .and_then(|n| n.to_str()) - .unwrap_or(&ws.base_path); + .unwrap_or_else(|| ws.root_path.to_str().unwrap_or("workspace")) + .to_string(); + let folder_name = folder_name_owned.as_str(); let formatted = format_timestamp(ts); Line::from(vec![ Span::styled("Synced ", dim), diff --git a/src/tui/screens/dashboard_tests.rs b/src/tui/screens/dashboard_tests.rs index 0c8ace3..939dcc4 100644 --- a/src/tui/screens/dashboard_tests.rs +++ b/src/tui/screens/dashboard_tests.rs @@ -4,8 +4,8 @@ use crossterm::event::{KeyCode, KeyEvent, KeyModifiers}; use tokio::sync::mpsc::unbounded_channel; fn build_app() -> App { - let ws = WorkspaceConfig::new("test-ws", "/tmp/test-ws"); - let mut app = 
App::new(Config::default(), vec![ws]); + let ws = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/test-ws")); + let mut app = App::new(Config::default(), vec![ws], false); app.screen = Screen::Dashboard; app.screen_stack.clear(); app @@ -63,3 +63,55 @@ fn hide_show_sync_progress_preserves_sync_state() { assert_eq!(app.scroll_offset, 9); assert_eq!(app.sync_log_index, 4); } + +#[test] +fn sync_banner_phase_is_none_when_sync_not_active() { + let mut app = build_app(); + app.tick_count = 75; + app.operation_state = OperationState::Discovering { + operation: Operation::Status, + message: "Scanning repos".to_string(), + }; + + assert_eq!(sync_banner_phase(&app), None); +} + +#[test] +fn sync_banner_phase_animates_while_sync_discovering() { + let mut app = build_app(); + app.tick_count = 75; + app.operation_state = OperationState::Discovering { + operation: Operation::Sync, + message: "Discovering repos".to_string(), + }; + + let phase = sync_banner_phase(&app).expect("sync should animate the banner"); + assert!((phase - 0.5).abs() < f64::EPSILON); +} + +#[test] +fn sync_banner_phase_animates_while_sync_running() { + let mut app = build_app(); + app.tick_count = 125; + app.operation_state = OperationState::Running { + operation: Operation::Sync, + total: 10, + completed: 2, + failed: 0, + skipped: 0, + current_repo: "org/repo".to_string(), + with_updates: 1, + cloned: 1, + synced: 1, + to_clone: 2, + to_sync: 8, + total_new_commits: 3, + started_at: std::time::Instant::now(), + active_repos: vec!["org/repo".to_string()], + throughput_samples: vec![1, 1], + last_sample_completed: 1, + }; + + let phase = sync_banner_phase(&app).expect("running sync should animate the banner"); + assert!((phase - 0.5).abs() < f64::EPSILON); +} diff --git a/src/tui/screens/mod.rs b/src/tui/screens/mod.rs index d85921c..fdfef9c 100644 --- a/src/tui/screens/mod.rs +++ b/src/tui/screens/mod.rs @@ -3,5 +3,4 @@ pub mod dashboard; pub mod settings; pub mod sync; -pub mod system_check; 
pub mod workspaces; diff --git a/src/tui/screens/settings.rs b/src/tui/screens/settings.rs index 809912a..f26d8c3 100644 --- a/src/tui/screens/settings.rs +++ b/src/tui/screens/settings.rs @@ -187,6 +187,14 @@ fn render_requirements_detail(app: &App, frame: &mut Frame, area: Rect) { spans.push(Span::styled(" (critical)", fail_style)); } lines.push(Line::from(spans)); + if !check.passed { + if let Some(suggestion) = &check.suggestion { + lines.push(Line::from(vec![ + Span::styled(" ", dim), + Span::styled(suggestion, dim), + ])); + } + } } } diff --git a/src/tui/screens/settings_tests.rs b/src/tui/screens/settings_tests.rs index 4c1686a..f8da0b2 100644 --- a/src/tui/screens/settings_tests.rs +++ b/src/tui/screens/settings_tests.rs @@ -22,8 +22,8 @@ fn render_output(app: &App) -> String { } fn app_for_settings() -> App { - let ws = WorkspaceConfig::new("ws", "/tmp/ws"); - App::new(Config::default(), vec![ws]) + let ws = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/ws")); + App::new(Config::default(), vec![ws], false) } #[test] diff --git a/src/tui/screens/sync.rs b/src/tui/screens/sync.rs index b226fb4..a2d0093 100644 --- a/src/tui/screens/sync.rs +++ b/src/tui/screens/sync.rs @@ -15,8 +15,6 @@ use crate::tui::app::{App, LogFilter, OperationState, SyncLogEntry, SyncLogStatu use crate::tui::event::AppEvent; use crate::tui::screens::dashboard::{hide_sync_progress, start_sync_operation}; -use crate::banner::render_animated_banner; - // ── Key handler ───────────────────────────────────────────────────────────── pub fn handle_key(app: &mut App, key: KeyEvent, backend_tx: &UnboundedSender) { @@ -219,14 +217,6 @@ const POPUP_HEIGHT_PERCENT: u16 = 80; pub fn render(app: &App, frame: &mut Frame) { let is_finished = matches!(&app.operation_state, OperationState::Finished { .. }); - // Animate during active ops, static otherwise - let phase = match &app.operation_state { - OperationState::Discovering { .. } | OperationState::Running { .. 
} => { - (app.tick_count as f64 / 50.0).fract() - } - _ => 0.0, - }; - let popup_area = centered_rect(frame.area(), POPUP_WIDTH_PERCENT, POPUP_HEIGHT_PERCENT); dim_outside_popup(frame, popup_area); frame.render_widget(Clear, popup_area); @@ -239,7 +229,7 @@ pub fn render(app: &App, frame: &mut Frame) { let inner = block.inner(popup_area); frame.render_widget(block, popup_area); - render_running_layout(app, frame, inner, phase); + render_running_layout(app, frame, inner); // Sync history overlay (on top of popup) if app.show_sync_history && is_finished { @@ -249,9 +239,8 @@ pub fn render(app: &App, frame: &mut Frame) { // ── Popup layout ──────────────────────────────────────────────────────────── -fn render_running_layout(app: &App, frame: &mut Frame, area: Rect, phase: f64) { +fn render_running_layout(app: &App, frame: &mut Frame, area: Rect) { let chunks = Layout::vertical([ - Constraint::Length(6), // Banner Constraint::Length(3), // Title Constraint::Length(3), // Progress bar Constraint::Length(1), // Enriched counters / summary @@ -263,15 +252,14 @@ fn render_running_layout(app: &App, frame: &mut Frame, area: Rect, phase: f64) { ]) .split(area); - render_animated_banner(frame, chunks[0], phase); - render_title(app, frame, chunks[1]); - render_progress_bar(app, frame, chunks[2]); - render_enriched_counters(app, frame, chunks[3]); - render_throughput(app, frame, chunks[4]); - render_phase_indicator(app, frame, chunks[5]); - render_worker_slots(app, frame, chunks[6]); - render_main_log(app, frame, chunks[7]); - render_bottom_actions(app, frame, chunks[8]); + render_title(app, frame, chunks[0]); + render_progress_bar(app, frame, chunks[1]); + render_enriched_counters(app, frame, chunks[2]); + render_throughput(app, frame, chunks[3]); + render_phase_indicator(app, frame, chunks[4]); + render_worker_slots(app, frame, chunks[5]); + render_main_log(app, frame, chunks[6]); + render_bottom_actions(app, frame, chunks[7]); } fn render_main_log(app: &App, frame: &mut 
Frame, area: Rect) { diff --git a/src/tui/screens/sync_tests.rs b/src/tui/screens/sync_tests.rs index a44b745..40a0076 100644 --- a/src/tui/screens/sync_tests.rs +++ b/src/tui/screens/sync_tests.rs @@ -6,8 +6,8 @@ use crossterm::event::{KeyCode, KeyEvent, KeyModifiers}; use tokio::sync::mpsc::unbounded_channel; fn build_app() -> App { - let ws = WorkspaceConfig::new("test-ws", "/tmp/test-ws"); - let mut app = App::new(Config::default(), vec![ws]); + let ws = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/test-ws")); + let mut app = App::new(Config::default(), vec![ws], false); app.screen = Screen::Sync; app.screen_stack = vec![Screen::Dashboard]; app diff --git a/src/tui/screens/system_check.rs b/src/tui/screens/system_check.rs deleted file mode 100644 index f5fb5df..0000000 --- a/src/tui/screens/system_check.rs +++ /dev/null @@ -1,229 +0,0 @@ -//! Init check screen — displays requirement check results. - -use ratatui::{ - layout::{Constraint, Layout}, - style::{Color, Modifier, Style}, - text::{Line, Span}, - widgets::{Block, Borders, List, ListItem, Paragraph}, - Frame, -}; - -use crossterm::event::{KeyCode, KeyEvent}; -use tokio::sync::mpsc::UnboundedSender; - -use crate::config::Config; -use crate::setup::state::SetupState; -use crate::tui::app::{App, CheckEntry, Screen}; -use crate::tui::event::{AppEvent, BackendMessage}; -use crate::tui::widgets::status_bar; - -// ── Key handler ───────────────────────────────────────────────────────────── - -pub async fn handle_key(app: &mut App, key: KeyEvent, backend_tx: &UnboundedSender) { - match key.code { - KeyCode::Enter if !app.checks_loading => { - // Run requirement checks - app.checks_loading = true; - let results = crate::checks::check_requirements().await; - app.check_results = results - .into_iter() - .map(|r| CheckEntry { - name: r.name, - passed: r.passed, - message: r.message, - critical: r.critical, - }) - .collect(); - app.checks_loading = false; - } - KeyCode::Char('c') if 
!app.check_results.is_empty() && !app.config_created => { - // Create config file - let tx = backend_tx.clone(); - tokio::spawn(async move { - match Config::default_path() { - Ok(config_path) => { - if config_path.exists() { - let _ = tx.send(AppEvent::Backend(BackendMessage::InitConfigError( - format!( - "Config already exists at {}. Delete it first to recreate.", - config_path.display() - ), - ))); - return; - } - if let Some(parent) = config_path.parent() { - if let Err(e) = std::fs::create_dir_all(parent) { - let _ = - tx.send(AppEvent::Backend(BackendMessage::InitConfigError( - format!("Failed to create config directory: {}", e), - ))); - return; - } - } - let default_config = Config::default_toml(); - match std::fs::write(&config_path, default_config) { - Ok(()) => { - let _ = - tx.send(AppEvent::Backend(BackendMessage::InitConfigCreated( - config_path.display().to_string(), - ))); - } - Err(e) => { - let _ = - tx.send(AppEvent::Backend(BackendMessage::InitConfigError( - format!("Failed to write config: {}", e), - ))); - } - } - } - Err(e) => { - let _ = tx.send(AppEvent::Backend(BackendMessage::InitConfigError( - format!("Cannot determine config path: {}", e), - ))); - } - } - }); - } - KeyCode::Char('s') => { - // Launch setup wizard - let default_path = std::env::current_dir() - .map(|p| crate::setup::state::tilde_collapse(&p.to_string_lossy())) - .unwrap_or_else(|_| "~/Git-Same/GitHub".to_string()); - app.setup_state = Some(SetupState::new(&default_path)); - app.navigate_to(Screen::WorkspaceSetup); - } - _ => {} - } -} - -// ── Render ────────────────────────────────────────────────────────────────── - -pub fn render(app: &App, frame: &mut Frame) { - let chunks = Layout::vertical([ - Constraint::Length(3), // Title - Constraint::Min(8), // Check results - Constraint::Length(3), // Help - Constraint::Length(1), // Status bar - ]) - .split(frame.area()); - - // Title - let title = Paragraph::new(Line::from(Span::styled( - " System Requirements ", - 
Style::default() - .fg(Color::Cyan) - .add_modifier(Modifier::BOLD), - ))) - .centered() - .block( - Block::default() - .borders(Borders::BOTTOM) - .border_style(Style::default().fg(Color::DarkGray)), - ); - frame.render_widget(title, chunks[0]); - - // Check results - if app.checks_loading { - let loading = Paragraph::new(Line::from(Span::styled( - " Checking requirements...", - Style::default().fg(Color::Yellow), - ))) - .block( - Block::default() - .borders(Borders::ALL) - .border_style(Style::default().fg(Color::DarkGray)), - ); - frame.render_widget(loading, chunks[1]); - } else if app.check_results.is_empty() { - let empty = Paragraph::new(Line::from(Span::styled( - " Press Enter to check requirements", - Style::default().fg(Color::DarkGray), - ))) - .block( - Block::default() - .borders(Borders::ALL) - .border_style(Style::default().fg(Color::DarkGray)), - ); - frame.render_widget(empty, chunks[1]); - } else { - let items: Vec = app - .check_results - .iter() - .map(|check| { - let (icon, color) = if check.passed { - (" pass ", Color::Rgb(21, 128, 61)) - } else if check.critical { - (" FAIL ", Color::Red) - } else { - (" warn ", Color::Yellow) - }; - ListItem::new(Line::from(vec![ - Span::styled( - icon, - Style::default().fg(color).add_modifier(Modifier::BOLD), - ), - Span::styled(&check.name, Style::default().fg(Color::White)), - Span::styled(" — ", Style::default().fg(Color::DarkGray)), - Span::styled(&check.message, Style::default().fg(Color::DarkGray)), - ])) - }) - .collect(); - - let list = List::new(items).block( - Block::default() - .title(" Results ") - .borders(Borders::ALL) - .border_style(Style::default().fg(Color::DarkGray)), - ); - frame.render_widget(list, chunks[1]); - } - - // Help text / config status - let help_lines = if app.config_created { - let path = app - .config_path_display - .as_deref() - .unwrap_or("~/.config/git-same/config.toml"); - vec![Line::from(vec![ - Span::styled( - " Config created at ", - 
Style::default().fg(Color::Rgb(21, 128, 61)), - ), - Span::styled(path, Style::default().fg(Color::Cyan)), - Span::styled( - " — Press 's' to set up a workspace.", - Style::default().fg(Color::Yellow), - ), - ])] - } else if !app.check_results.is_empty() { - vec![Line::from(vec![ - Span::styled( - " Press 'c' to create config", - Style::default().fg(Color::Yellow), - ), - Span::styled( - " or 's' to set up a workspace.", - Style::default().fg(Color::DarkGray), - ), - ])] - } else { - vec![Line::from(Span::styled( - " No workspaces configured. Press 's' to set up a workspace.", - Style::default().fg(Color::Yellow), - ))] - }; - - let help = Paragraph::new(help_lines).block(Block::default().borders(Borders::TOP)); - frame.render_widget(help, chunks[2]); - - let hint = if !app.check_results.is_empty() && !app.config_created { - "Enter: Re-check c: Create Config s: Setup q: Quit" - } else { - "s: Setup Enter: Check q: Quit" - }; - status_bar::render(frame, chunks[3], hint); -} - -#[cfg(test)] -#[path = "system_check_tests.rs"] -mod tests; diff --git a/src/tui/screens/system_check_tests.rs b/src/tui/screens/system_check_tests.rs deleted file mode 100644 index 363d419..0000000 --- a/src/tui/screens/system_check_tests.rs +++ /dev/null @@ -1,74 +0,0 @@ -use super::*; -use crate::config::{Config, WorkspaceConfig}; -use crate::tui::app::Screen; -use crossterm::event::{KeyCode, KeyEvent, KeyModifiers}; -use ratatui::backend::TestBackend; -use ratatui::Terminal; -use tokio::sync::mpsc::unbounded_channel; - -fn render_output(app: &App) -> String { - let backend = TestBackend::new(110, 28); - let mut terminal = Terminal::new(backend).unwrap(); - - terminal.draw(|frame| render(app, frame)).unwrap(); - - let buffer = terminal.backend().buffer(); - let mut text = String::new(); - for y in 0..buffer.area.height { - for x in 0..buffer.area.width { - text.push_str(buffer[(x, y)].symbol()); - } - text.push('\n'); - } - text -} - -fn app_for_screen() -> App { - let ws = 
WorkspaceConfig::new("ws", "/tmp/ws"); - App::new(Config::default(), vec![ws]) -} - -#[tokio::test] -async fn handle_key_s_opens_setup_wizard() { - let mut app = app_for_screen(); - app.screen = Screen::SystemCheck; - - let (tx, _rx) = unbounded_channel(); - handle_key( - &mut app, - KeyEvent::new(KeyCode::Char('s'), KeyModifiers::NONE), - &tx, - ) - .await; - - assert_eq!(app.screen, Screen::WorkspaceSetup); - assert!(app.setup_state.is_some()); -} - -#[test] -fn render_loading_state_shows_checking_message() { - let mut app = app_for_screen(); - app.checks_loading = true; - app.check_results.clear(); - - let output = render_output(&app); - assert!(output.contains("System Requirements")); - assert!(output.contains("Checking requirements")); -} - -#[test] -fn render_results_state_shows_create_config_hint() { - let mut app = app_for_screen(); - app.checks_loading = false; - app.config_created = false; - app.check_results = vec![CheckEntry { - name: "git".to_string(), - passed: true, - message: "installed".to_string(), - critical: true, - }]; - - let output = render_output(&app); - assert!(output.contains("Results")); - assert!(output.contains("Press 'c' to create config")); -} diff --git a/src/tui/screens/workspaces.rs b/src/tui/screens/workspaces.rs index 9cd88e3..7f9638e 100644 --- a/src/tui/screens/workspaces.rs +++ b/src/tui/screens/workspaces.rs @@ -99,18 +99,13 @@ pub async fn handle_key(app: &mut App, key: KeyEvent, backend_tx: &UnboundedSend KeyCode::Char('d') if app.workspace_index < num_ws => { // Set default workspace if let Some(ws) = app.workspaces.get(app.workspace_index) { - let ws_name = ws.name.clone(); - let new_default_name = match next_default_workspace_name( - app.config.default_workspace.as_deref(), - &ws_name, - ) { - Some(name) => name, - None => { - return; - } - }; - - let new_default = Some(new_default_name); + let ws_path = crate::config::workspace::tilde_collapse_path(&ws.root_path); + let current_default = 
app.config.default_workspace.as_deref(); + if current_default == Some(ws_path.as_str()) { + // Already default, do nothing + return; + } + let new_default = Some(ws_path); let tx = backend_tx.clone(); let default_clone = new_default.clone(); tokio::spawn(async move { @@ -140,17 +135,6 @@ pub async fn handle_key(app: &mut App, key: KeyEvent, backend_tx: &UnboundedSend } } -fn next_default_workspace_name( - current_default: Option<&str>, - selected_workspace: &str, -) -> Option { - if current_default == Some(selected_workspace) { - None - } else { - Some(selected_workspace.to_string()) - } -} - #[cfg(all(not(test), target_os = "macos"))] fn open_workspace_folder(path: &std::path::Path) { let _ = std::process::Command::new("open").arg(path).spawn(); @@ -247,14 +231,16 @@ fn render_workspace_nav(app: &App, frame: &mut Frame, area: Rect) { let is_active = app .active_workspace .as_ref() - .map(|aw| aw.name == ws.name) + .map(|aw| aw.root_path == ws.root_path) .unwrap_or(false); - let is_default = app.config.default_workspace.as_deref() == Some(ws.name.as_str()); + let ws_path = crate::config::workspace::tilde_collapse_path(&ws.root_path); + let is_default = app.config.default_workspace.as_deref() == Some(ws_path.as_str()); - let folder_name = std::path::Path::new(&ws.base_path) + let folder_name = ws + .root_path .file_name() .and_then(|f| f.to_str()) - .unwrap_or(&ws.base_path); + .unwrap_or(ws_path.as_str()); let (marker, style) = if selected { ( @@ -330,28 +316,31 @@ fn render_workspace_detail(app: &App, ws: &WorkspaceConfig, frame: &mut Frame, a .fg(Color::Rgb(37, 99, 235)) .add_modifier(Modifier::BOLD); + let ws_tilde_path = crate::config::workspace::tilde_collapse_path(&ws.root_path); + let is_default = app .config .default_workspace .as_deref() - .map(|d| d == ws.name) + .map(|d| d == ws_tilde_path) .unwrap_or(false); let is_active = app .active_workspace .as_ref() - .map(|aw| aw.name == ws.name) + .map(|aw| aw.root_path == ws.root_path) .unwrap_or(false); - let 
full_path = ws.expanded_base_path().display().to_string(); + let full_path = ws.root_path.display().to_string(); - let config_file = WorkspaceManager::workspace_dir(&ws.name) - .map(|d| d.join("workspace-config.toml").display().to_string()) - .unwrap_or_else(|_| "unknown".to_string()); + let config_file = WorkspaceManager::dot_dir(&ws.root_path) + .join("config.toml") + .display() + .to_string(); - let cache_file = WorkspaceManager::cache_path(&ws.name) - .map(|p| p.display().to_string()) - .unwrap_or_else(|_| "unknown".to_string()); + let cache_file = WorkspaceManager::cache_path(&ws.root_path) + .display() + .to_string(); let username = if ws.username.is_empty() { "\u{2014}".to_string() @@ -376,15 +365,17 @@ fn render_workspace_detail(app: &App, ws: &WorkspaceConfig, frame: &mut Frame, a let default_label = if is_default { "Yes" } else { "No" }; let active_label = if is_active { "Yes" } else { "No" }; - let folder_name = std::path::Path::new(&ws.base_path) + let folder_name_owned = ws + .root_path .file_name() .and_then(|f| f.to_str()) - .unwrap_or(&ws.base_path); + .unwrap_or(ws_tilde_path.as_str()) + .to_string(); let mut lines = vec![ Line::from(""), Line::from(Span::styled( - format!(" Workspace: {}", folder_name), + format!(" Workspace: {}", folder_name_owned), section_style, )), Line::from(""), @@ -423,7 +414,7 @@ fn render_workspace_detail(app: &App, ws: &WorkspaceConfig, frame: &mut Frame, a lines.push(detail_row_with_hint( area, "Path", - &ws.base_path, + &ws_tilde_path, None, dim, val_style, diff --git a/src/tui/screens/workspaces_tests.rs b/src/tui/screens/workspaces_tests.rs index 1fb1e9d..b458166 100644 --- a/src/tui/screens/workspaces_tests.rs +++ b/src/tui/screens/workspaces_tests.rs @@ -23,11 +23,13 @@ fn wrap_comma_separated_values_empty_means_all() { } fn build_workspace_app(default_workspace: Option<&str>) -> App { - let mut config = Config::default(); - config.default_workspace = default_workspace.map(ToString::to_string); + let config = 
Config { + default_workspace: default_workspace.map(ToString::to_string), + ..Config::default() + }; - let ws = WorkspaceConfig::new("test-ws", "/tmp/test-ws"); - let mut app = App::new(config, vec![ws.clone()]); + let ws = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/test-ws")); + let mut app = App::new(config, vec![ws.clone()], false); app.screen = Screen::Workspaces; app.workspace_index = 0; app.active_workspace = Some(ws); @@ -78,11 +80,10 @@ async fn workspace_key_c_toggles_config_expansion() { #[tokio::test] async fn workspace_left_right_controls_panel_focus_and_list_movement() { - let mut config = Config::default(); - config.default_workspace = None; - let ws1 = WorkspaceConfig::new("ws1", "/tmp/ws1"); - let ws2 = WorkspaceConfig::new("ws2", "/tmp/ws2"); - let mut app = App::new(config, vec![ws1.clone(), ws2]); + let config = Config::default(); + let ws1 = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/ws1")); + let ws2 = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/ws2")); + let mut app = App::new(config, vec![ws1.clone(), ws2], false); app.screen = Screen::Workspaces; app.workspace_index = 0; app.active_workspace = Some(ws1); @@ -162,7 +163,8 @@ async fn workspace_enter_selects_workspace_even_if_active() { #[tokio::test] async fn workspace_key_d_does_not_clear_when_already_default() { - let mut app = build_workspace_app(Some("test-ws")); + // The tilde_collapse_path of /tmp/test-ws should be "/tmp/test-ws" (no ~ replacement) + let mut app = build_workspace_app(Some("/tmp/test-ws")); let (tx, mut rx) = tokio::sync::mpsc::unbounded_channel(); handle_key( @@ -172,19 +174,9 @@ async fn workspace_key_d_does_not_clear_when_already_default() { ) .await; - assert_eq!(app.config.default_workspace.as_deref(), Some("test-ws")); - assert!(matches!(rx.try_recv(), Err(TryRecvError::Empty))); -} - -#[test] -fn next_default_workspace_name_is_set_only() { assert_eq!( - next_default_workspace_name(Some("current"), "next"), - 
Some("next".to_string()) - ); - assert_eq!(next_default_workspace_name(Some("same"), "same"), None); - assert_eq!( - next_default_workspace_name(None, "selected"), - Some("selected".to_string()) + app.config.default_workspace.as_deref(), + Some("/tmp/test-ws") ); + assert!(matches!(rx.try_recv(), Err(TryRecvError::Empty))); } diff --git a/src/tui/ui.rs b/src/tui/ui.rs index e76a8db..27ea244 100644 --- a/src/tui/ui.rs +++ b/src/tui/ui.rs @@ -7,7 +7,6 @@ use ratatui::Frame; /// Render the current screen. pub fn render(app: &mut App, frame: &mut Frame) { match app.screen { - Screen::SystemCheck => screens::system_check::render(app, frame), Screen::WorkspaceSetup => { if let Some(ref setup) = app.setup_state { crate::setup::ui::render(setup, frame); diff --git a/src/workflows/status_scan_tests.rs b/src/workflows/status_scan_tests.rs index 19db8e1..6599683 100644 --- a/src/workflows/status_scan_tests.rs +++ b/src/workflows/status_scan_tests.rs @@ -4,7 +4,8 @@ use crate::config::{Config, WorkspaceConfig}; #[test] fn scan_workspace_status_returns_empty_when_base_path_missing() { let config = Config::default(); - let workspace = WorkspaceConfig::new("missing", "/tmp/git-same-does-not-exist-xyz"); + let workspace = + WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/git-same-does-not-exist-xyz")); let entries = scan_workspace_status(&config, &workspace); assert!(entries.is_empty()); @@ -14,7 +15,7 @@ fn scan_workspace_status_returns_empty_when_base_path_missing() { fn scan_workspace_status_returns_empty_for_empty_directory() { let temp = tempfile::tempdir().unwrap(); let config = Config::default(); - let workspace = WorkspaceConfig::new("empty", temp.path().to_string_lossy().to_string()); + let workspace = WorkspaceConfig::new_from_root(temp.path()); let entries = scan_workspace_status(&config, &workspace); assert!(entries.is_empty()); diff --git a/src/workflows/sync_workspace.rs b/src/workflows/sync_workspace.rs index 7b7643d..53f1a30 100644 --- 
a/src/workflows/sync_workspace.rs +++ b/src/workflows/sync_workspace.rs @@ -63,11 +63,9 @@ pub async fn prepare_sync_workspace( request: SyncWorkspaceRequest<'_>, discovery_progress: &dyn DiscoveryProgress, ) -> Result { - let provider_entry = request.workspace.provider.to_provider_entry(); - // Authenticate and build provider - let auth = get_auth_for_provider(&provider_entry)?; - let provider = create_provider(&provider_entry, &auth.token)?; + let auth = get_auth_for_provider(&request.workspace.provider)?; + let provider = create_provider(&request.workspace.provider, &auth.token)?; // Build orchestrator from workspace + global config let mut filters = request.workspace.filters.clone(); @@ -89,7 +87,7 @@ pub async fn prepare_sync_workspace( let mut cache_age_secs = None; if !request.refresh { - if let Ok(cache_manager) = CacheManager::for_workspace(&request.workspace.name) { + if let Ok(cache_manager) = CacheManager::for_workspace(&request.workspace.root_path) { if let Ok(Some(cache)) = cache_manager.load() { let discovery_options = orchestrator.to_discovery_options(); used_cache = true; @@ -131,18 +129,15 @@ pub async fn prepare_sync_workspace( .await .map_err(AppError::Provider)?; - if let Ok(cache_manager) = CacheManager::for_workspace(&request.workspace.name) { - let provider_label = provider_entry - .name - .clone() - .unwrap_or_else(|| provider_entry.kind.to_string()); + if let Ok(cache_manager) = CacheManager::for_workspace(&request.workspace.root_path) { + let provider_label = request.workspace.provider.kind.slug().to_string(); let mut repos_by_provider = HashMap::new(); repos_by_provider.insert(provider_label, repos.clone()); let cache = DiscoveryCache::new(auth.username.clone().unwrap_or_default(), repos_by_provider); if let Err(e) = cache_manager.save(&cache) { warn!( - workspace = %request.workspace.name, + workspace = %request.workspace.root_path.display(), error = %e, "Failed to save discovery cache" ); @@ -168,7 +163,7 @@ pub async fn 
prepare_sync_workspace( } } - let provider_name = provider_entry.kind.slug().to_string(); + let provider_name = request.workspace.provider.kind.slug().to_string(); let git = ShellGit::new(); let plan = orchestrator.plan_clone(&base_path, repos.clone(), &provider_name, &git); let (to_sync, skipped_sync) = orchestrator.plan_sync( @@ -240,7 +235,7 @@ pub async fn prepare_sync_workspace( base_path, structure, provider_name, - provider_prefer_ssh: provider_entry.prefer_ssh, + provider_prefer_ssh: request.workspace.provider.prefer_ssh, skip_uncommitted: request.skip_uncommitted, sync_mode, requested_concurrency, diff --git a/src/workflows/sync_workspace_tests.rs b/src/workflows/sync_workspace_tests.rs index 9248b25..f9c092e 100644 --- a/src/workflows/sync_workspace_tests.rs +++ b/src/workflows/sync_workspace_tests.rs @@ -26,7 +26,7 @@ fn prepared_workspace(with_clone: bool, with_sync: bool) -> PreparedSyncWorkspac }; PreparedSyncWorkspace { - workspace: WorkspaceConfig::new("ws", "/tmp"), + workspace: WorkspaceConfig::new_from_root(std::path::Path::new("/tmp")), auth: AuthResult { token: "token".to_string(), method: ResolvedAuthMethod::GhCli, @@ -87,7 +87,7 @@ async fn execute_prepared_sync_with_no_work_returns_empty_outcome() { #[test] fn sync_workspace_request_holds_expected_values() { let config = Config::default(); - let workspace = WorkspaceConfig::new("team", "/tmp/team"); + let workspace = WorkspaceConfig::new_from_root(std::path::Path::new("/tmp/team")); let request = SyncWorkspaceRequest { config: &config, @@ -104,5 +104,8 @@ fn sync_workspace_request_holds_expected_values() { assert!(!request.skip_uncommitted); assert_eq!(request.concurrency_override, Some(7)); assert!(request.create_base_path); - assert_eq!(request.workspace.name, "team"); + assert_eq!( + request.workspace.root_path, + std::path::PathBuf::from("/tmp/team") + ); } diff --git a/tests/integration_test.rs b/tests/integration_test.rs index 0a43ec3..004a20b 100644 --- a/tests/integration_test.rs +++ 
b/tests/integration_test.rs @@ -27,6 +27,72 @@ fn run_cli_with_env(home: &Path, args: &[&str]) -> std::process::Output { .expect("Failed to execute git-same") } +fn default_config_path(home: &Path) -> PathBuf { + home.join(".config").join("git-same").join("config.toml") +} + +fn write_workspace_config(root: &Path) { + let dot_dir = root.join(".git-same"); + std::fs::create_dir_all(&dot_dir).expect("Failed to create workspace metadata dir"); + std::fs::write( + dot_dir.join("config.toml"), + r#"[provider] +kind = "github" +"#, + ) + .expect("Failed to write workspace config"); +} + +fn setup_registered_workspaces(home: &Path, roots: &[PathBuf]) { + std::fs::create_dir_all(home.join(".config")).expect("Failed to create config dir"); + std::fs::create_dir_all(home.join(".cache")).expect("Failed to create cache dir"); + + let init_output = run_cli_with_env(home, &["init", "--force"]); + assert!( + init_output.status.success(), + "Init failed.\nstdout:\n{}\nstderr:\n{}", + String::from_utf8_lossy(&init_output.stdout), + String::from_utf8_lossy(&init_output.stderr) + ); + + for root in roots { + write_workspace_config(root); + } + + let repos_root = home.join("repos"); + let repos_arg = repos_root + .to_str() + .expect("Repos root path is not valid UTF-8"); + let scan_output = run_cli_with_env(home, &["scan", repos_arg, "--register"]); + assert!( + scan_output.status.success(), + "Scan/register failed.\nstdout:\n{}\nstderr:\n{}", + String::from_utf8_lossy(&scan_output.stdout), + String::from_utf8_lossy(&scan_output.stderr) + ); +} + +fn read_default_workspace(path: &Path) -> Option { + let content = std::fs::read_to_string(path).expect("Failed to read config file"); + let doc: toml::Value = toml::from_str(&content).expect("Failed to parse config TOML"); + doc.get("default_workspace") + .and_then(|v| v.as_str()) + .map(ToString::to_string) +} + +fn read_workspace_registry(path: &Path) -> Vec { + let content = std::fs::read_to_string(path).expect("Failed to read config 
file"); + let doc: toml::Value = toml::from_str(&content).expect("Failed to parse config TOML"); + doc.get("workspaces") + .and_then(|v| v.as_array()) + .map(|arr| { + arr.iter() + .filter_map(|v| v.as_str().map(ToString::to_string)) + .collect() + }) + .unwrap_or_default() +} + fn assert_banner_branding(stdout: &str) { let description = env!("CARGO_PKG_DESCRIPTION"); assert!( @@ -45,6 +111,11 @@ fn assert_banner_branding(stdout: &str) { "Unexpected legacy version suffix in subheadline, got:\n{}", stdout ); + assert!( + !stdout.contains("GT-SAME"), + "Unexpected legacy GT-SAME banner text in stdout, got:\n{}", + stdout + ); } #[test] @@ -202,7 +273,6 @@ fn test_init_creates_config() { // Verify content is valid TOML let content = std::fs::read_to_string(&config_path).expect("Failed to read config"); - assert!(content.contains("base_path")); assert!(content.contains("concurrency")); } @@ -238,8 +308,8 @@ fn test_init_force_overwrites() { // Verify content was overwritten let content = std::fs::read_to_string(&config_path).expect("Failed to read config"); assert!( - content.contains("base_path"), - "Config should contain base_path" + content.contains("concurrency"), + "Config should contain concurrency setting" ); } @@ -272,7 +342,9 @@ fn test_status_nonexistent_workspace() { assert!( stderr.contains("not found") || stderr.contains("No workspaces") - || stderr.contains("No workspace configured"), + || stderr.contains("No workspace configured") + || stderr.contains("No workspace config found") + || stderr.contains("Configuration error"), "Expected workspace not found error, got: {}", stderr ); @@ -340,6 +412,157 @@ fn test_workspace_list() { assert!(output.status.success()); } +#[test] +fn test_workspace_default_accepts_unique_folder_name() { + use tempfile::TempDir; + + let temp = TempDir::new().expect("Failed to create temp dir"); + let home = temp.path().join("home"); + + let ws_target = home.join("repos").join("team-a").join("work"); + let ws_other = 
home.join("repos").join("team-b").join("other"); + setup_registered_workspaces(&home, &[ws_target.clone(), ws_other]); + + let set_output = run_cli_with_env(&home, &["workspace", "default", "work"]); + assert!( + set_output.status.success(), + "workspace default by folder name failed.\nstdout:\n{}\nstderr:\n{}", + String::from_utf8_lossy(&set_output.stdout), + String::from_utf8_lossy(&set_output.stderr) + ); + + let config_path = default_config_path(&home); + let default_workspace = + read_default_workspace(&config_path).expect("Expected default_workspace to be set"); + let expected_default_suffix = Path::new("repos").join("team-a").join("work"); + assert!( + Path::new(&default_workspace).ends_with(&expected_default_suffix), + "Expected default workspace to point at team-a/work, got '{}'", + default_workspace + ); +} + +#[test] +fn test_workspace_default_rejects_ambiguous_folder_name() { + use tempfile::TempDir; + + let temp = TempDir::new().expect("Failed to create temp dir"); + let home = temp.path().join("home"); + + let ws_a = home.join("repos").join("team-a").join("work"); + let ws_b = home.join("repos").join("team-b").join("work"); + setup_registered_workspaces(&home, &[ws_a, ws_b]); + + let output = run_cli_with_env(&home, &["workspace", "default", "work"]); + assert!( + !output.status.success(), + "Expected ambiguous selector to fail.\nstdout:\n{}\nstderr:\n{}", + String::from_utf8_lossy(&output.stdout), + String::from_utf8_lossy(&output.stderr) + ); + + let stderr = String::from_utf8_lossy(&output.stderr).to_lowercase(); + assert!( + stderr.contains("ambiguous") && stderr.contains("explicit path"), + "Expected ambiguous selector guidance in stderr, got:\n{}", + stderr + ); + + let config_path = default_config_path(&home); + let default_workspace = read_default_workspace(&config_path); + assert!( + default_workspace.is_none(), + "default_workspace should remain unset on ambiguous selector, got {:?}", + default_workspace + ); +} + +#[test] +fn 
test_scan_register_requires_init() { + use tempfile::TempDir; + + let temp = TempDir::new().expect("Failed to create temp dir"); + let home = temp.path().join("home"); + let repos = home.join("repos"); + let ws_root = repos.join("team").join("project"); + write_workspace_config(&ws_root); + + let repos_arg = repos.to_str().expect("Repos path is not valid UTF-8"); + let output = run_cli_with_env(&home, &["scan", repos_arg, "--register"]); + assert!( + !output.status.success(), + "scan --register should fail when config is missing.\nstdout:\n{}\nstderr:\n{}", + String::from_utf8_lossy(&output.stdout), + String::from_utf8_lossy(&output.stderr) + ); + + let stderr = String::from_utf8_lossy(&output.stderr); + assert!( + stderr.contains("Run 'gisa init' first") || stderr.contains("Config file not found"), + "Expected init guidance in stderr, got:\n{}", + stderr + ); + + assert!( + !default_config_path(&home).exists(), + "Config file should not be auto-created in this flow" + ); +} + +#[test] +fn test_scan_register_uses_custom_config_path() { + use tempfile::TempDir; + + let temp = TempDir::new().expect("Failed to create temp dir"); + let home = temp.path().join("home"); + let repos = home.join("repos"); + let ws_root = repos.join("team").join("project"); + write_workspace_config(&ws_root); + + let custom_config_path = temp.path().join("custom-config.toml"); + let custom_config_arg = custom_config_path + .to_str() + .expect("Custom config path is not valid UTF-8"); + let init_output = run_cli_with_env(&home, &["init", "--path", custom_config_arg, "--force"]); + assert!( + init_output.status.success(), + "init --path failed.\nstdout:\n{}\nstderr:\n{}", + String::from_utf8_lossy(&init_output.stdout), + String::from_utf8_lossy(&init_output.stderr) + ); + + let repos_arg = repos.to_str().expect("Repos path is not valid UTF-8"); + let scan_output = run_cli_with_env( + &home, + &["-C", custom_config_arg, "scan", repos_arg, "--register"], + ); + assert!( + 
scan_output.status.success(), + "scan --register with custom config failed.\nstdout:\n{}\nstderr:\n{}", + String::from_utf8_lossy(&scan_output.stdout), + String::from_utf8_lossy(&scan_output.stderr) + ); + + assert!( + !default_config_path(&home).exists(), + "Default config should not be required when -C is provided" + ); + + let workspaces = read_workspace_registry(&custom_config_path); + assert_eq!( + workspaces.len(), + 1, + "Expected one registered workspace in custom config, got {:?}", + workspaces + ); + let expected_registered_suffix = Path::new("repos").join("team").join("project"); + assert!( + Path::new(&workspaces[0]).ends_with(&expected_registered_suffix), + "Expected registered workspace path to point at repos/team/project, got '{}'", + workspaces[0] + ); +} + #[test] fn test_missing_config_suggests_init() { let output = Command::new(git_same_binary()) @@ -412,6 +635,32 @@ fn test_cli_subcommands_use_dashboard_subheadline() { } } +#[test] +fn test_workspace_list_uses_canonical_banner() { + use tempfile::TempDir; + + let temp = TempDir::new().expect("Failed to create temp dir"); + let home = temp.path().join("home"); + std::fs::create_dir_all(home.join(".config")).expect("Failed to create config dir"); + std::fs::create_dir_all(home.join(".cache")).expect("Failed to create cache dir"); + + let config_path = temp.path().join("config.toml"); + let config_str = config_path + .to_str() + .expect("Config path is not valid UTF-8"); + + let init_output = run_cli_with_env(&home, &["init", "--path", config_str, "--force"]); + assert!( + init_output.status.success(), + "Init failed: {:?}", + init_output + ); + + let workspace_output = run_cli_with_env(&home, &["-C", config_str, "workspace", "list"]); + assert!(workspace_output.status.success(), "workspace list failed"); + assert_banner_branding(&String::from_utf8_lossy(&workspace_output.stdout)); +} + #[test] fn test_banner_source_no_legacy_version_subheadline() { let source = include_str!("../src/banner.rs"); 
diff --git a/toolkit/Conductor/archive.sh b/toolkit/conductor/archive.sh similarity index 66% rename from toolkit/Conductor/archive.sh rename to toolkit/conductor/archive.sh index f546996..78261db 100755 --- a/toolkit/Conductor/archive.sh +++ b/toolkit/conductor/archive.sh @@ -10,7 +10,23 @@ cd "$PROJECT_DIR" PACKAGE_NAME="git-same" CARGO_BIN_DIR="${CARGO_HOME:-$HOME/.cargo}/bin" -BINARIES=("git-same" "gitsame" "gitsa" "gisa") +ALIAS_FILE="$PROJECT_DIR/toolkit/packaging/binary-aliases.txt" +if [ -f "$ALIAS_FILE" ]; then + BINARIES=() + while IFS= read -r line || [ -n "$line" ]; do + line="${line%%#*}" + line="${line#"${line%%[![:space:]]*}"}" + line="${line%"${line##*[![:space:]]}"}" + [ -n "$line" ] && BINARIES+=("$line") + done < "$ALIAS_FILE" + if [ ${#BINARIES[@]} -eq 0 ]; then + echo "WARNING: $ALIAS_FILE contains no aliases, falling back to hardcoded list." + BINARIES=("git-same" "gitsame" "gitsa" "gisa") + fi +else + echo "WARNING: $ALIAS_FILE not found, falling back to hardcoded list." + BINARIES=("git-same" "gitsame" "gitsa" "gisa") +fi echo "========================================" echo " Git-Same (Gisa CLI) Archive" diff --git a/toolkit/Conductor/run.sh b/toolkit/conductor/run.sh similarity index 66% rename from toolkit/Conductor/run.sh rename to toolkit/conductor/run.sh index c1b315b..da80e50 100755 --- a/toolkit/Conductor/run.sh +++ b/toolkit/conductor/run.sh @@ -9,18 +9,49 @@ PROJECT_DIR="$(cd "$SCRIPT_DIR/../.." && pwd)" cd "$PROJECT_DIR" CARGO_BIN_DIR="${CARGO_HOME:-$HOME/.cargo}/bin" -GS_COMMAND="$CARGO_BIN_DIR/gisa" +ALIAS_FILE="$PROJECT_DIR/toolkit/packaging/binary-aliases.txt" +if [ ! 
-r "$ALIAS_FILE" ]; then + echo "ERROR: Alias manifest not found or unreadable: $ALIAS_FILE" + exit 1 +fi + +BINARIES=() +while IFS= read -r line || [ -n "$line" ]; do + line="${line%%#*}" + line="${line#"${line%%[![:space:]]*}"}" + line="${line%"${line##*[![:space:]]}"}" + [ -n "$line" ] && BINARIES+=("$line") +done < "$ALIAS_FILE" -# Install to ensure all binaries are up to date -echo "Installing with: cargo install --path ." -cargo install --path . +if [ ${#BINARIES[@]} -eq 0 ]; then + echo "ERROR: Alias manifest contains no aliases: $ALIAS_FILE" + exit 1 +fi + +PRIMARY_BIN="${BINARIES[0]}" +GS_COMMAND="$CARGO_BIN_DIR/$PRIMARY_BIN" + +# Install primary binary +echo "Installing with: cargo install --path . --force" +cargo install --path . --force echo "" -if [ ! -x "$GS_COMMAND" ]; then - echo "ERROR: gisa installation failed." +if [ ! -x "$CARGO_BIN_DIR/$PRIMARY_BIN" ]; then + echo "ERROR: $PRIMARY_BIN installation failed." exit 1 fi +# Create alias symlinks from manifest (skip primary) +for alias in "${BINARIES[@]:1}"; do + # Replace stale standalone alias binaries with a symlink to the primary binary. + if [ -e "$CARGO_BIN_DIR/$alias" ] && [ ! -L "$CARGO_BIN_DIR/$alias" ]; then + rm -f "$CARGO_BIN_DIR/$alias" + fi + ln -sf "$CARGO_BIN_DIR/$PRIMARY_BIN" "$CARGO_BIN_DIR/$alias" + echo " Symlinked: $alias -> $PRIMARY_BIN" +done +echo "" + # Warn if gisa is also installed elsewhere (e.g. Homebrew) RED='\033[0;31m' NC='\033[0m' diff --git a/toolkit/Conductor/setup.sh b/toolkit/conductor/setup.sh similarity index 91% rename from toolkit/Conductor/setup.sh rename to toolkit/conductor/setup.sh index fcb92b5..097953f 100755 --- a/toolkit/Conductor/setup.sh +++ b/toolkit/conductor/setup.sh @@ -71,6 +71,7 @@ echo " Setup Complete!" echo "========================================" echo "" echo "Next steps:" -echo " 1. Run: ./toolkit/Conductor/run.sh" -echo " 2. Or manually install: cargo install --path ." +echo " 1. Run: ./toolkit/conductor/run.sh" +echo " 2. 
Or manually install: cargo install --path . --force" +echo " (then refresh aliases via ./toolkit/conductor/run.sh)" echo "" diff --git a/toolkit/packaging/binary-aliases.txt b/toolkit/packaging/binary-aliases.txt new file mode 100644 index 0000000..11b7d88 --- /dev/null +++ b/toolkit/packaging/binary-aliases.txt @@ -0,0 +1,4 @@ +git-same +gitsame +gitsa +gisa