Compare commits

main..v0.23.0

No commits in common. "main" and "v0.23.0" have entirely different histories.

491 changed files with 43783 additions and 87744 deletions


@ -1,16 +1,5 @@
[profile.dev]
debug = "none"
incremental = false
[target.x86_64-unknown-linux-gnu]
rustflags = ["-Clink-arg=-fuse-ld=mold"]
[target.aarch64-unknown-linux-gnu]
rustflags = ["-Clink-arg=-fuse-ld=mold"]
[target.x86_64-pc-windows-msvc]
linker = "rust-lld.exe"
# NOTE: on Windows, build with the static CRT, so that produced .exe files don't
# depend on vcruntime140.dll; otherwise the user requires visual studio if they
# download a raw .exe
[target.x86_64-pc-windows-msvc]
rustflags = ["-Ctarget-feature=+crt-static"]


@ -1,3 +0,0 @@
[profile.ci]
slow-timeout = { period = "5s", terminate-after = 20 }
fail-fast = false


@ -2,12 +2,10 @@ root = true
[*]
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
# Turned off because some editors otherwise remove trailing spaces within
# multi-line string literals (intellij-rust/intellij-rust#5368).
trim_trailing_whitespace = false
[*.rs]
indent_style = space
indent_size = 4
# Turned off because some editors otherwise remove trailing spaces within
# multi-line string literals (intellij-rust/intellij-rust#5368).
trim_trailing_whitespace = false

.gitattributes vendored (2 changes)

@ -1,3 +1,3 @@
Cargo.lock linguist-generated=true merge=binary
flake.lock linguist-generated=true merge=binary
uv.lock linguist-generated=true merge=binary
poetry.lock linguist-generated=true merge=binary

.github/CODEOWNERS vendored (3 changes)

@ -1,3 +0,0 @@
# The maintainers own all files.
# See GOVERNANCE.md for the list of current maintainers.
* @jj-vcs/maintainers


@ -10,7 +10,7 @@ assignees: ''
## Description
<!-- Thanks for your report! Please describe your problem or request here.
For questions, use https://github.com/jj-vcs/jj/discussions/new instead.
For questions, use https://github.com/martinvonz/jj/discussions/new instead.
Feel free to remove any of the sections below if they don't seem useful. -->


@ -5,7 +5,7 @@ each commit representing one logical change. Address code review comments by
rewriting the commits rather than adding commits on top. Use force-push when
pushing the updated commits (`jj git push` does that automatically when you
rewrite commits). Merge the PR at will once it's been approved. See
https://github.com/jj-vcs/jj/blob/main/docs/contributing.md for details.
https://github.com/martinvonz/jj/blob/main/docs/contributing.md for details.
Note that you need to sign Google's CLA to contribute.
-->


@ -1,50 +0,0 @@
name: Configure Windows Builders
description: |
This action configures the Windows builders to run tests.
runs:
using: "composite"
steps:
# The GitHub Actions hosted Windows runners have a slow persistent
# `C:` drive and a temporary `D:` drive with better throughput. The
# repository checkout is placed on `D:` by default, but the user
# profile is on `C:`, slowing down access to temporary directories
# and the Rust toolchain we install. Since our build environment is
# ephemeral anyway, we can save a couple minutes of CI time by
# placing everything on `D:`.
#
# Some projects have reported even bigger wins by mounting a VHDX
# virtual drive with a ReFS file system on it, with or without the
# native Dev Drive feature available in Windows 2025, but it seems
# to make things slightly slower for us overall compared to `D:`.
# Further investigation and experimentation would be welcome!
#
# See: <https://chadgolden.com/blog/github-actions-hosted-windows-runners-slower-than-expected-ci-and-you>
- name: 'Set up D: drive'
shell: pwsh
run: |
# Set up D: drive
# Short file names are disabled by default on the `D:` drive,
# which breaks some of our tests. Enable them.
#
# This has a slight performance penalty, and won't be possible
# if we switch to ReFS/Dev Drives. The alternatives are to
# reduce CI coverage for the security mitigation the tests are
# checking, or arrange for those tests to take a separate path
# to a drive that supports short file names to use instead of
# the primary temporary directory.
fsutil 8dot3name set D: 0
# Move the temporary directory to `D:\Temp`.
New-Item -Path D:\ -Name Temp -ItemType directory
# Copy the effective permissions without inheritance.
$Acl = Get-Acl -Path $env:TMP
$Acl.SetAccessRuleProtection($true, $true)
Set-Acl -Path D:\Temp -AclObject $Acl
Add-Content -Path $env:GITHUB_ENV @"
TMP=D:\Temp
TEMP=D:\Temp
RUSTUP_HOME=D:\.rustup
CARGO_HOME=D:\.cargo
"@


@ -3,7 +3,7 @@ updates:
- package-ecosystem: "cargo"
directory: "/"
schedule:
interval: "weekly"
interval: "daily"
open-pull-requests-limit: 10
commit-message:
prefix: "cargo:"
@ -14,7 +14,7 @@ updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
interval: "daily"
open-pull-requests-limit: 10
commit-message:
prefix: "github:"


@ -1,5 +0,0 @@
#!/bin/sh
# This is extremely approximate because the Cargo.lock file contains
# dependencies for all features and platforms, but it helps us keep an eye on
# things.
grep -c '^\[\[package\]\]' Cargo.lock


@ -2,16 +2,21 @@
# Set up a virtual environment with the required tools, build, and deploy the docs.
#
# Run from the root directory of the project as
# .github/scripts/docs-build-deploy prerelease main
# .github/scripts/docs-build-deploy 'https://martinvonz.github.io' prerelease main
# All arguments after the first are passed to `mike deploy`, run
# `uv run -- mike deploy --help` for options. Note that `mike deploy`
# `poetry run -- mike deploy --help` for options. Note that `mike deploy`
# creates a commit directly on the `gh-pages` branch.
set -ev
export "SITE_URL_FOR_MKDOCS=$1"; shift
# Affects the generation of `sitemap.xml.gz` by `mkdocs`. See
# https://github.com/jimporter/mike/issues/103 and
# https://reproducible-builds.org/docs/source-date-epoch/
export SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct docs/ mkdocs.yml)
# https://github.com/python-poetry/poetry/issues/1917 and
# https://github.com/python-poetry/poetry/issues/8623
export PYTHON_KEYRING_BACKEND=keyring.backends.fail.Keyring
poetry install # Only really needed once per environment unless there are updates
# TODO: `--alias-type symlink` is the
# default, and may be nicer in some ways. However,
# this requires deploying to GH Pages via a "custom GitHub Action", as in
@ -19,4 +24,4 @@ export SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct docs/ mkdocs.yml)
# Otherwise, you get an error:
# > Site contained a symlink that should be dereferenced: /main.
# > For more information, see https://docs.github.com/github/working-with-github-pages/troubleshooting-jekyll-build-errors-for-github-pages-sites#config-file-error.
uv run -- mike deploy --alias-type copy "$@"
poetry run -- mike deploy --alias-type copy "$@"


@ -1,39 +0,0 @@
#!/usr/bin/env bash
# This script invokes the forbidden power of an ancient evil in order to defend
# the one thing we hold most dear: bureaucratic norms
# Many thanks to Phabricator (and Evan) for the vintage ASCII art (Apache 2.0)
# <https://github.com/phacility/phabricator/blob/5720a38cfe95b00ca4be5016dd0d2f3195f4fa04/scripts/repository/commit_hook.php#L203>
rejection_reason=${1:-"No reason provided. The Dragons have spoken."}
cat >&2 <<'EOF'
+---------------------------------------------------------------+
| * * * PUSH REJECTED BY EVIL DRAGON BUREAUCRATS * * * |
+---------------------------------------------------------------+
\
\ ^ /^
\ / \ // \
\ |\___/| / \// .\
\ /V V \__ / // | \ \ *----*
/ / \/_/ // | \ \ \ |
@___@` \/_ // | \ \ \/\ \
0/0/| \/_ // | \ \ \ \
0/0/0/0/| \/// | \ \ | |
0/0/0/0/0/_|_ / ( // | \ _\ | /
0/0/0/0/0/0/`/,_ _ _/ ) ; -. | _ _\.-~ / /
,-} _ *-.|.-~-. .~ ~
* \__/ `/\ / ~-. _ .-~ /
\____(Oo) *. } { /
( (..) .----~-.\ \-` .~
//___\\\\ \ DENIED! ///.----..< \ _ -~
// \\\\ ///-._ _ _ _ _ _ _{^ - - - - ~
EOF
cat >&2 <<EOF
$rejection_reason
EOF
exit 1


@ -5,7 +5,7 @@ on:
branches:
- main
permissions: {}
permissions: read-all
jobs:
binaries:
@ -22,10 +22,10 @@ jobs:
os: ubuntu-24.04
target: x86_64-unknown-linux-gnu
- build: linux-aarch64-musl
os: ubuntu-24.04-arm
os: ubuntu-24.04
target: aarch64-unknown-linux-musl
- build: linux-aarch64-gnu
os: ubuntu-24.04-arm
os: ubuntu-24.04
target: aarch64-unknown-linux-gnu
- build: macos-x86_64
os: macos-13
@ -37,39 +37,44 @@ jobs:
os: windows-2022
target: x86_64-pc-windows-msvc
runs-on: ${{ matrix.os }}
timeout-minutes: 20 # NOTE (aseipp): tests aren't run but sometimes builds take a while
timeout-minutes: 15 # NOTE (aseipp): keep in-sync with the build.yml timeout limit
name: Build binary artifacts
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
persist-credentials: false
- name: Install packages (Ubuntu)
if: startsWith(matrix.os, 'ubuntu')
if: matrix.os == 'ubuntu-24.04'
run: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends xz-utils liblz4-tool musl-tools
- name: Install Rust
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b
uses: dtolnay/rust-toolchain@1482605bfc5719782e1267fd0c0cc350fe7646b8
with:
toolchain: stable
target: ${{ matrix.target }}
- name: Build release binary
shell: bash
run: cargo build --target ${{ matrix.target }} --verbose --release
run: |
CARGO_CMD=cargo
if [[ "${{ matrix.target }}" = aarch64-unknown-linux* ]]; then
echo "Downloading 'cross' binary for aarch64-linux..."
wget -c https://github.com/cross-rs/cross/releases/download/v0.2.5/cross-x86_64-unknown-linux-gnu.tar.gz -O - | tar -xz
CARGO_CMD=$PWD/cross
fi
$CARGO_CMD build --target ${{ matrix.target }} --verbose --release --features packaging,vendored-openssl
- name: Set up artifact directory
- name: Setup artifact directory
shell: bash
run: |
outdir="target/${{ matrix.target }}/release"
BIN=$outdir/jj
[[ "${{ matrix.os }}" == "windows-latest" ]] && BIN+=".exe"
[[ "${{ matrix.os }}" == "windows-latest" ]] && BIN+=".exe"
mkdir -p target/out
cp $BIN target/out
- name: Publish binary artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
with:
name: jj-${{ matrix.target }}
path: target/out

.github/workflows/build-nix.yml vendored Normal file (27 changes)

@ -0,0 +1,27 @@
name: nix
on:
push:
branches:
- main
pull_request:
permissions: read-all
jobs:
nix:
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-14]
runs-on: ${{ matrix.os }}
timeout-minutes: 15 # NOTE (aseipp): keep in-sync with the build.yml timeout limit
name: flake check
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 0
- uses: DeterminateSystems/nix-installer-action@b92f66560d6f97d6576405a7bae901ab57e72b6a
- uses: DeterminateSystems/magic-nix-cache-action@87b14cf437d03d37989d87f0fa5ce4f5dc1a330b
- run: nix flake check -L --show-trace

.github/workflows/build.yml vendored Normal file (185 changes)

@ -0,0 +1,185 @@
name: build
on:
push:
pull_request:
permissions: read-all
env:
CARGO_INCREMENTAL: 0
CARGO_PROFILE_DEV_DEBUG: 0
jobs:
build:
strategy:
fail-fast: false
matrix:
# macos-13 is x86; macos-14 is ARM
os: [ubuntu-latest, macos-13, macos-14, windows-latest]
cargo_flags: [""]
include:
- os: ubuntu-latest
cargo_flags: "--all-features"
runs-on: ${{ matrix.os }}
# TODO FIXME (aseipp): keep the timeout limit to ~15 minutes. this is long
# enough to give us runway for the future, but also once we hit it, we're at
# the "builds are taking too long" stage and we should start looking at ways
# to optimize the CI.
#
# at the same time, this avoids some issues where some flaky, bugged tests
# seem to be causing multi-hour runs on Windows (GPG signing issues), which
# is a problem we should fix. in the mean time, this will make these flakes
# less harmful, as it won't cause builds to spin for multiple hours, requiring
# manual cancellation.
timeout-minutes: 15
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
# The default version of gpg installed on the runners is a version baked in with git
# which only contains the components needed by git and doesn't work for our test cases.
#
# This installs the latest gpg4win version, which is a variation of GnuPG built for
# Windows.
#
# There is some issue with windows PATH max length which is what all the PATH wrangling
# below is for. Please see the below link for where this fix was derived from:
# https://github.com/orgs/community/discussions/24933
- name: Setup GnuPG [windows]
if: ${{ matrix.os == 'windows-latest' }}
run: |
$env:PATH = "C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem;C:\Windows\System32\WindowsPowerShell\v1.0\;C:\ProgramData\chocolatey\bin"
[Environment]::SetEnvironmentVariable("Path", $env:PATH, "Machine")
choco install --yes gpg4win
echo "C:\Program Files (x86)\Gpg4win\..\GnuPG\bin" >> $env:GITHUB_PATH
# The default version of openssh on windows server is quite old (8.1) and doesn't have
# all the necessary signing/verification commands available (such as -Y find-principals)
- name: Setup ssh-agent [windows]
if: ${{ matrix.os == 'windows-latest' }}
run: |
Remove-WindowsCapability -Online -Name OpenSSH.Server~~~~0.0.1.0
Remove-WindowsCapability -Online -Name OpenSSH.Client~~~~0.0.1.0
choco install openssh --pre
- name: Install Rust
uses: dtolnay/rust-toolchain@1482605bfc5719782e1267fd0c0cc350fe7646b8
with:
toolchain: 1.76
- name: Build
run: cargo build --workspace --all-targets --verbose ${{ matrix.cargo_flags }}
- name: Test
run: cargo test --workspace --all-targets --verbose ${{ matrix.cargo_flags }}
env:
RUST_BACKTRACE: 1
build-no-git:
name: Build jj-lib without Git support
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Install Rust
uses: dtolnay/rust-toolchain@1482605bfc5719782e1267fd0c0cc350fe7646b8
with:
toolchain: 1.76
- name: Build
run: cargo build -p jj-lib --no-default-features --verbose
check-protos:
name: Check protos
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- uses: dtolnay/rust-toolchain@1482605bfc5719782e1267fd0c0cc350fe7646b8
with:
toolchain: stable
- run: sudo apt update && sudo apt-get -y install protobuf-compiler
- name: Generate Rust code from .proto files
run: cargo run -p gen-protos
- name: Check for uncommitted changes
run: git diff --exit-code
rustfmt:
name: Check formatting
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- uses: dtolnay/rust-toolchain@1482605bfc5719782e1267fd0c0cc350fe7646b8
with:
toolchain: nightly
components: rustfmt
- run: cargo +nightly fmt --all -- --check
mkdocs:
name: Check that MkDocs can build the docs
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
with:
python-version: 3.11
- name: Install poetry (latest release)
uses: abatilo/actions-poetry@e78f54a89cb052fff327414dd9ff010b5d2b4dbd
with:
poetry-version: latest
- name: Install dependencies
run: poetry install
- name: Check that `mkdocs` can build the docs
run: poetry run -- mkdocs build --strict
mkdocs-old-poetry:
name: Check that MkDocs can build the docs with Poetry 1.8
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
with:
python-version: 3.11
- name: Install poetry
uses: abatilo/actions-poetry@e78f54a89cb052fff327414dd9ff010b5d2b4dbd
with:
# Test with the version of Poetry in Debian stable. If this starts
# failing, we should increase this version and document the minimum
# necessary version of Poetry in contributing.md.
#
# One way to install old `poetry` is using `pipx`:
# pipx install 'poetry<1.4' --suffix -1.3
poetry-version: 1.8
- name: Install dependencies
run: poetry install
- name: Check that `mkdocs` can build the docs
run: poetry run -- mkdocs build --strict
cargo-deny:
runs-on: ubuntu-latest
strategy:
matrix:
checks:
- advisories
- bans licenses sources
# Prevent sudden announcement of a new advisory from failing ci:
continue-on-error: ${{ matrix.checks == 'advisories' }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- uses: EmbarkStudios/cargo-deny-action@8371184bd11e21dcf8ac82ebf8c9c9f74ebf7268
with:
command: check ${{ matrix.checks }}
clippy-check:
name: Clippy check
permissions:
checks: write
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- uses: dtolnay/rust-toolchain@1482605bfc5719782e1267fd0c0cc350fe7646b8
with:
toolchain: stable
components: clippy
- run: cargo +stable clippy --all-features --workspace --all-targets -- -D warnings


@ -1,341 +0,0 @@
name: ci
on:
pull_request:
merge_group:
concurrency:
group: >-
${{ github.workflow }}-${{
github.event.pull_request.number
|| github.event.merge_group.head_ref
}}
cancel-in-progress: true
permissions: {}
jobs:
test:
strategy:
fail-fast: ${{ github.event_name == 'merge_group' }}
matrix:
build: [linux-x86_64-gnu, linux-aarch64-gnu, macos-x86_64, macos-aarch64, windows-x86_64]
include:
- build: linux-x86_64-gnu
os: ubuntu-24.04
cargo_flags: "--all-features"
- build: linux-aarch64-gnu
os: ubuntu-24.04-arm
cargo_flags: "--all-features"
- build: macos-x86_64
os: macos-13
cargo_flags: ""
- build: macos-aarch64
os: macos-14
cargo_flags: ""
- build: windows-x86_64
os: windows-2022
cargo_flags: ""
runs-on: ${{ matrix.os }}
# TODO FIXME (aseipp): keep the timeout limit to ~20 minutes. this is long
# enough to give us runway for the future, but also once we hit it, we're at
# the "builds are taking too long" stage and we should start looking at ways
# to optimize the CI, or the CI is flaking out on some weird spiked machine
#
# at the same time, this avoids some issues where some flaky, bugged tests
# seem to be causing multi-hour runs on Windows (GPG signing issues), which
# is a problem we should fix. in the mean time, this will make these flakes
# less harmful, as it won't cause builds to spin for multiple hours, requiring
# manual cancellation.
timeout-minutes: 20
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
persist-credentials: false
- name: Set up Windows Builders
if: startswith(matrix.os, 'windows')
uses: ./.github/actions/setup-windows
- name: Install Rust
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b
with:
toolchain: 1.84
- uses: taiki-e/install-action@86c23eed46c17b80677df6d8151545ce3e236c61
with:
tool: nextest,taplo-cli
- name: Install mold
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50
with:
make-default: false
- name: Build
run: >-
cargo build
--config .cargo/config-ci.toml
--workspace
--all-targets
--verbose
${{ matrix.cargo_flags }}
- name: Test
run: >-
cargo nextest run
--config .cargo/config-ci.toml
--workspace
--all-targets
--verbose
--profile ci
${{ matrix.cargo_flags }}
env:
RUST_BACKTRACE: 1
CARGO_TERM_COLOR: always
no-git:
name: build (no git)
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
persist-credentials: false
- name: Install Rust
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b
with:
toolchain: 1.84
- name: Build
run: cargo build -p jj-cli --no-default-features --verbose
build-nix:
name: nix flake
strategy:
fail-fast: ${{ github.event_name == 'merge_group' }}
matrix:
os: [ubuntu-24.04, ubuntu-24.04-arm, macos-14]
runs-on: ${{ matrix.os }}
timeout-minutes: 15
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 0
persist-credentials: false
- uses: DeterminateSystems/nix-installer-action@21a544727d0c62386e78b4befe52d19ad12692e3
- run: nix flake check -L --show-trace
check-protos:
name: check (protos)
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
persist-credentials: false
- uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b
with:
toolchain: stable
- run: sudo apt update && sudo apt-get -y install protobuf-compiler
- name: Generate Rust code from .proto files
run: cargo run -p gen-protos
- name: Check for uncommitted changes
run: git diff --exit-code
check-rustfmt:
name: check (rustfmt)
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
persist-credentials: false
- uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b
with:
toolchain: nightly
components: rustfmt
- run: cargo +nightly fmt --all -- --check
check-clippy:
name: check (clippy)
permissions:
checks: write
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
persist-credentials: false
- uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b
with:
toolchain: stable
components: clippy
- run: cargo +stable clippy --all-features --workspace --all-targets -- -D warnings
check-cargo-deny:
runs-on: ubuntu-24.04
strategy:
matrix:
checks:
- advisories
- bans
- licenses
- sources
# Prevent sudden announcement of a new advisory from failing ci:
continue-on-error: ${{ matrix.checks == 'advisories' }}
name: check (cargo-deny, ${{ matrix.checks }})
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
persist-credentials: false
- uses: EmbarkStudios/cargo-deny-action@34899fc7ba81ca6268d5947a7a16b4649013fea1
with:
command: check ${{ matrix.checks }}
check-codespell:
name: check (codespell)
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
persist-credentials: false
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065
with:
python-version: 3.11
- name: Install uv
uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca
with:
# If you bump the version, also update docs/contributing.md
# and all other workflows that install uv
version: "0.5.1"
- name: Run Codespell
run: uv run -- codespell && echo Codespell exited successfully
check-doctests:
name: check (doctests)
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
persist-credentials: false
- uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b
with:
toolchain: 1.84
# NOTE: We need to run `cargo test --doc` separately from normal tests:
# - `cargo build --all-targets` specifies: "Build all targets"
# - `cargo test --all-targets` specifies: "Test all targets (does not include doctests)"
- name: Run doctests
run: cargo test --workspace --doc
env:
RUST_BACKTRACE: 1
- name: Check `cargo doc` for lint issues
env:
RUSTDOCFLAGS: "--deny warnings"
run: cargo doc --workspace --no-deps
check-mkdocs:
name: check (mkdocs)
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
persist-credentials: false
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065
with:
python-version: 3.11
- name: Install uv
uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca
with:
# If you bump the version, also update docs/contributing.md
# and all other workflows that install uv
version: "0.5.1"
- name: Check that `mkdocs` can build the docs
run: uv run -- mkdocs build --strict
# An optional job to alert us when uv updates break the build
check-mkdocs-latest:
name: check (latest mkdocs, optional)
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
persist-credentials: false
- name: Install uv
uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca
# 'only-managed' means that uv will always download Python, even
# if the runner happens to provide a compatible version
- name: Check that `mkdocs` can build the docs
run: uv run --python-preference=only-managed -- mkdocs build --strict
check-zizmor:
name: check (zizmor)
runs-on: ubuntu-latest
permissions:
security-events: write
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca
- name: Run zizmor
run: uvx zizmor --format sarif . > results.sarif
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Upload SARIF file
uses: github/codeql-action/upload-sarif@60168efe1c415ce0f5521ea06d5c2062adbeed1b
with:
sarif_file: results.sarif
category: zizmor
# Count the (very approximate) number of dependencies in Cargo.lock and bail at a certain limit.
check-cargo-lock-bloat:
name: check (Cargo.lock dependency count)
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
persist-credentials: false
- name: Check total dependency count in Cargo.lock
run: |
total_deps=$(./.github/scripts/count-cargo-lock-packages)
if [ "$total_deps" -gt "${TOTAL_DEP_LIMIT}" ]; then
./.github/scripts/dragon-bureaucrat \
"Cargo.lock has too many dependencies ($total_deps > ${TOTAL_DEP_LIMIT}). The Dragon banishes thee!
You can raise the limit in \`.github/workflows/ci.yml\` if necessary, but
consider whether it's possible to trim things down first."
else
echo "Counted $total_deps Cargo.lock dependencies." \
"This is within the allowed limit of ${TOTAL_DEP_LIMIT}."
fi
env:
# This limit *can* be raised, we just want to be aware if we exceed it
TOTAL_DEP_LIMIT: 500
# Block the merge if required checks fail, but only in the merge
# queue. See also `required-checks-hack.yml`.
required-checks:
name: required checks (merge queue)
if: ${{ always() && github.event_name == 'merge_group' }}
needs:
- test
- no-git
- build-nix
- check-protos
- check-rustfmt
- check-clippy
- check-cargo-deny
- check-codespell
- check-doctests
- check-mkdocs
# - check-mkdocs-latest
# - check-zizmor
- check-cargo-lock-bloat
runs-on: ubuntu-latest
steps:
- name: Block merge if required checks fail
if: >-
${{
contains(needs.*.result, 'failure')
|| contains(needs.*.result, 'cancelled')
}}
run: exit 1

.github/workflows/codespell.yml vendored Normal file (22 changes)

@ -0,0 +1,22 @@
name: Codespell
on:
push:
branches:
- main
pull_request:
permissions: read-all
jobs:
codespell:
name: Codespell
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- uses: codespell-project/actions-codespell@406322ec52dd7b488e48c1c4b82e2a8b3a1bf630
with:
check_filenames: true
check_hidden: true
skip: ./target,./.jj,*.lock
ignore_words_list: crate,NotIn,Wirth


@ -3,11 +3,7 @@ name: Enable auto-merge for Dependabot PRs
on:
pull_request:
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: true
permissions: {}
permissions: read-all
jobs:
dependabot-auto-merge:
@ -15,7 +11,7 @@ jobs:
permissions:
contents: write
pull-requests: write
runs-on: ubuntu-24.04
runs-on: ubuntu-latest
if: ${{ github.actor == 'dependabot[bot]' }}
steps:
- name: Enable auto-merge for Dependabot PRs


@ -5,39 +5,31 @@ on:
branches:
- main
permissions: {}
permissions:
contents: write
jobs:
prerelease-docs-build-deploy:
# IMPORTANT: this workflow also functions as a test for `docs-deploy-website-latest-release` in
# releases.yml. Any fixes here should probably be duplicated there.
permissions:
contents: write
if: github.repository_owner == 'jj-vcs' # Stops this job from running on forks
if: github.repository_owner == 'martinvonz' # Stops this job from running on forks
strategy:
matrix:
os: [ubuntu-24.04]
os: [ubuntu-latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
# `.github/scripts/docs-build-deploy` will need to `git push` to the docs branch
persist-credentials: true
- run: "git fetch origin gh-pages --depth=1"
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
with:
python-version: 3.11
- name: Install uv
uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca
- name: Install poetry
uses: abatilo/actions-poetry@e78f54a89cb052fff327414dd9ff010b5d2b4dbd
with:
version: "0.5.1"
poetry-version: latest
- name: Install dependencies, compile and deploy docs
run: |
git config user.name 'jj-docs[bot]'
git config user.email 'jj-docs[bot]@users.noreply.github.io'
export MKDOCS_SITE_NAME="Jujutsu docs (prerelease)"
export MKDOCS_PRIMARY_COLOR="blue grey"
.github/scripts/docs-build-deploy prerelease --push
.github/scripts/docs-build-deploy 'https://martinvonz.github.io/jj' prerelease --push
- name: "Show `git diff --stat`"
run: git diff --stat gh-pages^ gh-pages || echo "(No diffs)"


@ -2,7 +2,7 @@ name: Release
on:
release:
types: [published]
types: [created]
permissions: read-all
@ -23,7 +23,7 @@ jobs:
os: ubuntu-24.04
target: x86_64-unknown-linux-musl
- build: linux-aarch64-musl
os: ubuntu-24.04-arm
os: ubuntu-24.04
target: aarch64-unknown-linux-musl
- build: macos-x86_64
os: macos-13
@ -38,26 +38,32 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
persist-credentials: false
- name: Install packages (Ubuntu)
if: startsWith(matrix.os, 'ubuntu')
if: matrix.os == 'ubuntu-24.04'
run: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends xz-utils liblz4-tool musl-tools
- name: Install Rust
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b
uses: dtolnay/rust-toolchain@1482605bfc5719782e1267fd0c0cc350fe7646b8
with:
toolchain: stable
target: ${{ matrix.target }}
- name: Download cross-compilation tool (linux-aarch64)
if: matrix.target == 'aarch64-unknown-linux-musl'
run: wget -c https://github.com/cross-rs/cross/releases/download/v0.2.5/cross-x86_64-unknown-linux-gnu.tar.gz -O - | tar -xz
- name: Build release binary
shell: bash
run: cargo build --target ${{ matrix.target }} --verbose --release
run: |
CARGO_CMD=cargo
if [ "${{ matrix.target }}" = "aarch64-unknown-linux-musl" ]; then
CARGO_CMD=$PWD/cross
fi
$CARGO_CMD build --target ${{ matrix.target }} --verbose --release --features packaging,vendored-openssl
- name: Build archive
shell: bash
run: |
outdir="target/${{ matrix.target }}/release"
staging="jj-${RELEASE_TAG_NAME}-${{ matrix.target }}"
staging="jj-${{ github.event.release.tag_name }}-${{ matrix.target }}"
mkdir "$staging"
cp {README.md,LICENSE} "$staging/"
if [ "${{ matrix.os }}" = "windows-2022" ]; then
@ -70,8 +76,6 @@ jobs:
tar czf "$staging.tar.gz" -C "$staging" .
echo "ASSET=$staging.tar.gz" >> $GITHUB_ENV
fi
env:
RELEASE_TAG_NAME: ${{ github.event.release.tag_name }}
- name: Upload release archive
uses: actions/upload-release-asset@e8f9f06c4b078e705bd2ea027f0926603fc9b4d5
env:
@ -83,7 +87,7 @@ jobs:
asset_content_type: application/octet-stream
docs-release-archive:
runs-on: ubuntu-24.04
runs-on: ubuntu-latest
permissions:
contents: write
@ -93,24 +97,20 @@ jobs:
sudo apt-get update
sudo apt-get install -y --no-install-recommends xz-utils liblz4-tool musl-tools
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
persist-credentials: false
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
with:
python-version: 3.11
- name: Install uv
uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca
- name: Install poetry
uses: abatilo/actions-poetry@e78f54a89cb052fff327414dd9ff010b5d2b4dbd
with:
version: "0.5.1"
poetry-version: latest
- name: Compile docs and zip them up
run: |
uv run mkdocs build
archive="jj-${RELEASE_TAG_NAME}-docs-html.tar.gz"
poetry install
poetry run -- mkdocs build -f mkdocs-offline.yml
archive="jj-${{ github.event.release.tag_name }}-docs-html.tar.gz"
tar czf "$archive" -C "rendered-docs" .
echo "ASSET=$archive" >> $GITHUB_ENV
env:
MKDOCS_OFFLINE: true
RELEASE_TAG_NAME: ${{ github.event.release.tag_name }}
- name: Upload release archive
uses: actions/upload-release-asset@e8f9f06c4b078e705bd2ea027f0926603fc9b4d5
env:
@ -122,31 +122,26 @@ jobs:
asset_content_type: application/octet-stream
docs-deploy-website-latest-release:
runs-on: ubuntu-24.04
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
# `.github/scripts/docs-build-deploy` will need to `git push` to the docs branch
persist-credentials: true
- run: "git fetch origin gh-pages --depth=1"
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
with:
python-version: 3.11
- name: Install uv
uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca
- name: Install poetry
uses: abatilo/actions-poetry@e78f54a89cb052fff327414dd9ff010b5d2b4dbd
with:
version: "0.5.1"
poetry-version: latest
- name: Install dependencies, compile and deploy docs to the "latest release" section of the website
run: |
git config user.name 'jj-docs[bot]'
git config user.email 'jj-docs[bot]@users.noreply.github.io'
# Using the 'latest' tag below makes the website default
# to this version.
.github/scripts/docs-build-deploy "${RELEASE_TAG_NAME}" latest --update-aliases --push
env:
RELEASE_TAG_NAME: ${{ github.event.release.tag_name }}
.github/scripts/docs-build-deploy 'https://martinvonz.github.io/jj' "${{ github.event.release.tag_name }}" latest --update-aliases --push
- name: "Show `git diff --stat`"
run: git diff --stat gh-pages^ gh-pages || echo "(No diffs)"


@ -1,18 +0,0 @@
name: pr
on:
pull_request:
permissions: {}
jobs:
# The actual `required-checks` job is defined in `ci.yml` and only
# runs for `merge_group` events. This hack ensures that it doesn't
# block the merge for pull requests.
required-checks:
name: required checks (merge queue)
if: false
runs-on: ubuntu-latest
# Should never be run
steps:
- run: exit 1


@ -7,13 +7,13 @@ on:
push:
branches: [ main ]
# No default permissions
permissions: {}
# Declare default permissions as read only.
permissions: read-all
jobs:
analysis:
name: Scorecards analysis
runs-on: ubuntu-24.04
runs-on: ubuntu-latest
permissions:
# Needed to upload the results to code-scanning dashboard.
security-events: write
@ -26,7 +26,7 @@ jobs:
persist-credentials: false
- name: "Run analysis"
uses: ossf/scorecard-action@f49aabe0b5af0936a0987cfb85d86b75731b0186
uses: ossf/scorecard-action@62b2cac7ed8198b15735ed49ab1e5cf35480ba46
with:
results_file: results.sarif
results_format: sarif
@ -38,7 +38,7 @@ jobs:
# Upload the results as artifacts (optional).
- name: "Upload artifact"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
with:
name: SARIF file
path: results.sarif
@ -46,6 +46,6 @@ jobs:
# Upload the results to GitHub's code scanning dashboard.
- name: "Upload to code-scanning"
uses: github/codeql-action/upload-sarif@60168efe1c415ce0f5521ea06d5c2062adbeed1b
uses: github/codeql-action/upload-sarif@662472033e021d55d94146f66f6058822b0b39fd
with:
sarif_file: results.sarif

.gitignore vendored (9 changes)

@ -8,18 +8,9 @@ result
*.pending-snap
*.snap*
!cli/tests/cli-reference@.md.snap
# Per user insta settings.
# See https://insta.rs/docs/settings/#tool-config-file for details.
.config/insta.yaml
# mkdocs
/.venv
/.python-version
# Editor specific ignores
.idea
.vscode
.zed
# Generated by setting `JJ_TRACE` environment variable.
jj-trace-*.json

File diff suppressed because it is too large

Cargo.lock generated (2848 changes)

File diff suppressed because it is too large


@ -1,128 +1,137 @@
cargo-features = []
[workspace]
resolver = "3"
resolver = "2"
members = ["cli", "lib", "lib/gen-protos", "lib/proc-macros", "lib/testutils"]
[workspace.package]
version = "0.29.0"
version = "0.23.0"
license = "Apache-2.0"
rust-version = "1.84" # NOTE: remember to update CI, contributing.md, changelog.md, and install-and-setup.md
rust-version = "1.76" # NOTE: remember to update CI, contributing.md, changelog.md, install-and-setup.md, and flake.nix
edition = "2021"
readme = "README.md"
homepage = "https://github.com/jj-vcs/jj"
repository = "https://github.com/jj-vcs/jj"
documentation = "https://jj-vcs.github.io/jj/"
homepage = "https://github.com/martinvonz/jj"
repository = "https://github.com/martinvonz/jj"
documentation = "https://martinvonz.github.io/jj/"
categories = ["version-control", "development-tools"]
keywords = ["VCS", "DVCS", "SCM", "Git", "Mercurial"]
[workspace.dependencies]
anyhow = "1.0.93"
assert_cmd = "2.0.8"
assert_matches = "1.5.0"
async-trait = "0.1.88"
async-trait = "0.1.83"
backoff = "0.4.0"
blake2 = "0.10.6"
bstr = "1.11.3"
clap = { version = "4.5.37", features = [
bstr = "1.10.0"
clap = { version = "4.5.20", features = [
"derive",
"deprecated",
"wrap_help",
"string",
] }
clap_complete = { version = "4.5.48", features = ["unstable-dynamic"] }
clap_complete_nushell = "4.5.5"
# Update clap-markdown manually since test_generate_md_cli_help snapshot
# will need regenerating.
clap-markdown = "=0.1.5"
clap_mangen = "0.2.25"
chrono = { version = "0.4.41", default-features = false, features = [
clap_complete = "4.5.37"
clap_complete_nushell = "4.5.4"
clap-markdown = "0.1.4"
clap_mangen = "0.2.10"
chrono = { version = "0.4.38", default-features = false, features = [
"std",
"clock",
] }
chrono-english = { version = "0.1.7" }
clru = "0.6.2"
config = { version = "0.13.4", default-features = false, features = ["toml"] }
criterion = "0.5.1"
crossterm = { version = "0.28", default-features = false, features = ["windows"] }
datatest-stable = "0.3.2"
crossterm = { version = "0.27", default-features = false }
digest = "0.10.7"
dirs = "5.0.1"
dunce = "1.0.5"
etcetera = "0.10.0"
either = "1.15.0"
either = "1.13.0"
esl01-renderdag = "0.3.0"
futures = "0.3.31"
gix = { version = "0.71.0", default-features = false, features = [
"attributes",
"blob-diff",
git2 = { version = "0.19.0", features = [
# Do *not* disable this feature even if you'd like dynamic linking. Instead,
# set the environment variable `LIBGIT2_NO_VENDOR=1` if dynamic linking must
# be used (this will override the Cargo feature), and allow static linking
# in other cases. Rationale: If neither the feature nor the environment
# variable are set, `git2` may still decide to vendor `libgit2` if it
# doesn't find a version of `libgit2` to link to dynamically. See also
# https://github.com/rust-lang/git2-rs/commit/3cef4119f
"vendored-libgit2"
] }
gix = { version = "0.66.0", default-features = false, features = [
"index",
"max-performance-safe",
"zlib-rs",
"blob-diff",
] }
glob = "0.3.2"
hashbrown = { version = "0.15.3", default-features = false, features = ["inline-more"] }
gix-filter = "0.13.0"
glob = "0.3.1"
hashbrown = { version = "0.15.1", default-features = false, features = ["inline-more"] }
hex = "0.4.3"
ignore = "0.4.23"
indexmap = { version = "2.9.0", features = ["serde"] }
indoc = "2.0.6"
insta = { version = "1.43.1", features = ["filters"] }
interim = { version = "0.2.1", features = ["chrono_0_4"] }
itertools = "0.14.0"
libc = { version = "0.2.172" }
indexmap = "2.6.0"
indoc = "2.0.4"
insta = { version = "1.41.1", features = ["filters"] }
itertools = "0.13.0"
libc = { version = "0.2.161" }
maplit = "1.0.2"
minus = { version = "5.6.1", features = ["dynamic_output", "search"] }
num_cpus = "1.16.0"
once_cell = "1.21.3"
os_pipe = "1.2.1"
pest = "2.8.0"
pest_derive = "2.8.0"
pollster = "0.4.0"
once_cell = "1.20.2"
pest = "2.7.14"
pest_derive = "2.7.14"
pollster = "0.3.0"
pretty_assertions = "1.4.1"
proc-macro2 = "1.0.95"
prost = "0.13.5"
prost-build = "0.13.5"
quote = "1.0.40"
proc-macro2 = "1.0.89"
prost = "0.12.6"
prost-build = "0.12.6"
quote = "1.0.36"
rand = "0.8.5"
rand_chacha = "0.3.1"
rayon = "1.10.0"
ref-cast = "1.0.24"
ref-cast = "1.0.23"
regex = "1.11.1"
rpassword = "7.4.0"
rustix = { version = "1.0.7", features = ["fs"] }
rpassword = "7.3.1"
rustix = { version = "0.38.39", features = ["fs"] }
same-file = "1.0.6"
sapling-renderdag = "0.1.0"
sapling-streampager = "0.11.0"
scm-record = "0.8.0"
scm-record = "0.4.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0.140"
serde_json = "1.0.132"
slab = "0.4.9"
smallvec = { version = "1.14.0", features = [
smallvec = { version = "1.13.2", features = [
"const_generics",
"const_new",
"union",
] }
strsim = "0.11.1"
syn = "2.0.101"
tempfile = "3.19.1"
syn = "2.0.87"
tempfile = "3.13.0"
test-case = "3.3.1"
textwrap = "0.16.2"
thiserror = "2.0.12"
textwrap = "0.16.1"
thiserror = "1.0.68"
timeago = { version = "0.4.2", default-features = false }
tokio = { version = "1.44.2" }
toml_edit = { version = "0.22.26", features = ["serde"] }
tracing = "0.1.41"
tokio = { version = "1.41.0" }
toml_edit = { version = "0.19.15", features = ["serde"] }
tracing = "0.1.40"
tracing-chrome = "0.7.2"
tracing-subscriber = { version = "0.3.19", default-features = false, features = [
tracing-subscriber = { version = "0.3.18", default-features = false, features = [
"std",
"ansi",
"env-filter",
"fmt",
] }
unicode-width = "0.2.0"
unicode-width = "0.1.14"
version_check = "0.9.5"
watchman_client = { version = "0.9.0" }
whoami = "1.6.0"
whoami = "1.5.2"
winreg = "0.52"
zstd = "0.12.4"
# put all inter-workspace libraries, i.e. those that use 'path = ...' here in
# their own (alphabetically sorted) block
jj-lib = { path = "lib", version = "0.29.0", default-features = false }
jj-lib-proc-macros = { path = "lib/proc-macros", version = "0.29.0" }
jj-lib = { path = "lib", version = "0.23.0" }
jj-lib-proc-macros = { path = "lib/proc-macros", version = "0.23.0" }
testutils = { path = "lib/testutils" }
[workspace.lints.clippy]
@ -132,9 +141,7 @@ implicit_clone = "warn"
needless_for_each = "warn"
semicolon_if_nothing_returned = "warn"
uninlined_format_args = "warn"
unused_trait_names = "warn"
useless_conversion = "warn"
# Insta suggests compiling these packages in opt mode for faster testing.
# See https://docs.rs/insta/latest/insta/#optional-faster-runs.
[profile.dev.package]


@ -1,144 +0,0 @@
# Jujutsu Governance
## Overview
Jujutsu is an open source project, led, maintained and designed for a worldwide
community. Anyone who is interested can join, contribute, and participate in the
decision-making process. This document is intended to help you understand how
you can do that.
## Project roles
We greatly appreciate everyone's contributions, and Jujutsu has benefited
greatly from people who shared a single idea, change, or a suggestion, without
ever becoming a regular contributor. We also want everybody to feel welcome to
share their suggestions for the project (as long as you follow the Community
Guidelines).
There are two special roles for participants in the Jujutsu projects:
Maintainers and Contributors.
The role of the Maintainer is formally defined. These are the people empowered
to collectively make final decisions about most aspects of the project. They are
expected to take the community's input seriously and to aim for the benefit of the
entire community.
The role of a Contributor is less formal. In situations where opinions become
numerous or contentious, it is acceptable for the maintainers to assign more
weight to the voices of the more established Contributors.
### Maintainers
**Maintainers** are the people who contribute, review, guide, and collectively
make decisions about the direction and scope of the project (see:
[Decision Making](#decision-making)). Maintainers are elected by a
[voting process](#adding-and-removing-maintainers).
A typical Maintainer is not only someone who has made "large" contributions, but
someone who has shown they are continuously committed to the project and its
community. Some expected responsibilities of maintainers include (but are not
exclusively limited to):
- Displaying a high level of commitment to the project and its community, and
being a role model for others.
- Writing patches &mdash; a lot of patches, especially "glue code" or "grunt
work" or general "housekeeping"; fixing bugs, ensuring documentation is always
high quality, consistent UX design, improving processes, making judgments on
dependencies, handling security vulnerabilities, and so on and so forth.
- Reviewing code submitted by others &mdash; with an eye to maintainability,
performance, code quality, and "style" (fitting in with the project).
- Participating in design discussions, especially with regards to architecture
or long-term vision.
- Ensuring the community remains a warm and welcoming place, to new and veteran
members alike.
This is not an exhaustive list, nor is it intended that every Maintainer does
each and every one of these individual tasks to equal amounts. Rather this is
only a guideline for what Maintainers are expected to conceptually do.
In short, Maintainers are the outwardly visible stewards of the project.
#### Current list of Maintainers
The current list of Maintainers:
- Austin Seipp (@thoughtpolice)
- Ilya Grigoriev (@ilyagr)
- Martin von Zweigbergk (@martinvonz)
- Waleed Khan (@arxanas)
- Yuya Nishihara (@yuja)
### Contributors
We consider contributors to be active participants in the project and community
who are _not_ maintainers. These are people who might:
- Help users by answering questions
- Participate in lively and respectful discussions across various channels
- Submit high-quality bug reports, reproduce reported bugs, and verify fixes
- Submit patches or pull requests
- Provide reviews and input on others' pull requests
- Help with testing and quality assurance
- Submit feedback about planned features, use cases, or bugs
We essentially define them as **people who actively participate in the
project**. Examples of things that would _not_ make you a contributor are:
- Submitting a single bug report and never returning
- Writing blog posts or other evangelism
- Using the software in production
- Forking the project and maintaining your own version
- Writing a third-party tool or add-on
While these are all generally quite valuable, we don't consider these ongoing
contributions to the codebase or project itself, and on their own do not
constitute "active participation".
## Processes
For the purposes of making decisions across the project, the following processes
are defined.
### Decision-Making
The person proposing a decision to be made (i.e. technical, project direction,
etc.) can offer a proposal, along with a 2-to-4 week deadline for discussion.
During this time, Maintainers may participate with a vote of:
A) Support B) Reject C) Abstain
Each Maintainer gets one vote. The total number of "participating votes" is the
number of Maintainer votes which are not Abstain. The proposal is accepted when
more than half of the participating votes are Support.
In the event that a decision is reached before the proposed timeline, said
proposal can move on and be accepted immediately. In the event no consensus is
reached, a proposal may be re-submitted later on.
This document itself is subject to the Decision-Making process by the existing
set of Maintainers.
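
To make the arithmetic concrete, here is a minimal Rust sketch of the acceptance rule described above (the function name and example vote counts are illustrative only, not taken from the project):

// Sketch of the rule: a proposal is accepted when Support votes make up
// more than half of the participating (non-Abstain) votes.
fn proposal_accepted(support: u32, reject: u32) -> bool {
    let participating = support + reject; // Abstain votes are not counted
    // Strict majority, expressed in integers to avoid division pitfalls.
    2 * support > participating
}

fn main() {
    assert!(proposal_accepted(3, 1));  // 3 of 4 participating votes: accepted
    assert!(!proposal_accepted(2, 2)); // exactly half: not accepted
}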
### Adding and Removing Maintainers
An active Contributor may, at any given time, nominate themselves or another
Contributor to become a Maintainer. This process is purely optional and no
Contributor is expected to do so; however, self-nomination is encouraged for
active participants. A vote and discussion by the existing Maintainers will be
used to decide the outcome.
Note that Contributors should demonstrate a high standard of continuous
participation to become a Maintainer; the upper limit on the number of
Maintainers is practically bounded, and so rejection should be considered as a
real possibility. As the scope of the project changes, this limit may increase,
but it is fundamentally fluid. (If you are unsure, you are free to privately ask
existing Maintainers before self-nominating if there is room.)
A Maintainer may, at any time, cede their responsibility and step down without a
vote.
A Maintainer can be removed by other Maintainers, subject to a vote of at least
a 2/3rds majority from the existing Maintainer group (excluding the vote of the
Maintainer in question). This can be due to lack of participation or conduct
violations, among other things. Note that Maintainers are subject to a higher
set of behavioral and communicative standards than the average contributor or
participant.
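
A similar minimal sketch for the 2/3 removal threshold above (again illustrative; it assumes the eligible-voter count already excludes the Maintainer in question):

// Removal needs at least a 2/3 majority of the existing Maintainers,
// excluding the Maintainer whose removal is being decided.
fn removal_passes(votes_for_removal: u32, eligible_voters: u32) -> bool {
    // "at least 2/3" expressed with integers: 3 * votes >= 2 * eligible.
    3 * votes_for_removal >= 2 * eligible_voters
}

fn main() {
    assert!(removal_passes(3, 4));  // 3 of 4 is 75%, at least 2/3
    assert!(!removal_passes(2, 4)); // 2 of 4 is 50%, below 2/3
}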


@ -2,25 +2,22 @@
# Jujutsu—a version control system
<p><img title="jj logo" src="docs/images/jj-logo.svg" width="320" height="320"></p>
[![Release](https://img.shields.io/github/v/release/martinvonz/jj)](https://github.com/jj-vcs/jj/releases)
[![Release date](https://img.shields.io/github/release-date/martinvonz/jj)](https://github.com/jj-vcs/jj/releases)
![](https://img.shields.io/github/v/release/martinvonz/jj)
![](https://img.shields.io/github/release-date/martinvonz/jj)
<br/>
[![License](https://img.shields.io/github/license/martinvonz/jj)](https://github.com/jj-vcs/jj/blob/main/LICENSE)
![](https://img.shields.io/github/license/martinvonz/jj)
![](https://github.com/martinvonz/jj/workflows/build/badge.svg)
[![Discord](https://img.shields.io/discord/968932220549103686.svg?label=&logo=discord&logoColor=ffffff&color=7389D8&labelColor=6A7EC2)](https://discord.gg/dkmfj3aGQN)
[![IRC](https://img.shields.io/badge/irc-%23jujutsu-blue.svg)](https://web.libera.chat/?channel=#jujutsu)
**[Homepage] &nbsp;&nbsp;&bull;&nbsp;&nbsp;**
**[Installation] &nbsp;&nbsp;&bull;&nbsp;&nbsp;**
**[Getting Started] &nbsp;&nbsp;&bull;&nbsp;&nbsp;**
**[Development Roadmap] &nbsp;&nbsp;&bull;&nbsp;&nbsp;**
**[Contributing](#contributing)**
**[Development Roadmap]**
[Homepage]: https://jj-vcs.github.io/jj
[Installation]: https://jj-vcs.github.io/jj/latest/install-and-setup
[Getting Started]: https://jj-vcs.github.io/jj/latest/tutorial
[Development Roadmap]: https://jj-vcs.github.io/jj/latest/roadmap
[Homepage]: https://martinvonz.github.io/jj
[Installation]: https://martinvonz.github.io/jj/latest/install-and-setup
[Getting Started]: https://martinvonz.github.io/jj/latest/tutorial
[Development Roadmap]: https://martinvonz.github.io/jj/latest/roadmap
</div>
@ -70,10 +67,10 @@ systems into a single tool. Some of those sources of inspiration include:
theory of patches, as opposed to snapshots), the effect is that many forms of
conflict resolution can be performed and propagated automatically.
[perf]: https://github.com/jj-vcs/jj/discussions/49
[revset]: https://jj-vcs.github.io/jj/latest/revsets/
[no-index]: https://jj-vcs.github.io/jj/latest/git-comparison/#the-index
[conflicts]: https://jj-vcs.github.io/jj/latest/conflicts/
[perf]: https://github.com/martinvonz/jj/discussions/49
[revset]: https://martinvonz.github.io/jj/latest/revsets/
[no-index]: https://martinvonz.github.io/jj/latest/git-comparison/#the-index
[conflicts]: https://martinvonz.github.io/jj/latest/conflicts/
And it adds several innovative, useful features of its own:
@ -118,9 +115,9 @@ And it adds several innovative, useful features of its own:
_should_ happen is that it will expose conflicts between the local and remote
state, leaving you to resolve them.
[wcc]: https://jj-vcs.github.io/jj/latest/working-copy/
[wcc]: https://martinvonz.github.io/jj/latest/working-copy/
[undo-history]: https://en.wikipedia.org/wiki/Undo#History
[conc-safety]: https://jj-vcs.github.io/jj/latest/technical/concurrency/
[conc-safety]: https://martinvonz.github.io/jj/latest/technical/concurrency/
The command-line tool is called `jj` for now because it's easy to type and easy
to replace (rare in English). The project is called "Jujutsu" because it matches
@ -128,24 +125,12 @@ to replace (rare in English). The project is called "Jujutsu" because it matches
Jujutsu is relatively young, with lots of work to still be done. If you have any
questions, or want to talk about future plans, please join us on Discord
[![Discord](https://img.shields.io/discord/968932220549103686.svg?label=&logo=discord&logoColor=ffffff&color=7389D8&labelColor=6A7EC2)](https://discord.gg/dkmfj3aGQN),
start a [GitHub Discussion](https://github.com/jj-vcs/jj/discussions), or
send an IRC message to [`#jujutsu` on Libera
Chat](https://web.libera.chat/?channel=#jujutsu). The developers monitor all of
these channels[^bridge].
[^bridge]: To be more precise, the `#jujutsu` Libera IRC channel is bridged to
one of the channels on jj's Discord. Some of the developers stay on Discord and
use the bridge to follow IRC.
[![Discord](https://img.shields.io/discord/968932220549103686.svg?label=&logo=discord&logoColor=ffffff&color=7389D8&labelColor=6A7EC2)](https://discord.gg/dkmfj3aGQN)
or start a [GitHub Discussion](https://github.com/martinvonz/jj/discussions); the
developers monitor both channels.
### News and Updates 📣
- **December 2024**: The `jj` Repository has moved to the `jj-vcs` GitHub
organisation.
- **November 2024**: Version 0.24 is released which adds `jj file annotate`,
which is equivalent to `git blame` or `hg annotate`.
- **September 2024**: Martin gave a [presentation about Jujutsu][merge-vid-2024] at
Git Merge 2024.
- **Feb 2024**: Version 0.14 is released, which deprecates ["jj checkout" and "jj merge"](CHANGELOG.md#0140---2024-02-07),
as well as `jj init --git`, which is now just called `jj git init`.
- **Oct 2023**: Version 0.10.0 is released! Now includes a bundled merge and
@ -173,8 +158,7 @@ The wiki also contains a more extensive list of [media references][wiki-media].
[lwn]: https://lwn.net/Articles/958468/
[merge-talk]: https://www.youtube.com/watch?v=bx_LGilOuE4
[merge-slides]: https://docs.google.com/presentation/d/1F8j9_UOOSGUN9MvHxPZX_L4bQ9NMcYOp1isn17kTC_M/view
[merge-vid-2024]: https://www.youtube.com/watch?v=LV0JzI8IcCY
[wiki-media]: https://github.com/jj-vcs/jj/wiki/Media
[wiki-media]: https://github.com/martinvonz/jj/wiki/Media
## Getting started
@ -185,25 +169,23 @@ The wiki also contains a more extensive list of [media references][wiki-media].
> it unusable for your particular use.
Follow the [installation
instructions](https://jj-vcs.github.io/jj/latest/install-and-setup) to
instructions](https://martinvonz.github.io/jj/latest/install-and-setup) to
obtain and configure `jj`.
The best way to get started is probably to go through [the
tutorial](https://jj-vcs.github.io/jj/latest/tutorial). Also see the [Git
comparison](https://jj-vcs.github.io/jj/latest/git-comparison), which
tutorial](https://martinvonz.github.io/jj/latest/tutorial). Also see the [Git
comparison](https://martinvonz.github.io/jj/latest/git-comparison), which
includes a table of `jj` vs. `git` commands.
As you become more familiar with Jujutsu, the following resources may be helpful:
- The [FAQ](https://jj-vcs.github.io/jj/latest/FAQ).
- The [Glossary](https://jj-vcs.github.io/jj/latest/glossary).
- The [FAQ](https://martinvonz.github.io/jj/latest/FAQ).
- The [Glossary](https://martinvonz.github.io/jj/latest/glossary).
- The `jj help` command (e.g. `jj help rebase`).
- The `jj help -k <keyword>` command (e.g. `jj help -k config`). Use `jj help --help`
to see what keywords are available.
If you are using a **prerelease** version of `jj`, you would want to consult
[the docs for the prerelease (main branch)
version](https://jj-vcs.github.io/jj/prerelease/). You can also get there
version](https://martinvonz.github.io/jj/prerelease/). You can also get there
from the docs for the latest release by using the website's version switcher. The version switcher is visible in
the header of the website when you scroll to the top of any page.
@ -212,10 +194,16 @@ the header of the website when you scroll to the top of any page.
### Compatible with Git
Jujutsu is designed so that the underlying data and storage model is abstract.
Today, only the Git backend is production-ready. The Git backend uses the
Today, it features two [backends]—one of them uses a Git repository for storage,
while the other is a native storage backend[^native-backend]. The Git backend
uses the [libgit2](https://libgit2.org/) C library and the
[gitoxide](https://github.com/Byron/gitoxide) Rust library.
[backends]: https://jj-vcs.github.io/jj/latest/glossary#backend
[backends]: https://martinvonz.github.io/jj/latest/glossary#backend
[^native-backend]: At this time, there's practically no reason to use the native
backend. The backend exists mainly to make sure that it's possible to eventually
add functionality that cannot easily be added to the Git backend.
The Git backend is fully featured and maintained, and allows you to use Jujutsu
with any Git remote. The commits you create will look like regular Git commits.
@ -227,7 +215,7 @@ Here is how you can explore a GitHub repository with `jj`.
<img src="demos/git_compat.png" />
You can even have a ["co-located" local
repository](https://jj-vcs.github.io/jj/latest/git-compatibility#co-located-jujutsugit-repos)
repository](https://martinvonz.github.io/jj/latest/git-compatibility#co-located-jujutsugit-repos)
where you can use both `jj` and `git` commands interchangeably.
### The working copy is automatically committed
@ -268,7 +256,7 @@ necessarily have to be the most recent operation).
### Conflicts can be recorded in commits
If an operation results in
[conflicts](https://jj-vcs.github.io/jj/latest/glossary#conflict),
[conflicts](https://martinvonz.github.io/jj/latest/glossary#conflict),
information about those conflicts will be recorded in the commit(s). The
operation will succeed. You can then resolve the conflicts later. One
consequence of this design is that there's no need to continue interrupted
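As a concrete illustration of how recorded conflicts are exposed to library code, the following sketch lists the conflicted paths in a commit's tree. It reuses the `tree()?.conflicts()` iteration that appears verbatim in the custom working-copy example later in this comparison; the `conflicted_paths` helper name and the boxed error type are assumptions made for the example.

```rust
// Sketch only: collect the paths of unresolved conflicts recorded in a commit,
// one per line, using the same iteration as the working-copy example later in
// this diff.
use itertools::Itertools as _;
use jj_lib::commit::Commit;

fn conflicted_paths(commit: &Commit) -> Result<String, Box<dyn std::error::Error>> {
    Ok(commit
        .tree()? // merged tree; it may contain unresolved conflicts
        .conflicts() // iterator over (path, conflicted value) pairs
        .map(|(path, _value)| format!("{}\n", path.as_internal_file_string()))
        .join(""))
}
```

An empty result means the commit's tree is fully resolved; a non-empty result is the kind of information that conflict-aware tooling built on jj-lib can act on.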
@ -301,8 +289,8 @@ commit to any other commit using `jj squash -i --from X --into Y`.
## Status
The tool is fairly feature-complete, but some important features like support
for Git submodules are not yet completed. There
The tool is fairly feature-complete, but some important features like (the
equivalent of) `git blame` are not yet supported. There
are also several performance bugs. It's likely that workflows and setups
different from what the core developers use are not well supported, e.g. there
is no native support for email-based workflows.
@ -321,7 +309,7 @@ scripts if requested.
## Related work
There are several tools trying to solve similar problems as Jujutsu. See
[related work](https://jj-vcs.github.io/jj/latest/related-work) for details.
[related work](https://martinvonz.github.io/jj/latest/related-work) for details.
## Contributing
@ -330,7 +318,7 @@ don't be shy. Please ask if you want a pointer on something you can help with,
and hopefully we can all figure something out.
We do have [a few policies and
suggestions](https://jj-vcs.github.io/jj/prerelease/contributing/)
suggestions](https://martinvonz.github.io/jj/prerelease/contributing/)
for contributors. The broad TL;DR:
- Bug reports are very welcome!
@ -350,7 +338,4 @@ That said, **this is not a Google product**.
## License
Jujutsu is available as Open Source Software, under the Apache 2.0 license. See
[`LICENSE`](./LICENSE) for details about copyright and redistribution.
The `jj` logo was contributed by J. Jennings and is licensed under a Creative
Commons License, see [`docs/images/LICENSE`](docs/images/LICENSE).
[LICENSE](./LICENSE) for details about copyright and redistribution.

View File

@ -1,10 +1,7 @@
To report a security issue, please use the "Report a vulnerability" button on
GitHub's Security tab for `jj`'s main repo, under
[Advisories](https://github.com/jj-vcs/jj/security/advisories).
Our vulnerability management team will respond within 3 working days of your
report. If the issue is confirmed as a vulnerability, we will open a Security
Advisory. This project follows a 90 day disclosure timeline.
Feel free to email Jujutsu VCS Security at <jj-security@googlegroups.com> if you
have questions.
To report a security issue, please
email Jujutsu VCS Security at <jj-security@googlegroups.com>
with a description of the issue, the steps you took to create the issue,
affected versions, and, if known, mitigations for the issue. Our vulnerability
management team will respond within 3 working days of your email. If the issue
is confirmed as a vulnerability, we will open a Security Advisory. This project
follows a 90 day disclosure timeline.

View File

@ -23,7 +23,7 @@ include = [
"/tests/",
"!*.pending-snap",
"!*.snap*",
"/tests/cli-reference@.md.snap",
"/tests/cli-reference@.md.snap"
]
[[bin]]
@ -48,10 +48,6 @@ required-features = ["test-fakes"]
[[test]]
name = "runner"
[[test]]
name = "datatest_runner"
harness = false
[dependencies]
bstr = { workspace = true }
chrono = { workspace = true }
@ -60,31 +56,30 @@ clap-markdown = { workspace = true }
clap_complete = { workspace = true }
clap_complete_nushell = { workspace = true }
clap_mangen = { workspace = true }
config = { workspace = true }
criterion = { workspace = true, optional = true }
crossterm = { workspace = true }
dirs = { workspace = true }
dunce = { workspace = true }
etcetera = { workspace = true }
esl01-renderdag = { workspace = true }
futures = { workspace = true }
gix = { workspace = true, optional = true }
glob = { workspace = true }
git2 = { workspace = true }
gix = { workspace = true }
indexmap = { workspace = true }
indoc = { workspace = true }
itertools = { workspace = true }
jj-lib = { workspace = true }
maplit = { workspace = true }
minus = { workspace = true }
once_cell = { workspace = true }
os_pipe = { workspace = true }
pest = { workspace = true }
pest_derive = { workspace = true }
pollster = { workspace = true }
rayon = { workspace = true }
regex = { workspace = true }
rpassword = { workspace = true }
sapling-renderdag = { workspace = true }
sapling-streampager = { workspace = true }
scm-record = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
slab = { workspace = true }
strsim = { workspace = true }
tempfile = { workspace = true }
@ -96,16 +91,15 @@ tracing = { workspace = true }
tracing-chrome = { workspace = true }
tracing-subscriber = { workspace = true }
unicode-width = { workspace = true }
whoami = { workspace = true }
[target.'cfg(unix)'.dependencies]
libc = { workspace = true }
[dev-dependencies]
anyhow = { workspace = true }
assert_cmd = { workspace = true }
assert_matches = { workspace = true }
async-trait = { workspace = true }
datatest-stable = { workspace = true }
insta = { workspace = true }
test-case = { workspace = true }
testutils = { workspace = true }
@ -113,10 +107,11 @@ testutils = { workspace = true }
jj-cli = { path = ".", features = ["test-fakes"], default-features = false }
[features]
default = ["watchman", "git"]
default = ["watchman"]
bench = ["dep:criterion"]
git = ["jj-lib/git", "dep:gix"]
packaging = []
test-fakes = ["jj-lib/testing"]
vendored-openssl = ["git2/vendored-openssl", "jj-lib/vendored-openssl"]
watchman = ["jj-lib/watchman"]
[package.metadata.binstall]

View File

@ -73,13 +73,13 @@ fn run_custom_command(
match command {
CustomCommand::InitJit => {
let wc_path = command_helper.cwd();
let settings = command_helper.settings_for_new_workspace(wc_path)?;
// Initialize a workspace with the custom backend
Workspace::init_with_backend(
&settings,
command_helper.settings(),
wc_path,
&|settings, store_path| Ok(Box::new(JitBackend::init(settings, store_path)?)),
Signer::from_settings(&settings).map_err(WorkspaceInitError::SignInit)?,
Signer::from_settings(command_helper.settings())
.map_err(WorkspaceInitError::SignInit)?,
)?;
Ok(())
}

View File

@ -45,7 +45,7 @@ fn run_custom_command(
let mut tx = workspace_command.start_transaction();
let new_commit = tx
.repo_mut()
.rewrite_commit(&commit)
.rewrite_commit(command_helper.settings(), &commit)
.set_description("Frobnicated!")
.write()?;
tx.finish(ui, "frobnicate")?;

View File

@ -15,29 +15,30 @@
use std::any::Any;
use std::rc::Rc;
use itertools::Itertools as _;
use itertools::Itertools;
use jj_cli::cli_util::CliRunner;
use jj_cli::commit_templater::CommitTemplateBuildFnTable;
use jj_cli::commit_templater::CommitTemplateLanguage;
use jj_cli::commit_templater::CommitTemplateLanguageExtension;
use jj_cli::template_builder::TemplateLanguage;
use jj_cli::template_parser;
use jj_cli::template_parser::TemplateParseError;
use jj_cli::templater::TemplatePropertyExt as _;
use jj_lib::backend::CommitId;
use jj_lib::commit::Commit;
use jj_lib::extensions_map::ExtensionsMap;
use jj_lib::object_id::ObjectId as _;
use jj_lib::object_id::ObjectId;
use jj_lib::repo::Repo;
use jj_lib::revset::FunctionCallNode;
use jj_lib::revset::LoweringContext;
use jj_lib::revset::PartialSymbolResolver;
use jj_lib::revset::RevsetDiagnostics;
use jj_lib::revset::RevsetExpression;
use jj_lib::revset::RevsetFilterExtension;
use jj_lib::revset::RevsetFilterPredicate;
use jj_lib::revset::RevsetParseContext;
use jj_lib::revset::RevsetParseError;
use jj_lib::revset::RevsetResolutionError;
use jj_lib::revset::SymbolResolverExtension;
use jj_lib::revset::UserRevsetExpression;
use once_cell::sync::OnceCell;
struct HexCounter;
@ -71,7 +72,7 @@ impl MostDigitsInId {
fn count(&self, repo: &dyn Repo) -> i64 {
*self.count.get_or_init(|| {
RevsetExpression::all()
.evaluate(repo)
.evaluate_programmatic(repo)
.unwrap()
.iter()
.map(Result::unwrap)
@ -99,7 +100,7 @@ impl PartialSymbolResolver for TheDigitestResolver {
Ok(Some(
RevsetExpression::all()
.evaluate(repo)
.evaluate_programmatic(repo)
.map_err(|err| RevsetResolutionError::Other(err.into()))?
.iter()
.map(Result::unwrap)
@ -119,6 +120,7 @@ impl SymbolResolverExtension for TheDigitest {
impl CommitTemplateLanguageExtension for HexCounter {
fn build_fn_table<'repo>(&self) -> CommitTemplateBuildFnTable<'repo> {
type L<'repo> = CommitTemplateLanguage<'repo>;
let mut table = CommitTemplateBuildFnTable::empty();
table.commit_methods.insert(
"has_most_digits",
@ -128,17 +130,18 @@ impl CommitTemplateLanguageExtension for HexCounter {
.cache_extension::<MostDigitsInId>()
.unwrap()
.count(language.repo());
let out_property =
property.map(move |commit| num_digits_in_id(commit.id()) == most_digits);
Ok(out_property.into_dyn_wrapped())
Ok(L::wrap_boolean(property.map(move |commit| {
num_digits_in_id(commit.id()) == most_digits
})))
},
);
table.commit_methods.insert(
"num_digits_in_id",
|_language, _diagnostics, _build_context, property, call| {
call.expect_no_arguments()?;
let out_property = property.map(|commit| num_digits_in_id(commit.id()));
Ok(out_property.into_dyn_wrapped())
Ok(L::wrap_integer(
property.map(|commit| num_digits_in_id(commit.id())),
))
},
);
table.commit_methods.insert(
@ -157,8 +160,9 @@ impl CommitTemplateLanguageExtension for HexCounter {
}
})?;
let out_property = property.map(move |commit| num_char_in_id(commit, char_arg));
Ok(out_property.into_dyn_wrapped())
Ok(L::wrap_integer(
property.map(move |commit| num_char_in_id(commit, char_arg)),
))
},
);
@ -186,8 +190,8 @@ impl RevsetFilterExtension for EvenDigitsFilter {
fn even_digits(
_diagnostics: &mut RevsetDiagnostics,
function: &FunctionCallNode,
_context: &LoweringContext,
) -> Result<Rc<UserRevsetExpression>, RevsetParseError> {
_context: &RevsetParseContext,
) -> Result<Rc<RevsetExpression>, RevsetParseError> {
function.expect_no_arguments()?;
Ok(RevsetExpression::filter(RevsetFilterPredicate::Extension(
Rc::new(EvenDigitsFilter),

View File

@ -14,12 +14,14 @@
use jj_cli::cli_util::CliRunner;
use jj_cli::operation_templater::OperationTemplateBuildFnTable;
use jj_cli::operation_templater::OperationTemplateLanguage;
use jj_cli::operation_templater::OperationTemplateLanguageExtension;
use jj_cli::template_builder::TemplateLanguage;
use jj_cli::template_parser;
use jj_cli::template_parser::TemplateParseError;
use jj_cli::templater::TemplatePropertyExt as _;
use jj_lib::extensions_map::ExtensionsMap;
use jj_lib::object_id::ObjectId as _;
use jj_lib::object_id::ObjectId;
use jj_lib::op_store::OperationId;
use jj_lib::operation::Operation;
@ -47,13 +49,15 @@ fn num_char_in_id(operation: Operation, ch_match: char) -> i64 {
impl OperationTemplateLanguageExtension for HexCounter {
fn build_fn_table(&self) -> OperationTemplateBuildFnTable {
type L = OperationTemplateLanguage;
let mut table = OperationTemplateBuildFnTable::empty();
table.operation_methods.insert(
"num_digits_in_id",
|_language, _diagnostics, _build_context, property, call| {
call.expect_no_arguments()?;
let out_property = property.map(|operation| num_digits_in_id(operation.id()));
Ok(out_property.into_dyn_wrapped())
Ok(L::wrap_integer(
property.map(|operation| num_digits_in_id(operation.id())),
))
},
);
table.operation_methods.insert(
@ -72,9 +76,9 @@ impl OperationTemplateLanguageExtension for HexCounter {
}
})?;
let out_property =
property.map(move |operation| num_char_in_id(operation, char_arg));
Ok(out_property.into_dyn_wrapped())
Ok(L::wrap_integer(
property.map(move |operation| num_char_in_id(operation, char_arg)),
))
},
);

View File

@ -17,7 +17,7 @@ use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use itertools::Itertools as _;
use itertools::Itertools;
use jj_cli::cli_util::CliRunner;
use jj_cli::cli_util::CommandHelper;
use jj_cli::command_error::CommandError;
@ -28,21 +28,18 @@ use jj_lib::commit::Commit;
use jj_lib::git_backend::GitBackend;
use jj_lib::local_working_copy::LocalWorkingCopy;
use jj_lib::op_store::OperationId;
use jj_lib::ref_name::WorkspaceName;
use jj_lib::ref_name::WorkspaceNameBuf;
use jj_lib::op_store::WorkspaceId;
use jj_lib::repo::ReadonlyRepo;
use jj_lib::repo_path::RepoPathBuf;
use jj_lib::settings::UserSettings;
use jj_lib::signing::Signer;
use jj_lib::store::Store;
use jj_lib::working_copy::CheckoutError;
use jj_lib::working_copy::CheckoutOptions;
use jj_lib::working_copy::CheckoutStats;
use jj_lib::working_copy::LockedWorkingCopy;
use jj_lib::working_copy::ResetError;
use jj_lib::working_copy::SnapshotError;
use jj_lib::working_copy::SnapshotOptions;
use jj_lib::working_copy::SnapshotStats;
use jj_lib::working_copy::WorkingCopy;
use jj_lib::working_copy::WorkingCopyFactory;
use jj_lib::working_copy::WorkingCopyStateError;
@ -64,23 +61,23 @@ fn run_custom_command(
match command {
CustomCommand::InitConflicts => {
let wc_path = command_helper.cwd();
let settings = command_helper.settings_for_new_workspace(wc_path)?;
let backend_initializer = |settings: &UserSettings, store_path: &Path| {
let backend: Box<dyn Backend> =
Box::new(GitBackend::init_internal(settings, store_path)?);
Ok(backend)
};
Workspace::init_with_factories(
&settings,
command_helper.settings(),
wc_path,
&backend_initializer,
Signer::from_settings(&settings).map_err(WorkspaceInitError::SignInit)?,
Signer::from_settings(command_helper.settings())
.map_err(WorkspaceInitError::SignInit)?,
&ReadonlyRepo::default_op_store_initializer(),
&ReadonlyRepo::default_op_heads_store_initializer(),
&ReadonlyRepo::default_index_store_initializer(),
&ReadonlyRepo::default_submodule_store_initializer(),
&ConflictsWorkingCopyFactory {},
WorkspaceName::DEFAULT.to_owned(),
WorkspaceId::default(),
)?;
Ok(())
}
@ -121,14 +118,14 @@ impl ConflictsWorkingCopy {
working_copy_path: PathBuf,
state_path: PathBuf,
operation_id: OperationId,
workspace_name: WorkspaceNameBuf,
workspace_id: WorkspaceId,
) -> Result<Self, WorkingCopyStateError> {
let inner = LocalWorkingCopy::init(
store,
working_copy_path.clone(),
state_path,
operation_id,
workspace_name,
workspace_id,
)?;
Ok(ConflictsWorkingCopy {
inner: Box::new(inner),
@ -154,8 +151,8 @@ impl WorkingCopy for ConflictsWorkingCopy {
Self::name()
}
fn workspace_name(&self) -> &WorkspaceName {
self.inner.workspace_name()
fn workspace_id(&self) -> &WorkspaceId {
self.inner.workspace_id()
}
fn operation_id(&self) -> &OperationId {
@ -188,14 +185,14 @@ impl WorkingCopyFactory for ConflictsWorkingCopyFactory {
working_copy_path: PathBuf,
state_path: PathBuf,
operation_id: OperationId,
workspace_name: WorkspaceNameBuf,
workspace_id: WorkspaceId,
) -> Result<Box<dyn WorkingCopy>, WorkingCopyStateError> {
Ok(Box::new(ConflictsWorkingCopy::init(
store,
working_copy_path,
state_path,
operation_id,
workspace_name,
workspace_id,
)?))
}
@ -235,37 +232,26 @@ impl LockedWorkingCopy for LockedConflictsWorkingCopy {
self.inner.old_tree_id()
}
fn snapshot(
&mut self,
options: &SnapshotOptions,
) -> Result<(MergedTreeId, SnapshotStats), SnapshotError> {
fn snapshot(&mut self, options: &SnapshotOptions) -> Result<MergedTreeId, SnapshotError> {
let options = SnapshotOptions {
base_ignores: options.base_ignores.chain(
"",
Path::new(""),
"/.conflicts".as_bytes(),
)?,
base_ignores: options.base_ignores.chain("", "/.conflicts".as_bytes())?,
..options.clone()
};
self.inner.snapshot(&options)
}
fn check_out(
&mut self,
commit: &Commit,
options: &CheckoutOptions,
) -> Result<CheckoutStats, CheckoutError> {
fn check_out(&mut self, commit: &Commit) -> Result<CheckoutStats, CheckoutError> {
let conflicts = commit
.tree()?
.conflicts()
.map(|(path, _value)| format!("{}\n", path.as_internal_file_string()))
.join("");
std::fs::write(self.wc_path.join(".conflicts"), conflicts).unwrap();
self.inner.check_out(commit, options)
self.inner.check_out(commit)
}
fn rename_workspace(&mut self, new_name: WorkspaceNameBuf) {
self.inner.rename_workspace(new_name);
fn rename_workspace(&mut self, new_workspace_id: WorkspaceId) {
self.inner.rename_workspace(new_workspace_id);
}
fn reset(&mut self, commit: &Commit) -> Result<(), ResetError> {
@ -283,9 +269,8 @@ impl LockedWorkingCopy for LockedConflictsWorkingCopy {
fn set_sparse_patterns(
&mut self,
new_sparse_patterns: Vec<RepoPathBuf>,
options: &CheckoutOptions,
) -> Result<CheckoutStats, CheckoutError> {
self.inner.set_sparse_patterns(new_sparse_patterns, options)
self.inner.set_sparse_patterns(new_sparse_patterns)
}
fn finish(

File diff suppressed because it is too large

View File

@ -13,7 +13,6 @@
// limitations under the License.
use std::error;
use std::error::Error as _;
use std::io;
use std::io::Write as _;
use std::iter;
@ -22,20 +21,17 @@ use std::str;
use std::sync::Arc;
use itertools::Itertools as _;
use jj_lib::absorb::AbsorbError;
use jj_lib::backend::BackendError;
use jj_lib::config::ConfigFileSaveError;
use jj_lib::config::ConfigGetError;
use jj_lib::config::ConfigLoadError;
use jj_lib::config::ConfigMigrateError;
use jj_lib::dsl_util::Diagnostics;
use jj_lib::fileset::FilePatternParseError;
use jj_lib::fileset::FilesetParseError;
use jj_lib::fileset::FilesetParseErrorKind;
use jj_lib::fix::FixError;
use jj_lib::git::GitConfigParseError;
use jj_lib::git::GitExportError;
use jj_lib::git::GitImportError;
use jj_lib::git::GitRemoteManagementError;
use jj_lib::gitignore::GitIgnoreError;
use jj_lib::op_heads_store::OpHeadResolutionError;
use jj_lib::op_heads_store::OpHeadsStoreError;
use jj_lib::op_store::OpStoreError;
use jj_lib::op_walk::OpsetEvaluationError;
use jj_lib::op_walk::OpsetResolutionError;
@ -45,16 +41,13 @@ use jj_lib::repo::RepoLoaderError;
use jj_lib::repo::RewriteRootCommit;
use jj_lib::repo_path::RepoPathBuf;
use jj_lib::repo_path::UiPathParseError;
use jj_lib::revset;
use jj_lib::revset::RevsetEvaluationError;
use jj_lib::revset::RevsetParseError;
use jj_lib::revset::RevsetParseErrorKind;
use jj_lib::revset::RevsetResolutionError;
use jj_lib::signing::SignInitError;
use jj_lib::str_util::StringPatternParseError;
use jj_lib::trailer::TrailerParseError;
use jj_lib::transaction::TransactionCommitError;
use jj_lib::view::RenameWorkspaceError;
use jj_lib::working_copy::RecoverWorkspaceError;
use jj_lib::working_copy::ResetError;
use jj_lib::working_copy::SnapshotError;
use jj_lib::working_copy::WorkingCopyStateError;
@ -63,16 +56,12 @@ use thiserror::Error;
use crate::cli_util::short_operation_hash;
use crate::description_util::ParseBulkEditMessageError;
use crate::description_util::TempTextEditError;
use crate::description_util::TextEditError;
use crate::diff_util::DiffRenderError;
use crate::formatter::FormatRecorder;
use crate::formatter::Formatter;
use crate::merge_tools::ConflictResolveError;
use crate::merge_tools::DiffEditError;
use crate::merge_tools::MergeToolConfigError;
use crate::merge_tools::MergeToolPartialResolutionError;
use crate::revset_util::BookmarkNameParseError;
use crate::revset_util::UserRevsetEvaluationError;
use crate::template_parser::TemplateParseError;
use crate::template_parser::TemplateParseErrorKind;
@ -207,13 +196,6 @@ pub fn cli_error(err: impl Into<Box<dyn error::Error + Send + Sync>>) -> Command
CommandError::new(CommandErrorKind::Cli, err)
}
pub fn cli_error_with_message(
message: impl Into<String>,
source: impl Into<Box<dyn error::Error + Send + Sync>>,
) -> CommandError {
CommandError::with_message(CommandErrorKind::Cli, message, source)
}
pub fn internal_error(err: impl Into<Box<dyn error::Error + Send + Sync>>) -> CommandError {
CommandError::new(CommandErrorKind::Internal, err)
}
@ -229,7 +211,10 @@ fn format_similarity_hint<S: AsRef<str>>(candidates: &[S]) -> Option<String> {
match candidates {
[] => None,
names => {
let quoted_names = names.iter().map(|s| format!("`{}`", s.as_ref())).join(", ");
let quoted_names = names
.iter()
.map(|s| format!(r#""{}""#, s.as_ref()))
.join(", ");
Some(format!("Did you mean {quoted_names}?"))
}
}
@ -251,44 +236,15 @@ impl From<jj_lib::file_util::PathError> for CommandError {
}
}
impl From<ConfigFileSaveError> for CommandError {
fn from(err: ConfigFileSaveError) -> Self {
user_error(err)
impl From<config::ConfigError> for CommandError {
fn from(err: config::ConfigError) -> Self {
config_error(err)
}
}
impl From<ConfigGetError> for CommandError {
fn from(err: ConfigGetError) -> Self {
let hint = config_get_error_hint(&err);
let mut cmd_err = config_error(err);
cmd_err.extend_hints(hint);
cmd_err
}
}
impl From<ConfigLoadError> for CommandError {
fn from(err: ConfigLoadError) -> Self {
let hint = match &err {
ConfigLoadError::Read(_) => None,
ConfigLoadError::Parse { source_path, .. } => source_path
.as_ref()
.map(|path| format!("Check the config file: {}", path.display())),
};
let mut cmd_err = config_error(err);
cmd_err.extend_hints(hint);
cmd_err
}
}
impl From<ConfigMigrateError> for CommandError {
fn from(err: ConfigMigrateError) -> Self {
let hint = err
.source_path
.as_ref()
.map(|path| format!("Check the config file: {}", path.display()));
let mut cmd_err = config_error(err);
cmd_err.extend_hints(hint);
cmd_err
impl From<crate::config::ConfigError> for CommandError {
fn from(err: crate::config::ConfigError) -> Self {
config_error(err)
}
}
@ -325,12 +281,6 @@ impl From<BackendError> for CommandError {
}
}
impl From<OpHeadsStoreError> for CommandError {
fn from(err: OpHeadsStoreError) -> Self {
internal_error_with_message("Unexpected error from operation heads store", err)
}
}
impl From<WorkspaceInitError> for CommandError {
fn from(err: WorkspaceInitError) -> Self {
match err {
@ -346,17 +296,14 @@ impl From<WorkspaceInitError> for CommandError {
WorkspaceInitError::Path(err) => {
internal_error_with_message("Failed to access the repository", err)
}
WorkspaceInitError::OpHeadsStore(err) => {
user_error_with_message("Failed to record initial operation", err)
}
WorkspaceInitError::Backend(err) => {
user_error_with_message("Failed to access the repository", err)
}
WorkspaceInitError::WorkingCopyState(err) => {
internal_error_with_message("Failed to access the repository", err)
}
WorkspaceInitError::SignInit(err) => user_error(err),
WorkspaceInitError::TransactionCommit(err) => err.into(),
WorkspaceInitError::SignInit(err @ SignInitError::UnknownBackend(_)) => user_error(err),
WorkspaceInitError::SignInit(err) => internal_error(err),
}
}
}
@ -381,7 +328,6 @@ impl From<OpsetEvaluationError> for CommandError {
cmd_err
}
OpsetEvaluationError::OpHeadResolution(err) => err.into(),
OpsetEvaluationError::OpHeadsStore(err) => err.into(),
OpsetEvaluationError::OpStore(err) => err.into(),
}
}
@ -389,7 +335,45 @@ impl From<OpsetEvaluationError> for CommandError {
impl From<SnapshotError> for CommandError {
fn from(err: SnapshotError) -> Self {
internal_error_with_message("Failed to snapshot the working copy", err)
match err {
SnapshotError::NewFileTooLarge {
path,
size,
max_size,
} => {
// if the size difference is < 1KiB, then show exact bytes.
// otherwise, show in human-readable form; this avoids weird cases
// where a file is 400 bytes too large but the error says something
// like '1.0MiB, maximum size allowed is ~1.0MiB'
let size_diff = size.0 - max_size.0;
let err_str = if size_diff <= 1024 {
format!(
"it is {} bytes too large; the maximum size allowed is {} bytes ({}).",
size_diff, max_size.0, max_size,
)
} else {
format!("it is {size}; the maximum size allowed is ~{max_size}.")
};
user_error(format!(
"Failed to snapshot the working copy\nThe file '{}' is too large to be \
snapshotted: {}",
path.display(),
err_str,
))
.hinted(format!(
"This is to prevent large files from being added on accident. You can fix \
this error by:
- Adding the file to `.gitignore`
- Run `jj config set --repo snapshot.max-new-file-size {}`
This will increase the maximum file size allowed for new files, in this repository only.
- Run `jj --config-toml 'snapshot.max-new-file-size={}' st`
This will increase the maximum file size allowed for new files, for this command only.",
size.0, size.0
))
}
err => internal_error_with_message("Failed to snapshot the working copy", err),
}
}
}
@ -411,12 +395,6 @@ impl From<ResetError> for CommandError {
}
}
impl From<TransactionCommitError> for CommandError {
fn from(err: TransactionCommitError) -> Self {
internal_error(err)
}
}
impl From<DiffEditError> for CommandError {
fn from(err: DiffEditError) -> Self {
user_error_with_message("Failed to edit diff", err)
@ -437,27 +415,7 @@ impl From<DiffRenderError> for CommandError {
impl From<ConflictResolveError> for CommandError {
fn from(err: ConflictResolveError) -> Self {
match err {
ConflictResolveError::Backend(err) => err.into(),
ConflictResolveError::Io(err) => err.into(),
_ => {
let hint = match &err {
ConflictResolveError::ExecutableConflict { .. } => {
Some("Use `jj file chmod` to update the executable bit.".to_owned())
}
_ => None,
};
let mut cmd_err = user_error_with_message("Failed to resolve conflicts", err);
cmd_err.extend_hints(hint);
cmd_err
}
}
}
}
impl From<MergeToolPartialResolutionError> for CommandError {
fn from(err: MergeToolPartialResolutionError) -> Self {
user_error(err)
user_error_with_message("Failed to resolve conflicts", err)
}
}
@ -480,122 +438,46 @@ impl From<MergeToolConfigError> for CommandError {
}
}
impl From<TextEditError> for CommandError {
fn from(err: TextEditError) -> Self {
user_error(err)
impl From<git2::Error> for CommandError {
fn from(err: git2::Error) -> Self {
user_error_with_message("Git operation failed", err)
}
}
impl From<TempTextEditError> for CommandError {
fn from(err: TempTextEditError) -> Self {
let hint = err.path.as_ref().map(|path| {
let name = err.name.as_deref().unwrap_or("file");
format!("Edited {name} is left in {path}", path = path.display())
});
let mut cmd_err = user_error(err);
impl From<GitImportError> for CommandError {
fn from(err: GitImportError) -> Self {
let hint = match &err {
GitImportError::MissingHeadTarget { .. }
| GitImportError::MissingRefAncestor { .. } => Some(
"\
Is this Git repository a partial clone (cloned with the --filter argument)?
jj currently does not support partial clones. To use jj with this repository, try re-cloning with \
the full repository contents."
.to_string(),
),
GitImportError::RemoteReservedForLocalGitRepo => {
Some("Run `jj git remote rename` to give different name.".to_string())
}
GitImportError::InternalBackend(_) => None,
GitImportError::InternalGitError(_) => None,
GitImportError::UnexpectedBackend => None,
};
let mut cmd_err =
user_error_with_message("Failed to import refs from underlying Git repo", err);
cmd_err.extend_hints(hint);
cmd_err
}
}
impl From<TrailerParseError> for CommandError {
fn from(err: TrailerParseError) -> Self {
user_error(err)
impl From<GitExportError> for CommandError {
fn from(err: GitExportError) -> Self {
internal_error_with_message("Failed to export refs to underlying Git repo", err)
}
}
#[cfg(feature = "git")]
mod git {
use jj_lib::git::GitExportError;
use jj_lib::git::GitFetchError;
use jj_lib::git::GitImportError;
use jj_lib::git::GitPushError;
use jj_lib::git::GitRemoteManagementError;
use jj_lib::git::GitResetHeadError;
use jj_lib::git::UnexpectedGitBackendError;
use super::*;
impl From<GitImportError> for CommandError {
fn from(err: GitImportError) -> Self {
let hint = match &err {
GitImportError::MissingHeadTarget { .. }
| GitImportError::MissingRefAncestor { .. } => Some(
"\
Is this Git repository a partial clone (cloned with the --filter argument)?
jj currently does not support partial clones. To use jj with this repository, try re-cloning with \
the full repository contents."
.to_string(),
),
GitImportError::Backend(_) => None,
GitImportError::Git(_) => None,
GitImportError::UnexpectedBackend(_) => None,
};
let mut cmd_err =
user_error_with_message("Failed to import refs from underlying Git repo", err);
cmd_err.extend_hints(hint);
cmd_err
}
}
impl From<GitExportError> for CommandError {
fn from(err: GitExportError) -> Self {
user_error_with_message("Failed to export refs to underlying Git repo", err)
}
}
impl From<GitFetchError> for CommandError {
fn from(err: GitFetchError) -> Self {
if let GitFetchError::InvalidBranchPattern(pattern) = &err {
if pattern.as_exact().is_some_and(|s| s.contains('*')) {
return user_error_with_hint(
"Branch names may not include `*`.",
"Prefix the pattern with `glob:` to expand `*` as a glob",
);
}
}
match err {
GitFetchError::NoSuchRemote(_) => user_error(err),
GitFetchError::RemoteName(_) => user_error_with_hint(
err,
"Run `jj git remote rename` to give a different name.",
),
GitFetchError::InvalidBranchPattern(_) => user_error(err),
GitFetchError::Subprocess(_) => user_error(err),
}
}
}
impl From<GitPushError> for CommandError {
fn from(err: GitPushError) -> Self {
match err {
GitPushError::NoSuchRemote(_) => user_error(err),
GitPushError::RemoteName(_) => user_error_with_hint(
err,
"Run `jj git remote rename` to give a different name.",
),
GitPushError::Subprocess(_) => user_error(err),
GitPushError::UnexpectedBackend(_) => user_error(err),
}
}
}
impl From<GitRemoteManagementError> for CommandError {
fn from(err: GitRemoteManagementError) -> Self {
user_error(err)
}
}
impl From<GitResetHeadError> for CommandError {
fn from(err: GitResetHeadError) -> Self {
user_error_with_message("Failed to reset Git HEAD state", err)
}
}
impl From<UnexpectedGitBackendError> for CommandError {
fn from(err: UnexpectedGitBackendError) -> Self {
user_error(err)
}
impl From<GitRemoteManagementError> for CommandError {
fn from(err: GitRemoteManagementError) -> Self {
user_error(err)
}
}
@ -615,18 +497,6 @@ impl From<FilesetParseError> for CommandError {
}
}
impl From<RecoverWorkspaceError> for CommandError {
fn from(err: RecoverWorkspaceError) -> Self {
match err {
RecoverWorkspaceError::Backend(err) => err.into(),
RecoverWorkspaceError::Reset(err) => err.into(),
RecoverWorkspaceError::RewriteRootCommit(err) => err.into(),
RecoverWorkspaceError::TransactionCommit(err) => err.into(),
err @ RecoverWorkspaceError::WorkspaceMissingWorkingCopy(_) => user_error(err),
}
}
}
impl From<RevsetParseError> for CommandError {
fn from(err: RevsetParseError) -> Self {
let hint = revset_parse_error_hint(&err);
@ -680,6 +550,12 @@ impl From<clap::Error> for CommandError {
}
}
impl From<GitConfigParseError> for CommandError {
fn from(err: GitConfigParseError) -> Self {
internal_error_with_message("Failed to parse Git config", err)
}
}
impl From<WorkingCopyStateError> for CommandError {
fn from(err: WorkingCopyStateError) -> Self {
internal_error_with_message("Failed to access working copy state", err)
@ -698,36 +574,9 @@ impl From<ParseBulkEditMessageError> for CommandError {
}
}
impl From<AbsorbError> for CommandError {
fn from(err: AbsorbError) -> Self {
match err {
AbsorbError::Backend(err) => err.into(),
AbsorbError::RevsetEvaluation(err) => err.into(),
}
}
}
impl From<FixError> for CommandError {
fn from(err: FixError) -> Self {
match err {
FixError::Backend(err) => err.into(),
FixError::RevsetEvaluation(err) => err.into(),
FixError::IO(err) => err.into(),
FixError::FixContent(err) => internal_error_with_message(
"An error occurred while attempting to fix file content",
err,
),
}
}
}
fn find_source_parse_error_hint(err: &dyn error::Error) -> Option<String> {
let source = err.source()?;
if let Some(source) = source.downcast_ref() {
bookmark_name_parse_error_hint(source)
} else if let Some(source) = source.downcast_ref() {
config_get_error_hint(source)
} else if let Some(source) = source.downcast_ref() {
file_pattern_parse_error_hint(source)
} else if let Some(source) = source.downcast_ref() {
fileset_parse_error_hint(source)
@ -746,35 +595,9 @@ fn find_source_parse_error_hint(err: &dyn error::Error) -> Option<String> {
}
}
fn bookmark_name_parse_error_hint(err: &BookmarkNameParseError) -> Option<String> {
use revset::ExpressionKind;
match revset::parse_program(&err.input).map(|node| node.kind) {
Ok(ExpressionKind::RemoteSymbol(symbol)) => Some(format!(
"Looks like remote bookmark. Run `jj bookmark track {symbol}` to track it."
)),
_ => Some(
"See https://jj-vcs.github.io/jj/latest/revsets/ or use `jj help -k revsets` for how \
to quote symbols."
.into(),
),
}
}
fn config_get_error_hint(err: &ConfigGetError) -> Option<String> {
match &err {
ConfigGetError::NotFound { .. } => None,
ConfigGetError::Type { source_path, .. } => source_path
.as_ref()
.map(|path| format!("Check the config file: {}", path.display())),
}
}
fn file_pattern_parse_error_hint(err: &FilePatternParseError) -> Option<String> {
match err {
FilePatternParseError::InvalidKind(_) => Some(String::from(
"See https://jj-vcs.github.io/jj/latest/filesets/#file-patterns or `jj help -k \
filesets` for valid prefixes.",
)),
FilePatternParseError::InvalidKind(_) => None,
// Suggest root:"<path>" if input can be parsed as repo-relative path
FilePatternParseError::UiPath(UiPathParseError::Fs(e)) => {
RepoPathBuf::from_relative_path(&e.input).ok().map(|path| {
@ -789,8 +612,8 @@ fn file_pattern_parse_error_hint(err: &FilePatternParseError) -> Option<String>
fn fileset_parse_error_hint(err: &FilesetParseError) -> Option<String> {
match err.kind() {
FilesetParseErrorKind::SyntaxError => Some(String::from(
"See https://jj-vcs.github.io/jj/latest/filesets/ or use `jj help -k filesets` for \
filesets syntax and how to match file paths.",
"See https://martinvonz.github.io/jj/latest/filesets/ for filesets syntax, or for how \
to match file paths.",
)),
FilesetParseErrorKind::NoSuchFunction {
name: _,
@ -822,11 +645,6 @@ fn revset_parse_error_hint(err: &RevsetParseError) -> Option<String> {
// Only for the bottom error, which is usually the root cause
let bottom_err = iter::successors(Some(err), |e| e.origin()).last().unwrap();
match bottom_err.kind() {
RevsetParseErrorKind::SyntaxError => Some(
"See https://jj-vcs.github.io/jj/latest/revsets/ or use `jj help -k revsets` for \
revsets syntax and how to quote symbols."
.into(),
),
RevsetParseErrorKind::NotPrefixOperator {
op: _,
similar_op,
@ -841,7 +659,7 @@ fn revset_parse_error_hint(err: &RevsetParseError) -> Option<String> {
op: _,
similar_op,
description,
} => Some(format!("Did you mean `{similar_op}` for {description}?")),
} => Some(format!("Did you mean '{similar_op}' for {description}?")),
RevsetParseErrorKind::NoSuchFunction {
name: _,
candidates,
@ -862,18 +680,16 @@ fn revset_resolution_error_hint(err: &RevsetResolutionError) -> Option<String> {
| RevsetResolutionError::WorkspaceMissingWorkingCopy { .. }
| RevsetResolutionError::AmbiguousCommitIdPrefix(_)
| RevsetResolutionError::AmbiguousChangeIdPrefix(_)
| RevsetResolutionError::Backend(_)
| RevsetResolutionError::StoreError(_)
| RevsetResolutionError::Other(_) => None,
}
}
fn string_pattern_parse_error_hint(err: &StringPatternParseError) -> Option<String> {
match err {
StringPatternParseError::InvalidKind(_) => Some(
"Try prefixing with one of `exact:`, `glob:`, `regex:`, `substring:`, or one of these \
with `-i` suffix added (e.g. `glob-i:`) for case-insensitive matching"
.into(),
),
StringPatternParseError::InvalidKind(_) => {
Some("Try prefixing with one of `exact:`, `glob:`, `regex:`, or `substring:`".into())
}
StringPatternParseError::GlobPattern(_) | StringPatternParseError::Regex(_) => None,
}
}
@ -918,8 +734,7 @@ fn try_handle_command_result(
print_error(ui, "Config error: ", err, hints)?;
writeln!(
ui.stderr_formatter().labeled("hint"),
"For help, see https://jj-vcs.github.io/jj/latest/config/ or use `jj help -k \
config`."
"For help, see https://martinvonz.github.io/jj/latest/config/."
)?;
Ok(ExitCode::from(1))
}
@ -954,8 +769,7 @@ fn print_error(
Ok(())
}
/// Prints error sources one by one from the given `source` inclusive.
pub fn print_error_sources(ui: &Ui, source: Option<&dyn error::Error>) -> io::Result<()> {
fn print_error_sources(ui: &Ui, source: Option<&dyn error::Error>) -> io::Result<()> {
let Some(err) = source else {
return Ok(());
};
@ -966,7 +780,7 @@ pub fn print_error_sources(ui: &Ui, source: Option<&dyn error::Error>) -> io::Re
writeln!(formatter, "{err}")?;
} else {
writeln!(formatter.labeled("heading"), "Caused by:")?;
for (i, err) in iter::successors(Some(err), |&err| err.source()).enumerate() {
for (i, err) in iter::successors(Some(err), |err| err.source()).enumerate() {
write!(formatter.labeled("heading"), "{}: ", i + 1)?;
writeln!(formatter, "{err}")?;
}
@ -1020,8 +834,6 @@ fn handle_clap_error(ui: &mut Ui, err: &clap::Error, hints: &[ErrorHint]) -> io:
_ => {}
}
write!(ui.stderr(), "{clap_str}")?;
// Skip the first source error, which should be printed inline.
print_error_sources(ui, err.source().and_then(|err| err.source()))?;
print_error_hints(ui, hints)?;
Ok(ExitCode::from(2))
}
@ -1034,7 +846,7 @@ pub fn print_parse_diagnostics<T: error::Error>(
) -> io::Result<()> {
for diag in diagnostics {
writeln!(ui.warning_default(), "{context_message}")?;
for err in iter::successors(Some(diag as &dyn error::Error), |&err| err.source()) {
for err in iter::successors(Some(diag as &dyn error::Error), |err| err.source()) {
writeln!(ui.stderr(), "{err}")?;
}
// If we add support for multiple error diagnostics, we might have to do

View File

@ -12,27 +12,16 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::HashMap;
use std::collections::HashSet;
use std::io::Write as _;
use std::io::Write;
use clap_complete::ArgValueCompleter;
use itertools::Itertools as _;
use jj_lib::backend::CommitId;
use jj_lib::commit::CommitIteratorExt as _;
use jj_lib::object_id::ObjectId as _;
use jj_lib::refs::diff_named_ref_targets;
use jj_lib::repo::Repo as _;
use jj_lib::rewrite::RewriteRefsOptions;
use jj_lib::commit::CommitIteratorExt;
use jj_lib::object_id::ObjectId;
use tracing::instrument;
#[cfg(feature = "git")]
use crate::cli_util::has_tracked_remote_bookmarks;
use crate::cli_util::print_updated_commits;
use crate::cli_util::CommandHelper;
use crate::cli_util::RevisionArg;
use crate::command_error::CommandError;
use crate::complete;
use crate::ui::Ui;
/// Abandon a revision
@ -45,27 +34,15 @@ use crate::ui::Ui;
/// commit. This is true in general; it is not specific to this command.
#[derive(clap::Args, Clone, Debug)]
pub(crate) struct AbandonArgs {
/// The revision(s) to abandon (default: @)
#[arg(
value_name = "REVSETS",
add = ArgValueCompleter::new(complete::revset_expression_mutable),
)]
revisions_pos: Vec<RevisionArg>,
#[arg(
short = 'r',
hide = true,
value_name = "REVSETS",
add = ArgValueCompleter::new(complete::revset_expression_mutable),
)]
revisions_opt: Vec<RevisionArg>,
// TODO: Remove in jj 0.34+
#[arg(long, short, hide = true)]
/// The revision(s) to abandon
#[arg(default_value = "@")]
revisions: Vec<RevisionArg>,
/// Do not print every abandoned commit on a separate line
#[arg(long, short)]
summary: bool,
/// Do not delete bookmarks pointing to the revisions to abandon
///
/// Bookmarks will be moved to the parent revisions instead.
#[arg(long)]
retain_bookmarks: bool,
/// Ignored (but lets you pass `-r` for consistency with other commands)
#[arg(short = 'r', hide = true, action = clap::ArgAction::Count)]
unused_revision: u8,
/// Do not modify the content of the children of the abandoned commits
#[arg(long)]
restore_descendants: bool,
@ -77,83 +54,53 @@ pub(crate) fn cmd_abandon(
command: &CommandHelper,
args: &AbandonArgs,
) -> Result<(), CommandError> {
if args.summary {
writeln!(ui.warning_default(), "--summary is no longer supported.")?;
}
let mut workspace_command = command.workspace_helper(ui)?;
let to_abandon: Vec<_> = if !args.revisions_pos.is_empty() || !args.revisions_opt.is_empty() {
workspace_command
.parse_union_revsets(ui, &[&*args.revisions_pos, &*args.revisions_opt].concat())?
} else {
workspace_command.parse_revset(ui, &RevisionArg::AT)?
}
.evaluate_to_commits()?
.try_collect()?;
let to_abandon: Vec<_> = workspace_command
.parse_union_revsets(ui, &args.revisions)?
.evaluate_to_commits()?
.try_collect()?;
if to_abandon.is_empty() {
writeln!(ui.status(), "No revisions to abandon.")?;
return Ok(());
}
let to_abandon_set: HashSet<&CommitId> = to_abandon.iter().ids().collect();
workspace_command.check_rewritable(to_abandon_set.iter().copied())?;
workspace_command.check_rewritable(to_abandon.iter().ids())?;
let mut tx = workspace_command.start_transaction();
let options = RewriteRefsOptions {
delete_abandoned_bookmarks: !args.retain_bookmarks,
for commit in &to_abandon {
tx.repo_mut().record_abandoned_commit(commit.id().clone());
}
let (num_rebased, extra_msg) = if args.restore_descendants {
(
tx.repo_mut().reparent_descendants(command.settings())?,
" (while preserving their content)",
)
} else {
(tx.repo_mut().rebase_descendants(command.settings())?, "")
};
let mut num_rebased = 0;
tx.repo_mut().transform_descendants_with_options(
to_abandon_set.iter().copied().cloned().collect(),
&HashMap::new(),
&options,
|rewriter| {
if to_abandon_set.contains(rewriter.old_commit().id()) {
rewriter.abandon();
} else if args.restore_descendants {
rewriter.reparent().write()?;
num_rebased += 1;
} else {
rewriter.rebase()?.write()?;
num_rebased += 1;
}
Ok(())
},
)?;
let deleted_bookmarks = diff_named_ref_targets(
tx.base_repo().view().local_bookmarks(),
tx.repo().view().local_bookmarks(),
)
.filter(|(_, (_old, new))| new.is_absent())
.map(|(name, _)| name.to_owned())
.collect_vec();
if let Some(mut formatter) = ui.status_formatter() {
writeln!(formatter, "Abandoned {} commits:", to_abandon.len())?;
print_updated_commits(
formatter.as_mut(),
&tx.base_workspace_helper().commit_summary_template(),
&to_abandon,
)?;
if !deleted_bookmarks.is_empty() {
writeln!(
formatter,
"Deleted bookmarks: {}",
deleted_bookmarks.iter().map(|n| n.as_symbol()).join(", ")
)?;
if to_abandon.len() == 1 {
write!(formatter, "Abandoned commit ")?;
tx.base_workspace_helper()
.write_commit_summary(formatter.as_mut(), &to_abandon[0])?;
writeln!(ui.status())?;
} else if !args.summary {
let template = tx.base_workspace_helper().commit_summary_template();
writeln!(formatter, "Abandoned the following commits:")?;
for commit in &to_abandon {
write!(formatter, " ")?;
template.format(commit, formatter.as_mut())?;
writeln!(formatter)?;
}
} else {
writeln!(formatter, "Abandoned {} commits.", &to_abandon.len())?;
}
if num_rebased > 0 {
if args.restore_descendants {
writeln!(
formatter,
"Rebased {num_rebased} descendant commits (while preserving their content) \
onto parents of abandoned commits",
)?;
} else {
writeln!(
formatter,
"Rebased {num_rebased} descendant commits onto parents of abandoned commits",
)?;
}
writeln!(
formatter,
"Rebased {num_rebased} descendant commits{extra_msg} onto parents of abandoned \
commits",
)?;
}
}
let transaction_description = if to_abandon.len() == 1 {
@ -166,20 +113,5 @@ pub(crate) fn cmd_abandon(
)
};
tx.finish(ui, transaction_description)?;
#[cfg(feature = "git")]
if jj_lib::git::get_git_backend(workspace_command.repo().store()).is_ok() {
let view = workspace_command.repo().view();
if deleted_bookmarks
.iter()
.any(|name| has_tracked_remote_bookmarks(view, name))
{
writeln!(
ui.hint_default(),
"Deleted bookmarks can be pushed by name or all at once with `jj git push \
--deleted`."
)?;
}
}
Ok(())
}

View File

@ -1,142 +0,0 @@
// Copyright 2024 The Jujutsu Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use clap_complete::ArgValueCompleter;
use jj_lib::absorb::absorb_hunks;
use jj_lib::absorb::split_hunks_to_trees;
use jj_lib::absorb::AbsorbSource;
use jj_lib::matchers::EverythingMatcher;
use pollster::FutureExt as _;
use tracing::instrument;
use crate::cli_util::print_updated_commits;
use crate::cli_util::CommandHelper;
use crate::cli_util::RevisionArg;
use crate::command_error::CommandError;
use crate::complete;
use crate::diff_util::DiffFormat;
use crate::ui::Ui;
/// Move changes from a revision into the stack of mutable revisions
///
/// This command splits changes in the source revision and moves each change to
/// the closest mutable ancestor where the corresponding lines were modified
/// last. If the destination revision cannot be determined unambiguously, the
/// change will be left in the source revision.
///
/// The source revision will be abandoned if all changes are absorbed into the
/// destination revisions, and if the source revision has no description.
///
/// The modification made by `jj absorb` can be reviewed by `jj op show -p`.
#[derive(clap::Args, Clone, Debug)]
pub(crate) struct AbsorbArgs {
/// Source revision to absorb from
#[arg(
long, short,
default_value = "@",
value_name = "REVSET",
add = ArgValueCompleter::new(complete::revset_expression_mutable),
)]
from: RevisionArg,
/// Destination revisions to absorb into
///
/// Only ancestors of the source revision will be considered.
#[arg(
long, short = 't', visible_alias = "to",
default_value = "mutable()",
value_name = "REVSETS",
add = ArgValueCompleter::new(complete::revset_expression_mutable),
)]
into: Vec<RevisionArg>,
/// Move only changes to these paths (instead of all paths)
#[arg(value_name = "FILESETS", value_hint = clap::ValueHint::AnyPath)]
paths: Vec<String>,
}
#[instrument(skip_all)]
pub(crate) fn cmd_absorb(
ui: &mut Ui,
command: &CommandHelper,
args: &AbsorbArgs,
) -> Result<(), CommandError> {
let mut workspace_command = command.workspace_helper(ui)?;
let source_commit = workspace_command.resolve_single_rev(ui, &args.from)?;
let destinations = workspace_command
.parse_union_revsets(ui, &args.into)?
.resolve()?;
let matcher = workspace_command
.parse_file_patterns(ui, &args.paths)?
.to_matcher();
let repo = workspace_command.repo().as_ref();
let source = AbsorbSource::from_commit(repo, source_commit)?;
let selected_trees = split_hunks_to_trees(repo, &source, &destinations, &matcher).block_on()?;
let path_converter = workspace_command.path_converter();
for (path, reason) in selected_trees.skipped_paths {
let ui_path = path_converter.format_file_path(&path);
writeln!(ui.warning_default(), "Skipping {ui_path}: {reason}")?;
}
workspace_command.check_rewritable(selected_trees.target_commits.keys())?;
let mut tx = workspace_command.start_transaction();
let stats = absorb_hunks(tx.repo_mut(), &source, selected_trees.target_commits)?;
if let Some(mut formatter) = ui.status_formatter() {
if !stats.rewritten_destinations.is_empty() {
writeln!(
formatter,
"Absorbed changes into {} revisions:",
stats.rewritten_destinations.len()
)?;
print_updated_commits(
formatter.as_mut(),
&tx.commit_summary_template(),
stats.rewritten_destinations.iter().rev(),
)?;
}
if stats.num_rebased > 0 {
writeln!(
formatter,
"Rebased {} descendant commits.",
stats.num_rebased
)?;
}
}
tx.finish(
ui,
format!(
"absorb changes into {} commits",
stats.rewritten_destinations.len()
),
)?;
if let Some(mut formatter) = ui.status_formatter() {
if let Some(commit) = &stats.rewritten_source {
let repo = workspace_command.repo().as_ref();
if !commit.is_empty(repo)? {
writeln!(formatter, "Remaining changes:")?;
let diff_renderer = workspace_command.diff_renderer(vec![DiffFormat::Summary]);
let matcher = &EverythingMatcher; // also print excluded paths
let width = ui.term_width();
diff_renderer.show_patch(ui, formatter.as_mut(), commit, matcher, width)?;
}
}
}
Ok(())
}

View File

@ -12,44 +12,26 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use bstr::ByteVec as _;
use clap_complete::ArgValueCompleter;
use itertools::Itertools as _;
use jj_lib::object_id::ObjectId as _;
use jj_lib::object_id::ObjectId;
use jj_lib::rewrite::merge_commit_trees;
use tracing::instrument;
use crate::cli_util::CommandHelper;
use crate::cli_util::RevisionArg;
use crate::command_error::CommandError;
use crate::complete;
use crate::formatter::PlainTextFormatter;
use crate::ui::Ui;
/// Apply the reverse of given revisions on top of another revision
///
/// The description of the new revisions can be customized with the
/// `templates.backout_description` config variable.
/// Apply the reverse of a revision on top of another revision
#[derive(clap::Args, Clone, Debug)]
#[command(hide = true)]
pub(crate) struct BackoutArgs {
/// The revision(s) to apply the reverse of
#[arg(
long, short,
default_value = "@",
value_name = "REVSETS",
add = ArgValueCompleter::new(complete::revset_expression_all),
)]
#[arg(long, short, default_value = "@")]
revisions: Vec<RevisionArg>,
/// The revision to apply the reverse changes on top of
// TODO: It seems better to default this to `@-`. Maybe the working
// copy should be rebased on top?
#[arg(
long, short,
default_value = "@",
value_name = "REVSETS",
add = ArgValueCompleter::new(complete::revset_expression_all),
)]
#[arg(long, short, default_value = "@")]
destination: Vec<RevisionArg>,
}
@ -59,15 +41,6 @@ pub(crate) fn cmd_backout(
command: &CommandHelper,
args: &BackoutArgs,
) -> Result<(), CommandError> {
writeln!(
ui.warning_default(),
"`jj backout` is deprecated; use `jj revert` instead"
)?;
writeln!(
ui.warning_default(),
"`jj backout` will be removed in a future version, and this will be a hard error"
)?;
let mut workspace_command = command.workspace_helper(ui)?;
let to_back_out: Vec<_> = workspace_command
.parse_union_revsets(ui, &args.revisions)?
@ -82,6 +55,7 @@ pub(crate) fn cmd_backout(
let destination = workspace_command.resolve_single_rev(ui, revision_str)?;
parents.push(destination);
}
let mut tx = workspace_command.start_transaction();
let transaction_description = if to_back_out.len() == 1 {
format!("back out commit {}", to_back_out[0].id().hex())
} else {
@ -91,38 +65,25 @@ pub(crate) fn cmd_backout(
to_back_out.len() - 1
)
};
let commits_to_back_out_with_new_commit_descriptions = {
let template_text = command
.settings()
.get_string("templates.backout_description")?;
let template = workspace_command.parse_commit_template(ui, &template_text)?;
to_back_out
.into_iter()
.map(|commit| {
let mut output = Vec::new();
template
.format(&commit, &mut PlainTextFormatter::new(&mut output))
.expect("write() to vec backed formatter should never fail");
// Template output is usually UTF-8, but it can contain file content.
let commit_description = output.into_string_lossy();
(commit, commit_description)
})
.collect_vec()
};
let mut tx = workspace_command.start_transaction();
let mut new_base_tree = merge_commit_trees(tx.repo(), &parents)?;
for (commit_to_back_out, new_commit_description) in
commits_to_back_out_with_new_commit_descriptions
{
for commit_to_back_out in to_back_out {
let commit_to_back_out_subject = commit_to_back_out
.description()
.lines()
.next()
.unwrap_or_default();
let new_commit_description = format!(
"Back out \"{}\"\n\nThis backs out commit {}.\n",
commit_to_back_out_subject,
&commit_to_back_out.id().hex()
);
let old_base_tree = commit_to_back_out.parent_tree(tx.repo())?;
let old_tree = commit_to_back_out.tree()?;
let new_tree = new_base_tree.merge(&old_tree, &old_base_tree)?;
let new_parent_ids = parents.iter().map(|commit| commit.id().clone()).collect();
let new_commit = tx
.repo_mut()
.new_commit(new_parent_ids, new_tree.id())
.new_commit(command.settings(), new_parent_ids, new_tree.id())
.set_description(new_commit_description)
.write()?;
parents = vec![new_commit];

View File

@ -19,9 +19,10 @@ use criterion::measurement::Measurement;
use criterion::BatchSize;
use criterion::BenchmarkGroup;
use criterion::BenchmarkId;
use jj_lib::revset;
use jj_lib::revset::DefaultSymbolResolver;
use jj_lib::revset::RevsetExpression;
use jj_lib::revset::SymbolResolverExtension;
use jj_lib::revset::UserRevsetExpression;
use super::new_criterion;
use super::CriterionArgs;
@ -79,13 +80,14 @@ fn bench_revset<M: Measurement>(
revset: &RevisionArg,
) -> Result<(), CommandError> {
writeln!(ui.status(), "----------Testing revset: {revset}----------")?;
let expression = workspace_command
.parse_revset(ui, revset)?
.expression()
.clone();
let expression = revset::optimize(
workspace_command
.parse_revset(ui, revset)?
.expression()
.clone(),
);
// Time both evaluation and iteration.
let routine = |workspace_command: &WorkspaceCommandHelper,
expression: Rc<UserRevsetExpression>| {
let routine = |workspace_command: &WorkspaceCommandHelper, expression: Rc<RevsetExpression>| {
// Evaluate the expression without parsing/evaluating short-prefixes.
let repo = workspace_command.repo().as_ref();
let symbol_resolver =

View File

@ -12,42 +12,30 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use clap_complete::ArgValueCompleter;
use itertools::Itertools as _;
use clap::builder::NonEmptyStringValueParser;
use jj_lib::object_id::ObjectId as _;
use jj_lib::op_store::RefTarget;
use jj_lib::ref_name::RefNameBuf;
use crate::cli_util::has_tracked_remote_bookmarks;
use super::has_tracked_remote_bookmarks;
use crate::cli_util::CommandHelper;
use crate::cli_util::RevisionArg;
use crate::command_error::user_error_with_hint;
use crate::command_error::CommandError;
use crate::complete;
use crate::revset_util;
use crate::ui::Ui;
/// Create a new bookmark
#[derive(clap::Args, Clone, Debug)]
pub struct BookmarkCreateArgs {
// TODO(#5374): Make required in jj 0.32+
/// The bookmark's target revision
//
// The `--to` alias exists for making it easier for the user to switch
// between `bookmark create`, `bookmark move`, and `bookmark set`. Currently target revision
// defaults to the working copy if not specified, but in the near future it will be required to
// explicitly specify it.
#[arg(
long, short,
visible_alias = "to",
value_name = "REVSET",
add = ArgValueCompleter::new(complete::revset_expression_all),
)]
// between `bookmark create`, `bookmark move`, and `bookmark set`.
#[arg(long, short, visible_alias = "to")]
revision: Option<RevisionArg>,
/// The bookmarks to create
#[arg(required = true, value_parser = revset_util::parse_bookmark_name)]
names: Vec<RefNameBuf>,
#[arg(required = true, value_parser = NonEmptyStringValueParser::new())]
names: Vec<String>,
}
pub fn cmd_bookmark_create(
@ -56,13 +44,6 @@ pub fn cmd_bookmark_create(
args: &BookmarkCreateArgs,
) -> Result<(), CommandError> {
let mut workspace_command = command.workspace_helper(ui)?;
if args.revision.is_none() {
writeln!(
ui.warning_default(),
"Target revision was not specified, defaulting to the working copy (-r@). In the near \
future it will be required to explicitly specify target revision."
)?;
}
let target_commit = workspace_command
.resolve_single_rev(ui, args.revision.as_ref().unwrap_or(&RevisionArg::AT))?;
let view = workspace_command.repo().view();
@ -70,29 +51,27 @@ pub fn cmd_bookmark_create(
for name in bookmark_names {
if view.get_local_bookmark(name).is_present() {
return Err(user_error_with_hint(
format!("Bookmark already exists: {name}", name = name.as_symbol()),
format!("Bookmark already exists: {name}"),
"Use `jj bookmark set` to update it.",
));
}
if has_tracked_remote_bookmarks(view, name) {
return Err(user_error_with_hint(
format!(
"Tracked remote bookmarks exist for deleted bookmark: {name}",
name = name.as_symbol()
),
format!("Tracked remote bookmarks exist for deleted bookmark: {name}"),
format!(
"Use `jj bookmark set` to recreate the local bookmark. Run `jj bookmark \
untrack 'glob:{name}@*'` to disassociate them.",
name = name.as_symbol()
untrack 'glob:{name}@*'` to disassociate them."
),
));
}
}
let mut tx = workspace_command.start_transaction();
for name in bookmark_names {
tx.repo_mut()
.set_local_bookmark_target(name, RefTarget::normal(target_commit.id().clone()));
for bookmark_name in bookmark_names {
tx.repo_mut().set_local_bookmark_target(
bookmark_name,
RefTarget::normal(target_commit.id().clone()),
);
}
if let Some(mut formatter) = ui.status_formatter() {
@ -104,11 +83,15 @@ pub fn cmd_bookmark_create(
tx.write_commit_summary(formatter.as_mut(), &target_commit)?;
writeln!(formatter)?;
}
if bookmark_names.len() > 1 && args.revision.is_none() {
writeln!(ui.hint_default(), "Use -r to specify the target revision.")?;
}
tx.finish(
ui,
format!(
"create bookmark {names} pointing to commit {id}",
names = bookmark_names.iter().map(|n| n.as_symbol()).join(", "),
names = bookmark_names.join(", "),
id = target_commit.id().hex()
),
)?;

View File

@ -12,7 +12,6 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use clap_complete::ArgValueCandidates;
use itertools::Itertools as _;
use jj_lib::op_store::RefTarget;
use jj_lib::str_util::StringPattern;
@ -20,33 +19,18 @@ use jj_lib::str_util::StringPattern;
use super::find_local_bookmarks;
use crate::cli_util::CommandHelper;
use crate::command_error::CommandError;
use crate::complete;
use crate::ui::Ui;
/// Delete an existing bookmark and propagate the deletion to remotes on the
/// next push
///
/// Revisions referred to by the deleted bookmarks are not abandoned. To delete
/// revisions as well as bookmarks, use `jj abandon`. For example, `jj abandon
/// main..<bookmark>` will abandon revisions belonging to the `<bookmark>`
/// branch (relative to the `main` branch).
///
/// If you don't want the deletion of the local bookmark to propagate to any
/// tracked remote bookmarks, use `jj bookmark forget` instead.
#[derive(clap::Args, Clone, Debug)]
pub struct BookmarkDeleteArgs {
/// The bookmarks to delete
///
/// By default, the specified name matches exactly. Use `glob:` prefix to
/// select bookmarks by [wildcard pattern].
///
/// [wildcard pattern]:
/// https://jj-vcs.github.io/jj/latest/revsets/#string-patterns
#[arg(
required = true,
value_parser = StringPattern::parse,
add = ArgValueCandidates::new(complete::local_bookmarks),
)]
/// select bookmarks by wildcard pattern. For details, see
/// https://martinvonz.github.io/jj/latest/revsets/#string-patterns.
#[arg(required = true, value_parser = StringPattern::parse)]
names: Vec<StringPattern>,
}
@ -72,10 +56,7 @@ pub fn cmd_bookmark_delete(
ui,
format!(
"delete bookmark {}",
matched_bookmarks
.iter()
.map(|(name, _)| name.as_symbol())
.join(", ")
matched_bookmarks.iter().map(|(name, _)| name).join(", ")
),
)?;
Ok(())

View File

@ -12,49 +12,31 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use clap_complete::ArgValueCandidates;
use itertools::Itertools as _;
use jj_lib::op_store::BookmarkTarget;
use jj_lib::op_store::RefTarget;
use jj_lib::op_store::RemoteRef;
use jj_lib::ref_name::RefName;
use jj_lib::str_util::StringPattern;
use jj_lib::view::View;
use super::find_bookmarks_with;
use crate::cli_util::CommandHelper;
use crate::command_error::CommandError;
use crate::complete;
use crate::ui::Ui;
/// Forget a bookmark without marking it as a deletion to be pushed
/// Forget everything about a bookmark, including its local and remote
/// targets
///
/// If a local bookmark is forgotten, any corresponding remote bookmarks will
/// become untracked to ensure that the forgotten bookmark will not impact
/// remotes on future pushes.
/// A forgotten bookmark will not impact remotes on future pushes. It will be
/// recreated on future pulls if it still exists in the remote.
#[derive(clap::Args, Clone, Debug)]
pub struct BookmarkForgetArgs {
/// When forgetting a local bookmark, also forget any corresponding remote
/// bookmarks
///
/// A forgotten remote bookmark will not impact remotes on future pushes. It
/// will be recreated on future fetches if it still exists on the remote. If
/// there is a corresponding Git-tracking remote bookmark, it will also be
/// forgotten.
#[arg(long)]
include_remotes: bool,
/// The bookmarks to forget
///
/// By default, the specified name matches exactly. Use `glob:` prefix to
/// select bookmarks by [wildcard pattern].
///
/// [wildcard pattern]:
/// https://jj-vcs.github.io/jj/latest/revsets/#string-patterns
#[arg(
required = true,
value_parser = StringPattern::parse,
add = ArgValueCandidates::new(complete::bookmarks),
)]
/// select bookmarks by wildcard pattern. For details, see
/// https://martinvonz.github.io/jj/latest/revsets/#string-patterns.
#[arg(required = true, value_parser = StringPattern::parse)]
names: Vec<StringPattern>,
}
@ -67,50 +49,32 @@ pub fn cmd_bookmark_forget(
let repo = workspace_command.repo().clone();
let matched_bookmarks = find_forgettable_bookmarks(repo.view(), &args.names)?;
let mut tx = workspace_command.start_transaction();
let mut forgotten_remote: usize = 0;
for (name, bookmark_target) in &matched_bookmarks {
tx.repo_mut()
.set_local_bookmark_target(name, RefTarget::absent());
for (remote, _) in &bookmark_target.remote_refs {
let symbol = name.to_remote_symbol(remote);
// If `--include-remotes` is specified, we forget the corresponding remote
// bookmarks instead of untracking them
if args.include_remotes {
tx.repo_mut()
.set_remote_bookmark(symbol, RemoteRef::absent());
forgotten_remote += 1;
continue;
}
// Git-tracking remote bookmarks cannot be untracked currently, so skip them
if jj_lib::git::is_special_git_remote(symbol.remote) {
continue;
}
tx.repo_mut().untrack_remote_bookmark(symbol);
for (remote_name, _) in &bookmark_target.remote_refs {
tx.repo_mut()
.set_remote_bookmark(name, remote_name, RemoteRef::absent());
}
}
writeln!(
ui.status(),
"Forgot {} local bookmarks.",
matched_bookmarks.len()
)?;
writeln!(ui.status(), "Forgot {} bookmarks.", matched_bookmarks.len())?;
tx.finish(
ui,
format!(
"forget bookmark {}",
matched_bookmarks.iter().map(|(name, _)| name).join(", ")
),
)?;
if forgotten_remote != 0 {
writeln!(ui.status(), "Forgot {forgotten_remote} remote bookmarks.")?;
}
let forgotten_bookmarks = matched_bookmarks
.iter()
.map(|(name, _)| name.as_symbol())
.join(", ");
tx.finish(ui, format!("forget bookmark {forgotten_bookmarks}"))?;
Ok(())
}
fn find_forgettable_bookmarks<'a>(
view: &'a View,
name_patterns: &[StringPattern],
) -> Result<Vec<(&'a RefName, BookmarkTarget<'a>)>, CommandError> {
) -> Result<Vec<(&'a str, BookmarkTarget<'a>)>, CommandError> {
find_bookmarks_with(name_patterns, |pattern| {
view.bookmarks()
.filter(|(name, _)| pattern.matches(name.as_str()))
.filter(|(name, _)| pattern.matches(name))
.map(Ok)
})
}

View File

@ -12,29 +12,18 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use std::cmp;
use std::collections::HashMap;
use std::collections::HashSet;
use std::rc::Rc;
use std::sync::Arc;
use clap::ValueEnum;
use clap_complete::ArgValueCandidates;
use itertools::Itertools as _;
use jj_lib::backend;
use jj_lib::backend::CommitId;
use jj_lib::config::ConfigValue;
use jj_lib::ref_name::RefName;
use jj_lib::repo::Repo as _;
use itertools::Itertools;
use jj_lib::git;
use jj_lib::revset::RevsetExpression;
use jj_lib::str_util::StringPattern;
use crate::cli_util::CommandHelper;
use crate::cli_util::RevisionArg;
use crate::command_error::CommandError;
use crate::commit_templater::CommitRef;
use crate::complete;
use crate::templater::TemplateRenderer;
use crate::commit_templater::CommitTemplateLanguage;
use crate::commit_templater::RefName;
use crate::ui::Ui;
/// List bookmarks and their targets
@ -45,10 +34,8 @@ use crate::ui::Ui;
/// revisions are preceded by a "-" and new target revisions are preceded by a
/// "+".
///
/// See [`jj help -k bookmarks`] for more information.
///
/// [`jj help -k bookmarks`]:
/// https://jj-vcs.github.io/jj/latest/bookmarks
/// For information about bookmarks, see
/// https://martinvonz.github.io/jj/latest/bookmarks/.
#[derive(clap::Args, Clone, Debug)]
pub struct BookmarkListArgs {
/// Show all tracking and non-tracking remote bookmarks including the ones
@ -63,16 +50,13 @@ pub struct BookmarkListArgs {
/// bookmarks shown (can be repeated.)
///
/// By default, the specified remote name matches exactly. Use `glob:`
/// prefix to select remotes by [wildcard pattern].
///
/// [wildcard pattern]:
/// https://jj-vcs.github.io/jj/latest/revsets/#string-patterns
/// prefix to select remotes by wildcard pattern. For details, see
/// https://martinvonz.github.io/jj/latest/revsets/#string-patterns.
#[arg(
long = "remote",
value_name = "REMOTE",
conflicts_with_all = ["all_remotes"],
value_parser = StringPattern::parse,
add = ArgValueCandidates::new(complete::git_remotes),
)]
remotes: Option<Vec<StringPattern>>,
@ -88,43 +72,25 @@ pub struct BookmarkListArgs {
/// Show bookmarks whose local name matches
///
/// By default, the specified name matches exactly. Use `glob:` prefix to
/// select bookmarks by [wildcard pattern].
///
/// [wildcard pattern]:
/// https://jj-vcs.github.io/jj/latest/revsets/#string-patterns
#[arg(value_parser = StringPattern::parse, add = ArgValueCandidates::new(complete::bookmarks))]
/// select bookmarks by wildcard pattern. For details, see
/// https://martinvonz.github.io/jj/latest/revsets/#string-patterns.
#[arg(value_parser = StringPattern::parse)]
names: Option<Vec<StringPattern>>,
/// Show bookmarks whose local targets are in the given revisions
///
/// Note that `-r deleted_bookmark` will not work since `deleted_bookmark`
/// wouldn't have a local target.
#[arg(long, short, value_name = "REVSETS")]
#[arg(long, short)]
revisions: Option<Vec<RevisionArg>>,
/// Render each bookmark using the given template
///
/// All 0-argument methods of the [`CommitRef` type] are available as
/// keywords in the template expression. See [`jj help -k templates`]
/// for more information.
/// All 0-argument methods of the `RefName` type are available as keywords.
///
/// [`CommitRef` type]:
/// https://jj-vcs.github.io/jj/latest/templates/#commitref-type
///
/// [`jj help -k templates`]:
/// https://jj-vcs.github.io/jj/latest/templates/
#[arg(long, short = 'T', add = ArgValueCandidates::new(complete::template_aliases))]
/// For the syntax, see https://martinvonz.github.io/jj/latest/templates/
#[arg(long, short = 'T')]
template: Option<String>,
/// Sort bookmarks based on the given key (or multiple keys)
///
/// Suffix the key with `-` to sort in descending order of the value (e.g.
/// `--sort name-`). Note that when using multiple keys, the first key is
/// the most significant.
///
/// This defaults to the `ui.bookmark-list-sort-keys` setting.
#[arg(long, value_name = "SORT_KEY", value_enum, value_delimiter = ',')]
sort: Vec<SortKey>,
}
pub fn cmd_bookmark_list(
@ -138,15 +104,11 @@ pub fn cmd_bookmark_list(
// Like cmd_git_push(), names and revisions are OR-ed.
let bookmark_names_to_list = if args.names.is_some() || args.revisions.is_some() {
let mut bookmark_names: HashSet<&RefName> = HashSet::new();
let mut bookmark_names: HashSet<&str> = HashSet::new();
if let Some(patterns) = &args.names {
bookmark_names.extend(
view.bookmarks()
.filter(|(name, _)| {
patterns
.iter()
.any(|pattern| pattern.matches(name.as_str()))
})
.filter(|&(name, _)| patterns.iter().any(|pattern| pattern.matches(name)))
.map(|(name, _)| name),
);
}
@ -171,711 +133,95 @@ pub fn cmd_bookmark_list(
None
};
let template: TemplateRenderer<Rc<CommitRef>> = {
let template = {
let language = workspace_command.commit_template_language();
let text = match &args.template {
Some(value) => value.to_owned(),
None => workspace_command
.settings()
.get("templates.bookmark_list")?,
None => command.settings().config().get("templates.bookmark_list")?,
};
workspace_command
.parse_template(ui, &language, &text)?
.parse_template(ui, &language, &text, CommitTemplateLanguage::wrap_ref_name)?
.labeled("bookmark_list")
};
let mut bookmark_list_items: Vec<RefListItem> = Vec::new();
ui.request_pager();
let mut formatter = ui.stdout_formatter();
let mut found_deleted_local_bookmark = false;
let mut found_deleted_tracking_local_bookmark = false;
let bookmarks_to_list = view.bookmarks().filter(|(name, target)| {
bookmark_names_to_list
.as_ref()
.is_none_or(|bookmark_names| bookmark_names.contains(name))
.map_or(true, |bookmark_names| bookmark_names.contains(name))
&& (!args.conflicted || target.local_target.has_conflict())
});
for (name, bookmark_target) in bookmarks_to_list {
let local_target = bookmark_target.local_target;
let remote_refs = bookmark_target.remote_refs;
let (mut tracked_remote_refs, untracked_remote_refs) = remote_refs
let (mut tracking_remote_refs, untracked_remote_refs) = remote_refs
.iter()
.copied()
.filter(|(remote_name, _)| {
args.remotes.as_ref().is_none_or(|patterns| {
patterns
.iter()
.any(|pattern| pattern.matches(remote_name.as_str()))
.filter(|&(remote_name, _)| {
args.remotes.as_ref().map_or(true, |patterns| {
patterns.iter().any(|pattern| pattern.matches(remote_name))
})
})
.partition::<Vec<_>, _>(|&(_, remote_ref)| remote_ref.is_tracked());
.partition::<Vec<_>, _>(|&(_, remote_ref)| remote_ref.is_tracking());
if args.tracked {
tracked_remote_refs.retain(|&(remote, _)| !jj_lib::git::is_special_git_remote(remote));
tracking_remote_refs
.retain(|&(remote, _)| remote != git::REMOTE_NAME_FOR_LOCAL_GIT_REPO);
} else if !args.all_remotes && args.remotes.is_none() {
tracked_remote_refs.retain(|&(_, remote_ref)| remote_ref.target != *local_target);
tracking_remote_refs.retain(|&(_, remote_ref)| remote_ref.target != *local_target);
}
let include_local_only = !args.tracked && args.remotes.is_none();
if include_local_only && local_target.is_present() || !tracked_remote_refs.is_empty() {
let primary = CommitRef::local(
if include_local_only && local_target.is_present() || !tracking_remote_refs.is_empty() {
let ref_name = RefName::local(
name,
local_target.clone(),
remote_refs.iter().map(|&(_, remote_ref)| remote_ref),
);
let tracked = tracked_remote_refs
template.format(&ref_name, formatter.as_mut())?;
}
for &(remote, remote_ref) in &tracking_remote_refs {
let ref_name = RefName::remote(name, remote, remote_ref.clone(), local_target);
template.format(&ref_name, formatter.as_mut())?;
}
if local_target.is_absent() && !tracking_remote_refs.is_empty() {
found_deleted_local_bookmark = true;
found_deleted_tracking_local_bookmark |= tracking_remote_refs
.iter()
.map(|&(remote, remote_ref)| {
CommitRef::remote(name, remote, remote_ref.clone(), local_target)
})
.collect();
bookmark_list_items.push(RefListItem { primary, tracked });
.any(|&(remote, _)| remote != git::REMOTE_NAME_FOR_LOCAL_GIT_REPO);
}
if !args.tracked && (args.all_remotes || args.remotes.is_some()) {
bookmark_list_items.extend(untracked_remote_refs.iter().map(
|&(remote, remote_ref)| RefListItem {
primary: CommitRef::remote_only(name, remote, remote_ref.target.clone()),
tracked: vec![],
},
));
for &(remote, remote_ref) in &untracked_remote_refs {
let ref_name = RefName::remote_only(name, remote, remote_ref.target.clone());
template.format(&ref_name, formatter.as_mut())?;
}
}
}
let sort_keys = if args.sort.is_empty() {
workspace_command
.settings()
.get_value_with("ui.bookmark-list-sort-keys", parse_sort_keys)?
} else {
args.sort.clone()
};
let store = repo.store();
let mut commits: HashMap<CommitId, Arc<backend::Commit>> = HashMap::new();
if sort_keys.iter().any(|key| key.is_commit_dependant()) {
commits = bookmark_list_items
.iter()
.filter_map(|item| item.primary.target().added_ids().next())
.cloned()
.map(|commit_id| {
store
.get_commit(&commit_id)
.map(|commit| (commit_id, commit.store_commit().clone()))
})
.try_collect()?;
}
sort(&mut bookmark_list_items, &sort_keys, &commits);
ui.request_pager();
let mut formatter = ui.stdout_formatter();
bookmark_list_items
.iter()
.flat_map(|item| itertools::chain([&item.primary], &item.tracked))
.try_for_each(|commit_ref| template.format(commit_ref, formatter.as_mut()))?;
drop(formatter);
#[cfg(feature = "git")]
if jj_lib::git::get_git_backend(repo.store()).is_ok() {
// Print only one of these hints. It's not important to mention unexported
// bookmarks, but the user might wonder why deleted bookmarks are still listed.
let deleted_tracking = bookmark_list_items
.iter()
.filter(|item| item.primary.is_local() && item.primary.is_absent())
.map(|item| {
item.tracked.iter().any(|r| {
let remote = r.remote_name().expect("tracked ref should be remote");
!jj_lib::git::is_special_git_remote(remote.as_ref())
})
})
.max();
match deleted_tracking {
Some(true) => {
writeln!(
ui.hint_default(),
"Bookmarks marked as deleted can be *deleted permanently* on the remote by \
running `jj git push --deleted`. Use `jj bookmark forget` if you don't want \
that."
)?;
}
Some(false) => {
writeln!(
ui.hint_default(),
"Bookmarks marked as deleted will be deleted from the underlying Git repo on \
the next `jj git export`."
)?;
}
None => {}
}
// Print only one of these hints. It's not important to mention unexported
// bookmarks, but the user might wonder why deleted bookmarks are still listed.
if found_deleted_tracking_local_bookmark {
writeln!(
ui.hint_default(),
"Bookmarks marked as deleted will be *deleted permanently* on the remote on the next \
`jj git push`. Use `jj bookmark forget` to prevent this."
)?;
} else if found_deleted_local_bookmark {
writeln!(
ui.hint_default(),
"Bookmarks marked as deleted will be deleted from the underlying Git repo on the next \
`jj git export`."
)?;
}
Ok(())
}
#[derive(Clone, Debug)]
struct RefListItem {
/// Local bookmark or untracked remote bookmark.
primary: Rc<CommitRef>,
/// Remote bookmarks tracked by the primary (or local) bookmark.
tracked: Vec<Rc<CommitRef>>,
}
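// Illustrative sketch, not part of the diff above: each `RefListItem` pairs a primary
// (local or untracked remote) ref with the remote refs it tracks, and rendering flattens
// every group into "primary first, then tracked" order. A std-only analogue with made-up
// names and `String` standing in for the ref types.
struct Item {
    primary: String,
    tracked: Vec<String>,
}

fn render(items: &[Item]) -> Vec<String> {
    items
        .iter()
        // Emit the primary entry first, then each tracked remote entry.
        .flat_map(|item| std::iter::once(&item.primary).chain(item.tracked.iter()))
        .cloned()
        .collect()
}

fn main() {
    let items = [Item {
        primary: "main".to_owned(),
        tracked: vec!["main@origin".to_owned()],
    }];
    assert_eq!(render(&items), ["main", "main@origin"]);
}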
/// Sort key for the `--sort` argument option.
#[derive(Copy, Clone, PartialEq, Debug, ValueEnum)]
enum SortKey {
Name,
#[value(name = "name-")]
NameDesc,
AuthorName,
#[value(name = "author-name-")]
AuthorNameDesc,
AuthorEmail,
#[value(name = "author-email-")]
AuthorEmailDesc,
AuthorDate,
#[value(name = "author-date-")]
AuthorDateDesc,
CommitterName,
#[value(name = "committer-name-")]
CommitterNameDesc,
CommitterEmail,
#[value(name = "committer-email-")]
CommitterEmailDesc,
CommitterDate,
#[value(name = "committer-date-")]
CommitterDateDesc,
}
impl SortKey {
fn is_commit_dependant(&self) -> bool {
match self {
SortKey::Name | SortKey::NameDesc => false,
SortKey::AuthorName
| SortKey::AuthorNameDesc
| SortKey::AuthorEmail
| SortKey::AuthorEmailDesc
| SortKey::AuthorDate
| SortKey::AuthorDateDesc
| SortKey::CommitterName
| SortKey::CommitterNameDesc
| SortKey::CommitterEmail
| SortKey::CommitterEmailDesc
| SortKey::CommitterDate
| SortKey::CommitterDateDesc => true,
}
}
}
fn parse_sort_keys(value: ConfigValue) -> Result<Vec<SortKey>, String> {
if let Some(array) = value.as_array() {
array
.iter()
.map(|item| {
item.as_str()
.ok_or("Expected sort key as a string".to_owned())
.and_then(|key| SortKey::from_str(key, false))
})
.try_collect()
} else {
Err("Expected an array of sort keys as strings".to_owned())
}
}
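// Illustrative sketch, not part of the diff above: the `--sort` help describes keys such
// as `name` and `committer-date-`, where a trailing `-` selects descending order. This
// std-only sketch shows only that suffix convention; it is not jj's actual parser, which
// goes through clap's `ValueEnum` and the config layer as seen above.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Direction {
    Ascending,
    Descending,
}

// Split a raw key like "committer-date-" into its base name and sort direction.
fn parse_key(raw: &str) -> Result<(String, Direction), String> {
    let (base, direction) = match raw.strip_suffix('-') {
        Some(base) => (base, Direction::Descending),
        None => (raw, Direction::Ascending),
    };
    if base.is_empty() {
        return Err("empty sort key".to_owned());
    }
    Ok((base.to_owned(), direction))
}

fn main() {
    assert_eq!(
        parse_key("committer-date-"),
        Ok(("committer-date".to_owned(), Direction::Descending))
    );
    assert_eq!(parse_key("name"), Ok(("name".to_owned(), Direction::Ascending)));
}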
fn sort(
bookmark_items: &mut [RefListItem],
sort_keys: &[SortKey],
commits: &HashMap<CommitId, Arc<backend::Commit>>,
) {
let to_commit = |item: &RefListItem| {
let id = item.primary.target().added_ids().next()?;
commits.get(id)
};
// Multi-pass sorting: the first key is the most significant.
// Skip the initial pass(es) whose sort key is `Name`, since bookmarks are already
// sorted by name.
for sort_key in sort_keys
.iter()
.rev()
.skip_while(|key| *key == &SortKey::Name)
{
match sort_key {
SortKey::Name => {
bookmark_items.sort_by_key(|item| {
(
item.primary.name().to_owned(),
item.primary.remote_name().map(|name| name.to_owned()),
)
});
}
SortKey::NameDesc => {
bookmark_items.sort_by_key(|item| {
cmp::Reverse((
item.primary.name().to_owned(),
item.primary.remote_name().map(|name| name.to_owned()),
))
});
}
SortKey::AuthorName => bookmark_items
.sort_by_key(|item| to_commit(item).map(|commit| commit.author.name.as_str())),
SortKey::AuthorNameDesc => bookmark_items.sort_by_key(|item| {
cmp::Reverse(to_commit(item).map(|commit| commit.author.name.as_str()))
}),
SortKey::AuthorEmail => bookmark_items
.sort_by_key(|item| to_commit(item).map(|commit| commit.author.email.as_str())),
SortKey::AuthorEmailDesc => bookmark_items.sort_by_key(|item| {
cmp::Reverse(to_commit(item).map(|commit| commit.author.email.as_str()))
}),
SortKey::AuthorDate => bookmark_items
.sort_by_key(|item| to_commit(item).map(|commit| commit.author.timestamp)),
SortKey::AuthorDateDesc => bookmark_items.sort_by_key(|item| {
cmp::Reverse(to_commit(item).map(|commit| commit.author.timestamp))
}),
SortKey::CommitterName => bookmark_items
.sort_by_key(|item| to_commit(item).map(|commit| commit.committer.name.as_str())),
SortKey::CommitterNameDesc => bookmark_items.sort_by_key(|item| {
cmp::Reverse(to_commit(item).map(|commit| commit.committer.name.as_str()))
}),
SortKey::CommitterEmail => bookmark_items
.sort_by_key(|item| to_commit(item).map(|commit| commit.committer.email.as_str())),
SortKey::CommitterEmailDesc => bookmark_items.sort_by_key(|item| {
cmp::Reverse(to_commit(item).map(|commit| commit.committer.email.as_str()))
}),
SortKey::CommitterDate => bookmark_items
.sort_by_key(|item| to_commit(item).map(|commit| commit.committer.timestamp)),
SortKey::CommitterDateDesc => bookmark_items.sort_by_key(|item| {
cmp::Reverse(to_commit(item).map(|commit| commit.committer.timestamp))
}),
}
}
}
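// Illustrative sketch, not part of the diff above: `sort` relies on stable sorting and
// applies the keys from least to most significant, so the first key listed ends up
// dominating. The same technique on a made-up record type, using only std.
#[derive(Debug, PartialEq)]
struct Row {
    name: &'static str,
    date: u64,
}

// Apply the least significant key first; because `sort_by_key` is stable, a later pass
// for a more significant key preserves the relative order of rows it considers equal.
fn sort_rows(rows: &mut [Row], keys: &[&str]) {
    for key in keys.iter().rev() {
        match *key {
            "name" => rows.sort_by_key(|row| row.name),
            "date" => rows.sort_by_key(|row| row.date),
            _ => {}
        }
    }
}

fn main() {
    let mut rows = vec![
        Row { name: "b", date: 2 },
        Row { name: "a", date: 2 },
        Row { name: "a", date: 1 },
    ];
    // Sort by name first, then by date within equal names.
    sort_rows(&mut rows, &["name", "date"]);
    assert_eq!(
        rows,
        vec![
            Row { name: "a", date: 1 },
            Row { name: "a", date: 2 },
            Row { name: "b", date: 2 },
        ]
    );
}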
#[cfg(test)]
mod tests {
use jj_lib::backend::ChangeId;
use jj_lib::backend::MergedTreeId;
use jj_lib::backend::MillisSinceEpoch;
use jj_lib::backend::Signature;
use jj_lib::backend::Timestamp;
use jj_lib::backend::TreeId;
use jj_lib::op_store::RefTarget;
use super::*;
fn make_backend_commit(author: Signature, committer: Signature) -> Arc<backend::Commit> {
Arc::new(backend::Commit {
parents: vec![],
predecessors: vec![],
root_tree: MergedTreeId::Legacy(TreeId::new(vec![])),
change_id: ChangeId::new(vec![]),
description: String::new(),
author,
committer,
secure_sig: None,
})
}
fn make_default_signature() -> Signature {
Signature {
name: "Test User".to_owned(),
email: "test.user@g.com".to_owned(),
timestamp: Timestamp {
timestamp: MillisSinceEpoch(0),
tz_offset: 0,
},
}
}
fn commit_id_generator() -> impl FnMut() -> CommitId {
let mut iter = (1_u128..).map(|n| CommitId::new(n.to_le_bytes().into()));
move || iter.next().unwrap()
}
fn commit_ts_generator() -> impl FnMut() -> Timestamp {
// iter starts as 1, 1, 2, ... for test purposes
let mut iter = Some(1_i64).into_iter().chain(1_i64..).map(|ms| Timestamp {
timestamp: MillisSinceEpoch(ms),
tz_offset: 0,
});
move || iter.next().unwrap()
}
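// Illustrative sketch, not part of the diff above: the two generator helpers return a
// closure that hands out successive values from a captured iterator. The same pattern
// with the value type simplified to `u64`; `counter` is a made-up name.
fn counter() -> impl FnMut() -> u64 {
    let mut iter = 1_u64..;
    move || iter.next().unwrap()
}

#[test]
fn counter_yields_successive_values() {
    let mut next = counter();
    assert_eq!(next(), 1);
    assert_eq!(next(), 2);
}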
// Helper function to prepare test data, sort it, and produce a snapshot with the
// relevant information.
fn prepare_data_sort_and_snapshot(sort_keys: &[SortKey]) -> String {
let mut new_commit_id = commit_id_generator();
let mut new_timestamp = commit_ts_generator();
let names = ["bob", "alice", "eve", "bob", "bob"];
let emails = [
"bob@g.com",
"alice@g.com",
"eve@g.com",
"bob@g.com",
"bob@g.com",
];
let bookmark_names = ["feature", "bug-fix", "chore", "bug-fix", "feature"];
let remote_names = [None, Some("upstream"), None, Some("origin"), Some("origin")];
let deleted = [false, false, false, false, true];
let mut bookmark_items: Vec<RefListItem> = Vec::new();
let mut commits: HashMap<CommitId, Arc<backend::Commit>> = HashMap::new();
for (&name, &email, bookmark_name, remote_name, &is_deleted) in
itertools::izip!(&names, &emails, &bookmark_names, &remote_names, &deleted)
{
let commit_id = new_commit_id();
let mut b_name = "foo";
let mut author = make_default_signature();
let mut committer = make_default_signature();
if sort_keys.contains(&SortKey::Name) || sort_keys.contains(&SortKey::NameDesc) {
b_name = bookmark_name;
}
if sort_keys.contains(&SortKey::AuthorName)
|| sort_keys.contains(&SortKey::AuthorNameDesc)
{
author.name = String::from(name);
}
if sort_keys.contains(&SortKey::AuthorEmail)
|| sort_keys.contains(&SortKey::AuthorEmailDesc)
{
author.email = String::from(email);
}
if sort_keys.contains(&SortKey::AuthorDate)
|| sort_keys.contains(&SortKey::AuthorDateDesc)
{
author.timestamp = new_timestamp();
}
if sort_keys.contains(&SortKey::CommitterName)
|| sort_keys.contains(&SortKey::CommitterNameDesc)
{
committer.name = String::from(name);
}
if sort_keys.contains(&SortKey::CommitterEmail)
|| sort_keys.contains(&SortKey::CommitterEmailDesc)
{
committer.email = String::from(email);
}
if sort_keys.contains(&SortKey::CommitterDate)
|| sort_keys.contains(&SortKey::CommitterDateDesc)
{
committer.timestamp = new_timestamp();
}
if let Some(remote_name) = remote_name {
if is_deleted {
bookmark_items.push(RefListItem {
primary: CommitRef::remote_only(b_name, *remote_name, RefTarget::absent()),
tracked: vec![CommitRef::local_only(
b_name,
RefTarget::normal(commit_id.clone()),
)],
});
} else {
bookmark_items.push(RefListItem {
primary: CommitRef::remote_only(
b_name,
*remote_name,
RefTarget::normal(commit_id.clone()),
),
tracked: vec![],
});
}
} else {
bookmark_items.push(RefListItem {
primary: CommitRef::local_only(b_name, RefTarget::normal(commit_id.clone())),
tracked: vec![],
});
}
commits.insert(commit_id, make_backend_commit(author, committer));
}
// The sort function assumes that refs are already sorted by name.
// Uphold that assumption here.
bookmark_items.sort_by_key(|item| {
(
item.primary.name().to_owned(),
item.primary.remote_name().map(|name| name.to_owned()),
)
});
sort_and_snapshot(&mut bookmark_items, sort_keys, &commits)
}
// Helper function to sort refs and produce a snapshot with the relevant information.
fn sort_and_snapshot(
items: &mut [RefListItem],
sort_keys: &[SortKey],
commits: &HashMap<CommitId, Arc<backend::Commit>>,
) -> String {
sort(items, sort_keys, commits);
let to_commit = |item: &RefListItem| {
let id = item.primary.target().added_ids().next()?;
commits.get(id)
};
macro_rules! row_format {
($($args:tt)*) => {
format!("{:<20}{:<16}{:<17}{:<14}{:<16}{:<17}{}", $($args)*)
}
}
let header = row_format!(
"Name",
"AuthorName",
"AuthorEmail",
"AuthorDate",
"CommitterName",
"CommitterEmail",
"CommitterDate"
);
let rows: Vec<String> = items
.iter()
.map(|item| {
let name = [Some(item.primary.name()), item.primary.remote_name()]
.iter()
.flatten()
.join("@");
let commit = to_commit(item);
let author_name = commit
.map(|c| c.author.name.clone())
.unwrap_or_else(|| String::from("-"));
let author_email = commit
.map(|c| c.author.email.clone())
.unwrap_or_else(|| String::from("-"));
let author_date = commit
.map(|c| c.author.timestamp.timestamp.0.to_string())
.unwrap_or_else(|| String::from("-"));
let committer_name = commit
.map(|c| c.committer.name.clone())
.unwrap_or_else(|| String::from("-"));
let committer_email = commit
.map(|c| c.committer.email.clone())
.unwrap_or_else(|| String::from("-"));
let committer_date = commit
.map(|c| c.committer.timestamp.timestamp.0.to_string())
.unwrap_or_else(|| String::from("-"));
row_format!(
name,
author_name,
author_email,
author_date,
committer_name,
committer_email,
committer_date
)
})
.collect();
let mut result = vec![header];
result.extend(rows);
result.join("\n")
}
#[test]
fn test_sort_by_name() {
insta::assert_snapshot!(
prepare_data_sort_and_snapshot(&[SortKey::Name]), @r"
Name AuthorName AuthorEmail AuthorDate CommitterName CommitterEmail CommitterDate
bug-fix@origin Test User test.user@g.com 0 Test User test.user@g.com 0
bug-fix@upstream Test User test.user@g.com 0 Test User test.user@g.com 0
chore Test User test.user@g.com 0 Test User test.user@g.com 0
feature Test User test.user@g.com 0 Test User test.user@g.com 0
feature@origin - - - - - -
");
}
#[test]
fn test_sort_by_name_desc() {
insta::assert_snapshot!(
prepare_data_sort_and_snapshot(&[SortKey::NameDesc]), @r"
Name AuthorName AuthorEmail AuthorDate CommitterName CommitterEmail CommitterDate
feature@origin - - - - - -
feature Test User test.user@g.com 0 Test User test.user@g.com 0
chore Test User test.user@g.com 0 Test User test.user@g.com 0
bug-fix@upstream Test User test.user@g.com 0 Test User test.user@g.com 0
bug-fix@origin Test User test.user@g.com 0 Test User test.user@g.com 0
");
}
#[test]
fn test_sort_by_author_name() {
insta::assert_snapshot!(
prepare_data_sort_and_snapshot(&[SortKey::AuthorName]), @r"
Name AuthorName AuthorEmail AuthorDate CommitterName CommitterEmail CommitterDate
foo@origin - - - - - -
foo@upstream alice test.user@g.com 0 Test User test.user@g.com 0
foo bob test.user@g.com 0 Test User test.user@g.com 0
foo@origin bob test.user@g.com 0 Test User test.user@g.com 0
foo eve test.user@g.com 0 Test User test.user@g.com 0
");
}
#[test]
fn test_sort_by_author_name_desc() {
insta::assert_snapshot!(
prepare_data_sort_and_snapshot(&[SortKey::AuthorNameDesc]), @r"
Name AuthorName AuthorEmail AuthorDate CommitterName CommitterEmail CommitterDate
foo eve test.user@g.com 0 Test User test.user@g.com 0
foo bob test.user@g.com 0 Test User test.user@g.com 0
foo@origin bob test.user@g.com 0 Test User test.user@g.com 0
foo@upstream alice test.user@g.com 0 Test User test.user@g.com 0
foo@origin - - - - - -
");
}
#[test]
fn test_sort_by_author_email() {
insta::assert_snapshot!(
prepare_data_sort_and_snapshot(&[SortKey::AuthorEmail]), @r"
Name AuthorName AuthorEmail AuthorDate CommitterName CommitterEmail CommitterDate
foo@origin - - - - - -
foo@upstream Test User alice@g.com 0 Test User test.user@g.com 0
foo Test User bob@g.com 0 Test User test.user@g.com 0
foo@origin Test User bob@g.com 0 Test User test.user@g.com 0
foo Test User eve@g.com 0 Test User test.user@g.com 0
");
}
#[test]
fn test_sort_by_author_email_desc() {
insta::assert_snapshot!(
prepare_data_sort_and_snapshot(&[SortKey::AuthorEmailDesc]), @r"
Name AuthorName AuthorEmail AuthorDate CommitterName CommitterEmail CommitterDate
foo Test User eve@g.com 0 Test User test.user@g.com 0
foo Test User bob@g.com 0 Test User test.user@g.com 0
foo@origin Test User bob@g.com 0 Test User test.user@g.com 0
foo@upstream Test User alice@g.com 0 Test User test.user@g.com 0
foo@origin - - - - - -
");
}
#[test]
fn test_sort_by_author_date() {
insta::assert_snapshot!(
prepare_data_sort_and_snapshot(&[SortKey::AuthorDate]), @r"
Name AuthorName AuthorEmail AuthorDate CommitterName CommitterEmail CommitterDate
foo@origin - - - - - -
foo Test User test.user@g.com 1 Test User test.user@g.com 0
foo@upstream Test User test.user@g.com 1 Test User test.user@g.com 0
foo Test User test.user@g.com 2 Test User test.user@g.com 0
foo@origin Test User test.user@g.com 3 Test User test.user@g.com 0
");
}
#[test]
fn test_sort_by_author_date_desc() {
insta::assert_snapshot!(
prepare_data_sort_and_snapshot(&[SortKey::AuthorDateDesc]), @r"
Name AuthorName AuthorEmail AuthorDate CommitterName CommitterEmail CommitterDate
foo@origin Test User test.user@g.com 3 Test User test.user@g.com 0
foo Test User test.user@g.com 2 Test User test.user@g.com 0
foo Test User test.user@g.com 1 Test User test.user@g.com 0
foo@upstream Test User test.user@g.com 1 Test User test.user@g.com 0
foo@origin - - - - - -
");
}
#[test]
fn test_sort_by_committer_name() {
insta::assert_snapshot!(
prepare_data_sort_and_snapshot(&[SortKey::CommitterName]), @r"
Name AuthorName AuthorEmail AuthorDate CommitterName CommitterEmail CommitterDate
foo@origin - - - - - -
foo@upstream Test User test.user@g.com 0 alice test.user@g.com 0
foo Test User test.user@g.com 0 bob test.user@g.com 0
foo@origin Test User test.user@g.com 0 bob test.user@g.com 0
foo Test User test.user@g.com 0 eve test.user@g.com 0
");
}
#[test]
fn test_sort_by_committer_name_desc() {
insta::assert_snapshot!(
prepare_data_sort_and_snapshot(&[SortKey::CommitterNameDesc]), @r"
Name AuthorName AuthorEmail AuthorDate CommitterName CommitterEmail CommitterDate
foo Test User test.user@g.com 0 eve test.user@g.com 0
foo Test User test.user@g.com 0 bob test.user@g.com 0
foo@origin Test User test.user@g.com 0 bob test.user@g.com 0
foo@upstream Test User test.user@g.com 0 alice test.user@g.com 0
foo@origin - - - - - -
");
}
#[test]
fn test_sort_by_committer_email() {
insta::assert_snapshot!(
prepare_data_sort_and_snapshot(&[SortKey::CommitterEmail]), @r"
Name AuthorName AuthorEmail AuthorDate CommitterName CommitterEmail CommitterDate
foo@origin - - - - - -
foo@upstream Test User test.user@g.com 0 Test User alice@g.com 0
foo Test User test.user@g.com 0 Test User bob@g.com 0
foo@origin Test User test.user@g.com 0 Test User bob@g.com 0
foo Test User test.user@g.com 0 Test User eve@g.com 0
");
}
#[test]
fn test_sort_by_committer_email_desc() {
insta::assert_snapshot!(
prepare_data_sort_and_snapshot(&[SortKey::CommitterEmailDesc]), @r"
Name AuthorName AuthorEmail AuthorDate CommitterName CommitterEmail CommitterDate
foo Test User test.user@g.com 0 Test User eve@g.com 0
foo Test User test.user@g.com 0 Test User bob@g.com 0
foo@origin Test User test.user@g.com 0 Test User bob@g.com 0
foo@upstream Test User test.user@g.com 0 Test User alice@g.com 0
foo@origin - - - - - -
");
}
#[test]
fn test_sort_by_committer_date() {
insta::assert_snapshot!(
prepare_data_sort_and_snapshot(&[SortKey::CommitterDate]), @r"
Name AuthorName AuthorEmail AuthorDate CommitterName CommitterEmail CommitterDate
foo@origin - - - - - -
foo Test User test.user@g.com 0 Test User test.user@g.com 1
foo@upstream Test User test.user@g.com 0 Test User test.user@g.com 1
foo Test User test.user@g.com 0 Test User test.user@g.com 2
foo@origin Test User test.user@g.com 0 Test User test.user@g.com 3
");
}
#[test]
fn test_sort_by_committer_date_desc() {
insta::assert_snapshot!(
prepare_data_sort_and_snapshot(&[SortKey::CommitterDateDesc]), @r"
Name AuthorName AuthorEmail AuthorDate CommitterName CommitterEmail CommitterDate
foo@origin Test User test.user@g.com 0 Test User test.user@g.com 3
foo Test User test.user@g.com 0 Test User test.user@g.com 2
foo Test User test.user@g.com 0 Test User test.user@g.com 1
foo@upstream Test User test.user@g.com 0 Test User test.user@g.com 1
foo@origin - - - - - -
");
}
#[test]
fn test_sort_by_author_date_desc_and_name() {
insta::assert_snapshot!(
prepare_data_sort_and_snapshot(&[SortKey::AuthorDateDesc, SortKey::Name]), @r"
Name AuthorName AuthorEmail AuthorDate CommitterName CommitterEmail CommitterDate
bug-fix@origin Test User test.user@g.com 3 Test User test.user@g.com 0
chore Test User test.user@g.com 2 Test User test.user@g.com 0
bug-fix@upstream Test User test.user@g.com 1 Test User test.user@g.com 0
feature Test User test.user@g.com 1 Test User test.user@g.com 0
feature@origin - - - - - -
");
}
#[test]
fn test_sort_by_committer_name_and_name_desc() {
insta::assert_snapshot!(
prepare_data_sort_and_snapshot(&[SortKey::CommitterName, SortKey::NameDesc]), @r"
Name AuthorName AuthorEmail AuthorDate CommitterName CommitterEmail CommitterDate
feature@origin - - - - - -
bug-fix@upstream Test User test.user@g.com 0 alice test.user@g.com 0
feature Test User test.user@g.com 0 bob test.user@g.com 0
bug-fix@origin Test User test.user@g.com 0 bob test.user@g.com 0
chore Test User test.user@g.com 0 eve test.user@g.com 0
");
}
}

View File

@ -24,10 +24,9 @@ mod untrack;
use itertools::Itertools as _;
use jj_lib::backend::CommitId;
use jj_lib::git;
use jj_lib::op_store::RefTarget;
use jj_lib::op_store::RemoteRef;
use jj_lib::ref_name::RefName;
use jj_lib::ref_name::RemoteRefSymbol;
use jj_lib::repo::Repo;
use jj_lib::str_util::StringPattern;
use jj_lib::view::View;
@ -51,20 +50,16 @@ use self::track::BookmarkTrackArgs;
use self::untrack::cmd_bookmark_untrack;
use self::untrack::BookmarkUntrackArgs;
use crate::cli_util::CommandHelper;
use crate::cli_util::RemoteBookmarkName;
use crate::cli_util::RemoteBookmarkNamePattern;
use crate::command_error::user_error;
use crate::command_error::CommandError;
use crate::ui::Ui;
// Unlike most other aliases, `b` is defined in the config and can be overridden
// by the user.
/// Manage bookmarks [default alias: b]
/// Manage bookmarks
///
/// See [`jj help -k bookmarks`] for more information.
///
/// [`jj help -k bookmarks`]:
/// https://jj-vcs.github.io/jj/latest/bookmarks
/// For information about bookmarks, see
/// https://martinvonz.github.io/jj/latest/bookmarks.
#[derive(clap::Subcommand, Clone, Debug)]
pub enum BookmarkCommand {
#[command(visible_alias("c"))]
@ -107,7 +102,7 @@ pub fn cmd_bookmark(
fn find_local_bookmarks<'a>(
view: &'a View,
name_patterns: &[StringPattern],
) -> Result<Vec<(&'a RefName, &'a RefTarget)>, CommandError> {
) -> Result<Vec<(&'a str, &'a RefTarget)>, CommandError> {
find_bookmarks_with(name_patterns, |pattern| {
view.local_bookmarks_matching(pattern).map(Ok)
})
@ -116,11 +111,11 @@ fn find_local_bookmarks<'a>(
fn find_bookmarks_with<'a, 'b, V, I>(
name_patterns: &'b [StringPattern],
mut find_matches: impl FnMut(&'b StringPattern) -> I,
) -> Result<Vec<(&'a RefName, V)>, CommandError>
) -> Result<Vec<(&'a str, V)>, CommandError>
where
I: Iterator<Item = Result<(&'a RefName, V), CommandError>>,
I: Iterator<Item = Result<(&'a str, V), CommandError>>,
{
let mut matching_bookmarks: Vec<(&'a RefName, V)> = vec![];
let mut matching_bookmarks: Vec<(&'a str, V)> = vec![];
let mut unmatched_patterns = vec![];
for pattern in name_patterns {
let mut matches = find_matches(pattern).peekable();
@ -146,12 +141,19 @@ where
fn find_remote_bookmarks<'a>(
view: &'a View,
name_patterns: &[RemoteBookmarkNamePattern],
) -> Result<Vec<(RemoteRefSymbol<'a>, &'a RemoteRef)>, CommandError> {
) -> Result<Vec<(RemoteBookmarkName, &'a RemoteRef)>, CommandError> {
let mut matching_bookmarks = vec![];
let mut unmatched_patterns = vec![];
for pattern in name_patterns {
let mut matches = view
.remote_bookmarks_matching(&pattern.bookmark, &pattern.remote)
.map(|((bookmark, remote), remote_ref)| {
let name = RemoteBookmarkName {
bookmark: bookmark.to_owned(),
remote: remote.to_owned(),
};
(name, remote_ref)
})
.peekable();
if matches.peek().is_none() {
unmatched_patterns.push(pattern);
@ -160,8 +162,8 @@ fn find_remote_bookmarks<'a>(
}
match &unmatched_patterns[..] {
[] => {
matching_bookmarks.sort_unstable_by(|(sym1, _), (sym2, _)| sym1.cmp(sym2));
matching_bookmarks.dedup_by(|(sym1, _), (sym2, _)| sym1 == sym2);
matching_bookmarks.sort_unstable_by(|(name1, _), (name2, _)| name1.cmp(name2));
matching_bookmarks.dedup_by(|(name1, _), (name2, _)| name1 == name2);
Ok(matching_bookmarks)
}
[pattern] if pattern.is_exact() => {
@ -174,6 +176,17 @@ fn find_remote_bookmarks<'a>(
}
}
/// Whether the `bookmark` has any tracked remotes (i.e., whether it is a tracking
/// local bookmark).
fn has_tracked_remote_bookmarks(view: &View, bookmark: &str) -> bool {
view.remote_bookmarks_matching(
&StringPattern::exact(bookmark),
&StringPattern::everything(),
)
.filter(|&((_, remote_name), _)| remote_name != git::REMOTE_NAME_FOR_LOCAL_GIT_REPO)
.any(|(_, remote_ref)| remote_ref.is_tracking())
}
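// Illustrative sketch, not part of the diff above: the predicate ignores the special
// local-git remote and reports whether any remaining remote ref is tracking. A toy
// std-only model; `ToyRemoteRef` is a made-up type, and "git" stands in for the special
// remote name used for the backing Git repo.
struct ToyRemoteRef {
    remote: &'static str,
    is_tracking: bool,
}

fn toy_has_tracked_remotes(refs: &[ToyRemoteRef]) -> bool {
    refs.iter()
        // Skip the special remote that mirrors the local Git repo.
        .filter(|remote_ref| remote_ref.remote != "git")
        .any(|remote_ref| remote_ref.is_tracking)
}

fn main() {
    let refs = [
        ToyRemoteRef { remote: "git", is_tracking: true },
        ToyRemoteRef { remote: "origin", is_tracking: false },
    ];
    // Only the special "git" remote is tracking, so the answer is false.
    assert!(!toy_has_tracked_remotes(&refs));
}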
fn is_fast_forward(repo: &dyn Repo, old_target: &RefTarget, new_target_id: &CommitId) -> bool {
if old_target.is_present() {
// Strictly speaking, "all" old targets should be ancestors, but we allow

View File

@ -12,8 +12,6 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use clap_complete::ArgValueCandidates;
use clap_complete::ArgValueCompleter;
use itertools::Itertools as _;
use jj_lib::object_id::ObjectId as _;
use jj_lib::op_store::RefTarget;
@ -25,7 +23,6 @@ use crate::cli_util::CommandHelper;
use crate::cli_util::RevisionArg;
use crate::command_error::user_error_with_hint;
use crate::command_error::CommandError;
use crate::complete;
use crate::ui::Ui;
/// Move existing bookmarks to target revision
@ -44,24 +41,17 @@ use crate::ui::Ui;
#[command(group(clap::ArgGroup::new("source").multiple(true).required(true)))]
pub struct BookmarkMoveArgs {
/// Move bookmarks from the given revisions
#[arg(
long, short,
group = "source",
value_name = "REVSETS",
add = ArgValueCompleter::new(complete::revset_expression_all),
)]
// We intentionally do not support the short `-f` for `--from` since it
// could be confused with a shorthand for `--force`, and people might not
// realize they need `-B`/`--allow-backwards` instead.
#[arg(long, group = "source", value_name = "REVISIONS")]
from: Vec<RevisionArg>,
// TODO(#5374): Make required in jj 0.32+
/// Move bookmarks to this revision
// Currently this defaults to the working copy, but in the near
// future it will have to be specified explicitly.
#[arg(
long, short,
value_name = "REVSET",
add = ArgValueCompleter::new(complete::revset_expression_all),
)]
to: Option<RevisionArg>,
// We intentionally do not support the short `-t` for `--to` since we don't
// support `-f` for `--from`.
#[arg(long, default_value = "@", value_name = "REVISION")]
to: RevisionArg,
/// Allow moving bookmarks backwards or sideways
#[arg(long, short = 'B')]
@ -70,15 +60,9 @@ pub struct BookmarkMoveArgs {
/// Move bookmarks matching the given name patterns
///
/// By default, the specified name matches exactly. Use `glob:` prefix to
/// select bookmarks by [wildcard pattern].
///
/// [wildcard pattern]:
/// https://jj-vcs.github.io/jj/latest/revsets/#string-patterns
#[arg(
group = "source",
value_parser = StringPattern::parse,
add = ArgValueCandidates::new(complete::local_bookmarks),
)]
/// select bookmarks by wildcard pattern. For details, see
/// https://martinvonz.github.io/jj/latest/revsets/#string-patterns.
#[arg(group = "source", value_parser = StringPattern::parse)]
names: Vec<StringPattern>,
}
@ -89,15 +73,8 @@ pub fn cmd_bookmark_move(
) -> Result<(), CommandError> {
let mut workspace_command = command.workspace_helper(ui)?;
let repo = workspace_command.repo().clone();
if args.to.is_none() {
writeln!(
ui.warning_default(),
"Target revision was not specified, defaulting to the working copy (--to=@). In the \
near future it will be required to explicitly specify it."
)?;
}
let target_commit =
workspace_command.resolve_single_rev(ui, args.to.as_ref().unwrap_or(&RevisionArg::AT))?;
let target_commit = workspace_command.resolve_single_rev(ui, &args.to)?;
let matched_bookmarks = {
let is_source_ref: Box<dyn Fn(&RefTarget) -> _> = if !args.from.is_empty() {
let is_source_commit = workspace_command
@ -151,10 +128,7 @@ pub fn cmd_bookmark_move(
.find(|(_, old_target)| !is_fast_forward(repo.as_ref(), old_target, target_commit.id()))
{
return Err(user_error_with_hint(
format!(
"Refusing to move bookmark backwards or sideways: {name}",
name = name.as_symbol()
),
format!("Refusing to move bookmark backwards or sideways: {name}"),
"Use --allow-backwards to allow it.",
));
}
@ -182,10 +156,7 @@ pub fn cmd_bookmark_move(
ui,
format!(
"point bookmark {names} to commit {id}",
names = matched_bookmarks
.iter()
.map(|(name, _)| name.as_symbol())
.join(", "),
names = matched_bookmarks.iter().map(|(name, _)| name).join(", "),
id = target_commit.id().hex()
),
)?;

View File

@ -12,16 +12,12 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use clap_complete::ArgValueCandidates;
use jj_lib::op_store::RefTarget;
use jj_lib::ref_name::RefNameBuf;
use crate::cli_util::has_tracked_remote_bookmarks;
use super::has_tracked_remote_bookmarks;
use crate::cli_util::CommandHelper;
use crate::command_error::user_error;
use crate::command_error::CommandError;
use crate::complete;
use crate::revset_util;
use crate::ui::Ui;
/// Rename `old` bookmark name to `new` bookmark name
@ -30,15 +26,10 @@ use crate::ui::Ui;
#[derive(clap::Args, Clone, Debug)]
pub struct BookmarkRenameArgs {
/// The old name of the bookmark
#[arg(
value_parser = revset_util::parse_bookmark_name,
add = ArgValueCandidates::new(complete::local_bookmarks),
)]
old: RefNameBuf,
old: String,
/// The new name of the bookmark
#[arg(value_parser = revset_util::parse_bookmark_name)]
new: RefNameBuf,
new: String,
}
pub fn cmd_bookmark_rename(
@ -51,17 +42,13 @@ pub fn cmd_bookmark_rename(
let old_bookmark = &args.old;
let ref_target = view.get_local_bookmark(old_bookmark).clone();
if ref_target.is_absent() {
return Err(user_error(format!(
"No such bookmark: {old_bookmark}",
old_bookmark = old_bookmark.as_symbol()
)));
return Err(user_error(format!("No such bookmark: {old_bookmark}")));
}
let new_bookmark = &args.new;
if view.get_local_bookmark(new_bookmark).is_present() {
return Err(user_error(format!(
"Bookmark already exists: {new_bookmark}",
new_bookmark = new_bookmark.as_symbol()
"Bookmark already exists: {new_bookmark}"
)));
}
@ -72,11 +59,7 @@ pub fn cmd_bookmark_rename(
.set_local_bookmark_target(old_bookmark, RefTarget::absent());
tx.finish(
ui,
format!(
"rename bookmark {old_bookmark} to {new_bookmark}",
old_bookmark = old_bookmark.as_symbol(),
new_bookmark = new_bookmark.as_symbol()
),
format!("rename bookmark {old_bookmark} to {new_bookmark}"),
)?;
let view = workspace_command.repo().view();
@ -84,15 +67,12 @@ pub fn cmd_bookmark_rename(
writeln!(
ui.warning_default(),
"Tracked remote bookmarks for bookmark {old_bookmark} were not renamed.",
old_bookmark = old_bookmark.as_symbol(),
)?;
writeln!(
ui.hint_default(),
"To rename the bookmark on the remote, you can `jj git push --bookmark \
{old_bookmark}` first (to delete it on the remote), and then `jj git push --bookmark \
{new_bookmark}`. `jj git push --all` would also be sufficient.",
old_bookmark = old_bookmark.as_symbol(),
new_bookmark = new_bookmark.as_symbol()
{new_bookmark}`. `jj git push --all` would also be sufficient."
)?;
}
if has_tracked_remote_bookmarks(view, new_bookmark) {
@ -101,14 +81,11 @@ pub fn cmd_bookmark_rename(
// allowed even if the original old bookmark had tracked remotes.
writeln!(
ui.warning_default(),
"Tracked remote bookmarks for bookmark {new_bookmark} exist.",
new_bookmark = new_bookmark.as_symbol()
"Tracked remote bookmarks for bookmark {new_bookmark} exist."
)?;
writeln!(
ui.hint_default(),
"Run `jj bookmark untrack 'glob:{new_bookmark}@*'` to disassociate them.",
// TODO: use .as_symbol() if pattern parser is ported to revset
new_bookmark = new_bookmark.as_str()
"Run `jj bookmark untrack 'glob:{new_bookmark}@*'` to disassociate them."
)?;
}

View File

@ -12,37 +12,23 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use clap_complete::ArgValueCandidates;
use clap_complete::ArgValueCompleter;
use itertools::Itertools as _;
use clap::builder::NonEmptyStringValueParser;
use jj_lib::object_id::ObjectId as _;
use jj_lib::op_store::RefTarget;
use jj_lib::ref_name::RefNameBuf;
use super::has_tracked_remote_bookmarks;
use super::is_fast_forward;
use crate::cli_util::has_tracked_remote_bookmarks;
use crate::cli_util::CommandHelper;
use crate::cli_util::RevisionArg;
use crate::command_error::user_error_with_hint;
use crate::command_error::CommandError;
use crate::complete;
use crate::revset_util;
use crate::ui::Ui;
/// Create or update a bookmark to point to a certain commit
#[derive(clap::Args, Clone, Debug)]
pub struct BookmarkSetArgs {
// TODO(#5374): Make required in jj 0.32+
/// The bookmark's target revision
//
// Currently the target revision defaults to the working copy if not specified, but in the
// near future it will have to be specified explicitly.
#[arg(
long, short,
visible_alias = "to",
value_name = "REVSET",
add = ArgValueCompleter::new(complete::revset_expression_all),
)]
#[arg(long, short, visible_alias = "to")]
revision: Option<RevisionArg>,
/// Allow moving the bookmark backwards or sideways
@ -50,12 +36,8 @@ pub struct BookmarkSetArgs {
allow_backwards: bool,
/// The bookmarks to update
#[arg(
required = true,
value_parser = revset_util::parse_bookmark_name,
add = ArgValueCandidates::new(complete::local_bookmarks),
)]
names: Vec<RefNameBuf>,
#[arg(required = true, value_parser = NonEmptyStringValueParser::new())]
names: Vec<String>,
}
pub fn cmd_bookmark_set(
@ -64,13 +46,6 @@ pub fn cmd_bookmark_set(
args: &BookmarkSetArgs,
) -> Result<(), CommandError> {
let mut workspace_command = command.workspace_helper(ui)?;
if args.revision.is_none() {
writeln!(
ui.warning_default(),
"Target revision was not specified, defaulting to the working copy (--revision=@). In \
the near future it will be required to explicitly specify target revision."
)?;
}
let target_commit = workspace_command
.resolve_single_rev(ui, args.revision.as_ref().unwrap_or(&RevisionArg::AT))?;
let repo = workspace_command.repo().as_ref();
@ -88,10 +63,7 @@ pub fn cmd_bookmark_set(
}
if !args.allow_backwards && !is_fast_forward(repo, old_target, target_commit.id()) {
return Err(user_error_with_hint(
format!(
"Refusing to move bookmark backwards or sideways: {name}",
name = name.as_symbol()
),
format!("Refusing to move bookmark backwards or sideways: {name}"),
"Use --allow-backwards to allow it.",
));
}
@ -123,12 +95,19 @@ pub fn cmd_bookmark_set(
if bookmark_names.len() > 1 && args.revision.is_none() {
writeln!(ui.hint_default(), "Use -r to specify the target revision.")?;
}
if new_bookmark_count > 0 {
// TODO: delete this hint in jj 0.25+
writeln!(
ui.hint_default(),
"Consider using `jj bookmark move` if your intention was to move existing bookmarks."
)?;
}
tx.finish(
ui,
format!(
"point bookmark {names} to commit {id}",
names = bookmark_names.iter().map(|n| n.as_symbol()).join(", "),
names = bookmark_names.join(", "),
id = target_commit.id().hex()
),
)?;

View File

@ -13,18 +13,15 @@
// limitations under the License.
use std::collections::HashMap;
use std::rc::Rc;
use clap_complete::ArgValueCandidates;
use itertools::Itertools as _;
use super::find_remote_bookmarks;
use crate::cli_util::CommandHelper;
use crate::cli_util::RemoteBookmarkNamePattern;
use crate::command_error::CommandError;
use crate::commit_templater::CommitRef;
use crate::complete;
use crate::templater::TemplateRenderer;
use crate::commit_templater::CommitTemplateLanguage;
use crate::commit_templater::RefName;
use crate::ui::Ui;
/// Start tracking given remote bookmarks
@ -37,17 +34,11 @@ pub struct BookmarkTrackArgs {
/// Remote bookmarks to track
///
/// By default, the specified name matches exactly. Use `glob:` prefix to
/// select bookmarks by [wildcard pattern].
/// select bookmarks by wildcard pattern. For details, see
/// https://martinvonz.github.io/jj/latest/revsets/#string-patterns.
///
/// Examples: bookmark@remote, glob:main@*, glob:jjfan-*@upstream
///
/// [wildcard pattern]:
/// https://jj-vcs.github.io/jj/latest/revsets/#string-patterns
#[arg(
required = true,
value_name = "BOOKMARK@REMOTE",
add = ArgValueCandidates::new(complete::untracked_bookmarks),
)]
#[arg(required = true, value_name = "BOOKMARK@REMOTE")]
names: Vec<RemoteBookmarkNamePattern>,
}
@ -57,53 +48,55 @@ pub fn cmd_bookmark_track(
args: &BookmarkTrackArgs,
) -> Result<(), CommandError> {
let mut workspace_command = command.workspace_helper(ui)?;
let repo = workspace_command.repo().clone();
let mut symbols = Vec::new();
for (symbol, remote_ref) in find_remote_bookmarks(repo.view(), &args.names)? {
if remote_ref.is_tracked() {
let view = workspace_command.repo().view();
let mut names = Vec::new();
for (name, remote_ref) in find_remote_bookmarks(view, &args.names)? {
if remote_ref.is_tracking() {
writeln!(
ui.warning_default(),
"Remote bookmark already tracked: {symbol}"
"Remote bookmark already tracked: {name}"
)?;
} else {
symbols.push(symbol);
names.push(name);
}
}
let mut tx = workspace_command.start_transaction();
for &symbol in &symbols {
tx.repo_mut().track_remote_bookmark(symbol);
for name in &names {
tx.repo_mut()
.track_remote_bookmark(&name.bookmark, &name.remote);
}
if !symbols.is_empty() {
if !names.is_empty() {
writeln!(
ui.status(),
"Started tracking {} remote bookmarks.",
symbols.len()
names.len()
)?;
}
tx.finish(
ui,
format!("track remote bookmark {}", symbols.iter().join(", ")),
format!("track remote bookmark {}", names.iter().join(", ")),
)?;
// Show conflicted bookmarks, if there are any.
if let Some(mut formatter) = ui.status_formatter() {
let template: TemplateRenderer<Rc<CommitRef>> = {
let template = {
let language = workspace_command.commit_template_language();
let text = workspace_command
let text = command
.settings()
.config()
.get::<String>("templates.bookmark_list")?;
workspace_command
.parse_template(ui, &language, &text)?
.parse_template(ui, &language, &text, CommitTemplateLanguage::wrap_ref_name)?
.labeled("bookmark_list")
};
let mut remote_per_bookmark: HashMap<_, Vec<_>> = HashMap::new();
for symbol in &symbols {
let mut remote_per_bookmark: HashMap<&str, Vec<&str>> = HashMap::new();
for n in &names {
remote_per_bookmark
.entry(symbol.name)
.entry(&n.bookmark)
.or_default()
.push(symbol.remote);
.push(&n.remote);
}
let bookmarks_to_list =
workspace_command
@ -116,18 +109,18 @@ pub fn cmd_bookmark_track(
for (name, bookmark_target) in bookmarks_to_list {
let local_target = bookmark_target.local_target;
let commit_ref = CommitRef::local(
let ref_name = RefName::local(
name,
local_target.clone(),
bookmark_target.remote_refs.iter().map(|x| x.1),
);
template.format(&commit_ref, formatter.as_mut())?;
template.format(&ref_name, formatter.as_mut())?;
for (remote_name, remote_ref) in bookmark_target.remote_refs {
if remote_per_bookmark[name].contains(&remote_name) {
let commit_ref =
CommitRef::remote(name, remote_name, remote_ref.clone(), local_target);
template.format(&commit_ref, formatter.as_mut())?;
let ref_name =
RefName::remote(name, remote_name, remote_ref.clone(), local_target);
template.format(&ref_name, formatter.as_mut())?;
}
}
}

View File

@ -12,39 +12,29 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use clap_complete::ArgValueCandidates;
use itertools::Itertools as _;
use jj_lib::git;
use super::find_remote_bookmarks;
use crate::cli_util::CommandHelper;
use crate::cli_util::RemoteBookmarkNamePattern;
use crate::command_error::CommandError;
use crate::complete;
use crate::ui::Ui;
/// Stop tracking given remote bookmarks
///
/// A non-tracking remote bookmark is just a pointer to the last-fetched remote
/// bookmark. It won't be imported as a local bookmark on future pulls.
///
/// If you want to forget a local bookmark while also untracking the
/// corresponding remote bookmarks, use `jj bookmark forget` instead.
#[derive(clap::Args, Clone, Debug)]
pub struct BookmarkUntrackArgs {
/// Remote bookmarks to untrack
///
/// By default, the specified name matches exactly. Use `glob:` prefix to
/// select bookmarks by [wildcard pattern].
/// select bookmarks by wildcard pattern. For details, see
/// https://martinvonz.github.io/jj/latest/revsets/#string-patterns.
///
/// Examples: bookmark@remote, glob:main@*, glob:jjfan-*@upstream
///
/// [wildcard pattern]:
/// https://jj-vcs.github.io/jj/latest/revsets/#string-patterns
#[arg(
required = true,
value_name = "BOOKMARK@REMOTE",
add = ArgValueCandidates::new(complete::tracked_bookmarks)
)]
#[arg(required = true, value_name = "BOOKMARK@REMOTE")]
names: Vec<RemoteBookmarkNamePattern>,
}
@ -54,39 +44,40 @@ pub fn cmd_bookmark_untrack(
args: &BookmarkUntrackArgs,
) -> Result<(), CommandError> {
let mut workspace_command = command.workspace_helper(ui)?;
let repo = workspace_command.repo().clone();
let mut symbols = Vec::new();
for (symbol, remote_ref) in find_remote_bookmarks(repo.view(), &args.names)? {
if jj_lib::git::is_special_git_remote(symbol.remote) {
let view = workspace_command.repo().view();
let mut names = Vec::new();
for (name, remote_ref) in find_remote_bookmarks(view, &args.names)? {
if name.remote == git::REMOTE_NAME_FOR_LOCAL_GIT_REPO {
// This restriction can be lifted if we want to support untracked @git
// bookmarks.
writeln!(
ui.warning_default(),
"Git-tracking bookmark cannot be untracked: {symbol}"
"Git-tracking bookmark cannot be untracked: {name}"
)?;
} else if !remote_ref.is_tracked() {
} else if !remote_ref.is_tracking() {
writeln!(
ui.warning_default(),
"Remote bookmark not tracked yet: {symbol}"
"Remote bookmark not tracked yet: {name}"
)?;
} else {
symbols.push(symbol);
names.push(name);
}
}
let mut tx = workspace_command.start_transaction();
for &symbol in &symbols {
tx.repo_mut().untrack_remote_bookmark(symbol);
for name in &names {
tx.repo_mut()
.untrack_remote_bookmark(&name.bookmark, &name.remote);
}
if !symbols.is_empty() {
if !names.is_empty() {
writeln!(
ui.status(),
"Stopped tracking {} remote bookmarks.",
symbols.len()
names.len()
)?;
}
tx.finish(
ui,
format!("untrack remote bookmark {}", symbols.iter().join(", ")),
format!("untrack remote bookmark {}", names.iter().join(", ")),
)?;
Ok(())
}

View File

@ -0,0 +1,70 @@
// Copyright 2020 The Jujutsu Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use jj_lib::object_id::ObjectId;
use tracing::instrument;
use crate::cli_util::CommandHelper;
use crate::cli_util::RevisionArg;
use crate::command_error::CommandError;
use crate::description_util::join_message_paragraphs;
use crate::ui::Ui;
/// Create a new, empty change and edit it in the working copy (DEPRECATED, use
/// `jj new`)
///
/// For more information, see
/// https://martinvonz.github.io/jj/latest/working-copy/.
#[derive(clap::Args, Clone, Debug)]
pub(crate) struct CheckoutArgs {
/// The revision to update to
revision: RevisionArg,
/// Ignored (but lets you pass `-r` for consistency with other commands)
#[arg(short = 'r', hide = true)]
unused_revision: bool,
/// The change description to use
#[arg(long = "message", short, value_name = "MESSAGE")]
message_paragraphs: Vec<String>,
}
#[instrument(skip_all)]
pub(crate) fn cmd_checkout(
ui: &mut Ui,
command: &CommandHelper,
args: &CheckoutArgs,
) -> Result<(), CommandError> {
writeln!(
ui.warning_default(),
"`jj checkout` is deprecated; use `jj new` instead, which is equivalent"
)?;
writeln!(
ui.warning_default(),
"`jj checkout` will be removed in a future version, and this will be a hard error"
)?;
let mut workspace_command = command.workspace_helper(ui)?;
let target = workspace_command.resolve_single_rev(ui, &args.revision)?;
let mut tx = workspace_command.start_transaction();
let commit_builder = tx
.repo_mut()
.new_commit(
command.settings(),
vec![target.id().clone()],
target.tree_id().clone(),
)
.set_description(join_message_paragraphs(&args.message_paragraphs));
let new_commit = commit_builder.write()?;
tx.edit(&new_commit).unwrap();
tx.finish(ui, format!("check out commit {}", target.id().hex()))?;
Ok(())
}

View File

@ -12,17 +12,14 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use clap_complete::ArgValueCompleter;
use jj_lib::backend::Signature;
use jj_lib::object_id::ObjectId as _;
use jj_lib::repo::Repo as _;
use jj_lib::object_id::ObjectId;
use jj_lib::repo::Repo;
use tracing::instrument;
use crate::cli_util::CommandHelper;
use crate::command_error::user_error;
use crate::command_error::CommandError;
use crate::complete;
use crate::description_util::add_trailers;
use crate::description_util::description_template;
use crate::description_util::edit_description;
use crate::description_util::join_message_paragraphs;
@ -31,6 +28,7 @@ use crate::ui::Ui;
/// Update the description and create a new change on top.
#[derive(clap::Args, Clone, Debug)]
#[command(visible_aliases=&["ci"])]
pub(crate) struct CommitArgs {
/// Interactively choose which changes to include in the first commit
#[arg(short, long)]
@ -42,11 +40,7 @@ pub(crate) struct CommitArgs {
#[arg(long = "message", short, value_name = "MESSAGE")]
message_paragraphs: Vec<String>,
/// Put these paths in the first commit
#[arg(
value_name = "FILESETS",
value_hint = clap::ValueHint::AnyPath,
add = ArgValueCompleter::new(complete::modified_files),
)]
#[arg(value_hint = clap::ValueHint::AnyPath)]
paths: Vec<String>,
/// Reset the author to the configured user
///
@ -88,7 +82,6 @@ pub(crate) fn cmd_commit(
let advanceable_bookmarks = workspace_command.get_advanceable_bookmarks(commit.parent_ids())?;
let diff_selector =
workspace_command.diff_selector(ui, args.tool.as_deref(), args.interactive)?;
let text_editor = workspace_command.text_editor()?;
let mut tx = workspace_command.start_transaction();
let base_tree = commit.parent_tree(tx.repo())?;
let format_instructions = || {
@ -117,7 +110,10 @@ new working-copy commit.
)?;
}
let mut commit_builder = tx.repo_mut().rewrite_commit(&commit).detach();
let mut commit_builder = tx
.repo_mut()
.rewrite_commit(command.settings(), &commit)
.detach();
commit_builder.set_tree_id(tree_id);
if args.reset_author {
commit_builder.set_author(commit_builder.committer().clone());
@ -132,38 +128,38 @@ new working-copy commit.
}
let description = if !args.message_paragraphs.is_empty() {
let mut description = join_message_paragraphs(&args.message_paragraphs);
if !description.is_empty() {
// The first trailer would become the first line of the description.
// Also, a commit with no description is treated in a special way in jujutsu: it
// can be discarded as soon as it's no longer the working copy. Adding a
// trailer to an empty description would break that logic.
commit_builder.set_description(description);
description = add_trailers(ui, &tx, &commit_builder)?;
}
description
join_message_paragraphs(&args.message_paragraphs)
} else {
let description = add_trailers(ui, &tx, &commit_builder)?;
commit_builder.set_description(description);
if commit_builder.description().is_empty() {
commit_builder.set_description(command.settings().default_description());
}
let temp_commit = commit_builder.write_hidden()?;
let description = description_template(ui, &tx, "", &temp_commit)?;
edit_description(&text_editor, &description)?
let template = description_template(ui, &tx, "", &temp_commit)?;
edit_description(
tx.base_workspace_helper().repo_path(),
&template,
command.settings(),
)?
};
commit_builder.set_description(description);
let new_commit = commit_builder.write(tx.repo_mut())?;
let workspace_names = tx.repo().view().workspaces_for_wc_commit_id(commit.id());
if !workspace_names.is_empty() {
let workspace_ids = tx.repo().view().workspaces_for_wc_commit_id(commit.id());
if !workspace_ids.is_empty() {
let new_wc_commit = tx
.repo_mut()
.new_commit(vec![new_commit.id().clone()], commit.tree_id().clone())
.new_commit(
command.settings(),
vec![new_commit.id().clone()],
commit.tree_id().clone(),
)
.write()?;
// Does nothing if there's no bookmarks to advance.
tx.advance_bookmarks(advanceable_bookmarks, new_commit.id());
for name in workspace_names {
tx.repo_mut().edit(name, &new_wc_commit).unwrap();
for workspace_id in workspace_ids {
tx.repo_mut().edit(workspace_id, &new_wc_commit).unwrap();
}
}
tx.finish(ui, format!("commit {}", commit.id().hex()))?;
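
As the comment in the `-m` branch above explains, trailers are only appended to a non-empty description, so an empty (and therefore discardable) commit stays empty and the first trailer never becomes the subject line. A self-contained sketch of that guard, with a plain key/value list standing in for the configured trailer template:

fn description_with_trailers(description: &str, trailers: &[(&str, &str)]) -> String {
    if description.is_empty() {
        // Keep it empty: adding a trailer would make the commit non-discardable.
        return String::new();
    }
    let mut out = description.trim_end().to_string();
    out.push_str("\n\n");
    for (key, value) in trailers {
        out.push_str(key);
        out.push_str(": ");
        out.push_str(value);
        out.push('\n');
    }
    out
}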

View File

@ -12,12 +12,12 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use jj_lib::config::ConfigLayer;
use tracing::instrument;
use super::ConfigLevelArgs;
use crate::cli_util::get_new_config_file_path;
use crate::cli_util::run_ui_editor;
use crate::cli_util::CommandHelper;
use crate::command_error::print_error_sources;
use crate::command_error::CommandError;
use crate::ui::Ui;
@ -33,43 +33,10 @@ pub struct ConfigEditArgs {
#[instrument(skip_all)]
pub fn cmd_config_edit(
ui: &mut Ui,
_ui: &mut Ui,
command: &CommandHelper,
args: &ConfigEditArgs,
) -> Result<(), CommandError> {
let editor = command.text_editor()?;
let file = args.level.edit_config_file(ui, command)?;
if !file.path().exists() {
file.save()?;
}
// Editing again and again until either of these conditions is met
// 1. The config is OK
// 2. The user restores previous one
loop {
editor.edit_file(file.path())?;
// Trying to load back config. If error, prompt to continue editing
if let Err(e) = ConfigLayer::load_from_file(file.layer().source, file.path().to_path_buf())
{
writeln!(
ui.warning_default(),
"An error has been found inside the config:"
)?;
print_error_sources(ui, Some(&e))?;
let continue_editing = ui.prompt_yes_no(
"Do you want to keep editing the file? If not, previous config will be restored.",
Some(true),
)?;
if !continue_editing {
// Saving back previous config
file.save()?;
break;
}
} else {
// config is OK
break;
}
}
Ok(())
let config_path = get_new_config_file_path(&args.level.expect_source_kind(), command)?;
run_ui_editor(command.settings(), &config_path)
}
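
One version of `cmd_config_edit` above loops: open the editor, try to load the file back as config, and on a parse error ask whether to keep editing or restore the previous contents. The same edit/validate/retry shape as a self-contained sketch, with the editor, the validation, and the prompt replaced by caller-supplied closures (all hypothetical stand-ins):

fn edit_until_valid(
    mut run_editor: impl FnMut() -> std::io::Result<String>,
    validate: impl Fn(&str) -> Result<(), String>,
    mut keep_editing: impl FnMut(&str) -> bool,
) -> std::io::Result<Option<String>> {
    loop {
        let edited = run_editor()?;
        match validate(&edited) {
            // Config is OK: keep the new contents.
            Ok(()) => return Ok(Some(edited)),
            Err(err) => {
                // Invalid: either edit again, or give up and signal that the
                // previous config should be restored.
                if !keep_editing(&err) {
                    return Ok(None);
                }
            }
        }
    }
}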

View File

@ -14,14 +14,12 @@
use std::io::Write as _;
use clap_complete::ArgValueCandidates;
use jj_lib::config::ConfigNamePathBuf;
use jj_lib::config::ConfigValue;
use tracing::instrument;
use crate::cli_util::CommandHelper;
use crate::command_error::config_error;
use crate::command_error::CommandError;
use crate::complete;
use crate::config::ConfigNamePathBuf;
use crate::ui::Ui;
/// Get the value of a given config option.
@ -36,7 +34,7 @@ use crate::ui::Ui;
#[derive(clap::Args, Clone, Debug)]
#[command(verbatim_doc_comment)]
pub struct ConfigGetArgs {
#[arg(required = true, add = ArgValueCandidates::new(complete::leaf_config_keys))]
#[arg(required = true)]
name: ConfigNamePathBuf,
}
@ -46,24 +44,34 @@ pub fn cmd_config_get(
command: &CommandHelper,
args: &ConfigGetArgs,
) -> Result<(), CommandError> {
let stringified = command
.settings()
.get_value_with(&args.name, |value| match value {
// Remove extra formatting from a string value
ConfigValue::String(v) => Ok(v.into_value()),
// Print other values in TOML syntax (but whitespace trimmed)
ConfigValue::Integer(_)
| ConfigValue::Float(_)
| ConfigValue::Boolean(_)
| ConfigValue::Datetime(_) => Ok(value.decorated("", "").to_string()),
// TODO: maybe okay to just print array or table in TOML syntax?
ConfigValue::Array(_) => {
Err("Expected a value convertible to a string, but is an array")
}
ConfigValue::InlineTable(_) => {
Err("Expected a value convertible to a string, but is a table")
let value = args
.name
.lookup_value(command.settings().config())
.and_then(|value| value.into_string())
.map_err(|err| match err {
config::ConfigError::Type {
origin,
unexpected,
expected,
key,
} => {
let expected = format!("a value convertible to {expected}");
// Copied from `impl fmt::Display for ConfigError`. We can't use
// the `Display` impl directly because `expected` is required to
// be a `'static str`.
let mut buf = String::new();
use std::fmt::Write;
write!(buf, "invalid type: {unexpected}, expected {expected}").unwrap();
if let Some(key) = key {
write!(buf, " for key `{key}`").unwrap();
}
if let Some(origin) = origin {
write!(buf, " in {origin}").unwrap();
}
config_error(buf)
}
err => err.into(),
})?;
writeln!(ui.stdout(), "{stringified}")?;
writeln!(ui.stdout(), "{value}")?;
Ok(())
}
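
Both versions of `cmd_config_get` above implement the same rule: string values are printed bare, other scalars in TOML syntax, and arrays or tables are rejected. A sketch of that rule over a simplified value enum (a stand-in, not jj_lib's ConfigValue):

enum SimpleValue {
    Str(String),
    Int(i64),
    Bool(bool),
    Array(Vec<SimpleValue>),
}

fn stringify_for_get(value: &SimpleValue) -> Result<String, &'static str> {
    match value {
        // Strings lose their quotes on output.
        SimpleValue::Str(s) => Ok(s.clone()),
        // Other scalars print the way they appear in TOML.
        SimpleValue::Int(i) => Ok(i.to_string()),
        SimpleValue::Bool(b) => Ok(b.to_string()),
        // Composite values produce the error shown above.
        SimpleValue::Array(_) => Err("Expected a value convertible to a string, but is an array"),
    }
}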

View File

@ -12,30 +12,25 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use clap_complete::ArgValueCandidates;
use jj_lib::config::ConfigNamePathBuf;
use jj_lib::config::ConfigSource;
use jj_lib::settings::UserSettings;
use tracing::instrument;
use super::ConfigLevelArgs;
use crate::cli_util::CommandHelper;
use crate::command_error::CommandError;
use crate::complete;
use crate::config::resolved_config_values;
use crate::config::to_toml_value;
use crate::config::AnnotatedValue;
use crate::generic_templater;
use crate::config::ConfigNamePathBuf;
use crate::config::ConfigSource;
use crate::generic_templater::GenericTemplateLanguage;
use crate::template_builder::TemplateLanguage as _;
use crate::templater::TemplatePropertyExt as _;
use crate::templater::TemplateRenderer;
use crate::ui::Ui;
/// List variables set in config files, along with their values.
/// List variables set in config file, along with their values.
#[derive(clap::Args, Clone, Debug)]
#[command(mut_group("config_level", |g| g.required(false)))]
pub struct ConfigListArgs {
/// An optional name of a specific config option to look up.
#[arg(add = ArgValueCandidates::new(complete::config_keys))]
pub name: Option<ConfigNamePathBuf>,
/// Whether to explicitly include built-in default values in the list.
#[arg(long, conflicts_with = "config_level")]
@ -45,29 +40,17 @@ pub struct ConfigListArgs {
pub include_overridden: bool,
#[command(flatten)]
pub level: ConfigLevelArgs,
// TODO(#1047): Support --show-origin using LayeredConfigs.
/// Render each variable using the given template
///
/// The following keywords are available in the template expression:
/// The following keywords are defined:
///
/// * `name: String`: Config name.
/// * `value: ConfigValue`: Value to be formatted in TOML syntax.
/// * `value: String`: Serialized value in TOML syntax.
/// * `overridden: Boolean`: True if the value is shadowed by other.
/// * `source: String`: Source of the value.
/// * `path: String`: Path to the config file.
///
/// Can be overridden by the `templates.config_list` setting. To
/// see a detailed config list, use the `builtin_config_list_detailed`
/// template.
///
/// See [`jj help -k templates`] for more information.
///
/// [`jj help -k templates`]:
/// https://jj-vcs.github.io/jj/latest/templates/
#[arg(
long, short = 'T',
verbatim_doc_comment,
add = ArgValueCandidates::new(complete::template_aliases)
)]
/// For the syntax, see https://martinvonz.github.io/jj/latest/templates/
#[arg(long, short = 'T', verbatim_doc_comment)]
template: Option<String>,
}
@ -77,38 +60,46 @@ pub fn cmd_config_list(
command: &CommandHelper,
args: &ConfigListArgs,
) -> Result<(), CommandError> {
let template: TemplateRenderer<AnnotatedValue> = {
let language = config_template_language(command.settings());
let template = {
let language = config_template_language();
let text = match &args.template {
Some(value) => value.to_owned(),
None => command.settings().get_string("templates.config_list")?,
None => command
.settings()
.config()
.get_string("templates.config_list")?,
};
command
.parse_template(ui, &language, &text)?
.parse_template(ui, &language, &text, GenericTemplateLanguage::wrap_self)?
.labeled("config_list")
};
ui.request_pager();
let mut formatter = ui.stdout_formatter();
let name_path = args.name.clone().unwrap_or_else(ConfigNamePathBuf::root);
let mut annotated_values = resolved_config_values(command.settings().config(), &name_path);
// The default layer could be excluded beforehand as layers[len..], but we
// can't do the same for "annotated.source == target_source" in order for
// resolved_config_values() to mark values overridden by the upper layers.
if let Some(target_source) = args.level.get_source_kind() {
annotated_values.retain(|annotated| annotated.source == target_source);
} else if !args.include_defaults {
annotated_values.retain(|annotated| annotated.source != ConfigSource::Default);
}
if !args.include_overridden {
annotated_values.retain(|annotated| !annotated.is_overridden);
}
if !annotated_values.is_empty() {
ui.request_pager();
let mut formatter = ui.stdout_formatter();
for annotated in &annotated_values {
template.format(annotated, formatter.as_mut())?;
let mut wrote_values = false;
for annotated in command.resolved_config_values(&name_path)? {
// Remove overridden values.
if annotated.is_overridden && !args.include_overridden {
continue;
}
} else {
if let Some(target_source) = args.level.get_source_kind() {
if target_source != annotated.source {
continue;
}
}
// Skip built-ins if not included.
if !args.include_defaults && annotated.source == ConfigSource::Default {
continue;
}
template.format(&annotated, formatter.as_mut())?;
wrote_values = true;
}
drop(formatter);
if !wrote_values {
// Note to stderr explaining why output is empty.
if let Some(name) = &args.name {
writeln!(ui.warning_default(), "No matching config key for {name}")?;
@ -119,40 +110,25 @@ pub fn cmd_config_list(
Ok(())
}
type ConfigTemplateLanguage = GenericTemplateLanguage<'static, AnnotatedValue>;
generic_templater::impl_self_property_wrapper!(AnnotatedValue);
// AnnotatedValue will be cloned internally in the templater. If the cloning
// cost matters, wrap it with Rc.
fn config_template_language(settings: &UserSettings) -> ConfigTemplateLanguage {
let mut language = ConfigTemplateLanguage::new(settings);
fn config_template_language() -> GenericTemplateLanguage<'static, AnnotatedValue> {
type L = GenericTemplateLanguage<'static, AnnotatedValue>;
let mut language = L::new();
// "name" instead of "path" to avoid confusion with the source file path
language.add_keyword("name", |self_property| {
let out_property = self_property.map(|annotated| annotated.name.to_string());
Ok(out_property.into_dyn_wrapped())
let out_property = self_property.map(|annotated| annotated.path.to_string());
Ok(L::wrap_string(out_property))
});
language.add_keyword("value", |self_property| {
// .decorated("", "") to trim leading/trailing whitespace
let out_property = self_property.map(|annotated| annotated.value.decorated("", ""));
Ok(out_property.into_dyn_wrapped())
});
language.add_keyword("source", |self_property| {
let out_property = self_property.map(|annotated| annotated.source.to_string());
Ok(out_property.into_dyn_wrapped())
});
language.add_keyword("path", |self_property| {
let out_property = self_property.map(|annotated| {
// TODO: maybe add FilePath(PathBuf) template type?
annotated
.path
.as_ref()
.map_or_else(String::new, |path| path.to_string_lossy().into_owned())
});
Ok(out_property.into_dyn_wrapped())
// TODO: would be nice if we can provide raw dynamically-typed value
let out_property =
self_property.and_then(|annotated| Ok(to_toml_value(&annotated.value)?.to_string()));
Ok(L::wrap_string(out_property))
});
language.add_keyword("overridden", |self_property| {
let out_property = self_property.map(|annotated| annotated.is_overridden);
Ok(out_property.into_dyn_wrapped())
Ok(L::wrap_boolean(out_property))
});
language
}
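
Both `cmd_config_list` bodies above apply the same three filters, just in different shapes: keep only the requested config level (or drop built-in defaults when no level was given), and drop overridden values unless they are explicitly requested. A compact sketch of those filters over a simplified annotated-value type:

#[derive(Clone, Copy, PartialEq)]
enum Source { Default, User, Repo }

struct Annotated { source: Source, is_overridden: bool }

fn apply_list_filters(
    values: &mut Vec<Annotated>,
    level: Option<Source>,
    include_defaults: bool,
    include_overridden: bool,
) {
    if let Some(target) = level {
        values.retain(|v| v.source == target);
    } else if !include_defaults {
        values.retain(|v| v.source != Source::Default);
    }
    if !include_overridden {
        values.retain(|v| !v.is_overridden);
    }
}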

View File

@ -19,11 +19,6 @@ mod path;
mod set;
mod unset;
use std::path::Path;
use itertools::Itertools as _;
use jj_lib::config::ConfigFile;
use jj_lib::config::ConfigSource;
use tracing::instrument;
use self::edit::cmd_config_edit;
@ -39,9 +34,8 @@ use self::set::ConfigSetArgs;
use self::unset::cmd_config_unset;
use self::unset::ConfigUnsetArgs;
use crate::cli_util::CommandHelper;
use crate::command_error::user_error;
use crate::command_error::CommandError;
use crate::config::ConfigEnv;
use crate::config::ConfigSource;
use crate::ui::Ui;
#[derive(clap::Args, Clone, Debug)]
@ -57,6 +51,10 @@ pub(crate) struct ConfigLevelArgs {
}
impl ConfigLevelArgs {
fn expect_source_kind(&self) -> ConfigSource {
self.get_source_kind().expect("No config_level provided")
}
fn get_source_kind(&self) -> Option<ConfigSource> {
if self.user {
Some(ConfigSource::User)
@ -66,60 +64,6 @@ impl ConfigLevelArgs {
None
}
}
fn config_paths<'a>(&self, config_env: &'a ConfigEnv) -> Result<Vec<&'a Path>, CommandError> {
if self.user {
let paths = config_env.user_config_paths().collect_vec();
if paths.is_empty() {
return Err(user_error("No user config path found"));
}
Ok(paths)
} else if self.repo {
config_env
.repo_config_path()
.map(|p| vec![p])
.ok_or_else(|| user_error("No repo config path found"))
} else {
panic!("No config_level provided")
}
}
fn edit_config_file(
&self,
ui: &Ui,
command: &CommandHelper,
) -> Result<ConfigFile, CommandError> {
let config_env = command.config_env();
let config = command.raw_config();
let pick_one = |mut files: Vec<ConfigFile>, not_found_error: &str| {
if files.len() > 1 {
let mut choices = vec![];
let mut formatter = ui.stderr_formatter();
for (i, file) in files.iter().enumerate() {
writeln!(formatter, "{}: {}", i + 1, file.path().display())?;
choices.push((i + 1).to_string());
}
drop(formatter);
let index =
ui.prompt_choice("Choose a config file (default 1)", &choices, Some(0))?;
return Ok(files[index].clone());
}
files.pop().ok_or_else(|| user_error(not_found_error))
};
if self.user {
pick_one(
config_env.user_config_files(config)?,
"No user config path found to edit",
)
} else if self.repo {
pick_one(
config_env.repo_config_files(config)?,
"No repo config path found to edit",
)
} else {
panic!("No config_level provided")
}
}
}
/// Manage config options
@ -127,11 +71,8 @@ impl ConfigLevelArgs {
/// Operates on jj configuration, which comes from the config file and
/// environment variables.
///
/// See [`jj help -k config`] to know more about file locations, supported
/// config options, and other details about `jj config`.
///
/// [`jj help -k config`]:
/// https://jj-vcs.github.io/jj/latest/config/
/// For file locations, supported config options, and other details about jj
/// config, see https://martinvonz.github.io/jj/latest/config/.
#[derive(clap::Subcommand, Clone, Debug)]
pub(crate) enum ConfigCommand {
#[command(visible_alias("e"))]

View File

@ -17,16 +17,17 @@ use std::io::Write as _;
use tracing::instrument;
use super::ConfigLevelArgs;
use crate::cli_util::get_new_config_file_path;
use crate::cli_util::CommandHelper;
use crate::command_error::user_error;
use crate::command_error::CommandError;
use crate::ui::Ui;
/// Print the paths to the config files
/// Print the path to the config file
///
/// A config file at that path may or may not exist.
///
/// See `jj config edit` if you'd like to immediately edit a file.
/// See `jj config edit` if you'd like to immediately edit the file.
#[derive(clap::Args, Clone, Debug)]
pub struct ConfigPathArgs {
#[command(flatten)]
@ -39,14 +40,13 @@ pub fn cmd_config_path(
command: &CommandHelper,
args: &ConfigPathArgs,
) -> Result<(), CommandError> {
for config_path in args.level.config_paths(command.config_env())? {
writeln!(
ui.stdout(),
"{}",
config_path
.to_str()
.ok_or_else(|| user_error("The config path is not valid UTF-8"))?
)?;
}
let config_path = get_new_config_file_path(&args.level.expect_source_kind(), command)?;
writeln!(
ui.stdout(),
"{}",
config_path
.to_str()
.ok_or_else(|| user_error("The config path is not valid UTF-8"))?
)?;
Ok(())
}

View File

@ -14,41 +14,28 @@
use std::io;
use clap_complete::ArgValueCandidates;
use jj_lib::commit::Commit;
use jj_lib::config::ConfigNamePathBuf;
use jj_lib::config::ConfigValue;
use jj_lib::repo::Repo as _;
use jj_lib::repo::Repo;
use tracing::instrument;
use super::ConfigLevelArgs;
use crate::cli_util::get_new_config_file_path;
use crate::cli_util::CommandHelper;
use crate::cli_util::WorkspaceCommandHelper;
use crate::command_error::user_error_with_message;
use crate::command_error::user_error;
use crate::command_error::CommandError;
use crate::complete;
use crate::config::parse_value_or_bare_string;
use crate::config::parse_toml_value_or_bare_string;
use crate::config::write_config_value_to_file;
use crate::config::ConfigNamePathBuf;
use crate::ui::Ui;
/// Update a config file to set the given option to a given value.
/// Update config file to set the given option to a given value.
#[derive(clap::Args, Clone, Debug)]
pub struct ConfigSetArgs {
#[arg(required = true, add = ArgValueCandidates::new(complete::leaf_config_keys))]
#[arg(required = true)]
name: ConfigNamePathBuf,
/// New value to set
///
/// The value should be specified as a TOML expression. If string value
/// isn't enclosed by any TOML constructs (such as apostrophes or array
/// notation), quotes can be omitted. Note that the value may also need
/// shell quoting. TOML multi-line strings can be useful if the value
/// contains apostrophes. For example, to set `foo.bar` to the string
/// "{don't}" use `jj config set --user foo.bar "'''{don't}'''"`. This is
/// valid in both Bash and Fish.
///
/// Alternative, e.g. to avoid dealing with shell quoting, use `jj config
/// edit` to edit the TOML file directly.
#[arg(required = true, value_parser = parse_value_or_bare_string)]
value: ConfigValue,
#[arg(required = true)]
value: String,
#[command(flatten)]
level: ConfigLevelArgs,
}
@ -65,20 +52,26 @@ pub fn cmd_config_set(
command: &CommandHelper,
args: &ConfigSetArgs,
) -> Result<(), CommandError> {
let mut file = args.level.edit_config_file(ui, command)?;
let config_path = get_new_config_file_path(&args.level.expect_source_kind(), command)?;
if config_path.is_dir() {
return Err(user_error(format!(
"Can't set config in path {path} (dirs not supported)",
path = config_path.display()
)));
}
// TODO(#531): Infer types based on schema (w/ --type arg to override).
let value = parse_toml_value_or_bare_string(&args.value);
// If the user is trying to change the author config, we should warn them that
// it won't affect the working copy author
if args.name == ConfigNamePathBuf::from_iter(vec!["user", "name"]) {
check_wc_author(ui, command, &args.value, AuthorChange::Name)?;
check_wc_author(ui, command, &value, AuthorChange::Name)?;
} else if args.name == ConfigNamePathBuf::from_iter(vec!["user", "email"]) {
check_wc_author(ui, command, &args.value, AuthorChange::Email)?;
check_wc_author(ui, command, &value, AuthorChange::Email)?;
};
file.set_value(&args.name, &args.value)
.map_err(|err| user_error_with_message(format!("Failed to set {}", args.name), err))?;
file.save()?;
Ok(())
write_config_value_to_file(&args.name, value, &config_path)
}
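
The `--value` help text above says the value is parsed as a TOML expression, with the quotes optional for plain strings. A sketch of that fallback using the toml crate (it mirrors the documented behavior, not jj's actual parser; wrapping the input in a one-key document is just an illustration trick):

fn parse_value_or_bare_string(input: &str) -> toml::Value {
    // Try the input as a TOML value expression first.
    let doc = format!("v = {input}");
    match toml::from_str::<toml::Table>(&doc) {
        Ok(mut table) if table.len() == 1 => table.remove("v").unwrap(),
        // Anything that doesn't parse as TOML is kept as a bare string.
        _ => toml::Value::String(input.to_owned()),
    }
}

With this rule, `true` becomes a boolean, and `"'''{don't}'''"` (shell quoting included) becomes the plain string {don't}, as in the example from the help text.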
/// Returns the commit of the working copy if it exists.

View File

@ -12,22 +12,21 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use clap_complete::ArgValueCandidates;
use jj_lib::config::ConfigNamePathBuf;
use tracing::instrument;
use super::ConfigLevelArgs;
use crate::cli_util::get_new_config_file_path;
use crate::cli_util::CommandHelper;
use crate::command_error::user_error;
use crate::command_error::user_error_with_message;
use crate::command_error::CommandError;
use crate::complete;
use crate::config::remove_config_value_from_file;
use crate::config::ConfigNamePathBuf;
use crate::ui::Ui;
/// Update a config file to unset the given option.
/// Update config file to unset the given option.
#[derive(clap::Args, Clone, Debug)]
pub struct ConfigUnsetArgs {
#[arg(required = true, add = ArgValueCandidates::new(complete::leaf_config_keys))]
#[arg(required = true)]
name: ConfigNamePathBuf,
#[command(flatten)]
level: ConfigLevelArgs,
@ -35,17 +34,17 @@ pub struct ConfigUnsetArgs {
#[instrument(skip_all)]
pub fn cmd_config_unset(
ui: &mut Ui,
_ui: &mut Ui,
command: &CommandHelper,
args: &ConfigUnsetArgs,
) -> Result<(), CommandError> {
let mut file = args.level.edit_config_file(ui, command)?;
let old_value = file
.delete_value(&args.name)
.map_err(|err| user_error_with_message(format!("Failed to unset {}", args.name), err))?;
if old_value.is_none() {
return Err(user_error(format!(r#""{}" doesn't exist"#, args.name)));
let config_path = get_new_config_file_path(&args.level.expect_source_kind(), command)?;
if config_path.is_dir() {
return Err(user_error(format!(
"Can't set config in path {path} (dirs not supported)",
path = config_path.display()
)));
}
file.save()?;
Ok(())
remove_config_value_from_file(&args.name, &config_path)
}
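
One `cmd_config_unset` body above deletes the value through a config-file handle and reports an error when the key was never set in that file. The same shape over a plain map, as a rough sketch:

use std::collections::HashMap;

fn unset(config: &mut HashMap<String, String>, name: &str) -> Result<(), String> {
    match config.remove(name) {
        // Something was there: the file would now be saved back without it.
        Some(_old_value) => Ok(()),
        None => Err(format!(r#""{name}" doesn't exist"#)),
    }
}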

View File

@ -16,20 +16,19 @@ use std::fmt::Debug;
use std::io::Write as _;
use futures::executor::block_on_stream;
use jj_lib::backend::Backend;
use jj_lib::backend::CopyRecord;
use jj_lib::repo::Repo as _;
use crate::cli_util::CommandHelper;
use crate::cli_util::RevisionArg;
use crate::command_error::CommandError;
use crate::ui::Ui;
/// Show information about file copies detected
/// Rebuild commit index
#[derive(clap::Args, Clone, Debug)]
pub struct CopyDetectionArgs {
/// Show file copies detected in changed files in this revision, compared to
/// its parent(s)
#[arg(default_value = "@", value_name = "REVSET")]
/// Show changes in this revision, compared to its parent(s)
#[arg(default_value = "@")]
revision: RevisionArg,
}
@ -39,12 +38,14 @@ pub fn cmd_debug_copy_detection(
args: &CopyDetectionArgs,
) -> Result<(), CommandError> {
let ws = command.workspace_helper(ui)?;
let store = ws.repo().store();
let Some(git) = ws.git_backend() else {
writeln!(ui.stderr(), "Not a git backend.")?;
return Ok(());
};
let commit = ws.resolve_single_rev(ui, &args.revision)?;
for parent_id in commit.parent_ids() {
for CopyRecord { target, source, .. } in
block_on_stream(store.get_copy_records(None, parent_id, commit.id())?)
block_on_stream(git.get_copy_records(None, parent_id, commit.id())?)
.filter_map(|r| r.ok())
{
writeln!(

View File

@ -15,7 +15,7 @@
use std::fmt::Debug;
use std::io::Write as _;
use jj_lib::working_copy::WorkingCopy as _;
use jj_lib::working_copy::WorkingCopy;
use super::check_local_disk_wc;
use crate::cli_util::CommandHelper;
@ -40,11 +40,10 @@ pub fn cmd_debug_local_working_copy(
for (file, state) in wc.file_states()? {
writeln!(
ui.stdout(),
"{:?} {:13?} {:10?} {:?} {:?}",
"{:?} {:13?} {:10?} {:?}",
state.file_type,
state.size,
state.mtime.0,
state.materialized_conflict_data,
file
)?;
}

View File

@ -12,19 +12,18 @@
// See the License for the specific language governing permissions and
// limitations under the License.
mod copy_detection;
mod fileset;
mod index;
mod init_simple;
mod local_working_copy;
mod operation;
mod reindex;
mod revset;
mod snapshot;
mod template;
mod tree;
mod watchman;
mod working_copy;
pub mod copy_detection;
pub mod fileset;
pub mod index;
pub mod local_working_copy;
pub mod operation;
pub mod reindex;
pub mod revset;
pub mod snapshot;
pub mod template;
pub mod tree;
pub mod watchman;
pub mod working_copy;
use std::any::Any;
use std::fmt::Debug;
@ -38,8 +37,6 @@ use self::fileset::cmd_debug_fileset;
use self::fileset::DebugFilesetArgs;
use self::index::cmd_debug_index;
use self::index::DebugIndexArgs;
use self::init_simple::cmd_debug_init_simple;
use self::init_simple::DebugInitSimpleArgs;
use self::local_working_copy::cmd_debug_local_working_copy;
use self::local_working_copy::DebugLocalWorkingCopyArgs;
use self::operation::cmd_debug_operation;
@ -70,7 +67,6 @@ pub enum DebugCommand {
CopyDetection(CopyDetectionArgs),
Fileset(DebugFilesetArgs),
Index(DebugIndexArgs),
InitSimple(DebugInitSimpleArgs),
LocalWorkingCopy(DebugLocalWorkingCopyArgs),
#[command(visible_alias = "view")]
Operation(DebugOperationArgs),
@ -90,13 +86,12 @@ pub fn cmd_debug(
subcommand: &DebugCommand,
) -> Result<(), CommandError> {
match subcommand {
DebugCommand::CopyDetection(args) => cmd_debug_copy_detection(ui, command, args),
DebugCommand::Fileset(args) => cmd_debug_fileset(ui, command, args),
DebugCommand::Index(args) => cmd_debug_index(ui, command, args),
DebugCommand::InitSimple(args) => cmd_debug_init_simple(ui, command, args),
DebugCommand::LocalWorkingCopy(args) => cmd_debug_local_working_copy(ui, command, args),
DebugCommand::Operation(args) => cmd_debug_operation(ui, command, args),
DebugCommand::Reindex(args) => cmd_debug_reindex(ui, command, args),
DebugCommand::CopyDetection(args) => cmd_debug_copy_detection(ui, command, args),
DebugCommand::Revset(args) => cmd_debug_revset(ui, command, args),
DebugCommand::Snapshot(args) => cmd_debug_snapshot(ui, command, args),
DebugCommand::Template(args) => cmd_debug_template(ui, command, args),

View File

@ -15,19 +15,17 @@
use std::fmt::Debug;
use std::io::Write as _;
use clap_complete::ArgValueCandidates;
use jj_lib::object_id::ObjectId as _;
use jj_lib::object_id::ObjectId;
use jj_lib::op_walk;
use crate::cli_util::CommandHelper;
use crate::command_error::CommandError;
use crate::complete;
use crate::ui::Ui;
/// Show information about an operation and its view
#[derive(clap::Args, Clone, Debug)]
pub struct DebugOperationArgs {
#[arg(default_value = "@", add = ArgValueCandidates::new(complete::operations))]
#[arg(default_value = "@")]
operation: String,
#[arg(long, value_enum, default_value = "all")]
display: OperationDisplay,

View File

@ -15,7 +15,7 @@
use std::fmt::Debug;
use std::io::Write as _;
use jj_lib::object_id::ObjectId as _;
use jj_lib::object_id::ObjectId;
use jj_lib::revset;
use jj_lib::revset::RevsetDiagnostics;
@ -37,7 +37,7 @@ pub fn cmd_debug_revset(
args: &DebugRevsetArgs,
) -> Result<(), CommandError> {
let workspace_command = command.workspace_helper(ui)?;
let workspace_ctx = workspace_command.env().revset_parse_context();
let workspace_ctx = workspace_command.revset_parse_context();
let repo = workspace_command.repo().as_ref();
let mut diagnostics = RevsetDiagnostics::new();
@ -47,6 +47,11 @@ pub fn cmd_debug_revset(
writeln!(ui.stdout(), "{expression:#?}")?;
writeln!(ui.stdout())?;
let expression = revset::optimize(expression);
writeln!(ui.stdout(), "-- Optimized:")?;
writeln!(ui.stdout(), "{expression:#?}")?;
writeln!(ui.stdout())?;
let symbol_resolver = revset_util::default_symbol_resolver(
repo,
command.revset_extensions().symbol_resolvers(),
@ -57,17 +62,7 @@ pub fn cmd_debug_revset(
writeln!(ui.stdout(), "{expression:#?}")?;
writeln!(ui.stdout())?;
let expression = revset::optimize(expression);
writeln!(ui.stdout(), "-- Optimized:")?;
writeln!(ui.stdout(), "{expression:#?}")?;
writeln!(ui.stdout())?;
let backend_expression = expression.to_backend_expression(repo);
writeln!(ui.stdout(), "-- Backend:")?;
writeln!(ui.stdout(), "{backend_expression:#?}")?;
writeln!(ui.stdout())?;
let revset = expression.evaluate_unoptimized(repo)?;
let revset = expression.evaluate(repo)?;
writeln!(ui.stdout(), "-- Evaluated:")?;
writeln!(ui.stdout(), "{revset:#?}")?;
writeln!(ui.stdout())?;

View File

@ -17,7 +17,7 @@ use std::io::Write as _;
use jj_lib::backend::TreeId;
use jj_lib::merged_tree::MergedTree;
use jj_lib::repo::Repo as _;
use jj_lib::repo::Repo;
use jj_lib::repo_path::RepoPathBuf;
use crate::cli_util::CommandHelper;
@ -29,13 +29,12 @@ use crate::ui::Ui;
/// List the recursive entries of a tree.
#[derive(clap::Args, Clone, Debug)]
pub struct DebugTreeArgs {
#[arg(long, short = 'r', value_name = "REVSET")]
#[arg(long, short = 'r')]
revision: Option<RevisionArg>,
#[arg(long, conflicts_with = "revision")]
id: Option<String>,
#[arg(long, requires = "id")]
dir: Option<String>,
#[arg(value_name = "FILESETS")]
paths: Vec<String>,
// TODO: Add an option to include trees that are ancestors of the matched paths
}
@ -55,7 +54,7 @@ pub fn cmd_debug_tree(
RepoPathBuf::root()
};
let store = workspace_command.repo().store();
let tree = store.get_tree(dir, &tree_id)?;
let tree = store.get_tree(&dir, &tree_id)?;
MergedTree::resolved(tree)
} else {
let commit = workspace_command

View File

@ -50,19 +50,15 @@ pub fn cmd_debug_watchman(
let mut workspace_command = command.workspace_helper(ui)?;
let repo = workspace_command.repo().clone();
let watchman_config = WatchmanConfig {
// The value is likely irrelevant here. TODO(ilyagr): confirm
register_trigger: false,
};
match subcommand {
DebugWatchmanCommand::Status => {
// TODO(ilyagr): It would be nice to add colors here
let config = match workspace_command.settings().fsmonitor_settings()? {
let config = match command.settings().fsmonitor_settings()? {
FsmonitorSettings::Watchman(config) => {
writeln!(ui.stdout(), "Watchman is enabled via `core.fsmonitor`.")?;
writeln!(
ui.stdout(),
r"Background snapshotting is {}. Use `core.watchman.register-snapshot-trigger` to control it.",
r"Background snapshotting is {}. Use `core.watchman.register_snapshot_trigger` to control it.",
if config.register_trigger {
"enabled"
} else {
@ -80,7 +76,7 @@ pub fn cmd_debug_watchman(
ui.stdout(),
"Attempting to contact the `watchman` CLI regardless..."
)?;
watchman_config
WatchmanConfig::default()
}
other_fsmonitor => {
return Err(user_error(format!(
@ -106,12 +102,12 @@ pub fn cmd_debug_watchman(
}
DebugWatchmanCommand::QueryClock => {
let wc = check_local_disk_wc(workspace_command.working_copy().as_any())?;
let (clock, _changed_files) = wc.query_watchman(&watchman_config)?;
let (clock, _changed_files) = wc.query_watchman(&WatchmanConfig::default())?;
writeln!(ui.stdout(), "Clock: {clock:?}")?;
}
DebugWatchmanCommand::QueryChangedFiles => {
let wc = check_local_disk_wc(workspace_command.working_copy().as_any())?;
let (_clock, changed_files) = wc.query_watchman(&watchman_config)?;
let (_clock, changed_files) = wc.query_watchman(&WatchmanConfig::default())?;
writeln!(ui.stdout(), "Changed files: {changed_files:?}")?;
}
DebugWatchmanCommand::ResetClock => {

View File

@ -14,27 +14,22 @@
use std::collections::HashMap;
use std::io;
use std::io::Read as _;
use std::iter;
use std::io::Read;
use clap_complete::ArgValueCompleter;
use itertools::Itertools as _;
use itertools::Itertools;
use jj_lib::backend::Signature;
use jj_lib::commit::CommitIteratorExt as _;
use jj_lib::object_id::ObjectId as _;
use jj_lib::commit::CommitIteratorExt;
use jj_lib::object_id::ObjectId;
use tracing::instrument;
use crate::cli_util::CommandHelper;
use crate::cli_util::RevisionArg;
use crate::command_error::user_error;
use crate::command_error::CommandError;
use crate::complete;
use crate::description_util::add_trailers_with_template;
use crate::description_util::description_template;
use crate::description_util::edit_description;
use crate::description_util::edit_multiple_descriptions;
use crate::description_util::join_message_paragraphs;
use crate::description_util::parse_trailers_template;
use crate::description_util::ParsedBulkEditMessage;
use crate::text_util::parse_author;
use crate::ui::Ui;
@ -46,29 +41,17 @@ use crate::ui::Ui;
#[derive(clap::Args, Clone, Debug)]
#[command(visible_aliases = &["desc"])]
pub(crate) struct DescribeArgs {
/// The revision(s) whose description to edit (default: @)
#[arg(
value_name = "REVSETS",
add = ArgValueCompleter::new(complete::revset_expression_mutable)
)]
revisions_pos: Vec<RevisionArg>,
#[arg(
short = 'r',
hide = true,
value_name = "REVSETS",
add = ArgValueCompleter::new(complete::revset_expression_mutable)
)]
revisions_opt: Vec<RevisionArg>,
/// The revision(s) whose description to edit
#[arg(default_value = "@")]
revisions: Vec<RevisionArg>,
/// Ignored (but lets you pass `-r` for consistency with other commands)
#[arg(short = 'r', hide = true, action = clap::ArgAction::Count)]
unused_revision: u8,
/// The change description to use (don't open editor)
///
/// If multiple revisions are specified, the same description will be used
/// for all of them.
#[arg(
long = "message",
short,
value_name = "MESSAGE",
conflicts_with = "stdin"
)]
#[arg(long = "message", short, value_name = "MESSAGE")]
message_paragraphs: Vec<String>,
/// Read the change description from stdin
///
@ -79,14 +62,8 @@ pub(crate) struct DescribeArgs {
/// Don't open an editor
///
/// This is mainly useful in combination with e.g. `--reset-author`.
#[arg(long, conflicts_with = "edit")]
no_edit: bool,
/// Open an editor
///
/// Forces an editor to open when using `--stdin` or `--message` to
/// allow the message to be edited afterwards.
#[arg(long)]
edit: bool,
no_edit: bool,
/// Reset the author to the configured user
///
/// This resets the author name, email, and timestamp.
@ -116,20 +93,15 @@ pub(crate) fn cmd_describe(
args: &DescribeArgs,
) -> Result<(), CommandError> {
let mut workspace_command = command.workspace_helper(ui)?;
let commits: Vec<_> = if !args.revisions_pos.is_empty() || !args.revisions_opt.is_empty() {
workspace_command
.parse_union_revsets(ui, &[&*args.revisions_pos, &*args.revisions_opt].concat())?
} else {
workspace_command.parse_revset(ui, &RevisionArg::AT)?
}
.evaluate_to_commits()?
.try_collect()?; // in reverse topological order
let commits: Vec<_> = workspace_command
.parse_union_revsets(ui, &args.revisions)?
.evaluate_to_commits()?
.try_collect()?; // in reverse topological order
if commits.is_empty() {
writeln!(ui.status(), "No revisions to describe.")?;
return Ok(());
}
workspace_command.check_rewritable(commits.iter().ids())?;
let text_editor = workspace_command.text_editor()?;
let mut tx = workspace_command.start_transaction();
let tx_description = if commits.len() == 1 {
@ -152,70 +124,62 @@ pub(crate) fn cmd_describe(
None
};
// edit and no_edit are conflicting arguments and therefore it should not
// be possible for both to be true at the same time.
assert!(!(args.edit && args.no_edit));
let use_editor = args.edit || (shared_description.is_none() && !args.no_edit);
let mut commit_builders = commits
.iter()
.map(|commit| {
let mut commit_builder = tx.repo_mut().rewrite_commit(commit).detach();
if let Some(description) = &shared_description {
commit_builder.set_description(description);
}
if args.reset_author {
let new_author = commit_builder.committer().clone();
commit_builder.set_author(new_author);
}
if let Some((name, email)) = args.author.clone() {
let new_author = Signature {
name,
email,
timestamp: commit_builder.author().timestamp,
};
commit_builder.set_author(new_author);
}
commit_builder
})
.collect_vec();
if let Some(trailer_template) = parse_trailers_template(ui, &tx)? {
for commit_builder in &mut commit_builders {
// The first trailer would become the first line of the description.
// Also, a commit with no description is treated in a special way in jujutsu: it
// can be discarded as soon as it's no longer the working copy. Adding a
// trailer to an empty description would break that logic.
if use_editor || !commit_builder.description().is_empty() {
let temp_commit = commit_builder.write_hidden()?;
let new_description = add_trailers_with_template(&trailer_template, &temp_commit)?;
commit_builder.set_description(new_description);
}
}
}
if use_editor {
let temp_commits: Vec<_> = iter::zip(&commits, &commit_builders)
let commit_descriptions: Vec<(_, _)> = if args.no_edit || shared_description.is_some() {
commits
.iter()
.map(|commit| {
let new_description = shared_description
.as_deref()
.unwrap_or_else(|| commit.description());
(commit, new_description.to_owned())
})
.collect()
} else {
let temp_commits: Vec<(_, _)> = commits
.iter()
// Edit descriptions in topological order
.rev()
.map(|(commit, commit_builder)| {
commit_builder
.write_hidden()
.map(|temp_commit| (commit.id(), temp_commit))
.map(|commit| -> Result<_, CommandError> {
let mut commit_builder = tx
.repo_mut()
.rewrite_commit(command.settings(), commit)
.detach();
if commit_builder.description().is_empty() {
commit_builder.set_description(command.settings().default_description());
}
if args.reset_author {
let new_author = commit_builder.committer().clone();
commit_builder.set_author(new_author);
}
if let Some((name, email)) = args.author.clone() {
let new_author = Signature {
name,
email,
timestamp: commit_builder.author().timestamp,
};
commit_builder.set_author(new_author);
}
let temp_commit = commit_builder.write_hidden()?;
Ok((commit.id(), temp_commit))
})
.try_collect()?;
if let [(_, temp_commit)] = &*temp_commits {
let template = description_template(ui, &tx, "", temp_commit)?;
let description = edit_description(&text_editor, &template)?;
commit_builders[0].set_description(description);
let description = edit_description(
tx.base_workspace_helper().repo_path(),
&template,
command.settings(),
)?;
vec![(&commits[0], description)]
} else {
let ParsedBulkEditMessage {
descriptions,
missing,
duplicates,
unexpected,
} = edit_multiple_descriptions(ui, &text_editor, &tx, &temp_commits)?;
} = edit_multiple_descriptions(ui, &tx, &temp_commits, command.settings())?;
if !missing.is_empty() {
return Err(user_error(format!(
"The description for the following commits were not found in the edited \
@ -237,59 +201,75 @@ pub(crate) fn cmd_describe(
)));
}
for (commit, commit_builder) in iter::zip(&commits, &mut commit_builders) {
let description = descriptions.get(commit.id()).unwrap();
commit_builder.set_description(description);
}
let commit_descriptions = commits
.iter()
.map(|commit| {
let description = descriptions.get(commit.id()).unwrap().to_owned();
(commit, description)
})
.collect();
commit_descriptions
}
};
// Filter out unchanged commits to avoid rebasing descendants in
// `transform_descendants` below unnecessarily.
let commit_builders: HashMap<_, _> = iter::zip(&commits, commit_builders)
.filter(|(old_commit, commit_builder)| {
old_commit.description() != commit_builder.description()
let commit_descriptions: HashMap<_, _> = commit_descriptions
.into_iter()
.filter(|(commit, new_description)| {
new_description != commit.description()
|| args.reset_author
// Ignore author timestamp which could be updated if the old
// commit was discardable.
|| old_commit.author().name != commit_builder.author().name
|| old_commit.author().email != commit_builder.author().email
|| args.author.as_ref().is_some_and(|(name, email)| {
name != &commit.author().name || email != &commit.author().email
})
})
.map(|(old_commit, commit_builder)| (old_commit.id(), commit_builder))
.map(|(commit, new_description)| (commit.id(), new_description))
.collect();
let mut num_described = 0;
let mut num_reparented = 0;
let mut num_rebased = 0;
// Even though `MutRepo::rewrite_commit` and `MutRepo::rebase_descendants` can
// handle rewriting of a commit even if it is a descendant of another commit
// being rewritten, using `MutRepo::transform_descendants` prevents us from
// rewriting the same commit multiple times, and adding additional entries
// in the predecessor chain.
tx.repo_mut().transform_descendants(
commit_builders.keys().map(|&id| id.clone()).collect(),
command.settings(),
commit_descriptions
.keys()
.map(|&id| id.clone())
.collect_vec(),
|rewriter| {
let old_commit_id = rewriter.old_commit().id().clone();
let commit_builder = rewriter.reparent();
if let Some(temp_builder) = commit_builders.get(&old_commit_id) {
commit_builder
.set_description(temp_builder.description())
.set_author(temp_builder.author().clone())
// Copy back committer for consistency with author timestamp
.set_committer(temp_builder.committer().clone())
.write()?;
let mut commit_builder = rewriter.rebase(command.settings())?;
if let Some(description) = commit_descriptions.get(&old_commit_id) {
commit_builder = commit_builder.set_description(description);
if args.reset_author {
let new_author = commit_builder.committer().clone();
commit_builder = commit_builder.set_author(new_author);
}
if let Some((name, email)) = args.author.clone() {
let new_author = Signature {
name,
email,
timestamp: commit_builder.author().timestamp,
};
commit_builder = commit_builder.set_author(new_author);
}
num_described += 1;
} else {
commit_builder.write()?;
num_reparented += 1;
num_rebased += 1;
}
commit_builder.write()?;
Ok(())
},
)?;
if num_described > 1 {
writeln!(ui.status(), "Updated {num_described} commits")?;
}
if num_reparented > 0 {
writeln!(ui.status(), "Rebased {num_reparented} descendant commits")?;
if num_rebased > 0 {
writeln!(ui.status(), "Rebased {num_rebased} descendant commits")?;
}
tx.finish(ui, tx_description)?;
Ok(())
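
Both variants of `cmd_describe` above end by dropping commits whose description (and, with `--reset-author`/`--author`, whose author) did not actually change, so `transform_descendants` only rewrites what it must and each descendant is rebased once. The "keep only the changed ones" step as a sketch, with string ids and descriptions standing in for commits:

use std::collections::HashMap;

fn changed_descriptions(
    current: &HashMap<String, String>,
    proposed: &HashMap<String, String>,
) -> HashMap<String, String> {
    proposed
        .iter()
        // Keep an entry only if it differs from the current description.
        .filter(|(id, new_desc)| current.get(*id) != Some(*new_desc))
        .map(|(id, desc)| (id.clone(), desc.clone()))
        .collect()
}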

View File

@ -12,91 +12,50 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use clap_complete::ArgValueCompleter;
use indexmap::IndexSet;
use itertools::Itertools as _;
use itertools::Itertools;
use jj_lib::copies::CopyRecords;
use jj_lib::repo::Repo as _;
use jj_lib::repo::Repo;
use jj_lib::rewrite::merge_commit_trees;
use tracing::instrument;
use crate::cli_util::print_unmatched_explicit_paths;
use crate::cli_util::short_commit_hash;
use crate::cli_util::CommandHelper;
use crate::cli_util::RevisionArg;
use crate::command_error::user_error_with_hint;
use crate::command_error::CommandError;
use crate::complete;
use crate::diff_util::get_copy_records;
use crate::diff_util::DiffFormatArgs;
use crate::ui::Ui;
/// Compare file contents between two revisions
///
/// With the `-r` option, shows the changes compared to the parent revision.
/// If there are several parent revisions (i.e., the given revision is a
/// merge), then they will be merged and the changes from the result to the
/// given revision will be shown.
/// With the `-r` option, which is the default, shows the changes compared to
/// the parent revision. If there are several parent revisions (i.e., the given
/// revision is a merge), then they will be merged and the changes from the
/// result to the given revision will be shown.
///
/// With the `--from` and/or `--to` options, shows the difference from/to the
/// given revisions. If either is left out, it defaults to the working-copy
/// commit. For example, `jj diff --from main` shows the changes from "main"
/// (perhaps a bookmark name) to the working-copy commit.
///
/// If no option is specified, it defaults to `-r @`.
#[derive(clap::Args, Clone, Debug)]
#[command(mut_arg("ignore_all_space", |a| a.short('w')))]
#[command(mut_arg("ignore_space_change", |a| a.short('b')))]
pub(crate) struct DiffArgs {
/// Show changes in these revisions
/// Show changes in this revision, compared to its parent(s)
///
/// If there are multiple revisions, then the total diff for all of them
/// will be shown. For example, if you have a linear chain of revisions
/// A..D, then `jj diff -r B::D` equals `jj diff --from A --to D`. Multiple
/// heads and/or roots are supported, but gaps in the revset are not
/// supported (e.g. `jj diff -r 'A|C'` in a linear chain A..C).
///
/// If a revision is a merge commit, this shows changes *from* the
/// If the revision is a merge commit, this shows changes *from* the
/// automatic merge of the contents of all of its parents *to* the contents
/// of the revision itself.
///
/// If none of `-r`, `-f`, or `-t` is provided, then the default is `-r @`.
#[arg(
long,
short,
value_name = "REVSETS",
alias = "revision",
add = ArgValueCompleter::new(complete::revset_expression_all),
)]
revisions: Option<Vec<RevisionArg>>,
#[arg(long, short)]
revision: Option<RevisionArg>,
/// Show changes from this revision
///
/// If none of `-r`, `-f`, or `-t` is provided, then the default is `-r @`.
#[arg(
long,
short,
conflicts_with = "revisions",
value_name = "REVSET",
add = ArgValueCompleter::new(complete::revset_expression_all),
)]
#[arg(long, conflicts_with = "revision")]
from: Option<RevisionArg>,
/// Show changes to this revision
///
/// If none of `-r`, `-f`, or `-t` is provided, then the default is `-r @`.
#[arg(
long,
short,
conflicts_with = "revisions",
value_name = "REVSET",
add = ArgValueCompleter::new(complete::revset_expression_all),
)]
#[arg(long, conflicts_with = "revision")]
to: Option<RevisionArg>,
/// Restrict the diff to these paths
#[arg(
value_name = "FILESETS",
value_hint = clap::ValueHint::AnyPath,
add = ArgValueCompleter::new(complete::modified_revision_or_range_files),
)]
#[arg(value_hint = clap::ValueHint::AnyPath)]
paths: Vec<String>,
#[command(flatten)]
format: DiffFormatArgs,
@ -112,14 +71,14 @@ pub(crate) fn cmd_diff(
let repo = workspace_command.repo();
let fileset_expression = workspace_command.parse_file_patterns(ui, &args.paths)?;
let matcher = fileset_expression.to_matcher();
let resolve_revision = |r: &Option<RevisionArg>| {
workspace_command.resolve_single_rev(ui, r.as_ref().unwrap_or(&RevisionArg::AT))
};
let from_tree;
let to_tree;
let mut copy_records = CopyRecords::default();
if args.from.is_some() || args.to.is_some() {
let resolve_revision = |r: &Option<RevisionArg>| {
workspace_command.resolve_single_rev(ui, r.as_ref().unwrap_or(&RevisionArg::AT))
};
let from = resolve_revision(&args.from)?;
let to = resolve_revision(&args.to)?;
from_tree = from.tree()?;
@ -128,44 +87,14 @@ pub(crate) fn cmd_diff(
let records = get_copy_records(repo.store(), from.id(), to.id(), &matcher)?;
copy_records.add_records(records)?;
} else {
let revision_args = args
.revisions
.as_deref()
.unwrap_or(std::slice::from_ref(&RevisionArg::AT));
let revisions_evaluator = workspace_command.parse_union_revsets(ui, revision_args)?;
let target_expression = revisions_evaluator.expression();
let mut gaps_revset = workspace_command
.attach_revset_evaluator(target_expression.connected().minus(target_expression))
.evaluate_to_commit_ids()?;
if let Some(commit_id) = gaps_revset.next() {
return Err(user_error_with_hint(
"Cannot diff revsets with gaps in.",
format!(
"Revision {} would need to be in the set.",
short_commit_hash(&commit_id?)
),
));
}
let heads: Vec<_> = workspace_command
.attach_revset_evaluator(target_expression.heads())
.evaluate_to_commits()?
.try_collect()?;
let roots: Vec<_> = workspace_command
.attach_revset_evaluator(target_expression.roots())
.evaluate_to_commits()?
.try_collect()?;
// Collect parents outside of revset to preserve parent order
let parents: IndexSet<_> = roots.iter().flat_map(|c| c.parents()).try_collect()?;
let parents = parents.into_iter().collect_vec();
let to = resolve_revision(&args.revision)?;
let parents: Vec<_> = to.parents().try_collect()?;
from_tree = merge_commit_trees(repo.as_ref(), &parents)?;
to_tree = merge_commit_trees(repo.as_ref(), &heads)?;
to_tree = to.tree()?;
for p in &parents {
for to in &heads {
let records = get_copy_records(repo.store(), p.id(), to.id(), &matcher)?;
copy_records.add_records(records)?;
}
let records = get_copy_records(repo.store(), p.id(), to.id(), &matcher)?;
copy_records.add_records(records)?;
}
}
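
The multi-revision `-r` handling above rejects revsets with gaps by checking that `connected(target) - target` is empty, then diffs from the merged parents of the roots to the merged heads. On a linear chain the gap check boils down to a range test, sketched here with integer indices standing in for commits:

use std::collections::BTreeSet;

fn first_gap(selected: &BTreeSet<u32>) -> Option<u32> {
    let lo = *selected.iter().next()?;
    let hi = *selected.iter().next_back()?;
    // A connected selection contains every commit between its two ends.
    (lo..=hi).find(|i| !selected.contains(i))
}

For a chain A..D, `-r B::D` has no gap, while `-r 'A|C'` finds B, matching the "would need to be in the set" hint above.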

View File

@ -12,30 +12,29 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use std::io::Write as _;
use std::io::Write;
use clap_complete::ArgValueCompleter;
use itertools::Itertools as _;
use itertools::Itertools;
use jj_lib::matchers::EverythingMatcher;
use jj_lib::object_id::ObjectId as _;
use jj_lib::object_id::ObjectId;
use jj_lib::rewrite::merge_commit_trees;
use tracing::instrument;
use crate::cli_util::CommandHelper;
use crate::cli_util::RevisionArg;
use crate::command_error::CommandError;
use crate::complete;
use crate::ui::Ui;
/// Touch up the content changes in a revision with a diff editor
///
/// With the `-r` option, starts a [diff editor] on the changes in the revision.
/// With the `-r` option, which is the default, starts a [diff editor] on the
/// changes in the revision.
///
/// With the `--from` and/or `--to` options, starts a [diff editor] comparing
/// the "from" revision to the "to" revision.
///
/// [diff editor]:
/// https://jj-vcs.github.io/jj/latest/config/#editing-diffs
/// https://martinvonz.github.io/jj/latest/config/#editing-diffs
///
/// Edit the right side of the diff until it looks the way you want. Once you
/// close the editor, the revision specified with `-r` or `--to` will be
@ -49,32 +48,17 @@ pub(crate) struct DiffeditArgs {
/// The revision to touch up
///
/// Defaults to @ if neither --to nor --from are specified.
#[arg(
long,
short,
value_name = "REVSET",
add = ArgValueCompleter::new(complete::revset_expression_mutable),
)]
#[arg(long, short)]
revision: Option<RevisionArg>,
/// Show changes from this revision
///
/// Defaults to @ if --to is specified.
#[arg(
long, short,
conflicts_with = "revision",
value_name = "REVSET",
add = ArgValueCompleter::new(complete::revset_expression_all),
)]
#[arg(long, conflicts_with = "revision")]
from: Option<RevisionArg>,
/// Edit changes in this revision
///
/// Defaults to @ if --from is specified.
#[arg(
long, short,
conflicts_with = "revision",
value_name = "REVSET",
add = ArgValueCompleter::new(complete::revset_expression_mutable),
)]
#[arg(long, conflicts_with = "revision")]
to: Option<RevisionArg>,
/// Specify diff editor to be used
#[arg(long, value_name = "NAME")]
@ -135,21 +119,25 @@ don't make any changes, then the operation will be aborted.",
if tree_id == *target_commit.tree_id() {
writeln!(ui.status(), "Nothing changed.")?;
} else {
tx.repo_mut()
.rewrite_commit(&target_commit)
let new_commit = tx
.repo_mut()
.rewrite_commit(command.settings(), &target_commit)
.set_tree_id(tree_id)
.write()?;
// rebase_descendants early; otherwise `new_commit` would always have
// a conflicted change id at this point.
let (num_rebased, extra_msg) = if args.restore_descendants {
(
tx.repo_mut().reparent_descendants()?,
tx.repo_mut().reparent_descendants(command.settings())?,
" (while preserving their content)",
)
} else {
(tx.repo_mut().rebase_descendants()?, "")
(tx.repo_mut().rebase_descendants(command.settings())?, "")
};
if let Some(mut formatter) = ui.status_formatter() {
write!(formatter, "Created ")?;
tx.write_commit_summary(formatter.as_mut(), &new_commit)?;
writeln!(formatter)?;
if num_rebased > 0 {
writeln!(
formatter,

View File

@ -12,91 +12,31 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use std::io::Write as _;
use std::io::Write;
use bstr::ByteVec as _;
use clap_complete::ArgValueCompleter;
use itertools::Itertools as _;
use jj_lib::backend::BackendResult;
use indexmap::IndexMap;
use itertools::Itertools;
use jj_lib::backend::CommitId;
use jj_lib::repo::Repo as _;
use jj_lib::rewrite::duplicate_commits;
use jj_lib::rewrite::duplicate_commits_onto_parents;
use jj_lib::rewrite::DuplicateCommitsStats;
use jj_lib::commit::Commit;
use jj_lib::repo::Repo;
use tracing::instrument;
use crate::cli_util::compute_commit_location;
use crate::cli_util::short_commit_hash;
use crate::cli_util::CommandHelper;
use crate::cli_util::RevisionArg;
use crate::command_error::user_error;
use crate::command_error::CommandError;
use crate::complete;
use crate::formatter::PlainTextFormatter;
use crate::ui::Ui;
/// Create new changes with the same content as existing ones
///
/// When none of the `--destination`, `--insert-after`, or `--insert-before`
/// arguments are provided, commits will be duplicated onto their existing
/// parents or onto other newly duplicated commits.
///
/// When any of the `--destination`, `--insert-after`, or `--insert-before`
/// arguments are provided, the roots of the specified commits will be
/// duplicated onto the destination indicated by the arguments. Other specified
/// commits will be duplicated onto these newly duplicated commits. If the
/// `--insert-after` or `--insert-before` arguments are provided, the new
/// children indicated by the arguments will be rebased onto the heads of the
/// specified commits.
///
/// By default, the duplicated commits retain the descriptions of the originals.
/// This can be customized with the `templates.duplicate_description` setting.
/// Create a new change with the same content as an existing one
#[derive(clap::Args, Clone, Debug)]
pub(crate) struct DuplicateArgs {
/// The revision(s) to duplicate (default: @)
#[arg(
value_name = "REVSETS",
add = ArgValueCompleter::new(complete::revset_expression_all),
)]
revisions_pos: Vec<RevisionArg>,
#[arg(
short = 'r',
hide = true,
value_name = "REVSETS",
add = ArgValueCompleter::new(complete::revset_expression_all),
)]
revisions_opt: Vec<RevisionArg>,
/// The revision(s) to duplicate onto (can be repeated to create a merge
/// commit)
#[arg(
long,
short,
value_name = "REVSETS",
add = ArgValueCompleter::new(complete::revset_expression_all),
)]
destination: Option<Vec<RevisionArg>>,
/// The revision(s) to insert after (can be repeated to create a merge
/// commit)
#[arg(
long,
short = 'A',
visible_alias = "after",
conflicts_with = "destination",
value_name = "REVSETS",
add = ArgValueCompleter::new(complete::revset_expression_all),
)]
insert_after: Option<Vec<RevisionArg>>,
/// The revision(s) to insert before (can be repeated to create a merge
/// commit)
#[arg(
long,
short = 'B',
visible_alias = "before",
conflicts_with = "destination",
value_name = "REVSETS",
add = ArgValueCompleter::new(complete::revset_expression_mutable),
)]
insert_before: Option<Vec<RevisionArg>>,
/// The revision(s) to duplicate
#[arg(default_value = "@")]
revisions: Vec<RevisionArg>,
/// Ignored (but lets you pass `-r` for consistency with other commands)
#[arg(short = 'r', hide = true, action = clap::ArgAction::Count)]
unused_revision: u8,
}
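For reference, the `templates.duplicate_description` setting mentioned in the doc comment above takes a commit template. The key name comes from this diff; the value below is only an illustrative sketch using the standard `description` keyword, not the built-in default.

```toml
# Hypothetical user config: prefix each duplicated commit's description.
# `description` is a commit template keyword; the actual default template
# shipped with jj may differ.
[templates]
duplicate_description = '"Duplicate: " ++ description'
```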
#[instrument(skip_all)]
@ -106,13 +46,8 @@ pub(crate) fn cmd_duplicate(
args: &DuplicateArgs,
) -> Result<(), CommandError> {
let mut workspace_command = command.workspace_helper(ui)?;
let to_duplicate: Vec<CommitId> =
if !args.revisions_pos.is_empty() || !args.revisions_opt.is_empty() {
workspace_command
.parse_union_revsets(ui, &[&*args.revisions_pos, &*args.revisions_opt].concat())?
} else {
workspace_command.parse_revset(ui, &RevisionArg::AT)?
}
let to_duplicate: Vec<CommitId> = workspace_command
.parse_union_revsets(ui, &args.revisions)?
.evaluate_to_commit_ids()?
.try_collect()?; // in reverse topological order
if to_duplicate.is_empty() {
@ -122,104 +57,37 @@ pub(crate) fn cmd_duplicate(
if to_duplicate.last() == Some(workspace_command.repo().store().root_commit_id()) {
return Err(user_error("Cannot duplicate the root commit"));
}
let location = if args.destination.is_none()
&& args.insert_after.is_none()
&& args.insert_before.is_none()
{
None
} else {
Some(compute_commit_location(
ui,
&workspace_command,
args.destination.as_deref(),
args.insert_after.as_deref(),
args.insert_before.as_deref(),
"duplicated commits",
)?)
};
let mut duplicated_old_to_new: IndexMap<&CommitId, Commit> = IndexMap::new();
let mut tx = workspace_command.start_transaction();
let base_repo = tx.base_repo().clone();
let store = base_repo.store();
let mut_repo = tx.repo_mut();
if let Some((parent_commit_ids, children_commit_ids)) = &location {
if !parent_commit_ids.is_empty() {
for commit_id in &to_duplicate {
for parent_commit_id in parent_commit_ids {
if tx.repo().index().is_ancestor(commit_id, parent_commit_id) {
writeln!(
ui.warning_default(),
"Duplicating commit {} as a descendant of itself",
short_commit_hash(commit_id)
)?;
break;
}
}
}
for commit_id in &to_duplicate {
for child_commit_id in children_commit_ids {
if tx.repo().index().is_ancestor(child_commit_id, commit_id) {
writeln!(
ui.warning_default(),
"Duplicating commit {} as an ancestor of itself",
short_commit_hash(commit_id)
)?;
break;
}
}
}
}
for original_commit_id in to_duplicate.iter().rev() {
// Topological order ensures that any parents of `original_commit` are
// either not in `to_duplicate` or were already duplicated.
let original_commit = store.get_commit(original_commit_id)?;
let new_parents = original_commit
.parent_ids()
.iter()
.map(|id| duplicated_old_to_new.get(id).map_or(id, |c| c.id()).clone())
.collect();
let new_commit = mut_repo
.rewrite_commit(command.settings(), &original_commit)
.generate_new_change_id()
.set_parents(new_parents)
.write()?;
duplicated_old_to_new.insert(original_commit_id, new_commit);
}
let new_descs = {
let template = tx
.settings()
.get_string("templates.duplicate_description")?;
let parsed = tx.parse_commit_template(ui, &template)?;
to_duplicate
.iter()
.map(|commit_id| -> BackendResult<_> {
let mut output = Vec::new();
let commit = tx.repo().store().get_commit(commit_id)?;
parsed
.format(&commit, &mut PlainTextFormatter::new(&mut output))
.expect("write() to vec backed formatter should never fail");
Ok((commit_id.clone(), output.into_string_lossy()))
})
.try_collect()?
};
let num_to_duplicate = to_duplicate.len();
let DuplicateCommitsStats {
duplicated_commits,
num_rebased,
} = if let Some((parent_commit_ids, children_commit_ids)) = location {
duplicate_commits(
tx.repo_mut(),
&to_duplicate,
&new_descs,
&parent_commit_ids,
&children_commit_ids,
)?
} else {
duplicate_commits_onto_parents(tx.repo_mut(), &to_duplicate, &new_descs)?
};
if let Some(mut formatter) = ui.status_formatter() {
for (old_id, new_commit) in &duplicated_commits {
for (old_id, new_commit) in &duplicated_old_to_new {
write!(formatter, "Duplicated {} as ", short_commit_hash(old_id))?;
tx.write_commit_summary(formatter.as_mut(), new_commit)?;
writeln!(formatter)?;
}
if num_rebased > 0 {
writeln!(
ui.status(),
"Rebased {num_rebased} commits onto duplicated commits"
)?;
}
}
tx.finish(ui, format!("duplicate {num_to_duplicate} commit(s)"))?;
tx.finish(ui, format!("duplicate {} commit(s)", to_duplicate.len()))?;
Ok(())
}

View File

@ -12,29 +12,25 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use std::io::Write as _;
use std::io::Write;
use clap_complete::ArgValueCompleter;
use jj_lib::object_id::ObjectId as _;
use jj_lib::object_id::ObjectId;
use tracing::instrument;
use crate::cli_util::CommandHelper;
use crate::cli_util::RevisionArg;
use crate::command_error::CommandError;
use crate::complete;
use crate::ui::Ui;
/// Sets the specified revision as the working-copy revision
///
/// Note: it is [generally recommended] to instead use `jj new` and `jj
/// Note: it is generally recommended to instead use `jj new` and `jj
/// squash`.
///
/// [generally recommended]:
/// https://jj-vcs.github.io/jj/latest/FAQ#how-do-i-resume-working-on-an-existing-change
/// For more information, see https://martinvonz.github.io/jj/latest/FAQ#how-do-i-resume-working-on-an-existing-change
#[derive(clap::Args, Clone, Debug)]
pub(crate) struct EditArgs {
/// The commit to edit
#[arg(value_name = "REVSET", add = ArgValueCompleter::new(complete::revset_expression_mutable))]
revision: RevisionArg,
/// Ignored (but lets you pass `-r` for consistency with other commands)
#[arg(short = 'r', hide = true)]

View File

@ -12,15 +12,9 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use std::convert::Infallible;
use clap_complete::ArgValueCandidates;
use clap_complete::ArgValueCompleter;
use itertools::Itertools as _;
use itertools::Itertools;
use jj_lib::commit::Commit;
use jj_lib::dag_walk::topo_order_reverse_ok;
use jj_lib::graph::reverse_graph;
use jj_lib::graph::GraphEdge;
use jj_lib::matchers::EverythingMatcher;
use tracing::instrument;
@ -30,11 +24,11 @@ use crate::cli_util::CommandHelper;
use crate::cli_util::LogContentFormat;
use crate::cli_util::RevisionArg;
use crate::command_error::CommandError;
use crate::complete;
use crate::commit_templater::CommitTemplateLanguage;
use crate::diff_util::DiffFormatArgs;
use crate::graphlog::get_graphlog;
use crate::graphlog::Edge;
use crate::graphlog::GraphStyle;
use crate::templater::TemplateRenderer;
use crate::ui::Ui;
/// Show how a change has evolved over time
@ -43,39 +37,26 @@ use crate::ui::Ui;
/// of a change evolves when the change is updated, rebased, etc.
#[derive(clap::Args, Clone, Debug)]
pub(crate) struct EvologArgs {
#[arg(
long, short,
default_value = "@",
value_name = "REVSET",
add = ArgValueCompleter::new(complete::revset_expression_all),
)]
#[arg(long, short, default_value = "@")]
revision: RevisionArg,
/// Limit number of revisions to show
///
/// Applied after revisions are reordered topologically, but before being
/// reversed.
#[arg(long, short = 'n')]
limit: Option<usize>,
/// Show revisions in the opposite order (older revisions first)
#[arg(long)]
reversed: bool,
// TODO: Delete `-l` alias in jj 0.25+
#[arg(
short = 'l',
hide = true,
conflicts_with = "limit",
value_name = "LIMIT"
)]
deprecated_limit: Option<usize>,
/// Don't show the graph, show a flat list of revisions
#[arg(long)]
no_graph: bool,
/// Render each revision using the given template
///
/// Run `jj log -T` to list the built-in templates.
///
/// You can also specify arbitrary template expressions using the
/// [built-in keywords]. See [`jj help -k templates`] for more
/// information.
///
/// [built-in keywords]:
/// https://jj-vcs.github.io/jj/latest/templates/#commit-keywords
///
/// [`jj help -k templates`]:
/// https://jj-vcs.github.io/jj/latest/templates/
#[arg(long, short = 'T', add = ArgValueCandidates::new(complete::template_aliases))]
/// For the syntax, see https://martinvonz.github.io/jj/latest/templates/
#[arg(long, short = 'T')]
template: Option<String>,
/// Show patch compared to the previous version of this change
///
@ -99,25 +80,31 @@ pub(crate) fn cmd_evolog(
let start_commit = workspace_command.resolve_single_rev(ui, &args.revision)?;
let diff_renderer = workspace_command.diff_renderer_for_log(&args.diff_format, args.patch)?;
let graph_style = GraphStyle::from_settings(workspace_command.settings())?;
let with_content_format = LogContentFormat::new(ui, workspace_command.settings())?;
let graph_style = GraphStyle::from_settings(command.settings())?;
let with_content_format = LogContentFormat::new(ui, command.settings())?;
let template: TemplateRenderer<Commit>;
let node_template: TemplateRenderer<Option<Commit>>;
let template;
let node_template;
{
let language = workspace_command.commit_template_language();
let template_string = match &args.template {
Some(value) => value.to_string(),
None => workspace_command.settings().get_string("templates.log")?,
None => command.settings().config().get_string("templates.log")?,
};
template = workspace_command
.parse_template(ui, &language, &template_string)?
.parse_template(
ui,
&language,
&template_string,
CommitTemplateLanguage::wrap_commit,
)?
.labeled("log");
node_template = workspace_command
.parse_template(
ui,
&language,
&get_node_template(graph_style, workspace_command.settings())?,
&get_node_template(graph_style, command.settings())?,
CommitTemplateLanguage::wrap_commit_opt,
)?
.labeled("node");
}
@ -145,34 +132,24 @@ pub(crate) fn cmd_evolog(
predecessors
},
)?;
if let Some(n) = args.limit {
if args.deprecated_limit.is_some() {
writeln!(
ui.warning_default(),
"The -l shorthand is deprecated, use -n instead."
)?;
}
if let Some(n) = args.limit.or(args.deprecated_limit) {
commits.truncate(n);
}
if !args.no_graph {
let mut raw_output = formatter.raw()?;
let mut graph = get_graphlog(graph_style, raw_output.as_mut());
let commit_nodes = commits
.into_iter()
.map(|c| {
let ids = c.predecessor_ids();
let edges = ids.iter().cloned().map(GraphEdge::direct).collect_vec();
(c, edges)
})
.collect_vec();
let commit_nodes = if args.reversed {
reverse_graph(
commit_nodes.into_iter().map(Result::<_, Infallible>::Ok),
Commit::id,
)
.unwrap()
} else {
commit_nodes
};
for node in commit_nodes {
let (commit, edges) = node;
for commit in commits {
let edges = commit
.predecessor_ids()
.iter()
.map(|id| Edge::Direct(id.clone()))
.collect_vec();
let mut buffer = vec![];
let within_graph = with_content_format.sub_width(graph.width(commit.id(), &edges));
within_graph.write(ui.new_formatter(&mut buffer).as_mut(), |formatter| {
@ -202,10 +179,6 @@ pub(crate) fn cmd_evolog(
)?;
}
} else {
if args.reversed {
commits.reverse();
}
for commit in commits {
with_content_format
.write(formatter, |formatter| template.format(&commit, formatter))?;

View File

@ -12,20 +12,16 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use clap_complete::ArgValueCandidates;
use clap_complete::ArgValueCompleter;
use jj_lib::annotate::get_annotation_for_file;
use jj_lib::annotate::FileAnnotation;
use jj_lib::annotate::FileAnnotator;
use jj_lib::commit::Commit;
use jj_lib::repo::Repo;
use jj_lib::revset::RevsetExpression;
use tracing::instrument;
use crate::cli_util::CommandHelper;
use crate::cli_util::RevisionArg;
use crate::command_error::user_error;
use crate::command_error::CommandError;
use crate::commit_templater::AnnotationLine;
use crate::complete;
use crate::templater::TemplateRenderer;
use crate::ui::Ui;
@ -33,38 +29,16 @@ use crate::ui::Ui;
///
/// Annotates a revision line by line. Each line includes the source change that
/// introduced the associated line. A path to the desired file must be provided.
/// The prefix for each annotated line can be customized via a
/// template with the `templates.annotate_commit_summary` config variable.
#[derive(clap::Args, Clone, Debug)]
pub(crate) struct FileAnnotateArgs {
/// the file to annotate
#[arg(
value_hint = clap::ValueHint::AnyPath,
add = ArgValueCompleter::new(complete::all_revision_files),
)]
#[arg(value_hint = clap::ValueHint::AnyPath)]
path: String,
/// an optional revision to start at
#[arg(
long,
short,
value_name = "REVSET",
add = ArgValueCompleter::new(complete::revset_expression_all),
)]
#[arg(long, short)]
revision: Option<RevisionArg>,
/// Render each line using the given template
///
/// All 0-argument methods of the [`AnnotationLine` type] are available as
/// keywords in the template expression. See [`jj help -k templates`] for
/// more information.
///
/// If not specified, this defaults to the `templates.file_annotate`
/// setting.
///
/// [`AnnotationLine` type]:
/// https://jj-vcs.github.io/jj/latest/templates/#annotationline-type
///
/// [`jj help -k templates`]:
/// https://jj-vcs.github.io/jj/latest/templates/
#[arg(long, short = 'T', add = ArgValueCandidates::new(complete::template_aliases))]
template: Option<String>,
}
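To illustrate the template-driven annotation output described above, here is a hedged sketch of a `templates.file_annotate` override. The keyword names are assumptions drawn from the `AnnotationLine` fields that appear later in this diff (`commit`, `line_number`, `content`); the built-in default template differs.

```toml
# Hypothetical override; keyword names are taken from the AnnotationLine
# fields shown in this diff, not from official documentation.
[templates]
file_annotate = 'commit.change_id().shortest(8) ++ " " ++ line_number ++ ": " ++ content'
```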
#[instrument(skip_all)]
@ -89,22 +63,13 @@ pub(crate) fn cmd_file_annotate(
)));
}
let template_text = match &args.template {
Some(value) => value.clone(),
None => workspace_command
.settings()
.get_string("templates.file_annotate")?,
};
let language = workspace_command.commit_template_language();
let template = workspace_command.parse_template(ui, &language, &template_text)?;
let annotate_commit_summary_text = command
.settings()
.config()
.get_string("templates.annotate_commit_summary")?;
let template = workspace_command.parse_commit_template(ui, &annotate_commit_summary_text)?;
// TODO: Should we add an option to limit the domain to e.g. recent commits?
// Note that this is probably different from "--skip REVS", which won't
// exclude the revisions, but will ignore diffs in those revisions as if
// ancestor revisions had new content.
let mut annotator = FileAnnotator::from_commit(&starting_commit, &file_path)?;
annotator.compute(repo.as_ref(), &RevsetExpression::all())?;
let annotation = annotator.to_annotation();
let annotation = get_annotation_for_file(repo.as_ref(), &starting_commit, &file_path)?;
render_file_annotation(repo.as_ref(), ui, &template, &annotation)?;
Ok(())
@ -113,29 +78,16 @@ pub(crate) fn cmd_file_annotate(
fn render_file_annotation(
repo: &dyn Repo,
ui: &mut Ui,
template_render: &TemplateRenderer<AnnotationLine>,
template_render: &TemplateRenderer<Commit>,
annotation: &FileAnnotation,
) -> Result<(), CommandError> {
ui.request_pager();
let mut formatter = ui.stdout_formatter();
let mut last_id = None;
let default_id = repo.store().root_commit_id();
for (line_number, (commit_id, content)) in annotation.lines().enumerate() {
/* At least in cases where the repository was jj-initialized shallowly,
then unshallow'd with git, some changes will not have a commit id
because jj does not import the unshallow'd commits. So we default
to the root commit id for now. */
let commit_id = commit_id.unwrap_or(default_id);
for (line_no, (commit_id, line)) in annotation.lines().enumerate() {
let commit = repo.store().get_commit(commit_id)?;
let first_line_in_hunk = last_id != Some(commit_id);
let annotation_line = AnnotationLine {
commit,
content: content.to_owned(),
line_number: line_number + 1,
first_line_in_hunk,
};
template_render.format(&annotation_line, formatter.as_mut())?;
last_id = Some(commit_id);
template_render.format(&commit, formatter.as_mut())?;
write!(formatter, " {:>4}: ", line_no + 1)?;
formatter.write_all(line)?;
}
Ok(())

View File

@ -12,10 +12,9 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use clap_complete::ArgValueCompleter;
use jj_lib::backend::TreeValue;
use jj_lib::merged_tree::MergedTreeBuilder;
use jj_lib::object_id::ObjectId as _;
use jj_lib::object_id::ObjectId;
use tracing::instrument;
use crate::cli_util::print_unmatched_explicit_paths;
@ -23,7 +22,6 @@ use crate::cli_util::CommandHelper;
use crate::cli_util::RevisionArg;
use crate::command_error::user_error;
use crate::command_error::CommandError;
use crate::complete;
use crate::ui::Ui;
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, clap::ValueEnum)]
@ -45,20 +43,10 @@ enum ChmodMode {
pub(crate) struct FileChmodArgs {
mode: ChmodMode,
/// The revision to update
#[arg(
long, short,
default_value = "@",
value_name = "REVSET",
add = ArgValueCompleter::new(complete::revset_expression_mutable),
)]
#[arg(long, short, default_value = "@")]
revision: RevisionArg,
/// Paths to change the executable bit for
#[arg(
required = true,
value_name = "FILESETS",
value_hint = clap::ValueHint::AnyPath,
add = ArgValueCompleter::new(complete::all_revision_files),
)]
#[arg(required = true, value_hint = clap::ValueHint::AnyPath)]
paths: Vec<String>,
}
@ -87,7 +75,7 @@ pub(crate) fn cmd_file_chmod(
let store = tree.store();
let mut tree_builder = MergedTreeBuilder::new(commit.tree_id().clone());
for (repo_path, result) in tree.entries_matching(matcher.as_ref()) {
let mut tree_value = result?;
let tree_value = result?;
let user_error_with_path = |msg: &str| {
user_error(format!(
"{msg} at '{}'.",
@ -106,17 +94,22 @@ pub(crate) fn cmd_file_chmod(
};
return Err(user_error_with_path(message));
}
for value in tree_value.iter_mut().flatten() {
if let TreeValue::File { id: _, executable } = value {
*executable = executable_bit;
let new_tree_value = tree_value.map(|value| match value {
Some(TreeValue::File { id, executable: _ }) => Some(TreeValue::File {
id: id.clone(),
executable: executable_bit,
}),
Some(TreeValue::Conflict(_)) => {
panic!("Conflict sides must not themselves be conflicts")
}
}
tree_builder.set_or_remove(repo_path, tree_value);
value => value.clone(),
});
tree_builder.set_or_remove(repo_path, new_tree_value);
}
let new_tree_id = tree_builder.write_tree(store)?;
tx.repo_mut()
.rewrite_commit(&commit)
.rewrite_commit(command.settings(), &commit)
.set_tree_id(new_tree_id)
.write()?;
tx.finish(

View File

@ -12,45 +12,23 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use clap_complete::ArgValueCompleter;
use std::io::Write;
use tracing::instrument;
use crate::cli_util::CommandHelper;
use crate::cli_util::RevisionArg;
use crate::command_error::CommandError;
use crate::commit_templater::TreeEntry;
use crate::complete;
use crate::templater::TemplateRenderer;
use crate::ui::Ui;
/// List files in a revision
#[derive(clap::Args, Clone, Debug)]
pub(crate) struct FileListArgs {
/// The revision to list files in
#[arg(
long, short,
default_value = "@",
value_name = "REVSET",
add = ArgValueCompleter::new(complete::revset_expression_all),
)]
#[arg(long, short, default_value = "@")]
revision: RevisionArg,
/// Render each file entry using the given template
///
/// All 0-argument methods of the [`TreeEntry` type] are available as
/// keywords in the template expression. See [`jj help -k templates`] for
/// more information.
///
/// [`TreeEntry` type]:
/// https://jj-vcs.github.io/jj/latest/templates/#treeentry-type
///
/// [`jj help -k templates`]:
/// https://jj-vcs.github.io/jj/latest/templates/
#[arg(long, short = 'T')]
template: Option<String>,
/// Only list files matching these prefixes (instead of all files)
#[arg(value_name = "FILESETS", value_hint = clap::ValueHint::AnyPath)]
#[arg(value_hint = clap::ValueHint::AnyPath)]
paths: Vec<String>,
}
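Since the new `-T` option above falls back to `templates.file_list`, a user override might look roughly like the sketch below; both the `path` keyword and its `display()` method are assumptions based on the `TreeEntry` fields used later in this hunk.

```toml
# Hypothetical override; assumes `path` is exposed as a TreeEntry keyword
# with a display() method. The built-in default may differ.
[templates]
file_list = 'path.display() ++ "\n"'
```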
@ -66,25 +44,13 @@ pub(crate) fn cmd_file_list(
let matcher = workspace_command
.parse_file_patterns(ui, &args.paths)?
.to_matcher();
let template: TemplateRenderer<TreeEntry> = {
let language = workspace_command.commit_template_language();
let text = match &args.template {
Some(value) => value.to_owned(),
None => workspace_command.settings().get("templates.file_list")?,
};
workspace_command
.parse_template(ui, &language, &text)?
.labeled("file_list")
};
ui.request_pager();
let mut formatter = ui.stdout_formatter();
for (path, value) in tree.entries_matching(matcher.as_ref()) {
let entry = TreeEntry {
path,
value: value?,
};
template.format(&entry, formatter.as_mut())?;
for (name, _value) in tree.entries_matching(matcher.as_ref()) {
writeln!(
ui.stdout(),
"{}",
&workspace_command.format_file_path(&name)
)?;
}
Ok(())
}

View File

@ -12,12 +12,12 @@
// See the License for the specific language governing permissions and
// limitations under the License.
mod annotate;
mod chmod;
mod list;
mod show;
mod track;
mod untrack;
pub mod annotate;
pub mod chmod;
pub mod list;
pub mod show;
pub mod track;
pub mod untrack;
use crate::cli_util::CommandHelper;
use crate::command_error::CommandError;

View File

@ -13,9 +13,8 @@
// limitations under the License.
use std::io;
use std::io::Write as _;
use std::io::Write;
use clap_complete::ArgValueCompleter;
use jj_lib::backend::BackendResult;
use jj_lib::conflicts::materialize_merge_result;
use jj_lib::conflicts::materialize_tree_value;
@ -23,9 +22,9 @@ use jj_lib::conflicts::MaterializedTreeValue;
use jj_lib::fileset::FilePattern;
use jj_lib::fileset::FilesetExpression;
use jj_lib::merge::MergedTreeValue;
use jj_lib::repo::Repo as _;
use jj_lib::repo::Repo;
use jj_lib::repo_path::RepoPath;
use pollster::FutureExt as _;
use pollster::FutureExt;
use tracing::instrument;
use crate::cli_util::print_unmatched_explicit_paths;
@ -34,7 +33,6 @@ use crate::cli_util::RevisionArg;
use crate::cli_util::WorkspaceCommandHelper;
use crate::command_error::user_error;
use crate::command_error::CommandError;
use crate::complete;
use crate::ui::Ui;
/// Print contents of files in a revision
@ -44,20 +42,10 @@ use crate::ui::Ui;
#[derive(clap::Args, Clone, Debug)]
pub(crate) struct FileShowArgs {
/// The revision to get the file contents from
#[arg(
long, short,
default_value = "@",
value_name = "REVSET",
add = ArgValueCompleter::new(complete::revset_expression_all),
)]
#[arg(long, short, default_value = "@")]
revision: RevisionArg,
/// Paths to print
#[arg(
required = true,
value_name = "FILESETS",
value_hint = clap::ValueHint::FilePath,
add = ArgValueCompleter::new(complete::all_revision_files),
)]
#[arg(required = true, value_hint = clap::ValueHint::FilePath)]
paths: Vec<String>,
}
@ -129,15 +117,11 @@ fn write_tree_entries<P: AsRef<RepoPath>>(
"Path '{ui_path}' exists but access is denied: {err}"
)?;
}
MaterializedTreeValue::File(mut file) => {
io::copy(&mut file.reader, &mut ui.stdout_formatter().as_mut())?;
MaterializedTreeValue::File { mut reader, .. } => {
io::copy(&mut reader, &mut ui.stdout_formatter().as_mut())?;
}
MaterializedTreeValue::FileConflict(file) => {
materialize_merge_result(
&file.contents,
workspace_command.env().conflict_marker_style(),
&mut ui.stdout_formatter(),
)?;
MaterializedTreeValue::FileConflict { contents, .. } => {
materialize_merge_result(&contents, &mut ui.stdout_formatter())?;
}
MaterializedTreeValue::OtherConflict { id } => {
ui.stdout_formatter().write_all(id.describe().as_bytes())?;

View File

@ -12,17 +12,11 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use std::io;
use std::io::Write as _;
use std::io::Write;
use indoc::writedoc;
use itertools::Itertools as _;
use jj_lib::repo_path::RepoPathUiConverter;
use jj_lib::working_copy::SnapshotStats;
use jj_lib::working_copy::UntrackedReason;
use jj_lib::working_copy::SnapshotOptions;
use tracing::instrument;
use crate::cli_util::print_untracked_files;
use crate::cli_util::CommandHelper;
use crate::command_error::CommandError;
use crate::ui::Ui;
@ -39,7 +33,7 @@ use crate::ui::Ui;
#[derive(clap::Args, Clone, Debug)]
pub(crate) struct FileTrackArgs {
/// Paths to track
#[arg(required = true, value_name = "FILESETS", value_hint = clap::ValueHint::AnyPath)]
#[arg(required = true, value_hint = clap::ValueHint::AnyPath)]
paths: Vec<String>,
}
@ -49,73 +43,26 @@ pub(crate) fn cmd_file_track(
command: &CommandHelper,
args: &FileTrackArgs,
) -> Result<(), CommandError> {
let (mut workspace_command, auto_stats) = command.workspace_helper_with_stats(ui)?;
let mut workspace_command = command.workspace_helper(ui)?;
let matcher = workspace_command
.parse_file_patterns(ui, &args.paths)?
.to_matcher();
let options = workspace_command.snapshot_options_with_start_tracking_matcher(&matcher)?;
let mut tx = workspace_command.start_transaction().into_inner();
let base_ignores = workspace_command.base_ignores()?;
let (mut locked_ws, _wc_commit) = workspace_command.start_working_copy_mutation()?;
let (_tree_id, track_stats) = locked_ws.locked_wc().snapshot(&options)?;
let num_rebased = tx.repo_mut().rebase_descendants()?;
locked_ws.locked_wc().snapshot(&SnapshotOptions {
base_ignores,
fsmonitor_settings: command.settings().fsmonitor_settings()?,
progress: None,
start_tracking_matcher: &matcher,
max_new_file_size: command.settings().max_new_file_size()?,
})?;
let num_rebased = tx.repo_mut().rebase_descendants(command.settings())?;
if num_rebased > 0 {
writeln!(ui.status(), "Rebased {num_rebased} descendant commits")?;
}
let repo = tx.commit("track paths")?;
let repo = tx.commit("track paths");
locked_ws.finish(repo.op_id().clone())?;
print_track_snapshot_stats(
ui,
auto_stats,
track_stats,
workspace_command.env().path_converter(),
)?;
Ok(())
}
pub fn print_track_snapshot_stats(
ui: &Ui,
auto_stats: SnapshotStats,
track_stats: SnapshotStats,
path_converter: &RepoPathUiConverter,
) -> io::Result<()> {
let mut merged_untracked_paths = auto_stats.untracked_paths;
for (path, reason) in track_stats
.untracked_paths
.into_iter()
// focus on files that are now tracked with `file track`
.filter(|(_, reason)| !matches!(reason, UntrackedReason::FileNotAutoTracked))
{
// if the path was previously rejected because it wasn't tracked, update its
// reason
merged_untracked_paths.insert(path, reason);
}
print_untracked_files(ui, &merged_untracked_paths, path_converter)?;
let (large_files, sizes): (Vec<_>, Vec<_>) = merged_untracked_paths
.iter()
.filter_map(|(path, reason)| match reason {
UntrackedReason::FileTooLarge { size, .. } => Some((path, *size)),
UntrackedReason::FileNotAutoTracked => None,
})
.unzip();
if let Some(size) = sizes.iter().max() {
let large_files_list = large_files
.iter()
.map(|path| path_converter.format_file_path(path))
.join(" ");
writedoc!(
ui.hint_default(),
r"
This is to prevent large files from being added by accident. You can fix this by:
- Adding the file to `.gitignore`
- Run `jj config set --repo snapshot.max-new-file-size {size}`
This will increase the maximum file size allowed for new files, in this repository only.
- Run `jj --config snapshot.max-new-file-size={size} file track {large_files_list}`
This will increase the maximum file size allowed for new files, for this command only.
"
)?;
}
Ok(())
}
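The hint above points at the `snapshot.max-new-file-size` setting; as a rough sketch, the repository-level form that `jj config set --repo` would produce looks like this (the limit is an arbitrary example, interpreted as a byte count per the hint):

```toml
# Hypothetical repo-level config; 10000000 is just an example byte limit.
[snapshot]
max-new-file-size = 10000000
```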

View File

@ -12,20 +12,18 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use std::io::Write as _;
use std::io::Write;
use clap_complete::ArgValueCompleter;
use itertools::Itertools as _;
use itertools::Itertools;
use jj_lib::merge::Merge;
use jj_lib::merged_tree::MergedTreeBuilder;
use jj_lib::repo::Repo as _;
use jj_lib::repo::Repo;
use jj_lib::working_copy::SnapshotOptions;
use tracing::instrument;
use crate::cli_util::print_snapshot_stats;
use crate::cli_util::CommandHelper;
use crate::command_error::user_error_with_hint;
use crate::command_error::CommandError;
use crate::complete;
use crate::ui::Ui;
/// Stop tracking specified paths in the working copy
@ -35,12 +33,7 @@ pub(crate) struct FileUntrackArgs {
///
/// The paths could be ignored via a .gitignore or .git/info/exclude (in
/// colocated repos).
#[arg(
required = true,
value_name = "FILESETS",
value_hint = clap::ValueHint::AnyPath,
add = ArgValueCompleter::new(complete::all_revision_files),
)]
#[arg(required = true, value_hint = clap::ValueHint::AnyPath)]
paths: Vec<String>,
}
@ -55,11 +48,10 @@ pub(crate) fn cmd_file_untrack(
let matcher = workspace_command
.parse_file_patterns(ui, &args.paths)?
.to_matcher();
let auto_tracking_matcher = workspace_command.auto_tracking_matcher(ui)?;
let options =
workspace_command.snapshot_options_with_start_tracking_matcher(&auto_tracking_matcher)?;
let mut tx = workspace_command.start_transaction().into_inner();
let base_ignores = workspace_command.base_ignores()?;
let auto_tracking_matcher = workspace_command.auto_tracking_matcher(ui)?;
let (mut locked_ws, wc_commit) = workspace_command.start_working_copy_mutation()?;
// Create a new tree without the unwanted files
let mut tree_builder = MergedTreeBuilder::new(wc_commit.tree_id().clone());
@ -70,14 +62,20 @@ pub(crate) fn cmd_file_untrack(
let new_tree_id = tree_builder.write_tree(&store)?;
let new_commit = tx
.repo_mut()
.rewrite_commit(&wc_commit)
.rewrite_commit(command.settings(), &wc_commit)
.set_tree_id(new_tree_id)
.write()?;
// Reset the working copy to the new commit
locked_ws.locked_wc().reset(&new_commit)?;
// Commit the working copy again so we can inform the user if paths couldn't be
// untracked because they're not ignored.
let (wc_tree_id, stats) = locked_ws.locked_wc().snapshot(&options)?;
let wc_tree_id = locked_ws.locked_wc().snapshot(&SnapshotOptions {
base_ignores,
fsmonitor_settings: command.settings().fsmonitor_settings()?,
progress: None,
start_tracking_matcher: &auto_tracking_matcher,
max_new_file_size: command.settings().max_new_file_size()?,
})?;
if wc_tree_id != *new_commit.tree_id() {
let wc_tree = store.get_root_tree(&wc_tree_id)?;
let added_back = wc_tree.entries_matching(matcher.as_ref()).collect_vec();
@ -105,12 +103,11 @@ Make sure they're ignored, then try again.",
locked_ws.locked_wc().reset(&new_commit)?;
}
}
let num_rebased = tx.repo_mut().rebase_descendants()?;
let num_rebased = tx.repo_mut().rebase_descendants(command.settings())?;
if num_rebased > 0 {
writeln!(ui.status(), "Rebased {num_rebased} descendant commits")?;
}
let repo = tx.commit("untrack paths")?;
let repo = tx.commit("untrack paths");
locked_ws.finish(repo.op_id().clone())?;
print_snapshot_stats(ui, &stats, workspace_command.env().path_converter())?;
Ok(())
}

View File

@ -13,26 +13,35 @@
// limitations under the License.
use std::collections::HashMap;
use std::io::Write as _;
use std::path::Path;
use std::collections::HashSet;
use std::io::Write;
use std::process::Stdio;
use std::sync::mpsc::channel;
use clap_complete::ArgValueCompleter;
use itertools::Itertools as _;
use futures::StreamExt;
use itertools::Itertools;
use jj_lib::backend::BackendError;
use jj_lib::backend::CommitId;
use jj_lib::backend::FileId;
use jj_lib::backend::TreeValue;
use jj_lib::fileset;
use jj_lib::fileset::FilesetDiagnostics;
use jj_lib::fileset::FilesetExpression;
use jj_lib::fix::fix_files;
use jj_lib::fix::FileToFix;
use jj_lib::fix::FixError;
use jj_lib::fix::ParallelFileFixer;
use jj_lib::matchers::EverythingMatcher;
use jj_lib::matchers::Matcher;
use jj_lib::merged_tree::MergedTree;
use jj_lib::merged_tree::MergedTreeBuilder;
use jj_lib::merged_tree::TreeDiffEntry;
use jj_lib::repo::Repo;
use jj_lib::repo_path::RepoPathBuf;
use jj_lib::repo_path::RepoPathUiConverter;
use jj_lib::settings::UserSettings;
use jj_lib::revset::RevsetExpression;
use jj_lib::revset::RevsetIteratorExt;
use jj_lib::store::Store;
use pollster::FutureExt as _;
use jj_lib::tree::Tree;
use pollster::FutureExt;
use rayon::iter::IntoParallelIterator;
use rayon::prelude::ParallelIterator;
use tracing::instrument;
use crate::cli_util::CommandHelper;
@ -40,7 +49,7 @@ use crate::cli_util::RevisionArg;
use crate::command_error::config_error;
use crate::command_error::print_parse_diagnostics;
use crate::command_error::CommandError;
use crate::complete;
use crate::config::to_toml_value;
use crate::config::CommandNameAndArgs;
use crate::ui::Ui;
@ -75,9 +84,6 @@ use crate::ui::Ui;
/// empty, no files will be affected by the tool. If there are multiple
/// patterns, the tool is applied only once to each file in the union of the
/// patterns.
/// - `enabled`: Enables or disables the tool. If omitted, the tool is enabled.
/// This is useful for defining disabled tools in user configuration that can
/// be enabled in individual repositories with one config setting.
///
/// For example, the following configuration defines how two code formatters
/// (`clang-format` and `black`) will apply to three different file extensions
@ -98,21 +104,30 @@ use crate::ui::Ui;
/// currently unspecified, and may change between releases. If two tools affect
/// the same file, the second tool to run will receive its input from the
/// output of the first tool.
///
/// There is also a deprecated configuration schema that defines a single
/// command that will affect all changed files in the specified revisions. For
/// example, the following configuration would apply the Rust formatter to all
/// changed files (whether they are Rust files or not):
///
/// ```toml
/// [fix]
/// tool-command = ["rustfmt", "--emit", "stdout"]
/// ```
///
/// The tool defined by `tool-command` acts as if it was the first entry in
/// `fix.tools`, and uses `patterns = ["all()"]`. Support for `tool-command`
/// will be removed in a future version.
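Putting the schema described above together, a single-tool `fix.tools` entry could look like the hedged sketch below. The `rustfmt` command is borrowed from the deprecated `tool-command` example; the `glob:` fileset pattern is an assumption about pattern syntax, and `enabled` only exists in the newer schema.

```toml
# Hypothetical entry; the command reuses the deprecated example above,
# while the pattern syntax and tool name are illustrative assumptions.
[fix.tools.rustfmt]
command = ["rustfmt", "--emit", "stdout"]
patterns = ["glob:'**/*.rs'"]
enabled = true
```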
#[derive(clap::Args, Clone, Debug)]
#[command(verbatim_doc_comment)]
pub(crate) struct FixArgs {
/// Fix files in the specified revision(s) and their descendants. If no
/// revisions are specified, this defaults to the `revsets.fix` setting, or
/// `reachable(@, mutable())` if it is not set.
#[arg(
long,
short,
value_name = "REVSETS",
add = ArgValueCompleter::new(complete::revset_expression_mutable),
)]
#[arg(long, short)]
source: Vec<RevisionArg>,
/// Fix only these paths
#[arg(value_name = "FILESETS", value_hint = clap::ValueHint::AnyPath)]
#[arg(value_hint = clap::ValueHint::AnyPath)]
paths: Vec<String>,
/// Fix unchanged files in addition to changed ones. If no paths are
/// specified, all files in the repo will be fixed.
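Relatedly, the `revsets.fix` fallback mentioned in the `--source` help above can be spelled out in user configuration; the value below simply restates the documented default.

```toml
# Restates the documented default revset for `jj fix`.
[revsets]
fix = "reachable(@, mutable())"
```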
@ -127,10 +142,9 @@ pub(crate) fn cmd_fix(
args: &FixArgs,
) -> Result<(), CommandError> {
let mut workspace_command = command.workspace_helper(ui)?;
let workspace_root = workspace_command.workspace_root().to_owned();
let tools_config = get_tools_config(ui, workspace_command.settings())?;
let tools_config = get_tools_config(ui, command.settings().config())?;
let root_commits: Vec<CommitId> = if args.source.is_empty() {
let revs = workspace_command.settings().get_string("revsets.fix")?;
let revs = command.settings().config().get_string("revsets.fix")?;
workspace_command.parse_revset(ui, &RevisionArg::from(revs))?
} else {
workspace_command.parse_union_revsets(ui, &args.source)?
@ -143,102 +157,240 @@ pub(crate) fn cmd_fix(
.to_matcher();
let mut tx = workspace_command.start_transaction();
let mut parallel_fixer = ParallelFileFixer::new(|store, file_to_fix| {
fix_one_file(&workspace_root, &tools_config, store, file_to_fix)
});
let summary = fix_files(
root_commits,
&matcher,
args.include_unchanged_files,
tx.repo_mut(),
&mut parallel_fixer,
// Collect all of the unique `ToolInput`s we're going to use. Tools should be
// deterministic, and should not consider outside information, so it is safe to
// deduplicate inputs that correspond to multiple files or commits. This is
// typically more efficient, but it does prevent certain use cases like
// providing commit IDs as inputs to be inserted into files. We also need to
// record the mapping between tool inputs and paths/commits, to efficiently
// rewrite the commits later.
//
// If a path is being fixed in a particular commit, it must also be fixed in all
// that commit's descendants. We do this as a way of propagating changes,
// under the assumption that it is more useful than performing a rebase and
// risking merge conflicts. In the case of code formatters, rebasing wouldn't
// reliably produce well formatted code anyway. Deduplicating inputs helps
// to prevent quadratic growth in the number of tool executions required for
// doing this in long chains of commits with disjoint sets of modified files.
let commits: Vec<_> = RevsetExpression::commits(root_commits.clone())
.descendants()
.evaluate_programmatic(tx.base_repo().as_ref())?
.iter()
.commits(tx.repo().store())
.try_collect()?;
let mut unique_tool_inputs: HashSet<ToolInput> = HashSet::new();
let mut commit_paths: HashMap<CommitId, HashSet<RepoPathBuf>> = HashMap::new();
for commit in commits.iter().rev() {
let mut paths: HashSet<RepoPathBuf> = HashSet::new();
// If --include-unchanged-files, we always fix every matching file in the tree.
// Otherwise, we fix the matching changed files in this commit, plus any that
// were fixed in ancestors, so we don't lose those changes. We do this
// instead of rebasing onto those changes, to avoid merge conflicts.
let parent_tree = if args.include_unchanged_files {
MergedTree::resolved(Tree::empty(tx.repo().store().clone(), RepoPathBuf::root()))
} else {
for parent_id in commit.parent_ids() {
if let Some(parent_paths) = commit_paths.get(parent_id) {
paths.extend(parent_paths.iter().cloned());
}
}
commit.parent_tree(tx.repo())?
};
// TODO: handle copy tracking
let mut diff_stream = parent_tree.diff_stream(&commit.tree()?, &matcher);
async {
while let Some(TreeDiffEntry {
path: repo_path,
values,
}) = diff_stream.next().await
{
let (_before, after) = values?;
// Deleted files have no file content to fix, and they have no terms in `after`,
// so we don't add any tool inputs for them. Conflicted files produce one tool
// input for each side of the conflict.
for term in after.into_iter().flatten() {
// We currently only support fixing the content of normal files, so we skip
// directories and symlinks, and we ignore the executable bit.
if let TreeValue::File { id, executable: _ } = term {
// TODO: Skip the file if its content is larger than some configured size,
// preferably without actually reading it yet.
let tool_input = ToolInput {
file_id: id.clone(),
repo_path: repo_path.clone(),
};
unique_tool_inputs.insert(tool_input.clone());
paths.insert(repo_path.clone());
}
}
}
Ok::<(), BackendError>(())
}
.block_on()?;
commit_paths.insert(commit.id().clone(), paths);
}
// Run the configured tool on all of the chosen inputs.
let fixed_file_ids = fix_file_ids(
tx.repo().store().as_ref(),
&tools_config,
&unique_tool_inputs,
)?;
// Substitute the fixed file IDs into all of the affected commits. Currently,
// fixes cannot delete or rename files, change the executable bit, or modify
// other parts of the commit like the description.
let mut num_checked_commits = 0;
let mut num_fixed_commits = 0;
tx.repo_mut().transform_descendants(
command.settings(),
root_commits.iter().cloned().collect_vec(),
|mut rewriter| {
// TODO: Build the trees in parallel before `transform_descendants()` and only
// keep the tree IDs in memory, so we can pass them to the rewriter.
let repo_paths = commit_paths.get(rewriter.old_commit().id()).unwrap();
let old_tree = rewriter.old_commit().tree()?;
let mut tree_builder = MergedTreeBuilder::new(old_tree.id().clone());
let mut changes = 0;
for repo_path in repo_paths {
let old_value = old_tree.path_value(repo_path)?;
let new_value = old_value.map(|old_term| {
if let Some(TreeValue::File { id, executable }) = old_term {
let tool_input = ToolInput {
file_id: id.clone(),
repo_path: repo_path.clone(),
};
if let Some(new_id) = fixed_file_ids.get(&tool_input) {
return Some(TreeValue::File {
id: new_id.clone(),
executable: *executable,
});
}
}
old_term.clone()
});
if new_value != old_value {
tree_builder.set_or_remove(repo_path.clone(), new_value);
changes += 1;
}
}
num_checked_commits += 1;
if changes > 0 {
num_fixed_commits += 1;
let new_tree = tree_builder.write_tree(rewriter.mut_repo().store())?;
let builder = rewriter.reparent(command.settings())?;
builder.set_tree_id(new_tree).write()?;
}
Ok(())
},
)?;
writeln!(
ui.status(),
"Fixed {} commits of {} checked.",
summary.num_fixed_commits,
summary.num_checked_commits
"Fixed {num_fixed_commits} commits of {num_checked_commits} checked."
)?;
tx.finish(ui, format!("fixed {} commits", summary.num_fixed_commits))
tx.finish(ui, format!("fixed {num_fixed_commits} commits"))
}
/// Invokes all matching tools (if any) to file_to_fix. If the content is
/// successfully transformed the new content is written and the new FileId is
/// returned. Returns None if the content is unchanged.
///
/// The matching tools are invoked in order, with the result of one tool feeding
/// into the next tool. Returns FixError if there is an error reading or writing
/// the file. However, if a tool invocation fails for whatever reason, the tool
/// is simply skipped and we proceed to invoke the next tool (this is
/// indistinguishable from succeeding with no changes).
/// Represents the API between `jj fix` and the tools it runs.
// TODO: Add the set of changed line/byte ranges, so those can be passed into code formatters via
// flags. This will help avoid introducing unrelated changes when working on code with out of date
// formatting.
#[derive(PartialEq, Eq, Hash, Clone)]
struct ToolInput {
/// File content is the primary input, provided on the tool's standard
/// input. We use the `FileId` as a placeholder here, so we can hold all
/// the inputs in memory without also holding all the content at once.
file_id: FileId,
/// The path is provided to allow passing it into the tool so it can
/// potentially:
/// - Choose different behaviors for different file names, extensions, etc.
/// - Update parts of the file's content that should be derived from the
/// file's path.
repo_path: RepoPathBuf,
}
/// Applies `run_tool()` to the inputs and stores the resulting file content.
///
/// Returns a map describing the subset of `tool_inputs` that resulted in
/// changed file content. Failures when handling an input will cause it to be
/// omitted from the return value, which is indistinguishable from succeeding
/// with no changes.
/// TODO: Better error handling so we can tell the user what went wrong with
/// each failed input.
fn fix_one_file(
workspace_root: &Path,
tools_config: &ToolsConfig,
fn fix_file_ids<'a>(
store: &Store,
file_to_fix: &FileToFix,
) -> Result<Option<FileId>, FixError> {
let mut matching_tools = tools_config
.tools
.iter()
.filter(|tool_config| tool_config.matcher.matches(&file_to_fix.repo_path))
.peekable();
if matching_tools.peek().is_some() {
// The first matching tool gets its input from the committed file, and any
// subsequent matching tool gets its input from the previous matching tool's
// output.
let mut old_content = vec![];
let mut read = store.read_file(&file_to_fix.repo_path, &file_to_fix.file_id)?;
read.read_to_end(&mut old_content)?;
let new_content = matching_tools.fold(old_content.clone(), |prev_content, tool_config| {
match run_tool(
workspace_root,
&tool_config.command,
file_to_fix,
&prev_content,
) {
Ok(next_content) => next_content,
// TODO: Because the stderr is passed through, this isn't always failing
// silently, but it should do something better with the exit code, tool
// name, etc.
Err(_) => prev_content,
tools_config: &ToolsConfig,
tool_inputs: &'a HashSet<ToolInput>,
) -> Result<HashMap<&'a ToolInput, FileId>, CommandError> {
let (updates_tx, updates_rx) = channel();
// TODO: Switch to futures, or document the decision not to. We don't need
// threads unless the threads will be doing more than waiting for pipes.
tool_inputs.into_par_iter().try_for_each_init(
|| updates_tx.clone(),
|updates_tx, tool_input| -> Result<(), CommandError> {
let mut matching_tools = tools_config
.tools
.iter()
.filter(|tool_config| tool_config.matcher.matches(&tool_input.repo_path))
.peekable();
if matching_tools.peek().is_some() {
// The first matching tool gets its input from the committed file, and any
// subsequent matching tool gets its input from the previous matching tool's
// output.
let mut old_content = vec![];
let mut read = store.read_file(&tool_input.repo_path, &tool_input.file_id)?;
read.read_to_end(&mut old_content)?;
let new_content =
matching_tools.fold(old_content.clone(), |prev_content, tool_config| {
match run_tool(&tool_config.command, tool_input, &prev_content) {
Ok(next_content) => next_content,
// TODO: Because the stderr is passed through, this isn't always failing
// silently, but it should do something better with the exit code, tool
// name, etc.
Err(_) => prev_content,
}
});
if new_content != old_content {
// TODO: send futures back over channel
let new_file_id = store
.write_file(&tool_input.repo_path, &mut new_content.as_slice())
.block_on()?;
updates_tx.send((tool_input, new_file_id)).unwrap();
}
}
});
if new_content != old_content {
// TODO: send futures back over channel
let new_file_id = store
.write_file(&file_to_fix.repo_path, &mut new_content.as_slice())
.block_on()?;
return Ok(Some(new_file_id));
}
Ok(())
},
)?;
drop(updates_tx);
let mut result = HashMap::new();
while let Ok((tool_input, new_file_id)) = updates_rx.recv() {
result.insert(tool_input, new_file_id);
}
Ok(None)
Ok(result)
}
/// Runs the `tool_command` to fix the given file content.
///
/// The `old_content` is assumed to be that of the `file_to_fix`'s `FileId`, but
/// The `old_content` is assumed to be that of the `tool_input`'s `FileId`, but
/// this is not verified.
///
/// Returns the new file content, whose value will be the same as `old_content`
/// unless the command introduced changes. Returns `None` if there were any
/// failures when starting, stopping, or communicating with the subprocess.
fn run_tool(
workspace_root: &Path,
tool_command: &CommandNameAndArgs,
file_to_fix: &FileToFix,
tool_input: &ToolInput,
old_content: &[u8],
) -> Result<Vec<u8>, ()> {
// TODO: Pipe stderr so we can tell the user which commit, file, and tool it is
// associated with.
let mut vars: HashMap<&str, &str> = HashMap::new();
vars.insert("path", file_to_fix.repo_path.as_internal_file_string());
let mut command = tool_command.to_command_with_variables(&vars);
tracing::debug!(?command, ?file_to_fix.repo_path, "spawning fix tool");
let mut child = command
.current_dir(workspace_root)
vars.insert("path", tool_input.repo_path.as_internal_file_string());
let mut child = tool_command
.to_command_with_variables(&vars)
.stdin(Stdio::piped())
.stdout(Stdio::piped())
.spawn()
@ -251,7 +403,6 @@ fn run_tool(
Some(child.wait_with_output().or(Err(())))
})
.unwrap()?;
tracing::debug!(?command, ?output.status, "fix tool exited:");
if output.status.success() {
Ok(output.stdout)
} else {
@ -265,8 +416,6 @@ struct ToolConfig {
command: CommandNameAndArgs,
/// The matcher that determines if this tool matches a file.
matcher: Box<dyn Matcher>,
/// Whether the tool is enabled
enabled: bool,
// TODO: Store the `name` field here and print it with the command's stderr, to clearly
// associate any errors/warnings with the tool and its configuration entry.
}
@ -284,59 +433,83 @@ struct ToolsConfig {
struct RawToolConfig {
command: CommandNameAndArgs,
patterns: Vec<String>,
#[serde(default = "default_tool_enabled")]
enabled: bool,
}
fn default_tool_enabled() -> bool {
true
}
/// Parses the `fix.tools` config table.
///
/// Parses the deprecated `fix.tool-command` config as if it was the first entry
/// in `fix.tools`.
///
/// Fails if any of the commands or patterns are obviously unusable, but does
/// not check for issues that might still occur later like missing executables.
/// This is a place where we could fail earlier in some cases, though.
fn get_tools_config(ui: &mut Ui, settings: &UserSettings) -> Result<ToolsConfig, CommandError> {
let mut tools: Vec<ToolConfig> = settings
.table_keys("fix.tools")
// Sort keys early so errors are deterministic.
.sorted()
.map(|name| -> Result<ToolConfig, CommandError> {
let mut diagnostics = FilesetDiagnostics::new();
let tool: RawToolConfig = settings.get(["fix", "tools", name])?;
let expression = FilesetExpression::union_all(
tool.patterns
.iter()
.map(|arg| {
fileset::parse(
&mut diagnostics,
arg,
&RepoPathUiConverter::Fs {
cwd: "".into(),
base: "".into(),
},
)
})
.try_collect()?,
);
print_parse_diagnostics(ui, &format!("In `fix.tools.{name}`"), &diagnostics)?;
Ok(ToolConfig {
command: tool.command,
matcher: expression.to_matcher(),
enabled: tool.enabled,
})
})
.try_collect()?;
if tools.is_empty() {
return Err(config_error("No `fix.tools` are configured"));
fn get_tools_config(ui: &mut Ui, config: &config::Config) -> Result<ToolsConfig, CommandError> {
let mut tools_config = ToolsConfig { tools: Vec::new() };
// TODO: Remove this block of code and associated documentation after at least
// one release where the feature is marked deprecated.
if let Ok(tool_command) = config.get::<CommandNameAndArgs>("fix.tool-command") {
// This doesn't change the displayed indices of the `fix.tools` definitions, and
// doesn't have a `name` that could conflict with them. That would matter more
// if we already had better error handling that made use of the `name`.
tools_config.tools.push(ToolConfig {
command: tool_command,
matcher: Box::new(EverythingMatcher),
});
writeln!(
ui.warning_default(),
r"The `fix.tool-command` config option is deprecated and will be removed in a future version."
)?;
writeln!(
ui.hint_default(),
r###"Replace it with the following:
[fix.tools.legacy-tool-command]
command = {}
patterns = ["all()"]
"###,
to_toml_value(&config.get::<config::Value>("fix.tool-command").unwrap()).unwrap()
)?;
}
tools.retain(|t| t.enabled);
if tools.is_empty() {
if let Ok(tools_table) = config.get_table("fix.tools") {
// Convert the map into a sorted vector early so errors are deterministic.
let mut tools: Vec<ToolConfig> = tools_table
.into_iter()
.sorted_by(|a, b| a.0.cmp(&b.0))
.map(|(name, value)| -> Result<ToolConfig, CommandError> {
let mut diagnostics = FilesetDiagnostics::new();
let tool: RawToolConfig = value.try_deserialize()?;
let expression = FilesetExpression::union_all(
tool.patterns
.iter()
.map(|arg| {
fileset::parse(
&mut diagnostics,
arg,
&RepoPathUiConverter::Fs {
cwd: "".into(),
base: "".into(),
},
)
})
.try_collect()?,
);
print_parse_diagnostics(ui, &format!("In `fix.tools.{name}`"), &diagnostics)?;
Ok(ToolConfig {
command: tool.command,
matcher: expression.to_matcher(),
})
})
.try_collect()?;
tools_config.tools.append(&mut tools);
}
if tools_config.tools.is_empty() {
// TODO: This is not a useful message when one or both fields are present but
// have the wrong type. After removing `fix.tool-command`, it will be simpler to
// propagate any errors from `config.get_array("fix.tools")`.
Err(config_error(
"At least one entry of `fix.tools` must be enabled.".to_string(),
"At least one entry of `fix.tools` or `fix.tool-command` is required.".to_string(),
))
} else {
Ok(ToolsConfig { tools })
Ok(tools_config)
}
}

View File

@ -14,20 +14,18 @@
use std::fs;
use std::io;
use std::io::Write as _;
use std::io::Write;
use std::num::NonZeroU32;
use std::path::Path;
use std::path::PathBuf;
use jj_lib::git;
use jj_lib::git::GitFetch;
use jj_lib::ref_name::RefNameBuf;
use jj_lib::ref_name::RemoteName;
use jj_lib::ref_name::RemoteNameBuf;
use jj_lib::repo::Repo as _;
use jj_lib::git::GitFetchError;
use jj_lib::git::GitFetchStats;
use jj_lib::repo::Repo;
use jj_lib::str_util::StringPattern;
use jj_lib::workspace::Workspace;
use super::write_repository_level_trunk_alias;
use crate::cli_util::CommandHelper;
use crate::cli_util::WorkspaceCommandHelper;
use crate::command_error::cli_error;
@ -35,7 +33,10 @@ use crate::command_error::user_error;
use crate::command_error::user_error_with_message;
use crate::command_error::CommandError;
use crate::commands::git::maybe_add_gitignore;
use crate::git_util::absolute_git_url;
use crate::config::write_config_value_to_file;
use crate::config::ConfigNamePathBuf;
use crate::git_util::get_git_repo;
use crate::git_util::map_git_error;
use crate::git_util::print_git_import_stats;
use crate::git_util::with_remote_git_callbacks;
use crate::ui::Ui;
@ -46,9 +47,7 @@ use crate::ui::Ui;
#[derive(clap::Args, Clone, Debug)]
pub struct GitCloneArgs {
/// URL or path of the Git repo to clone
///
/// Local path will be resolved to absolute form.
#[arg(value_hint = clap::ValueHint::Url)]
#[arg(value_hint = clap::ValueHint::DirPath)]
source: String,
/// Specifies the target directory for the Jujutsu repository clone.
/// If not provided, defaults to a directory named after the last component
@ -58,7 +57,7 @@ pub struct GitCloneArgs {
destination: Option<String>,
/// Name of the newly created remote
#[arg(long = "remote", default_value = "origin")]
remote_name: RemoteNameBuf,
remote_name: String,
/// Whether or not to colocate the Jujutsu repo with the git repo
#[arg(long)]
colocate: bool,
@ -67,6 +66,23 @@ pub struct GitCloneArgs {
depth: Option<NonZeroU32>,
}
fn absolute_git_source(cwd: &Path, source: &str) -> String {
// Git appears to turn a URL-like source into an absolute path if a local git
// directory exists, and fails because '$PWD/https' is an unsupported protocol. Since it would
// be tedious to copy the exact git (or libgit2) behavior, we simply assume a
// source containing ':' is a URL, SSH remote, or absolute path with Windows
// drive letter.
if !source.contains(':') && Path::new(source).exists() {
// It's less likely that cwd isn't utf-8, so just fall back to original source.
cwd.join(source)
.into_os_string()
.into_string()
.unwrap_or_else(|_| source.to_owned())
} else {
source.to_owned()
}
}
fn clone_destination_for_source(source: &str) -> Option<&str> {
let destination = source.strip_suffix(".git").unwrap_or(source);
let destination = destination.strip_suffix('/').unwrap_or(destination);
@ -92,7 +108,7 @@ pub fn cmd_git_clone(
if command.global_args().at_operation.is_some() {
return Err(cli_error("--at-op is not respected"));
}
let source = absolute_git_url(command.cwd(), &args.source)?;
let source = absolute_git_source(command.cwd(), &args.source);
let wc_path_str = args
.destination
.as_deref()
@ -113,16 +129,18 @@ pub fn cmd_git_clone(
// Canonicalize because fs::remove_dir_all() doesn't seem to like e.g.
// `/some/path/.`
let canonical_wc_path = dunce::canonicalize(&wc_path)
let canonical_wc_path: PathBuf = wc_path
.canonicalize()
.map_err(|err| user_error_with_message(format!("Failed to create {wc_path_str}"), err))?;
let clone_result = (|| -> Result<_, CommandError> {
let workspace_command = init_workspace(ui, command, &canonical_wc_path, args.colocate)?;
let mut workspace_command =
configure_remote(ui, command, workspace_command, remote_name, &source)?;
let default_branch = fetch_new_remote(ui, &mut workspace_command, remote_name, args.depth)?;
Ok((workspace_command, default_branch))
})();
let clone_result = do_git_clone(
ui,
command,
args.colocate,
args.depth,
remote_name,
&source,
&canonical_wc_path,
);
if clone_result.is_err() {
let clean_up_dirs = || -> io::Result<()> {
fs::remove_dir_all(canonical_wc_path.join(".jj"))?;
@ -145,19 +163,30 @@ pub fn cmd_git_clone(
}
}
let (mut workspace_command, default_branch) = clone_result?;
if let Some(name) = &default_branch {
let default_symbol = name.to_remote_symbol(remote_name);
write_repository_level_trunk_alias(ui, workspace_command.repo_path(), default_symbol)?;
let (mut workspace_command, stats) = clone_result?;
if let Some(default_branch) = &stats.default_branch {
// Set repository level `trunk()` alias to the default remote branch.
let config_path = workspace_command.repo_path().join("config.toml");
write_config_value_to_file(
&ConfigNamePathBuf::from_iter(["revset-aliases", "trunk()"]),
format!("{default_branch}@{remote_name}").into(),
&config_path,
)?;
writeln!(
ui.status(),
"Setting the revset alias \"trunk()\" to \"{default_branch}@{remote_name}\""
)?;
let default_branch_remote_ref = workspace_command
.repo()
.view()
.get_remote_bookmark(default_symbol);
.get_remote_bookmark(default_branch, remote_name);
if let Some(commit_id) = default_branch_remote_ref.target.as_normal().cloned() {
let mut checkout_tx = workspace_command.start_transaction();
// For convenience, create local bookmark as Git would do.
checkout_tx.repo_mut().track_remote_bookmark(default_symbol);
checkout_tx
.repo_mut()
.track_remote_bookmark(default_branch, remote_name);
if let Ok(commit) = checkout_tx.repo().store().get_commit(&commit_id) {
checkout_tx.check_out(&commit)?;
}
@ -167,64 +196,53 @@ pub fn cmd_git_clone(
Ok(())
}
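For context on the default-branch handling above: the clone path writes a repository-level `trunk()` revset alias pointing at the remote's default branch, so the generated repo config ends up looking roughly like this (the branch and remote names are examples, not values taken from this diff):

```toml
# Sketch of the repo-level config.toml written during `jj git clone`;
# "main" and "origin" stand in for the detected default branch and the
# remote name chosen with --remote.
[revset-aliases]
"trunk()" = "main@origin"
```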
fn init_workspace(
ui: &Ui,
fn do_git_clone(
ui: &mut Ui,
command: &CommandHelper,
wc_path: &Path,
colocate: bool,
) -> Result<WorkspaceCommandHelper, CommandError> {
let settings = command.settings_for_new_workspace(wc_path)?;
let (workspace, repo) = if colocate {
Workspace::init_colocated_git(&settings, wc_path)?
} else {
Workspace::init_internal_git(&settings, wc_path)?
};
let workspace_command = command.for_workable_repo(ui, workspace, repo)?;
maybe_add_gitignore(&workspace_command)?;
Ok(workspace_command)
}
fn configure_remote(
ui: &Ui,
command: &CommandHelper,
workspace_command: WorkspaceCommandHelper,
remote_name: &RemoteName,
source: &str,
) -> Result<WorkspaceCommandHelper, CommandError> {
git::add_remote(workspace_command.repo().store(), remote_name, source)?;
// Reload workspace to apply new remote configuration to
// gix::ThreadSafeRepository behind the store.
let workspace = command.load_workspace_at(
workspace_command.workspace_root(),
workspace_command.settings(),
)?;
let op = workspace
.repo_loader()
.load_operation(workspace_command.repo().op_id())?;
let repo = workspace.repo_loader().load_at(&op)?;
command.for_workable_repo(ui, workspace, repo)
}
fn fetch_new_remote(
ui: &Ui,
workspace_command: &mut WorkspaceCommandHelper,
remote_name: &RemoteName,
depth: Option<NonZeroU32>,
) -> Result<Option<RefNameBuf>, CommandError> {
remote_name: &str,
source: &str,
wc_path: &Path,
) -> Result<(WorkspaceCommandHelper, GitFetchStats), CommandError> {
let (workspace, repo) = if colocate {
Workspace::init_colocated_git(command.settings(), wc_path)?
} else {
Workspace::init_internal_git(command.settings(), wc_path)?
};
let git_repo = get_git_repo(repo.store())?;
writeln!(
ui.status(),
r#"Fetching into new repo in "{}""#,
workspace_command.workspace_root().display()
wc_path.display()
)?;
let git_settings = workspace_command.settings().git_settings()?;
let mut workspace_command = command.for_workable_repo(ui, workspace, repo)?;
maybe_add_gitignore(&workspace_command)?;
git_repo.remote(remote_name, source).unwrap();
let mut fetch_tx = workspace_command.start_transaction();
let mut git_fetch = GitFetch::new(fetch_tx.repo_mut(), &git_settings)?;
with_remote_git_callbacks(ui, |cb| {
git_fetch.fetch(remote_name, &[StringPattern::everything()], cb, depth)
let stats = with_remote_git_callbacks(ui, None, |cb| {
git::fetch(
fetch_tx.repo_mut(),
&git_repo,
remote_name,
&[StringPattern::everything()],
cb,
&command.settings().git_settings(),
depth,
)
})
.map_err(|err| match err {
GitFetchError::NoSuchRemote(_) => {
panic!("shouldn't happen as we just created the git remote")
}
GitFetchError::GitImportError(err) => CommandError::from(err),
GitFetchError::InternalGitError(err) => map_git_error(err),
GitFetchError::InvalidBranchPattern => {
unreachable!("we didn't provide any globs")
}
})?;
let default_branch = git_fetch.get_default_branch(remote_name)?;
let import_stats = git_fetch.import_refs()?;
print_git_import_stats(ui, fetch_tx.repo(), &import_stats, true)?;
print_git_import_stats(ui, fetch_tx.repo(), &stats.import_stats, true)?;
fetch_tx.finish(ui, "fetch from git remote into empty repo")?;
Ok(default_branch)
Ok((workspace_command, stats))
}

View File

@ -16,7 +16,7 @@ use jj_lib::git;
use crate::cli_util::CommandHelper;
use crate::command_error::CommandError;
use crate::git_util::print_git_export_stats;
use crate::git_util::print_failed_git_export;
use crate::ui::Ui;
/// Update the underlying Git repo with changes made in the repo
@ -30,8 +30,8 @@ pub fn cmd_git_export(
) -> Result<(), CommandError> {
let mut workspace_command = command.workspace_helper(ui)?;
let mut tx = workspace_command.start_transaction();
let stats = git::export_refs(tx.repo_mut())?;
let failed_refs = git::export_refs(tx.repo_mut())?;
tx.finish(ui, "export git refs")?;
print_git_export_stats(ui, &stats)?;
print_failed_git_export(ui, &failed_refs)?;
Ok(())
}

View File

@ -12,27 +12,17 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::HashSet;
use clap_complete::ArgValueCandidates;
use itertools::Itertools as _;
use jj_lib::config::ConfigGetResultExt as _;
use jj_lib::git;
use jj_lib::git::GitFetch;
use jj_lib::ref_name::RemoteName;
use jj_lib::repo::Repo as _;
use itertools::Itertools;
use jj_lib::repo::Repo;
use jj_lib::settings::ConfigResultExt as _;
use jj_lib::settings::UserSettings;
use jj_lib::str_util::StringPattern;
use crate::cli_util::CommandHelper;
use crate::cli_util::WorkspaceCommandHelper;
use crate::cli_util::WorkspaceCommandTransaction;
use crate::command_error::config_error;
use crate::command_error::user_error;
use crate::command_error::CommandError;
use crate::commands::git::get_single_remote;
use crate::complete;
use crate::git_util::print_git_import_stats;
use crate::git_util::with_remote_git_callbacks;
use crate::git_util::get_git_repo;
use crate::git_util::git_fetch;
use crate::ui::Ui;
/// Fetch from a Git remote
@ -44,173 +34,72 @@ pub struct GitFetchArgs {
/// Fetch only some of the branches
///
/// By default, the specified name matches exactly. Use `glob:` prefix to
/// expand `*` as a glob, e.g. `--branch 'glob:push-*'`. Other wildcard
/// characters such as `?` are *not* supported.
#[arg(
long, short,
alias = "bookmark",
default_value = "glob:*",
value_parser = StringPattern::parse,
add = ArgValueCandidates::new(complete::bookmarks),
)]
/// expand `*` as a glob. The other wildcard characters aren't supported.
#[arg(long, short, alias="bookmark", default_value = "glob:*", value_parser = StringPattern::parse)]
branch: Vec<StringPattern>,
/// The remote to fetch from (only named remotes are supported, can be
/// repeated)
///
/// This defaults to the `git.fetch` setting. If that is not configured, and
/// if there are multiple remotes, the remote named "origin" will be used.
///
/// By default, the specified remote names matches exactly. Use a [string
/// pattern], e.g. `--remote 'glob:*'`, to select remotes using
/// patterns.
///
/// [string pattern]:
/// https://jj-vcs.github.io/jj/latest/revsets#string-patterns
#[arg(
long = "remote",
value_name = "REMOTE",
value_parser = StringPattern::parse,
add = ArgValueCandidates::new(complete::git_remotes),
)]
remotes: Vec<StringPattern>,
#[arg(long = "remote", value_name = "REMOTE")]
remotes: Vec<String>,
/// Fetch from all remotes
#[arg(long, conflicts_with = "remotes")]
all_remotes: bool,
}
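
The help text above says that `--branch` and `--remote` values match exactly unless prefixed with `glob:`, in which case only `*` expands as a wildcard. The following std-only matcher is a minimal illustration of those semantics; `glob_matches` is a made-up name and this is not jj's StringPattern implementation.

// Minimal `*`-only glob matcher mirroring the help text above (sketch only).
fn glob_matches(pattern: &str, text: &str) -> bool {
    match pattern.split_once('*') {
        None => pattern == text,
        Some((prefix, rest)) => match text.strip_prefix(prefix) {
            None => false,
            Some(remainder) => remainder
                .char_indices()
                .map(|(i, _)| i)
                .chain([remainder.len()])
                // Try every split point the `*` could absorb.
                .any(|i| glob_matches(rest, &remainder[i..])),
        },
    }
}

fn main() {
    assert!(glob_matches("push-*", "push-feature"));
    assert!(!glob_matches("push-*", "main"));
    assert!(glob_matches("*", "origin"));
}
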
#[tracing::instrument(skip_all)]
#[tracing::instrument(skip(ui, command))]
pub fn cmd_git_fetch(
ui: &mut Ui,
command: &CommandHelper,
args: &GitFetchArgs,
) -> Result<(), CommandError> {
let mut workspace_command = command.workspace_helper(ui)?;
let remote_patterns = if args.all_remotes {
vec![StringPattern::everything()]
let git_repo = get_git_repo(workspace_command.repo().store())?;
let remotes = if args.all_remotes {
get_all_remotes(&git_repo)?
} else if args.remotes.is_empty() {
get_default_fetch_remotes(ui, &workspace_command)?
get_default_fetch_remotes(ui, command.settings(), &git_repo)?
} else {
args.remotes.clone()
};
let all_remotes = git::get_all_remote_names(workspace_command.repo().store())?;
let mut matching_remotes = HashSet::new();
for pattern in remote_patterns {
let remotes = all_remotes
.iter()
.filter(|r| pattern.matches(r.as_str()))
.collect_vec();
if remotes.is_empty() {
writeln!(ui.warning_default(), "No git remotes matching '{pattern}'")?;
} else {
matching_remotes.extend(remotes);
}
}
if matching_remotes.is_empty() {
return Err(user_error("No git remotes to push"));
}
let remotes = matching_remotes
.iter()
.map(|r| r.as_ref())
.sorted()
.collect_vec();
let mut tx = workspace_command.start_transaction();
do_git_fetch(ui, &mut tx, &remotes, &args.branch)?;
git_fetch(ui, &mut tx, &git_repo, &remotes, &args.branch)?;
tx.finish(
ui,
format!(
"fetch from git remote(s) {}",
remotes.iter().map(|n| n.as_symbol()).join(",")
),
format!("fetch from git remote(s) {}", remotes.iter().join(",")),
)?;
Ok(())
}
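
One side of the hunk above resolves `--remote` patterns inside the command itself: each pattern is matched against all configured remote names, a warning is printed for patterns that match nothing, and the command fails only if no remote matched at all; the matched set is deduplicated and sorted. A simplified sketch of that selection, with `select_remotes` as a made-up name and exact string comparison standing in for StringPattern matching:

use std::collections::BTreeSet;

// Sketch of the remote-selection step; BTreeSet keeps the result deduplicated
// and sorted, much like the .sorted() call in the code above.
fn select_remotes<'a>(
    all_remotes: &'a [String],
    patterns: &[String],
) -> Result<Vec<&'a String>, String> {
    let mut selected = BTreeSet::new();
    for pattern in patterns {
        let matched: Vec<&String> = all_remotes.iter().filter(|r| *r == pattern).collect();
        if matched.is_empty() {
            eprintln!("No git remotes matching '{pattern}'");
        } else {
            selected.extend(matched);
        }
    }
    if selected.is_empty() {
        return Err("No git remotes to fetch from".to_owned());
    }
    Ok(selected.into_iter().collect())
}
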
const DEFAULT_REMOTE: &RemoteName = RemoteName::new("origin");
const DEFAULT_REMOTE: &str = "origin";
fn get_default_fetch_remotes(
ui: &Ui,
workspace_command: &WorkspaceCommandHelper,
) -> Result<Vec<StringPattern>, CommandError> {
settings: &UserSettings,
git_repo: &git2::Repository,
) -> Result<Vec<String>, CommandError> {
const KEY: &str = "git.fetch";
let settings = workspace_command.settings();
if let Ok(remotes) = settings.get::<Vec<String>>(KEY) {
remotes
.into_iter()
.map(|r| parse_remote_pattern(&r))
.try_collect()
} else if let Some(remote) = settings.get_string(KEY).optional()? {
Ok(vec![parse_remote_pattern(&remote)?])
} else if let Some(remote) = get_single_remote(workspace_command.repo().store())? {
if let Ok(remotes) = settings.config().get(KEY) {
Ok(remotes)
} else if let Some(remote) = settings.config().get_string(KEY).optional()? {
Ok(vec![remote])
} else if let Some(remote) = get_single_remote(git_repo)? {
// if nothing was explicitly configured, try to guess
if remote != DEFAULT_REMOTE {
writeln!(
ui.hint_default(),
"Fetching from the only existing remote: {remote}",
remote = remote.as_symbol()
"Fetching from the only existing remote: {remote}"
)?;
}
Ok(vec![StringPattern::exact(remote)])
Ok(vec![remote])
} else {
Ok(vec![StringPattern::exact(DEFAULT_REMOTE)])
Ok(vec![DEFAULT_REMOTE.to_owned()])
}
}
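
Both sides of the diff implement the same fallback order for choosing fetch remotes: explicit `--remote` arguments win, then the `git.fetch` setting, then the repository's single configured remote (with a hint if it is not `origin`), and finally `origin` itself. The consolidated sketch below is an illustration only; `default_fetch_remotes` is a made-up name and config access is reduced to plain values.

// Sketch of the fetch-remote precedence described above.
fn default_fetch_remotes(
    cli_remotes: &[String],
    configured: Option<Vec<String>>, // the `git.fetch` setting, if present
    existing_remotes: &[String],
) -> Vec<String> {
    if !cli_remotes.is_empty() {
        cli_remotes.to_vec()
    } else if let Some(remotes) = configured {
        remotes
    } else if let [only] = existing_remotes {
        if only != "origin" {
            // Mirrors the hint printed when guessing the only existing remote.
            eprintln!("Fetching from the only existing remote: {only}");
        }
        vec![only.clone()]
    } else {
        vec!["origin".to_owned()]
    }
}
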
fn parse_remote_pattern(remote: &str) -> Result<StringPattern, CommandError> {
StringPattern::parse(remote).map_err(config_error)
}
fn do_git_fetch(
ui: &mut Ui,
tx: &mut WorkspaceCommandTransaction,
remotes: &[&RemoteName],
branch_names: &[StringPattern],
) -> Result<(), CommandError> {
let git_settings = tx.settings().git_settings()?;
let mut git_fetch = GitFetch::new(tx.repo_mut(), &git_settings)?;
for remote_name in remotes {
with_remote_git_callbacks(ui, |callbacks| {
git_fetch.fetch(remote_name, branch_names, callbacks, None)
})?;
}
let import_stats = git_fetch.import_refs()?;
print_git_import_stats(ui, tx.repo(), &import_stats, true)?;
warn_if_branches_not_found(ui, tx, branch_names, remotes)
}
fn warn_if_branches_not_found(
ui: &mut Ui,
tx: &WorkspaceCommandTransaction,
branches: &[StringPattern],
remotes: &[&RemoteName],
) -> Result<(), CommandError> {
for branch in branches {
let matches = remotes.iter().any(|&remote| {
let remote = StringPattern::exact(remote);
tx.repo()
.view()
.remote_bookmarks_matching(branch, &remote)
.next()
.is_some()
|| tx
.base_repo()
.view()
.remote_bookmarks_matching(branch, &remote)
.next()
.is_some()
});
if !matches {
writeln!(
ui.warning_default(),
"No branch matching `{branch}` found on any specified/configured remote",
)?;
}
}
Ok(())
fn get_all_remotes(git_repo: &git2::Repository) -> Result<Vec<String>, CommandError> {
let git_remotes = git_repo.remotes()?;
Ok(git_remotes
.iter()
.filter_map(|x| x.map(ToOwned::to_owned))
.collect())
}

View File

@ -32,12 +32,11 @@ pub fn cmd_git_import(
_args: &GitImportArgs,
) -> Result<(), CommandError> {
let mut workspace_command = command.workspace_helper(ui)?;
let git_settings = workspace_command.settings().git_settings()?;
let mut tx = workspace_command.start_transaction();
// In non-colocated repo, Git HEAD will never be moved internally by jj.
// That's why cmd_git_export() doesn't export the HEAD ref.
git::import_head(tx.repo_mut())?;
let stats = git::import_refs(tx.repo_mut(), &git_settings)?;
let stats = git::import_refs(tx.repo_mut(), &command.settings().git_settings())?;
print_git_import_stats(ui, tx.repo(), &stats, true)?;
tx.finish(ui, "import git refs")?;
Ok(())

View File

@ -12,35 +12,33 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use std::io;
use std::io::Write as _;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use std::str;
use std::sync::Arc;
use itertools::Itertools as _;
use jj_lib::file_util;
use jj_lib::git;
use jj_lib::git::parse_git_ref;
use jj_lib::git::GitRefKind;
use jj_lib::git::RefName;
use jj_lib::repo::ReadonlyRepo;
use jj_lib::repo::Repo as _;
use jj_lib::view::View;
use jj_lib::repo::Repo;
use jj_lib::workspace::Workspace;
use super::write_repository_level_trunk_alias;
use crate::cli_util::print_trackable_remote_bookmarks;
use crate::cli_util::start_repo_transaction;
use crate::cli_util::CommandHelper;
use crate::cli_util::WorkspaceCommandHelper;
use crate::command_error::cli_error;
use crate::command_error::internal_error;
use crate::command_error::user_error_with_hint;
use crate::command_error::user_error_with_message;
use crate::command_error::CommandError;
use crate::commands::git::maybe_add_gitignore;
use crate::config::write_config_value_to_file;
use crate::config::ConfigNamePathBuf;
use crate::git_util::get_git_repo;
use crate::git_util::is_colocated_git_workspace;
use crate::git_util::print_git_export_stats;
use crate::git_util::print_failed_git_export;
use crate::git_util::print_git_import_stats;
use crate::ui::Ui;
@ -94,7 +92,7 @@ pub fn cmd_git_init(
let cwd = command.cwd();
let wc_path = cwd.join(&args.destination);
let wc_path = file_util::create_or_reuse_dir(&wc_path)
.and_then(|_| dunce::canonicalize(wc_path))
.and_then(|_| wc_path.canonicalize())
.map_err(|e| user_error_with_message("Failed to create workspace", e))?;
do_init(
@ -115,7 +113,7 @@ pub fn cmd_git_init(
Ok(())
}
fn do_init(
pub fn do_init(
ui: &mut Ui,
command: &CommandHelper,
workspace_root: &Path,
@ -157,20 +155,20 @@ fn do_init(
GitInitMode::Internal
};
let settings = command.settings_for_new_workspace(workspace_root)?;
match &init_mode {
GitInitMode::Colocate => {
let (workspace, repo) = Workspace::init_colocated_git(&settings, workspace_root)?;
let (workspace, repo) =
Workspace::init_colocated_git(command.settings(), workspace_root)?;
let workspace_command = command.for_workable_repo(ui, workspace, repo)?;
maybe_add_gitignore(&workspace_command)?;
}
GitInitMode::External(git_repo_path) => {
let (workspace, repo) =
Workspace::init_external_git(&settings, workspace_root, git_repo_path)?;
Workspace::init_external_git(command.settings(), workspace_root, git_repo_path)?;
// Import refs first so all the reachable commits are indexed in
// chronological order.
let colocated = is_colocated_git_workspace(&workspace, &repo);
let repo = init_git_refs(ui, repo, command.string_args(), colocated)?;
let repo = init_git_refs(ui, command, repo, colocated)?;
let mut workspace_command = command.for_workable_repo(ui, workspace, repo)?;
maybe_add_gitignore(&workspace_command)?;
workspace_command.maybe_snapshot(ui)?;
@ -189,7 +187,7 @@ fn do_init(
print_trackable_remote_bookmarks(ui, workspace_command.repo().view())?;
}
GitInitMode::Internal => {
Workspace::init_internal_git(&settings, workspace_root)?;
Workspace::init_internal_git(command.settings(), workspace_root)?;
}
}
Ok(())
@ -202,26 +200,31 @@ fn do_init(
/// moves the Git HEAD to the working copy parent.
fn init_git_refs(
ui: &mut Ui,
command: &CommandHelper,
repo: Arc<ReadonlyRepo>,
string_args: &[String],
colocated: bool,
) -> Result<Arc<ReadonlyRepo>, CommandError> {
let mut git_settings = repo.settings().git_settings()?;
let mut tx = start_repo_transaction(&repo, string_args);
let mut tx = start_repo_transaction(&repo, command.settings(), command.string_args());
// There should be no old refs to abandon, but enforce it.
let mut git_settings = command.settings().git_settings();
git_settings.abandon_unreachable_commits = false;
let stats = git::import_refs(tx.repo_mut(), &git_settings)?;
print_git_import_stats(ui, tx.repo(), &stats, false)?;
let stats = git::import_some_refs(
tx.repo_mut(),
&git_settings,
// Initial import shouldn't fail because of reserved remote name.
|ref_name| !git::is_reserved_git_remote_ref(ref_name),
)?;
if !tx.repo().has_changes() {
return Ok(repo);
}
print_git_import_stats(ui, tx.repo(), &stats, false)?;
if colocated {
// If git.auto-local-bookmark = true, local bookmarks could be created for
// the imported remote branches.
let stats = git::export_refs(tx.repo_mut())?;
print_git_export_stats(ui, &stats)?;
let failed_refs = git::export_refs(tx.repo_mut())?;
print_failed_git_export(ui, &failed_refs)?;
}
let repo = tx.commit("import git refs")?;
let repo = tx.commit("import git refs");
writeln!(
ui.status(),
"Done importing changes from the underlying Git repo."
@ -234,56 +237,27 @@ pub fn maybe_set_repository_level_trunk_alias(
ui: &Ui,
workspace_command: &WorkspaceCommandHelper,
) -> Result<(), CommandError> {
let git_repo = git::get_git_repo(workspace_command.repo().store())?;
if let Some(reference) = git_repo
.try_find_reference("refs/remotes/origin/HEAD")
.map_err(internal_error)?
{
if let Some(reference_name) = reference.target().try_name() {
if let Some((GitRefKind::Bookmark, symbol)) = str::from_utf8(reference_name.as_bstr())
.ok()
.and_then(|name| parse_git_ref(name.as_ref()))
let git_repo = get_git_repo(workspace_command.repo().store())?;
if let Ok(reference) = git_repo.find_reference("refs/remotes/origin/HEAD") {
if let Some(reference_name) = reference.symbolic_target() {
if let Some(RefName::RemoteBranch {
branch: default_branch,
..
}) = parse_git_ref(reference_name)
{
// TODO: Can we assume the symbolic target points to the same remote?
let symbol = symbol.name.to_remote_symbol("origin".as_ref());
write_repository_level_trunk_alias(ui, workspace_command.repo_path(), symbol)?;
let config_path = workspace_command.repo_path().join("config.toml");
write_config_value_to_file(
&ConfigNamePathBuf::from_iter(["revset-aliases", "trunk()"]),
format!("{default_branch}@origin").into(),
&config_path,
)?;
writeln!(
ui.status(),
"Setting the revset alias \"trunk()\" to \"{default_branch}@origin\"",
)?;
}
};
};
Ok(())
}
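
The function above reads the symbolic target of `refs/remotes/origin/HEAD` and, when it names a remote branch, derives the value used for the repository-level `trunk()` alias. A reduced sketch of that parsing step follows; `trunk_alias_from_symbolic_target` is a made-up name, and jj's real code goes through parse_git_ref and handles arbitrary remotes and ref kinds.

// Sketch: "refs/remotes/origin/main" -> "main@origin".
fn trunk_alias_from_symbolic_target(target: &str) -> Option<String> {
    let branch = target.strip_prefix("refs/remotes/origin/")?;
    // Simplification: ignore a self-referential HEAD target.
    (branch != "HEAD").then(|| format!("{branch}@origin"))
}

fn main() {
    assert_eq!(
        trunk_alias_from_symbolic_target("refs/remotes/origin/main").as_deref(),
        Some("main@origin"),
    );
}
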
fn print_trackable_remote_bookmarks(ui: &Ui, view: &View) -> io::Result<()> {
let remote_bookmark_symbols = view
.bookmarks()
.filter(|(_, bookmark_target)| bookmark_target.local_target.is_present())
.flat_map(|(name, bookmark_target)| {
bookmark_target
.remote_refs
.into_iter()
.filter(|&(_, remote_ref)| !remote_ref.is_tracked())
.map(move |(remote, _)| name.to_remote_symbol(remote))
})
.collect_vec();
if remote_bookmark_symbols.is_empty() {
return Ok(());
}
if let Some(mut formatter) = ui.status_formatter() {
writeln!(
formatter.labeled("hint").with_heading("Hint: "),
"The following remote bookmarks aren't associated with the existing local bookmarks:"
)?;
for symbol in &remote_bookmark_symbols {
write!(formatter, " ")?;
writeln!(formatter.labeled("bookmark"), "{symbol}")?;
}
writeln!(
formatter.labeled("hint").with_heading("Hint: "),
"Run `jj bookmark track {syms}` to keep local bookmarks updated on future pulls.",
syms = remote_bookmark_symbols.iter().join(" "),
)?;
}
Ok(())
}

View File

@ -12,25 +12,16 @@
// See the License for the specific language governing permissions and
// limitations under the License.
mod clone;
mod export;
mod fetch;
mod import;
mod init;
mod push;
mod remote;
mod root;
use std::path::Path;
pub mod clone;
pub mod export;
pub mod fetch;
pub mod import;
pub mod init;
pub mod push;
pub mod remote;
pub mod submodule;
use clap::Subcommand;
use jj_lib::config::ConfigFile;
use jj_lib::config::ConfigSource;
use jj_lib::git;
use jj_lib::git::UnexpectedGitBackendError;
use jj_lib::ref_name::RemoteNameBuf;
use jj_lib::ref_name::RemoteRefSymbol;
use jj_lib::store::Store;
use self::clone::cmd_git_clone;
use self::clone::GitCloneArgs;
@ -46,8 +37,8 @@ use self::push::cmd_git_push;
use self::push::GitPushArgs;
use self::remote::cmd_git_remote;
use self::remote::RemoteCommand;
use self::root::cmd_git_root;
use self::root::GitRootArgs;
use self::submodule::cmd_git_submodule;
use self::submodule::GitSubmoduleCommand;
use crate::cli_util::CommandHelper;
use crate::cli_util::WorkspaceCommandHelper;
use crate::command_error::user_error_with_message;
@ -56,13 +47,8 @@ use crate::ui::Ui;
/// Commands for working with Git remotes and the underlying Git repo
///
/// See this [comparison], including a [table of commands].
///
/// [comparison]:
/// https://jj-vcs.github.io/jj/latest/git-comparison/.
///
/// [table of commands]:
/// https://jj-vcs.github.io/jj/latest/git-command-table
/// For a comparison with Git, including a table of commands, see
/// https://martinvonz.github.io/jj/latest/git-comparison/.
#[derive(Subcommand, Clone, Debug)]
pub enum GitCommand {
Clone(GitCloneArgs),
@ -73,7 +59,8 @@ pub enum GitCommand {
Push(GitPushArgs),
#[command(subcommand)]
Remote(RemoteCommand),
Root(GitRootArgs),
#[command(subcommand, hide = true)]
Submodule(GitSubmoduleCommand),
}
pub fn cmd_git(
@ -89,7 +76,7 @@ pub fn cmd_git(
GitCommand::Init(args) => cmd_git_init(ui, command, args),
GitCommand::Push(args) => cmd_git_push(ui, command, args),
GitCommand::Remote(args) => cmd_git_remote(ui, command, args),
GitCommand::Root(args) => cmd_git_root(ui, command, args),
GitCommand::Submodule(args) => cmd_git_submodule(ui, command, args),
}
}
@ -108,27 +95,10 @@ pub fn maybe_add_gitignore(workspace_command: &WorkspaceCommandHelper) -> Result
}
}
fn get_single_remote(store: &Store) -> Result<Option<RemoteNameBuf>, UnexpectedGitBackendError> {
let mut names = git::get_all_remote_names(store)?;
Ok(match names.len() {
1 => names.pop(),
fn get_single_remote(git_repo: &git2::Repository) -> Result<Option<String>, CommandError> {
let git_remotes = git_repo.remotes()?;
Ok(match git_remotes.len() {
1 => git_remotes.get(0).map(ToOwned::to_owned),
_ => None,
})
}
/// Sets repository level `trunk()` alias to the specified remote symbol.
fn write_repository_level_trunk_alias(
ui: &Ui,
repo_path: &Path,
symbol: RemoteRefSymbol<'_>,
) -> Result<(), CommandError> {
let mut file = ConfigFile::load_or_empty(ConfigSource::Repo, repo_path.join("config.toml"))?;
file.set_value(["revset-aliases", "trunk()"], symbol.to_string())
.expect("initial repo config shouldn't have invalid values");
file.save()?;
writeln!(
ui.status(),
"Setting the revset alias `trunk()` to `{symbol}`",
)?;
Ok(())
}
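
For reference, the repository-level config written by the helper above ends up containing a single alias entry. The sketch below only approximates that file via plain string formatting; `trunk_alias_toml` is a made-up name, and the real code serializes through jj's ConfigFile/TOML layer, so quoting details may differ.

// Approximate shape of the repo-level config entry set by the helper above.
fn trunk_alias_toml(symbol: &str) -> String {
    format!("[revset-aliases]\n\"trunk()\" = \"{symbol}\"\n")
}

fn main() {
    print!("{}", trunk_alias_toml("main@origin"));
}
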

File diff suppressed because it is too large

View File

@ -13,23 +13,19 @@
// limitations under the License.
use jj_lib::git;
use jj_lib::ref_name::RemoteNameBuf;
use jj_lib::repo::Repo as _;
use jj_lib::repo::Repo;
use crate::cli_util::CommandHelper;
use crate::command_error::CommandError;
use crate::git_util::absolute_git_url;
use crate::git_util::get_git_repo;
use crate::ui::Ui;
/// Add a Git remote
#[derive(clap::Args, Clone, Debug)]
pub struct GitRemoteAddArgs {
/// The remote's name
remote: RemoteNameBuf,
/// The remote's URL or path
///
/// Local path will be resolved to absolute form.
#[arg(value_hint = clap::ValueHint::Url)]
remote: String,
/// The remote's URL
url: String,
}
@ -39,7 +35,8 @@ pub fn cmd_git_remote_add(
args: &GitRemoteAddArgs,
) -> Result<(), CommandError> {
let workspace_command = command.workspace_helper(ui)?;
let url = absolute_git_url(command.cwd(), &args.url)?;
git::add_remote(workspace_command.repo().store(), &args.remote, &url)?;
let repo = workspace_command.repo();
let git_repo = get_git_repo(repo.store())?;
git::add_remote(&git_repo, &args.remote, &args.url)?;
Ok(())
}
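
One side of the hunk above notes that a local path given as the remote URL is resolved to absolute form (via absolute_git_url). The sketch below illustrates that idea only; `absolutize_source` is a made-up name and the URL-vs-path heuristic is an assumption, not the exact rule jj applies.

use std::path::Path;

// Sketch: pass URLs through untouched, make local paths absolute against the
// current working directory. The "looks like a URL" test is a simplification.
fn absolutize_source(cwd: &Path, source: &str) -> String {
    let looks_like_url = source.contains("://") || source.starts_with("git@");
    if looks_like_url {
        source.to_owned()
    } else {
        cwd.join(source).to_string_lossy().into_owned()
    }
}

fn main() {
    let cwd = Path::new("/home/alice/src");
    assert_eq!(
        absolutize_source(cwd, "https://example.com/repo.git"),
        "https://example.com/repo.git"
    );
    println!("{}", absolutize_source(cwd, "../other-repo"));
}
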

View File

@ -12,14 +12,13 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use std::io::Write as _;
use std::io::Write;
use jj_lib::git;
use jj_lib::repo::Repo as _;
use jj_lib::repo::Repo;
use crate::cli_util::CommandHelper;
use crate::command_error::user_error_with_message;
use crate::command_error::CommandError;
use crate::git_util::get_git_repo;
use crate::ui::Ui;
/// List Git remotes
@ -32,24 +31,16 @@ pub fn cmd_git_remote_list(
_args: &GitRemoteListArgs,
) -> Result<(), CommandError> {
let workspace_command = command.workspace_helper(ui)?;
let git_repo = git::get_git_repo(workspace_command.repo().store())?;
for remote_name in git_repo.remote_names() {
let remote = match git_repo.try_find_remote(&*remote_name) {
Some(Ok(remote)) => remote,
Some(Err(err)) => {
return Err(user_error_with_message(
format!("Failed to load configured remote {remote_name}"),
err,
))
}
None => continue, // ignore empty [remote "<name>"] section
};
// TODO: print push url (by default or by some flag)?
let fetch_url = remote
.url(gix::remote::Direction::Fetch)
.map(|url| url.to_bstring())
.unwrap_or_else(|| "<no URL>".into());
writeln!(ui.stdout(), "{remote_name} {fetch_url}")?;
let repo = workspace_command.repo();
let git_repo = get_git_repo(repo.store())?;
for remote_name in git_repo.remotes()?.iter().flatten() {
let remote = git_repo.find_remote(remote_name)?;
writeln!(
ui.stdout(),
"{} {}",
remote_name,
remote.url().unwrap_or("<no URL>")
)?;
}
Ok(())
}

View File

@ -12,11 +12,11 @@
// See the License for the specific language governing permissions and
// limitations under the License.
mod add;
mod list;
mod remove;
mod rename;
mod set_url;
pub mod add;
pub mod list;
pub mod remove;
pub mod rename;
pub mod set_url;
use clap::Subcommand;

View File

@ -12,21 +12,19 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use clap_complete::ArgValueCandidates;
use jj_lib::git;
use jj_lib::ref_name::RemoteNameBuf;
use jj_lib::repo::Repo;
use crate::cli_util::CommandHelper;
use crate::command_error::CommandError;
use crate::complete;
use crate::git_util::get_git_repo;
use crate::ui::Ui;
/// Remove a Git remote and forget its bookmarks
#[derive(clap::Args, Clone, Debug)]
pub struct GitRemoteRemoveArgs {
/// The remote's name
#[arg(add = ArgValueCandidates::new(complete::git_remotes))]
remote: RemoteNameBuf,
remote: String,
}
pub fn cmd_git_remote_remove(
@ -35,10 +33,12 @@ pub fn cmd_git_remote_remove(
args: &GitRemoteRemoveArgs,
) -> Result<(), CommandError> {
let mut workspace_command = command.workspace_helper(ui)?;
let repo = workspace_command.repo();
let git_repo = get_git_repo(repo.store())?;
let mut tx = workspace_command.start_transaction();
git::remove_remote(tx.repo_mut(), &args.remote)?;
git::remove_remote(tx.repo_mut(), &git_repo, &args.remote)?;
if tx.repo().has_changes() {
tx.finish(ui, format!("remove git remote {}", args.remote.as_symbol()))
tx.finish(ui, format!("remove git remote {}", &args.remote))
} else {
Ok(()) // Do not print "Nothing changed."
}

View File

@ -12,23 +12,21 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use clap_complete::ArgValueCandidates;
use jj_lib::git;
use jj_lib::ref_name::RemoteNameBuf;
use jj_lib::repo::Repo;
use crate::cli_util::CommandHelper;
use crate::command_error::CommandError;
use crate::complete;
use crate::git_util::get_git_repo;
use crate::ui::Ui;
/// Rename a Git remote
#[derive(clap::Args, Clone, Debug)]
pub struct GitRemoteRenameArgs {
/// The name of an existing remote
#[arg(add = ArgValueCandidates::new(complete::git_remotes))]
old: RemoteNameBuf,
old: String,
/// The desired name for `old`
new: RemoteNameBuf,
new: String,
}
pub fn cmd_git_remote_rename(
@ -37,16 +35,14 @@ pub fn cmd_git_remote_rename(
args: &GitRemoteRenameArgs,
) -> Result<(), CommandError> {
let mut workspace_command = command.workspace_helper(ui)?;
let repo = workspace_command.repo();
let git_repo = get_git_repo(repo.store())?;
let mut tx = workspace_command.start_transaction();
git::rename_remote(tx.repo_mut(), &args.old, &args.new)?;
git::rename_remote(tx.repo_mut(), &git_repo, &args.old, &args.new)?;
if tx.repo().has_changes() {
tx.finish(
ui,
format!(
"rename git remote {old} to {new}",
old = args.old.as_symbol(),
new = args.new.as_symbol()
),
format!("rename git remote {} to {}", &args.old, &args.new),
)
} else {
Ok(()) // Do not print "Nothing changed."

View File

@ -12,27 +12,20 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use clap_complete::ArgValueCandidates;
use jj_lib::git;
use jj_lib::ref_name::RemoteNameBuf;
use jj_lib::repo::Repo as _;
use jj_lib::repo::Repo;
use crate::cli_util::CommandHelper;
use crate::command_error::CommandError;
use crate::complete;
use crate::git_util::absolute_git_url;
use crate::git_util::get_git_repo;
use crate::ui::Ui;
/// Set the URL of a Git remote
#[derive(clap::Args, Clone, Debug)]
pub struct GitRemoteSetUrlArgs {
/// The remote's name
#[arg(add = ArgValueCandidates::new(complete::git_remotes))]
remote: RemoteNameBuf,
/// The desired URL or path for `remote`
///
/// Local path will be resolved to absolute form.
#[arg(value_hint = clap::ValueHint::Url)]
remote: String,
/// The desired url for `remote`
url: String,
}
@ -42,7 +35,8 @@ pub fn cmd_git_remote_set_url(
args: &GitRemoteSetUrlArgs,
) -> Result<(), CommandError> {
let workspace_command = command.workspace_helper(ui)?;
let url = absolute_git_url(command.cwd(), &args.url)?;
git::set_remote_url(workspace_command.repo().store(), &args.remote, &url)?;
let repo = workspace_command.repo();
let git_repo = get_git_repo(repo.store())?;
git::set_remote_url(&git_repo, &args.remote, &args.url)?;
Ok(())
}

View File

@ -1,44 +0,0 @@
// Copyright 2025 The Jujutsu Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::io::Write as _;
use jj_lib::repo::Repo as _;
use tracing::instrument;
use crate::cli_util::CommandHelper;
use crate::command_error::user_error;
use crate::command_error::CommandError;
use crate::ui::Ui;
/// Show the underlying Git directory of a repository using the Git backend
#[derive(clap::Args, Clone, Debug)]
pub struct GitRootArgs {}
#[instrument(skip_all)]
pub fn cmd_git_root(
ui: &mut Ui,
command: &CommandHelper,
_args: &GitRootArgs,
) -> Result<(), CommandError> {
let workspace_command = command.workspace_helper(ui)?;
let store = workspace_command.repo().store();
let git_backend = jj_lib::git::get_git_backend(store)?;
let root = git_backend
.git_repo_path()
.to_str()
.ok_or_else(|| user_error("The workspace root is not valid UTF-8"))?;
writeln!(ui.stdout(), "{root}")?;
Ok(())
}

View File

@ -0,0 +1,91 @@
// Copyright 2020-2023 The Jujutsu Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::io::Write;
use clap::Subcommand;
use jj_lib::backend::TreeValue;
use jj_lib::git::parse_gitmodules;
use jj_lib::repo::Repo;
use jj_lib::repo_path::RepoPath;
use crate::cli_util::CommandHelper;
use crate::cli_util::RevisionArg;
use crate::command_error::user_error;
use crate::command_error::CommandError;
use crate::ui::Ui;
/// FOR INTERNAL USE ONLY Interact with git submodules
#[derive(Subcommand, Clone, Debug)]
pub enum GitSubmoduleCommand {
/// Print the relevant contents from .gitmodules. For debugging purposes
/// only.
PrintGitmodules(PrintArgs),
}
pub fn cmd_git_submodule(
ui: &mut Ui,
command: &CommandHelper,
subcommand: &GitSubmoduleCommand,
) -> Result<(), CommandError> {
match subcommand {
GitSubmoduleCommand::PrintGitmodules(args) => cmd_submodule_print(ui, command, args),
}
}
// TODO: break everything below into a separate file as soon as there is more
// than one subcommand here.
/// Print debugging info about Git submodules
#[derive(clap::Args, Clone, Debug)]
#[command(hide = true)]
pub struct PrintArgs {
/// Read .gitmodules from the given revision.
#[arg(long, short = 'r', default_value = "@")]
revisions: RevisionArg,
}
fn cmd_submodule_print(
ui: &mut Ui,
command: &CommandHelper,
args: &PrintArgs,
) -> Result<(), CommandError> {
let workspace_command = command.workspace_helper(ui)?;
let repo = workspace_command.repo();
let commit = workspace_command.resolve_single_rev(ui, &args.revisions)?;
let tree = commit.tree()?;
let gitmodules_path = RepoPath::from_internal_string(".gitmodules");
let mut gitmodules_file = match tree.path_value(gitmodules_path)?.into_resolved() {
Ok(None) => {
writeln!(ui.status(), "No submodules!")?;
return Ok(());
}
Ok(Some(TreeValue::File { id, .. })) => repo.store().read_file(gitmodules_path, &id)?,
_ => {
return Err(user_error(".gitmodules is not a file."));
}
};
let submodules = parse_gitmodules(&mut gitmodules_file)?;
for (name, submodule) in submodules {
writeln!(
ui.stdout(),
"name:{}\nurl:{}\npath:{}\n\n",
name,
submodule.url,
submodule.path
)?;
}
Ok(())
}
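
cmd_submodule_print reads `.gitmodules` out of the selected revision's tree and then delegates to parse_gitmodules. For readers unfamiliar with the file format, here is a deliberately minimal, std-only parser covering just the fields printed above (name, url, path); `parse_gitmodules_text` and `Submodule` are names made up for the sketch, and this is not the parser jj ships.

use std::collections::BTreeMap;

#[derive(Default, Debug)]
struct Submodule {
    url: String,
    path: String,
}

// Sketch parser for the `[submodule "name"]` / `key = value` subset.
fn parse_gitmodules_text(text: &str) -> BTreeMap<String, Submodule> {
    let mut result = BTreeMap::new();
    let mut current: Option<String> = None;
    for line in text.lines().map(str::trim) {
        if let Some(rest) = line.strip_prefix("[submodule \"") {
            if let Some(name) = rest.strip_suffix("\"]") {
                current = Some(name.to_owned());
                result.entry(name.to_owned()).or_default();
            }
        } else if let (Some(name), Some((key, value))) = (&current, line.split_once('=')) {
            let entry = result.entry(name.clone()).or_default();
            match key.trim() {
                "url" => entry.url = value.trim().to_owned(),
                "path" => entry.path = value.trim().to_owned(),
                _ => {}
            }
        }
    }
    result
}

fn main() {
    let text = "[submodule \"vendored\"]\n\tpath = third_party/vendored\n\turl = https://example.com/vendored.git\n";
    let modules = parse_gitmodules_text(text);
    assert_eq!(modules["vendored"].path, "third_party/vendored");
}
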

View File

@ -13,12 +13,12 @@
// limitations under the License.
use std::fmt::Write as _;
use std::io::Write as _;
use std::io::Write;
use clap::builder::PossibleValue;
use clap::builder::StyledStr;
use crossterm::style::Stylize as _;
use itertools::Itertools as _;
use crossterm::style::Stylize;
use itertools::Itertools;
use tracing::instrument;
use crate::cli_util::CommandHelper;
@ -58,15 +58,11 @@ pub(crate) fn cmd_help(
return Ok(());
}
let bin_name = command
.string_args()
.first()
.map_or(command.app().get_name(), |name| name.as_ref());
let mut args_to_show_help = vec![bin_name];
let mut args_to_show_help = vec![command.app().get_name()];
args_to_show_help.extend(args.command.iter().map(|s| s.as_str()));
args_to_show_help.push("--help");
// TODO: `help log -- -r` will give a cryptic error, ideally, it should state
// TODO: `help log -- -r` will gives an cryptic error, ideally, it should state
// that the subcommand `log -r` doesn't exist.
let help_err = command
.app()
@ -97,38 +93,13 @@ struct Keyword {
//
// TODO: Find a way to render markdown using ANSI escape codes.
//
// Maybe we can steal some ideas from https://github.com/jj-vcs/jj/pull/3130
// Maybe we can steal some ideas from https://github.com/martinvonz/jj/pull/3130
const KEYWORDS: &[Keyword] = &[
Keyword {
name: "bookmarks",
description: "Named pointers to revisions (similar to Git's branches)",
content: include_str!(concat!("../../", env!("JJ_DOCS_DIR"), "bookmarks.md")),
},
Keyword {
name: "config",
description: "How and where to set configuration options",
content: include_str!(concat!("../../", env!("JJ_DOCS_DIR"), "config.md")),
},
Keyword {
name: "filesets",
description: "A functional language for selecting a set of files",
content: include_str!(concat!("../../", env!("JJ_DOCS_DIR"), "filesets.md")),
},
Keyword {
name: "glossary",
description: "Definitions of various terms",
content: include_str!(concat!("../../", env!("JJ_DOCS_DIR"), "glossary.md")),
},
Keyword {
name: "revsets",
description: "A functional language for selecting a set of revision",
content: include_str!(concat!("../../", env!("JJ_DOCS_DIR"), "revsets.md")),
},
Keyword {
name: "templates",
description: "A functional language to customize command output",
content: include_str!(concat!("../../", env!("JJ_DOCS_DIR"), "templates.md")),
},
Keyword {
name: "tutorial",
description: "Show a tutorial to get started with jj",
@ -144,7 +115,7 @@ pub fn show_keyword_hint_after_help() -> StyledStr {
let mut ret = StyledStr::new();
writeln!(
ret,
"{} lists available keywords. Use {} to show help for one of these keywords.",
"{} list available keywords. Use {} to show help for one of these keywords.",
"'jj help --help'".bold(),
"'jj help -k'".bold(),
)
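
The KEYWORDS table above backs `jj help --keyword`: each entry bundles a name, a one-line description, and documentation embedded at compile time with include_str!. The small sketch below shows how a lookup over such a table could work; the field layout mirrors the Keyword struct in the diff, but `find_keyword` and the lookup logic are illustrative, not jj's actual implementation.

struct Keyword {
    name: &'static str,
    description: &'static str,
    content: &'static str,
}

// Sketch: resolve `jj help -k <name>` to a table entry by exact name.
fn find_keyword<'a>(keywords: &'a [Keyword], name: &str) -> Option<&'a Keyword> {
    keywords.iter().find(|keyword| keyword.name == name)
}
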

View File

@ -12,38 +12,46 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use std::io::Write as _;
use std::io::Write;
use clap::ArgGroup;
use jj_lib::file_util;
use jj_lib::workspace::Workspace;
use tracing::instrument;
use super::git;
use crate::cli_util::CommandHelper;
use crate::command_error::cli_error;
use crate::command_error::user_error_with_hint;
use crate::command_error::user_error_with_message;
use crate::command_error::CommandError;
use crate::ui::Ui;
/// Create a new repo in the given directory using the proof-of-concept simple
/// backend
/// Create a new repo in the given directory
///
/// The simple backend does not support cloning, fetching, or pushing.
///
/// This command is otherwise analogous to `jj git init`. If the given directory
/// does not exist, it will be created. If no directory is given, the current
/// directory is used.
/// If the given directory does not exist, it will be created. If no directory
/// is given, the current directory is used.
#[derive(clap::Args, Clone, Debug)]
pub(crate) struct DebugInitSimpleArgs {
#[command(group(ArgGroup::new("backend").args(&["git", "git_repo"])))]
pub(crate) struct InitArgs {
/// The destination directory
#[arg(default_value = ".", value_hint = clap::ValueHint::DirPath)]
destination: String,
/// DEPRECATED: Use `jj git init`
/// Use the Git backend, creating a jj repo backed by a Git repo
#[arg(long, hide = true)]
git: bool,
/// DEPRECATED: Use `jj git init`
/// Path to a git repo the jj repo will be backed by
#[arg(long, hide = true, value_hint = clap::ValueHint::DirPath)]
git_repo: Option<String>,
}
#[instrument(skip_all)]
pub(crate) fn cmd_debug_init_simple(
pub(crate) fn cmd_init(
ui: &mut Ui,
command: &CommandHelper,
args: &DebugInitSimpleArgs,
args: &InitArgs,
) -> Result<(), CommandError> {
if command.global_args().ignore_working_copy {
return Err(cli_error("--ignore-working-copy is not respected"));
@ -54,10 +62,29 @@ pub(crate) fn cmd_debug_init_simple(
let cwd = command.cwd();
let wc_path = cwd.join(&args.destination);
let wc_path = file_util::create_or_reuse_dir(&wc_path)
.and_then(|_| dunce::canonicalize(wc_path))
.and_then(|_| wc_path.canonicalize())
.map_err(|e| user_error_with_message("Failed to create workspace", e))?;
Workspace::init_simple(&command.settings_for_new_workspace(&wc_path)?, &wc_path)?;
// Preserve existing behaviour where `jj init` is not able to create
// a colocated repo.
let colocate = false;
if args.git || args.git_repo.is_some() {
git::init::do_init(ui, command, &wc_path, colocate, args.git_repo.as_deref())?;
writeln!(
ui.warning_default(),
"`--git` and `--git-repo` are deprecated.
Use `jj git init` instead"
)?;
} else {
if !command.settings().allow_native_backend() {
return Err(user_error_with_hint(
"The native backend is disallowed by default.",
"Did you mean to call `jj git init`?
Set `ui.allow-init-native` to allow initializing a repo with the native backend.",
));
}
Workspace::init_local(command.settings(), &wc_path)?;
}
let relative_wc_path = file_util::relative_path(cwd, &wc_path);
writeln!(

View File

@ -15,13 +15,11 @@
use std::slice;
use clap::ArgGroup;
use clap_complete::ArgValueCompleter;
use tracing::instrument;
use crate::cli_util::CommandHelper;
use crate::cli_util::RevisionArg;
use crate::command_error::CommandError;
use crate::complete;
use crate::diff_util::DiffFormatArgs;
use crate::ui::Ui;
@ -36,27 +34,13 @@ use crate::ui::Ui;
#[command(mut_arg("ignore_space_change", |a| a.short('b')))]
pub(crate) struct InterdiffArgs {
/// Show changes from this revision
#[arg(
long,
short,
value_name = "REVSET",
add = ArgValueCompleter::new(complete::revset_expression_all),
)]
#[arg(long)]
from: Option<RevisionArg>,
/// Show changes to this revision
#[arg(
long,
short,
value_name = "REVSET",
add = ArgValueCompleter::new(complete::revset_expression_all),
)]
#[arg(long)]
to: Option<RevisionArg>,
/// Restrict the diff to these paths
#[arg(
value_name = "FILESETS",
value_hint = clap::ValueHint::AnyPath,
add = ArgValueCompleter::new(complete::interdiff_files),
)]
#[arg(value_hint = clap::ValueHint::AnyPath)]
paths: Vec<String>,
#[command(flatten)]
format: DiffFormatArgs,

View File

@ -12,22 +12,16 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use clap_complete::ArgValueCandidates;
use clap_complete::ArgValueCompleter;
use itertools::Itertools as _;
use jj_lib::backend::CommitId;
use jj_lib::commit::Commit;
use jj_lib::config::ConfigGetError;
use jj_lib::config::ConfigGetResultExt as _;
use jj_lib::graph::reverse_graph;
use jj_lib::graph::GraphEdge;
use jj_lib::graph::GraphEdgeType;
use jj_lib::graph::ReverseGraphIterator;
use jj_lib::graph::TopoGroupedGraphIterator;
use jj_lib::repo::Repo as _;
use jj_lib::repo::Repo;
use jj_lib::revset::RevsetEvaluationError;
use jj_lib::revset::RevsetExpression;
use jj_lib::revset::RevsetFilterPredicate;
use jj_lib::revset::RevsetIteratorExt as _;
use jj_lib::revset::RevsetIteratorExt;
use jj_lib::settings::ConfigResultExt as _;
use jj_lib::settings::UserSettings;
use tracing::instrument;
@ -36,83 +30,55 @@ use crate::cli_util::CommandHelper;
use crate::cli_util::LogContentFormat;
use crate::cli_util::RevisionArg;
use crate::command_error::CommandError;
use crate::complete;
use crate::commit_templater::CommitTemplateLanguage;
use crate::diff_util::DiffFormatArgs;
use crate::graphlog::get_graphlog;
use crate::graphlog::Edge;
use crate::graphlog::GraphStyle;
use crate::templater::TemplateRenderer;
use crate::ui::Ui;
/// Show revision history
///
/// Renders a graphical view of the project's history, ordered with children
/// before parents. By default, the output only includes mutable revisions,
/// along with some additional revisions for context. Use `jj log -r ::` to see
/// all revisions. See [`jj help -k revsets`] for information about the syntax.
///
/// [`jj help -k revsets`]:
/// https://jj-vcs.github.io/jj/latest/revsets/
/// along with some additional revisions for context.
///
/// Spans of revisions that are not included in the graph per `--revisions` are
/// rendered as a synthetic node labeled "(elided revisions)".
///
/// The working-copy commit is indicated by a `@` symbol in the graph.
/// [Immutable revisions] have a `◆` symbol. Other commits have a `○` symbol.
/// All of these symbols can be [customized].
///
/// [Immutable revisions]:
/// https://jj-vcs.github.io/jj/latest/config/#set-of-immutable-commits
///
/// [customized]:
/// https://jj-vcs.github.io/jj/latest/config/#node-style
#[derive(clap::Args, Clone, Debug)]
pub(crate) struct LogArgs {
/// Which revisions to show
///
/// If no paths nor revisions are specified, this defaults to the
/// `revsets.log` setting.
#[arg(
long,
short,
value_name = "REVSETS",
add = ArgValueCompleter::new(complete::revset_expression_all),
)]
#[arg(long, short)]
revisions: Vec<RevisionArg>,
/// Show revisions modifying the given paths
#[arg(
value_name = "FILESETS",
value_hint = clap::ValueHint::AnyPath,
add = ArgValueCompleter::new(complete::log_files),
)]
#[arg(value_hint = clap::ValueHint::AnyPath)]
paths: Vec<String>,
/// Limit number of revisions to show
///
/// Applied after revisions are filtered and reordered topologically, but
/// before being reversed.
#[arg(long, short = 'n')]
limit: Option<usize>,
/// Show revisions in the opposite order (older revisions first)
#[arg(long)]
reversed: bool,
/// Limit number of revisions to show
///
/// Applied after revisions are filtered and reordered.
#[arg(long, short = 'n')]
limit: Option<usize>,
// TODO: Delete `-l` alias in jj 0.25+
#[arg(
short = 'l',
hide = true,
conflicts_with = "limit",
value_name = "LIMIT"
)]
deprecated_limit: Option<usize>,
/// Don't show the graph, show a flat list of revisions
#[arg(long)]
no_graph: bool,
/// Render each revision using the given template
///
/// Run `jj log -T` to list the built-in templates.
///
/// You can also specify arbitrary template expressions using the
/// [built-in keywords]. See [`jj help -k templates`] for more
/// information.
///
/// If not specified, this defaults to the `templates.log` setting.
///
/// [built-in keywords]:
/// https://jj-vcs.github.io/jj/latest/templates/#commit-keywords
///
/// [`jj help -k templates`]:
/// https://jj-vcs.github.io/jj/latest/templates/
#[arg(long, short = 'T', add = ArgValueCandidates::new(complete::template_aliases))]
/// For the syntax, see https://martinvonz.github.io/jj/latest/templates/
#[arg(long, short = 'T')]
template: Option<String>,
/// Show patch
#[arg(long, short = 'p')]
@ -128,14 +94,13 @@ pub(crate) fn cmd_log(
args: &LogArgs,
) -> Result<(), CommandError> {
let workspace_command = command.workspace_helper(ui)?;
let settings = workspace_command.settings();
let fileset_expression = workspace_command.parse_file_patterns(ui, &args.paths)?;
let revset_expression = {
// only use default revset if neither revset nor path are specified
let mut expression = if args.revisions.is_empty() && args.paths.is_empty() {
let revset_string = settings.get_string("revsets.log")?;
workspace_command.parse_revset(ui, &RevisionArg::from(revset_string))?
workspace_command
.parse_revset(ui, &RevisionArg::from(command.settings().default_revset()))?
} else if !args.revisions.is_empty() {
workspace_command.parse_union_revsets(ui, &args.revisions)?
} else {
@ -150,8 +115,6 @@ pub(crate) fn cmd_log(
}
expression
};
let prio_revset = settings.get_string("revsets.log-graph-prioritize")?;
let prio_revset = workspace_command.parse_revset(ui, &RevisionArg::from(prio_revset))?;
let repo = workspace_command.repo();
let matcher = fileset_expression.to_matcher();
@ -159,24 +122,37 @@ pub(crate) fn cmd_log(
let store = repo.store();
let diff_renderer = workspace_command.diff_renderer_for_log(&args.diff_format, args.patch)?;
let graph_style = GraphStyle::from_settings(settings)?;
let graph_style = GraphStyle::from_settings(command.settings())?;
let use_elided_nodes = settings.get_bool("ui.log-synthetic-elided-nodes")?;
let with_content_format = LogContentFormat::new(ui, settings)?;
let use_elided_nodes = command
.settings()
.config()
.get_bool("ui.log-synthetic-elided-nodes")?;
let with_content_format = LogContentFormat::new(ui, command.settings())?;
let template: TemplateRenderer<Commit>;
let node_template: TemplateRenderer<Option<Commit>>;
let template;
let node_template;
{
let language = workspace_command.commit_template_language();
let template_string = match &args.template {
Some(value) => value.to_string(),
None => settings.get_string("templates.log")?,
None => command.settings().config().get_string("templates.log")?,
};
template = workspace_command
.parse_template(ui, &language, &template_string)?
.parse_template(
ui,
&language,
&template_string,
CommitTemplateLanguage::wrap_commit,
)?
.labeled("log");
node_template = workspace_command
.parse_template(ui, &language, &get_node_template(graph_style, settings)?)?
.parse_template(
ui,
&language,
&get_node_template(graph_style, command.settings())?,
CommitTemplateLanguage::wrap_commit_opt,
)?
.labeled("node");
}
@ -185,60 +161,53 @@ pub(crate) fn cmd_log(
let mut formatter = ui.stdout_formatter();
let formatter = formatter.as_mut();
if args.deprecated_limit.is_some() {
writeln!(
ui.warning_default(),
"The -l shorthand is deprecated, use -n instead."
)?;
}
let limit = args.limit.or(args.deprecated_limit).unwrap_or(usize::MAX);
if !args.no_graph {
let mut raw_output = formatter.raw()?;
let mut graph = get_graphlog(graph_style, raw_output.as_mut());
let iter: Box<dyn Iterator<Item = _>> = {
let mut forward_iter = TopoGroupedGraphIterator::new(revset.iter_graph());
let has_commit = revset.containing_fn();
for prio in prio_revset.evaluate_to_commit_ids()? {
let prio = prio?;
if has_commit(&prio)? {
forward_iter.prioritize_branch(prio);
}
}
// The input to TopoGroupedGraphIterator shouldn't be truncated
// because the prioritized commit must exist in the input set.
let forward_iter = forward_iter.take(args.limit.unwrap_or(usize::MAX));
if args.reversed {
Box::new(reverse_graph(forward_iter, |id| id)?.into_iter().map(Ok))
} else {
Box::new(forward_iter)
}
let forward_iter = TopoGroupedGraphIterator::new(revset.iter_graph());
let iter: Box<dyn Iterator<Item = _>> = if args.reversed {
Box::new(ReverseGraphIterator::new(forward_iter)?)
} else {
Box::new(forward_iter)
};
for node in iter {
for node in iter.take(limit) {
let (commit_id, edges) = node?;
// The graph is keyed by (CommitId, is_synthetic)
let mut graphlog_edges = vec![];
// TODO: Should we update revset.iter_graph() to yield a `has_missing` flag
// instead of all the missing edges since we don't care about
// where they point here anyway?
let mut missing_edge_id = None;
// TODO: Should we update revset.iter_graph() to yield this flag instead of all
// the missing edges since we don't care about where they point here
// anyway?
let mut has_missing = false;
let mut elided_targets = vec![];
for edge in edges {
match edge.edge_type {
GraphEdgeType::Missing => {
missing_edge_id = Some(edge.target);
has_missing = true;
}
GraphEdgeType::Direct => {
graphlog_edges.push(GraphEdge::direct((edge.target, false)));
graphlog_edges.push(Edge::Direct((edge.target, false)));
}
GraphEdgeType::Indirect => {
if use_elided_nodes {
elided_targets.push(edge.target.clone());
graphlog_edges.push(GraphEdge::direct((edge.target, true)));
graphlog_edges.push(Edge::Direct((edge.target, true)));
} else {
graphlog_edges.push(GraphEdge::indirect((edge.target, false)));
graphlog_edges.push(Edge::Indirect((edge.target, false)));
}
}
}
}
if let Some(missing_edge_id) = missing_edge_id {
graphlog_edges.push(GraphEdge::missing((missing_edge_id, false)));
if has_missing {
graphlog_edges.push(Edge::Missing);
}
let mut buffer = vec![];
let key = (commit_id, false);
@ -272,7 +241,7 @@ pub(crate) fn cmd_log(
for elided_target in elided_targets {
let elided_key = (elided_target, true);
let real_key = (elided_key.0.clone(), false);
let edges = [GraphEdge::direct(real_key)];
let edges = [Edge::Direct(real_key)];
let mut buffer = vec![];
let within_graph =
with_content_format.sub_width(graph.width(&elided_key, &edges));
@ -289,16 +258,13 @@ pub(crate) fn cmd_log(
}
}
} else {
let iter: Box<dyn Iterator<Item = Result<CommitId, RevsetEvaluationError>>> = {
let forward_iter = revset.iter().take(args.limit.unwrap_or(usize::MAX));
let iter: Box<dyn Iterator<Item = Result<CommitId, RevsetEvaluationError>>> =
if args.reversed {
let entries: Vec<_> = forward_iter.try_collect()?;
Box::new(entries.into_iter().rev().map(Ok))
Box::new(revset.iter().reversed()?)
} else {
Box::new(forward_iter)
}
};
for commit_or_error in iter.commits(store) {
Box::new(revset.iter())
};
for commit_or_error in iter.commits(store).take(limit) {
let commit = commit_or_error?;
with_content_format
.write(formatter, |formatter| template.format(&commit, formatter))?;
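
Note the ordering that one side of the diff documents for `jj log`: `--limit` truncates the iterator after topological reordering but before `--reversed` flips the output, so `-n 2 --reversed` shows the two newest matching revisions, oldest first. A tiny sketch of that ordering, using a made-up helper over plain integers:

// Sketch of the --limit / --reversed interaction described above.
fn limited_then_reversed(ids: Vec<u32>, limit: usize, reversed: bool) -> Vec<u32> {
    // Truncate first (matching --limit), then optionally flip (matching --reversed).
    let limited: Vec<u32> = ids.into_iter().take(limit).collect();
    if reversed {
        limited.into_iter().rev().collect()
    } else {
        limited
    }
}

fn main() {
    // Input is newest-first, matching the default children-before-parents order.
    assert_eq!(limited_then_reversed(vec![5, 4, 3, 2, 1], 2, true), vec![4, 5]);
}
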
@ -317,9 +283,9 @@ pub(crate) fn cmd_log(
// For users of e.g. Mercurial, where `.` indicates the current commit.
writeln!(
ui.warning_default(),
"The argument {only_path:?} is being interpreted as a fileset expression, but \
this is often not useful because all non-empty commits touch '.'. If you meant \
to show the working copy commit, pass -r '@' instead."
"The argument {only_path:?} is being interpreted as a path, but this is often not \
useful because all non-empty commits touch '.'. If you meant to show the \
working copy commit, pass -r '@' instead."
)?;
} else if revset.is_empty()
&& workspace_command
@ -328,8 +294,8 @@ pub(crate) fn cmd_log(
{
writeln!(
ui.warning_default(),
"The argument {only_path:?} is being interpreted as a fileset expression. To \
specify a revset, pass -r {only_path:?} instead."
"The argument {only_path:?} is being interpreted as a path. To specify a revset, \
pass -r {only_path:?} instead."
)?;
}
}
@ -340,8 +306,11 @@ pub(crate) fn cmd_log(
pub fn get_node_template(
style: GraphStyle,
settings: &UserSettings,
) -> Result<String, ConfigGetError> {
let symbol = settings.get_string("templates.log_node").optional()?;
) -> Result<String, config::ConfigError> {
let symbol = settings
.config()
.get_string("templates.log_node")
.optional()?;
let default = if style.is_ascii() {
"builtin_log_node_ascii"
} else {

Some files were not shown because too many files have changed in this diff