Compare commits


No commits in common. "main" and "0.99.0" have entirely different histories.
main ... 0.99.0

1053 changed files with 24689 additions and 58496 deletions

.github/labeler.yml

@@ -1,40 +0,0 @@
# A bot for automatically labelling pull requests
# See https://github.com/actions/labeler
dataframe:
- changed-files:
- any-glob-to-any-file:
- crates/nu_plugin_polars/**
std-library:
- changed-files:
- any-glob-to-any-file:
- crates/nu-std/**
ci:
- changed-files:
- any-glob-to-any-file:
- .github/workflows/**
LSP:
- changed-files:
- any-glob-to-any-file:
- crates/nu-lsp/**
parser:
- changed-files:
- any-glob-to-any-file:
- crates/nu-parser/**
pr:plugins:
- changed-files:
- any-glob-to-any-file:
# plugins API
- crates/nu-plugin/**
- crates/nu-plugin-core/**
- crates/nu-plugin-engine/**
- crates/nu-plugin-protocol/**
- crates/nu-plugin-test-support/**
# specific plugins (like polars)
- crates/nu_plugin_*/**


@@ -1,52 +0,0 @@
name: Test on Beta Toolchain
# This workflow is made to run our tests on the beta toolchain to validate that
# the beta toolchain works.
# We do not intend to test here that we are working correctly but rather that
# the beta toolchain works correctly.
# The ci.yml handles our actual testing with our guarantees.
on:
schedule:
# If this workflow fails, GitHub notifications will go to the last person
# who edited this line.
# See: https://docs.github.com/en/actions/monitoring-and-troubleshooting-workflows/monitoring-workflows/notifications-for-workflow-runs
- cron: '0 0 * * *' # Runs daily at midnight UTC
env:
NUSHELL_CARGO_PROFILE: ci
NU_LOG_LEVEL: DEBUG
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref && github.ref || github.run_id }}
cancel-in-progress: true
jobs:
build-and-test:
# this job is more for testing the beta toolchain and not our tests, so if
# this fails but the tests of the regular ci pass, then this is fine
continue-on-error: true
strategy:
fail-fast: true
matrix:
platform: [windows-latest, macos-latest, ubuntu-22.04]
runs-on: ${{ matrix.platform }}
steps:
- uses: actions/checkout@v4
- run: rustup update beta
- name: Tests
run: cargo +beta test --workspace --profile ci --exclude nu_plugin_*
- name: Check for clean repo
shell: bash
run: |
if [ -n "$(git status --porcelain)" ]; then
echo "there are changes";
git status --porcelain
exit 1
else
echo "no changes in working directory";
fi


@@ -3,7 +3,6 @@ on:
 push:
 branches:
 - main
-- 'patch-release-*'
 name: continuous-integration
@@ -22,14 +21,14 @@ jobs:
 strategy:
 fail-fast: true
 matrix:
-# Pinning to Ubuntu 22.04 because building on newer Ubuntu versions causes linux-gnu
+# Pinning to Ubuntu 20.04 because building on newer Ubuntu versions causes linux-gnu
 # builds to link against a too-new-for-many-Linux-installs glibc version. Consider
-# revisiting this when 22.04 is closer to EOL (June 2027)
+# revisiting this when 20.04 is closer to EOL (April 2025)
 #
 # Using macOS 13 runner because 14 is based on the M1 and has half as much RAM (7 GB,
 # instead of 14 GB) which is too little for us right now. Revisit when `dfr` commands are
 # removed and we're only building the `polars` plugin instead
-platform: [windows-latest, macos-13, ubuntu-22.04]
+platform: [windows-latest, macos-13, ubuntu-20.04]
 runs-on: ${{ matrix.platform }}
@@ -37,7 +36,7 @@ jobs:
 - uses: actions/checkout@v4.1.7
 - name: Setup Rust toolchain and cache
-uses: actions-rust-lang/setup-rust-toolchain@v1.12.0
+uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
 - name: cargo fmt
 run: cargo fmt --all -- --check
@@ -57,7 +56,7 @@ jobs:
 strategy:
 fail-fast: true
 matrix:
-platform: [windows-latest, macos-latest, ubuntu-22.04]
+platform: [windows-latest, macos-latest, ubuntu-20.04]
 runs-on: ${{ matrix.platform }}
@@ -65,7 +64,7 @@ jobs:
 - uses: actions/checkout@v4.1.7
 - name: Setup Rust toolchain and cache
-uses: actions-rust-lang/setup-rust-toolchain@v1.12.0
+uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
 - name: Tests
 run: cargo test --workspace --profile ci --exclude nu_plugin_*
@@ -84,7 +83,7 @@ jobs:
 strategy:
 fail-fast: true
 matrix:
-platform: [ubuntu-22.04, macos-latest, windows-latest]
+platform: [ubuntu-20.04, macos-latest, windows-latest]
 py:
 - py
@@ -94,10 +93,10 @@ jobs:
 - uses: actions/checkout@v4.1.7
 - name: Setup Rust toolchain and cache
-uses: actions-rust-lang/setup-rust-toolchain@v1.12.0
+uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
 - name: Install Nushell
-run: cargo install --path . --locked --force
+run: cargo install --path . --locked --no-default-features
 - name: Standard library tests
 run: nu -c 'use crates/nu-std/testing.nu; testing run-tests --path crates/nu-std'
@@ -137,7 +136,7 @@ jobs:
 # instead of 14 GB) which is too little for us right now.
 #
 # Failure occurring with clippy for rust 1.77.2
-platform: [windows-latest, macos-13, ubuntu-22.04]
+platform: [windows-latest, macos-13, ubuntu-20.04]
 runs-on: ${{ matrix.platform }}
@@ -145,7 +144,7 @@ jobs:
 - uses: actions/checkout@v4.1.7
 - name: Setup Rust toolchain and cache
-uses: actions-rust-lang/setup-rust-toolchain@v1.12.0
+uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
 - name: Clippy
 run: cargo clippy --package nu_plugin_* -- $CLIPPY_OPTIONS
@@ -163,50 +162,3 @@ jobs:
 else
 echo "no changes in working directory";
 fi
-wasm:
-env:
-WASM_OPTIONS: --no-default-features --target wasm32-unknown-unknown
-CLIPPY_CONF_DIR: ${{ github.workspace }}/clippy/wasm/
-strategy:
-matrix:
-job:
-- name: Build WASM
-command: cargo build
-args:
-- name: Clippy WASM
-command: cargo clippy
-args: -- $CLIPPY_OPTIONS
-name: ${{ matrix.job.name }}
-runs-on: ubuntu-latest
-steps:
-- uses: actions/checkout@v4.1.7
-- name: Setup Rust toolchain and cache
-uses: actions-rust-lang/setup-rust-toolchain@v1.12.0
-- name: Add wasm32-unknown-unknown target
-run: rustup target add wasm32-unknown-unknown
-- run: ${{ matrix.job.command }} -p nu-cmd-base $WASM_OPTIONS ${{ matrix.job.args }}
-- run: ${{ matrix.job.command }} -p nu-cmd-extra $WASM_OPTIONS ${{ matrix.job.args }}
-- run: ${{ matrix.job.command }} -p nu-cmd-lang $WASM_OPTIONS ${{ matrix.job.args }}
-- run: ${{ matrix.job.command }} -p nu-color-config $WASM_OPTIONS ${{ matrix.job.args }}
-- run: ${{ matrix.job.command }} -p nu-command $WASM_OPTIONS ${{ matrix.job.args }}
-- run: ${{ matrix.job.command }} -p nu-derive-value $WASM_OPTIONS ${{ matrix.job.args }}
-- run: ${{ matrix.job.command }} -p nu-engine $WASM_OPTIONS ${{ matrix.job.args }}
-- run: ${{ matrix.job.command }} -p nu-glob $WASM_OPTIONS ${{ matrix.job.args }}
-- run: ${{ matrix.job.command }} -p nu-json $WASM_OPTIONS ${{ matrix.job.args }}
-- run: ${{ matrix.job.command }} -p nu-parser $WASM_OPTIONS ${{ matrix.job.args }}
-- run: ${{ matrix.job.command }} -p nu-path $WASM_OPTIONS ${{ matrix.job.args }}
-- run: ${{ matrix.job.command }} -p nu-pretty-hex $WASM_OPTIONS ${{ matrix.job.args }}
-- run: ${{ matrix.job.command }} -p nu-protocol $WASM_OPTIONS ${{ matrix.job.args }}
-- run: ${{ matrix.job.command }} -p nu-std $WASM_OPTIONS ${{ matrix.job.args }}
-- run: ${{ matrix.job.command }} -p nu-system $WASM_OPTIONS ${{ matrix.job.args }}
-- run: ${{ matrix.job.command }} -p nu-table $WASM_OPTIONS ${{ matrix.job.args }}
-- run: ${{ matrix.job.command }} -p nu-term-grid $WASM_OPTIONS ${{ matrix.job.args }}
-- run: ${{ matrix.job.command }} -p nu-utils $WASM_OPTIONS ${{ matrix.job.args }}
-- run: ${{ matrix.job.command }} -p nuon $WASM_OPTIONS ${{ matrix.job.args }}


@@ -1,19 +0,0 @@
# Automatically labels PRs based on the configuration file
# you are probably looking for 👉 `.github/labeler.yml`
name: Label PRs
on:
- pull_request_target
jobs:
triage:
permissions:
contents: read
pull-requests: write
runs-on: ubuntu-latest
if: github.repository_owner == 'nushell'
steps:
- uses: actions/labeler@v5
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
sync-labels: true


@@ -1,30 +0,0 @@
# Description:
# - Add milestone to a merged PR automatically
# - Add milestone to a closed issue that has a merged PR fix (if any)
name: Milestone Action
on:
issues:
types: [closed]
pull_request_target:
types: [closed]
jobs:
update-milestone:
runs-on: ubuntu-latest
name: Milestone Update
steps:
- name: Set Milestone for PR
uses: hustcer/milestone-action@main
if: github.event.pull_request.merged == true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# Bind milestone to closed issue that has a merged PR fix
- name: Set Milestone for Issue
uses: hustcer/milestone-action@v2
if: github.event.issue.state == 'closed'
with:
action: bind-issue
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}


@@ -8,7 +8,6 @@
 name: Nightly Build
 on:
-workflow_dispatch:
 push:
 branches:
 - nightly # Just for test purpose only with the nightly repo
@@ -40,7 +39,7 @@ jobs:
 uses: hustcer/setup-nu@v3
 if: github.repository == 'nushell/nightly'
 with:
-version: 0.103.0
+version: 0.98.0
 # Synchronize the main branch of nightly repo with the main branch of Nushell official repo
 - name: Prepare for Nightly Release
@@ -115,7 +114,7 @@ jobs:
 - target: armv7-unknown-linux-musleabihf
 os: ubuntu-22.04
 - target: riscv64gc-unknown-linux-gnu
-os: ubuntu-22.04
+os: ubuntu-latest
 - target: loongarch64-unknown-linux-gnu
 os: ubuntu-22.04
@@ -132,7 +131,7 @@ jobs:
 echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml
 - name: Setup Rust toolchain and cache
-uses: actions-rust-lang/setup-rust-toolchain@v1.12.0
+uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
 # WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
 with:
 rustflags: ''
@@ -140,7 +139,7 @@ jobs:
 - name: Setup Nushell
 uses: hustcer/setup-nu@v3
 with:
-version: 0.103.0
+version: 0.98.0
 - name: Release Nu Binary
 id: nu
@@ -171,7 +170,7 @@ jobs:
 # REF: https://github.com/marketplace/actions/gh-release
 # Create a release only in nushell/nightly repo
 - name: Publish Archive
-uses: softprops/action-gh-release@v2.0.9
+uses: softprops/action-gh-release@v2.0.8
 if: ${{ startsWith(github.repository, 'nushell/nightly') }}
 with:
 prerelease: true
@@ -198,7 +197,7 @@ jobs:
 - name: Setup Nushell
 uses: hustcer/setup-nu@v3
 with:
-version: 0.103.0
+version: 0.98.0
 # Keep the last a few releases
 - name: Delete Older Releases


@@ -117,14 +117,14 @@ if $os in ['macos-latest'] or $USE_UBUNTU {
 # ----------------------------------------------------------------------------
 # Build for Windows without static-link-openssl feature
 # ----------------------------------------------------------------------------
-if $os =~ 'windows' {
+if $os in ['windows-latest'] {
 cargo-build-nu
 }
 # ----------------------------------------------------------------------------
 # Prepare for the release archive
 # ----------------------------------------------------------------------------
-let suffix = if $os =~ 'windows' { '.exe' }
+let suffix = if $os == 'windows-latest' { '.exe' }
 # nu, nu_plugin_* were all included
 let executable = $'target/($target)/release/($bin)*($suffix)'
 print $'Current executable file: ($executable)'
@@ -148,10 +148,10 @@ For more information, refer to https://www.nushell.sh/book/plugins.html
 [LICENSE ...(glob $executable)] | each {|it| cp -rv $it $dist } | flatten
 print $'(char nl)Check binary release version detail:'; hr-line
-let ver = if $os =~ 'windows' {
+let ver = if $os == 'windows-latest' {
-(do -i { .\output\nu.exe -c 'version' }) | default '' | str join
+(do -i { .\output\nu.exe -c 'version' }) | str join
 } else {
-(do -i { ./output/nu -c 'version' }) | default '' | str join
+(do -i { ./output/nu -c 'version' }) | str join
 }
 if ($ver | str trim | is-empty) {
 print $'(ansi r)Incompatible Nu binary: The binary cross compiled is not runnable on current arch...(ansi reset)'
@@ -177,7 +177,7 @@ if $os in ['macos-latest'] or $USE_UBUNTU {
 # REF: https://github.blog/changelog/2022-10-11-github-actions-deprecating-save-state-and-set-output-commands/
 echo $"archive=($archive)" | save --append $env.GITHUB_OUTPUT
-} else if $os =~ 'windows' {
+} else if $os == 'windows-latest' {
 let releaseStem = $'($bin)-($version)-($target)'
@@ -221,7 +221,7 @@ if $os in ['macos-latest'] or $USE_UBUNTU {
 }
 def 'cargo-build-nu' [] {
-if $os =~ 'windows' {
+if $os == 'windows-latest' {
 cargo build --release --all --target $target
 } else {
 cargo build --release --all --target $target --features=static-link-openssl


@@ -7,9 +7,7 @@ name: Create Release Draft
 on:
 workflow_dispatch:
 push:
-tags:
-- '[0-9]+.[0-9]+.[0-9]+*'
-- '!*nightly*' # Don't trigger release for nightly tags
+tags: ["[0-9]+.[0-9]+.[0-9]+*"]
 defaults:
 run:
@@ -66,7 +64,7 @@ jobs:
 - target: armv7-unknown-linux-musleabihf
 os: ubuntu-22.04
 - target: riscv64gc-unknown-linux-gnu
-os: ubuntu-22.04
+os: ubuntu-latest
 - target: loongarch64-unknown-linux-gnu
 os: ubuntu-22.04
@@ -80,7 +78,7 @@ jobs:
 echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml
 - name: Setup Rust toolchain
-uses: actions-rust-lang/setup-rust-toolchain@v1.12.0
+uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
 # WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
 with:
 cache: false
@@ -89,7 +87,7 @@ jobs:
 - name: Setup Nushell
 uses: hustcer/setup-nu@v3
 with:
-version: 0.103.0
+version: 0.98.0
 - name: Release Nu Binary
 id: nu
@@ -100,10 +98,9 @@ jobs:
 TARGET: ${{ matrix.target }}
 _EXTRA_: ${{ matrix.extra }}
-# WARN: Don't upgrade this action due to the release per asset issue.
-# See: https://github.com/softprops/action-gh-release/issues/445
+# REF: https://github.com/marketplace/actions/gh-release
 - name: Publish Archive
-uses: softprops/action-gh-release@v2.0.5
+uses: softprops/action-gh-release@v2.0.8
 if: ${{ startsWith(github.ref, 'refs/tags/') }}
 with:
 draft: true
@@ -127,7 +124,7 @@ jobs:
 - name: Create Checksums
 run: cd release && shasum -a 256 * > ../SHA256SUMS
 - name: Publish Checksums
-uses: softprops/action-gh-release@v2.0.5
+uses: softprops/action-gh-release@v2.0.8
 with:
 draft: true
 files: SHA256SUMS


@@ -10,4 +10,4 @@ jobs:
 uses: actions/checkout@v4.1.7
 - name: Check spelling
-uses: crate-ci/typos@v1.31.2
+uses: crate-ci/typos@v1.26.0

Cargo.lock (generated)

File diff suppressed because it is too large.


@@ -10,8 +10,8 @@ homepage = "https://www.nushell.sh"
 license = "MIT"
 name = "nu"
 repository = "https://github.com/nushell/nushell"
-rust-version = "1.84.1"
+rust-version = "1.79.0"
-version = "0.104.1"
+version = "0.99.0"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -66,33 +66,33 @@ alphanumeric-sort = "1.5"
 ansi-str = "0.8"
 anyhow = "1.0.82"
 base64 = "0.22.1"
-bracoxide = "0.1.6"
+bracoxide = "0.1.2"
-brotli = "7.0"
+brotli = "5.0"
 byteorder = "1.5"
 bytes = "1"
-bytesize = "1.3.3"
+bytesize = "1.3"
-calamine = "0.27"
+calamine = "0.24.0"
 chardetng = "0.1.17"
 chrono = { default-features = false, version = "0.4.34" }
 chrono-humanize = "0.2.3"
-chrono-tz = "0.10"
+chrono-tz = "0.8"
 crossbeam-channel = "0.5.8"
 crossterm = "0.28.1"
 csv = "1.3"
 ctrlc = "3.4"
-devicons = "0.6.12"
 dialoguer = { default-features = false, version = "0.11" }
 digest = { default-features = false, version = "0.10" }
 dirs = "5.0"
 dirs-sys = "0.4"
 dtparse = "2.0"
 encoding_rs = "0.8"
-fancy-regex = "0.14"
+fancy-regex = "0.13"
 filesize = "0.2"
 filetime = "0.2"
+fuzzy-matcher = "0.3"
 heck = "0.5.0"
-human-date-parser = "0.3.0"
+human-date-parser = "0.2.0"
-indexmap = "2.9"
+indexmap = "2.6"
 indicatif = "0.17"
 interprocess = "2.2.0"
 is_executable = "1.0"
@@ -102,91 +102,83 @@ libproc = "0.14"
 log = "0.4"
 lru = "0.12"
 lscolors = { version = "0.17", default-features = false }
-lsp-server = "0.7.8"
+lsp-server = "0.7.5"
-lsp-types = { version = "0.97.0", features = ["proposed"] }
+lsp-types = "0.95.0"
-lsp-textdocument = "0.4.2"
 mach2 = "0.4"
 md5 = { version = "0.10", package = "md-5" }
-miette = "7.5"
+miette = "7.2"
 mime = "0.3.17"
 mime_guess = "2.0"
-mockito = { version = "1.7", default-features = false }
+mockito = { version = "1.5", default-features = false }
-multipart-rs = "0.1.13"
+multipart-rs = "0.1.11"
 native-tls = "0.2"
 nix = { version = "0.29", default-features = false }
 notify-debouncer-full = { version = "0.3", default-features = false }
 nu-ansi-term = "0.50.1"
-nucleo-matcher = "0.3"
 num-format = "0.4"
 num-traits = "0.2"
-oem_cp = "2.0.0"
 omnipath = "0.1"
+once_cell = "1.20"
 open = "5.3"
 os_pipe = { version = "1.2", features = ["io_safety"] }
 pathdiff = "0.2"
 percent-encoding = "2"
 pretty_assertions = "1.4"
 print-positions = "0.6"
-proc-macro-error2 = "2.0"
+proc-macro-error = { version = "1.0", default-features = false }
 proc-macro2 = "1.0"
-procfs = "0.17.0"
+procfs = "0.16.0"
 pwd = "1.3"
-quick-xml = "0.37.0"
+quick-xml = "0.32.0"
 quickcheck = "1.0"
 quickcheck_macros = "1.0"
 quote = "1.0"
-rand = "0.9"
+rand = "0.8"
-getrandom = "0.2" # pick same version that rand requires
+rand_chacha = "0.3.1"
-rand_chacha = "0.9"
+ratatui = "0.26"
-ratatui = "0.29"
 rayon = "1.10"
-reedline = "0.40.0"
+reedline = "0.36.0"
+regex = "1.9.5"
 rmp = "0.8"
 rmp-serde = "1.3"
-roxmltree = "0.20"
+ropey = "1.6.1"
-rstest = { version = "0.23", default-features = false }
+roxmltree = "0.19"
-rstest_reuse = "0.7"
+rstest = { version = "0.18", default-features = false }
 rusqlite = "0.31"
-rust-embed = "8.7.0"
+rust-embed = "8.5.0"
-scopeguard = { version = "1.2.0" }
 serde = { version = "1.0" }
-serde_json = "1.0.97"
+serde_json = "1.0"
 serde_urlencoded = "0.7.1"
-serde_yaml = "0.9.33"
+serde_yaml = "0.9"
 sha2 = "0.10"
 strip-ansi-escapes = "0.2.0"
-strum = "0.26"
-strum_macros = "0.26"
 syn = "2.0"
-sysinfo = "0.33"
+sysinfo = "0.30"
-tabled = { version = "0.17.0", default-features = false }
+tabled = { version = "0.16.0", default-features = false }
-tempfile = "3.15"
+tempfile = "3.13"
-titlecase = "3.5"
+terminal_size = "0.3"
+titlecase = "2.0"
 toml = "0.8"
-trash = "5.2"
+trash = "3.3"
-update-informer = { version = "1.2.0", default-features = false, features = ["github", "native-tls", "ureq"] }
 umask = "2.1"
 unicode-segmentation = "1.12"
-unicode-width = "0.2"
+unicode-width = "0.1"
-ureq = { version = "2.12", default-features = false, features = ["socks-proxy"] }
+ureq = { version = "2.10", default-features = false }
 url = "2.2"
-uu_cp = "0.0.30"
+uu_cp = "0.0.27"
-uu_mkdir = "0.0.30"
+uu_mkdir = "0.0.27"
-uu_mktemp = "0.0.30"
+uu_mktemp = "0.0.27"
-uu_mv = "0.0.30"
+uu_mv = "0.0.27"
-uu_touch = "0.0.30"
+uu_whoami = "0.0.27"
-uu_whoami = "0.0.30"
+uu_uname = "0.0.27"
-uu_uname = "0.0.30"
+uucore = "0.0.27"
-uucore = "0.0.30"
+uuid = "1.10.0"
-uuid = "1.16.0"
 v_htmlescape = "0.15.0"
 wax = "0.6"
-web-time = "1.1.0"
+which = "6.0.0"
-which = "7.0.0"
+windows = "0.54"
-windows = "0.56"
 windows-sys = "0.48"
 winreg = "0.52"
-memchr = "2.7.4"
 [workspace.lints.clippy]
 # Warning: workspace lints affect library code as well as tests, so don't enable lints that would be too noisy in tests like that.
@@ -197,22 +189,22 @@ unchecked_duration_subtraction = "warn"
 workspace = true
 [dependencies]
-nu-cli = { path = "./crates/nu-cli", version = "0.104.1" }
+nu-cli = { path = "./crates/nu-cli", version = "0.99.0" }
-nu-cmd-base = { path = "./crates/nu-cmd-base", version = "0.104.1" }
+nu-cmd-base = { path = "./crates/nu-cmd-base", version = "0.99.0" }
-nu-cmd-lang = { path = "./crates/nu-cmd-lang", version = "0.104.1" }
+nu-cmd-lang = { path = "./crates/nu-cmd-lang", version = "0.99.0" }
-nu-cmd-plugin = { path = "./crates/nu-cmd-plugin", version = "0.104.1", optional = true }
+nu-cmd-plugin = { path = "./crates/nu-cmd-plugin", version = "0.99.0", optional = true }
-nu-cmd-extra = { path = "./crates/nu-cmd-extra", version = "0.104.1" }
+nu-cmd-extra = { path = "./crates/nu-cmd-extra", version = "0.99.0" }
-nu-command = { path = "./crates/nu-command", version = "0.104.1" }
+nu-command = { path = "./crates/nu-command", version = "0.99.0" }
-nu-engine = { path = "./crates/nu-engine", version = "0.104.1" }
+nu-engine = { path = "./crates/nu-engine", version = "0.99.0" }
-nu-explore = { path = "./crates/nu-explore", version = "0.104.1" }
+nu-explore = { path = "./crates/nu-explore", version = "0.99.0" }
-nu-lsp = { path = "./crates/nu-lsp/", version = "0.104.1" }
+nu-lsp = { path = "./crates/nu-lsp/", version = "0.99.0" }
-nu-parser = { path = "./crates/nu-parser", version = "0.104.1" }
+nu-parser = { path = "./crates/nu-parser", version = "0.99.0" }
-nu-path = { path = "./crates/nu-path", version = "0.104.1" }
+nu-path = { path = "./crates/nu-path", version = "0.99.0" }
-nu-plugin-engine = { path = "./crates/nu-plugin-engine", optional = true, version = "0.104.1" }
+nu-plugin-engine = { path = "./crates/nu-plugin-engine", optional = true, version = "0.99.0" }
-nu-protocol = { path = "./crates/nu-protocol", version = "0.104.1" }
+nu-protocol = { path = "./crates/nu-protocol", version = "0.99.0" }
-nu-std = { path = "./crates/nu-std", version = "0.104.1" }
+nu-std = { path = "./crates/nu-std", version = "0.99.0" }
-nu-system = { path = "./crates/nu-system", version = "0.104.1" }
+nu-system = { path = "./crates/nu-system", version = "0.99.0" }
-nu-utils = { path = "./crates/nu-utils", version = "0.104.1" }
+nu-utils = { path = "./crates/nu-utils", version = "0.99.0" }
 reedline = { workspace = true, features = ["bashisms", "sqlite"] }
 crossterm = { workspace = true }
@@ -220,6 +212,7 @@ ctrlc = { workspace = true }
 dirs = { workspace = true }
 log = { workspace = true }
 miette = { workspace = true, features = ["fancy-no-backtrace", "fancy"] }
+mimalloc = { version = "0.1.42", default-features = false, optional = true }
 multipart-rs = { workspace = true }
 serde_json = { workspace = true }
 simplelog = "0.12"
@@ -241,38 +234,34 @@ nix = { workspace = true, default-features = false, features = [
 ] }
 [dev-dependencies]
-nu-test-support = { path = "./crates/nu-test-support", version = "0.104.1" }
+nu-test-support = { path = "./crates/nu-test-support", version = "0.99.0" }
-nu-plugin-protocol = { path = "./crates/nu-plugin-protocol", version = "0.104.1" }
+nu-plugin-protocol = { path = "./crates/nu-plugin-protocol", version = "0.99.0" }
-nu-plugin-core = { path = "./crates/nu-plugin-core", version = "0.104.1" }
+nu-plugin-core = { path = "./crates/nu-plugin-core", version = "0.99.0" }
 assert_cmd = "2.0"
 dirs = { workspace = true }
 tango-bench = "0.6"
 pretty_assertions = { workspace = true }
-fancy-regex = { workspace = true }
+regex = { workspace = true }
 rstest = { workspace = true, default-features = false }
-serial_test = "3.2"
+serial_test = "3.1"
 tempfile = { workspace = true }
 [features]
 plugin = [
-# crates
-"nu-cmd-plugin",
 "nu-plugin-engine",
+"nu-cmd-plugin",
-# features
 "nu-cli/plugin",
-"nu-cmd-lang/plugin",
-"nu-command/plugin",
-"nu-engine/plugin",
-"nu-engine/plugin",
 "nu-parser/plugin",
+"nu-command/plugin",
 "nu-protocol/plugin",
+"nu-engine/plugin",
 ]
 default = [
 "plugin",
 "trash-support",
 "sqlite",
+"mimalloc",
 ]
 stable = ["default"]
 # NOTE: individual features are also passed to `nu-cmd-lang` that uses them to generate the feature matrix in the `version` command
@@ -281,6 +270,7 @@ stable = ["default"]
 # otherwise the system version will be used. Not enabled by default because it takes a while to build
 static-link-openssl = ["dep:openssl", "nu-cmd-lang/static-link-openssl"]
+mimalloc = ["nu-cmd-lang/mimalloc", "dep:mimalloc"]
 # Optional system clipboard support in `reedline`, this behavior has problematic compatibility with some systems.
 # Missing X server/ Wayland can cause issues
 system-clipboard = [
@@ -293,7 +283,7 @@ system-clipboard = [
 trash-support = ["nu-command/trash-support", "nu-cmd-lang/trash-support"]
 # SQLite commands for nushell
-sqlite = ["nu-command/sqlite", "nu-cmd-lang/sqlite", "nu-std/sqlite"]
+sqlite = ["nu-command/sqlite", "nu-cmd-lang/sqlite"]
 [profile.release]
 opt-level = "s" # Optimize for size

View File

@@ -1,6 +1,6 @@
 MIT License
-Copyright (c) 2019 - 2025 The Nushell Project Developers
+Copyright (c) 2019 - 2023 The Nushell Project Developers
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal

README.md

@@ -4,6 +4,7 @@
 [![Nightly Build](https://github.com/nushell/nushell/actions/workflows/nightly-build.yml/badge.svg)](https://github.com/nushell/nushell/actions/workflows/nightly-build.yml)
 [![Discord](https://img.shields.io/discord/601130461678272522.svg?logo=discord)](https://discord.gg/NtAbbGn)
 [![The Changelog #363](https://img.shields.io/badge/The%20Changelog-%23363-61c192.svg)](https://changelog.com/podcast/363)
+[![@nu_shell](https://img.shields.io/badge/twitter-@nu_shell-1DA1F3?style=flat-square)](https://twitter.com/nu_shell)
 [![GitHub commit activity](https://img.shields.io/github/commit-activity/m/nushell/nushell)](https://github.com/nushell/nushell/graphs/commit-activity)
 [![GitHub contributors](https://img.shields.io/github/contributors/nushell/nushell)](https://github.com/nushell/nushell/graphs/contributors)
@@ -34,7 +35,7 @@ This project has reached a minimum-viable-product level of quality. Many people
 The [Nushell book](https://www.nushell.sh/book/) is the primary source of Nushell documentation. You can find [a full list of Nu commands in the book](https://www.nushell.sh/commands/), and we have many examples of using Nu in our [cookbook](https://www.nushell.sh/cookbook/).
-We're also active on [Discord](https://discord.gg/NtAbbGn); come and chat with us!
+We're also active on [Discord](https://discord.gg/NtAbbGn) and [Twitter](https://twitter.com/nu_shell); come and chat with us!
 ## Installation
@@ -57,7 +58,7 @@ For details about which platforms the Nushell team actively supports, see [our p
 ## Configuration
-The default configurations can be found at [sample_config](crates/nu-utils/src/default_files)
+The default configurations can be found at [sample_config](crates/nu-utils/src/sample_config)
 which are the configuration files one gets when they startup Nushell for the first time.
 It sets all of the default configuration to run Nushell. From here one can
@@ -94,44 +95,44 @@ Commands that work in the pipeline fit into one of three categories:
 Commands are separated by the pipe symbol (`|`) to denote a pipeline flowing left to right.
 ```shell
-ls | where type == "dir" | table
+> ls | where type == "dir" | table
-# => ╭────┬──────────┬──────┬─────────┬───────────────╮
+╭────┬──────────┬──────┬─────────┬───────────────╮
-# => │ # │ name │ type │ size │ modified │
+│ # │ name │ type │ size │ modified │
-# => ├────┼──────────┼──────┼─────────┼───────────────┤
+├────┼──────────┼──────┼─────────┼───────────────┤
-# => │ 0 │ .cargo │ dir │ 0 B │ 9 minutes ago │
+│ 0 │ .cargo │ dir │ 0 B │ 9 minutes ago │
-# => │ 1 │ assets │ dir │ 0 B │ 2 weeks ago │
+│ 1 │ assets │ dir │ 0 B │ 2 weeks ago │
-# => │ 2 │ crates │ dir │ 4.0 KiB │ 2 weeks ago │
+│ 2 │ crates │ dir │ 4.0 KiB │ 2 weeks ago │
-# => │ 3 │ docker │ dir │ 0 B │ 2 weeks ago │
+│ 3 │ docker │ dir │ 0 B │ 2 weeks ago │
-# => │ 4 │ docs │ dir │ 0 B │ 2 weeks ago │
+│ 4 │ docs │ dir │ 0 B │ 2 weeks ago │
-# => │ 5 │ images │ dir │ 0 B │ 2 weeks ago │
+│ 5 │ images │ dir │ 0 B │ 2 weeks ago │
-# => │ 6 │ pkg_mgrs │ dir │ 0 B │ 2 weeks ago │
+│ 6 │ pkg_mgrs │ dir │ 0 B │ 2 weeks ago │
-# => │ 7 │ samples │ dir │ 0 B │ 2 weeks ago │
+│ 7 │ samples │ dir │ 0 B │ 2 weeks ago │
-# => │ 8 │ src │ dir │ 4.0 KiB │ 2 weeks ago │
+│ 8 │ src │ dir │ 4.0 KiB │ 2 weeks ago │
-# => │ 9 │ target │ dir │ 0 B │ a day ago │
+│ 9 │ target │ dir │ 0 B │ a day ago │
-# => │ 10 │ tests │ dir │ 4.0 KiB │ 2 weeks ago │
+│ 10 │ tests │ dir │ 4.0 KiB │ 2 weeks ago │
-# => │ 11 │ wix │ dir │ 0 B │ 2 weeks ago │
+│ 11 │ wix │ dir │ 0 B │ 2 weeks ago │
-# => ╰────┴──────────┴──────┴─────────┴───────────────╯
+╰────┴──────────┴──────┴─────────┴───────────────╯
 ```
 Because most of the time you'll want to see the output of a pipeline, `table` is assumed.
 We could have also written the above:
 ```shell
-ls | where type == "dir"
+> ls | where type == "dir"
 ```
 Being able to use the same commands and compose them differently is an important philosophy in Nu.
 For example, we could use the built-in `ps` command to get a list of the running processes, using the same `where` as above.
 ```shell
-ps | where cpu > 0
+> ps | where cpu > 0
-# => ╭───┬───────┬───────────┬───────┬───────────┬───────────╮
+╭───┬───────┬───────────┬───────┬───────────┬───────────╮
-# => │ # │ pid │ name │ cpu │ mem │ virtual │
+│ # │ pid │ name │ cpu │ mem │ virtual │
-# => ├───┼───────┼───────────┼───────┼───────────┼───────────┤
+├───┼───────┼───────────┼───────┼───────────┼───────────┤
-# => │ 0 │ 2240 │ Slack.exe │ 16.40 │ 178.3 MiB │ 232.6 MiB │
+│ 0 │ 2240 │ Slack.exe │ 16.40 │ 178.3 MiB │ 232.6 MiB │
-# => │ 1 │ 16948 │ Slack.exe │ 16.32 │ 205.0 MiB │ 197.9 MiB │
+│ 1 │ 16948 │ Slack.exe │ 16.32 │ 205.0 MiB │ 197.9 MiB │
-# => │ 2 │ 17700 │ nu.exe │ 3.77 │ 26.1 MiB │ 8.8 MiB │
+│ 2 │ 17700 │ nu.exe │ 3.77 │ 26.1 MiB │ 8.8 MiB │
-# => ╰───┴───────┴───────────┴───────┴───────────┴───────────╯
+╰───┴───────┴───────────┴───────┴───────────┴───────────╯
 ```
 ### Opening files
@@ -140,46 +141,46 @@ Nu can load file and URL contents as raw text or structured data (if it recogniz
 For example, you can load a .toml file as structured data and explore it:
 ```shell
-open Cargo.toml
+> open Cargo.toml
-# => ╭──────────────────┬────────────────────╮
+╭──────────────────┬────────────────────╮
-# => │ bin │ [table 1 row] │
+│ bin │ [table 1 row] │
-# => │ dependencies │ {record 25 fields} │
+│ dependencies │ {record 25 fields} │
-# => │ dev-dependencies │ {record 8 fields} │
+│ dev-dependencies │ {record 8 fields} │
-# => │ features │ {record 10 fields} │
+│ features │ {record 10 fields} │
-# => │ package │ {record 13 fields} │
+│ package │ {record 13 fields} │
-# => │ patch │ {record 1 field} │
+│ patch │ {record 1 field} │
-# => │ profile │ {record 3 fields} │
+│ profile │ {record 3 fields} │
-# => │ target │ {record 3 fields} │
+│ target │ {record 3 fields} │
-# => │ workspace │ {record 1 field} │
+│ workspace │ {record 1 field} │
-# => ╰──────────────────┴────────────────────╯
+╰──────────────────┴────────────────────╯
 ```
 We can pipe this into a command that gets the contents of one of the columns:
 ```shell
-open Cargo.toml | get package
+> open Cargo.toml | get package
-# => ╭───────────────┬────────────────────────────────────╮
+╭───────────────┬────────────────────────────────────╮
-# => │ authors │ [list 1 item] │
+│ authors │ [list 1 item] │
-# => │ default-run │ nu │
+│ default-run │ nu │
-# => │ description │ A new type of shell │
+│ description │ A new type of shell │
-# => │ documentation │ https://www.nushell.sh/book/ │
+│ documentation │ https://www.nushell.sh/book/ │
-# => │ edition │ 2018 │
+│ edition │ 2018 │
-# => │ exclude │ [list 1 item] │
+│ exclude │ [list 1 item] │
-# => │ homepage │ https://www.nushell.sh │
+│ homepage │ https://www.nushell.sh │
-# => │ license │ MIT │
+│ license │ MIT │
-# => │ metadata │ {record 1 field} │
+│ metadata │ {record 1 field} │
-# => │ name │ nu │
+│ name │ nu │
-# => │ repository │ https://github.com/nushell/nushell │
+│ repository │ https://github.com/nushell/nushell │
-# => │ rust-version │ 1.60 │
+│ rust-version │ 1.60 │
-# => │ version │ 0.72.0 │
+│ version │ 0.72.0 │
-# => ╰───────────────┴────────────────────────────────────╯
+╰───────────────┴────────────────────────────────────╯
 ```
 And if needed we can drill down further:
 ```shell
-open Cargo.toml | get package.version
+> open Cargo.toml | get package.version
-# => 0.72.0
+0.72.0
 ```
 ### Plugins
@@ -222,14 +223,13 @@ Please submit an issue or PR to be added to this list.
 - [Dorothy](http://github.com/bevry/dorothy)
 - [Direnv](https://github.com/direnv/direnv/blob/master/docs/hook.md#nushell)
 - [x-cmd](https://x-cmd.com/mod/nu)
-- [vfox](https://github.com/version-fox/vfox)
 ## Contributing
 See [Contributing](CONTRIBUTING.md) for details. Thanks to all the people who already contributed!
 <a href="https://github.com/nushell/nushell/graphs/contributors">
-<img src="https://contributors-img.web.app/image?repo=nushell/nushell&max=750&columns=20" />
+<img src="https://contributors-img.web.app/image?repo=nushell/nushell&max=750" />
 </a>
 ## License


@@ -1,6 +1,7 @@
 use nu_cli::{eval_source, evaluate_commands};
 use nu_plugin_core::{Encoder, EncodingType};
 use nu_plugin_protocol::{PluginCallResponse, PluginOutput};
 use nu_protocol::{
 engine::{EngineState, Stack},
 PipelineData, Signals, Span, Spanned, Value,
@@ -8,11 +9,12 @@ use nu_protocol::{
 use nu_std::load_standard_library;
 use nu_utils::{get_default_config, get_default_env};
 use std::{
-fmt::Write,
-hint::black_box,
 rc::Rc,
 sync::{atomic::AtomicBool, Arc},
 };
+use std::hint::black_box;
 use tango_bench::{benchmark_fn, tango_benchmarks, tango_main, IntoBenchmarks};
 fn load_bench_commands() -> EngineState {
@@ -44,6 +46,9 @@ fn setup_stack_and_engine_from_command(command: &str) -> (Stack, EngineState) {
 let mut stack = Stack::new();
+// Support running benchmarks without IR mode
+stack.use_ir = std::env::var_os("NU_DISABLE_IR").is_none();
 evaluate_commands(
 &commands,
 &mut engine,
@@ -139,16 +144,19 @@ fn bench_load_standard_lib() -> impl IntoBenchmarks {
 })]
 }
-fn create_flat_record_string(n: usize) -> String {
+fn create_flat_record_string(n: i32) -> String {
 let mut s = String::from("let record = {");
 for i in 0..n {
-write!(s, "col_{i}: {i}, ").unwrap();
+s.push_str(&format!("col_{}: {}", i, i));
+if i < n - 1 {
+s.push_str(", ");
+}
 }
 s.push('}');
 s
 }
-fn create_nested_record_string(depth: usize) -> String {
+fn create_nested_record_string(depth: i32) -> String {
 let mut s = String::from("let record = {");
 for _ in 0..depth {
 s.push_str("col: {");
@@ -161,7 +169,7 @@ fn create_nested_record_string(depth: usize) -> String {
 s
 }
-fn create_example_table_nrows(n: usize) -> String {
+fn create_example_table_nrows(n: i32) -> String {
 let mut s = String::from("let table = [[foo bar baz]; ");
 for i in 0..n {
 s.push_str(&format!("[0, 1, {i}]"));
@@ -173,7 +181,7 @@ fn create_example_table_nrows(n: usize) -> String {
 s
 }
-fn bench_record_create(n: usize) -> impl IntoBenchmarks {
+fn bench_record_create(n: i32) -> impl IntoBenchmarks {
 bench_command(
 &format!("record_create_{n}"),
 &create_flat_record_string(n),
@@ -182,7 +190,7 @@ fn bench_record_create(n: usize) -> impl IntoBenchmarks {
 )
 }
-fn bench_record_flat_access(n: usize) -> impl IntoBenchmarks {
+fn bench_record_flat_access(n: i32) -> impl IntoBenchmarks {
 let setup_command = create_flat_record_string(n);
 let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
 bench_command(
@@ -193,10 +201,10 @@ fn bench_record_flat_access(n: usize) -> impl IntoBenchmarks {
 )
 }
-fn bench_record_nested_access(n: usize) -> impl IntoBenchmarks {
+fn bench_record_nested_access(n: i32) -> impl IntoBenchmarks {
 let setup_command = create_nested_record_string(n);
 let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
-let nested_access = ".col".repeat(n);
+let nested_access = ".col".repeat(n as usize);
 bench_command(
 &format!("record_nested_access_{n}"),
 &format!("$record{} | ignore", nested_access),
@@ -205,18 +213,7 @@ fn bench_record_nested_access(n: usize) -> impl IntoBenchmarks {
 )
 }
-fn bench_record_insert(n: usize, m: usize) -> impl IntoBenchmarks {
+fn bench_table_create(n: i32) -> impl IntoBenchmarks {
-let setup_command = create_flat_record_string(n);
-let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
-let mut insert = String::from("$record");
-for i in n..(n + m) {
-write!(insert, " | insert col_{i} {i}").unwrap();
-}
-insert.push_str(" | ignore");
-bench_command(&format!("record_insert_{n}_{m}"), &insert, stack, engine)
-}
-fn bench_table_create(n: usize) -> impl IntoBenchmarks {
 bench_command(
 &format!("table_create_{n}"),
 &create_example_table_nrows(n),
@@ -225,7 +222,7 @@ fn bench_table_create(n: usize) -> impl IntoBenchmarks {
 )
 }
-fn bench_table_get(n: usize) -> impl IntoBenchmarks {
+fn bench_table_get(n: i32) -> impl IntoBenchmarks {
 let setup_command = create_example_table_nrows(n);
 let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
 bench_command(
@@ -236,7 +233,7 @@ fn bench_table_get(n: usize) -> impl IntoBenchmarks {
 )
 }
-fn bench_table_select(n: usize) -> impl IntoBenchmarks {
+fn bench_table_select(n: i32) -> impl IntoBenchmarks {
 let setup_command = create_example_table_nrows(n);
 let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
 bench_command(
@@ -247,29 +244,7 @@ fn bench_table_select(n: usize) -> impl IntoBenchmarks {
 )
 }
-fn bench_table_insert_row(n: usize, m: usize) -> impl IntoBenchmarks {
+fn bench_eval_interleave(n: i32) -> impl IntoBenchmarks {
-let setup_command = create_example_table_nrows(n);
-let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
-let mut insert = String::from("$table");
-for i in n..(n + m) {
-write!(insert, " | insert {i} {{ foo: 0, bar: 1, baz: {i} }}").unwrap();
-}
-insert.push_str(" | ignore");
-bench_command(&format!("table_insert_row_{n}_{m}"), &insert, stack, engine)
-}
-fn bench_table_insert_col(n: usize, m: usize) -> impl IntoBenchmarks {
-let setup_command = create_example_table_nrows(n);
-let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
-let mut insert = String::from("$table");
-for i in 0..m {
-write!(insert, " | insert col_{i} {i}").unwrap();
-}
-insert.push_str(" | ignore");
-bench_command(&format!("table_insert_col_{n}_{m}"), &insert, stack, engine)
-}
-fn bench_eval_interleave(n: usize) -> impl IntoBenchmarks {
 let engine = setup_engine();
 let stack = Stack::new();
 bench_command(
@@ -280,7 +255,7 @@ fn bench_eval_interleave(n: usize) -> impl IntoBenchmarks {
 )
 }
-fn bench_eval_interleave_with_interrupt(n: usize) -> impl IntoBenchmarks {
+fn bench_eval_interleave_with_interrupt(n: i32) -> impl IntoBenchmarks {
 let mut engine = setup_engine();
 engine.set_signals(Signals::new(Arc::new(AtomicBool::new(false))));
 let stack = Stack::new();
@@ -292,7 +267,7 @@ fn bench_eval_interleave_with_interrupt(n: usize) -> impl IntoBenchmarks {
 )
 }
-fn bench_eval_for(n: usize) -> impl IntoBenchmarks {
+fn bench_eval_for(n: i32) -> impl IntoBenchmarks {
 let engine = setup_engine();
 let stack = Stack::new();
 bench_command(
@@ -303,7 +278,7 @@ fn bench_eval_for(n: usize) -> impl IntoBenchmarks {
 )
 }
-fn bench_eval_each(n: usize) -> impl IntoBenchmarks {
+fn bench_eval_each(n: i32) -> impl IntoBenchmarks {
 let engine = setup_engine();
 let stack = Stack::new();
 bench_command(
@@ -314,7 +289,7 @@ fn bench_eval_each(n: usize) -> impl IntoBenchmarks {
 )
 }
-fn bench_eval_par_each(n: usize) -> impl IntoBenchmarks {
+fn bench_eval_par_each(n: i32) -> impl IntoBenchmarks {
 let engine = setup_engine();
 let stack = Stack::new();
 bench_command(
@@ -455,14 +430,6 @@ tango_benchmarks!(
 bench_record_nested_access(32),
 bench_record_nested_access(64),
 bench_record_nested_access(128),
-bench_record_insert(1, 1),
-bench_record_insert(10, 1),
-bench_record_insert(100, 1),
-bench_record_insert(1000, 1),
-bench_record_insert(1, 10),
-bench_record_insert(10, 10),
-bench_record_insert(100, 10),
-bench_record_insert(1000, 10),
 // Table
 bench_table_create(1),
 bench_table_create(10),
@@ -476,22 +443,6 @@ tango_benchmarks!(
 bench_table_select(10),
 bench_table_select(100),
 bench_table_select(1_000),
-bench_table_insert_row(1, 1),
-bench_table_insert_row(10, 1),
-bench_table_insert_row(100, 1),
-bench_table_insert_row(1000, 1),
-bench_table_insert_row(1, 10),
-bench_table_insert_row(10, 10),
-bench_table_insert_row(100, 10),
-bench_table_insert_row(1000, 10),
-bench_table_insert_col(1, 1),
-bench_table_insert_col(10, 1),
-bench_table_insert_col(100, 1),
-bench_table_insert_col(1000, 1),
-bench_table_insert_col(1, 10),
-bench_table_insert_col(10, 10),
-bench_table_insert_col(100, 10),
-bench_table_insert_col(1000, 10),
 // Eval
 // Interleave
 bench_eval_interleave(100),


@@ -1,3 +0,0 @@
[[disallowed-types]]
path = "std::time::Instant"
reason = "WASM panics if used, use `web_time::Instant` instead"


@@ -5,43 +5,41 @@ repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cli"
 edition = "2021"
 license = "MIT"
 name = "nu-cli"
-version = "0.104.1"
+version = "0.99.0"
 [lib]
 bench = false
 [dev-dependencies]
-nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.104.1" }
+nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.99.0" }
-nu-command = { path = "../nu-command", version = "0.104.1" }
+nu-command = { path = "../nu-command", version = "0.99.0" }
-nu-std = { path = "../nu-std", version = "0.104.1" }
+nu-test-support = { path = "../nu-test-support", version = "0.99.0" }
-nu-test-support = { path = "../nu-test-support", version = "0.104.1" }
 rstest = { workspace = true, default-features = false }
 tempfile = { workspace = true }
 [dependencies]
-nu-cmd-base = { path = "../nu-cmd-base", version = "0.104.1" }
+nu-cmd-base = { path = "../nu-cmd-base", version = "0.99.0" }
-nu-engine = { path = "../nu-engine", version = "0.104.1", features = ["os"] }
+nu-engine = { path = "../nu-engine", version = "0.99.0" }
-nu-glob = { path = "../nu-glob", version = "0.104.1" }
+nu-path = { path = "../nu-path", version = "0.99.0" }
-nu-path = { path = "../nu-path", version = "0.104.1" }
+nu-parser = { path = "../nu-parser", version = "0.99.0" }
-nu-parser = { path = "../nu-parser", version = "0.104.1" }
+nu-plugin-engine = { path = "../nu-plugin-engine", version = "0.99.0", optional = true }
-nu-plugin-engine = { path = "../nu-plugin-engine", version = "0.104.1", optional = true }
+nu-protocol = { path = "../nu-protocol", version = "0.99.0" }
-nu-protocol = { path = "../nu-protocol", version = "0.104.1", features = ["os"] }
+nu-utils = { path = "../nu-utils", version = "0.99.0" }
-nu-utils = { path = "../nu-utils", version = "0.104.1" }
+nu-color-config = { path = "../nu-color-config", version = "0.99.0" }
-nu-color-config = { path = "../nu-color-config", version = "0.104.1" }
 nu-ansi-term = { workspace = true }
 reedline = { workspace = true, features = ["bashisms", "sqlite"] }
 chrono = { default-features = false, features = ["std"], workspace = true }
 crossterm = { workspace = true }
 fancy-regex = { workspace = true }
+fuzzy-matcher = { workspace = true }
 is_executable = { workspace = true }
 log = { workspace = true }
-lscolors = { workspace = true, default-features = false, features = ["nu-ansi-term"] }
 miette = { workspace = true, features = ["fancy-no-backtrace"] }
-nucleo-matcher = { workspace = true }
+lscolors = { workspace = true, default-features = false, features = ["nu-ansi-term"] }
+once_cell = { workspace = true }
 percent-encoding = { workspace = true }
 sysinfo = { workspace = true }
-strum = { workspace = true }
 unicode-segmentation = { workspace = true }
 uuid = { workspace = true, features = ["v4"] }
 which = { workspace = true }


@@ -1,9 +1,9 @@
 use nu_engine::command_prelude::*;
 #[derive(Clone)]
-pub struct CommandlineEdit;
+pub struct SubCommand;
-impl Command for CommandlineEdit {
+impl Command for SubCommand {
 fn name(&self) -> &str {
 "commandline edit"
 }
@@ -29,7 +29,7 @@ impl Command for CommandlineEdit {
 .required(
 "str",
 SyntaxShape::String,
-"The string to perform the operation with.",
+"the string to perform the operation with",
 )
 .category(Category::Core)
 }


@@ -2,9 +2,9 @@ use nu_engine::command_prelude::*;
 use unicode_segmentation::UnicodeSegmentation;
 #[derive(Clone)]
-pub struct CommandlineGetCursor;
+pub struct SubCommand;
-impl Command for CommandlineGetCursor {
+impl Command for SubCommand {
 fn name(&self) -> &str {
 "commandline get-cursor"
 }


@@ -4,6 +4,6 @@ mod get_cursor;
 mod set_cursor;
 pub use commandline_::Commandline;
-pub use edit::CommandlineEdit;
+pub use edit::SubCommand as CommandlineEdit;
-pub use get_cursor::CommandlineGetCursor;
+pub use get_cursor::SubCommand as CommandlineGetCursor;
-pub use set_cursor::CommandlineSetCursor;
+pub use set_cursor::SubCommand as CommandlineSetCursor;


@ -3,9 +3,9 @@ use nu_engine::command_prelude::*;
use unicode_segmentation::UnicodeSegmentation; use unicode_segmentation::UnicodeSegmentation;
#[derive(Clone)] #[derive(Clone)]
pub struct CommandlineSetCursor; pub struct SubCommand;
impl Command for CommandlineSetCursor { impl Command for SubCommand {
fn name(&self) -> &str { fn name(&self) -> &str {
"commandline set-cursor" "commandline set-cursor"
} }
@ -18,7 +18,7 @@ impl Command for CommandlineSetCursor {
"set the current cursor position to the end of the buffer", "set the current cursor position to the end of the buffer",
Some('e'), Some('e'),
) )
.optional("pos", SyntaxShape::Int, "Cursor position to be set.") .optional("pos", SyntaxShape::Int, "Cursor position to be set")
.category(Category::Core) .category(Category::Core)
} }


@ -17,7 +17,6 @@ pub fn add_cli_context(mut engine_state: EngineState) -> EngineState {
CommandlineGetCursor, CommandlineGetCursor,
CommandlineSetCursor, CommandlineSetCursor,
History, History,
HistoryImport,
HistorySession, HistorySession,
Keybindings, Keybindings,
KeybindingsDefault, KeybindingsDefault,


@ -1,9 +0,0 @@
// Each const is named after a HistoryItem field, and the value is the field name to be displayed to
// the user (or accept during import).
pub const COMMAND_LINE: &str = "command";
pub const START_TIMESTAMP: &str = "start_timestamp";
pub const HOSTNAME: &str = "hostname";
pub const CWD: &str = "cwd";
pub const EXIT_STATUS: &str = "exit_status";
pub const DURATION: &str = "duration";
pub const SESSION_ID: &str = "session_id";
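// [editor's illustrative sketch: not part of the diff above]
// The fields module keeps the user-facing column names in one place so that `history`
// output and `history import` input agree on them. This standalone sketch shows the idea
// with hypothetical stand-in types; MiniHistoryItem and Record are not nu_protocol types.
const COMMAND_LINE: &str = "command";
const CWD: &str = "cwd";
const EXIT_STATUS: &str = "exit_status";

#[derive(Debug, Default, PartialEq)]
struct MiniHistoryItem {
    command_line: String,
    cwd: Option<String>,
    exit_status: Option<i64>,
}

type Record = Vec<(&'static str, String)>;

// Display side: turn an item into (column, value) pairs using the shared constants.
fn to_record(item: &MiniHistoryItem) -> Record {
    vec![
        (COMMAND_LINE, item.command_line.clone()),
        (CWD, item.cwd.clone().unwrap_or_default()),
        (EXIT_STATUS, item.exit_status.unwrap_or_default().to_string()),
    ]
}

// Import side: read the same columns back; the real code also rejects unknown columns.
fn from_record(rec: &Record) -> MiniHistoryItem {
    let get = |key: &str| rec.iter().find(|(col, _)| *col == key).map(|(_, v)| v.clone());
    MiniHistoryItem {
        command_line: get(COMMAND_LINE).unwrap_or_default(),
        cwd: get(CWD),
        exit_status: get(EXIT_STATUS).and_then(|v| v.parse().ok()),
    }
}

fn main() {
    let item = MiniHistoryItem {
        command_line: "ls".into(),
        cwd: Some("/home".into()),
        exit_status: Some(0),
    };
    let rec = to_record(&item);
    assert_eq!(from_record(&rec).command_line, "ls");
}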


@ -1,12 +1,10 @@
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use nu_protocol::{shell_error::io::IoError, HistoryFileFormat}; use nu_protocol::HistoryFileFormat;
use reedline::{ use reedline::{
FileBackedHistory, History as ReedlineHistory, HistoryItem, SearchDirection, SearchQuery, FileBackedHistory, History as ReedlineHistory, HistoryItem, SearchDirection, SearchQuery,
SqliteBackedHistory, SqliteBackedHistory,
}; };
use super::fields;
#[derive(Clone)] #[derive(Clone)]
pub struct History; pub struct History;
@ -85,19 +83,17 @@ impl Command for History {
entries.into_iter().enumerate().map(move |(idx, entry)| { entries.into_iter().enumerate().map(move |(idx, entry)| {
Value::record( Value::record(
record! { record! {
fields::COMMAND_LINE => Value::string(entry.command_line, head), "command" => Value::string(entry.command_line, head),
// TODO: This name is inconsistent with create_history_record.
"index" => Value::int(idx as i64, head), "index" => Value::int(idx as i64, head),
}, },
head, head,
) )
}) })
}) })
.ok_or(IoError::new( .ok_or(ShellError::FileNotFound {
std::io::ErrorKind::NotFound, file: history_path.display().to_string(),
head, span: head,
history_path, })?
))?
.into_pipeline_data(head, signals)), .into_pipeline_data(head, signals)),
HistoryFileFormat::Sqlite => Ok(history_reader HistoryFileFormat::Sqlite => Ok(history_reader
.and_then(|h| { .and_then(|h| {
@ -105,15 +101,15 @@ impl Command for History {
.ok() .ok()
}) })
.map(move |entries| { .map(move |entries| {
entries.into_iter().enumerate().map(move |(idx, entry)| { entries
create_sqlite_history_record(idx, entry, long, head) .into_iter()
.enumerate()
.map(move |(idx, entry)| create_history_record(idx, entry, long, head))
}) })
}) .ok_or(ShellError::FileNotFound {
.ok_or(IoError::new( file: history_path.display().to_string(),
std::io::ErrorKind::NotFound, span: head,
head, })?
history_path,
))?
.into_pipeline_data(head, signals)), .into_pipeline_data(head, signals)),
} }
} }
@ -139,7 +135,7 @@ impl Command for History {
} }
} }
fn create_sqlite_history_record(idx: usize, entry: HistoryItem, long: bool, head: Span) -> Value { fn create_history_record(idx: usize, entry: HistoryItem, long: bool, head: Span) -> Value {
//1. Format all the values //1. Format all the values
//2. Create a record of either short or long columns and values //2. Create a record of either short or long columns and values
@ -150,8 +146,11 @@ fn create_sqlite_history_record(idx: usize, entry: HistoryItem, long: bool, head
.unwrap_or_default(), .unwrap_or_default(),
head, head,
); );
let start_timestamp_value = Value::date( let start_timestamp_value = Value::string(
entry.start_timestamp.unwrap_or_default().fixed_offset(), entry
.start_timestamp
.map(|time| time.to_string())
.unwrap_or_default(),
head, head,
); );
let command_value = Value::string(entry.command_line, head); let command_value = Value::string(entry.command_line, head);
@ -177,13 +176,13 @@ fn create_sqlite_history_record(idx: usize, entry: HistoryItem, long: bool, head
Value::record( Value::record(
record! { record! {
"item_id" => item_id_value, "item_id" => item_id_value,
fields::START_TIMESTAMP => start_timestamp_value, "start_timestamp" => start_timestamp_value,
fields::COMMAND_LINE => command_value, "command" => command_value,
fields::SESSION_ID => session_id_value, "session_id" => session_id_value,
fields::HOSTNAME => hostname_value, "hostname" => hostname_value,
fields::CWD => cwd_value, "cwd" => cwd_value,
fields::DURATION => duration_value, "duration" => duration_value,
fields::EXIT_STATUS => exit_status_value, "exit_status" => exit_status_value,
"idx" => index_value, "idx" => index_value,
}, },
head, head,
@ -191,11 +190,11 @@ fn create_sqlite_history_record(idx: usize, entry: HistoryItem, long: bool, head
} else { } else {
Value::record( Value::record(
record! { record! {
fields::START_TIMESTAMP => start_timestamp_value, "start_timestamp" => start_timestamp_value,
fields::COMMAND_LINE => command_value, "command" => command_value,
fields::CWD => cwd_value, "cwd" => cwd_value,
fields::DURATION => duration_value, "duration" => duration_value,
fields::EXIT_STATUS => exit_status_value, "exit_status" => exit_status_value,
}, },
head, head,
) )


@ -1,441 +0,0 @@
use std::path::{Path, PathBuf};
use nu_engine::command_prelude::*;
use nu_protocol::{
shell_error::{self, io::IoError},
HistoryFileFormat,
};
use reedline::{
FileBackedHistory, History, HistoryItem, ReedlineError, SearchQuery, SqliteBackedHistory,
};
use super::fields;
#[derive(Clone)]
pub struct HistoryImport;
impl Command for HistoryImport {
fn name(&self) -> &str {
"history import"
}
fn description(&self) -> &str {
"Import command line history."
}
fn extra_description(&self) -> &str {
r#"Can import history from input, either successive command lines or more detailed records. If providing records, available fields are:
command, start_timestamp, hostname, cwd, duration, exit_status.
If no input is provided, will import all history items from existing history in the other format: if current history is stored in sqlite, it will store it in plain text and vice versa.
Note that history item IDs are ignored when importing from file."#
}
fn signature(&self) -> nu_protocol::Signature {
Signature::build("history import")
.category(Category::History)
.input_output_types(vec![
(Type::Nothing, Type::Nothing),
(Type::String, Type::Nothing),
(Type::List(Box::new(Type::String)), Type::Nothing),
(Type::table(), Type::Nothing),
])
}
fn examples(&self) -> Vec<Example> {
vec![
Example {
example: "history import",
description:
"Append all items from history in the other format to the current history",
result: None,
},
Example {
example: "echo foo | history import",
description: "Append `foo` to the current history",
result: None,
},
Example {
example: "[[ command_line cwd ]; [ foo /home ]] | history import",
description: "Append `foo` ran from `/home` to the current history",
result: None,
},
]
}
fn run(
&self,
engine_state: &EngineState,
_stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let span = call.head;
let ok = Ok(Value::nothing(call.head).into_pipeline_data());
let Some(history) = engine_state.history_config() else {
return ok;
};
let Some(current_history_path) = history.file_path() else {
return Err(ShellError::ConfigDirNotFound { span: span.into() });
};
if let Some(bak_path) = backup(&current_history_path, span)? {
println!("Backed history to {}", bak_path.display());
}
match input {
PipelineData::Empty => {
let other_format = match history.file_format {
HistoryFileFormat::Sqlite => HistoryFileFormat::Plaintext,
HistoryFileFormat::Plaintext => HistoryFileFormat::Sqlite,
};
let src = new_backend(other_format, None)?;
let mut dst = new_backend(history.file_format, Some(current_history_path))?;
let items = src
.search(SearchQuery::everything(
reedline::SearchDirection::Forward,
None,
))
.map_err(error_from_reedline)?
.into_iter()
.map(Ok);
import(dst.as_mut(), items)
}
_ => {
let input = input.into_iter().map(item_from_value);
import(
new_backend(history.file_format, Some(current_history_path))?.as_mut(),
input,
)
}
}?;
ok
}
}
fn new_backend(
format: HistoryFileFormat,
path: Option<PathBuf>,
) -> Result<Box<dyn History>, ShellError> {
let path = match path {
Some(path) => path,
None => {
let Some(mut path) = nu_path::nu_config_dir() else {
return Err(ShellError::ConfigDirNotFound { span: None });
};
path.push(format.default_file_name());
path.into_std_path_buf()
}
};
fn map(
result: Result<impl History + 'static, ReedlineError>,
) -> Result<Box<dyn History>, ShellError> {
result
.map(|x| Box::new(x) as Box<dyn History>)
.map_err(error_from_reedline)
}
match format {
// Use a reasonably large value for maximum capacity.
HistoryFileFormat::Plaintext => map(FileBackedHistory::with_file(0xfffffff, path)),
HistoryFileFormat::Sqlite => map(SqliteBackedHistory::with_file(path, None, None)),
}
}
fn import(
dst: &mut dyn History,
src: impl Iterator<Item = Result<HistoryItem, ShellError>>,
) -> Result<(), ShellError> {
for item in src {
let mut item = item?;
item.id = None;
dst.save(item).map_err(error_from_reedline)?;
}
Ok(())
}
fn error_from_reedline(e: ReedlineError) -> ShellError {
// TODO: Should we add a new ShellError variant?
ShellError::GenericError {
error: "Reedline error".to_owned(),
msg: format!("{e}"),
span: None,
help: None,
inner: Vec::new(),
}
}
fn item_from_value(v: Value) -> Result<HistoryItem, ShellError> {
let span = v.span();
match v {
Value::Record { val, .. } => item_from_record(val.into_owned(), span),
Value::String { val, .. } => Ok(HistoryItem {
command_line: val,
id: None,
start_timestamp: None,
session_id: None,
hostname: None,
cwd: None,
duration: None,
exit_status: None,
more_info: None,
}),
_ => Err(ShellError::UnsupportedInput {
msg: "Only list and record inputs are supported".to_owned(),
input: v.get_type().to_string(),
msg_span: span,
input_span: span,
}),
}
}
fn item_from_record(mut rec: Record, span: Span) -> Result<HistoryItem, ShellError> {
let cmd = match rec.remove(fields::COMMAND_LINE) {
Some(v) => v.as_str()?.to_owned(),
None => {
return Err(ShellError::TypeMismatch {
err_message: format!("missing column: {}", fields::COMMAND_LINE),
span,
})
}
};
fn get<T>(
rec: &mut Record,
field: &'static str,
f: impl FnOnce(Value) -> Result<T, ShellError>,
) -> Result<Option<T>, ShellError> {
rec.remove(field).map(f).transpose()
}
let rec = &mut rec;
let item = HistoryItem {
command_line: cmd,
id: None,
start_timestamp: get(rec, fields::START_TIMESTAMP, |v| Ok(v.as_date()?.to_utc()))?,
hostname: get(rec, fields::HOSTNAME, |v| Ok(v.as_str()?.to_owned()))?,
cwd: get(rec, fields::CWD, |v| Ok(v.as_str()?.to_owned()))?,
exit_status: get(rec, fields::EXIT_STATUS, |v| v.as_int())?,
duration: get(rec, fields::DURATION, |v| duration_from_value(v, span))?,
more_info: None,
// TODO: Currently reedline doesn't let you create session IDs.
session_id: None,
};
if !rec.is_empty() {
let cols = rec.columns().map(|s| s.as_str()).collect::<Vec<_>>();
return Err(ShellError::TypeMismatch {
err_message: format!("unsupported column names: {}", cols.join(", ")),
span,
});
}
Ok(item)
}
fn duration_from_value(v: Value, span: Span) -> Result<std::time::Duration, ShellError> {
chrono::Duration::nanoseconds(v.as_duration()?)
.to_std()
.map_err(|_| ShellError::NeedsPositiveValue { span })
}
fn find_backup_path(path: &Path, span: Span) -> Result<PathBuf, ShellError> {
let Ok(mut bak_path) = path.to_path_buf().into_os_string().into_string() else {
// This isn't fundamentally a problem, but trying to work with OsString is a nightmare.
return Err(ShellError::GenericError {
error: "History path not UTF-8".to_string(),
msg: "History path must be representable as UTF-8".to_string(),
span: Some(span),
help: None,
inner: vec![],
});
};
bak_path.push_str(".bak");
if !Path::new(&bak_path).exists() {
return Ok(bak_path.into());
}
let base_len = bak_path.len();
for i in 1..100 {
use std::fmt::Write;
bak_path.truncate(base_len);
write!(&mut bak_path, ".{i}").unwrap();
if !Path::new(&bak_path).exists() {
return Ok(PathBuf::from(bak_path));
}
}
Err(ShellError::GenericError {
error: "Too many backup files".to_string(),
msg: "Found too many existing backup files".to_string(),
span: Some(span),
help: None,
inner: vec![],
})
}
fn backup(path: &Path, span: Span) -> Result<Option<PathBuf>, ShellError> {
match path.metadata() {
Ok(md) if md.is_file() => (),
Ok(_) => {
return Err(IoError::new_with_additional_context(
shell_error::io::ErrorKind::NotAFile,
span,
PathBuf::from(path),
"history path exists but is not a file",
)
.into())
}
Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(None),
Err(e) => {
return Err(IoError::new_internal(
e.kind(),
"Could not get metadata",
nu_protocol::location!(),
)
.into())
}
}
let bak_path = find_backup_path(path, span)?;
std::fs::copy(path, &bak_path).map_err(|err| {
IoError::new_internal(
err.kind(),
"Could not copy backup",
nu_protocol::location!(),
)
})?;
Ok(Some(bak_path))
}
#[cfg(test)]
mod tests {
use chrono::DateTime;
use rstest::rstest;
use super::*;
#[test]
fn test_item_from_value_string() -> Result<(), ShellError> {
let item = item_from_value(Value::string("foo", Span::unknown()))?;
assert_eq!(
item,
HistoryItem {
command_line: "foo".to_string(),
id: None,
start_timestamp: None,
session_id: None,
hostname: None,
cwd: None,
duration: None,
exit_status: None,
more_info: None
}
);
Ok(())
}
#[test]
fn test_item_from_value_record() {
let span = Span::unknown();
let rec = new_record(&[
("command", Value::string("foo", span)),
(
"start_timestamp",
Value::date(
DateTime::parse_from_rfc3339("1996-12-19T16:39:57-08:00").unwrap(),
span,
),
),
("hostname", Value::string("localhost", span)),
("cwd", Value::string("/home/test", span)),
("duration", Value::duration(100_000_000, span)),
("exit_status", Value::int(42, span)),
]);
let item = item_from_value(rec).unwrap();
assert_eq!(
item,
HistoryItem {
command_line: "foo".to_string(),
id: None,
start_timestamp: Some(
DateTime::parse_from_rfc3339("1996-12-19T16:39:57-08:00")
.unwrap()
.to_utc()
),
hostname: Some("localhost".to_string()),
cwd: Some("/home/test".to_string()),
duration: Some(std::time::Duration::from_nanos(100_000_000)),
exit_status: Some(42),
session_id: None,
more_info: None
}
);
}
#[test]
fn test_item_from_value_record_extra_field() {
let span = Span::unknown();
let rec = new_record(&[
("command_line", Value::string("foo", span)),
("id_nonexistent", Value::int(1, span)),
]);
assert!(item_from_value(rec).is_err());
}
#[test]
fn test_item_from_value_record_bad_type() {
let span = Span::unknown();
let rec = new_record(&[
("command_line", Value::string("foo", span)),
("id", Value::string("one".to_string(), span)),
]);
assert!(item_from_value(rec).is_err());
}
fn new_record(rec: &[(&'static str, Value)]) -> Value {
let span = Span::unknown();
let rec = Record::from_raw_cols_vals(
rec.iter().map(|(col, _)| col.to_string()).collect(),
rec.iter().map(|(_, val)| val.clone()).collect(),
span,
span,
)
.unwrap();
Value::record(rec, span)
}
#[rstest]
#[case::no_backup(&["history.dat"], "history.dat.bak")]
#[case::backup_exists(&["history.dat", "history.dat.bak"], "history.dat.bak.1")]
#[case::multiple_backups_exists( &["history.dat", "history.dat.bak", "history.dat.bak.1"], "history.dat.bak.2")]
fn test_find_backup_path(#[case] existing: &[&str], #[case] want: &str) {
let dir = tempfile::tempdir().unwrap();
for name in existing {
std::fs::File::create_new(dir.path().join(name)).unwrap();
}
let got = find_backup_path(&dir.path().join("history.dat"), Span::test_data()).unwrap();
assert_eq!(got, dir.path().join(want))
}
#[test]
fn test_backup() {
let dir = tempfile::tempdir().unwrap();
let mut history = std::fs::File::create_new(dir.path().join("history.dat")).unwrap();
use std::io::Write;
write!(&mut history, "123").unwrap();
let want_bak_path = dir.path().join("history.dat.bak");
assert_eq!(
backup(&dir.path().join("history.dat"), Span::test_data()),
Ok(Some(want_bak_path.clone()))
);
let got_data = String::from_utf8(std::fs::read(want_bak_path).unwrap()).unwrap();
assert_eq!(got_data, "123");
}
#[test]
fn test_backup_no_file() {
let dir = tempfile::tempdir().unwrap();
let bak_path = backup(&dir.path().join("history.dat"), Span::test_data()).unwrap();
assert!(bak_path.is_none());
}
}
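// [editor's illustrative sketch: not part of the diff above]
// find_backup_path above probes `<path>.bak`, then `<path>.bak.1` .. `<path>.bak.99`,
// and gives up once all candidates exist. This is a standalone restatement without the
// ShellError plumbing; next_backup_path is a hypothetical helper name.
use std::path::{Path, PathBuf};

fn next_backup_path(path: &Path) -> Option<PathBuf> {
    let base = format!("{}.bak", path.display());
    if !Path::new(&base).exists() {
        return Some(PathBuf::from(base));
    }
    (1..100)
        .map(|i| format!("{base}.{i}"))
        .find(|candidate| !Path::new(candidate).exists())
        .map(PathBuf::from)
}

fn main() {
    // With no existing backups this prints Some("history.dat.bak").
    println!("{:?}", next_backup_path(Path::new("history.dat")));
}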


@ -1,8 +1,5 @@
mod fields;
mod history_; mod history_;
mod history_import;
mod history_session; mod history_session;
pub use history_::History; pub use history_::History;
pub use history_import::HistoryImport;
pub use history_session::HistorySession; pub use history_session::HistorySession;


@ -2,7 +2,6 @@ use crossterm::{
event::Event, event::KeyCode, event::KeyEvent, execute, terminal, QueueableCommand, event::Event, event::KeyCode, event::KeyEvent, execute, terminal, QueueableCommand,
}; };
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use nu_protocol::shell_error::io::IoError;
use std::io::{stdout, Write}; use std::io::{stdout, Write};
#[derive(Clone)] #[derive(Clone)]
@ -40,13 +39,7 @@ impl Command for KeybindingsListen {
match print_events(engine_state) { match print_events(engine_state) {
Ok(v) => Ok(v.into_pipeline_data()), Ok(v) => Ok(v.into_pipeline_data()),
Err(e) => { Err(e) => {
terminal::disable_raw_mode().map_err(|err| { terminal::disable_raw_mode()?;
IoError::new_internal(
err.kind(),
"Could not disable raw mode",
nu_protocol::location!(),
)
})?;
Err(ShellError::GenericError { Err(ShellError::GenericError {
error: "Error with input".into(), error: "Error with input".into(),
msg: "".into(), msg: "".into(),
@ -70,20 +63,8 @@ impl Command for KeybindingsListen {
pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> { pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
let config = engine_state.get_config(); let config = engine_state.get_config();
stdout().flush().map_err(|err| { stdout().flush()?;
IoError::new_internal( terminal::enable_raw_mode()?;
err.kind(),
"Could not flush stdout",
nu_protocol::location!(),
)
})?;
terminal::enable_raw_mode().map_err(|err| {
IoError::new_internal(
err.kind(),
"Could not enable raw mode",
nu_protocol::location!(),
)
})?;
if config.use_kitty_protocol { if config.use_kitty_protocol {
if let Ok(false) = crossterm::terminal::supports_keyboard_enhancement() { if let Ok(false) = crossterm::terminal::supports_keyboard_enhancement() {
@ -113,9 +94,7 @@ pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
let mut stdout = std::io::BufWriter::new(std::io::stderr()); let mut stdout = std::io::BufWriter::new(std::io::stderr());
loop { loop {
let event = crossterm::event::read().map_err(|err| { let event = crossterm::event::read()?;
IoError::new_internal(err.kind(), "Could not read event", nu_protocol::location!())
})?;
if event == Event::Key(KeyCode::Esc.into()) { if event == Event::Key(KeyCode::Esc.into()) {
break; break;
} }
@ -134,25 +113,9 @@ pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
_ => "".to_string(), _ => "".to_string(),
}; };
stdout.queue(crossterm::style::Print(o)).map_err(|err| { stdout.queue(crossterm::style::Print(o))?;
IoError::new_internal( stdout.queue(crossterm::style::Print("\r\n"))?;
err.kind(), stdout.flush()?;
"Could not print output record",
nu_protocol::location!(),
)
})?;
stdout
.queue(crossterm::style::Print("\r\n"))
.map_err(|err| {
IoError::new_internal(
err.kind(),
"Could not print linebreak",
nu_protocol::location!(),
)
})?;
stdout.flush().map_err(|err| {
IoError::new_internal(err.kind(), "Could not flush", nu_protocol::location!())
})?;
} }
if config.use_kitty_protocol { if config.use_kitty_protocol {
@ -162,13 +125,7 @@ pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
); );
} }
terminal::disable_raw_mode().map_err(|err| { terminal::disable_raw_mode()?;
IoError::new_internal(
err.kind(),
"Could not disable raw mode",
nu_protocol::location!(),
)
})?;
Ok(Value::nothing(Span::unknown())) Ok(Value::nothing(Span::unknown()))
} }
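// [editor's illustrative sketch: not part of the diff above]
// One side of the hunks above wraps every std::io failure with
// IoError::new_internal(kind, context, location) instead of a bare `?`, so the error
// carries what was being attempted and where. This standalone sketch shows that wrapping
// pattern with a hypothetical DomainIoError type; it is not the real
// nu_protocol::shell_error::io::IoError API.
use std::io::{self, Write};

#[derive(Debug)]
struct DomainIoError {
    kind: io::ErrorKind,
    context: &'static str,
    location: String,
}

fn flush_stdout() -> Result<(), DomainIoError> {
    io::stdout().flush().map_err(|err| DomainIoError {
        kind: err.kind(),
        context: "Could not flush stdout",
        location: format!("{}:{}", file!(), line!()),
    })
}

fn main() {
    match flush_stdout() {
        Ok(()) => println!("flushed"),
        Err(e) => eprintln!("{} ({:?}) at {}", e.context, e.kind, e.location),
    }
}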


@ -7,7 +7,7 @@ mod keybindings_list;
mod keybindings_listen; mod keybindings_listen;
pub use commandline::{Commandline, CommandlineEdit, CommandlineGetCursor, CommandlineSetCursor}; pub use commandline::{Commandline, CommandlineEdit, CommandlineGetCursor, CommandlineSetCursor};
pub use history::{History, HistoryImport, HistorySession}; pub use history::{History, HistorySession};
pub use keybindings::Keybindings; pub use keybindings::Keybindings;
pub use keybindings_default::KeybindingsDefault; pub use keybindings_default::KeybindingsDefault;
pub use keybindings_list::KeybindingsList; pub use keybindings_list::KeybindingsList;


@ -1,87 +0,0 @@
use super::{completion_options::NuMatcher, SemanticSuggestion};
use crate::{
completions::{Completer, CompletionOptions},
SuggestionKind,
};
use nu_protocol::{
engine::{Stack, StateWorkingSet},
Span,
};
use reedline::Suggestion;
pub struct AttributeCompletion;
pub struct AttributableCompletion;
impl Completer for AttributeCompletion {
fn fetch(
&mut self,
working_set: &StateWorkingSet,
_stack: &Stack,
prefix: impl AsRef<str>,
span: Span,
offset: usize,
options: &CompletionOptions,
) -> Vec<SemanticSuggestion> {
let mut matcher = NuMatcher::new(prefix, options);
let attr_commands =
working_set.find_commands_by_predicate(|s| s.starts_with(b"attr "), true);
for (decl_id, name, desc, ty) in attr_commands {
let name = name.strip_prefix(b"attr ").unwrap_or(&name);
matcher.add_semantic_suggestion(SemanticSuggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(name).into_owned(),
description: desc,
style: None,
extra: None,
span: reedline::Span {
start: span.start - offset,
end: span.end - offset,
},
append_whitespace: false,
},
kind: Some(SuggestionKind::Command(ty, Some(decl_id))),
});
}
matcher.results()
}
}
impl Completer for AttributableCompletion {
fn fetch(
&mut self,
working_set: &StateWorkingSet,
_stack: &Stack,
prefix: impl AsRef<str>,
span: Span,
offset: usize,
options: &CompletionOptions,
) -> Vec<SemanticSuggestion> {
let mut matcher = NuMatcher::new(prefix, options);
for s in ["def", "extern", "export def", "export extern"] {
let decl_id = working_set
.find_decl(s.as_bytes())
.expect("internal error, builtin declaration not found");
let cmd = working_set.get_decl(decl_id);
matcher.add_semantic_suggestion(SemanticSuggestion {
suggestion: Suggestion {
value: cmd.name().into(),
description: Some(cmd.description().into()),
style: None,
extra: None,
span: reedline::Span {
start: span.start - offset,
end: span.end - offset,
},
append_whitespace: false,
},
kind: Some(SuggestionKind::Command(cmd.command_type(), None)),
});
}
matcher.results()
}
}


@ -1,7 +1,7 @@
use crate::completions::CompletionOptions; use crate::completions::CompletionOptions;
use nu_protocol::{ use nu_protocol::{
engine::{Stack, StateWorkingSet}, engine::{Stack, StateWorkingSet},
DeclId, Span, Span,
}; };
use reedline::Suggestion; use reedline::Suggestion;
@ -12,9 +12,10 @@ pub trait Completer {
&mut self, &mut self,
working_set: &StateWorkingSet, working_set: &StateWorkingSet,
stack: &Stack, stack: &Stack,
prefix: impl AsRef<str>, prefix: &[u8],
span: Span, span: Span,
offset: usize, offset: usize,
pos: usize,
options: &CompletionOptions, options: &CompletionOptions,
) -> Vec<SemanticSuggestion>; ) -> Vec<SemanticSuggestion>;
} }
@ -28,15 +29,8 @@ pub struct SemanticSuggestion {
// TODO: think about name: maybe suggestion context? // TODO: think about name: maybe suggestion context?
#[derive(Clone, Debug, PartialEq)] #[derive(Clone, Debug, PartialEq)]
pub enum SuggestionKind { pub enum SuggestionKind {
Command(nu_protocol::engine::CommandType, Option<DeclId>), Command(nu_protocol::engine::CommandType),
Value(nu_protocol::Type), Type(nu_protocol::Type),
CellPath,
Directory,
File,
Flag,
Module,
Operator,
Variable,
} }
impl From<Suggestion> for SemanticSuggestion { impl From<Suggestion> for SemanticSuggestion {


@ -1,153 +0,0 @@
use std::borrow::Cow;
use crate::completions::{Completer, CompletionOptions, SemanticSuggestion, SuggestionKind};
use nu_engine::{column::get_columns, eval_variable};
use nu_protocol::{
ast::{Expr, Expression, FullCellPath, PathMember},
engine::{Stack, StateWorkingSet},
eval_const::eval_constant,
ShellError, Span, Value,
};
use reedline::Suggestion;
use super::completion_options::NuMatcher;
pub struct CellPathCompletion<'a> {
pub full_cell_path: &'a FullCellPath,
pub position: usize,
}
fn prefix_from_path_member(member: &PathMember, pos: usize) -> (String, Span) {
let (prefix_str, start) = match member {
PathMember::String { val, span, .. } => (val, span.start),
PathMember::Int { val, span, .. } => (&val.to_string(), span.start),
};
let prefix_str = prefix_str.get(..pos + 1 - start).unwrap_or(prefix_str);
// strip wrapping quotes
let quotations = ['"', '\'', '`'];
let prefix_str = prefix_str.strip_prefix(quotations).unwrap_or(prefix_str);
(prefix_str.to_string(), Span::new(start, pos + 1))
}
impl Completer for CellPathCompletion<'_> {
fn fetch(
&mut self,
working_set: &StateWorkingSet,
stack: &Stack,
_prefix: impl AsRef<str>,
_span: Span,
offset: usize,
options: &CompletionOptions,
) -> Vec<SemanticSuggestion> {
let mut prefix_str = String::new();
// position at dots, e.g. `$env.config.<TAB>`
let mut span = Span::new(self.position + 1, self.position + 1);
let mut path_member_num_before_pos = 0;
for member in self.full_cell_path.tail.iter() {
if member.span().end <= self.position {
path_member_num_before_pos += 1;
} else if member.span().contains(self.position) {
(prefix_str, span) = prefix_from_path_member(member, self.position);
break;
}
}
let current_span = reedline::Span {
start: span.start - offset,
end: span.end - offset,
};
let mut matcher = NuMatcher::new(prefix_str, options);
let path_members = self
.full_cell_path
.tail
.get(0..path_member_num_before_pos)
.unwrap_or_default();
let value = eval_cell_path(
working_set,
stack,
&self.full_cell_path.head,
path_members,
span,
)
.unwrap_or_default();
for suggestion in get_suggestions_by_value(&value, current_span) {
matcher.add_semantic_suggestion(suggestion);
}
matcher.results()
}
}
/// Follow cell path to get the value
/// NOTE: This is a relatively lightweight implementation,
/// so it may fail to get the exact value when the expression is complicated.
/// One failing example would be `[$foo].0`
pub(crate) fn eval_cell_path(
working_set: &StateWorkingSet,
stack: &Stack,
head: &Expression,
path_members: &[PathMember],
span: Span,
) -> Result<Value, ShellError> {
// evaluate the head expression to get its value
let head_value = if let Expr::Var(var_id) = head.expr {
working_set
.get_variable(var_id)
.const_val
.to_owned()
.map_or_else(
|| eval_variable(working_set.permanent_state, stack, var_id, span),
Ok,
)
} else {
eval_constant(working_set, head)
}?;
head_value
.follow_cell_path(path_members, false)
.map(Cow::into_owned)
}
fn get_suggestions_by_value(
value: &Value,
current_span: reedline::Span,
) -> Vec<SemanticSuggestion> {
let to_suggestion = |s: String, v: Option<&Value>| {
// Check if the string needs quoting
let value = if s.is_empty()
|| s.chars()
.any(|c: char| !(c.is_ascii_alphabetic() || ['_', '-'].contains(&c)))
{
format!("{:?}", s)
} else {
s
};
SemanticSuggestion {
suggestion: Suggestion {
value,
span: current_span,
description: v.map(|v| v.get_type().to_string()),
..Suggestion::default()
},
kind: Some(SuggestionKind::CellPath),
}
};
match value {
Value::Record { val, .. } => val
.columns()
.map(|s| to_suggestion(s.to_string(), val.get(s)))
.collect(),
Value::List { vals, .. } => get_columns(vals.as_slice())
.into_iter()
.map(|s| {
let sub_val = vals
.first()
.and_then(|v| v.as_record().ok())
.and_then(|rv| rv.get(&s));
to_suggestion(s, sub_val)
})
.collect(),
_ => vec![],
}
}
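// [editor's illustrative sketch: not part of the diff above]
// get_suggestions_by_value above quotes any column name that is empty or contains
// something other than ASCII letters, '_' or '-', so the completed cell path stays
// parseable. Standalone restatement; completion_value is a hypothetical helper name.
fn completion_value(column: &str) -> String {
    let needs_quotes = column.is_empty()
        || column
            .chars()
            .any(|c| !(c.is_ascii_alphabetic() || ['_', '-'].contains(&c)));
    if needs_quotes {
        // Debug formatting adds the surrounding quotes and escapes embedded ones.
        format!("{:?}", column)
    } else {
        column.to_string()
    }
}

fn main() {
    assert_eq!(completion_value("name"), "name");
    assert_eq!(completion_value("file size"), "\"file size\"");
    assert_eq!(completion_value(""), "\"\"");
}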


@ -1,37 +1,47 @@
use std::collections::HashMap;
use crate::{ use crate::{
completions::{Completer, CompletionOptions}, completions::{Completer, CompletionOptions, MatchAlgorithm},
SuggestionKind, SuggestionKind,
}; };
use nu_parser::FlatShape;
use nu_protocol::{ use nu_protocol::{
engine::{CommandType, Stack, StateWorkingSet}, engine::{CachedFile, Stack, StateWorkingSet},
Span, Span,
}; };
use reedline::Suggestion; use reedline::Suggestion;
use super::{completion_options::NuMatcher, SemanticSuggestion}; use super::{completion_common::sort_suggestions, SemanticSuggestion};
pub struct CommandCompletion { pub struct CommandCompletion {
/// Whether to include internal commands flattened: Vec<(Span, FlatShape)>,
pub internals: bool, flat_shape: FlatShape,
/// Whether to include external commands force_completion_after_space: bool,
pub externals: bool,
} }
impl CommandCompletion { impl CommandCompletion {
pub fn new(
flattened: Vec<(Span, FlatShape)>,
flat_shape: FlatShape,
force_completion_after_space: bool,
) -> Self {
Self {
flattened,
flat_shape,
force_completion_after_space,
}
}
fn external_command_completion( fn external_command_completion(
&self, &self,
working_set: &StateWorkingSet, working_set: &StateWorkingSet,
sugg_span: reedline::Span, prefix: &str,
matched_internal: impl Fn(&str) -> bool, match_algorithm: MatchAlgorithm,
matcher: &mut NuMatcher<String>, ) -> Vec<String> {
) -> HashMap<String, SemanticSuggestion> { let mut executables = vec![];
let mut suggs = HashMap::new();
let paths = working_set.permanent_state.get_env_var_insensitive("path"); // os agnostic way to get the PATH env var
let paths = working_set.permanent_state.get_path_env_var();
if let Some((_, paths)) = paths { if let Some(paths) = paths {
if let Ok(paths) = paths.as_list() { if let Ok(paths) = paths.as_list() {
for path in paths { for path in paths {
let path = path.coerce_str().unwrap_or_default(); let path = path.coerce_str().unwrap_or_default();
@ -44,43 +54,24 @@ impl CommandCompletion {
.completions .completions
.external .external
.max_results .max_results
<= suggs.len() as i64 > executables.len() as i64
&& !executables.contains(
&item
.path()
.file_name()
.map(|x| x.to_string_lossy().to_string())
.unwrap_or_default(),
)
&& matches!(
item.path().file_name().map(|x| match_algorithm
.matches_str(&x.to_string_lossy(), prefix)),
Some(true)
)
&& is_executable::is_executable(item.path())
{ {
break; if let Ok(name) = item.file_name().into_string() {
executables.push(name);
} }
let Ok(name) = item.file_name().into_string() else {
continue;
};
let value = if matched_internal(&name) {
format!("^{}", name)
} else {
name.clone()
};
if suggs.contains_key(&value) {
continue;
}
// TODO: check name matching before a relative heavy IO involved
// `is_executable` for performance consideration, should avoid
// duplicated `match_aux` call for matched items in the future
if matcher.matches(&name) && is_executable::is_executable(item.path()) {
// If there's an internal command with the same name, adds ^cmd to the
// matcher so that both the internal and external command are included
matcher.add(&name, value.clone());
suggs.insert(
value.clone(),
SemanticSuggestion {
suggestion: Suggestion {
value,
span: sugg_span,
append_whitespace: true,
..Default::default()
},
kind: Some(SuggestionKind::Command(
CommandType::External,
None,
)),
},
);
} }
} }
} }
@ -88,7 +79,77 @@ impl CommandCompletion {
} }
} }
suggs executables
}
fn complete_commands(
&self,
working_set: &StateWorkingSet,
span: Span,
offset: usize,
find_externals: bool,
match_algorithm: MatchAlgorithm,
) -> Vec<SemanticSuggestion> {
let partial = working_set.get_span_contents(span);
let filter_predicate = |command: &[u8]| match_algorithm.matches_u8(command, partial);
let mut results = working_set
.find_commands_by_predicate(filter_predicate, true)
.into_iter()
.map(move |x| SemanticSuggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(&x.0).to_string(),
description: x.1,
span: reedline::Span::new(span.start - offset, span.end - offset),
append_whitespace: true,
..Suggestion::default()
},
kind: Some(SuggestionKind::Command(x.2)),
})
.collect::<Vec<_>>();
let partial = working_set.get_span_contents(span);
let partial = String::from_utf8_lossy(partial).to_string();
if find_externals {
let results_external = self
.external_command_completion(working_set, &partial, match_algorithm)
.into_iter()
.map(move |x| SemanticSuggestion {
suggestion: Suggestion {
value: x,
span: reedline::Span::new(span.start - offset, span.end - offset),
append_whitespace: true,
..Suggestion::default()
},
// TODO: is there a way to create a test?
kind: None,
});
let results_strings: Vec<String> =
results.iter().map(|x| x.suggestion.value.clone()).collect();
for external in results_external {
if results_strings.contains(&external.suggestion.value) {
results.push(SemanticSuggestion {
suggestion: Suggestion {
value: format!("^{}", external.suggestion.value),
span: external.suggestion.span,
append_whitespace: true,
..Suggestion::default()
},
kind: external.kind,
})
} else {
results.push(external)
}
}
results
} else {
results
}
} }
} }
@ -97,62 +158,175 @@ impl Completer for CommandCompletion {
&mut self, &mut self,
working_set: &StateWorkingSet, working_set: &StateWorkingSet,
_stack: &Stack, _stack: &Stack,
prefix: impl AsRef<str>, prefix: &[u8],
span: Span, span: Span,
offset: usize, offset: usize,
pos: usize,
options: &CompletionOptions, options: &CompletionOptions,
) -> Vec<SemanticSuggestion> { ) -> Vec<SemanticSuggestion> {
let mut matcher = NuMatcher::new(prefix, options); let last = self
.flattened
.iter()
.rev()
.skip_while(|x| x.0.end > pos)
.take_while(|x| {
matches!(
x.1,
FlatShape::InternalCall(_)
| FlatShape::External
| FlatShape::ExternalArg
| FlatShape::Literal
| FlatShape::String
)
})
.last();
let sugg_span = reedline::Span::new(span.start - offset, span.end - offset); // The last item here would be the earliest shape that could possibly be part of this subcommand
let subcommands = if let Some(last) = last {
let mut internal_suggs = HashMap::new(); self.complete_commands(
if self.internals {
let filtered_commands = working_set.find_commands_by_predicate(
|name| {
let name = String::from_utf8_lossy(name);
matcher.add(&name, name.to_string())
},
true,
);
for (decl_id, name, description, typ) in filtered_commands {
let name = String::from_utf8_lossy(&name);
internal_suggs.insert(
name.to_string(),
SemanticSuggestion {
suggestion: Suggestion {
value: name.to_string(),
description,
span: sugg_span,
append_whitespace: true,
..Suggestion::default()
},
kind: Some(SuggestionKind::Command(typ, Some(decl_id))),
},
);
}
}
let mut external_suggs = if self.externals {
self.external_command_completion(
working_set, working_set,
sugg_span, Span::new(last.0.start, pos),
|name| internal_suggs.contains_key(name), offset,
&mut matcher, false,
options.match_algorithm,
) )
} else { } else {
HashMap::new() vec![]
}; };
let mut res = Vec::new(); if !subcommands.is_empty() {
for cmd_name in matcher.results() { return sort_suggestions(&String::from_utf8_lossy(prefix), subcommands, options);
if let Some(sugg) = internal_suggs }
.remove(&cmd_name)
.or_else(|| external_suggs.remove(&cmd_name)) let config = working_set.get_config();
let commands = if matches!(self.flat_shape, nu_parser::FlatShape::External)
|| matches!(self.flat_shape, nu_parser::FlatShape::InternalCall(_))
|| ((span.end - span.start) == 0)
|| is_passthrough_command(working_set.delta.get_file_contents())
{ {
res.push(sugg); // we're in a gap or at a command
if working_set.get_span_contents(span).is_empty() && !self.force_completion_after_space
{
return vec![];
}
self.complete_commands(
working_set,
span,
offset,
config.completions.external.enable,
options.match_algorithm,
)
} else {
vec![]
};
sort_suggestions(&String::from_utf8_lossy(prefix), commands, options)
} }
} }
res
pub fn find_non_whitespace_index(contents: &[u8], start: usize) -> usize {
match contents.get(start..) {
Some(contents) => {
contents
.iter()
.take_while(|x| x.is_ascii_whitespace())
.count()
+ start
}
None => start,
}
}
pub fn is_passthrough_command(working_set_file_contents: &[CachedFile]) -> bool {
for cached_file in working_set_file_contents {
let contents = &cached_file.content;
let last_pipe_pos_rev = contents.iter().rev().position(|x| x == &b'|');
let last_pipe_pos = last_pipe_pos_rev.map(|x| contents.len() - x).unwrap_or(0);
let cur_pos = find_non_whitespace_index(contents, last_pipe_pos);
let result = match contents.get(cur_pos..) {
Some(contents) => contents.starts_with(b"sudo ") || contents.starts_with(b"doas "),
None => false,
};
if result {
return true;
}
}
false
}
#[cfg(test)]
mod command_completions_tests {
use super::*;
use nu_protocol::engine::EngineState;
use std::sync::Arc;
#[test]
fn test_find_non_whitespace_index() {
let commands = [
(" hello", 4),
("sudo ", 0),
(" sudo ", 2),
(" sudo ", 2),
(" hello ", 1),
(" hello ", 3),
(" hello | sudo ", 4),
(" sudo|sudo", 5),
("sudo | sudo ", 0),
(" hello sud", 1),
];
for (idx, ele) in commands.iter().enumerate() {
let index = find_non_whitespace_index(ele.0.as_bytes(), 0);
assert_eq!(index, ele.1, "Failed on index {}", idx);
}
}
#[test]
fn test_is_last_command_passthrough() {
let commands = [
(" hello", false),
(" sudo ", true),
("sudo ", true),
(" hello", false),
(" sudo", false),
(" sudo ", true),
(" sudo ", true),
(" sudo ", true),
(" hello ", false),
(" hello | sudo ", true),
(" sudo|sudo", false),
("sudo | sudo ", true),
(" hello sud", false),
(" sudo | sud ", false),
(" sudo|sudo ", true),
(" sudo | sudo ls | sudo ", true),
];
for (idx, ele) in commands.iter().enumerate() {
let input = ele.0.as_bytes();
let mut engine_state = EngineState::new();
engine_state.add_file("test.nu".into(), Arc::new([]));
let delta = {
let mut working_set = StateWorkingSet::new(&engine_state);
let _ = working_set.add_file("child.nu".into(), input);
working_set.render()
};
let result = engine_state.merge_delta(delta);
assert!(
result.is_ok(),
"Merge delta has failed: {}",
result.err().unwrap()
);
let is_passthrough_command = is_passthrough_command(engine_state.get_file_contents());
assert_eq!(
is_passthrough_command, ele.1,
"index for '{}': {}",
ele.0, idx
);
}
} }
} }
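// [editor's illustrative sketch: not part of the diff above]
// Both versions of the external-command completion above apply the same caret rule: when
// an external executable shares its name with an internal command, it is suggested as
// `^name` so the external one stays selectable. Standalone restatement with hard-coded
// data; external_suggestion is a hypothetical helper name.
use std::collections::HashSet;

fn external_suggestion(name: &str, internals: &HashSet<&str>) -> String {
    if internals.contains(name) {
        format!("^{name}")
    } else {
        name.to_string()
    }
}

fn main() {
    let internals: HashSet<&str> = ["ls", "sort"].into_iter().collect();
    assert_eq!(external_suggestion("ls", &internals), "^ls");
    assert_eq!(external_suggestion("rg", &internals), "rg");
}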

File diff suppressed because it is too large


@ -1,20 +1,22 @@
use super::{completion_options::NuMatcher, MatchAlgorithm}; use super::MatchAlgorithm;
use crate::completions::CompletionOptions; use crate::{
completions::{matches, CompletionOptions},
SemanticSuggestion,
};
use fuzzy_matcher::{skim::SkimMatcherV2, FuzzyMatcher};
use nu_ansi_term::Style; use nu_ansi_term::Style;
use nu_engine::env_to_string; use nu_engine::env_to_string;
use nu_path::dots::expand_ndots; use nu_path::dots::expand_ndots;
use nu_path::{expand_to_real_path, home_dir}; use nu_path::{expand_to_real_path, home_dir};
use nu_protocol::{ use nu_protocol::{
engine::{EngineState, Stack, StateWorkingSet}, engine::{EngineState, Stack, StateWorkingSet},
Span, CompletionSort, Span,
}; };
use nu_utils::get_ls_colors; use nu_utils::get_ls_colors;
use nu_utils::IgnoreCaseExt;
use std::path::{is_separator, Component, Path, PathBuf, MAIN_SEPARATOR as SEP}; use std::path::{is_separator, Component, Path, PathBuf, MAIN_SEPARATOR as SEP};
#[derive(Clone, Default)] #[derive(Clone, Default)]
pub struct PathBuiltFromString { pub struct PathBuiltFromString {
cwd: PathBuf,
parts: Vec<String>, parts: Vec<String>,
isdir: bool, isdir: bool,
} }
@ -22,103 +24,84 @@ pub struct PathBuiltFromString {
/// Recursively goes through paths that match a given `partial`. /// Recursively goes through paths that match a given `partial`.
/// built: State struct for a valid matching path built so far. /// built: State struct for a valid matching path built so far.
/// ///
/// `want_directory`: Whether we want only directories as completion matches.
/// Some commands like `cd` can only be run on directories whereas others
/// like `ls` can be run on regular files as well.
///
/// `isdir`: whether the current partial path has a trailing slash. /// `isdir`: whether the current partial path has a trailing slash.
/// Parsing a path string into a pathbuf loses that bit of information. /// Parsing a path string into a pathbuf loses that bit of information.
/// ///
/// `enable_exact_match`: Whether match algorithm is Prefix and all previous components /// want_directory: Whether we want only directories as completion matches.
/// of the path matched a directory exactly. /// Some commands like `cd` can only be run on directories whereas others
fn complete_rec( /// like `ls` can be run on regular files as well.
pub fn complete_rec(
partial: &[&str], partial: &[&str],
built_paths: &[PathBuiltFromString], built: &PathBuiltFromString,
cwd: &Path,
options: &CompletionOptions, options: &CompletionOptions,
want_directory: bool, want_directory: bool,
isdir: bool, isdir: bool,
enable_exact_match: bool,
) -> Vec<PathBuiltFromString> { ) -> Vec<PathBuiltFromString> {
let mut completions = vec![];
if let Some((&base, rest)) = partial.split_first() { if let Some((&base, rest)) = partial.split_first() {
if base.chars().all(|c| c == '.') && (isdir || !rest.is_empty()) { if base.chars().all(|c| c == '.') && (isdir || !rest.is_empty()) {
let built_paths: Vec<_> = built_paths
.iter()
.map(|built| {
let mut built = built.clone(); let mut built = built.clone();
built.parts.push(base.to_string()); built.parts.push(base.to_string());
built.isdir = true; built.isdir = true;
built return complete_rec(rest, &built, cwd, options, want_directory, isdir);
})
.collect();
return complete_rec(
rest,
&built_paths,
options,
want_directory,
isdir,
enable_exact_match,
);
} }
} }
let prefix = partial.first().unwrap_or(&""); let mut built_path = cwd.to_path_buf();
let mut matcher = NuMatcher::new(prefix, options);
for built in built_paths {
let mut path = built.cwd.clone();
for part in &built.parts { for part in &built.parts {
path.push(part); built_path.push(part);
} }
let Ok(result) = path.read_dir() else { let Ok(result) = built_path.read_dir() else {
continue; return completions;
}; };
let mut entries = Vec::new();
for entry in result.filter_map(|e| e.ok()) { for entry in result.filter_map(|e| e.ok()) {
let entry_name = entry.file_name().to_string_lossy().into_owned(); let entry_name = entry.file_name().to_string_lossy().into_owned();
let entry_isdir = entry.path().is_dir(); let entry_isdir = entry.path().is_dir();
let mut built = built.clone(); let mut built = built.clone();
built.parts.push(entry_name.clone()); built.parts.push(entry_name.clone());
// Symlinks to directories shouldn't have a trailing slash (#13275) built.isdir = entry_isdir;
built.isdir = entry_isdir && !entry.path().is_symlink();
if !want_directory || entry_isdir { if !want_directory || entry_isdir {
matcher.add(entry_name.clone(), (entry_name, built)); entries.push((entry_name, built));
}
} }
} }
let mut completions = vec![]; let prefix = partial.first().unwrap_or(&"");
for (entry_name, built) in matcher.results() { let sorted_entries = sort_completions(prefix, entries, options, |(entry, _)| entry);
for (entry_name, built) in sorted_entries {
match partial.split_first() { match partial.split_first() {
Some((base, rest)) => { Some((base, rest)) => {
if matches(base, &entry_name, options) {
// We use `isdir` to confirm that the current component has // We use `isdir` to confirm that the current component has
// at least one next component or a slash. // at least one next component or a slash.
// Serves as confirmation to ignore longer completions for // Serves as confirmation to ignore longer completions for
// components in between. // components in between.
if !rest.is_empty() || isdir { if !rest.is_empty() || isdir {
// Don't show longer completions if we have an exact match (#13204, #14794)
let exact_match = enable_exact_match
&& (if options.case_sensitive {
entry_name.eq(base)
} else {
entry_name.eq_ignore_case(base)
});
completions.extend(complete_rec( completions.extend(complete_rec(
rest, rest,
&[built], &built,
cwd,
options, options,
want_directory, want_directory,
isdir, isdir,
exact_match,
)); ));
if exact_match {
break;
}
} else { } else {
completions.push(built); completions.push(built);
} }
} }
if entry_name.eq(base)
&& matches!(options.match_algorithm, MatchAlgorithm::Prefix)
&& isdir
{
break;
}
}
None => { None => {
completions.push(built); completions.push(built);
} }
@ -150,7 +133,7 @@ impl OriginalCwd {
} }
} }
pub fn surround_remove(partial: &str) -> String { fn surround_remove(partial: &str) -> String {
for c in ['`', '"', '\''] { for c in ['`', '"', '\''] {
if partial.starts_with(c) { if partial.starts_with(c) {
let ret = partial.strip_prefix(c).unwrap_or(partial); let ret = partial.strip_prefix(c).unwrap_or(partial);
@ -164,25 +147,15 @@ pub fn surround_remove(partial: &str) -> String {
partial.to_string() partial.to_string()
} }
pub struct FileSuggestion {
pub span: nu_protocol::Span,
pub path: String,
pub style: Option<Style>,
pub is_dir: bool,
}
/// # Parameters
/// * `cwds` - A list of directories in which to search. The only reason this isn't a single string
/// is because dotnu_completions searches in multiple directories at once
pub fn complete_item( pub fn complete_item(
want_directory: bool, want_directory: bool,
span: nu_protocol::Span, span: nu_protocol::Span,
partial: &str, partial: &str,
cwds: &[impl AsRef<str>], cwd: &str,
options: &CompletionOptions, options: &CompletionOptions,
engine_state: &EngineState, engine_state: &EngineState,
stack: &Stack, stack: &Stack,
) -> Vec<FileSuggestion> { ) -> Vec<(nu_protocol::Span, String, Option<Style>)> {
let cleaned_partial = surround_remove(partial); let cleaned_partial = surround_remove(partial);
let isdir = cleaned_partial.ends_with(is_separator); let isdir = cleaned_partial.ends_with(is_separator);
let expanded_partial = expand_ndots(Path::new(&cleaned_partial)); let expanded_partial = expand_ndots(Path::new(&cleaned_partial));
@ -202,20 +175,18 @@ pub fn complete_item(
partial.push_str(&format!("{path_separator}.")); partial.push_str(&format!("{path_separator}."));
} }
let cwd_pathbufs: Vec<_> = cwds let cwd_pathbuf = Path::new(cwd).to_path_buf();
.iter()
.map(|cwd| Path::new(cwd.as_ref()).to_path_buf())
.collect();
let ls_colors = (engine_state.config.completions.use_ls_colors let ls_colors = (engine_state.config.completions.use_ls_colors
&& engine_state.config.use_ansi_coloring.get(engine_state)) && engine_state.config.use_ansi_coloring)
.then(|| { .then(|| {
let ls_colors_env_str = stack let ls_colors_env_str = match stack.get_env_var(engine_state, "LS_COLORS") {
.get_env_var(engine_state, "LS_COLORS") Some(v) => env_to_string("LS_COLORS", v, engine_state, stack).ok(),
.and_then(|v| env_to_string("LS_COLORS", v, engine_state, stack).ok()); None => None,
};
get_ls_colors(ls_colors_env_str) get_ls_colors(ls_colors_env_str)
}); });
let mut cwds = cwd_pathbufs.clone(); let mut cwd = cwd_pathbuf.clone();
let mut prefix_len = 0; let mut prefix_len = 0;
let mut original_cwd = OriginalCwd::None; let mut original_cwd = OriginalCwd::None;
@ -223,21 +194,19 @@ pub fn complete_item(
match components.peek().cloned() { match components.peek().cloned() {
Some(c @ Component::Prefix(..)) => { Some(c @ Component::Prefix(..)) => {
// windows only by definition // windows only by definition
cwds = vec![[c, Component::RootDir].iter().collect()]; cwd = [c, Component::RootDir].iter().collect();
prefix_len = c.as_os_str().len(); prefix_len = c.as_os_str().len();
original_cwd = OriginalCwd::Prefix(c.as_os_str().to_string_lossy().into_owned()); original_cwd = OriginalCwd::Prefix(c.as_os_str().to_string_lossy().into_owned());
} }
Some(c @ Component::RootDir) => { Some(c @ Component::RootDir) => {
// This is kind of a hack. When joining an empty string with the rest, // This is kind of a hack. When joining an empty string with the rest,
// we add the slash automagically // we add the slash automagically
cwds = vec![PathBuf::from(c.as_os_str())]; cwd = PathBuf::from(c.as_os_str());
prefix_len = 1; prefix_len = 1;
original_cwd = OriginalCwd::Prefix(String::new()); original_cwd = OriginalCwd::Prefix(String::new());
} }
Some(Component::Normal(home)) if home.to_string_lossy() == "~" => { Some(Component::Normal(home)) if home.to_string_lossy() == "~" => {
cwds = home_dir() cwd = home_dir().map(Into::into).unwrap_or(cwd_pathbuf);
.map(|dir| vec![dir.into()])
.unwrap_or(cwd_pathbufs);
prefix_len = 1; prefix_len = 1;
original_cwd = OriginalCwd::Home; original_cwd = OriginalCwd::Home;
} }
@ -254,70 +223,59 @@ pub fn complete_item(
complete_rec( complete_rec(
partial.as_slice(), partial.as_slice(),
&cwds &PathBuiltFromString::default(),
.into_iter() &cwd,
.map(|cwd| PathBuiltFromString {
cwd,
parts: Vec::new(),
isdir: false,
})
.collect::<Vec<_>>(),
options, options,
want_directory, want_directory,
isdir, isdir,
options.match_algorithm == MatchAlgorithm::Prefix,
) )
.into_iter() .into_iter()
.map(|mut p| { .map(|mut p| {
if should_collapse_dots { if should_collapse_dots {
p = collapse_ndots(p); p = collapse_ndots(p);
} }
let is_dir = p.isdir;
let path = original_cwd.apply(p, path_separator); let path = original_cwd.apply(p, path_separator);
let real_path = expand_to_real_path(&path);
let metadata = std::fs::symlink_metadata(&real_path).ok();
let style = ls_colors.as_ref().map(|lsc| { let style = ls_colors.as_ref().map(|lsc| {
lsc.style_for_path_with_metadata(&real_path, metadata.as_ref()) lsc.style_for_path_with_metadata(
&path,
std::fs::symlink_metadata(expand_to_real_path(&path))
.ok()
.as_ref(),
)
.map(lscolors::Style::to_nu_ansi_term_style) .map(lscolors::Style::to_nu_ansi_term_style)
.unwrap_or_default() .unwrap_or_default()
}); });
FileSuggestion { (span, escape_path(path, want_directory), style)
span,
path: escape_path(path),
style,
is_dir,
}
}) })
.collect() .collect()
} }
// Fix files or folders with quotes or hashes // Fix files or folders with quotes or hashes
pub fn escape_path(path: String) -> String { pub fn escape_path(path: String, dir: bool) -> String {
// make glob pattern have the highest priority. // make glob pattern have the highest priority.
if nu_glob::is_glob(path.as_str()) || path.contains('`') { let glob_contaminated = path.contains(['[', '*', ']', '?']);
// expand home `~` for https://github.com/nushell/nushell/issues/13905 if glob_contaminated {
let pathbuf = nu_path::expand_tilde(path); return if path.contains('\'') {
let path = pathbuf.to_string_lossy(); // decide to use double quote, also need to escape `"` in path
if path.contains('\'') { // or else users can't do anything with completed path either.
// decide to use double quotes format!("\"{}\"", path.replace('"', r#"\""#))
// Path as Debug will do the escaping for `"`, `\`
format!("{:?}", path)
} else { } else {
format!("'{path}'") format!("'{path}'")
};
} }
} else {
let contaminated = let filename_contaminated = !dir && path.contains(['\'', '"', ' ', '#', '(', ')']);
path.contains(['\'', '"', ' ', '#', '(', ')', '{', '}', '[', ']', '|', ';']); let dirname_contaminated = dir && path.contains(['\'', '"', ' ', '#']);
let maybe_flag = path.starts_with('-'); let maybe_flag = path.starts_with('-');
let maybe_variable = path.starts_with('$'); let maybe_variable = path.starts_with('$');
let maybe_number = path.parse::<f64>().is_ok(); let maybe_number = path.parse::<f64>().is_ok();
if contaminated || maybe_flag || maybe_variable || maybe_number { if filename_contaminated || dirname_contaminated || maybe_flag || maybe_variable || maybe_number
{
format!("`{path}`") format!("`{path}`")
} else { } else {
path path
} }
} }
}
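// [editor's illustrative sketch: not part of the diff above]
// escape_path wraps a completion candidate in backticks when it would otherwise confuse
// the parser: special characters, a leading '-' or '$', or a name that parses as a number.
// Standalone restatement of that decision, using the larger character set shown in the
// hunk above; needs_backticks is a hypothetical helper name.
fn needs_backticks(path: &str) -> bool {
    let contaminated =
        path.contains(['\'', '"', ' ', '#', '(', ')', '{', '}', '[', ']', '|', ';']);
    let maybe_flag = path.starts_with('-');
    let maybe_variable = path.starts_with('$');
    let maybe_number = path.parse::<f64>().is_ok();
    contaminated || maybe_flag || maybe_variable || maybe_number
}

fn main() {
    assert!(needs_backticks("my file.txt")); // contains a space
    assert!(needs_backticks("-rf"));         // looks like a flag
    assert!(needs_backticks("3.14"));        // parses as a number
    assert!(!needs_backticks("src"));        // plain name, no quoting needed
}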
pub struct AdjustView { pub struct AdjustView {
pub prefix: String, pub prefix: String,
@ -326,12 +284,12 @@ pub struct AdjustView {
} }
pub fn adjust_if_intermediate( pub fn adjust_if_intermediate(
prefix: &str, prefix: &[u8],
working_set: &StateWorkingSet, working_set: &StateWorkingSet,
mut span: nu_protocol::Span, mut span: nu_protocol::Span,
) -> AdjustView { ) -> AdjustView {
let span_contents = String::from_utf8_lossy(working_set.get_span_contents(span)).to_string(); let span_contents = String::from_utf8_lossy(working_set.get_span_contents(span)).to_string();
let mut prefix = prefix.to_string(); let mut prefix = String::from_utf8_lossy(prefix).to_string();
// A difference of 1 because of the cursor's unicode code point in between. // A difference of 1 because of the cursor's unicode code point in between.
// Using .chars().count() because unicode and Windows. // Using .chars().count() because unicode and Windows.
@ -352,6 +310,45 @@ pub fn adjust_if_intermediate(
} }
} }
/// Convenience function to sort suggestions using [`sort_completions`]
pub fn sort_suggestions(
prefix: &str,
items: Vec<SemanticSuggestion>,
options: &CompletionOptions,
) -> Vec<SemanticSuggestion> {
sort_completions(prefix, items, options, |it| &it.suggestion.value)
}
/// # Arguments
/// * `prefix` - What the user's typed, for sorting by fuzzy matcher score
pub fn sort_completions<T>(
prefix: &str,
mut items: Vec<T>,
options: &CompletionOptions,
get_value: fn(&T) -> &str,
) -> Vec<T> {
// Sort items
if options.sort == CompletionSort::Smart && options.match_algorithm == MatchAlgorithm::Fuzzy {
let mut matcher = SkimMatcherV2::default();
if options.case_sensitive {
matcher = matcher.respect_case();
} else {
matcher = matcher.ignore_case();
};
items.sort_unstable_by(|a, b| {
let a_str = get_value(a);
let b_str = get_value(b);
let a_score = matcher.fuzzy_match(a_str, prefix).unwrap_or_default();
let b_score = matcher.fuzzy_match(b_str, prefix).unwrap_or_default();
b_score.cmp(&a_score).then(a_str.cmp(b_str))
});
} else {
items.sort_unstable_by(|a, b| get_value(a).cmp(get_value(b)));
}
items
}
/// Collapse multiple ".." components into n-dots. /// Collapse multiple ".." components into n-dots.
/// ///
/// It performs the reverse operation of `expand_ndots`, collapsing sequences of ".." into n-dots, /// It performs the reverse operation of `expand_ndots`, collapsing sequences of ".." into n-dots,
@ -362,7 +359,6 @@ fn collapse_ndots(path: PathBuiltFromString) -> PathBuiltFromString {
let mut result = PathBuiltFromString { let mut result = PathBuiltFromString {
parts: Vec::with_capacity(path.parts.len()), parts: Vec::with_capacity(path.parts.len()),
isdir: path.isdir, isdir: path.isdir,
cwd: path.cwd,
}; };
let mut dot_count = 0; let mut dot_count = 0;


@ -1,13 +1,7 @@
use fuzzy_matcher::{skim::SkimMatcherV2, FuzzyMatcher};
use nu_parser::trim_quotes_str; use nu_parser::trim_quotes_str;
use nu_protocol::{CompletionAlgorithm, CompletionSort}; use nu_protocol::{CompletionAlgorithm, CompletionSort};
use nu_utils::IgnoreCaseExt; use std::fmt::Display;
use nucleo_matcher::{
pattern::{Atom, AtomKind, CaseMatching, Normalization},
Config, Matcher, Utf32Str,
};
use std::{borrow::Cow, fmt::Display};
use super::SemanticSuggestion;
/// Describes how suggestions should be matched. /// Describes how suggestions should be matched.
#[derive(Copy, Clone, Debug, PartialEq)] #[derive(Copy, Clone, Debug, PartialEq)]
@ -18,12 +12,6 @@ pub enum MatchAlgorithm {
/// "git switch" is matched by "git sw" /// "git switch" is matched by "git sw"
Prefix, Prefix,
/// Only show suggestions which have a substring matching with the given input
///
/// Example:
/// "git checkout" is matched by "checkout"
Substring,
/// Only show suggestions which contain the input chars at any place /// Only show suggestions which contain the input chars at any place
/// ///
/// Example: /// Example:
@ -31,195 +19,32 @@ pub enum MatchAlgorithm {
Fuzzy, Fuzzy,
} }
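// [editor's illustrative sketch: not part of the diff above]
// A standalone restatement of the matching modes documented on this enum: Prefix and
// Fuzzy appear on both sides of the hunk, Substring only on one side. The Fuzzy arm here
// is a plain case-insensitive subsequence check, not the skim/nucleo scoring the real
// code uses; Mode and is_match are hypothetical names.
#[derive(Copy, Clone)]
enum Mode {
    Prefix,
    Substring,
    Fuzzy,
}

fn is_match(mode: Mode, haystack: &str, needle: &str) -> bool {
    let haystack = haystack.to_lowercase();
    let needle = needle.to_lowercase();
    match mode {
        Mode::Prefix => haystack.starts_with(needle.as_str()),
        Mode::Substring => haystack.contains(needle.as_str()),
        // Every needle char must appear in the haystack, in order.
        Mode::Fuzzy => {
            let mut rest = haystack.chars();
            needle.chars().all(|n| rest.by_ref().any(|h| h == n))
        }
    }
}

fn main() {
    assert!(is_match(Mode::Prefix, "git switch", "git sw"));
    assert!(is_match(Mode::Substring, "git checkout", "checkout"));
    assert!(is_match(Mode::Fuzzy, "git checkout", "gco"));
    assert!(!is_match(Mode::Prefix, "git checkout", "checkout"));
}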
pub struct NuMatcher<'a, T> { impl MatchAlgorithm {
options: &'a CompletionOptions, /// Returns whether the `needle` search text matches the given `haystack`.
needle: String, pub fn matches_str(&self, haystack: &str, needle: &str) -> bool {
state: State<T>,
}
enum State<T> {
Prefix {
/// Holds (haystack, item)
items: Vec<(String, T)>,
},
Substring {
/// Holds (haystack, item)
items: Vec<(String, T)>,
},
Fuzzy {
matcher: Matcher,
atom: Atom,
/// Holds (haystack, item, score)
items: Vec<(String, T, u16)>,
},
}
/// Filters and sorts suggestions
impl<T> NuMatcher<'_, T> {
/// # Arguments
///
/// * `needle` - The text to search for
pub fn new(needle: impl AsRef<str>, options: &CompletionOptions) -> NuMatcher<T> {
let needle = trim_quotes_str(needle.as_ref());
match options.match_algorithm {
MatchAlgorithm::Prefix => {
let lowercase_needle = if options.case_sensitive {
needle.to_owned()
} else {
needle.to_folded_case()
};
NuMatcher {
options,
needle: lowercase_needle,
state: State::Prefix { items: Vec::new() },
}
}
MatchAlgorithm::Substring => {
let lowercase_needle = if options.case_sensitive {
needle.to_owned()
} else {
needle.to_folded_case()
};
NuMatcher {
options,
needle: lowercase_needle,
state: State::Substring { items: Vec::new() },
}
}
MatchAlgorithm::Fuzzy => {
let atom = Atom::new(
needle,
if options.case_sensitive {
CaseMatching::Respect
} else {
CaseMatching::Ignore
},
Normalization::Smart,
AtomKind::Fuzzy,
false,
);
NuMatcher {
options,
needle: needle.to_owned(),
state: State::Fuzzy {
matcher: Matcher::new(Config::DEFAULT),
atom,
items: Vec::new(),
},
}
}
}
}
/// Returns whether or not the haystack matches the needle. If it does, `item` is added
/// to the list of matches (if given).
///
/// Helper to avoid code duplication between [NuMatcher::add] and [NuMatcher::matches].
fn matches_aux(&mut self, haystack: &str, item: Option<T>) -> bool {
let haystack = trim_quotes_str(haystack); let haystack = trim_quotes_str(haystack);
match &mut self.state { let needle = trim_quotes_str(needle);
State::Prefix { items } => { match *self {
let haystack_folded = if self.options.case_sensitive { MatchAlgorithm::Prefix => haystack.starts_with(needle),
Cow::Borrowed(haystack) MatchAlgorithm::Fuzzy => {
} else { let matcher = SkimMatcherV2::default();
Cow::Owned(haystack.to_folded_case()) matcher.fuzzy_match(haystack, needle).is_some()
};
let matches = haystack_folded.starts_with(self.needle.as_str());
if matches {
if let Some(item) = item {
items.push((haystack.to_string(), item));
}
}
matches
}
State::Substring { items } => {
let haystack_folded = if self.options.case_sensitive {
Cow::Borrowed(haystack)
} else {
Cow::Owned(haystack.to_folded_case())
};
let matches = haystack_folded.contains(self.needle.as_str());
if matches {
if let Some(item) = item {
items.push((haystack.to_string(), item));
}
}
matches
}
State::Fuzzy {
matcher,
atom,
items,
} => {
let mut haystack_buf = Vec::new();
let haystack_utf32 = Utf32Str::new(trim_quotes_str(haystack), &mut haystack_buf);
let mut indices = Vec::new();
let Some(score) = atom.indices(haystack_utf32, matcher, &mut indices) else {
return false;
};
if let Some(item) = item {
items.push((haystack.to_string(), item, score));
}
true
} }
} }
} }
/// Add the given item if the given haystack matches the needle. /// Returns whether the `needle` search text matches the given `haystack`.
/// pub fn matches_u8(&self, haystack: &[u8], needle: &[u8]) -> bool {
/// Returns whether the item was added. match *self {
pub fn add(&mut self, haystack: impl AsRef<str>, item: T) -> bool { MatchAlgorithm::Prefix => haystack.starts_with(needle),
self.matches_aux(haystack.as_ref(), Some(item)) MatchAlgorithm::Fuzzy => {
} let haystack_str = String::from_utf8_lossy(haystack);
let needle_str = String::from_utf8_lossy(needle);
/// Returns whether the haystack matches the needle. let matcher = SkimMatcherV2::default();
pub fn matches(&mut self, haystack: &str) -> bool { matcher.fuzzy_match(&haystack_str, &needle_str).is_some()
self.matches_aux(haystack, None)
}
/// Get all the items that matched (sorted)
pub fn results(self) -> Vec<T> {
match self.state {
State::Prefix { mut items, .. } | State::Substring { mut items, .. } => {
items.sort_by(|(haystack1, _), (haystack2, _)| {
let cmp_sensitive = haystack1.cmp(haystack2);
if self.options.case_sensitive {
cmp_sensitive
} else {
haystack1
.to_folded_case()
.cmp(&haystack2.to_folded_case())
.then(cmp_sensitive)
}
});
items.into_iter().map(|(_, item)| item).collect::<Vec<_>>()
}
State::Fuzzy { mut items, .. } => {
match self.options.sort {
CompletionSort::Alphabetical => {
items.sort_by(|(haystack1, _, _), (haystack2, _, _)| {
haystack1.cmp(haystack2)
});
}
CompletionSort::Smart => {
items.sort_by(|(haystack1, _, score1), (haystack2, _, score2)| {
score2.cmp(score1).then(haystack1.cmp(haystack2))
});
} }
} }
items
.into_iter()
.map(|(_, item, _)| item)
.collect::<Vec<_>>()
}
}
}
}
impl NuMatcher<'_, SemanticSuggestion> {
pub fn add_semantic_suggestion(&mut self, sugg: SemanticSuggestion) -> bool {
let value = sugg.suggestion.value.to_string();
self.add(value, sugg)
} }
} }
@ -227,7 +52,6 @@ impl From<CompletionAlgorithm> for MatchAlgorithm {
fn from(value: CompletionAlgorithm) -> Self { fn from(value: CompletionAlgorithm) -> Self {
match value { match value {
CompletionAlgorithm::Prefix => MatchAlgorithm::Prefix, CompletionAlgorithm::Prefix => MatchAlgorithm::Prefix,
CompletionAlgorithm::Substring => MatchAlgorithm::Substring,
CompletionAlgorithm::Fuzzy => MatchAlgorithm::Fuzzy, CompletionAlgorithm::Fuzzy => MatchAlgorithm::Fuzzy,
} }
} }
@ -239,7 +63,6 @@ impl TryFrom<String> for MatchAlgorithm {
fn try_from(value: String) -> Result<Self, Self::Error> { fn try_from(value: String) -> Result<Self, Self::Error> {
match value.as_str() { match value.as_str() {
"prefix" => Ok(Self::Prefix), "prefix" => Ok(Self::Prefix),
"substring" => Ok(Self::Substring),
"fuzzy" => Ok(Self::Fuzzy), "fuzzy" => Ok(Self::Fuzzy),
_ => Err(InvalidMatchAlgorithm::Unknown), _ => Err(InvalidMatchAlgorithm::Unknown),
} }
@ -264,6 +87,7 @@ impl std::error::Error for InvalidMatchAlgorithm {}
#[derive(Clone)] #[derive(Clone)]
pub struct CompletionOptions { pub struct CompletionOptions {
pub case_sensitive: bool, pub case_sensitive: bool,
pub positional: bool,
pub match_algorithm: MatchAlgorithm, pub match_algorithm: MatchAlgorithm,
pub sort: CompletionSort, pub sort: CompletionSort,
} }
@ -272,6 +96,7 @@ impl Default for CompletionOptions {
fn default() -> Self { fn default() -> Self {
Self { Self {
case_sensitive: true, case_sensitive: true,
positional: true,
match_algorithm: MatchAlgorithm::Prefix, match_algorithm: MatchAlgorithm::Prefix,
sort: Default::default(), sort: Default::default(),
} }
@ -280,70 +105,35 @@ impl Default for CompletionOptions {
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use rstest::rstest; use super::MatchAlgorithm;
use super::{CompletionOptions, MatchAlgorithm, NuMatcher}; #[test]
fn match_algorithm_prefix() {
let algorithm = MatchAlgorithm::Prefix;
#[rstest] assert!(algorithm.matches_str("example text", ""));
#[case(MatchAlgorithm::Prefix, "example text", "", true)] assert!(algorithm.matches_str("example text", "examp"));
#[case(MatchAlgorithm::Prefix, "example text", "examp", true)] assert!(!algorithm.matches_str("example text", "text"));
#[case(MatchAlgorithm::Prefix, "example text", "text", false)]
#[case(MatchAlgorithm::Substring, "example text", "", true)] assert!(algorithm.matches_u8(&[1, 2, 3], &[]));
#[case(MatchAlgorithm::Substring, "example text", "text", true)] assert!(algorithm.matches_u8(&[1, 2, 3], &[1, 2]));
#[case(MatchAlgorithm::Substring, "example text", "mplxt", false)] assert!(!algorithm.matches_u8(&[1, 2, 3], &[2, 3]));
#[case(MatchAlgorithm::Fuzzy, "example text", "", true)]
#[case(MatchAlgorithm::Fuzzy, "example text", "examp", true)]
#[case(MatchAlgorithm::Fuzzy, "example text", "ext", true)]
#[case(MatchAlgorithm::Fuzzy, "example text", "mplxt", true)]
#[case(MatchAlgorithm::Fuzzy, "example text", "mpp", false)]
fn match_algorithm_simple(
#[case] match_algorithm: MatchAlgorithm,
#[case] haystack: &str,
#[case] needle: &str,
#[case] should_match: bool,
) {
let options = CompletionOptions {
match_algorithm,
..Default::default()
};
let mut matcher = NuMatcher::new(needle, &options);
matcher.add(haystack, haystack);
if should_match {
assert_eq!(vec![haystack], matcher.results());
} else {
assert_ne!(vec![haystack], matcher.results());
}
} }
#[test] #[test]
fn match_algorithm_fuzzy_sort_score() { fn match_algorithm_fuzzy() {
let options = CompletionOptions { let algorithm = MatchAlgorithm::Fuzzy;
match_algorithm: MatchAlgorithm::Fuzzy,
..Default::default()
};
let mut matcher = NuMatcher::new("fob", &options);
for item in ["foo/bar", "fob", "foo bar"] {
matcher.add(item, item);
}
// Sort by score, then in alphabetical order
assert_eq!(vec!["fob", "foo bar", "foo/bar"], matcher.results());
}
#[test] assert!(algorithm.matches_str("example text", ""));
fn match_algorithm_fuzzy_sort_strip() { assert!(algorithm.matches_str("example text", "examp"));
let options = CompletionOptions { assert!(algorithm.matches_str("example text", "ext"));
match_algorithm: MatchAlgorithm::Fuzzy, assert!(algorithm.matches_str("example text", "mplxt"));
..Default::default() assert!(!algorithm.matches_str("example text", "mpp"));
};
let mut matcher = NuMatcher::new("'love spaces' ", &options); assert!(algorithm.matches_u8(&[1, 2, 3], &[]));
for item in [ assert!(algorithm.matches_u8(&[1, 2, 3], &[1, 2]));
"'i love spaces'", assert!(algorithm.matches_u8(&[1, 2, 3], &[2, 3]));
"'i love spaces' so much", assert!(algorithm.matches_u8(&[1, 2, 3], &[1, 3]));
"'lovespaces' ", assert!(!algorithm.matches_u8(&[1, 2, 3], &[2, 2]));
] {
matcher.add(item, item);
}
// Make sure the spaces are respected
assert_eq!(vec!["'i love spaces' so much"], matcher.results());
} }
} }
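Stripped of the engine types, the matching modes compared in this file behave as in the sketch below: prefix requires the needle at the start of the candidate, substring anywhere in it, and fuzzy only requires the needle's characters to appear in order. This is a std-only approximation; the real code folds case with `nu_utils::IgnoreCaseExt` and scores fuzzy matches with a dedicated matcher, so `to_lowercase` and the subsequence check here are only stand-ins.

```rust
#[derive(Copy, Clone)]
enum MatchAlgorithm {
    Prefix,
    Substring,
    Fuzzy,
}

fn matches(algo: MatchAlgorithm, haystack: &str, needle: &str) -> bool {
    let haystack = haystack.to_lowercase();
    let needle = needle.to_lowercase();
    match algo {
        MatchAlgorithm::Prefix => haystack.starts_with(&needle),
        MatchAlgorithm::Substring => haystack.contains(&needle),
        MatchAlgorithm::Fuzzy => {
            // Needle characters must appear in order, not necessarily adjacent.
            let mut chars = haystack.chars();
            needle.chars().all(|n| chars.any(|h| h == n))
        }
    }
}

fn main() {
    assert!(matches(MatchAlgorithm::Prefix, "example text", "examp"));
    assert!(!matches(MatchAlgorithm::Prefix, "example text", "text"));
    assert!(matches(MatchAlgorithm::Substring, "example text", "text"));
    assert!(matches(MatchAlgorithm::Fuzzy, "example text", "mplxt"));
    assert!(!matches(MatchAlgorithm::Fuzzy, "example text", "mpp"));
}
```

The example inputs mirror the `rstest` cases above; note that the substring mode exists on only one side of this comparison.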

View File

@ -6,47 +6,48 @@ use nu_engine::eval_call;
use nu_protocol::{ use nu_protocol::{
ast::{Argument, Call, Expr, Expression}, ast::{Argument, Call, Expr, Expression},
debugger::WithoutDebug, debugger::WithoutDebug,
engine::{EngineState, Stack, StateWorkingSet}, engine::{Stack, StateWorkingSet},
DeclId, PipelineData, Span, Type, Value, CompletionSort, DeclId, PipelineData, Span, Type, Value,
}; };
use nu_utils::IgnoreCaseExt;
use std::collections::HashMap; use std::collections::HashMap;
use super::completion_options::NuMatcher; use super::completion_common::sort_suggestions;
pub struct CustomCompletion<T: Completer> { pub struct CustomCompletion {
stack: Stack,
decl_id: DeclId, decl_id: DeclId,
line: String, line: String,
line_pos: usize,
fallback: T,
} }
impl<T: Completer> CustomCompletion<T> { impl CustomCompletion {
pub fn new(decl_id: DeclId, line: String, line_pos: usize, fallback: T) -> Self { pub fn new(stack: Stack, decl_id: DeclId, line: String) -> Self {
Self { Self {
stack,
decl_id, decl_id,
line, line,
line_pos,
fallback,
} }
} }
} }
impl<T: Completer> Completer for CustomCompletion<T> { impl Completer for CustomCompletion {
fn fetch( fn fetch(
&mut self, &mut self,
working_set: &StateWorkingSet, working_set: &StateWorkingSet,
stack: &Stack, _stack: &Stack,
prefix: impl AsRef<str>, prefix: &[u8],
span: Span, span: Span,
offset: usize, offset: usize,
orig_options: &CompletionOptions, pos: usize,
completion_options: &CompletionOptions,
) -> Vec<SemanticSuggestion> { ) -> Vec<SemanticSuggestion> {
// Line position
let line_pos = pos - offset;
// Call custom declaration // Call custom declaration
let mut stack_mut = stack.clone(); let result = eval_call::<WithoutDebug>(
let mut eval = |engine_state: &EngineState| { working_set.permanent_state,
eval_call::<WithoutDebug>( &mut self.stack,
engine_state,
&mut stack_mut,
&Call { &Call {
decl_id: self.decl_id, decl_id: self.decl_id,
head: span, head: span,
@ -57,7 +58,7 @@ impl<T: Completer> Completer for CustomCompletion<T> {
Type::String, Type::String,
)), )),
Argument::Positional(Expression::new_unknown( Argument::Positional(Expression::new_unknown(
Expr::Int(self.line_pos as i64), Expr::Int(line_pos as i64),
Span::unknown(), Span::unknown(),
Type::Int, Type::Int,
)), )),
@ -65,22 +66,14 @@ impl<T: Completer> Completer for CustomCompletion<T> {
parser_info: HashMap::new(), parser_info: HashMap::new(),
}, },
PipelineData::empty(), PipelineData::empty(),
) );
};
let result = if self.decl_id.get() < working_set.permanent_state.num_decls() {
eval(working_set.permanent_state)
} else {
let mut engine_state = working_set.permanent_state.clone();
let _ = engine_state.merge_delta(working_set.delta.clone());
eval(&engine_state)
};
let mut completion_options = orig_options.clone(); let mut custom_completion_options = None;
let mut should_sort = true;
// Parse result // Parse result
let suggestions = match result.and_then(|data| data.into_value(span)) { let suggestions = result
Ok(value) => match &value { .and_then(|data| data.into_value(span))
.map(|value| match &value {
Value::Record { val, .. } => { Value::Record { val, .. } => {
let completions = val let completions = val
.get("completions") .get("completions")
@ -93,74 +86,78 @@ impl<T: Completer> Completer for CustomCompletion<T> {
let options = val.get("options"); let options = val.get("options");
if let Some(Value::Record { val: options, .. }) = &options { if let Some(Value::Record { val: options, .. }) = &options {
if let Some(sort) = options.get("sort").and_then(|val| val.as_bool().ok()) { let should_sort = options
should_sort = sort; .get("sort")
} .and_then(|val| val.as_bool().ok())
.unwrap_or(false);
if let Some(case_sensitive) = options custom_completion_options = Some(CompletionOptions {
case_sensitive: options
.get("case_sensitive") .get("case_sensitive")
.and_then(|val| val.as_bool().ok()) .and_then(|val| val.as_bool().ok())
{ .unwrap_or(true),
completion_options.case_sensitive = case_sensitive; positional: options
} .get("positional")
let positional = .and_then(|val| val.as_bool().ok())
options.get("positional").and_then(|val| val.as_bool().ok()); .unwrap_or(true),
if positional.is_some() { match_algorithm: match options.get("completion_algorithm") {
log::warn!("Use of the positional option is deprecated. Use the substring match algorithm instead."); Some(option) => option
} .coerce_string()
if let Some(algorithm) = options .ok()
.get("completion_algorithm")
.and_then(|option| option.coerce_string().ok())
.and_then(|option| option.try_into().ok()) .and_then(|option| option.try_into().ok())
{ .unwrap_or(MatchAlgorithm::Prefix),
completion_options.match_algorithm = algorithm; None => completion_options.match_algorithm,
if let Some(false) = positional { },
if completion_options.match_algorithm == MatchAlgorithm::Prefix { sort: if should_sort {
completion_options.match_algorithm = MatchAlgorithm::Substring CompletionSort::Alphabetical
} } else {
} CompletionSort::Smart
} },
});
} }
completions completions
} }
Value::List { vals, .. } => map_value_completions(vals.iter(), span, offset), Value::List { vals, .. } => map_value_completions(vals.iter(), span, offset),
Value::Nothing { .. } => { _ => vec![],
return self.fallback.fetch( })
working_set, .unwrap_or_default();
stack,
prefix, let options = custom_completion_options
span, .as_ref()
offset, .unwrap_or(completion_options);
orig_options, let suggestions = filter(prefix, suggestions, options);
); sort_suggestions(&String::from_utf8_lossy(prefix), suggestions, options)
}
}
fn filter(
prefix: &[u8],
items: Vec<SemanticSuggestion>,
options: &CompletionOptions,
) -> Vec<SemanticSuggestion> {
items
.into_iter()
.filter(|it| match options.match_algorithm {
MatchAlgorithm::Prefix => match (options.case_sensitive, options.positional) {
(true, true) => it.suggestion.value.as_bytes().starts_with(prefix),
(true, false) => it
.suggestion
.value
.contains(std::str::from_utf8(prefix).unwrap_or("")),
(false, positional) => {
let value = it.suggestion.value.to_folded_case();
let prefix = std::str::from_utf8(prefix).unwrap_or("").to_folded_case();
if positional {
value.starts_with(&prefix)
} else {
value.contains(&prefix)
} }
_ => {
log::error!(
"Custom completer returned invalid value of type {}",
value.get_type().to_string()
);
return vec![];
} }
}, },
Err(e) => { MatchAlgorithm::Fuzzy => options
log::error!("Error getting custom completions: {e}"); .match_algorithm
return vec![]; .matches_u8(it.suggestion.value.as_bytes(), prefix),
} })
};
let mut matcher = NuMatcher::new(prefix, &completion_options);
if should_sort {
for sugg in suggestions {
matcher.add_semantic_suggestion(sugg);
}
matcher.results()
} else {
suggestions
.into_iter()
.filter(|sugg| matcher.matches(&sugg.suggestion.value))
.collect() .collect()
} }
}
}
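Both sides of this file fold the `options` record returned by a custom completer into the effective completion options. Below is a rough, std-only sketch of that merge, including the deprecation shim that turns `positional: false` into substring matching; the struct and field names are stand-ins for the real `nu-protocol` values, and the exact nesting of the shim in the real code may differ.

```rust
#[derive(Debug, Clone, Copy, PartialEq)]
enum MatchAlgorithm { Prefix, Substring, Fuzzy }

#[derive(Debug)]
struct CompletionOptions {
    case_sensitive: bool,
    match_algorithm: MatchAlgorithm,
}

/// Options a custom completer may return; every field is optional.
struct CustomOptions {
    case_sensitive: Option<bool>,
    positional: Option<bool>,
    completion_algorithm: Option<MatchAlgorithm>,
}

fn apply(mut base: CompletionOptions, custom: &CustomOptions) -> CompletionOptions {
    if let Some(case_sensitive) = custom.case_sensitive {
        base.case_sensitive = case_sensitive;
    }
    if let Some(algorithm) = custom.completion_algorithm {
        base.match_algorithm = algorithm;
    }
    // Deprecated escape hatch: `positional: false` used to mean "match anywhere
    // in the candidate", which is now expressed as the substring algorithm.
    if custom.positional == Some(false) && base.match_algorithm == MatchAlgorithm::Prefix {
        base.match_algorithm = MatchAlgorithm::Substring;
    }
    base
}

fn main() {
    let base = CompletionOptions { case_sensitive: true, match_algorithm: MatchAlgorithm::Prefix };
    let legacy = CustomOptions {
        case_sensitive: Some(false),
        positional: Some(false),
        completion_algorithm: None,
    };
    // Prints a case-insensitive, substring-matching configuration.
    println!("{:?}", apply(base, &legacy));

    let base = CompletionOptions { case_sensitive: true, match_algorithm: MatchAlgorithm::Prefix };
    let fuzzy = CustomOptions {
        case_sensitive: None,
        positional: None,
        completion_algorithm: Some(MatchAlgorithm::Fuzzy),
    };
    // An explicitly requested algorithm is taken as-is.
    println!("{:?}", apply(base, &fuzzy));
}
```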

View File

@ -2,6 +2,7 @@ use crate::completions::{
completion_common::{adjust_if_intermediate, complete_item, AdjustView}, completion_common::{adjust_if_intermediate, complete_item, AdjustView},
Completer, CompletionOptions, Completer, CompletionOptions,
}; };
use nu_ansi_term::Style;
use nu_protocol::{ use nu_protocol::{
engine::{EngineState, Stack, StateWorkingSet}, engine::{EngineState, Stack, StateWorkingSet},
Span, Span,
@ -9,22 +10,29 @@ use nu_protocol::{
use reedline::Suggestion; use reedline::Suggestion;
use std::path::Path; use std::path::Path;
use super::{completion_common::FileSuggestion, SemanticSuggestion, SuggestionKind}; use super::SemanticSuggestion;
pub struct DirectoryCompletion; #[derive(Clone, Default)]
pub struct DirectoryCompletion {}
impl DirectoryCompletion {
pub fn new() -> Self {
Self::default()
}
}
impl Completer for DirectoryCompletion { impl Completer for DirectoryCompletion {
fn fetch( fn fetch(
&mut self, &mut self,
working_set: &StateWorkingSet, working_set: &StateWorkingSet,
stack: &Stack, stack: &Stack,
prefix: impl AsRef<str>, prefix: &[u8],
span: Span, span: Span,
offset: usize, offset: usize,
_pos: usize,
options: &CompletionOptions, options: &CompletionOptions,
) -> Vec<SemanticSuggestion> { ) -> Vec<SemanticSuggestion> {
let AdjustView { prefix, span, .. } = let AdjustView { prefix, span, .. } = adjust_if_intermediate(prefix, working_set, span);
adjust_if_intermediate(prefix.as_ref(), working_set, span);
// Filter only the folders // Filter only the folders
#[allow(deprecated)] #[allow(deprecated)]
@ -39,15 +47,16 @@ impl Completer for DirectoryCompletion {
.into_iter() .into_iter()
.map(move |x| SemanticSuggestion { .map(move |x| SemanticSuggestion {
suggestion: Suggestion { suggestion: Suggestion {
value: x.path, value: x.1,
style: x.style, style: x.2,
span: reedline::Span { span: reedline::Span {
start: x.span.start - offset, start: x.0.start - offset,
end: x.span.end - offset, end: x.0.end - offset,
}, },
..Suggestion::default() ..Suggestion::default()
}, },
kind: Some(SuggestionKind::Directory), // TODO????
kind: None,
}) })
.collect(); .collect();
@ -83,6 +92,6 @@ pub fn directory_completion(
options: &CompletionOptions, options: &CompletionOptions,
engine_state: &EngineState, engine_state: &EngineState,
stack: &Stack, stack: &Stack,
) -> Vec<FileSuggestion> { ) -> Vec<(nu_protocol::Span, String, Option<Style>)> {
complete_item(true, span, partial, &[cwd], options, engine_state, stack) complete_item(true, span, partial, cwd, options, engine_state, stack)
} }
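One detail both versions of this file share is the span arithmetic when converting engine suggestions into reedline suggestions: completion spans are absolute offsets into the whole line buffer, so the offset of the current line has to be subtracted before reedline can splice the replacement in. A minimal illustration, with simplified stand-in types:

```rust
struct EngineSpan { start: usize, end: usize }
struct ReedlineSpan { start: usize, end: usize }

/// Shift an absolute engine span into coordinates relative to the edited line.
fn to_reedline_span(span: EngineSpan, offset: usize) -> ReedlineSpan {
    ReedlineSpan { start: span.start - offset, end: span.end - offset }
}

fn main() {
    // A completion covering bytes 42..48 of the buffer, on a line starting at
    // byte 40, replaces characters 2..8 of the visible line.
    let s = to_reedline_span(EngineSpan { start: 42, end: 48 }, 40);
    assert_eq!((s.start, s.end), (2, 8));
}
```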

View File

@ -1,22 +1,20 @@
use crate::completions::{ use crate::completions::{file_path_completion, Completer, CompletionOptions};
completion_common::{surround_remove, FileSuggestion},
completion_options::NuMatcher,
file_path_completion, Completer, CompletionOptions, SemanticSuggestion, SuggestionKind,
};
use nu_path::expand_tilde;
use nu_protocol::{ use nu_protocol::{
engine::{Stack, StateWorkingSet, VirtualPath}, engine::{Stack, StateWorkingSet},
Span, Span,
}; };
use reedline::Suggestion; use reedline::Suggestion;
use std::{ use std::path::{is_separator, Path, MAIN_SEPARATOR as SEP, MAIN_SEPARATOR_STR};
collections::HashSet,
path::{is_separator, PathBuf, MAIN_SEPARATOR_STR},
};
pub struct DotNuCompletion { use super::{completion_common::sort_suggestions, SemanticSuggestion};
/// e.g. use std/a<tab>
pub std_virtual_path: bool, #[derive(Clone, Default)]
pub struct DotNuCompletion {}
impl DotNuCompletion {
pub fn new() -> Self {
Self::default()
}
} }
impl Completer for DotNuCompletion { impl Completer for DotNuCompletion {
@ -24,185 +22,114 @@ impl Completer for DotNuCompletion {
&mut self, &mut self,
working_set: &StateWorkingSet, working_set: &StateWorkingSet,
stack: &Stack, stack: &Stack,
prefix: impl AsRef<str>, prefix: &[u8],
span: Span, span: Span,
offset: usize, offset: usize,
_pos: usize,
options: &CompletionOptions, options: &CompletionOptions,
) -> Vec<SemanticSuggestion> { ) -> Vec<SemanticSuggestion> {
let prefix_str = prefix.as_ref(); let prefix_str = String::from_utf8_lossy(prefix).replace('`', "");
let start_with_backquote = prefix_str.starts_with('`'); let mut search_dirs: Vec<String> = vec![];
let end_with_backquote = prefix_str.ends_with('`');
let prefix_str = prefix_str.replace('`', "");
// e.g. `./`, `..\`, `/`
let not_lib_dirs = prefix_str
.chars()
.find(|c| *c != '.')
.is_some_and(is_separator);
let mut search_dirs: Vec<PathBuf> = vec![];
let (base, partial) = if let Some((parent, remain)) = prefix_str.rsplit_once(is_separator) { // If prefix_str is only a word we want to search in the current dir
// If prefix_str is only a word we want to search in the current dir. let (base, partial) = prefix_str
// "/xx" should be split to "/" and "xx". .rsplit_once(is_separator)
if parent.is_empty() { .unwrap_or((".", &prefix_str));
(MAIN_SEPARATOR_STR, remain)
} else {
(parent, remain)
}
} else {
(".", prefix_str.as_str())
};
let base_dir = base.replace(is_separator, MAIN_SEPARATOR_STR); let base_dir = base.replace(is_separator, MAIN_SEPARATOR_STR);
let mut partial = partial.to_string();
// On windows, this standardizes paths to use \
let mut is_current_folder = false;
// Fetch the lib dirs // Fetch the lib dirs
// NOTE: 2 ways to setup `NU_LIB_DIRS` let lib_dirs: Vec<String> = if let Some(lib_dirs) = working_set.get_env_var("NU_LIB_DIRS") {
// 1. `const NU_LIB_DIRS = [paths]`, equal to `nu -I paths`
// 2. `$env.NU_LIB_DIRS = [paths]`
let const_lib_dirs = working_set
.find_variable(b"$NU_LIB_DIRS")
.and_then(|vid| working_set.get_variable(vid).const_val.as_ref());
let env_lib_dirs = working_set.get_env_var("NU_LIB_DIRS");
let lib_dirs: HashSet<PathBuf> = [const_lib_dirs, env_lib_dirs]
.into_iter()
.flatten()
.flat_map(|lib_dirs| {
lib_dirs lib_dirs
.as_list() .as_list()
.into_iter() .into_iter()
.flat_map(|it| it.iter().filter_map(|x| x.to_path().ok())) .flat_map(|it| {
.map(expand_tilde) it.iter().map(|x| {
x.to_path()
.expect("internal error: failed to convert lib path")
}) })
.collect(); })
.map(|it| {
it.into_os_string()
.into_string()
.expect("internal error: failed to convert OS path")
})
.collect()
} else {
vec![]
};
// Check if the base_dir is a folder // Check if the base_dir is a folder
let cwd = working_set.permanent_state.cwd(None); // rsplit_once removes the separator
if base_dir != "." { if base_dir != "." {
let expanded_base_dir = expand_tilde(&base_dir); // Add the base dir into the directories to be searched
let is_base_dir_relative = expanded_base_dir.is_relative(); search_dirs.push(base_dir.clone());
// Search in base_dir as well as lib_dirs.
// After expanded, base_dir can be a relative path or absolute path. // Reset the partial adding the basic dir back
// If relative, we join "current working dir" with it to get subdirectory and add to search_dirs. // in order to make the span replace work properly
// If absolute, we add it to search_dirs. let mut base_dir_partial = base_dir;
if let Ok(mut cwd) = cwd { base_dir_partial.push_str(&partial);
if is_base_dir_relative {
cwd.push(&base_dir); partial = base_dir_partial;
search_dirs.push(cwd.into_std_path_buf());
} else { } else {
search_dirs.push(expanded_base_dir); // Fetch the current folder
} #[allow(deprecated)]
} let current_folder = working_set.permanent_state.current_work_dir();
if !not_lib_dirs { is_current_folder = true;
search_dirs.extend(lib_dirs.into_iter().map(|mut dir| {
dir.push(&base_dir); // Add the current folder and the lib dirs into the
dir // directories to be searched
})); search_dirs.push(current_folder);
}
} else {
if let Ok(cwd) = cwd {
search_dirs.push(cwd.into_std_path_buf());
}
if !not_lib_dirs {
search_dirs.extend(lib_dirs); search_dirs.extend(lib_dirs);
} }
}
// Fetch the files filtering the ones that end with .nu // Fetch the files filtering the ones that end with .nu
// and transform them into suggestions // and transform them into suggestions
let mut completions = file_path_completion( let output: Vec<SemanticSuggestion> = search_dirs
.into_iter()
.flat_map(|search_dir| {
let completions = file_path_completion(
span, span,
partial, &partial,
&search_dirs &search_dir,
.iter()
.filter_map(|d| d.to_str())
.collect::<Vec<_>>(),
options, options,
working_set.permanent_state, working_set.permanent_state,
stack, stack,
); );
if self.std_virtual_path {
let mut matcher = NuMatcher::new(partial, options);
let base_dir = surround_remove(&base_dir);
if base_dir == "." {
let surround_prefix = partial
.chars()
.take_while(|c| "`'\"".contains(*c))
.collect::<String>();
for path in ["std", "std-rfc"] {
let path = format!("{}{}", surround_prefix, path);
matcher.add(
path.clone(),
FileSuggestion {
span,
path,
style: None,
is_dir: true,
},
);
}
} else if let Some(VirtualPath::Dir(sub_paths)) =
working_set.find_virtual_path(&base_dir)
{
for sub_vp_id in sub_paths {
let (path, sub_vp) = working_set.get_virtual_path(*sub_vp_id);
let path = path
.strip_prefix(&format!("{}/", base_dir))
.unwrap_or(path)
.to_string();
matcher.add(
path.clone(),
FileSuggestion {
path,
span,
style: None,
is_dir: matches!(sub_vp, VirtualPath::Dir(_)),
},
);
}
}
completions.extend(matcher.results());
}
completions completions
.into_iter() .into_iter()
.filter(move |it| {
// Different base dir, so we list the .nu files or folders // Different base dir, so we list the .nu files or folders
.filter(|it| { if !is_current_folder {
// for paths with spaces in them it.1.ends_with(".nu") || it.1.ends_with(SEP)
let path = it.path.trim_end_matches('`'); } else {
path.ends_with(".nu") || it.is_dir // Lib dirs, so we filter only the .nu files or directory modules
if it.1.ends_with(SEP) {
Path::new(&search_dir).join(&it.1).join("mod.nu").exists()
} else {
it.1.ends_with(".nu")
}
}
}) })
.map(|x| { .map(move |x| SemanticSuggestion {
let append_whitespace = !x.is_dir && (!start_with_backquote || end_with_backquote);
// Re-calculate the span to replace
let mut span_offset = 0;
let mut value = x.path.to_string();
// Complete only the last path component
if base_dir == MAIN_SEPARATOR_STR {
span_offset = base_dir.len()
} else if base_dir != "." {
span_offset = base_dir.len() + 1
}
// Retain only one '`'
if start_with_backquote {
value = value.trim_start_matches('`').to_string();
span_offset += 1;
}
// Add the backquote back
if end_with_backquote && !value.ends_with('`') {
value.push('`');
}
let end = x.span.end - offset;
let start = std::cmp::min(end, x.span.start - offset + span_offset);
SemanticSuggestion {
suggestion: Suggestion { suggestion: Suggestion {
value, value: x.1,
style: x.style, style: x.2,
span: reedline::Span { start, end }, span: reedline::Span {
append_whitespace, start: x.0.start - offset,
end: x.0.end - offset,
},
append_whitespace: true,
..Suggestion::default() ..Suggestion::default()
}, },
kind: Some(SuggestionKind::Module), // TODO????
} kind: None,
}) })
.collect::<Vec<_>>() })
.collect();
sort_suggestions(&prefix_str, output, options)
} }
} }
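The first step on either side of this file is splitting what the user typed into a directory to search and a partial name. A std-only sketch of one version of that split, mirroring the handling described in the comments above (no separator means the current directory; a leading separator keeps the root as the base, a special case that appears on only one side of the comparison); `split_prefix` is an illustrative name:

```rust
use std::path::{is_separator, MAIN_SEPARATOR_STR};

fn split_prefix(prefix: &str) -> (&str, &str) {
    match prefix.rsplit_once(is_separator) {
        // "/xx" splits into ("", "xx"), so fall back to the root as the base.
        Some(("", remain)) => (MAIN_SEPARATOR_STR, remain),
        Some((parent, remain)) => (parent, remain),
        None => (".", prefix),
    }
}

fn main() {
    assert_eq!(split_prefix("std/ut"), ("std", "ut"));
    assert_eq!(split_prefix("/et"), (MAIN_SEPARATOR_STR, "et"));
    assert_eq!(split_prefix("mymod"), (".", "mymod"));
}
```

The remaining work, searching the lib dirs and keeping only `.nu` files and directories, builds on that pair.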

View File

@ -1,112 +0,0 @@
use crate::completions::{
completion_common::surround_remove, completion_options::NuMatcher, Completer,
CompletionOptions, SemanticSuggestion, SuggestionKind,
};
use nu_protocol::{
engine::{Stack, StateWorkingSet},
ModuleId, Span,
};
use reedline::Suggestion;
pub struct ExportableCompletion<'a> {
pub module_id: ModuleId,
pub temp_working_set: Option<StateWorkingSet<'a>>,
}
/// If name contains space, wrap it in quotes
fn wrapped_name(name: String) -> String {
if !name.contains(' ') {
return name;
}
if name.contains('\'') {
format!("\"{}\"", name.replace('"', r#"\""#))
} else {
format!("'{name}'")
}
}
impl Completer for ExportableCompletion<'_> {
fn fetch(
&mut self,
working_set: &StateWorkingSet,
_stack: &Stack,
prefix: impl AsRef<str>,
span: Span,
offset: usize,
options: &CompletionOptions,
) -> Vec<SemanticSuggestion> {
let mut matcher = NuMatcher::<()>::new(surround_remove(prefix.as_ref()), options);
let mut results = Vec::new();
let span = reedline::Span {
start: span.start - offset,
end: span.end - offset,
};
// TODO: use matcher.add_lazy to lazy evaluate an item if it matches the prefix
let mut add_suggestion = |value: String,
description: Option<String>,
extra: Option<Vec<String>>,
kind: SuggestionKind| {
results.push(SemanticSuggestion {
suggestion: Suggestion {
value,
span,
description,
extra,
..Suggestion::default()
},
kind: Some(kind),
});
};
let working_set = self.temp_working_set.as_ref().unwrap_or(working_set);
let module = working_set.get_module(self.module_id);
for (name, decl_id) in &module.decls {
let name = String::from_utf8_lossy(name).to_string();
if matcher.matches(&name) {
let cmd = working_set.get_decl(*decl_id);
add_suggestion(
wrapped_name(name),
Some(cmd.description().to_string()),
None,
// `None` here avoids arguments being expanded by snippet edit style for lsp
SuggestionKind::Command(cmd.command_type(), None),
);
}
}
for (name, module_id) in &module.submodules {
let name = String::from_utf8_lossy(name).to_string();
if matcher.matches(&name) {
let comments = working_set.get_module_comments(*module_id).map(|spans| {
spans
.iter()
.map(|sp| {
String::from_utf8_lossy(working_set.get_span_contents(*sp)).into()
})
.collect::<Vec<String>>()
});
add_suggestion(
wrapped_name(name),
Some("Submodule".into()),
comments,
SuggestionKind::Module,
);
}
}
for (name, var_id) in &module.constants {
let name = String::from_utf8_lossy(name).to_string();
if matcher.matches(&name) {
let var = working_set.get_variable(*var_id);
add_suggestion(
wrapped_name(name),
var.const_val
.as_ref()
.and_then(|v| v.clone().coerce_into_string().ok()),
None,
SuggestionKind::Variable,
);
}
}
results
}
}
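The quoting helper at the top of this file decides how an exported name containing spaces is presented. A condensed version with example outputs, trimmed down for illustration:

```rust
/// Quote a completion value that contains spaces: prefer single quotes and
/// fall back to escaped double quotes when the name itself has a single quote.
fn wrapped_name(name: &str) -> String {
    if !name.contains(' ') {
        return name.to_string();
    }
    if name.contains('\'') {
        format!("\"{}\"", name.replace('"', "\\\""))
    } else {
        format!("'{name}'")
    }
}

fn main() {
    assert_eq!(wrapped_name("ls"), "ls");
    assert_eq!(wrapped_name("my cmd"), "'my cmd'");
    assert_eq!(wrapped_name("it's here"), "\"it's here\"");
}
```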

View File

@ -2,39 +2,49 @@ use crate::completions::{
completion_common::{adjust_if_intermediate, complete_item, AdjustView}, completion_common::{adjust_if_intermediate, complete_item, AdjustView},
Completer, CompletionOptions, Completer, CompletionOptions,
}; };
use nu_ansi_term::Style;
use nu_protocol::{ use nu_protocol::{
engine::{EngineState, Stack, StateWorkingSet}, engine::{EngineState, Stack, StateWorkingSet},
Span, Span,
}; };
use nu_utils::IgnoreCaseExt;
use reedline::Suggestion; use reedline::Suggestion;
use std::path::Path; use std::path::Path;
use super::{completion_common::FileSuggestion, SemanticSuggestion, SuggestionKind}; use super::SemanticSuggestion;
pub struct FileCompletion; #[derive(Clone, Default)]
pub struct FileCompletion {}
impl FileCompletion {
pub fn new() -> Self {
Self::default()
}
}
impl Completer for FileCompletion { impl Completer for FileCompletion {
fn fetch( fn fetch(
&mut self, &mut self,
working_set: &StateWorkingSet, working_set: &StateWorkingSet,
stack: &Stack, stack: &Stack,
prefix: impl AsRef<str>, prefix: &[u8],
span: Span, span: Span,
offset: usize, offset: usize,
_pos: usize,
options: &CompletionOptions, options: &CompletionOptions,
) -> Vec<SemanticSuggestion> { ) -> Vec<SemanticSuggestion> {
let AdjustView { let AdjustView {
prefix, prefix,
span, span,
readjusted, readjusted,
} = adjust_if_intermediate(prefix.as_ref(), working_set, span); } = adjust_if_intermediate(prefix, working_set, span);
#[allow(deprecated)] #[allow(deprecated)]
let items: Vec<_> = complete_item( let items: Vec<_> = complete_item(
readjusted, readjusted,
span, span,
&prefix, &prefix,
&[&working_set.permanent_state.current_work_dir()], &working_set.permanent_state.current_work_dir(),
options, options,
working_set.permanent_state, working_set.permanent_state,
stack, stack,
@ -42,19 +52,16 @@ impl Completer for FileCompletion {
.into_iter() .into_iter()
.map(move |x| SemanticSuggestion { .map(move |x| SemanticSuggestion {
suggestion: Suggestion { suggestion: Suggestion {
value: x.path, value: x.1,
style: x.style, style: x.2,
span: reedline::Span { span: reedline::Span {
start: x.span.start - offset, start: x.0.start - offset,
end: x.span.end - offset, end: x.0.end - offset,
}, },
..Suggestion::default() ..Suggestion::default()
}, },
kind: Some(if x.is_dir { // TODO????
SuggestionKind::Directory kind: None,
} else {
SuggestionKind::File
}),
}) })
.collect(); .collect();
@ -88,10 +95,21 @@ impl Completer for FileCompletion {
pub fn file_path_completion( pub fn file_path_completion(
span: nu_protocol::Span, span: nu_protocol::Span,
partial: &str, partial: &str,
cwds: &[impl AsRef<str>], cwd: &str,
options: &CompletionOptions, options: &CompletionOptions,
engine_state: &EngineState, engine_state: &EngineState,
stack: &Stack, stack: &Stack,
) -> Vec<FileSuggestion> { ) -> Vec<(nu_protocol::Span, String, Option<Style>)> {
complete_item(false, span, partial, cwds, options, engine_state, stack) complete_item(false, span, partial, cwd, options, engine_state, stack)
}
pub fn matches(partial: &str, from: &str, options: &CompletionOptions) -> bool {
// Check for case sensitive
if !options.case_sensitive {
return options
.match_algorithm
.matches_str(&from.to_folded_case(), &partial.to_folded_case());
}
options.match_algorithm.matches_str(from, partial)
} }
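The `matches` helper at the end of this file folds case only when the options ask for case-insensitive matching. A std-only sketch of that wrapper, reduced to a prefix check; `to_lowercase` stands in for `to_folded_case`, and the real helper delegates to the configured match algorithm rather than `starts_with`:

```rust
struct CompletionOptions {
    case_sensitive: bool,
}

fn matches(partial: &str, from: &str, options: &CompletionOptions) -> bool {
    if options.case_sensitive {
        from.starts_with(partial)
    } else {
        // Fold both sides only when asked to ignore case.
        from.to_lowercase().starts_with(&partial.to_lowercase())
    }
}

fn main() {
    let sensitive = CompletionOptions { case_sensitive: true };
    let insensitive = CompletionOptions { case_sensitive: false };
    assert!(!matches("read", "README.md", &sensitive));
    assert!(matches("read", "README.md", &insensitive));
}
```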

View File

@ -1,15 +1,22 @@
use crate::completions::{ use crate::completions::{completion_common::sort_suggestions, Completer, CompletionOptions};
completion_options::NuMatcher, Completer, CompletionOptions, SemanticSuggestion, SuggestionKind,
};
use nu_protocol::{ use nu_protocol::{
ast::{Expr, Expression},
engine::{Stack, StateWorkingSet}, engine::{Stack, StateWorkingSet},
DeclId, Span, Span,
}; };
use reedline::Suggestion; use reedline::Suggestion;
use super::SemanticSuggestion;
#[derive(Clone)] #[derive(Clone)]
pub struct FlagCompletion { pub struct FlagCompletion {
pub decl_id: DeclId, expression: Expression,
}
impl FlagCompletion {
pub fn new(expression: Expression) -> Self {
Self { expression }
}
} }
impl Completer for FlagCompletion { impl Completer for FlagCompletion {
@ -17,17 +24,31 @@ impl Completer for FlagCompletion {
&mut self, &mut self,
working_set: &StateWorkingSet, working_set: &StateWorkingSet,
_stack: &Stack, _stack: &Stack,
prefix: impl AsRef<str>, prefix: &[u8],
span: Span, span: Span,
offset: usize, offset: usize,
_pos: usize,
options: &CompletionOptions, options: &CompletionOptions,
) -> Vec<SemanticSuggestion> { ) -> Vec<SemanticSuggestion> {
let mut matcher = NuMatcher::new(prefix, options); // Check if it's a flag
let mut add_suggestion = |value: String, description: String| { if let Expr::Call(call) = &self.expression.expr {
matcher.add_semantic_suggestion(SemanticSuggestion { let decl = working_set.get_decl(call.decl_id);
let sig = decl.signature();
let mut output = vec![];
for named in &sig.named {
let flag_desc = &named.desc;
if let Some(short) = named.short {
let mut named = vec![0; short.len_utf8()];
short.encode_utf8(&mut named);
named.insert(0, b'-');
if options.match_algorithm.matches_u8(&named, prefix) {
output.push(SemanticSuggestion {
suggestion: Suggestion { suggestion: Suggestion {
value, value: String::from_utf8_lossy(&named).to_string(),
description: Some(description), description: Some(flag_desc.to_string()),
span: reedline::Span { span: reedline::Span {
start: span.start - offset, start: span.start - offset,
end: span.end - offset, end: span.end - offset,
@ -35,24 +56,41 @@ impl Completer for FlagCompletion {
append_whitespace: true, append_whitespace: true,
..Suggestion::default() ..Suggestion::default()
}, },
kind: Some(SuggestionKind::Flag), // TODO????
kind: None,
}); });
}; }
let decl = working_set.get_decl(self.decl_id);
let sig = decl.signature();
for named in &sig.named {
if let Some(short) = named.short {
let mut name = String::from("-");
name.push(short);
add_suggestion(name, named.desc.clone());
} }
if named.long.is_empty() { if named.long.is_empty() {
continue; continue;
} }
add_suggestion(format!("--{}", named.long), named.desc.clone());
} let mut named = named.long.as_bytes().to_vec();
matcher.results() named.insert(0, b'-');
named.insert(0, b'-');
if options.match_algorithm.matches_u8(&named, prefix) {
output.push(SemanticSuggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(&named).to_string(),
description: Some(flag_desc.to_string()),
span: reedline::Span {
start: span.start - offset,
end: span.end - offset,
},
append_whitespace: true,
..Suggestion::default()
},
// TODO????
kind: None,
});
}
}
return sort_suggestions(&String::from_utf8_lossy(prefix), output, options);
}
vec![]
} }
} }
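Both versions of this file derive flag candidates the same way: one `-s` entry per short flag, one `--long` entry per long flag, filtered against what has been typed so far. A standalone sketch of that derivation; `Flag` and `flag_candidates` are stand-ins for the named arguments of a real `nu-protocol` signature, and the plain prefix check stands in for the configured match algorithm:

```rust
struct Flag {
    long: &'static str,
    short: Option<char>,
    desc: &'static str,
}

fn flag_candidates(flags: &[Flag], prefix: &str) -> Vec<(String, String)> {
    let mut out = Vec::new();
    for flag in flags {
        if let Some(short) = flag.short {
            let name = format!("-{short}");
            if name.starts_with(prefix) {
                out.push((name, flag.desc.to_string()));
            }
        }
        if !flag.long.is_empty() {
            let name = format!("--{}", flag.long);
            if name.starts_with(prefix) {
                out.push((name, flag.desc.to_string()));
            }
        }
    }
    out
}

fn main() {
    let flags = [
        Flag { long: "all", short: Some('a'), desc: "Show hidden files" },
        Flag { long: "long", short: Some('l'), desc: "Long listing" },
    ];
    // Typing "--a" only offers "--all".
    let got = flag_candidates(&flags, "--a");
    assert_eq!(got, vec![("--all".to_string(), "Show hidden files".to_string())]);
}
```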

View File

@ -1,6 +1,4 @@
mod attribute_completions;
mod base; mod base;
mod cell_path_completions;
mod command_completions; mod command_completions;
mod completer; mod completer;
mod completion_common; mod completion_common;
@ -8,23 +6,19 @@ mod completion_options;
mod custom_completions; mod custom_completions;
mod directory_completions; mod directory_completions;
mod dotnu_completions; mod dotnu_completions;
mod exportable_completions;
mod file_completions; mod file_completions;
mod flag_completions; mod flag_completions;
mod operator_completions; mod operator_completions;
mod variable_completions; mod variable_completions;
pub use attribute_completions::{AttributableCompletion, AttributeCompletion};
pub use base::{Completer, SemanticSuggestion, SuggestionKind}; pub use base::{Completer, SemanticSuggestion, SuggestionKind};
pub use cell_path_completions::CellPathCompletion;
pub use command_completions::CommandCompletion; pub use command_completions::CommandCompletion;
pub use completer::NuCompleter; pub use completer::NuCompleter;
pub use completion_options::{CompletionOptions, MatchAlgorithm}; pub use completion_options::{CompletionOptions, MatchAlgorithm};
pub use custom_completions::CustomCompletion; pub use custom_completions::CustomCompletion;
pub use directory_completions::DirectoryCompletion; pub use directory_completions::DirectoryCompletion;
pub use dotnu_completions::DotNuCompletion; pub use dotnu_completions::DotNuCompletion;
pub use exportable_completions::ExportableCompletion; pub use file_completions::{file_path_completion, matches, FileCompletion};
pub use file_completions::{file_path_completion, FileCompletion};
pub use flag_completions::FlagCompletion; pub use flag_completions::FlagCompletion;
pub use operator_completions::OperatorCompletion; pub use operator_completions::OperatorCompletion;
pub use variable_completions::VariableCompletion; pub use variable_completions::VariableCompletion;

View File

@ -1,277 +1,180 @@
use crate::completions::{ use crate::completions::{
completion_options::NuMatcher, Completer, CompletionOptions, SemanticSuggestion, SuggestionKind, Completer, CompletionOptions, MatchAlgorithm, SemanticSuggestion, SuggestionKind,
}; };
use nu_protocol::{ use nu_protocol::{
ast::{self, Comparison, Expr, Expression}, ast::{Expr, Expression},
engine::{Stack, StateWorkingSet}, engine::{Stack, StateWorkingSet},
Span, Type, Value, ENV_VARIABLE_ID, Span, Type,
}; };
use reedline::Suggestion; use reedline::Suggestion;
use strum::{EnumMessage, IntoEnumIterator};
use super::cell_path_completions::eval_cell_path;
#[derive(Clone)] #[derive(Clone)]
pub struct OperatorCompletion<'a> { pub struct OperatorCompletion {
pub left_hand_side: &'a Expression, previous_expr: Expression,
} }
struct OperatorItem { impl OperatorCompletion {
pub symbols: String, pub fn new(previous_expr: Expression) -> Self {
pub description: String, OperatorCompletion { previous_expr }
}
fn operator_to_item<T: EnumMessage + AsRef<str>>(op: T) -> OperatorItem {
OperatorItem {
symbols: op.as_ref().into(),
description: op.get_message().unwrap_or_default().into(),
} }
} }
fn common_comparison_ops() -> Vec<OperatorItem> { impl Completer for OperatorCompletion {
vec![
operator_to_item(Comparison::In),
operator_to_item(Comparison::NotIn),
operator_to_item(Comparison::Equal),
operator_to_item(Comparison::NotEqual),
]
}
fn all_ops_for_immutable() -> Vec<OperatorItem> {
ast::Comparison::iter()
.map(operator_to_item)
.chain(ast::Math::iter().map(operator_to_item))
.chain(ast::Boolean::iter().map(operator_to_item))
.chain(ast::Bits::iter().map(operator_to_item))
.collect()
}
fn collection_comparison_ops() -> Vec<OperatorItem> {
let mut ops = common_comparison_ops();
ops.push(operator_to_item(Comparison::Has));
ops.push(operator_to_item(Comparison::NotHas));
ops
}
fn number_comparison_ops() -> Vec<OperatorItem> {
Comparison::iter()
.filter(|op| {
!matches!(
op,
Comparison::RegexMatch
| Comparison::NotRegexMatch
| Comparison::StartsWith
| Comparison::EndsWith
| Comparison::Has
| Comparison::NotHas
)
})
.map(operator_to_item)
.collect()
}
fn math_ops() -> Vec<OperatorItem> {
ast::Math::iter()
.filter(|op| !matches!(op, ast::Math::Concatenate | ast::Math::Pow))
.map(operator_to_item)
.collect()
}
fn bit_ops() -> Vec<OperatorItem> {
ast::Bits::iter().map(operator_to_item).collect()
}
fn all_assignment_ops() -> Vec<OperatorItem> {
ast::Assignment::iter().map(operator_to_item).collect()
}
fn numeric_assignment_ops() -> Vec<OperatorItem> {
ast::Assignment::iter()
.filter(|op| !matches!(op, ast::Assignment::ConcatenateAssign))
.map(operator_to_item)
.collect()
}
fn concat_assignment_ops() -> Vec<OperatorItem> {
vec![
operator_to_item(ast::Assignment::Assign),
operator_to_item(ast::Assignment::ConcatenateAssign),
]
}
fn valid_int_ops() -> Vec<OperatorItem> {
let mut ops = valid_float_ops();
ops.extend(bit_ops());
ops
}
fn valid_float_ops() -> Vec<OperatorItem> {
let mut ops = valid_value_with_unit_ops();
ops.push(operator_to_item(ast::Math::Pow));
ops
}
fn valid_string_ops() -> Vec<OperatorItem> {
let mut ops: Vec<OperatorItem> = Comparison::iter().map(operator_to_item).collect();
ops.push(operator_to_item(ast::Math::Concatenate));
ops.push(OperatorItem {
symbols: "like".into(),
description: Comparison::RegexMatch
.get_message()
.unwrap_or_default()
.into(),
});
ops.push(OperatorItem {
symbols: "not-like".into(),
description: Comparison::NotRegexMatch
.get_message()
.unwrap_or_default()
.into(),
});
ops
}
fn valid_list_ops() -> Vec<OperatorItem> {
let mut ops = collection_comparison_ops();
ops.push(operator_to_item(ast::Math::Concatenate));
ops
}
fn valid_binary_ops() -> Vec<OperatorItem> {
let mut ops = number_comparison_ops();
ops.extend(bit_ops());
ops.push(operator_to_item(ast::Math::Concatenate));
ops
}
fn valid_bool_ops() -> Vec<OperatorItem> {
let mut ops: Vec<OperatorItem> = ast::Boolean::iter().map(operator_to_item).collect();
ops.extend(common_comparison_ops());
ops
}
fn valid_value_with_unit_ops() -> Vec<OperatorItem> {
let mut ops = number_comparison_ops();
ops.extend(math_ops());
ops
}
fn ops_by_value(value: &Value, mutable: bool) -> Vec<OperatorItem> {
let mut ops = match value {
Value::Int { .. } => valid_int_ops(),
Value::Float { .. } => valid_float_ops(),
Value::String { .. } => valid_string_ops(),
Value::Binary { .. } => valid_binary_ops(),
Value::Bool { .. } => valid_bool_ops(),
Value::Date { .. } => number_comparison_ops(),
Value::Filesize { .. } | Value::Duration { .. } => valid_value_with_unit_ops(),
Value::Range { .. } | Value::Record { .. } => collection_comparison_ops(),
Value::List { .. } => valid_list_ops(),
_ => all_ops_for_immutable(),
};
if mutable {
ops.extend(match value {
Value::Int { .. }
| Value::Float { .. }
| Value::Filesize { .. }
| Value::Duration { .. } => numeric_assignment_ops(),
Value::String { .. } | Value::Binary { .. } | Value::List { .. } => {
concat_assignment_ops()
}
Value::Bool { .. }
| Value::Date { .. }
| Value::Range { .. }
| Value::Record { .. } => vec![operator_to_item(ast::Assignment::Assign)],
_ => all_assignment_ops(),
})
}
ops
}
fn is_expression_mutable(expr: &Expr, working_set: &StateWorkingSet) -> bool {
let Expr::FullCellPath(path) = expr else {
return false;
};
let Expr::Var(id) = path.head.expr else {
return false;
};
if id == ENV_VARIABLE_ID {
return true;
}
let var = working_set.get_variable(id);
var.mutable
}
impl Completer for OperatorCompletion<'_> {
fn fetch( fn fetch(
&mut self, &mut self,
working_set: &StateWorkingSet, working_set: &StateWorkingSet,
stack: &Stack, _stack: &Stack,
prefix: impl AsRef<str>, _prefix: &[u8],
span: Span, span: Span,
offset: usize, offset: usize,
options: &CompletionOptions, _pos: usize,
_options: &CompletionOptions,
) -> Vec<SemanticSuggestion> { ) -> Vec<SemanticSuggestion> {
let mut needs_assignment_ops = true; //Check if int, float, or string
// Complete according expression type let partial = std::str::from_utf8(working_set.get_span_contents(span)).unwrap_or("");
// TODO: type inference on self.left_hand_side to get more accurate completions let op = match &self.previous_expr.expr {
let mut possible_operations: Vec<OperatorItem> = match &self.left_hand_side.ty { Expr::BinaryOp(x, _, _) => &x.expr,
Type::Int | Type::Number => valid_int_ops(), _ => {
Type::Float => valid_float_ops(),
Type::String => valid_string_ops(),
Type::Binary => valid_binary_ops(),
Type::Bool => valid_bool_ops(),
Type::Date => number_comparison_ops(),
Type::Filesize | Type::Duration => valid_value_with_unit_ops(),
Type::Record(_) | Type::Range => collection_comparison_ops(),
Type::List(_) | Type::Table(_) => valid_list_ops(),
// Unknown type, resort to evaluated values
Type::Any => match &self.left_hand_side.expr {
Expr::FullCellPath(path) => {
// for `$ <tab>`
if matches!(path.head.expr, Expr::Garbage) {
return vec![]; return vec![];
} }
let value =
eval_cell_path(working_set, stack, &path.head, &path.tail, path.head.span)
.unwrap_or_default();
let mutable = is_expression_mutable(&self.left_hand_side.expr, working_set);
// to avoid duplication
needs_assignment_ops = false;
ops_by_value(&value, mutable)
}
_ => all_ops_for_immutable(),
},
_ => common_comparison_ops(),
}; };
// If the left hand side is a variable, add assignment operators if mutable let possible_operations = match op {
if needs_assignment_ops && is_expression_mutable(&self.left_hand_side.expr, working_set) { Expr::Int(_) => vec![
possible_operations.extend(match &self.left_hand_side.ty { ("+", "Add (Plus)"),
Type::Int | Type::Float | Type::Number => numeric_assignment_ops(), ("-", "Subtract (Minus)"),
Type::Filesize | Type::Duration => numeric_assignment_ops(), ("*", "Multiply"),
Type::String | Type::Binary | Type::List(_) => concat_assignment_ops(), ("/", "Divide"),
Type::Any => all_assignment_ops(), ("==", "Equal to"),
_ => vec![operator_to_item(ast::Assignment::Assign)], ("!=", "Not equal to"),
}); ("//", "Floor division"),
} ("<", "Less than"),
(">", "Greater than"),
("<=", "Less than or equal to"),
(">=", "Greater than or equal to"),
("mod", "Floor division remainder (Modulo)"),
("**", "Power of"),
("bit-or", "Bitwise OR"),
("bit-xor", "Bitwise exclusive OR"),
("bit-and", "Bitwise AND"),
("bit-shl", "Bitwise shift left"),
("bit-shr", "Bitwise shift right"),
("in", "Is a member of (doesn't use regex)"),
("not-in", "Is not a member of (doesn't use regex)"),
(
"++",
"Appends two lists, a list and a value, two strings, or two binary values",
),
],
Expr::String(_) => vec![
("=~", "Contains regex match"),
("!~", "Does not contain regex match"),
(
"++",
"Appends two lists, a list and a value, two strings, or two binary values",
),
("in", "Is a member of (doesn't use regex)"),
("not-in", "Is not a member of (doesn't use regex)"),
("starts-with", "Starts with"),
("ends-with", "Ends with"),
],
Expr::Float(_) => vec![
("+", "Add (Plus)"),
("-", "Subtract (Minus)"),
("*", "Multiply"),
("/", "Divide"),
("==", "Equal to"),
("!=", "Not equal to"),
("//", "Floor division"),
("<", "Less than"),
(">", "Greater than"),
("<=", "Less than or equal to"),
(">=", "Greater than or equal to"),
("mod", "Floor division remainder (Modulo)"),
("**", "Power of"),
("in", "Is a member of (doesn't use regex)"),
("not-in", "Is not a member of (doesn't use regex)"),
(
"++",
"Appends two lists, a list and a value, two strings, or two binary values",
),
],
Expr::Bool(_) => vec![
(
"and",
"Both values are true (short-circuits when first value is false)",
),
(
"or",
"Either value is true (short-circuits when first value is true)",
),
("xor", "One value is true and the other is false"),
("not", "Negates a value or expression"),
("in", "Is a member of (doesn't use regex)"),
("not-in", "Is not a member of (doesn't use regex)"),
(
"++",
"Appends two lists, a list and a value, two strings, or two binary values",
),
],
Expr::FullCellPath(path) => match path.head.expr {
Expr::List(_) => vec![(
"++",
"Appends two lists, a list and a value, two strings, or two binary values",
)],
Expr::Var(id) => get_variable_completions(id, working_set),
_ => vec![],
},
_ => vec![],
};
let mut matcher = NuMatcher::new(prefix, options); let match_algorithm = MatchAlgorithm::Prefix;
for OperatorItem { let input_fuzzy_search =
symbols, |(operator, _): &(&str, &str)| match_algorithm.matches_str(operator, partial);
description,
} in possible_operations possible_operations
{ .into_iter()
matcher.add_semantic_suggestion(SemanticSuggestion { .filter(input_fuzzy_search)
.map(move |x| SemanticSuggestion {
suggestion: Suggestion { suggestion: Suggestion {
value: symbols.to_owned(), value: x.0.to_string(),
description: Some(description.to_owned()), description: Some(x.1.to_string()),
span: reedline::Span::new(span.start - offset, span.end - offset), span: reedline::Span::new(span.start - offset, span.end - offset),
append_whitespace: true, append_whitespace: true,
..Suggestion::default() ..Suggestion::default()
}, },
kind: Some(SuggestionKind::Operator), kind: Some(SuggestionKind::Command(
}); nu_protocol::engine::CommandType::Builtin,
} )),
matcher.results() })
.collect()
}
}
pub fn get_variable_completions<'a>(
id: nu_protocol::Id<nu_protocol::marker::Var>,
working_set: &StateWorkingSet,
) -> Vec<(&'a str, &'a str)> {
let var = working_set.get_variable(id);
if !var.mutable {
return vec![];
}
match var.ty {
Type::List(_) | Type::String | Type::Binary => vec![
(
"++=",
"Appends a list, a value, a string, or a binary value to a variable.",
),
("=", "Assigns a value to a variable."),
],
Type::Int | Type::Float => vec![
("=", "Assigns a value to a variable."),
("+=", "Adds a value to a variable."),
("-=", "Subtracts a value from a variable."),
("*=", "Multiplies a variable by a value"),
("/=", "Divides a variable by a value."),
],
_ => vec![],
} }
} }
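At its core, both versions of this file implement "offer the operators that make sense for the left-hand side", whether that is derived from the inferred expression type or from the literal kind. A toy version of that lookup plus the prefix filter on the partially typed operator; `Ty`, the operator table, and `complete_operators` are illustrative only, not the full tables from the diff:

```rust
#[derive(Copy, Clone)]
enum Ty { Int, Str, Bool }

fn operators_for(ty: Ty) -> Vec<(&'static str, &'static str)> {
    match ty {
        Ty::Int => vec![("+", "Add"), ("-", "Subtract"), ("mod", "Modulo"), ("==", "Equal to")],
        Ty::Str => vec![("++", "Concatenate"), ("=~", "Regex match"), ("starts-with", "Starts with")],
        Ty::Bool => vec![("and", "Both true"), ("or", "Either true"), ("xor", "Exactly one true")],
    }
}

fn complete_operators(ty: Ty, partial: &str) -> Vec<&'static str> {
    operators_for(ty)
        .into_iter()
        .filter(|(op, _)| op.starts_with(partial))
        .map(|(op, _)| op)
        .collect()
}

fn main() {
    assert_eq!(complete_operators(Ty::Str, "st"), vec!["starts-with"]);
    assert_eq!(complete_operators(Ty::Int, "m"), vec!["mod"]);
    assert_eq!(complete_operators(Ty::Bool, "x"), vec!["xor"]);
}
```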

View File

@ -1,55 +1,167 @@
use crate::completions::{Completer, CompletionOptions, SemanticSuggestion, SuggestionKind}; use crate::completions::{
Completer, CompletionOptions, MatchAlgorithm, SemanticSuggestion, SuggestionKind,
};
use nu_engine::{column::get_columns, eval_variable};
use nu_protocol::{ use nu_protocol::{
engine::{Stack, StateWorkingSet}, engine::{Stack, StateWorkingSet},
Span, VarId, Span, Value,
}; };
use reedline::Suggestion; use reedline::Suggestion;
use std::str;
use super::completion_options::NuMatcher; use super::completion_common::sort_suggestions;
pub struct VariableCompletion; #[derive(Clone)]
pub struct VariableCompletion {
var_context: (Vec<u8>, Vec<Vec<u8>>), // tuple with $var and the sublevels (.b.c.d)
}
impl VariableCompletion {
pub fn new(var_context: (Vec<u8>, Vec<Vec<u8>>)) -> Self {
Self { var_context }
}
}
impl Completer for VariableCompletion { impl Completer for VariableCompletion {
fn fetch( fn fetch(
&mut self, &mut self,
working_set: &StateWorkingSet, working_set: &StateWorkingSet,
_stack: &Stack, stack: &Stack,
prefix: impl AsRef<str>, prefix: &[u8],
span: Span, span: Span,
offset: usize, offset: usize,
_pos: usize,
options: &CompletionOptions, options: &CompletionOptions,
) -> Vec<SemanticSuggestion> { ) -> Vec<SemanticSuggestion> {
let mut matcher = NuMatcher::new(prefix, options); let mut output = vec![];
let builtins = ["$nu", "$in", "$env"];
let var_str = std::str::from_utf8(&self.var_context.0).unwrap_or("");
let var_id = working_set.find_variable(&self.var_context.0);
let current_span = reedline::Span { let current_span = reedline::Span {
start: span.start - offset, start: span.start - offset,
end: span.end - offset, end: span.end - offset,
}; };
let sublevels_count = self.var_context.1.len();
let prefix_str = String::from_utf8_lossy(prefix);
// Completions for the given variable
if !var_str.is_empty() {
// Completion for $env.<tab>
if var_str == "$env" {
let env_vars = stack.get_env_vars(working_set.permanent_state);
// Return nested values
if sublevels_count > 0 {
// Extract the target var ($env.<target-var>)
let target_var = self.var_context.1[0].clone();
let target_var_str =
str::from_utf8(&target_var).unwrap_or_default().to_string();
// Everything after the target var is the nested level ($env.<target-var>.<nested_levels>...)
let nested_levels: Vec<Vec<u8>> =
self.var_context.1.clone().into_iter().skip(1).collect();
if let Some(val) = env_vars.get(&target_var_str) {
for suggestion in nested_suggestions(val, &nested_levels, current_span) {
if options.match_algorithm.matches_u8_insensitive(
options.case_sensitive,
suggestion.suggestion.value.as_bytes(),
prefix,
) {
output.push(suggestion);
}
}
return sort_suggestions(&prefix_str, output, options);
}
} else {
// No nesting provided, return all env vars
for env_var in env_vars {
if options.match_algorithm.matches_u8_insensitive(
options.case_sensitive,
env_var.0.as_bytes(),
prefix,
) {
output.push(SemanticSuggestion {
suggestion: Suggestion {
value: env_var.0,
span: current_span,
..Suggestion::default()
},
kind: Some(SuggestionKind::Type(env_var.1.get_type())),
});
}
}
return sort_suggestions(&prefix_str, output, options);
}
}
// Completions for $nu.<tab>
if var_str == "$nu" {
// Eval nu var
if let Ok(nuval) = eval_variable(
working_set.permanent_state,
stack,
nu_protocol::NU_VARIABLE_ID,
nu_protocol::Span::new(current_span.start, current_span.end),
) {
for suggestion in nested_suggestions(&nuval, &self.var_context.1, current_span)
{
if options.match_algorithm.matches_u8_insensitive(
options.case_sensitive,
suggestion.suggestion.value.as_bytes(),
prefix,
) {
output.push(suggestion);
}
}
return sort_suggestions(&prefix_str, output, options);
}
}
// Completion other variable types
if let Some(var_id) = var_id {
// Extract the variable value from the stack
let var = stack.get_var(var_id, Span::new(span.start, span.end));
// If the value exists and it's of type Record
if let Ok(value) = var {
for suggestion in nested_suggestions(&value, &self.var_context.1, current_span)
{
if options.match_algorithm.matches_u8_insensitive(
options.case_sensitive,
suggestion.suggestion.value.as_bytes(),
prefix,
) {
output.push(suggestion);
}
}
return sort_suggestions(&prefix_str, output, options);
}
}
}
// Variable completion (e.g: $en<tab> to complete $env) // Variable completion (e.g: $en<tab> to complete $env)
let builtins = ["$nu", "$in", "$env"];
for builtin in builtins { for builtin in builtins {
matcher.add_semantic_suggestion(SemanticSuggestion { if options.match_algorithm.matches_u8_insensitive(
options.case_sensitive,
builtin.as_bytes(),
prefix,
) {
output.push(SemanticSuggestion {
suggestion: Suggestion { suggestion: Suggestion {
value: builtin.to_string(), value: builtin.to_string(),
span: current_span, span: current_span,
description: Some("reserved".into()),
..Suggestion::default() ..Suggestion::default()
}, },
kind: Some(SuggestionKind::Variable), // TODO is there a way to get the VarId to get the type???
kind: None,
}); });
} }
}
let mut add_candidate = |name, var_id: &VarId| {
matcher.add_semantic_suggestion(SemanticSuggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(name).to_string(),
span: current_span,
description: Some(working_set.get_variable(*var_id).ty.to_string()),
..Suggestion::default()
},
kind: Some(SuggestionKind::Variable),
})
};
// TODO: The following can be refactored (see find_commands_by_predicate() used in // TODO: The following can be refactored (see find_commands_by_predicate() used in
// command_completions). // command_completions).
@ -57,11 +169,27 @@ impl Completer for VariableCompletion {
// Working set scope vars // Working set scope vars
for scope_frame in working_set.delta.scope.iter().rev() { for scope_frame in working_set.delta.scope.iter().rev() {
for overlay_frame in scope_frame.active_overlays(&mut removed_overlays).rev() { for overlay_frame in scope_frame.active_overlays(&mut removed_overlays).rev() {
for (name, var_id) in &overlay_frame.vars { for v in &overlay_frame.vars {
add_candidate(name, var_id); if options.match_algorithm.matches_u8_insensitive(
options.case_sensitive,
v.0,
prefix,
) {
output.push(SemanticSuggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(v.0).to_string(),
span: current_span,
..Suggestion::default()
},
kind: Some(SuggestionKind::Type(
working_set.get_variable(*v.1).ty.clone(),
)),
});
} }
} }
} }
}
// Permanent state vars
// for scope in &self.engine_state.scope {
for overlay_frame in working_set
@@ -69,11 +197,118 @@ impl Completer for VariableCompletion {
.active_overlays(&removed_overlays)
.rev()
{
for (name, var_id) in &overlay_frame.vars {
add_candidate(name, var_id);
for v in &overlay_frame.vars {
if options.match_algorithm.matches_u8_insensitive(
options.case_sensitive,
v.0,
prefix,
) {
output.push(SemanticSuggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(v.0).to_string(),
span: current_span,
..Suggestion::default()
},
kind: Some(SuggestionKind::Type(
working_set.get_variable(*v.1).ty.clone(),
)),
});
}
}
}
matcher.results()
output = sort_suggestions(&prefix_str, output, options);
output.dedup(); // TODO: Removes only consecutive duplicates, is it intended?
output
}
}
// Recursively find the values for sublevels.
// If no sublevels are set, it returns the current value.
fn nested_suggestions(
val: &Value,
sublevels: &[Vec<u8>],
current_span: reedline::Span,
) -> Vec<SemanticSuggestion> {
let mut output: Vec<SemanticSuggestion> = vec![];
let value = recursive_value(val, sublevels).unwrap_or_else(Value::nothing);
let kind = SuggestionKind::Type(value.get_type());
match value {
Value::Record { val, .. } => {
// Add all the columns as completion
for col in val.columns() {
output.push(SemanticSuggestion {
suggestion: Suggestion {
value: col.clone(),
span: current_span,
..Suggestion::default()
},
kind: Some(kind.clone()),
});
}
output
}
Value::List { vals, .. } => {
for column_name in get_columns(vals.as_slice()) {
output.push(SemanticSuggestion {
suggestion: Suggestion {
value: column_name,
span: current_span,
..Suggestion::default()
},
kind: Some(kind.clone()),
});
}
output
}
_ => output,
}
}
// Extracts the recursive value (e.g: $var.a.b.c)
fn recursive_value(val: &Value, sublevels: &[Vec<u8>]) -> Result<Value, Span> {
// Go to next sublevel
if let Some((sublevel, next_sublevels)) = sublevels.split_first() {
let span = val.span();
match val {
Value::Record { val, .. } => {
if let Some((_, value)) = val.iter().find(|(key, _)| key.as_bytes() == sublevel) {
// If it matches, try to fetch the next sublevel recursively
recursive_value(value, next_sublevels)
} else {
// Current sublevel value not found
Err(span)
}
}
Value::List { vals, .. } => {
for col in get_columns(vals.as_slice()) {
if col.as_bytes() == *sublevel {
let val = val.get_data_by_key(&col).ok_or(span)?;
return recursive_value(&val, next_sublevels);
}
}
// Current sublevel value not found
Err(span)
}
_ => Ok(val.clone()),
}
} else {
Ok(val.clone())
}
}
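The two helpers above drive completions like $var.a.b.c<tab>: recursive_value walks the already-typed sublevels, and nested_suggestions lists the columns found at that level. For intuition, a minimal standalone sketch of the same descent over plain nested maps follows; it deliberately avoids the nu_protocol::Value type, and the names and shapes here are illustrative only.

use std::collections::BTreeMap;

// A toy nested value: either a leaf string or a map of children.
enum Val {
    Leaf(String),
    Map(BTreeMap<String, Val>),
}

// Walk `path` one key at a time, mirroring how recursive_value descends
// into records; returns None where the real code returns Err(span).
fn lookup<'a>(val: &'a Val, path: &[&str]) -> Option<&'a Val> {
    match path.split_first() {
        None => Some(val),
        Some((key, rest)) => match val {
            Val::Map(map) => lookup(map.get(*key)?, rest),
            // Like the `_ => Ok(val.clone())` arm: stop at non-container values.
            Val::Leaf(_) => Some(val),
        },
    }
}

fn main() {
    let mut b = BTreeMap::new();
    b.insert("c".to_string(), Val::Leaf("hit".to_string()));
    let mut a = BTreeMap::new();
    a.insert("b".to_string(), Val::Map(b));
    let mut root = BTreeMap::new();
    root.insert("a".to_string(), Val::Map(a));
    let var = Val::Map(root);

    // Analogous to completing `$var.a.b.<tab>`: descend to the "b" level first.
    if let Some(Val::Leaf(s)) = lookup(&var, &["a", "b", "c"]) {
        println!("completed path resolves to: {s}");
    }
    assert!(lookup(&var, &["a", "missing"]).is_none());
}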
impl MatchAlgorithm {
pub fn matches_u8_insensitive(&self, sensitive: bool, haystack: &[u8], needle: &[u8]) -> bool {
if sensitive {
self.matches_u8(haystack, needle)
} else {
self.matches_u8(&haystack.to_ascii_lowercase(), &needle.to_ascii_lowercase())
}
}
}
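matches_u8_insensitive simply lowercases both the candidate and the typed prefix before delegating to the underlying matcher when case-insensitive matching is requested. A minimal standalone sketch of that idea follows, with a plain prefix check standing in for MatchAlgorithm::matches_u8 (the real matcher may also support fuzzy matching):

// Plain prefix matching stands in here for MatchAlgorithm::matches_u8.
fn matches_u8(haystack: &[u8], needle: &[u8]) -> bool {
    haystack.starts_with(needle)
}

// Same shape as the helper above: lowercase both sides unless the caller
// asked for case-sensitive matching.
fn matches_u8_insensitive(sensitive: bool, haystack: &[u8], needle: &[u8]) -> bool {
    if sensitive {
        matches_u8(haystack, needle)
    } else {
        matches_u8(&haystack.to_ascii_lowercase(), &needle.to_ascii_lowercase())
    }
}

fn main() {
    assert!(matches_u8_insensitive(false, b"LAST_EXIT_CODE", b"last_"));
    assert!(!matches_u8_insensitive(true, b"LAST_EXIT_CODE", b"last_"));
}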

View File

@@ -18,7 +18,7 @@ const OLD_PLUGIN_FILE: &str = "plugin.nu";
#[cfg(feature = "plugin")]
pub fn read_plugin_file(engine_state: &mut EngineState, plugin_file: Option<Spanned<String>>) {
use nu_protocol::{shell_error::io::IoError, ShellError};
use nu_protocol::ShellError;
use std::path::Path;
let span = plugin_file.as_ref().map(|s| s.span);
@ -49,10 +49,7 @@ pub fn read_plugin_file(engine_state: &mut EngineState, plugin_file: Option<Span
perf!(
"add plugin file to engine_state",
start_time,
engine_state
.get_config()
.use_ansi_coloring
.get(engine_state)
engine_state.get_config().use_ansi_coloring
);
start_time = std::time::Instant::now(); start_time = std::time::Instant::now();
@ -78,12 +75,16 @@ pub fn read_plugin_file(engine_state: &mut EngineState, plugin_file: Option<Span
} else {
report_shell_error(
engine_state,
&ShellError::Io(IoError::new_internal_with_path(
err.kind(),
"Could not open plugin registry file",
nu_protocol::location!(),
plugin_path,
)),
&ShellError::GenericError {
error: format!(
"Error while opening plugin registry file: {}",
plugin_path.display()
),
msg: "plugin path defined here".into(),
span,
help: None,
inner: vec![err.into()],
},
);
return;
}
@ -128,10 +129,7 @@ pub fn read_plugin_file(engine_state: &mut EngineState, plugin_file: Option<Span
perf!(
&format!("read plugin file {}", plugin_path.display()),
start_time,
engine_state
.get_config()
.use_ansi_coloring
.get(engine_state)
engine_state.get_config().use_ansi_coloring
);
start_time = std::time::Instant::now(); start_time = std::time::Instant::now();
@ -147,10 +145,7 @@ pub fn read_plugin_file(engine_state: &mut EngineState, plugin_file: Option<Span
perf!(
&format!("load plugin file {}", plugin_path.display()),
start_time,
engine_state
.get_config()
.use_ansi_coloring
.get(engine_state)
engine_state.get_config().use_ansi_coloring
);
}
}
@ -230,8 +225,8 @@ pub fn eval_config_contents(
#[cfg(feature = "plugin")]
pub fn migrate_old_plugin_file(engine_state: &EngineState) -> bool {
use nu_protocol::{
shell_error::io::IoError, PluginExample, PluginIdentity, PluginRegistryItem,
PluginRegistryItemData, PluginSignature, ShellError,
PluginExample, PluginIdentity, PluginRegistryItem, PluginRegistryItemData, PluginSignature,
ShellError,
};
use std::collections::BTreeMap;
@ -320,15 +315,7 @@ pub fn migrate_old_plugin_file(engine_state: &EngineState) -> bool {
// Write the new file
let new_plugin_file_path = config_dir.join(PLUGIN_FILE);
if let Err(err) = std::fs::File::create(&new_plugin_file_path)
.map_err(|err| {
IoError::new_internal_with_path(
err.kind(),
"Could not create new plugin file",
nu_protocol::location!(),
new_plugin_file_path.clone(),
)
})
.map_err(ShellError::from)
.map_err(|e| e.into())
.and_then(|file| contents.write_to(file, None))
{
report_shell_error(
@ -358,10 +345,7 @@ pub fn migrate_old_plugin_file(engine_state: &EngineState) -> bool {
perf!(
"migrate old plugin file",
start_time,
engine_state
.get_config()
.use_ansi_coloring
.get(&engine_state)
engine_state.get_config().use_ansi_coloring
);
true
}

View File

@ -1,5 +1,5 @@
use log::info;
use nu_engine::eval_block;
use nu_engine::{convert_env_values, eval_block};
use nu_parser::parse;
use nu_protocol::{
cli_error::report_compile_error,
@ -9,8 +9,6 @@ use nu_protocol::{
};
use std::sync::Arc;
use crate::util::print_pipeline;
#[derive(Default)]
pub struct EvaluateCommandsOpts {
pub table_mode: Option<Value>,
@@ -50,6 +48,9 @@ pub fn evaluate_commands(
}
}
// Translate environment variables from Strings to Values
convert_env_values(engine_state, stack)?;
// Parse the source code // Parse the source code
let (block, delta) = {
if let Some(ref t_mode) = table_mode {
@ -71,7 +72,7 @@ pub fn evaluate_commands(
if let Some(err) = working_set.compile_errors.first() {
report_compile_error(&working_set, err);
std::process::exit(1);
// Not a fatal error, for now
}
(output, working_set.render())
@ -92,7 +93,7 @@ pub fn evaluate_commands(
t_mode.coerce_str()?.parse().unwrap_or_default();
}
print_pipeline(engine_state, stack, pipeline, no_newline)?;
pipeline.print(engine_state, stack, no_newline, false)?;
info!("evaluate {}:{}:{}", file!(), line!(), column!());

View File

@ -1,17 +1,15 @@
use crate::util::{eval_source, print_pipeline};
use crate::util::eval_source;
use log::{info, trace};
use nu_engine::eval_block;
use nu_engine::{convert_env_values, eval_block};
use nu_parser::parse;
use nu_path::canonicalize_with;
use nu_protocol::{
cli_error::report_compile_error,
debugger::WithoutDebug,
engine::{EngineState, Stack, StateWorkingSet},
report_parse_error, report_parse_warning,
shell_error::io::*,
PipelineData, ShellError, Span, Value,
report_parse_error, report_parse_warning, PipelineData, ShellError, Span, Value,
};
use std::{path::PathBuf, sync::Arc};
use std::sync::Arc;
/// Entry point for evaluating a file.
///
@ -24,15 +22,15 @@ pub fn evaluate_file(
stack: &mut Stack,
input: PipelineData,
) -> Result<(), ShellError> {
// Convert environment variables from Strings to Values and store them in the engine state.
convert_env_values(engine_state, stack)?;
let cwd = engine_state.cwd_as_string(Some(stack))?;
let file_path = canonicalize_with(&path, cwd).map_err(|err| {
IoError::new_internal_with_path(
err.kind().not_found_as(NotFound::File),
"Could not access file",
nu_protocol::location!(),
PathBuf::from(&path),
)
let file_path =
canonicalize_with(&path, cwd).map_err(|err| ShellError::FileNotFoundCustom {
msg: format!("Could not access file '{path}': {err}"),
span: Span::unknown(),
})?;
let file_path_str = file_path
@@ -45,23 +43,17 @@ pub fn evaluate_file(
span: Span::unknown(),
})?;
let file = std::fs::read(&file_path).map_err(|err| {
IoError::new_internal_with_path(
err.kind().not_found_as(NotFound::File),
"Could not read file",
nu_protocol::location!(),
file_path.clone(),
)
let file = std::fs::read(&file_path).map_err(|err| ShellError::FileNotFoundCustom {
msg: format!("Could not read file '{file_path_str}': {err}"),
span: Span::unknown(),
})?;
engine_state.file = Some(file_path.clone());
let parent = file_path.parent().ok_or_else(|| {
IoError::new_internal_with_path(
ErrorKind::DirectoryNotFound,
"The file path does not have a parent",
nu_protocol::location!(),
file_path.clone(),
)
let parent = file_path
.parent()
.ok_or_else(|| ShellError::FileNotFoundCustom {
msg: format!("The file path '{file_path_str}' does not have a parent"),
span: Span::unknown(),
})?;
stack.add_env_var(
@ -97,7 +89,7 @@ pub fn evaluate_file(
if let Some(err) = working_set.compile_errors.first() {
report_compile_error(&working_set, err);
std::process::exit(1);
// Not a fatal error, for now
}
// Look for blocks whose name starts with "main" and replace it with the filename.
@ -127,7 +119,7 @@ pub fn evaluate_file(
};
// Print the pipeline output of the last command of the file.
print_pipeline(engine_state, stack, pipeline, true)?;
pipeline.print(engine_state, stack, true, false)?;
// Invoke the main command with arguments.
// Arguments with whitespaces are quoted, thus can be safely concatenated by whitespace.

View File

@ -65,12 +65,8 @@ Since this command has no output, there is no point in piping it with other comm
arg.into_pipeline_data()
.print_raw(engine_state, no_newline, to_stderr)?;
} else {
arg.into_pipeline_data().print_table(
engine_state,
stack,
no_newline,
to_stderr,
)?;
arg.into_pipeline_data()
.print(engine_state, stack, no_newline, to_stderr)?;
}
}
} else if !input.is_nothing() {
@ -82,7 +78,7 @@ Since this command has no output, there is no point in piping it with other comm
if raw {
input.print_raw(engine_state, no_newline, to_stderr)?;
} else {
input.print_table(engine_state, stack, no_newline, to_stderr)?;
input.print(engine_state, stack, no_newline, to_stderr)?;
}
}

View File

@ -1,5 +1,5 @@
use crate::NushellPrompt;
use log::{trace, warn};
use log::trace;
use nu_engine::ClosureEvalOnce;
use nu_protocol::{
engine::{EngineState, Stack},
@ -30,21 +30,30 @@ pub(crate) const TRANSIENT_PROMPT_MULTILINE_INDICATOR: &str =
pub(crate) const PRE_PROMPT_MARKER: &str = "\x1b]133;A\x1b\\"; pub(crate) const PRE_PROMPT_MARKER: &str = "\x1b]133;A\x1b\\";
pub(crate) const POST_PROMPT_MARKER: &str = "\x1b]133;B\x1b\\"; pub(crate) const POST_PROMPT_MARKER: &str = "\x1b]133;B\x1b\\";
pub(crate) const PRE_EXECUTION_MARKER: &str = "\x1b]133;C\x1b\\"; pub(crate) const PRE_EXECUTION_MARKER: &str = "\x1b]133;C\x1b\\";
#[allow(dead_code)]
pub(crate) const POST_EXECUTION_MARKER_PREFIX: &str = "\x1b]133;D;"; pub(crate) const POST_EXECUTION_MARKER_PREFIX: &str = "\x1b]133;D;";
#[allow(dead_code)]
pub(crate) const POST_EXECUTION_MARKER_SUFFIX: &str = "\x1b\\"; pub(crate) const POST_EXECUTION_MARKER_SUFFIX: &str = "\x1b\\";
// OSC633 is the same as OSC133 but specifically for VSCode // OSC633 is the same as OSC133 but specifically for VSCode
pub(crate) const VSCODE_PRE_PROMPT_MARKER: &str = "\x1b]633;A\x1b\\"; pub(crate) const VSCODE_PRE_PROMPT_MARKER: &str = "\x1b]633;A\x1b\\";
pub(crate) const VSCODE_POST_PROMPT_MARKER: &str = "\x1b]633;B\x1b\\"; pub(crate) const VSCODE_POST_PROMPT_MARKER: &str = "\x1b]633;B\x1b\\";
#[allow(dead_code)]
pub(crate) const VSCODE_PRE_EXECUTION_MARKER: &str = "\x1b]633;C\x1b\\"; pub(crate) const VSCODE_PRE_EXECUTION_MARKER: &str = "\x1b]633;C\x1b\\";
#[allow(dead_code)]
//"\x1b]633;D;{}\x1b\\" //"\x1b]633;D;{}\x1b\\"
pub(crate) const VSCODE_POST_EXECUTION_MARKER_PREFIX: &str = "\x1b]633;D;"; pub(crate) const VSCODE_POST_EXECUTION_MARKER_PREFIX: &str = "\x1b]633;D;";
#[allow(dead_code)]
pub(crate) const VSCODE_POST_EXECUTION_MARKER_SUFFIX: &str = "\x1b\\"; pub(crate) const VSCODE_POST_EXECUTION_MARKER_SUFFIX: &str = "\x1b\\";
#[allow(dead_code)]
//"\x1b]633;E;{}\x1b\\" //"\x1b]633;E;{}\x1b\\"
pub(crate) const VSCODE_COMMANDLINE_MARKER_PREFIX: &str = "\x1b]633;E;"; pub(crate) const VSCODE_COMMANDLINE_MARKER_PREFIX: &str = "\x1b]633;E;";
#[allow(dead_code)]
pub(crate) const VSCODE_COMMANDLINE_MARKER_SUFFIX: &str = "\x1b\\"; pub(crate) const VSCODE_COMMANDLINE_MARKER_SUFFIX: &str = "\x1b\\";
#[allow(dead_code)]
// "\x1b]633;P;Cwd={}\x1b\\" // "\x1b]633;P;Cwd={}\x1b\\"
pub(crate) const VSCODE_CWD_PROPERTY_MARKER_PREFIX: &str = "\x1b]633;P;Cwd="; pub(crate) const VSCODE_CWD_PROPERTY_MARKER_PREFIX: &str = "\x1b]633;P;Cwd=";
#[allow(dead_code)]
pub(crate) const VSCODE_CWD_PROPERTY_MARKER_SUFFIX: &str = "\x1b\\"; pub(crate) const VSCODE_CWD_PROPERTY_MARKER_SUFFIX: &str = "\x1b\\";
pub(crate) const RESET_APPLICATION_MODE: &str = "\x1b[?1l"; pub(crate) const RESET_APPLICATION_MODE: &str = "\x1b[?1l";
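The constants above are FinalTerm/VS Code shell-integration sequences (OSC 133, and OSC 633 for VS Code). As a rough sketch of how a shell might emit them, the prompt text is bracketed by the A/B markers, command output starts at C, and the exit code is reported with D; the snippet below is illustrative only and is not how nushell wires these markers up internally.

// Minimal illustration of emitting OSC 133 shell-integration markers
// around a prompt and a command's output.
const PRE_PROMPT_MARKER: &str = "\x1b]133;A\x1b\\";
const POST_PROMPT_MARKER: &str = "\x1b]133;B\x1b\\";
const PRE_EXECUTION_MARKER: &str = "\x1b]133;C\x1b\\";
const POST_EXECUTION_MARKER_PREFIX: &str = "\x1b]133;D;";
const POST_EXECUTION_MARKER_SUFFIX: &str = "\x1b\\";

fn main() {
    let prompt = "> ";
    let exit_code = 0;
    // The pre-prompt and post-prompt markers wrap the prompt text itself.
    print!("{PRE_PROMPT_MARKER}{prompt}{POST_PROMPT_MARKER}");
    // Just before running the user's command, mark the start of its output ...
    print!("{PRE_EXECUTION_MARKER}");
    // ... and once the command has finished, report its exit code.
    print!("{POST_EXECUTION_MARKER_PREFIX}{exit_code}{POST_EXECUTION_MARKER_SUFFIX}");
}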
@ -80,19 +89,18 @@ fn get_prompt_string(
})
.and_then(|pipeline_data| {
let output = pipeline_data.collect_string("", config).ok();
let ansi_output = output.map(|mut x| {
// Always reset the color at the start of the right prompt
// to ensure there is no ansi bleed over
if x.is_empty() && prompt == PROMPT_COMMAND_RIGHT {
x.insert_str(0, "\x1b[0m")
};
output.map(|mut x| {
// Just remove the very last newline.
if x.ends_with('\n') {
x.pop();
}
if x.ends_with('\r') {
x.pop();
}
x
});
})
// Let's keep this for debugging purposes with nu --log-level warn
warn!("{}:{}:{} {:?}", file!(), line!(), column!(), ansi_output);
ansi_output
})
}

View File

@ -740,15 +740,9 @@ fn add_keybinding(
let span = mode.span();
match &mode {
Value::String { val, .. } => match val.as_str() {
str if str.eq_ignore_ascii_case("emacs") => {
add_parsed_keybinding(emacs_keybindings, keybinding, config)
}
str if str.eq_ignore_ascii_case("vi_insert") => {
add_parsed_keybinding(insert_keybindings, keybinding, config)
}
str if str.eq_ignore_ascii_case("vi_normal") => {
add_parsed_keybinding(normal_keybindings, keybinding, config)
}
"emacs" => add_parsed_keybinding(emacs_keybindings, keybinding, config),
"vi_insert" => add_parsed_keybinding(insert_keybindings, keybinding, config),
"vi_normal" => add_parsed_keybinding(normal_keybindings, keybinding, config),
str => Err(ShellError::InvalidValue {
valid: "'emacs', 'vi_insert', or 'vi_normal'".into(),
actual: format!("'{str}'"),
@ -864,10 +858,10 @@ fn add_parsed_keybinding(
c if c.starts_with('f') => c[1..]
.parse()
.ok()
.filter(|num| (1..=35).contains(num))
.filter(|num| (1..=20).contains(num))
.map(KeyCode::F)
.ok_or(ShellError::InvalidValue {
valid: "'f1', 'f2', ..., or 'f35'".into(),
valid: "'f1', 'f2', ..., or 'f20'".into(),
actual: format!("'{keycode}'"),
span: keybinding.keycode.span(),
})?,
@ -998,54 +992,41 @@ fn event_from_record(
) -> Result<ReedlineEvent, ShellError> { ) -> Result<ReedlineEvent, ShellError> {
let event = match name { let event = match name {
"none" => ReedlineEvent::None, "none" => ReedlineEvent::None,
"clearscreen" => ReedlineEvent::ClearScreen,
"clearscrollback" => ReedlineEvent::ClearScrollback,
"historyhintcomplete" => ReedlineEvent::HistoryHintComplete, "historyhintcomplete" => ReedlineEvent::HistoryHintComplete,
"historyhintwordcomplete" => ReedlineEvent::HistoryHintWordComplete, "historyhintwordcomplete" => ReedlineEvent::HistoryHintWordComplete,
"ctrld" => ReedlineEvent::CtrlD, "ctrld" => ReedlineEvent::CtrlD,
"ctrlc" => ReedlineEvent::CtrlC, "ctrlc" => ReedlineEvent::CtrlC,
"clearscreen" => ReedlineEvent::ClearScreen,
"clearscrollback" => ReedlineEvent::ClearScrollback,
"enter" => ReedlineEvent::Enter, "enter" => ReedlineEvent::Enter,
"submit" => ReedlineEvent::Submit, "submit" => ReedlineEvent::Submit,
"submitornewline" => ReedlineEvent::SubmitOrNewline, "submitornewline" => ReedlineEvent::SubmitOrNewline,
"esc" | "escape" => ReedlineEvent::Esc, "esc" | "escape" => ReedlineEvent::Esc,
// Non-sensical for user configuration:
//
// `ReedlineEvent::Mouse` - itself a no-op
// `ReedlineEvent::Resize` - requires size info specifically from the ANSI resize
// event
//
// Handled above in `parse_event`:
//
// `ReedlineEvent::Edit`
"repaint" => ReedlineEvent::Repaint,
"previoushistory" => ReedlineEvent::PreviousHistory,
"up" => ReedlineEvent::Up, "up" => ReedlineEvent::Up,
"down" => ReedlineEvent::Down, "down" => ReedlineEvent::Down,
"right" => ReedlineEvent::Right, "right" => ReedlineEvent::Right,
"left" => ReedlineEvent::Left, "left" => ReedlineEvent::Left,
"nexthistory" => ReedlineEvent::NextHistory,
"searchhistory" => ReedlineEvent::SearchHistory, "searchhistory" => ReedlineEvent::SearchHistory,
// Handled above in `parse_event`: "nexthistory" => ReedlineEvent::NextHistory,
// "previoushistory" => ReedlineEvent::PreviousHistory,
// `ReedlineEvent::Multiple` "repaint" => ReedlineEvent::Repaint,
// `ReedlineEvent::UntilFound` "menudown" => ReedlineEvent::MenuDown,
"menuup" => ReedlineEvent::MenuUp,
"menuleft" => ReedlineEvent::MenuLeft,
"menuright" => ReedlineEvent::MenuRight,
"menunext" => ReedlineEvent::MenuNext,
"menuprevious" => ReedlineEvent::MenuPrevious,
"menupagenext" => ReedlineEvent::MenuPageNext,
"menupageprevious" => ReedlineEvent::MenuPagePrevious,
"openeditor" => ReedlineEvent::OpenEditor,
"menu" => { "menu" => {
let menu = extract_value("name", record, span)?; let menu = extract_value("name", record, span)?;
ReedlineEvent::Menu(menu.to_expanded_string("", config)) ReedlineEvent::Menu(menu.to_expanded_string("", config))
} }
"menunext" => ReedlineEvent::MenuNext,
"menuprevious" => ReedlineEvent::MenuPrevious,
"menuup" => ReedlineEvent::MenuUp,
"menudown" => ReedlineEvent::MenuDown,
"menuleft" => ReedlineEvent::MenuLeft,
"menuright" => ReedlineEvent::MenuRight,
"menupagenext" => ReedlineEvent::MenuPageNext,
"menupageprevious" => ReedlineEvent::MenuPagePrevious,
"executehostcommand" => { "executehostcommand" => {
let cmd = extract_value("cmd", record, span)?; let cmd = extract_value("cmd", record, span)?;
ReedlineEvent::ExecuteHostCommand(cmd.to_expanded_string("", config)) ReedlineEvent::ExecuteHostCommand(cmd.to_expanded_string("", config))
} }
"openeditor" => ReedlineEvent::OpenEditor,
str => { str => {
return Err(ShellError::InvalidValue { return Err(ShellError::InvalidValue {
valid: "a reedline event".into(), valid: "a reedline event".into(),
@ -1075,6 +1056,7 @@ fn edit_from_record(
.and_then(|value| value.as_bool()) .and_then(|value| value.as_bool())
.unwrap_or(false), .unwrap_or(false),
}, },
"movetoend" => EditCommand::MoveToEnd { "movetoend" => EditCommand::MoveToEnd {
select: extract_value("select", record, span) select: extract_value("select", record, span)
.and_then(|value| value.as_bool()) .and_then(|value| value.as_bool())
@ -1110,16 +1092,6 @@ fn edit_from_record(
.and_then(|value| value.as_bool()) .and_then(|value| value.as_bool())
.unwrap_or(false), .unwrap_or(false),
}, },
"movewordrightstart" => EditCommand::MoveWordRightStart {
select: extract_value("select", record, span)
.and_then(|value| value.as_bool())
.unwrap_or(false),
},
"movebigwordrightstart" => EditCommand::MoveBigWordRightStart {
select: extract_value("select", record, span)
.and_then(|value| value.as_bool())
.unwrap_or(false),
},
"movewordrightend" => EditCommand::MoveWordRightEnd { "movewordrightend" => EditCommand::MoveWordRightEnd {
select: extract_value("select", record, span) select: extract_value("select", record, span)
.and_then(|value| value.as_bool()) .and_then(|value| value.as_bool())
@ -1130,6 +1102,16 @@ fn edit_from_record(
.and_then(|value| value.as_bool()) .and_then(|value| value.as_bool())
.unwrap_or(false), .unwrap_or(false),
}, },
"movewordrightstart" => EditCommand::MoveWordRightStart {
select: extract_value("select", record, span)
.and_then(|value| value.as_bool())
.unwrap_or(false),
},
"movebigwordrightstart" => EditCommand::MoveBigWordRightStart {
select: extract_value("select", record, span)
.and_then(|value| value.as_bool())
.unwrap_or(false),
},
"movetoposition" => { "movetoposition" => {
let value = extract_value("value", record, span)?; let value = extract_value("value", record, span)?;
let select = extract_value("select", record, span) let select = extract_value("select", record, span)
@ -1151,13 +1133,6 @@ fn edit_from_record(
EditCommand::InsertString(value.to_expanded_string("", config)) EditCommand::InsertString(value.to_expanded_string("", config))
} }
"insertnewline" => EditCommand::InsertNewline, "insertnewline" => EditCommand::InsertNewline,
"replacechar" => {
let value = extract_value("value", record, span)?;
let char = extract_char(value)?;
EditCommand::ReplaceChar(char)
}
// `EditCommand::ReplaceChars` - Internal hack not sanely implementable as a
// standalone binding
"backspace" => EditCommand::Backspace, "backspace" => EditCommand::Backspace,
"delete" => EditCommand::Delete, "delete" => EditCommand::Delete,
"cutchar" => EditCommand::CutChar, "cutchar" => EditCommand::CutChar,
@ -1165,7 +1140,6 @@ fn edit_from_record(
"deleteword" => EditCommand::DeleteWord, "deleteword" => EditCommand::DeleteWord,
"clear" => EditCommand::Clear, "clear" => EditCommand::Clear,
"cleartolineend" => EditCommand::ClearToLineEnd, "cleartolineend" => EditCommand::ClearToLineEnd,
"complete" => EditCommand::Complete,
"cutcurrentline" => EditCommand::CutCurrentLine, "cutcurrentline" => EditCommand::CutCurrentLine,
"cutfromstart" => EditCommand::CutFromStart, "cutfromstart" => EditCommand::CutFromStart,
"cutfromlinestart" => EditCommand::CutFromLineStart, "cutfromlinestart" => EditCommand::CutFromLineStart,
@ -1182,7 +1156,6 @@ fn edit_from_record(
"uppercaseword" => EditCommand::UppercaseWord, "uppercaseword" => EditCommand::UppercaseWord,
"lowercaseword" => EditCommand::LowercaseWord, "lowercaseword" => EditCommand::LowercaseWord,
"capitalizechar" => EditCommand::CapitalizeChar, "capitalizechar" => EditCommand::CapitalizeChar,
"switchcasechar" => EditCommand::SwitchcaseChar,
"swapwords" => EditCommand::SwapWords, "swapwords" => EditCommand::SwapWords,
"swapgraphemes" => EditCommand::SwapGraphemes, "swapgraphemes" => EditCommand::SwapGraphemes,
"undo" => EditCommand::Undo, "undo" => EditCommand::Undo,
@ -1239,64 +1212,17 @@ fn edit_from_record(
.unwrap_or(false); .unwrap_or(false);
EditCommand::MoveLeftBefore { c: char, select } EditCommand::MoveLeftBefore { c: char, select }
} }
"selectall" => EditCommand::SelectAll, "complete" => EditCommand::Complete,
"cutselection" => EditCommand::CutSelection, "cutselection" => EditCommand::CutSelection,
"copyselection" => EditCommand::CopySelection,
"paste" => EditCommand::Paste,
"copyfromstart" => EditCommand::CopyFromStart,
"copyfromlinestart" => EditCommand::CopyFromLineStart,
"copytoend" => EditCommand::CopyToEnd,
"copytolineend" => EditCommand::CopyToLineEnd,
"copycurrentline" => EditCommand::CopyCurrentLine,
"copywordleft" => EditCommand::CopyWordLeft,
"copybigwordleft" => EditCommand::CopyBigWordLeft,
"copywordright" => EditCommand::CopyWordRight,
"copybigwordright" => EditCommand::CopyBigWordRight,
"copywordrighttonext" => EditCommand::CopyWordRightToNext,
"copybigwordrighttonext" => EditCommand::CopyBigWordRightToNext,
"copyleft" => EditCommand::CopyLeft,
"copyright" => EditCommand::CopyRight,
"copyrightuntil" => {
let value = extract_value("value", record, span)?;
let char = extract_char(value)?;
EditCommand::CopyRightUntil(char)
}
"copyrightbefore" => {
let value = extract_value("value", record, span)?;
let char = extract_char(value)?;
EditCommand::CopyRightBefore(char)
}
"copyleftuntil" => {
let value = extract_value("value", record, span)?;
let char = extract_char(value)?;
EditCommand::CopyLeftUntil(char)
}
"copyleftbefore" => {
let value = extract_value("value", record, span)?;
let char = extract_char(value)?;
EditCommand::CopyLeftBefore(char)
}
"swapcursorandanchor" => EditCommand::SwapCursorAndAnchor,
#[cfg(feature = "system-clipboard")] #[cfg(feature = "system-clipboard")]
"cutselectionsystem" => EditCommand::CutSelectionSystem, "cutselectionsystem" => EditCommand::CutSelectionSystem,
"copyselection" => EditCommand::CopySelection,
#[cfg(feature = "system-clipboard")] #[cfg(feature = "system-clipboard")]
"copyselectionsystem" => EditCommand::CopySelectionSystem, "copyselectionsystem" => EditCommand::CopySelectionSystem,
"paste" => EditCommand::Paste,
#[cfg(feature = "system-clipboard")] #[cfg(feature = "system-clipboard")]
"pastesystem" => EditCommand::PasteSystem, "pastesystem" => EditCommand::PasteSystem,
"cutinside" => { "selectall" => EditCommand::SelectAll,
let value = extract_value("left", record, span)?;
let left = extract_char(value)?;
let value = extract_value("right", record, span)?;
let right = extract_char(value)?;
EditCommand::CutInside { left, right }
}
"yankinside" => {
let value = extract_value("left", record, span)?;
let left = extract_char(value)?;
let value = extract_value("right", record, span)?;
let right = extract_char(value)?;
EditCommand::YankInside { left, right }
}
str => { str => {
return Err(ShellError::InvalidValue { return Err(ShellError::InvalidValue {
valid: "a reedline EditCommand".into(), valid: "a reedline EditCommand".into(),

View File

@ -16,13 +16,11 @@ use crate::{
use crossterm::cursor::SetCursorStyle; use crossterm::cursor::SetCursorStyle;
use log::{error, trace, warn}; use log::{error, trace, warn};
use miette::{ErrReport, IntoDiagnostic, Result}; use miette::{ErrReport, IntoDiagnostic, Result};
use nu_cmd_base::util::get_editor; use nu_cmd_base::{hook::eval_hook, util::get_editor};
use nu_color_config::StyleComputer; use nu_color_config::StyleComputer;
#[allow(deprecated)] #[allow(deprecated)]
use nu_engine::env_to_strings; use nu_engine::{convert_env_values, current_dir_str, env_to_strings};
use nu_engine::exit::cleanup_exit;
use nu_parser::{lex, parse, trim_quotes_str}; use nu_parser::{lex, parse, trim_quotes_str};
use nu_protocol::shell_error::io::IoError;
use nu_protocol::{ use nu_protocol::{
config::NuCursorShape, config::NuCursorShape,
engine::{EngineState, Stack, StateWorkingSet}, engine::{EngineState, Stack, StateWorkingSet},
@ -37,7 +35,6 @@ use reedline::{
CursorConfig, CwdAwareHinter, DefaultCompleter, EditCommand, Emacs, FileBackedHistory, CursorConfig, CwdAwareHinter, DefaultCompleter, EditCommand, Emacs, FileBackedHistory,
HistorySessionId, Reedline, SqliteBackedHistory, Vi, HistorySessionId, Reedline, SqliteBackedHistory, Vi,
}; };
use std::sync::atomic::Ordering;
use std::{ use std::{
collections::HashMap, collections::HashMap,
env::temp_dir, env::temp_dir,
@ -64,7 +61,9 @@ pub fn evaluate_repl(
// from the Arc. This lets us avoid copying stack variables needlessly // from the Arc. This lets us avoid copying stack variables needlessly
let mut unique_stack = stack.clone(); let mut unique_stack = stack.clone();
let config = engine_state.get_config(); let config = engine_state.get_config();
let use_color = config.use_ansi_coloring.get(engine_state); let use_color = config.use_ansi_coloring;
confirm_stdin_is_terminal()?;
let mut entry_num = 0; let mut entry_num = 0;
@ -82,6 +81,13 @@ pub fn evaluate_repl(
stack.clone(), stack.clone(),
); );
let start_time = std::time::Instant::now();
// Translate environment variables from Strings to Values
if let Err(e) = convert_env_values(engine_state, &unique_stack) {
report_shell_error(engine_state, &e);
}
perf!("translate env vars", start_time, use_color);
// seed env vars // seed env vars
unique_stack.add_env_var( unique_stack.add_env_var(
"CMD_DURATION_MS".into(), "CMD_DURATION_MS".into(),
@ -105,8 +111,6 @@ pub fn evaluate_repl(
engine_state.merge_env(&mut unique_stack)?; engine_state.merge_env(&mut unique_stack)?;
} }
confirm_stdin_is_terminal()?;
let hostname = System::host_name(); let hostname = System::host_name();
if shell_integration_osc2 { if shell_integration_osc2 {
run_shell_integration_osc2(None, engine_state, &mut unique_stack, use_color); run_shell_integration_osc2(None, engine_state, &mut unique_stack, use_color);
@ -126,8 +130,13 @@ pub fn evaluate_repl(
// escape a few things because this says so // escape a few things because this says so
// https://code.visualstudio.com/docs/terminal/shell-integration#_vs-code-custom-sequences-osc-633-st // https://code.visualstudio.com/docs/terminal/shell-integration#_vs-code-custom-sequences-osc-633-st
let cmd_text = line_editor.current_buffer_contents().to_string(); let cmd_text = line_editor.current_buffer_contents().to_string();
let len = cmd_text.len();
let mut cmd_text_chars = cmd_text[0..len].chars();
let mut replaced_cmd_text = String::with_capacity(len);
let replaced_cmd_text = escape_special_vscode_bytes(&cmd_text)?; while let Some(c) = unescape_for_vscode(&mut cmd_text_chars) {
replaced_cmd_text.push(c);
}
run_shell_integration_osc633( run_shell_integration_osc633(
engine_state, engine_state,
@ -142,20 +151,7 @@ pub fn evaluate_repl(
// Regenerate the $nu constant to contain the startup time and any other potential updates // Regenerate the $nu constant to contain the startup time and any other potential updates
engine_state.generate_nu_constant(); engine_state.generate_nu_constant();
if load_std_lib.is_none() { if load_std_lib.is_none() && engine_state.get_config().show_banner {
match engine_state.get_config().show_banner {
Value::Bool { val: false, .. } => {}
Value::String { ref val, .. } if val == "short" => {
eval_source(
engine_state,
&mut unique_stack,
r#"banner --short"#.as_bytes(),
"show short banner",
PipelineData::empty(),
false,
);
}
_ => {
eval_source( eval_source(
engine_state, engine_state,
&mut unique_stack, &mut unique_stack,
@ -165,8 +161,6 @@ pub fn evaluate_repl(
false, false,
); );
} }
}
}
kitty_protocol_healthcheck(engine_state); kitty_protocol_healthcheck(engine_state);
@ -226,41 +220,26 @@ pub fn evaluate_repl(
Ok(()) Ok(())
} }
fn escape_special_vscode_bytes(input: &str) -> Result<String, ShellError> {
let bytes = input
.chars()
.flat_map(|c| {
let mut buf = [0; 4]; // Buffer to hold UTF-8 bytes of the character
let c_bytes = c.encode_utf8(&mut buf); // Get UTF-8 bytes for the character
if c_bytes.len() == 1 {
let byte = c_bytes.as_bytes()[0];
match byte {
// Escape bytes below 0x20
b if b < 0x20 => format!("\\x{:02X}", byte).into_bytes(),
// Escape semicolon as \x3B
b';' => "\\x3B".to_string().into_bytes(),
// Escape backslash as \\
b'\\' => "\\\\".to_string().into_bytes(),
// Otherwise, return the character unchanged
_ => vec![byte],
}
} else {
// pass through multi-byte characters unchanged
c_bytes.bytes().collect()
}
})
.collect();
String::from_utf8(bytes).map_err(|err| ShellError::CantConvert {
to_type: "string".to_string(),
from_type: "bytes".to_string(),
span: Span::unknown(),
help: Some(format!(
"Error {err}, Unable to convert {input} to escaped bytes"
)),
})
}
fn unescape_for_vscode(text: &mut std::str::Chars) -> Option<char> {
match text.next() {
Some('\\') => match text.next() {
Some('0') => Some('\x00'), // NUL '\0' (null character)
Some('a') => Some('\x07'), // BEL '\a' (bell)
Some('b') => Some('\x08'), // BS '\b' (backspace)
Some('t') => Some('\x09'), // HT '\t' (horizontal tab)
Some('n') => Some('\x0a'), // LF '\n' (new line)
Some('v') => Some('\x0b'), // VT '\v' (vertical tab)
Some('f') => Some('\x0c'), // FF '\f' (form feed)
Some('r') => Some('\x0d'), // CR '\r' (carriage ret)
Some(';') => Some('\x3b'), // semi-colon
Some('\\') => Some('\x5c'), // backslash
Some('e') => Some('\x1b'), // escape
Some(c) => Some(c),
None => None,
},
Some(c) => Some(c),
None => None,
}
}
fn get_line_editor(engine_state: &mut EngineState, use_color: bool) -> Result<Reedline> { fn get_line_editor(engine_state: &mut EngineState, use_color: bool) -> Result<Reedline> {
@ -317,6 +296,9 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
if let Err(err) = engine_state.merge_env(&mut stack) { if let Err(err) = engine_state.merge_env(&mut stack) {
report_shell_error(engine_state, &err); report_shell_error(engine_state, &err);
} }
// Check whether $env.NU_DISABLE_IR is set, so that the user can change it in the REPL
// Temporary while IR eval is optional
stack.use_ir = !stack.has_env_var(engine_state, "NU_DISABLE_IR");
perf!("merge env", start_time, use_color); perf!("merge env", start_time, use_color);
start_time = std::time::Instant::now(); start_time = std::time::Instant::now();
@ -324,26 +306,20 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
perf!("reset signals", start_time, use_color); perf!("reset signals", start_time, use_color);
start_time = std::time::Instant::now(); start_time = std::time::Instant::now();
// Right before we start our prompt and take input from the user, fire the "pre_prompt" hook // Right before we start our prompt and take input from the user,
if let Err(err) = hook::eval_hooks( // fire the "pre_prompt" hook
engine_state, if let Some(hook) = engine_state.get_config().hooks.pre_prompt.clone() {
&mut stack, if let Err(err) = eval_hook(engine_state, &mut stack, None, vec![], &hook, "pre_prompt") {
vec![],
&engine_state.get_config().hooks.pre_prompt.clone(),
"pre_prompt",
) {
report_shell_error(engine_state, &err); report_shell_error(engine_state, &err);
} }
}
perf!("pre-prompt hook", start_time, use_color); perf!("pre-prompt hook", start_time, use_color);
start_time = std::time::Instant::now(); start_time = std::time::Instant::now();
// Next, check all the environment variables they ask for // Next, check all the environment variables they ask for
// fire the "env_change" hook // fire the "env_change" hook
if let Err(error) = hook::eval_env_change_hook( let env_change = engine_state.get_config().hooks.env_change.clone();
&engine_state.get_config().hooks.env_change.clone(), if let Err(error) = hook::eval_env_change_hook(env_change, engine_state, &mut stack) {
engine_state,
&mut stack,
) {
report_shell_error(engine_state, &error) report_shell_error(engine_state, &error)
} }
perf!("env-change hook", start_time, use_color); perf!("env-change hook", start_time, use_color);
@ -386,7 +362,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
))) )))
.with_quick_completions(config.completions.quick) .with_quick_completions(config.completions.quick)
.with_partial_completions(config.completions.partial) .with_partial_completions(config.completions.partial)
.with_ansi_colors(config.use_ansi_coloring.get(engine_state)) .with_ansi_colors(config.use_ansi_coloring)
.with_cwd(Some( .with_cwd(Some(
engine_state engine_state
.cwd(None) .cwd(None)
@ -406,7 +382,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
let style_computer = StyleComputer::from_config(engine_state, &stack_arc); let style_computer = StyleComputer::from_config(engine_state, &stack_arc);
start_time = std::time::Instant::now(); start_time = std::time::Instant::now();
line_editor = if config.use_ansi_coloring.get(engine_state) { line_editor = if config.use_ansi_coloring {
line_editor.with_hinter(Box::new({ line_editor.with_hinter(Box::new({
// As of Nov 2022, "hints" color_config closures only get `null` passed in. // As of Nov 2022, "hints" color_config closures only get `null` passed in.
let style = style_computer.compute("hints", &Value::nothing(Span::unknown())); let style = style_computer.compute("hints", &Value::nothing(Span::unknown()));
@ -528,17 +504,18 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
// Right before we start running the code the user gave us, fire the `pre_execution` // Right before we start running the code the user gave us, fire the `pre_execution`
// hook // hook
{ if let Some(hook) = config.hooks.pre_execution.clone() {
// Set the REPL buffer to the current command for the "pre_execution" hook // Set the REPL buffer to the current command for the "pre_execution" hook
let mut repl = engine_state.repl_state.lock().expect("repl state mutex"); let mut repl = engine_state.repl_state.lock().expect("repl state mutex");
repl.buffer = repl_cmd_line_text.to_string(); repl.buffer = repl_cmd_line_text.to_string();
drop(repl); drop(repl);
if let Err(err) = hook::eval_hooks( if let Err(err) = eval_hook(
engine_state, engine_state,
&mut stack, &mut stack,
None,
vec![], vec![],
&engine_state.get_config().hooks.pre_execution.clone(), &hook,
"pre_execution", "pre_execution",
) { ) {
report_shell_error(engine_state, &err); report_shell_error(engine_state, &err);
@ -694,11 +671,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
); );
println!(); println!();
return (false, stack, line_editor);
cleanup_exit((), engine_state, 0);
// if cleanup_exit didn't exit, we should keep running
return (true, stack, line_editor);
} }
Err(err) => { Err(err) => {
let message = err.to_string(); let message = err.to_string();
@ -777,7 +750,7 @@ fn fill_in_result_related_history_metadata(
c.duration = Some(cmd_duration); c.duration = Some(cmd_duration);
c.exit_status = stack c.exit_status = stack
.get_env_var(engine_state, "LAST_EXIT_CODE") .get_env_var(engine_state, "LAST_EXIT_CODE")
.and_then(|e| e.as_int().ok()); .and_then(|e| e.as_i64().ok());
c c
}) })
.into_diagnostic()?; // todo: don't stop repl if error here? .into_diagnostic()?; // todo: don't stop repl if error here?
@ -816,10 +789,8 @@ fn parse_operation(
) -> Result<ReplOperation, ErrReport> { ) -> Result<ReplOperation, ErrReport> {
let tokens = lex(s.as_bytes(), 0, &[], &[], false); let tokens = lex(s.as_bytes(), 0, &[], &[], false);
// Check if this is a single call to a directory, if so auto-cd // Check if this is a single call to a directory, if so auto-cd
let cwd = engine_state #[allow(deprecated)]
.cwd(Some(stack)) let cwd = nu_engine::env::current_dir_str(engine_state, stack).unwrap_or_default();
.map(|p| p.to_string_lossy().to_string())
.unwrap_or_default();
let mut orig = s.clone(); let mut orig = s.clone();
if orig.starts_with('`') { if orig.starts_with('`') {
orig = trim_quotes_str(&orig).to_string() orig = trim_quotes_str(&orig).to_string()
@ -853,26 +824,21 @@ fn do_auto_cd(
if !path.exists() { if !path.exists() {
report_shell_error( report_shell_error(
engine_state, engine_state,
&ShellError::Io(IoError::new_with_additional_context( &ShellError::DirectoryNotFound {
std::io::ErrorKind::NotFound, dir: path.to_string_lossy().to_string(),
span, span,
PathBuf::from(&path), },
"Cannot change directory",
)),
); );
} }
path.to_string_lossy().to_string() path.to_string_lossy().to_string()
}; };
if let PermissionResult::PermissionDenied = have_permission(path.clone()) { if let PermissionResult::PermissionDenied(reason) = have_permission(path.clone()) {
report_shell_error( report_shell_error(
engine_state, engine_state,
&ShellError::Io(IoError::new_with_additional_context( &ShellError::IOError {
std::io::ErrorKind::PermissionDenied, msg: format!("Cannot change directory to {path}: {reason}"),
span, },
PathBuf::from(path),
"Cannot change directory",
)),
); );
return; return;
} }
@ -936,9 +902,6 @@ fn do_run_cmd(
trace!("eval source: {}", s); trace!("eval source: {}", s);
let mut cmds = s.split_whitespace(); let mut cmds = s.split_whitespace();
let had_warning_before = engine_state.exit_warning_given.load(Ordering::SeqCst);
if let Some("exit") = cmds.next() { if let Some("exit") = cmds.next() {
let mut working_set = StateWorkingSet::new(engine_state); let mut working_set = StateWorkingSet::new(engine_state);
let _ = parse(&mut working_set, None, s.as_bytes(), false); let _ = parse(&mut working_set, None, s.as_bytes(), false);
@ -947,11 +910,13 @@ fn do_run_cmd(
match cmds.next() { match cmds.next() {
Some(s) => { Some(s) => {
if let Ok(n) = s.parse::<i32>() { if let Ok(n) = s.parse::<i32>() {
return cleanup_exit(line_editor, engine_state, n); drop(line_editor);
std::process::exit(n);
} }
} }
None => { None => {
return cleanup_exit(line_editor, engine_state, 0); drop(line_editor);
std::process::exit(0);
} }
} }
} }
@ -970,14 +935,6 @@ fn do_run_cmd(
false, false,
); );
// if there was a warning before, and we got to this point, it means
// the possible call to cleanup_exit did not occur.
if had_warning_before && engine_state.is_interactive {
engine_state
.exit_warning_given
.store(false, Ordering::SeqCst);
}
line_editor line_editor
} }
@ -992,7 +949,8 @@ fn run_shell_integration_osc2(
stack: &mut Stack, stack: &mut Stack,
use_color: bool, use_color: bool,
) { ) {
if let Ok(path) = engine_state.cwd_as_string(Some(stack)) { #[allow(deprecated)]
if let Ok(path) = current_dir_str(engine_state, stack) {
let start_time = Instant::now(); let start_time = Instant::now();
// Try to abbreviate string for windows title // Try to abbreviate string for windows title
@ -1036,7 +994,8 @@ fn run_shell_integration_osc7(
stack: &mut Stack, stack: &mut Stack,
use_color: bool, use_color: bool,
) { ) {
if let Ok(path) = engine_state.cwd_as_string(Some(stack)) { #[allow(deprecated)]
if let Ok(path) = current_dir_str(engine_state, stack) {
let start_time = Instant::now(); let start_time = Instant::now();
// Otherwise, communicate the path as OSC 7 (often used for spawning new tabs in the same dir) // Otherwise, communicate the path as OSC 7 (often used for spawning new tabs in the same dir)
@ -1059,7 +1018,8 @@ fn run_shell_integration_osc7(
} }
fn run_shell_integration_osc9_9(engine_state: &EngineState, stack: &mut Stack, use_color: bool) { fn run_shell_integration_osc9_9(engine_state: &EngineState, stack: &mut Stack, use_color: bool) {
if let Ok(path) = engine_state.cwd_as_string(Some(stack)) { #[allow(deprecated)]
if let Ok(path) = current_dir_str(engine_state, stack) {
let start_time = Instant::now(); let start_time = Instant::now();
// Otherwise, communicate the path as OSC 9;9 from ConEmu (often used for spawning new tabs in the same dir) // Otherwise, communicate the path as OSC 9;9 from ConEmu (often used for spawning new tabs in the same dir)
@ -1083,7 +1043,8 @@ fn run_shell_integration_osc633(
use_color: bool, use_color: bool,
repl_cmd_line_text: String, repl_cmd_line_text: String,
) { ) {
if let Ok(path) = engine_state.cwd_as_string(Some(stack)) { #[allow(deprecated)]
if let Ok(path) = current_dir_str(engine_state, stack) {
// Supported escape sequences of Microsoft's Visual Studio Code (vscode) // Supported escape sequences of Microsoft's Visual Studio Code (vscode)
// https://code.visualstudio.com/docs/terminal/shell-integration#_supported-escape-sequences // https://code.visualstudio.com/docs/terminal/shell-integration#_supported-escape-sequences
if stack if stack
@ -1108,8 +1069,16 @@ fn run_shell_integration_osc633(
// escape a few things because this says so // escape a few things because this says so
// https://code.visualstudio.com/docs/terminal/shell-integration#_vs-code-custom-sequences-osc-633-st // https://code.visualstudio.com/docs/terminal/shell-integration#_vs-code-custom-sequences-osc-633-st
let replaced_cmd_text =
escape_special_vscode_bytes(&repl_cmd_line_text).unwrap_or(repl_cmd_line_text); let replaced_cmd_text: String = repl_cmd_line_text
.chars()
.map(|c| match c {
'\n' => '\x0a',
'\r' => '\x0d',
'\x1b' => '\x1b',
_ => c,
})
.collect();
//OSC 633 ; E ; <commandline> [; <nonce] ST - Explicitly set the command line with an optional nonce. //OSC 633 ; E ; <commandline> [; <nonce] ST - Explicitly set the command line with an optional nonce.
run_ansi_sequence(&format!( run_ansi_sequence(&format!(
@ -1174,7 +1143,7 @@ fn setup_history(
/// Set up Reedline keybindings based on the provided config
///
fn setup_keybindings(engine_state: &EngineState, line_editor: Reedline) -> Reedline { fn setup_keybindings(engine_state: &EngineState, line_editor: Reedline) -> Reedline {
match create_keybindings(engine_state.get_config()) { return match create_keybindings(engine_state.get_config()) {
Ok(keybindings) => match keybindings { Ok(keybindings) => match keybindings {
KeybindingsMode::Emacs(keybindings) => { KeybindingsMode::Emacs(keybindings) => {
let edit_mode = Box::new(Emacs::new(keybindings)); let edit_mode = Box::new(Emacs::new(keybindings));
@ -1192,7 +1161,7 @@ fn setup_keybindings(engine_state: &EngineState, line_editor: Reedline) -> Reedl
report_shell_error(engine_state, &e); report_shell_error(engine_state, &e);
line_editor line_editor
} }
} };
} }
/// ///
@ -1276,7 +1245,7 @@ fn get_command_finished_marker(
) -> String { ) -> String {
let exit_code = stack let exit_code = stack
.get_env_var(engine_state, "LAST_EXIT_CODE") .get_env_var(engine_state, "LAST_EXIT_CODE")
.and_then(|e| e.as_int().ok()); .and_then(|e| e.as_i64().ok());
if shell_integration_osc633 { if shell_integration_osc633 {
if stack if stack
@ -1387,7 +1356,8 @@ fn run_finaliziation_ansi_sequence(
// Absolute paths with a drive letter, like 'C:', 'D:\', 'E:\foo' // Absolute paths with a drive letter, like 'C:', 'D:\', 'E:\foo'
#[cfg(windows)] #[cfg(windows)]
static DRIVE_PATH_REGEX: std::sync::LazyLock<fancy_regex::Regex> = std::sync::LazyLock::new(|| { static DRIVE_PATH_REGEX: once_cell::sync::Lazy<fancy_regex::Regex> =
once_cell::sync::Lazy::new(|| {
fancy_regex::Regex::new(r"^[a-zA-Z]:[/\\]?").expect("Internal error: regex creation") fancy_regex::Regex::new(r"^[a-zA-Z]:[/\\]?").expect("Internal error: regex creation")
}); });
@ -1451,7 +1421,7 @@ fn are_session_ids_in_sync() {
#[cfg(test)] #[cfg(test)]
mod test_auto_cd { mod test_auto_cd {
use super::{do_auto_cd, escape_special_vscode_bytes, parse_operation, ReplOperation}; use super::{do_auto_cd, parse_operation, ReplOperation};
use nu_path::AbsolutePath; use nu_path::AbsolutePath;
use nu_protocol::engine::{EngineState, Stack}; use nu_protocol::engine::{EngineState, Stack};
use tempfile::tempdir; use tempfile::tempdir;
@ -1589,13 +1559,6 @@ mod test_auto_cd {
symlink(&dir, &link).unwrap(); symlink(&dir, &link).unwrap();
let input = if cfg!(windows) { r".\link" } else { "./link" }; let input = if cfg!(windows) { r".\link" } else { "./link" };
check(tempdir, input, link); check(tempdir, input, link);
let dir = tempdir.join("foo").join("bar");
std::fs::create_dir_all(&dir).unwrap();
let link = tempdir.join("link2");
symlink(&dir, &link).unwrap();
let input = "..";
check(link, input, tempdir);
} }
#[test] #[test]
@ -1608,43 +1571,4 @@ mod test_auto_cd {
let input = if cfg!(windows) { r"foo\" } else { "foo/" }; let input = if cfg!(windows) { r"foo\" } else { "foo/" };
check(tempdir, input, dir); check(tempdir, input, dir);
} }
#[test]
fn escape_vscode_semicolon_test() {
let input = r#"now;is"#;
let expected = r#"now\x3Bis"#;
let actual = escape_special_vscode_bytes(input).unwrap();
assert_eq!(expected, actual);
}
#[test]
fn escape_vscode_backslash_test() {
let input = r#"now\is"#;
let expected = r#"now\\is"#;
let actual = escape_special_vscode_bytes(input).unwrap();
assert_eq!(expected, actual);
}
#[test]
fn escape_vscode_linefeed_test() {
let input = "now\nis";
let expected = r#"now\x0Ais"#;
let actual = escape_special_vscode_bytes(input).unwrap();
assert_eq!(expected, actual);
}
#[test]
fn escape_vscode_tab_null_cr_test() {
let input = "now\t\0\ris";
let expected = r#"now\x09\x00\x0Dis"#;
let actual = escape_special_vscode_bytes(input).unwrap();
assert_eq!(expected, actual);
}
#[test]
fn escape_vscode_multibyte_ok() {
let input = "now🍪is";
let actual = escape_special_vscode_bytes(input).unwrap();
assert_eq!(input, actual);
}
} }

View File

@ -144,6 +144,8 @@ impl Highlighter for NuHighlighter {
} }
FlatShape::Flag => add_colored_token(&shape.1, next_token), FlatShape::Flag => add_colored_token(&shape.1, next_token),
FlatShape::Pipe => add_colored_token(&shape.1, next_token), FlatShape::Pipe => add_colored_token(&shape.1, next_token),
FlatShape::And => add_colored_token(&shape.1, next_token),
FlatShape::Or => add_colored_token(&shape.1, next_token),
FlatShape::Redirection => add_colored_token(&shape.1, next_token), FlatShape::Redirection => add_colored_token(&shape.1, next_token),
FlatShape::Custom(..) => add_colored_token(&shape.1, next_token), FlatShape::Custom(..) => add_colored_token(&shape.1, next_token),
FlatShape::MatchPattern => add_colored_token(&shape.1, next_token), FlatShape::MatchPattern => add_colored_token(&shape.1, next_token),
@ -309,7 +311,6 @@ fn find_matching_block_end_in_expr(
.unwrap_or(expression.span.start); .unwrap_or(expression.span.start);
return match &expression.expr { return match &expression.expr {
// TODO: Can't these be handled with an `_ => None` branch? Refactor
Expr::Bool(_) => None, Expr::Bool(_) => None,
Expr::Int(_) => None, Expr::Int(_) => None,
Expr::Float(_) => None, Expr::Float(_) => None,
@ -336,28 +337,6 @@ fn find_matching_block_end_in_expr(
Expr::Nothing => None, Expr::Nothing => None,
Expr::Garbage => None, Expr::Garbage => None,
Expr::AttributeBlock(ab) => ab
.attributes
.iter()
.find_map(|attr| {
find_matching_block_end_in_expr(
line,
working_set,
&attr.expr,
global_span_offset,
global_cursor_offset,
)
})
.or_else(|| {
find_matching_block_end_in_expr(
line,
working_set,
&ab.item,
global_span_offset,
global_cursor_offset,
)
}),
Expr::Table(table) => { Expr::Table(table) => {
if expr_last == global_cursor_offset { if expr_last == global_cursor_offset {
// cursor is at table end // cursor is at table end

View File

@ -1,8 +1,6 @@
#![allow(clippy::byte_char_slices)]
use nu_cmd_base::hook::eval_hook; use nu_cmd_base::hook::eval_hook;
use nu_engine::{eval_block, eval_block_with_early_return}; use nu_engine::{eval_block, eval_block_with_early_return};
use nu_parser::{lex, parse, unescape_unquote_string, Token, TokenContents}; use nu_parser::{escape_quote_string, lex, parse, unescape_unquote_string, Token, TokenContents};
use nu_protocol::{ use nu_protocol::{
cli_error::report_compile_error, cli_error::report_compile_error,
debugger::WithoutDebug, debugger::WithoutDebug,
@ -12,7 +10,7 @@ use nu_protocol::{
}; };
#[cfg(windows)] #[cfg(windows)]
use nu_utils::enable_vt_processing; use nu_utils::enable_vt_processing;
use nu_utils::{escape_quote_string, perf}; use nu_utils::perf;
use std::path::Path; use std::path::Path;
// This will collect environment variables from std::env and adds them to a stack. // This will collect environment variables from std::env and adds them to a stack.
@ -132,7 +130,7 @@ fn gather_env_vars(
working_set.error(err); working_set.error(err);
} }
if !working_set.parse_errors.is_empty() { if working_set.parse_errors.first().is_some() {
report_capture_error( report_capture_error(
engine_state, engine_state,
&String::from_utf8_lossy(contents), &String::from_utf8_lossy(contents),
@ -176,7 +174,7 @@ fn gather_env_vars(
working_set.error(err); working_set.error(err);
} }
if !working_set.parse_errors.is_empty() { if working_set.parse_errors.first().is_some() {
report_capture_error( report_capture_error(
engine_state, engine_state,
&String::from_utf8_lossy(contents), &String::from_utf8_lossy(contents),
@ -203,35 +201,6 @@ fn gather_env_vars(
} }
} }
/// Print a pipeline with formatting applied based on display_output hook.
///
/// This function should be preferred when printing values resulting from a completed evaluation.
/// For values printed as part of a command's execution, such as values printed by the `print` command,
/// the `PipelineData::print_table` function should be preferred instead as it is not config-dependent.
///
/// `no_newline` controls if we need to attach newline character to output.
pub fn print_pipeline(
engine_state: &mut EngineState,
stack: &mut Stack,
pipeline: PipelineData,
no_newline: bool,
) -> Result<(), ShellError> {
if let Some(hook) = engine_state.get_config().hooks.display_output.clone() {
let pipeline = eval_hook(
engine_state,
stack,
Some(pipeline),
vec![],
&hook,
"display_output",
)?;
pipeline.print_raw(engine_state, no_newline, false)
} else {
// if display_output isn't set, we should still prefer to print with some formatting
pipeline.print_table(engine_state, stack, no_newline, false)
}
}
pub fn eval_source( pub fn eval_source(
engine_state: &mut EngineState, engine_state: &mut EngineState,
stack: &mut Stack, stack: &mut Stack,
@ -252,7 +221,7 @@ pub fn eval_source(
report_shell_error(engine_state, &err); report_shell_error(engine_state, &err);
let code = err.exit_code(); let code = err.exit_code();
stack.set_last_error(&err); stack.set_last_error(&err);
code.unwrap_or(0) code
} }
}; };
@ -265,10 +234,7 @@ pub fn eval_source(
perf!( perf!(
&format!("eval_source {}", &fname), &format!("eval_source {}", &fname),
start_time, start_time,
engine_state engine_state.get_config().use_ansi_coloring
.get_config()
.use_ansi_coloring
.get(engine_state)
); );
exit_code exit_code
@ -301,7 +267,7 @@ fn evaluate_source(
if let Some(err) = working_set.compile_errors.first() { if let Some(err) = working_set.compile_errors.first() {
report_compile_error(&working_set, err); report_compile_error(&working_set, err);
return Ok(true); // Not a fatal error, for now
} }
(output, working_set.render()) (output, working_set.render())
@ -315,8 +281,21 @@ fn evaluate_source(
eval_block::<WithoutDebug>(engine_state, stack, &block, input) eval_block::<WithoutDebug>(engine_state, stack, &block, input)
}?; }?;
let no_newline = matches!(&pipeline, &PipelineData::ByteStream(..)); if let PipelineData::ByteStream(..) = pipeline {
print_pipeline(engine_state, stack, pipeline, no_newline)?; pipeline.print(engine_state, stack, false, false)
} else if let Some(hook) = engine_state.get_config().hooks.display_output.clone() {
let pipeline = eval_hook(
engine_state,
stack,
Some(pipeline),
vec![],
&hook,
"display_output",
)?;
pipeline.print(engine_state, stack, false, false)
} else {
pipeline.print(engine_state, stack, true, false)
}?;
Ok(false) Ok(false)
} }

View File

@ -1,296 +0,0 @@
use nu_protocol::HistoryFileFormat;
use nu_test_support::{nu, Outcome};
use reedline::{
FileBackedHistory, History, HistoryItem, HistoryItemId, ReedlineError, SearchQuery,
SqliteBackedHistory,
};
use rstest::rstest;
use tempfile::TempDir;
struct Test {
cfg_dir: TempDir,
}
impl Test {
fn new(history_format: &'static str) -> Self {
let cfg_dir = tempfile::Builder::new()
.prefix("history_import_test")
.tempdir()
.unwrap();
// Assigning to $env.config.history.file_format seems to work only in startup
// configuration.
std::fs::write(
cfg_dir.path().join("env.nu"),
format!("$env.config.history.file_format = {history_format:?}"),
)
.unwrap();
Self { cfg_dir }
}
fn nu(&self, cmd: impl AsRef<str>) -> Outcome {
let env = [(
"XDG_CONFIG_HOME".to_string(),
self.cfg_dir.path().to_str().unwrap().to_string(),
)];
let env_config = self.cfg_dir.path().join("env.nu");
nu!(envs: env, env_config: env_config, cmd.as_ref())
}
fn open_plaintext(&self) -> Result<FileBackedHistory, ReedlineError> {
FileBackedHistory::with_file(
100,
self.cfg_dir
.path()
.join("nushell")
.join(HistoryFileFormat::Plaintext.default_file_name()),
)
}
fn open_sqlite(&self) -> Result<SqliteBackedHistory, ReedlineError> {
SqliteBackedHistory::with_file(
self.cfg_dir
.path()
.join("nushell")
.join(HistoryFileFormat::Sqlite.default_file_name()),
None,
None,
)
}
fn open_backend(&self, format: HistoryFileFormat) -> Result<Box<dyn History>, ReedlineError> {
fn boxed(be: impl History + 'static) -> Box<dyn History> {
Box::new(be)
}
use HistoryFileFormat::*;
match format {
Plaintext => self.open_plaintext().map(boxed),
Sqlite => self.open_sqlite().map(boxed),
}
}
}
enum HistorySource {
Vec(Vec<HistoryItem>),
Command(&'static str),
}
struct TestCase {
dst_format: HistoryFileFormat,
dst_history: Vec<HistoryItem>,
src_history: HistorySource,
want_history: Vec<HistoryItem>,
}
const EMPTY_TEST_CASE: TestCase = TestCase {
dst_format: HistoryFileFormat::Plaintext,
dst_history: Vec::new(),
src_history: HistorySource::Vec(Vec::new()),
want_history: Vec::new(),
};
impl TestCase {
fn run(self) {
use HistoryFileFormat::*;
let test = Test::new(match self.dst_format {
Plaintext => "plaintext",
Sqlite => "sqlite",
});
save_all(
&mut *test.open_backend(self.dst_format).unwrap(),
self.dst_history,
)
.unwrap();
let outcome = match self.src_history {
HistorySource::Vec(src_history) => {
let src_format = match self.dst_format {
Plaintext => Sqlite,
Sqlite => Plaintext,
};
save_all(&mut *test.open_backend(src_format).unwrap(), src_history).unwrap();
test.nu("history import")
}
HistorySource::Command(cmd) => {
let mut cmd = cmd.to_string();
cmd.push_str(" | history import");
test.nu(cmd)
}
};
assert!(outcome.status.success());
let got = query_all(&*test.open_backend(self.dst_format).unwrap()).unwrap();
// Compare just the commands first, for readability.
fn commands_only(items: &[HistoryItem]) -> Vec<&str> {
items
.iter()
.map(|item| item.command_line.as_str())
.collect()
}
assert_eq!(commands_only(&got), commands_only(&self.want_history));
// If commands match, compare full items.
assert_eq!(got, self.want_history);
}
}
fn query_all(history: &dyn History) -> Result<Vec<HistoryItem>, ReedlineError> {
history.search(SearchQuery::everything(
reedline::SearchDirection::Forward,
None,
))
}
fn save_all(history: &mut dyn History, items: Vec<HistoryItem>) -> Result<(), ReedlineError> {
for item in items {
history.save(item)?;
}
Ok(())
}
const EMPTY_ITEM: HistoryItem = HistoryItem {
command_line: String::new(),
id: None,
start_timestamp: None,
session_id: None,
hostname: None,
cwd: None,
duration: None,
exit_status: None,
more_info: None,
};
#[test]
fn history_import_pipe_string() {
TestCase {
dst_format: HistoryFileFormat::Plaintext,
src_history: HistorySource::Command("echo bar"),
want_history: vec![HistoryItem {
id: Some(HistoryItemId::new(0)),
command_line: "bar".to_string(),
..EMPTY_ITEM
}],
..EMPTY_TEST_CASE
}
.run();
}
#[test]
fn history_import_pipe_record() {
TestCase {
dst_format: HistoryFileFormat::Sqlite,
src_history: HistorySource::Command("[[cwd command]; [/tmp some_command]]"),
want_history: vec![HistoryItem {
id: Some(HistoryItemId::new(1)),
command_line: "some_command".to_string(),
cwd: Some("/tmp".to_string()),
..EMPTY_ITEM
}],
..EMPTY_TEST_CASE
}
.run();
}
#[test]
fn to_empty_plaintext() {
TestCase {
dst_format: HistoryFileFormat::Plaintext,
src_history: HistorySource::Vec(vec![
HistoryItem {
command_line: "foo".to_string(),
..EMPTY_ITEM
},
HistoryItem {
command_line: "bar".to_string(),
..EMPTY_ITEM
},
]),
want_history: vec![
HistoryItem {
id: Some(HistoryItemId::new(0)),
command_line: "foo".to_string(),
..EMPTY_ITEM
},
HistoryItem {
id: Some(HistoryItemId::new(1)),
command_line: "bar".to_string(),
..EMPTY_ITEM
},
],
..EMPTY_TEST_CASE
}
.run()
}
#[test]
fn to_empty_sqlite() {
TestCase {
dst_format: HistoryFileFormat::Sqlite,
src_history: HistorySource::Vec(vec![
HistoryItem {
command_line: "foo".to_string(),
..EMPTY_ITEM
},
HistoryItem {
command_line: "bar".to_string(),
..EMPTY_ITEM
},
]),
want_history: vec![
HistoryItem {
id: Some(HistoryItemId::new(1)),
command_line: "foo".to_string(),
..EMPTY_ITEM
},
HistoryItem {
id: Some(HistoryItemId::new(2)),
command_line: "bar".to_string(),
..EMPTY_ITEM
},
],
..EMPTY_TEST_CASE
}
.run()
}
#[rstest]
#[case::plaintext(HistoryFileFormat::Plaintext)]
#[case::sqlite(HistoryFileFormat::Sqlite)]
fn to_existing(#[case] dst_format: HistoryFileFormat) {
TestCase {
dst_format,
dst_history: vec![
HistoryItem {
id: Some(HistoryItemId::new(0)),
command_line: "original-1".to_string(),
..EMPTY_ITEM
},
HistoryItem {
id: Some(HistoryItemId::new(1)),
command_line: "original-2".to_string(),
..EMPTY_ITEM
},
],
src_history: HistorySource::Vec(vec![HistoryItem {
id: Some(HistoryItemId::new(1)),
command_line: "new".to_string(),
..EMPTY_ITEM
}]),
want_history: vec![
HistoryItem {
id: Some(HistoryItemId::new(0)),
command_line: "original-1".to_string(),
..EMPTY_ITEM
},
HistoryItem {
id: Some(HistoryItemId::new(1)),
command_line: "original-2".to_string(),
..EMPTY_ITEM
},
HistoryItem {
id: Some(HistoryItemId::new(2)),
command_line: "new".to_string(),
..EMPTY_ITEM
},
],
}
.run()
}


@ -1,3 +1,2 @@
mod history_import;
mod keybindings_list; mod keybindings_list;
mod nu_highlight; mod nu_highlight;

File diff suppressed because it is too large


@ -14,7 +14,7 @@ fn create_default_context() -> EngineState {
nu_command::add_shell_command_context(nu_cmd_lang::create_default_context()) nu_command::add_shell_command_context(nu_cmd_lang::create_default_context())
} }
/// creates a new engine with the current path into the completions fixtures folder // creates a new engine with the current path into the completions fixtures folder
pub fn new_engine() -> (AbsolutePathBuf, String, EngineState, Stack) { pub fn new_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
// Target folder inside assets // Target folder inside assets
let dir = fs::fixtures().join("completions"); let dir = fs::fixtures().join("completions");
@ -69,26 +69,7 @@ pub fn new_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
(dir, dir_str, engine_state, stack) (dir, dir_str, engine_state, stack)
} }
/// Adds pseudo PATH env for external completion tests // creates a new engine with the current path into the completions fixtures folder
pub fn new_external_engine() -> EngineState {
let mut engine = create_default_context();
let dir = fs::fixtures().join("external_completions").join("path");
let dir_str = dir.to_string_lossy().to_string();
let internal_span = nu_protocol::Span::new(0, dir_str.len());
engine.add_env_var(
"PATH".to_string(),
Value::List {
vals: vec![Value::String {
val: dir_str,
internal_span,
}],
internal_span,
},
);
engine
}
/// creates a new engine with the current path into the completions fixtures folder
pub fn new_dotnu_engine() -> (AbsolutePathBuf, String, EngineState, Stack) { pub fn new_dotnu_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
// Target folder inside assets // Target folder inside assets
let dir = fs::fixtures().join("dotnu_completions"); let dir = fs::fixtures().join("dotnu_completions");
@ -105,23 +86,6 @@ pub fn new_dotnu_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
// Add $nu // Add $nu
engine_state.generate_nu_constant(); engine_state.generate_nu_constant();
// const $NU_LIB_DIRS
let mut working_set = StateWorkingSet::new(&engine_state);
let var_id = working_set.add_variable(
b"$NU_LIB_DIRS".into(),
Span::unknown(),
nu_protocol::Type::List(Box::new(nu_protocol::Type::String)),
false,
);
working_set.set_variable_const_val(
var_id,
Value::test_list(vec![
Value::string(file(dir.join("lib-dir1")), dir_span),
Value::string(file(dir.join("lib-dir3")), dir_span),
]),
);
let _ = engine_state.merge_delta(working_set.render());
// New stack // New stack
let mut stack = Stack::new(); let mut stack = Stack::new();
@ -131,12 +95,17 @@ pub fn new_dotnu_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
"TEST".to_string(), "TEST".to_string(),
Value::string("NUSHELL".to_string(), dir_span), Value::string("NUSHELL".to_string(), dir_span),
); );
stack.add_env_var( stack.add_env_var(
"NU_LIB_DIRS".into(), "NU_LIB_DIRS".to_string(),
Value::test_list(vec![ Value::List {
vals: vec![
Value::string(file(dir.join("lib-dir1")), dir_span),
Value::string(file(dir.join("lib-dir2")), dir_span), Value::string(file(dir.join("lib-dir2")), dir_span),
Value::string(file(dir.join("lib-dir3")), dir_span), Value::string(file(dir.join("lib-dir3")), dir_span),
]), ],
internal_span: dir_span,
},
); );
// Merge environment into the permanent state // Merge environment into the permanent state
@ -216,8 +185,8 @@ pub fn new_partial_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
(dir, dir_str, engine_state, stack) (dir, dir_str, engine_state, stack)
} }
/// match a list of suggestions with the expected values // match a list of suggestions with the expected values
pub fn match_suggestions(expected: &Vec<&str>, suggestions: &Vec<Suggestion>) { pub fn match_suggestions(expected: &Vec<String>, suggestions: &Vec<Suggestion>) {
let expected_len = expected.len(); let expected_len = expected.len();
let suggestions_len = suggestions.len(); let suggestions_len = suggestions.len();
if expected_len != suggestions_len { if expected_len != suggestions_len {
@ -228,34 +197,28 @@ pub fn match_suggestions(expected: &Vec<&str>, suggestions: &Vec<Suggestion>) {
) )
} }
let suggestions_str = suggestions let suggestoins_str = suggestions
.iter() .iter()
.map(|it| it.value.as_str()) .map(|it| it.value.clone())
.collect::<Vec<_>>(); .collect::<Vec<_>>();
assert_eq!(expected, &suggestions_str); assert_eq!(expected, &suggestoins_str);
} }
/// match a list of suggestions with the expected values // append the separator to the converted path
pub fn match_suggestions_by_string(expected: &[String], suggestions: &Vec<Suggestion>) {
let expected = expected.iter().map(|it| it.as_str()).collect::<Vec<_>>();
match_suggestions(&expected, suggestions);
}
/// append the separator to the converted path
pub fn folder(path: impl Into<PathBuf>) -> String { pub fn folder(path: impl Into<PathBuf>) -> String {
let mut converted_path = file(path); let mut converted_path = file(path);
converted_path.push(MAIN_SEPARATOR); converted_path.push(MAIN_SEPARATOR);
converted_path converted_path
} }
/// convert a given path to string // convert a given path to string
pub fn file(path: impl Into<PathBuf>) -> String { pub fn file(path: impl Into<PathBuf>) -> String {
path.into().into_os_string().into_string().unwrap() path.into().into_os_string().into_string().unwrap()
} }
/// merge_input executes the given input into the engine // merge_input executes the given input into the engine
/// and merges the state // and merges the state
pub fn merge_input( pub fn merge_input(
input: &[u8], input: &[u8],
engine_state: &mut EngineState, engine_state: &mut EngineState,


@ -1,5 +1,3 @@
pub mod completions_helpers; pub mod completions_helpers;
pub use completions_helpers::{ pub use completions_helpers::{file, folder, match_suggestions, merge_input, new_engine};
file, folder, match_suggestions, match_suggestions_by_string, merge_input, new_engine,
};


@ -5,7 +5,7 @@ edition = "2021"
license = "MIT" license = "MIT"
name = "nu-cmd-base" name = "nu-cmd-base"
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-base" repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-base"
version = "0.104.1" version = "0.99.0"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@ -13,10 +13,10 @@ version = "0.104.1"
workspace = true workspace = true
[dependencies] [dependencies]
nu-engine = { path = "../nu-engine", version = "0.104.1", default-features = false } nu-engine = { path = "../nu-engine", version = "0.99.0" }
nu-parser = { path = "../nu-parser", version = "0.104.1" } nu-parser = { path = "../nu-parser", version = "0.99.0" }
nu-path = { path = "../nu-path", version = "0.104.1" } nu-path = { path = "../nu-path", version = "0.99.0" }
nu-protocol = { path = "../nu-protocol", version = "0.104.1", default-features = false } nu-protocol = { path = "../nu-protocol", version = "0.99.0" }
indexmap = { workspace = true } indexmap = { workspace = true }
miette = { workspace = true } miette = { workspace = true }


@ -7,52 +7,46 @@ use nu_protocol::{
engine::{Closure, EngineState, Stack, StateWorkingSet}, engine::{Closure, EngineState, Stack, StateWorkingSet},
PipelineData, PositionalArg, ShellError, Span, Type, Value, VarId, PipelineData, PositionalArg, ShellError, Span, Type, Value, VarId,
}; };
use std::{collections::HashMap, sync::Arc}; use std::sync::Arc;
pub fn eval_env_change_hook( pub fn eval_env_change_hook(
env_change_hook: &HashMap<String, Vec<Value>>, env_change_hook: Option<Value>,
engine_state: &mut EngineState, engine_state: &mut EngineState,
stack: &mut Stack, stack: &mut Stack,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
for (env, hooks) in env_change_hook { if let Some(hook) = env_change_hook {
let before = engine_state.previous_env_vars.get(env); match hook {
let after = stack.get_env_var(engine_state, env); Value::Record { val, .. } => {
for (env_name, hook_value) in &*val {
let before = engine_state.previous_env_vars.get(env_name);
let after = stack.get_env_var(engine_state, env_name);
if before != after { if before != after {
let before = before.cloned().unwrap_or_default(); let before = before.cloned().unwrap_or_default();
let after = after.cloned().unwrap_or_default(); let after = after.cloned().unwrap_or_default();
eval_hooks(
engine_state,
stack,
vec![("$before".into(), before), ("$after".into(), after.clone())],
hooks,
"env_change",
)?;
Arc::make_mut(&mut engine_state.previous_env_vars).insert(env.clone(), after);
}
}
Ok(())
}
pub fn eval_hooks(
engine_state: &mut EngineState,
stack: &mut Stack,
arguments: Vec<(String, Value)>,
hooks: &[Value],
hook_name: &str,
) -> Result<(), ShellError> {
for hook in hooks {
eval_hook( eval_hook(
engine_state, engine_state,
stack, stack,
None, None,
arguments.clone(), vec![("$before".into(), before), ("$after".into(), after.clone())],
hook, hook_value,
&format!("{hook_name} list, recursive"), "env_change",
)?; )?;
Arc::make_mut(&mut engine_state.previous_env_vars)
.insert(env_name.clone(), after);
} }
}
}
x => {
return Err(ShellError::TypeMismatch {
err_message: "record for the 'env_change' hook".to_string(),
span: x.span(),
});
}
}
}
Ok(()) Ok(())
} }
@ -133,7 +127,16 @@ pub fn eval_hook(
} }
} }
Value::List { vals, .. } => { Value::List { vals, .. } => {
eval_hooks(engine_state, stack, arguments, vals, hook_name)?; for val in vals {
eval_hook(
engine_state,
stack,
None,
arguments.clone(),
val,
&format!("{hook_name} list, recursive"),
)?;
}
} }
Value::Record { val, .. } => { Value::Record { val, .. } => {
// Hooks can optionally be a record in this form: // Hooks can optionally be a record in this form:


@ -78,10 +78,10 @@ pub fn get_editor(
get_editor_commandline(&config.buffer_editor, "$env.config.buffer_editor") get_editor_commandline(&config.buffer_editor, "$env.config.buffer_editor")
{ {
Ok(buff_editor) Ok(buff_editor)
} else if let Some(value) = env_vars.get("VISUAL") {
get_editor_commandline(value, "$env.VISUAL")
} else if let Some(value) = env_vars.get("EDITOR") { } else if let Some(value) = env_vars.get("EDITOR") {
get_editor_commandline(value, "$env.EDITOR") get_editor_commandline(value, "$env.EDITOR")
} else if let Some(value) = env_vars.get("VISUAL") {
get_editor_commandline(value, "$env.VISUAL")
} else { } else {
Err(ShellError::GenericError { Err(ShellError::GenericError {
error: "No editor configured".into(), error: "No editor configured".into(),


@ -5,7 +5,7 @@ edition = "2021"
license = "MIT" license = "MIT"
name = "nu-cmd-extra" name = "nu-cmd-extra"
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-extra" repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-extra"
version = "0.104.1" version = "0.99.0"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@ -16,13 +16,13 @@ bench = false
workspace = true workspace = true
[dependencies] [dependencies]
nu-cmd-base = { path = "../nu-cmd-base", version = "0.104.1" } nu-cmd-base = { path = "../nu-cmd-base", version = "0.99.0" }
nu-engine = { path = "../nu-engine", version = "0.104.1", default-features = false } nu-engine = { path = "../nu-engine", version = "0.99.0" }
nu-json = { version = "0.104.1", path = "../nu-json" } nu-json = { version = "0.99.0", path = "../nu-json" }
nu-parser = { path = "../nu-parser", version = "0.104.1" } nu-parser = { path = "../nu-parser", version = "0.99.0" }
nu-pretty-hex = { version = "0.104.1", path = "../nu-pretty-hex" } nu-pretty-hex = { version = "0.99.0", path = "../nu-pretty-hex" }
nu-protocol = { path = "../nu-protocol", version = "0.104.1", default-features = false } nu-protocol = { path = "../nu-protocol", version = "0.99.0" }
nu-utils = { path = "../nu-utils", version = "0.104.1", default-features = false } nu-utils = { path = "../nu-utils", version = "0.99.0" }
# Potential dependencies for extras # Potential dependencies for extras
heck = { workspace = true } heck = { workspace = true }
@ -34,9 +34,8 @@ serde = { workspace = true }
serde_urlencoded = { workspace = true } serde_urlencoded = { workspace = true }
v_htmlescape = { workspace = true } v_htmlescape = { workspace = true }
itertools = { workspace = true } itertools = { workspace = true }
mime = { workspace = true }
[dev-dependencies] [dev-dependencies]
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.104.1" } nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.99.0" }
nu-command = { path = "../nu-command", version = "0.104.1" } nu-command = { path = "../nu-command", version = "0.99.0" }
nu-test-support = { path = "../nu-test-support", version = "0.104.1" } nu-test-support = { path = "../nu-test-support", version = "0.99.0" }


@ -43,12 +43,7 @@ mod test_examples {
signature.operates_on_cell_paths(), signature.operates_on_cell_paths(),
), ),
); );
check_example_evaluates_to_expected_output( check_example_evaluates_to_expected_output(&example, cwd.as_path(), &mut engine_state);
cmd.name(),
&example,
cwd.as_path(),
&mut engine_state,
);
} }
check_all_signature_input_output_types_entries_have_examples( check_all_signature_input_output_types_entries_have_examples(


@ -26,7 +26,7 @@ impl Command for BitsAnd {
.required( .required(
"target", "target",
SyntaxShape::OneOf(vec![SyntaxShape::Binary, SyntaxShape::Int]), SyntaxShape::OneOf(vec![SyntaxShape::Binary, SyntaxShape::Int]),
"Right-hand side of the operation.", "right-hand side of the operation",
) )
.named( .named(
"endian", "endian",


@ -3,10 +3,10 @@ use std::io::{self, Read, Write};
use nu_cmd_base::input_handler::{operate, CmdArgument}; use nu_cmd_base::input_handler::{operate, CmdArgument};
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use nu_protocol::{shell_error::io::IoError, Signals}; use nu_protocol::Signals;
use num_traits::ToPrimitive; use num_traits::ToPrimitive;
struct Arguments { pub struct Arguments {
cell_paths: Option<Vec<CellPath>>, cell_paths: Option<Vec<CellPath>>,
} }
@ -17,15 +17,15 @@ impl CmdArgument for Arguments {
} }
#[derive(Clone)] #[derive(Clone)]
pub struct FormatBits; pub struct BitsInto;
impl Command for FormatBits { impl Command for BitsInto {
fn name(&self) -> &str { fn name(&self) -> &str {
"format bits" "into bits"
} }
fn signature(&self) -> Signature { fn signature(&self) -> Signature {
Signature::build("format bits") Signature::build("into bits")
.input_output_types(vec![ .input_output_types(vec![
(Type::Binary, Type::String), (Type::Binary, Type::String),
(Type::Int, Type::String), (Type::Int, Type::String),
@ -40,17 +40,17 @@ impl Command for FormatBits {
.rest( .rest(
"rest", "rest",
SyntaxShape::CellPath, SyntaxShape::CellPath,
"For a data structure input, convert data at the given cell paths.", "for a data structure input, convert data at the given cell paths",
) )
.category(Category::Conversions) .category(Category::Conversions)
} }
fn description(&self) -> &str { fn description(&self) -> &str {
"Convert value to a string of binary data represented by 0 and 1." "Convert value to a binary primitive."
} }
fn search_terms(&self) -> Vec<&str> { fn search_terms(&self) -> Vec<&str> {
vec!["convert", "cast", "binary"] vec!["convert", "cast"]
} }
fn run( fn run(
@ -60,49 +60,49 @@ impl Command for FormatBits {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
format_bits(engine_state, stack, call, input) into_bits(engine_state, stack, call, input)
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {
vec![ vec![
Example { Example {
description: "convert a binary value into a string, padded to 8 places with 0s", description: "convert a binary value into a string, padded to 8 places with 0s",
example: "0x[1] | format bits", example: "0x[1] | into bits",
result: Some(Value::string("00000001", result: Some(Value::string("00000001",
Span::test_data(), Span::test_data(),
)), )),
}, },
Example { Example {
description: "convert an int into a string, padded to 8 places with 0s", description: "convert an int into a string, padded to 8 places with 0s",
example: "1 | format bits", example: "1 | into bits",
result: Some(Value::string("00000001", result: Some(Value::string("00000001",
Span::test_data(), Span::test_data(),
)), )),
}, },
Example { Example {
description: "convert a filesize value into a string, padded to 8 places with 0s", description: "convert a filesize value into a string, padded to 8 places with 0s",
example: "1b | format bits", example: "1b | into bits",
result: Some(Value::string("00000001", result: Some(Value::string("00000001",
Span::test_data(), Span::test_data(),
)), )),
}, },
Example { Example {
description: "convert a duration value into a string, padded to 8 places with 0s", description: "convert a duration value into a string, padded to 8 places with 0s",
example: "1ns | format bits", example: "1ns | into bits",
result: Some(Value::string("00000001", result: Some(Value::string("00000001",
Span::test_data(), Span::test_data(),
)), )),
}, },
Example { Example {
description: "convert a boolean value into a string, padded to 8 places with 0s", description: "convert a boolean value into a string, padded to 8 places with 0s",
example: "true | format bits", example: "true | into bits",
result: Some(Value::string("00000001", result: Some(Value::string("00000001",
Span::test_data(), Span::test_data(),
)), )),
}, },
Example { Example {
description: "convert a string into a raw binary string, padded with 0s to 8 places", description: "convert a string into a raw binary string, padded with 0s to 8 places",
example: "'nushell.sh' | format bits", example: "'nushell.sh' | into bits",
result: Some(Value::string("01101110 01110101 01110011 01101000 01100101 01101100 01101100 00101110 01110011 01101000", result: Some(Value::string("01101110 01110101 01110011 01101000 01100101 01101100 01101100 00101110 01110011 01101000",
Span::test_data(), Span::test_data(),
)), )),
@ -111,7 +111,7 @@ impl Command for FormatBits {
} }
} }
fn format_bits( fn into_bits(
engine_state: &EngineState, engine_state: &EngineState,
stack: &mut Stack, stack: &mut Stack,
call: &Call, call: &Call,
@ -141,11 +141,7 @@ fn byte_stream_to_bits(stream: ByteStream, head: Span) -> ByteStream {
ByteStreamType::String, ByteStreamType::String,
move |buffer| { move |buffer| {
let mut byte = [0]; let mut byte = [0];
if reader if reader.read(&mut byte[..]).err_span(head)? > 0 {
.read(&mut byte[..])
.map_err(|err| IoError::new(err.kind(), head, None))?
> 0
{
// Format the byte as bits // Format the byte as bits
if is_first { if is_first {
is_first = false; is_first = false;
@ -197,7 +193,7 @@ fn convert_to_smallest_number_type(num: i64, span: Span) -> Value {
} }
} }
fn action(input: &Value, _args: &Arguments, span: Span) -> Value { pub fn action(input: &Value, _args: &Arguments, span: Span) -> Value {
match input { match input {
Value::Binary { val, .. } => { Value::Binary { val, .. } => {
let mut raw_string = "".to_string(); let mut raw_string = "".to_string();
@ -207,7 +203,7 @@ fn action(input: &Value, _args: &Arguments, span: Span) -> Value {
Value::string(raw_string.trim(), span) Value::string(raw_string.trim(), span)
} }
Value::Int { val, .. } => convert_to_smallest_number_type(*val, span), Value::Int { val, .. } => convert_to_smallest_number_type(*val, span),
Value::Filesize { val, .. } => convert_to_smallest_number_type(val.get(), span), Value::Filesize { val, .. } => convert_to_smallest_number_type(*val, span),
Value::Duration { val, .. } => convert_to_smallest_number_type(*val, span), Value::Duration { val, .. } => convert_to_smallest_number_type(*val, span),
Value::String { val, .. } => { Value::String { val, .. } => {
let raw_bytes = val.as_bytes(); let raw_bytes = val.as_bytes();
@ -243,6 +239,6 @@ mod test {
fn test_examples() { fn test_examples() {
use crate::test_examples; use crate::test_examples;
test_examples(FormatBits {}) test_examples(BitsInto {})
} }
} }


@ -1,5 +1,6 @@
mod and; mod and;
mod bits_; mod bits_;
mod into;
mod not; mod not;
mod or; mod or;
mod rotate_left; mod rotate_left;
@ -10,6 +11,7 @@ mod xor;
pub use and::BitsAnd; pub use and::BitsAnd;
pub use bits_::Bits; pub use bits_::Bits;
pub use into::BitsInto;
pub use not::BitsNot; pub use not::BitsNot;
pub use or::BitsOr; pub use or::BitsOr;
pub use rotate_left::BitsRol; pub use rotate_left::BitsRol;
@ -135,7 +137,7 @@ where
(min, max) => (rhs, lhs, max, min), (min, max) => (rhs, lhs, max, min),
}; };
let pad = iter::repeat_n(0, max_len - min_len); let pad = iter::repeat(0).take(max_len - min_len);
let mut a; let mut a;
let mut b; let mut b;
@ -159,10 +161,9 @@ where
} }
(Value::Binary { .. }, Value::Int { .. }) | (Value::Int { .. }, Value::Binary { .. }) => { (Value::Binary { .. }, Value::Int { .. }) | (Value::Int { .. }, Value::Binary { .. }) => {
Value::error( Value::error(
ShellError::OnlySupportsThisInputType { ShellError::PipelineMismatch {
exp_input_type: "input, and argument, to be both int or both binary" exp_input_type: "input, and argument, to be both int or both binary"
.to_string(), .to_string(),
wrong_type: "int and binary".to_string(),
dst_span: rhs.span(), dst_span: rhs.span(),
src_span: span, src_span: span,
}, },


@ -27,7 +27,7 @@ impl Command for BitsOr {
.required( .required(
"target", "target",
SyntaxShape::OneOf(vec![SyntaxShape::Binary, SyntaxShape::Int]), SyntaxShape::OneOf(vec![SyntaxShape::Binary, SyntaxShape::Int]),
"Right-hand side of the operation.", "right-hand side of the operation",
) )
.named( .named(
"endian", "endian",


@ -37,7 +37,7 @@ impl Command for BitsRol {
), ),
]) ])
.allow_variants_without_examples(true) .allow_variants_without_examples(true)
.required("bits", SyntaxShape::Int, "Number of bits to rotate left.") .required("bits", SyntaxShape::Int, "number of bits to rotate left")
.switch( .switch(
"signed", "signed",
"always treat input number as a signed number", "always treat input number as a signed number",


@ -37,7 +37,7 @@ impl Command for BitsRor {
), ),
]) ])
.allow_variants_without_examples(true) .allow_variants_without_examples(true)
.required("bits", SyntaxShape::Int, "Number of bits to rotate right.") .required("bits", SyntaxShape::Int, "number of bits to rotate right")
.switch( .switch(
"signed", "signed",
"always treat input number as a signed number", "always treat input number as a signed number",


@ -40,7 +40,7 @@ impl Command for BitsShl {
), ),
]) ])
.allow_variants_without_examples(true) .allow_variants_without_examples(true)
.required("bits", SyntaxShape::Int, "Number of bits to shift left.") .required("bits", SyntaxShape::Int, "number of bits to shift left")
.switch( .switch(
"signed", "signed",
"always treat input number as a signed number", "always treat input number as a signed number",
@ -249,7 +249,7 @@ fn shift_bytes_and_bits_left(data: &[u8], byte_shift: usize, bit_shift: usize) -
Last | Only => lhs << bit_shift, Last | Only => lhs << bit_shift,
_ => (lhs << bit_shift) | (rhs >> (8 - bit_shift)), _ => (lhs << bit_shift) | (rhs >> (8 - bit_shift)),
}) })
.chain(iter::repeat_n(0, byte_shift)) .chain(iter::repeat(0).take(byte_shift))
.collect::<Vec<u8>>() .collect::<Vec<u8>>()
} }


@ -37,7 +37,7 @@ impl Command for BitsShr {
), ),
]) ])
.allow_variants_without_examples(true) .allow_variants_without_examples(true)
.required("bits", SyntaxShape::Int, "Number of bits to shift right.") .required("bits", SyntaxShape::Int, "number of bits to shift right")
.switch( .switch(
"signed", "signed",
"always treat input number as a signed number", "always treat input number as a signed number",


@ -27,7 +27,7 @@ impl Command for BitsXor {
.required( .required(
"target", "target",
SyntaxShape::OneOf(vec![SyntaxShape::Binary, SyntaxShape::Int]), SyntaxShape::OneOf(vec![SyntaxShape::Binary, SyntaxShape::Int]),
"Right-hand side of the operation.", "right-hand side of the operation",
) )
.named( .named(
"endian", "endian",


@ -2,11 +2,11 @@ use nu_cmd_base::input_handler::{operate, CellPathOnlyArgs};
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
#[derive(Clone)] #[derive(Clone)]
pub struct FormatNumber; pub struct Fmt;
impl Command for FormatNumber { impl Command for Fmt {
fn name(&self) -> &str { fn name(&self) -> &str {
"format number" "fmt"
} }
fn description(&self) -> &str { fn description(&self) -> &str {
@ -14,19 +14,19 @@ impl Command for FormatNumber {
} }
fn signature(&self) -> nu_protocol::Signature { fn signature(&self) -> nu_protocol::Signature {
Signature::build("format number") Signature::build("fmt")
.input_output_types(vec![(Type::Number, Type::record())]) .input_output_types(vec![(Type::Number, Type::record())])
.category(Category::Conversions) .category(Category::Conversions)
} }
fn search_terms(&self) -> Vec<&str> { fn search_terms(&self) -> Vec<&str> {
vec!["display", "render", "fmt"] vec!["display", "render", "format"]
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {
vec![Example { vec![Example {
description: "Get a record containing multiple formats for the number 42", description: "Get a record containing multiple formats for the number 42",
example: "42 | format number", example: "42 | fmt",
result: Some(Value::test_record(record! { result: Some(Value::test_record(record! {
"binary" => Value::test_string("0b101010"), "binary" => Value::test_string("0b101010"),
"debug" => Value::test_string("42"), "debug" => Value::test_string("42"),
@ -47,11 +47,11 @@ impl Command for FormatNumber {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
format_number(engine_state, stack, call, input) fmt(engine_state, stack, call, input)
} }
} }
pub(crate) fn format_number( fn fmt(
engine_state: &EngineState, engine_state: &EngineState,
stack: &mut Stack, stack: &mut Stack,
call: &Call, call: &Call,
@ -64,9 +64,9 @@ pub(crate) fn format_number(
fn action(input: &Value, _args: &CellPathOnlyArgs, span: Span) -> Value { fn action(input: &Value, _args: &CellPathOnlyArgs, span: Span) -> Value {
match input { match input {
Value::Float { val, .. } => format_f64(*val, span), Value::Float { val, .. } => fmt_it_64(*val, span),
Value::Int { val, .. } => format_i64(*val, span), Value::Int { val, .. } => fmt_it(*val, span),
Value::Filesize { val, .. } => format_i64(val.get(), span), Value::Filesize { val, .. } => fmt_it(*val, span),
// Propagate errors by explicitly matching them before the final case. // Propagate errors by explicitly matching them before the final case.
Value::Error { .. } => input.clone(), Value::Error { .. } => input.clone(),
other => Value::error( other => Value::error(
@ -81,7 +81,7 @@ fn action(input: &Value, _args: &CellPathOnlyArgs, span: Span) -> Value {
} }
} }
fn format_i64(num: i64, span: Span) -> Value { fn fmt_it(num: i64, span: Span) -> Value {
Value::record( Value::record(
record! { record! {
"binary" => Value::string(format!("{num:#b}"), span), "binary" => Value::string(format!("{num:#b}"), span),
@ -97,7 +97,7 @@ fn format_i64(num: i64, span: Span) -> Value {
) )
} }
fn format_f64(num: f64, span: Span) -> Value { fn fmt_it_64(num: f64, span: Span) -> Value {
Value::record( Value::record(
record! { record! {
"binary" => Value::string(format!("{:b}", num.to_bits()), span), "binary" => Value::string(format!("{:b}", num.to_bits()), span),
@ -121,6 +121,6 @@ mod test {
fn test_examples() { fn test_examples() {
use crate::test_examples; use crate::test_examples;
test_examples(FormatNumber {}) test_examples(Fmt {})
} }
} }


@ -0,0 +1,3 @@
mod fmt;
pub(crate) use fmt::Fmt;


@ -25,8 +25,8 @@ impl Command for EachWhile {
)]) )])
.required( .required(
"closure", "closure",
SyntaxShape::Closure(Some(vec![SyntaxShape::Any])), SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
"The closure to run.", "the closure to run",
) )
.category(Category::Filters) .category(Category::Filters)
} }


@ -13,17 +13,14 @@ impl Command for Rotate {
.input_output_types(vec![ .input_output_types(vec![
(Type::record(), Type::table()), (Type::record(), Type::table()),
(Type::table(), Type::table()), (Type::table(), Type::table()),
(Type::list(Type::Any), Type::table()),
(Type::String, Type::table()),
]) ])
.switch("ccw", "rotate counter clockwise", None) .switch("ccw", "rotate counter clockwise", None)
.rest( .rest(
"rest", "rest",
SyntaxShape::String, SyntaxShape::String,
"The names to give columns once rotated.", "the names to give columns once rotated",
) )
.category(Category::Filters) .category(Category::Filters)
.allow_variants_without_examples(true)
} }
fn description(&self) -> &str { fn description(&self) -> &str {


@ -16,7 +16,7 @@ impl Command for UpdateCells {
.required( .required(
"closure", "closure",
SyntaxShape::Closure(Some(vec![SyntaxShape::Any])), SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
"The closure to run an update for each cell.", "the closure to run an update for each cell",
) )
.named( .named(
"columns", "columns",


@ -2,4 +2,4 @@ mod from;
mod to; mod to;
pub(crate) use from::url::FromUrl; pub(crate) use from::url::FromUrl;
pub use to::html::ToHtml; pub(crate) use to::html::ToHtml;


@ -330,12 +330,7 @@ fn to_html(
output_string = run_regexes(&regex_hm, &output_string); output_string = run_regexes(&regex_hm, &output_string);
} }
let metadata = PipelineMetadata { Ok(Value::string(output_string, head).into_pipeline_data())
data_source: nu_protocol::DataSource::None,
content_type: Some(mime::TEXT_HTML_UTF_8.to_string()),
};
Ok(Value::string(output_string, head).into_pipeline_data_with_metadata(metadata))
} }
fn theme_demo(span: Span) -> PipelineData { fn theme_demo(span: Span) -> PipelineData {


@ -1,9 +1,9 @@
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
#[derive(Clone)] #[derive(Clone)]
pub struct MathArcCos; pub struct SubCommand;
impl Command for MathArcCos { impl Command for SubCommand {
fn name(&self) -> &str { fn name(&self) -> &str {
"math arccos" "math arccos"
} }
@ -114,6 +114,6 @@ mod test {
fn test_examples() { fn test_examples() {
use crate::test_examples; use crate::test_examples;
test_examples(MathArcCos {}) test_examples(SubCommand {})
} }
} }


@ -1,9 +1,9 @@
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
#[derive(Clone)] #[derive(Clone)]
pub struct MathArcCosH; pub struct SubCommand;
impl Command for MathArcCosH { impl Command for SubCommand {
fn name(&self) -> &str { fn name(&self) -> &str {
"math arccosh" "math arccosh"
} }
@ -100,6 +100,6 @@ mod test {
fn test_examples() { fn test_examples() {
use crate::test_examples; use crate::test_examples;
test_examples(MathArcCosH {}) test_examples(SubCommand {})
} }
} }


@ -1,9 +1,9 @@
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
#[derive(Clone)] #[derive(Clone)]
pub struct MathArcSin; pub struct SubCommand;
impl Command for MathArcSin { impl Command for SubCommand {
fn name(&self) -> &str { fn name(&self) -> &str {
"math arcsin" "math arcsin"
} }
@ -115,6 +115,6 @@ mod test {
fn test_examples() { fn test_examples() {
use crate::test_examples; use crate::test_examples;
test_examples(MathArcSin {}) test_examples(SubCommand {})
} }
} }


@ -1,9 +1,9 @@
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
#[derive(Clone)] #[derive(Clone)]
pub struct MathArcSinH; pub struct SubCommand;
impl Command for MathArcSinH { impl Command for SubCommand {
fn name(&self) -> &str { fn name(&self) -> &str {
"math arcsinh" "math arcsinh"
} }
@ -88,6 +88,6 @@ mod test {
fn test_examples() { fn test_examples() {
use crate::test_examples; use crate::test_examples;
test_examples(MathArcSinH {}) test_examples(SubCommand {})
} }
} }


@ -1,9 +1,9 @@
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
#[derive(Clone)] #[derive(Clone)]
pub struct MathArcTan; pub struct SubCommand;
impl Command for MathArcTan { impl Command for SubCommand {
fn name(&self) -> &str { fn name(&self) -> &str {
"math arctan" "math arctan"
} }
@ -102,6 +102,6 @@ mod test {
fn test_examples() { fn test_examples() {
use crate::test_examples; use crate::test_examples;
test_examples(MathArcTan {}) test_examples(SubCommand {})
} }
} }


@ -1,9 +1,9 @@
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
#[derive(Clone)] #[derive(Clone)]
pub struct MathArcTanH; pub struct SubCommand;
impl Command for MathArcTanH { impl Command for SubCommand {
fn name(&self) -> &str { fn name(&self) -> &str {
"math arctanh" "math arctanh"
} }
@ -101,6 +101,6 @@ mod test {
fn test_examples() { fn test_examples() {
use crate::test_examples; use crate::test_examples;
test_examples(MathArcTanH {}) test_examples(SubCommand {})
} }
} }


@ -1,9 +1,9 @@
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
#[derive(Clone)] #[derive(Clone)]
pub struct MathCos; pub struct SubCommand;
impl Command for MathCos { impl Command for SubCommand {
fn name(&self) -> &str { fn name(&self) -> &str {
"math cos" "math cos"
} }
@ -108,6 +108,6 @@ mod test {
fn test_examples() { fn test_examples() {
use crate::test_examples; use crate::test_examples;
test_examples(MathCos {}) test_examples(SubCommand {})
} }
} }


@ -1,9 +1,9 @@
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
#[derive(Clone)] #[derive(Clone)]
pub struct MathCosH; pub struct SubCommand;
impl Command for MathCosH { impl Command for SubCommand {
fn name(&self) -> &str { fn name(&self) -> &str {
"math cosh" "math cosh"
} }
@ -88,6 +88,6 @@ mod test {
fn test_examples() { fn test_examples() {
use crate::test_examples; use crate::test_examples;
test_examples(MathCosH {}) test_examples(SubCommand {})
} }
} }


@ -1,9 +1,9 @@
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
#[derive(Clone)] #[derive(Clone)]
pub struct MathExp; pub struct SubCommand;
impl Command for MathExp { impl Command for SubCommand {
fn name(&self) -> &str { fn name(&self) -> &str {
"math exp" "math exp"
} }
@ -93,6 +93,6 @@ mod test {
fn test_examples() { fn test_examples() {
use crate::test_examples; use crate::test_examples;
test_examples(MathExp {}) test_examples(SubCommand {})
} }
} }


@ -1,9 +1,9 @@
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
#[derive(Clone)] #[derive(Clone)]
pub struct MathLn; pub struct SubCommand;
impl Command for MathLn { impl Command for SubCommand {
fn name(&self) -> &str { fn name(&self) -> &str {
"math ln" "math ln"
} }
@ -100,6 +100,6 @@ mod test {
fn test_examples() { fn test_examples() {
use crate::test_examples; use crate::test_examples;
test_examples(MathLn {}) test_examples(SubCommand {})
} }
} }


@ -15,19 +15,19 @@ mod arcsinh;
mod arctan; mod arctan;
mod arctanh; mod arctanh;
pub use cos::MathCos; pub use cos::SubCommand as MathCos;
pub use cosh::MathCosH; pub use cosh::SubCommand as MathCosH;
pub use sin::MathSin; pub use sin::SubCommand as MathSin;
pub use sinh::MathSinH; pub use sinh::SubCommand as MathSinH;
pub use tan::MathTan; pub use tan::SubCommand as MathTan;
pub use tanh::MathTanH; pub use tanh::SubCommand as MathTanH;
pub use exp::MathExp; pub use exp::SubCommand as MathExp;
pub use ln::MathLn; pub use ln::SubCommand as MathLn;
pub use arccos::MathArcCos; pub use arccos::SubCommand as MathArcCos;
pub use arccosh::MathArcCosH; pub use arccosh::SubCommand as MathArcCosH;
pub use arcsin::MathArcSin; pub use arcsin::SubCommand as MathArcSin;
pub use arcsinh::MathArcSinH; pub use arcsinh::SubCommand as MathArcSinH;
pub use arctan::MathArcTan; pub use arctan::SubCommand as MathArcTan;
pub use arctanh::MathArcTanH; pub use arctanh::SubCommand as MathArcTanH;


@ -1,9 +1,9 @@
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
#[derive(Clone)] #[derive(Clone)]
pub struct MathSin; pub struct SubCommand;
impl Command for MathSin { impl Command for SubCommand {
fn name(&self) -> &str { fn name(&self) -> &str {
"math sin" "math sin"
} }
@ -108,6 +108,6 @@ mod test {
fn test_examples() { fn test_examples() {
use crate::test_examples; use crate::test_examples;
test_examples(MathSin {}) test_examples(SubCommand {})
} }
} }


@ -1,9 +1,9 @@
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
#[derive(Clone)] #[derive(Clone)]
pub struct MathSinH; pub struct SubCommand;
impl Command for MathSinH { impl Command for SubCommand {
fn name(&self) -> &str { fn name(&self) -> &str {
"math sinh" "math sinh"
} }
@ -87,6 +87,6 @@ mod test {
fn test_examples() { fn test_examples() {
use crate::test_examples; use crate::test_examples;
test_examples(MathSinH {}) test_examples(SubCommand {})
} }
} }


@ -1,9 +1,9 @@
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
#[derive(Clone)] #[derive(Clone)]
pub struct MathTan; pub struct SubCommand;
impl Command for MathTan { impl Command for SubCommand {
fn name(&self) -> &str { fn name(&self) -> &str {
"math tan" "math tan"
} }
@ -106,6 +106,6 @@ mod test {
fn test_examples() { fn test_examples() {
use crate::test_examples; use crate::test_examples;
test_examples(MathTan {}) test_examples(SubCommand {})
} }
} }


@ -1,9 +1,9 @@
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
#[derive(Clone)] #[derive(Clone)]
pub struct MathTanH; pub struct SubCommand;
impl Command for MathTanH { impl Command for SubCommand {
fn name(&self) -> &str { fn name(&self) -> &str {
"math tanh" "math tanh"
} }
@ -86,6 +86,6 @@ mod test {
fn test_examples() { fn test_examples() {
use crate::test_examples; use crate::test_examples;
test_examples(MathTanH {}) test_examples(SubCommand {})
} }
} }


@ -1,12 +1,14 @@
mod bits; mod bits;
mod conversions;
mod filters; mod filters;
mod formats; mod formats;
mod math; mod math;
mod platform; mod platform;
mod strings; mod strings;
pub use bits::{Bits, BitsAnd, BitsNot, BitsOr, BitsRol, BitsRor, BitsShl, BitsShr, BitsXor}; pub use bits::{
pub use formats::ToHtml; Bits, BitsAnd, BitsInto, BitsNot, BitsOr, BitsRol, BitsRor, BitsShl, BitsShr, BitsXor,
};
pub use math::{MathArcCos, MathArcCosH, MathArcSin, MathArcSinH, MathArcTan, MathArcTanH}; pub use math::{MathArcCos, MathArcCosH, MathArcSin, MathArcSinH, MathArcTan, MathArcTanH};
pub use math::{MathCos, MathCosH, MathSin, MathSinH, MathTan, MathTanH}; pub use math::{MathCos, MathCosH, MathSin, MathSinH, MathTan, MathTanH};
pub use math::{MathExp, MathLn}; pub use math::{MathExp, MathLn};
@ -26,6 +28,8 @@ pub fn add_extra_command_context(mut engine_state: EngineState) -> EngineState {
}; };
} }
bind_command!(conversions::Fmt);
bind_command!( bind_command!(
filters::UpdateCells, filters::UpdateCells,
filters::EachWhile, filters::EachWhile,
@ -41,8 +45,6 @@ pub fn add_extra_command_context(mut engine_state: EngineState) -> EngineState {
bind_command!( bind_command!(
strings::format::FormatPattern, strings::format::FormatPattern,
strings::format::FormatBits,
strings::format::FormatNumber,
strings::str_::case::Str, strings::str_::case::Str,
strings::str_::case::StrCamelCase, strings::str_::case::StrCamelCase,
strings::str_::case::StrKebabCase, strings::str_::case::StrKebabCase,
@ -52,12 +54,12 @@ pub fn add_extra_command_context(mut engine_state: EngineState) -> EngineState {
strings::str_::case::StrTitleCase strings::str_::case::StrTitleCase
); );
bind_command!(ToHtml, formats::FromUrl); bind_command!(formats::ToHtml, formats::FromUrl);
// Bits // Bits
bind_command! { bind_command! {
Bits, Bits,
BitsAnd, BitsAnd,
BitsInto,
BitsNot, BitsNot,
BitsOr, BitsOr,
BitsRol, BitsRol,


@ -38,7 +38,7 @@ impl Command for SubCommand {
.rest( .rest(
"cell path", "cell path",
SyntaxShape::CellPath, SyntaxShape::CellPath,
"For a data structure input, add a gradient to strings at the given cell paths.", "for a data structure input, add a gradient to strings at the given cell paths",
) )
.input_output_types(vec![ .input_output_types(vec![
(Type::String, Type::String), (Type::String, Type::String),


@ -18,7 +18,7 @@ impl Command for FormatPattern {
.required( .required(
"pattern", "pattern",
SyntaxShape::String, SyntaxShape::String,
"The pattern to output. e.g.) \"{foo}: {bar}\".", "the pattern to output. e.g.) \"{foo}: {bar}\"",
) )
.allow_variants_without_examples(true) .allow_variants_without_examples(true)
.category(Category::Strings) .category(Category::Strings)
@ -253,11 +253,12 @@ fn format_record(
optional: false, optional: false,
}) })
.collect(); .collect();
match data_as_value.clone().follow_cell_path(&path_members, false) {
let expanded_string = data_as_value Ok(value_at_column) => {
.follow_cell_path(&path_members, false)? output.push_str(value_at_column.to_expanded_string(", ", config).as_str())
.to_expanded_string(", ", config); }
output.push_str(expanded_string.as_str()) Err(se) => return Err(se),
}
} }
} }
} }


@ -1,7 +1,3 @@
mod bits;
mod command; mod command;
mod number;
pub(crate) use bits::FormatBits;
pub(crate) use command::FormatPattern; pub(crate) use command::FormatPattern;
pub(crate) use number::FormatNumber;


@ -3,9 +3,9 @@ use heck::ToLowerCamelCase;
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
#[derive(Clone)] #[derive(Clone)]
pub struct StrCamelCase; pub struct SubCommand;
impl Command for StrCamelCase { impl Command for SubCommand {
fn name(&self) -> &str { fn name(&self) -> &str {
"str camel-case" "str camel-case"
} }
@ -25,7 +25,7 @@ impl Command for StrCamelCase {
.rest( .rest(
"rest", "rest",
SyntaxShape::CellPath, SyntaxShape::CellPath,
"For a data structure input, convert strings at the given cell paths.", "For a data structure input, convert strings at the given cell paths",
) )
.category(Category::Strings) .category(Category::Strings)
} }
@ -91,6 +91,6 @@ mod test {
fn test_examples() { fn test_examples() {
use crate::test_examples; use crate::test_examples;
test_examples(StrCamelCase {}) test_examples(SubCommand {})
} }
} }


@ -3,9 +3,9 @@ use heck::ToKebabCase;
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
#[derive(Clone)] #[derive(Clone)]
pub struct StrKebabCase; pub struct SubCommand;
impl Command for StrKebabCase { impl Command for SubCommand {
fn name(&self) -> &str { fn name(&self) -> &str {
"str kebab-case" "str kebab-case"
} }
@ -25,7 +25,7 @@ impl Command for StrKebabCase {
.rest( .rest(
"rest", "rest",
SyntaxShape::CellPath, SyntaxShape::CellPath,
"For a data structure input, convert strings at the given cell paths.", "For a data structure input, convert strings at the given cell paths",
) )
.category(Category::Strings) .category(Category::Strings)
} }
@ -90,6 +90,6 @@ mod tests {
fn test_examples() { fn test_examples() {
use crate::test_examples; use crate::test_examples;
test_examples(StrKebabCase {}) test_examples(SubCommand {})
} }
} }


@ -6,13 +6,13 @@ mod snake_case;
mod str_; mod str_;
mod title_case; mod title_case;
pub use camel_case::StrCamelCase; pub use camel_case::SubCommand as StrCamelCase;
pub use kebab_case::StrKebabCase; pub use kebab_case::SubCommand as StrKebabCase;
pub use pascal_case::StrPascalCase; pub use pascal_case::SubCommand as StrPascalCase;
pub use screaming_snake_case::StrScreamingSnakeCase; pub use screaming_snake_case::SubCommand as StrScreamingSnakeCase;
pub use snake_case::StrSnakeCase; pub use snake_case::SubCommand as StrSnakeCase;
pub use str_::Str; pub use str_::Str;
pub use title_case::StrTitleCase; pub use title_case::SubCommand as StrTitleCase;
use nu_cmd_base::input_handler::{operate as general_operate, CmdArgument}; use nu_cmd_base::input_handler::{operate as general_operate, CmdArgument};
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;

Some files were not shown because too many files have changed in this diff