Compare commits


No commits in common. "main" and "0.97.0" have entirely different histories.
main ... 0.97.0

1400 changed files with 35419 additions and 75736 deletions


@@ -13,7 +13,7 @@ body:
id: repro
attributes:
label: How to reproduce
description: Steps to reproduce the behavior (including succinct code examples or screenshots of the observed behavior)
description: Steps to reproduce the behavior
placeholder: |
1.
2.
@@ -28,6 +28,13 @@ body:
placeholder: I expected nu to...
validations:
required: true
- type: textarea
id: screenshots
attributes:
label: Screenshots
description: Please add any relevant screenshots here, if any
validations:
required: false
- type: textarea
id: config
attributes:
@@ -48,3 +55,10 @@ body:
| installed_plugins | binaryview, chart bar, chart line, fetch, from bson, from sqlite, inc, match, post, ps, query json, s3, selector, start, sys, textview, to bson, to sqlite, tree, xpath |
validations:
required: true
- type: textarea
id: context
attributes:
label: Additional context
description: Add any other context about the problem here.
validations:
required: false

.github/labeler.yml vendored (40 changed lines)

@@ -1,40 +0,0 @@
# A bot for automatically labelling pull requests
# See https://github.com/actions/labeler
dataframe:
- changed-files:
- any-glob-to-any-file:
- crates/nu_plugin_polars/**
std-library:
- changed-files:
- any-glob-to-any-file:
- crates/nu-std/**
ci:
- changed-files:
- any-glob-to-any-file:
- .github/workflows/**
LSP:
- changed-files:
- any-glob-to-any-file:
- crates/nu-lsp/**
parser:
- changed-files:
- any-glob-to-any-file:
- crates/nu-parser/**
pr:plugins:
- changed-files:
- any-glob-to-any-file:
# plugins API
- crates/nu-plugin/**
- crates/nu-plugin-core/**
- crates/nu-plugin-engine/**
- crates/nu-plugin-protocol/**
- crates/nu-plugin-test-support/**
# specific plugins (like polars)
- crates/nu_plugin_*/**


@@ -20,6 +20,6 @@ jobs:
continue-on-error: true
steps:
- uses: actions/checkout@v4.1.7
- uses: rustsec/audit-check@v2.0.0
- uses: rustsec/audit-check@v1.4.1
with:
token: ${{ secrets.GITHUB_TOKEN }}


@@ -1,52 +0,0 @@
name: Test on Beta Toolchain
# This workflow is made to run our tests on the beta toolchain to validate that
# the beta toolchain works.
# We do not intend to test here that we are working correctly but rather that
# the beta toolchain works correctly.
# The ci.yml handles our actual testing with our guarantees.
on:
schedule:
# If this workflow fails, GitHub notifications will go to the last person
# who edited this line.
# See: https://docs.github.com/en/actions/monitoring-and-troubleshooting-workflows/monitoring-workflows/notifications-for-workflow-runs
- cron: '0 0 * * *' # Runs daily at midnight UTC
env:
NUSHELL_CARGO_PROFILE: ci
NU_LOG_LEVEL: DEBUG
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref && github.ref || github.run_id }}
cancel-in-progress: true
jobs:
build-and-test:
# this job is more for testing the beta toolchain and not our tests, so if
# this fails but the tests of the regular ci pass, then this is fine
continue-on-error: true
strategy:
fail-fast: true
matrix:
platform: [windows-latest, macos-latest, ubuntu-22.04]
runs-on: ${{ matrix.platform }}
steps:
- uses: actions/checkout@v4
- run: rustup update beta
- name: Tests
run: cargo +beta test --workspace --profile ci --exclude nu_plugin_*
- name: Check for clean repo
shell: bash
run: |
if [ -n "$(git status --porcelain)" ]; then
echo "there are changes";
git status --porcelain
exit 1
else
echo "no changes in working directory";
fi


@@ -3,7 +3,6 @@ on:
push:
branches:
- main
- 'patch-release-*'
name: continuous-integration
@@ -22,14 +21,14 @@ jobs:
strategy:
fail-fast: true
matrix:
# Pinning to Ubuntu 22.04 because building on newer Ubuntu versions causes linux-gnu
# Pinning to Ubuntu 20.04 because building on newer Ubuntu versions causes linux-gnu
# builds to link against a too-new-for-many-Linux-installs glibc version. Consider
# revisiting this when 22.04 is closer to EOL (June 2027)
# revisiting this when 20.04 is closer to EOL (April 2025)
#
# Using macOS 13 runner because 14 is based on the M1 and has half as much RAM (7 GB,
# instead of 14 GB) which is too little for us right now. Revisit when `dfr` commands are
# removed and we're only building the `polars` plugin instead
platform: [windows-latest, macos-13, ubuntu-22.04]
platform: [windows-latest, macos-13, ubuntu-20.04]
runs-on: ${{ matrix.platform }}
@@ -37,7 +36,7 @@ jobs:
- uses: actions/checkout@v4.1.7
- name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
- name: cargo fmt
run: cargo fmt --all -- --check
@@ -57,7 +56,12 @@ jobs:
strategy:
fail-fast: true
matrix:
platform: [windows-latest, macos-latest, ubuntu-22.04]
platform: [windows-latest, macos-latest, ubuntu-20.04]
include:
- default-flags: ""
# linux CI cannot handle clipboard feature
- platform: ubuntu-20.04
default-flags: "--no-default-features --features=default-no-clipboard"
runs-on: ${{ matrix.platform }}
@@ -65,10 +69,10 @@ jobs:
- uses: actions/checkout@v4.1.7
- name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
- name: Tests
run: cargo test --workspace --profile ci --exclude nu_plugin_*
run: cargo test --workspace --profile ci --exclude nu_plugin_* ${{ matrix.default-flags }}
- name: Check for clean repo
shell: bash
run: |
@@ -84,7 +88,7 @@ jobs:
strategy:
fail-fast: true
matrix:
platform: [ubuntu-22.04, macos-latest, windows-latest]
platform: [ubuntu-20.04, macos-latest, windows-latest]
py:
- py
@@ -94,10 +98,10 @@ jobs:
- uses: actions/checkout@v4.1.7
- name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
- name: Install Nushell
run: cargo install --path . --locked --force
run: cargo install --path . --locked --no-default-features
- name: Standard library tests
run: nu -c 'use crates/nu-std/testing.nu; testing run-tests --path crates/nu-std'
@@ -137,7 +141,7 @@ jobs:
# instead of 14 GB) which is too little for us right now.
#
# Failure occurring with clippy for rust 1.77.2
platform: [windows-latest, macos-13, ubuntu-22.04]
platform: [windows-latest, macos-13, ubuntu-20.04]
runs-on: ${{ matrix.platform }}
@@ -145,7 +149,7 @@ jobs:
- uses: actions/checkout@v4.1.7
- name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
- name: Clippy
run: cargo clippy --package nu_plugin_* -- $CLIPPY_OPTIONS
@@ -163,50 +167,3 @@ jobs:
else
echo "no changes in working directory";
fi
wasm:
env:
WASM_OPTIONS: --no-default-features --target wasm32-unknown-unknown
CLIPPY_CONF_DIR: ${{ github.workspace }}/clippy/wasm/
strategy:
matrix:
job:
- name: Build WASM
command: cargo build
args:
- name: Clippy WASM
command: cargo clippy
args: -- $CLIPPY_OPTIONS
name: ${{ matrix.job.name }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4.1.7
- name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
- name: Add wasm32-unknown-unknown target
run: rustup target add wasm32-unknown-unknown
- run: ${{ matrix.job.command }} -p nu-cmd-base $WASM_OPTIONS ${{ matrix.job.args }}
- run: ${{ matrix.job.command }} -p nu-cmd-extra $WASM_OPTIONS ${{ matrix.job.args }}
- run: ${{ matrix.job.command }} -p nu-cmd-lang $WASM_OPTIONS ${{ matrix.job.args }}
- run: ${{ matrix.job.command }} -p nu-color-config $WASM_OPTIONS ${{ matrix.job.args }}
- run: ${{ matrix.job.command }} -p nu-command $WASM_OPTIONS ${{ matrix.job.args }}
- run: ${{ matrix.job.command }} -p nu-derive-value $WASM_OPTIONS ${{ matrix.job.args }}
- run: ${{ matrix.job.command }} -p nu-engine $WASM_OPTIONS ${{ matrix.job.args }}
- run: ${{ matrix.job.command }} -p nu-glob $WASM_OPTIONS ${{ matrix.job.args }}
- run: ${{ matrix.job.command }} -p nu-json $WASM_OPTIONS ${{ matrix.job.args }}
- run: ${{ matrix.job.command }} -p nu-parser $WASM_OPTIONS ${{ matrix.job.args }}
- run: ${{ matrix.job.command }} -p nu-path $WASM_OPTIONS ${{ matrix.job.args }}
- run: ${{ matrix.job.command }} -p nu-pretty-hex $WASM_OPTIONS ${{ matrix.job.args }}
- run: ${{ matrix.job.command }} -p nu-protocol $WASM_OPTIONS ${{ matrix.job.args }}
- run: ${{ matrix.job.command }} -p nu-std $WASM_OPTIONS ${{ matrix.job.args }}
- run: ${{ matrix.job.command }} -p nu-system $WASM_OPTIONS ${{ matrix.job.args }}
- run: ${{ matrix.job.command }} -p nu-table $WASM_OPTIONS ${{ matrix.job.args }}
- run: ${{ matrix.job.command }} -p nu-term-grid $WASM_OPTIONS ${{ matrix.job.args }}
- run: ${{ matrix.job.command }} -p nu-utils $WASM_OPTIONS ${{ matrix.job.args }}
- run: ${{ matrix.job.command }} -p nuon $WASM_OPTIONS ${{ matrix.job.args }}


@@ -1,19 +0,0 @@
# Automatically labels PRs based on the configuration file
# you are probably looking for 👉 `.github/labeler.yml`
name: Label PRs
on:
- pull_request_target
jobs:
triage:
permissions:
contents: read
pull-requests: write
runs-on: ubuntu-latest
if: github.repository_owner == 'nushell'
steps:
- uses: actions/labeler@v5
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
sync-labels: true


@@ -1,30 +0,0 @@
# Description:
# - Add milestone to a merged PR automatically
# - Add milestone to a closed issue that has a merged PR fix (if any)
name: Milestone Action
on:
issues:
types: [closed]
pull_request_target:
types: [closed]
jobs:
update-milestone:
runs-on: ubuntu-latest
name: Milestone Update
steps:
- name: Set Milestone for PR
uses: hustcer/milestone-action@main
if: github.event.pull_request.merged == true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# Bind milestone to closed issue that has a merged PR fix
- name: Set Milestone for Issue
uses: hustcer/milestone-action@v2
if: github.event.issue.state == 'closed'
with:
action: bind-issue
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}


@@ -8,7 +8,6 @@
name: Nightly Build
on:
workflow_dispatch:
push:
branches:
- nightly # Just for test purpose only with the nightly repo
@@ -28,7 +27,7 @@ jobs:
# if: github.repository == 'nushell/nightly'
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v4.1.7
if: github.repository == 'nushell/nightly'
with:
ref: main
@@ -37,10 +36,10 @@ jobs:
token: ${{ secrets.WORKFLOW_TOKEN }}
- name: Setup Nushell
uses: hustcer/setup-nu@v3
uses: hustcer/setup-nu@v3.12
if: github.repository == 'nushell/nightly'
with:
version: 0.103.0
version: 0.95.0
# Synchronize the main branch of nightly repo with the main branch of Nushell official repo
- name: Prepare for Nightly Release
@@ -66,7 +65,7 @@ jobs:
}
standard:
name: Nu
name: Std
needs: prepare
strategy:
fail-fast: false
@@ -79,11 +78,8 @@ jobs:
- x86_64-unknown-linux-gnu
- x86_64-unknown-linux-musl
- aarch64-unknown-linux-gnu
- aarch64-unknown-linux-musl
- armv7-unknown-linux-gnueabihf
- armv7-unknown-linux-musleabihf
- riscv64gc-unknown-linux-gnu
- loongarch64-unknown-linux-gnu
extra: ['bin']
include:
- target: aarch64-apple-darwin
@@ -108,21 +104,15 @@ jobs:
os: ubuntu-22.04
- target: aarch64-unknown-linux-gnu
os: ubuntu-22.04
- target: aarch64-unknown-linux-musl
os: ubuntu-22.04
- target: armv7-unknown-linux-gnueabihf
os: ubuntu-22.04
- target: armv7-unknown-linux-musleabihf
os: ubuntu-22.04
- target: riscv64gc-unknown-linux-gnu
os: ubuntu-22.04
- target: loongarch64-unknown-linux-gnu
os: ubuntu-22.04
os: ubuntu-latest
runs-on: ${{matrix.os}}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v4.1.7
with:
ref: main
fetch-depth: 0
@@ -132,15 +122,15 @@ jobs:
echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml
- name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
# WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
with:
rustflags: ''
- name: Setup Nushell
uses: hustcer/setup-nu@v3
uses: hustcer/setup-nu@v3.12
with:
version: 0.103.0
version: 0.95.0
- name: Release Nu Binary
id: nu
@@ -171,7 +161,7 @@ jobs:
# REF: https://github.com/marketplace/actions/gh-release
# Create a release only in nushell/nightly repo
- name: Publish Archive
uses: softprops/action-gh-release@v2.0.9
uses: softprops/action-gh-release@v2.0.8
if: ${{ startsWith(github.repository, 'nushell/nightly') }}
with:
prerelease: true
@@ -191,14 +181,14 @@ jobs:
- name: Waiting for Release
run: sleep 1800
- uses: actions/checkout@v4
- uses: actions/checkout@v4.1.7
with:
ref: main
- name: Setup Nushell
uses: hustcer/setup-nu@v3
uses: hustcer/setup-nu@v3.12
with:
version: 0.103.0
version: 0.95.0
# Keep the last a few releases
- name: Delete Older Releases


@@ -84,27 +84,6 @@ if $os in ['macos-latest'] or $USE_UBUNTU {
$env.CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_LINKER = 'arm-linux-gnueabihf-gcc'
cargo-build-nu
}
'aarch64-unknown-linux-musl' => {
aria2c https://musl.cc/aarch64-linux-musl-cross.tgz
tar -xf aarch64-linux-musl-cross.tgz -C $env.HOME
$env.PATH = ($env.PATH | split row (char esep) | prepend $'($env.HOME)/aarch64-linux-musl-cross/bin')
$env.CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_LINKER = 'aarch64-linux-musl-gcc'
cargo-build-nu
}
'armv7-unknown-linux-musleabihf' => {
aria2c https://musl.cc/armv7r-linux-musleabihf-cross.tgz
tar -xf armv7r-linux-musleabihf-cross.tgz -C $env.HOME
$env.PATH = ($env.PATH | split row (char esep) | prepend $'($env.HOME)/armv7r-linux-musleabihf-cross/bin')
$env.CARGO_TARGET_ARMV7_UNKNOWN_LINUX_MUSLEABIHF_LINKER = 'armv7r-linux-musleabihf-gcc'
cargo-build-nu
}
'loongarch64-unknown-linux-gnu' => {
aria2c https://github.com/loongson/build-tools/releases/download/2024.08.08/x86_64-cross-tools-loongarch64-binutils_2.43-gcc_14.2.0-glibc_2.40.tar.xz
tar xf x86_64-cross-tools-loongarch64-*.tar.xz
$env.PATH = ($env.PATH | split row (char esep) | prepend $'($env.PWD)/cross-tools/bin')
$env.CARGO_TARGET_LOONGARCH64_UNKNOWN_LINUX_GNU_LINKER = 'loongarch64-unknown-linux-gnu-gcc'
cargo-build-nu
}
_ => {
# musl-tools to fix 'Failed to find tool. Is `musl-gcc` installed?'
# Actually just for x86_64-unknown-linux-musl target
@@ -117,14 +96,14 @@ if $os in ['macos-latest'] or $USE_UBUNTU {
# ----------------------------------------------------------------------------
# Build for Windows without static-link-openssl feature
# ----------------------------------------------------------------------------
if $os =~ 'windows' {
if $os in ['windows-latest'] {
cargo-build-nu
}
# ----------------------------------------------------------------------------
# Prepare for the release archive
# ----------------------------------------------------------------------------
let suffix = if $os =~ 'windows' { '.exe' }
let suffix = if $os == 'windows-latest' { '.exe' }
# nu, nu_plugin_* were all included
let executable = $'target/($target)/release/($bin)*($suffix)'
print $'Current executable file: ($executable)'
@@ -148,10 +127,10 @@ For more information, refer to https://www.nushell.sh/book/plugins.html
[LICENSE ...(glob $executable)] | each {|it| cp -rv $it $dist } | flatten
print $'(char nl)Check binary release version detail:'; hr-line
let ver = if $os =~ 'windows' {
(do -i { .\output\nu.exe -c 'version' }) | default '' | str join
let ver = if $os == 'windows-latest' {
(do -i { .\output\nu.exe -c 'version' }) | str join
} else {
(do -i { ./output/nu -c 'version' }) | default '' | str join
(do -i { ./output/nu -c 'version' }) | str join
}
if ($ver | str trim | is-empty) {
print $'(ansi r)Incompatible Nu binary: The binary cross compiled is not runnable on current arch...(ansi reset)'
@@ -177,7 +156,7 @@ if $os in ['macos-latest'] or $USE_UBUNTU {
# REF: https://github.blog/changelog/2022-10-11-github-actions-deprecating-save-state-and-set-output-commands/
echo $"archive=($archive)" | save --append $env.GITHUB_OUTPUT
} else if $os =~ 'windows' {
} else if $os == 'windows-latest' {
let releaseStem = $'($bin)-($version)-($target)'
@@ -221,7 +200,7 @@ if $os in ['macos-latest'] or $USE_UBUNTU {
}
def 'cargo-build-nu' [] {
if $os =~ 'windows' {
if $os == 'windows-latest' {
cargo build --release --all --target $target
} else {
cargo build --release --all --target $target --features=static-link-openssl


@@ -7,17 +7,15 @@ name: Create Release Draft
on:
workflow_dispatch:
push:
tags:
- '[0-9]+.[0-9]+.[0-9]+*'
- '!*nightly*' # Don't trigger release for nightly tags
tags: ["[0-9]+.[0-9]+.[0-9]+*"]
defaults:
run:
shell: bash
jobs:
release:
name: Nu
standard:
name: Std
strategy:
fail-fast: false
@@ -30,11 +28,8 @@ jobs:
- x86_64-unknown-linux-gnu
- x86_64-unknown-linux-musl
- aarch64-unknown-linux-gnu
- aarch64-unknown-linux-musl
- armv7-unknown-linux-gnueabihf
- armv7-unknown-linux-musleabihf
- riscv64gc-unknown-linux-gnu
- loongarch64-unknown-linux-gnu
extra: ['bin']
include:
- target: aarch64-apple-darwin
@@ -59,37 +54,31 @@ jobs:
os: ubuntu-22.04
- target: aarch64-unknown-linux-gnu
os: ubuntu-22.04
- target: aarch64-unknown-linux-musl
os: ubuntu-22.04
- target: armv7-unknown-linux-gnueabihf
os: ubuntu-22.04
- target: armv7-unknown-linux-musleabihf
os: ubuntu-22.04
- target: riscv64gc-unknown-linux-gnu
os: ubuntu-22.04
- target: loongarch64-unknown-linux-gnu
os: ubuntu-22.04
os: ubuntu-latest
runs-on: ${{matrix.os}}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v4.1.7
- name: Update Rust Toolchain Target
run: |
echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml
- name: Setup Rust toolchain
uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
# WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
with:
cache: false
rustflags: ''
- name: Setup Nushell
uses: hustcer/setup-nu@v3
uses: hustcer/setup-nu@v3.12
with:
version: 0.103.0
version: 0.95.0
- name: Release Nu Binary
id: nu
@@ -100,36 +89,12 @@ jobs:
TARGET: ${{ matrix.target }}
_EXTRA_: ${{ matrix.extra }}
# WARN: Don't upgrade this action due to the release per asset issue.
# See: https://github.com/softprops/action-gh-release/issues/445
# REF: https://github.com/marketplace/actions/gh-release
- name: Publish Archive
uses: softprops/action-gh-release@v2.0.5
uses: softprops/action-gh-release@v2.0.8
if: ${{ startsWith(github.ref, 'refs/tags/') }}
with:
draft: true
files: ${{ steps.nu.outputs.archive }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
sha256sum:
needs: release
name: Create Sha256sum
runs-on: ubuntu-latest
steps:
- name: Download Release Archives
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: >-
gh release download ${{ github.ref_name }}
--repo ${{ github.repository }}
--pattern '*'
--dir release
- name: Create Checksums
run: cd release && shasum -a 256 * > ../SHA256SUMS
- name: Publish Checksums
uses: softprops/action-gh-release@v2.0.5
with:
draft: true
files: SHA256SUMS
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}


@@ -10,4 +10,4 @@ jobs:
uses: actions/checkout@v4.1.7
- name: Check spelling
uses: crate-ci/typos@v1.31.1
uses: crate-ci/typos@v1.23.6

Cargo.lock generated (4743 changed lines)

File diff suppressed because it is too large.


@@ -10,8 +10,8 @@ homepage = "https://www.nushell.sh"
license = "MIT"
name = "nu"
repository = "https://github.com/nushell/nushell"
rust-version = "1.84.1"
version = "0.104.1"
rust-version = "1.78.0"
version = "0.97.0"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -66,153 +66,138 @@ alphanumeric-sort = "1.5"
ansi-str = "0.8"
anyhow = "1.0.82"
base64 = "0.22.1"
bracoxide = "0.1.6"
brotli = "7.0"
bracoxide = "0.1.2"
brotli = "5.0"
byteorder = "1.5"
bytes = "1"
bytesize = "1.3.3"
calamine = "0.27"
bytesize = "1.3"
calamine = "0.24.0"
chardetng = "0.1.17"
chrono = { default-features = false, version = "0.4.34" }
chrono-humanize = "0.2.3"
chrono-tz = "0.10"
chrono-tz = "0.8"
convert_case = "0.6"
crossbeam-channel = "0.5.8"
crossterm = "0.28.1"
crossterm = "0.27"
csv = "1.3"
ctrlc = "3.4"
devicons = "0.6.12"
deunicode = "1.6.0"
dialoguer = { default-features = false, version = "0.11" }
digest = { default-features = false, version = "0.10" }
dirs = "5.0"
dirs-sys = "0.4"
dtparse = "2.0"
encoding_rs = "0.8"
fancy-regex = "0.14"
fancy-regex = "0.13"
filesize = "0.2"
filetime = "0.2"
fuzzy-matcher = "0.3"
heck = "0.5.0"
human-date-parser = "0.3.0"
indexmap = "2.9"
human-date-parser = "0.1.1"
indexmap = "2.4"
indicatif = "0.17"
interprocess = "2.2.0"
is_executable = "1.0"
itertools = "0.13"
itertools = "0.12"
libc = "0.2"
libproc = "0.14"
log = "0.4"
lru = "0.12"
lscolors = { version = "0.17", default-features = false }
lsp-server = "0.7.8"
lsp-types = { version = "0.97.0", features = ["proposed"] }
lsp-textdocument = "0.4.2"
lsp-server = "0.7.5"
lsp-types = "0.95.0"
mach2 = "0.4"
md5 = { version = "0.10", package = "md-5" }
miette = "7.5"
miette = "7.2"
mime = "0.3.17"
mime_guess = "2.0"
mockito = { version = "1.7", default-features = false }
multipart-rs = "0.1.13"
mockito = { version = "1.5", default-features = false }
multipart-rs = "0.1.11"
native-tls = "0.2"
nix = { version = "0.29", default-features = false }
nix = { version = "0.28", default-features = false }
notify-debouncer-full = { version = "0.3", default-features = false }
nu-ansi-term = "0.50.1"
nucleo-matcher = "0.3"
num-format = "0.4"
num-traits = "0.2"
oem_cp = "2.0.0"
omnipath = "0.1"
once_cell = "1.18"
open = "5.3"
os_pipe = { version = "1.2", features = ["io_safety"] }
pathdiff = "0.2"
percent-encoding = "2"
pretty_assertions = "1.4"
print-positions = "0.6"
proc-macro-error2 = "2.0"
proc-macro-error = { version = "1.0", default-features = false }
proc-macro2 = "1.0"
procfs = "0.17.0"
procfs = "0.16.0"
pwd = "1.3"
quick-xml = "0.37.0"
quick-xml = "0.32.0"
quickcheck = "1.0"
quickcheck_macros = "1.0"
quote = "1.0"
rand = "0.9"
getrandom = "0.2" # pick same version that rand requires
rand_chacha = "0.9"
ratatui = "0.29"
rand = "0.8"
ratatui = "0.26"
rayon = "1.10"
reedline = "0.40.0"
reedline = "0.34.0"
regex = "1.9.5"
rmp = "0.8"
rmp-serde = "1.3"
roxmltree = "0.20"
rstest = { version = "0.23", default-features = false }
rstest_reuse = "0.7"
ropey = "1.6.1"
roxmltree = "0.19"
rstest = { version = "0.18", default-features = false }
rusqlite = "0.31"
rust-embed = "8.7.0"
scopeguard = { version = "1.2.0" }
serde = { version = "1.0" }
serde_json = "1.0.97"
rust-embed = "8.5.0"
same-file = "1.0"
serde = { version = "1.0", default-features = false }
serde_json = "1.0"
serde_urlencoded = "0.7.1"
serde_yaml = "0.9.33"
serde_yaml = "0.9"
sha2 = "0.10"
strip-ansi-escapes = "0.2.0"
strum = "0.26"
strum_macros = "0.26"
syn = "2.0"
sysinfo = "0.33"
tabled = { version = "0.17.0", default-features = false }
tempfile = "3.15"
titlecase = "3.5"
sysinfo = "0.30"
tabled = { version = "0.14.0", default-features = false }
tempfile = "3.10"
terminal_size = "0.3"
titlecase = "2.0"
toml = "0.8"
trash = "5.2"
update-informer = { version = "1.2.0", default-features = false, features = ["github", "native-tls", "ureq"] }
trash = "3.3"
umask = "2.1"
unicode-segmentation = "1.12"
unicode-width = "0.2"
ureq = { version = "2.12", default-features = false, features = ["socks-proxy"] }
unicode-segmentation = "1.11"
unicode-width = "0.1"
ureq = { version = "2.10", default-features = false }
url = "2.2"
uu_cp = "0.0.30"
uu_mkdir = "0.0.30"
uu_mktemp = "0.0.30"
uu_mv = "0.0.30"
uu_touch = "0.0.30"
uu_whoami = "0.0.30"
uu_uname = "0.0.30"
uucore = "0.0.30"
uuid = "1.16.0"
uu_cp = "0.0.27"
uu_mkdir = "0.0.27"
uu_mktemp = "0.0.27"
uu_mv = "0.0.27"
uu_whoami = "0.0.27"
uu_uname = "0.0.27"
uucore = "0.0.27"
uuid = "1.10.0"
v_htmlescape = "0.15.0"
wax = "0.6"
web-time = "1.1.0"
which = "7.0.0"
windows = "0.56"
which = "6.0.0"
windows = "0.54"
windows-sys = "0.48"
winreg = "0.52"
memchr = "2.7.4"
[workspace.lints.clippy]
# Warning: workspace lints affect library code as well as tests, so don't enable lints that would be too noisy in tests like that.
# todo = "warn"
unchecked_duration_subtraction = "warn"
[lints]
workspace = true
[dependencies]
nu-cli = { path = "./crates/nu-cli", version = "0.104.1" }
nu-cmd-base = { path = "./crates/nu-cmd-base", version = "0.104.1" }
nu-cmd-lang = { path = "./crates/nu-cmd-lang", version = "0.104.1" }
nu-cmd-plugin = { path = "./crates/nu-cmd-plugin", version = "0.104.1", optional = true }
nu-cmd-extra = { path = "./crates/nu-cmd-extra", version = "0.104.1" }
nu-command = { path = "./crates/nu-command", version = "0.104.1" }
nu-engine = { path = "./crates/nu-engine", version = "0.104.1" }
nu-explore = { path = "./crates/nu-explore", version = "0.104.1" }
nu-lsp = { path = "./crates/nu-lsp/", version = "0.104.1" }
nu-parser = { path = "./crates/nu-parser", version = "0.104.1" }
nu-path = { path = "./crates/nu-path", version = "0.104.1" }
nu-plugin-engine = { path = "./crates/nu-plugin-engine", optional = true, version = "0.104.1" }
nu-protocol = { path = "./crates/nu-protocol", version = "0.104.1" }
nu-std = { path = "./crates/nu-std", version = "0.104.1" }
nu-system = { path = "./crates/nu-system", version = "0.104.1" }
nu-utils = { path = "./crates/nu-utils", version = "0.104.1" }
nu-cli = { path = "./crates/nu-cli", version = "0.97.0" }
nu-cmd-base = { path = "./crates/nu-cmd-base", version = "0.97.0" }
nu-cmd-lang = { path = "./crates/nu-cmd-lang", version = "0.97.0" }
nu-cmd-plugin = { path = "./crates/nu-cmd-plugin", version = "0.97.0", optional = true }
nu-cmd-extra = { path = "./crates/nu-cmd-extra", version = "0.97.0" }
nu-command = { path = "./crates/nu-command", version = "0.97.0" }
nu-engine = { path = "./crates/nu-engine", version = "0.97.0" }
nu-explore = { path = "./crates/nu-explore", version = "0.97.0" }
nu-lsp = { path = "./crates/nu-lsp/", version = "0.97.0" }
nu-parser = { path = "./crates/nu-parser", version = "0.97.0" }
nu-path = { path = "./crates/nu-path", version = "0.97.0" }
nu-plugin-engine = { path = "./crates/nu-plugin-engine", optional = true, version = "0.97.0" }
nu-protocol = { path = "./crates/nu-protocol", version = "0.97.0" }
nu-std = { path = "./crates/nu-std", version = "0.97.0" }
nu-system = { path = "./crates/nu-system", version = "0.97.0" }
nu-utils = { path = "./crates/nu-utils", version = "0.97.0" }
reedline = { workspace = true, features = ["bashisms", "sqlite"] }
crossterm = { workspace = true }
@@ -220,6 +205,7 @@ ctrlc = { workspace = true }
dirs = { workspace = true }
log = { workspace = true }
miette = { workspace = true, features = ["fancy-no-backtrace", "fancy"] }
mimalloc = { version = "0.1.42", default-features = false, optional = true }
multipart-rs = { workspace = true }
serde_json = { workspace = true }
simplelog = "0.12"
@@ -241,38 +227,36 @@ nix = { workspace = true, default-features = false, features = [
] }
[dev-dependencies]
nu-test-support = { path = "./crates/nu-test-support", version = "0.104.1" }
nu-plugin-protocol = { path = "./crates/nu-plugin-protocol", version = "0.104.1" }
nu-plugin-core = { path = "./crates/nu-plugin-core", version = "0.104.1" }
nu-test-support = { path = "./crates/nu-test-support", version = "0.97.0" }
nu-plugin-protocol = { path = "./crates/nu-plugin-protocol", version = "0.97.0" }
nu-plugin-core = { path = "./crates/nu-plugin-core", version = "0.97.0" }
assert_cmd = "2.0"
dirs = { workspace = true }
tango-bench = "0.6"
tango-bench = "0.5"
pretty_assertions = { workspace = true }
fancy-regex = { workspace = true }
regex = { workspace = true }
rstest = { workspace = true, default-features = false }
serial_test = "3.2"
serial_test = "3.1"
tempfile = { workspace = true }
[features]
plugin = [
# crates
"nu-cmd-plugin",
"nu-plugin-engine",
# features
"nu-cmd-plugin",
"nu-cli/plugin",
"nu-cmd-lang/plugin",
"nu-command/plugin",
"nu-engine/plugin",
"nu-engine/plugin",
"nu-parser/plugin",
"nu-command/plugin",
"nu-protocol/plugin",
"nu-engine/plugin",
]
default = [
default = ["default-no-clipboard", "system-clipboard"]
# Enables convenient omitting of the system-clipboard feature, as it leads to problems in ci on linux
# See https://github.com/nushell/nushell/pull/11535
default-no-clipboard = [
"plugin",
"trash-support",
"sqlite",
"mimalloc",
]
stable = ["default"]
# NOTE: individual features are also passed to `nu-cmd-lang` that uses them to generate the feature matrix in the `version` command
@@ -281,8 +265,7 @@ stable = ["default"]
# otherwise the system version will be used. Not enabled by default because it takes a while to build
static-link-openssl = ["dep:openssl", "nu-cmd-lang/static-link-openssl"]
# Optional system clipboard support in `reedline`, this behavior has problematic compatibility with some systems.
# Missing X server/ Wayland can cause issues
mimalloc = ["nu-cmd-lang/mimalloc", "dep:mimalloc"]
system-clipboard = [
"reedline/system_clipboard",
"nu-cli/system-clipboard",
@@ -293,7 +276,7 @@ system-clipboard = [
trash-support = ["nu-command/trash-support", "nu-cmd-lang/trash-support"]
# SQLite commands for nushell
sqlite = ["nu-command/sqlite", "nu-cmd-lang/sqlite", "nu-std/sqlite"]
sqlite = ["nu-command/sqlite", "nu-cmd-lang/sqlite"]
[profile.release]
opt-level = "s" # Optimize for size
@@ -323,7 +306,7 @@ bench = false
# To use a development version of a dependency please use a global override here
# changing versions in each sub-crate of the workspace is tedious
[patch.crates-io]
# reedline = { git = "https://github.com/nushell/reedline", branch = "main" }
reedline = { git = "https://github.com/nushell/reedline", branch = "main" }
# nu-ansi-term = {git = "https://github.com/nushell/nu-ansi-term.git", branch = "main"}
# Run all benchmarks with `cargo bench`


@@ -1,6 +1,6 @@
MIT License
Copyright (c) 2019 - 2025 The Nushell Project Developers
Copyright (c) 2019 - 2023 The Nushell Project Developers
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

README.md (120 changed lines)

@@ -4,6 +4,7 @@
[![Nightly Build](https://github.com/nushell/nushell/actions/workflows/nightly-build.yml/badge.svg)](https://github.com/nushell/nushell/actions/workflows/nightly-build.yml)
[![Discord](https://img.shields.io/discord/601130461678272522.svg?logo=discord)](https://discord.gg/NtAbbGn)
[![The Changelog #363](https://img.shields.io/badge/The%20Changelog-%23363-61c192.svg)](https://changelog.com/podcast/363)
[![@nu_shell](https://img.shields.io/badge/twitter-@nu_shell-1DA1F3?style=flat-square)](https://twitter.com/nu_shell)
[![GitHub commit activity](https://img.shields.io/github/commit-activity/m/nushell/nushell)](https://github.com/nushell/nushell/graphs/commit-activity)
[![GitHub contributors](https://img.shields.io/github/contributors/nushell/nushell)](https://github.com/nushell/nushell/graphs/contributors)
@@ -34,7 +35,7 @@ This project has reached a minimum-viable-product level of quality. Many people
The [Nushell book](https://www.nushell.sh/book/) is the primary source of Nushell documentation. You can find [a full list of Nu commands in the book](https://www.nushell.sh/commands/), and we have many examples of using Nu in our [cookbook](https://www.nushell.sh/cookbook/).
We're also active on [Discord](https://discord.gg/NtAbbGn); come and chat with us!
We're also active on [Discord](https://discord.gg/NtAbbGn) and [Twitter](https://twitter.com/nu_shell); come and chat with us!
## Installation
@@ -57,7 +58,7 @@ For details about which platforms the Nushell team actively supports, see [our p
## Configuration
The default configurations can be found at [sample_config](crates/nu-utils/src/default_files)
The default configurations can be found at [sample_config](crates/nu-utils/src/sample_config)
which are the configuration files one gets when they startup Nushell for the first time.
It sets all of the default configuration to run Nushell. From here one can
@@ -94,44 +95,44 @@ Commands that work in the pipeline fit into one of three categories:
Commands are separated by the pipe symbol (`|`) to denote a pipeline flowing left to right.
```shell
ls | where type == "dir" | table
# => ╭────┬──────────┬──────┬─────────┬───────────────╮
# => │ # │ name │ type │ size │ modified │
# => ├────┼──────────┼──────┼─────────┼───────────────┤
# => │ 0 │ .cargo │ dir │ 0 B │ 9 minutes ago │
# => │ 1 │ assets │ dir │ 0 B │ 2 weeks ago │
# => │ 2 │ crates │ dir │ 4.0 KiB │ 2 weeks ago │
# => │ 3 │ docker │ dir │ 0 B │ 2 weeks ago │
# => │ 4 │ docs │ dir │ 0 B │ 2 weeks ago │
# => │ 5 │ images │ dir │ 0 B │ 2 weeks ago │
# => │ 6 │ pkg_mgrs │ dir │ 0 B │ 2 weeks ago │
# => │ 7 │ samples │ dir │ 0 B │ 2 weeks ago │
# => │ 8 │ src │ dir │ 4.0 KiB │ 2 weeks ago │
# => │ 9 │ target │ dir │ 0 B │ a day ago │
# => │ 10 │ tests │ dir │ 4.0 KiB │ 2 weeks ago │
# => │ 11 │ wix │ dir │ 0 B │ 2 weeks ago │
# => ╰────┴──────────┴──────┴─────────┴───────────────╯
> ls | where type == "dir" | table
╭────┬──────────┬──────┬─────────┬───────────────╮
│ # │ name │ type │ size │ modified │
├────┼──────────┼──────┼─────────┼───────────────┤
│ 0 │ .cargo │ dir │ 0 B │ 9 minutes ago │
│ 1 │ assets │ dir │ 0 B │ 2 weeks ago │
│ 2 │ crates │ dir │ 4.0 KiB │ 2 weeks ago │
│ 3 │ docker │ dir │ 0 B │ 2 weeks ago │
│ 4 │ docs │ dir │ 0 B │ 2 weeks ago │
│ 5 │ images │ dir │ 0 B │ 2 weeks ago │
│ 6 │ pkg_mgrs │ dir │ 0 B │ 2 weeks ago │
│ 7 │ samples │ dir │ 0 B │ 2 weeks ago │
│ 8 │ src │ dir │ 4.0 KiB │ 2 weeks ago │
│ 9 │ target │ dir │ 0 B │ a day ago │
│ 10 │ tests │ dir │ 4.0 KiB │ 2 weeks ago │
│ 11 │ wix │ dir │ 0 B │ 2 weeks ago │
╰────┴──────────┴──────┴─────────┴───────────────╯
```
Because most of the time you'll want to see the output of a pipeline, `table` is assumed.
We could have also written the above:
```shell
ls | where type == "dir"
> ls | where type == "dir"
```
Being able to use the same commands and compose them differently is an important philosophy in Nu.
For example, we could use the built-in `ps` command to get a list of the running processes, using the same `where` as above.
```shell
ps | where cpu > 0
# => ╭───┬───────┬───────────┬───────┬───────────┬───────────╮
# => │ # │ pid │ name │ cpu │ mem │ virtual │
# => ├───┼───────┼───────────┼───────┼───────────┼───────────┤
# => │ 0 │ 2240 │ Slack.exe │ 16.40 │ 178.3 MiB │ 232.6 MiB │
# => │ 1 │ 16948 │ Slack.exe │ 16.32 │ 205.0 MiB │ 197.9 MiB │
# => │ 2 │ 17700 │ nu.exe │ 3.77 │ 26.1 MiB │ 8.8 MiB │
# => ╰───┴───────┴───────────┴───────┴───────────┴───────────╯
> ps | where cpu > 0
╭───┬───────┬───────────┬───────┬───────────┬───────────╮
│ # │ pid │ name │ cpu │ mem │ virtual │
├───┼───────┼───────────┼───────┼───────────┼───────────┤
│ 0 │ 2240 │ Slack.exe │ 16.40 │ 178.3 MiB │ 232.6 MiB │
│ 1 │ 16948 │ Slack.exe │ 16.32 │ 205.0 MiB │ 197.9 MiB │
│ 2 │ 17700 │ nu.exe │ 3.77 │ 26.1 MiB │ 8.8 MiB │
╰───┴───────┴───────────┴───────┴───────────┴───────────╯
```
### Opening files
@@ -140,46 +141,46 @@ Nu can load file and URL contents as raw text or structured data (if it recogniz
For example, you can load a .toml file as structured data and explore it:
```shell
open Cargo.toml
# => ╭──────────────────┬────────────────────╮
# => │ bin │ [table 1 row] │
# => │ dependencies │ {record 25 fields} │
# => │ dev-dependencies │ {record 8 fields} │
# => │ features │ {record 10 fields} │
# => │ package │ {record 13 fields} │
# => │ patch │ {record 1 field} │
# => │ profile │ {record 3 fields} │
# => │ target │ {record 3 fields} │
# => │ workspace │ {record 1 field} │
# => ╰──────────────────┴────────────────────╯
> open Cargo.toml
╭──────────────────┬────────────────────╮
│ bin │ [table 1 row] │
│ dependencies │ {record 25 fields} │
│ dev-dependencies │ {record 8 fields} │
│ features │ {record 10 fields} │
│ package │ {record 13 fields} │
│ patch │ {record 1 field} │
│ profile │ {record 3 fields} │
│ target │ {record 3 fields} │
│ workspace │ {record 1 field} │
╰──────────────────┴────────────────────╯
```
We can pipe this into a command that gets the contents of one of the columns:
```shell
open Cargo.toml | get package
# => ╭───────────────┬────────────────────────────────────╮
# => │ authors │ [list 1 item] │
# => │ default-run │ nu │
# => │ description │ A new type of shell │
# => │ documentation │ https://www.nushell.sh/book/ │
# => │ edition │ 2018 │
# => │ exclude │ [list 1 item] │
# => │ homepage │ https://www.nushell.sh │
# => │ license │ MIT │
# => │ metadata │ {record 1 field} │
# => │ name │ nu │
# => │ repository │ https://github.com/nushell/nushell │
# => │ rust-version │ 1.60 │
# => │ version │ 0.72.0 │
# => ╰───────────────┴────────────────────────────────────╯
> open Cargo.toml | get package
╭───────────────┬────────────────────────────────────╮
│ authors │ [list 1 item] │
│ default-run │ nu │
│ description │ A new type of shell │
│ documentation │ https://www.nushell.sh/book/ │
│ edition │ 2018 │
│ exclude │ [list 1 item] │
│ homepage │ https://www.nushell.sh │
│ license │ MIT │
│ metadata │ {record 1 field} │
│ name │ nu │
│ repository │ https://github.com/nushell/nushell │
│ rust-version │ 1.60 │
│ version │ 0.72.0 │
╰───────────────┴────────────────────────────────────╯
```
And if needed we can drill down further:
```shell
open Cargo.toml | get package.version
# => 0.72.0
> open Cargo.toml | get package.version
0.72.0
```
### Plugins
@@ -222,14 +223,13 @@ Please submit an issue or PR to be added to this list.
- [Dorothy](http://github.com/bevry/dorothy)
- [Direnv](https://github.com/direnv/direnv/blob/master/docs/hook.md#nushell)
- [x-cmd](https://x-cmd.com/mod/nu)
- [vfox](https://github.com/version-fox/vfox)
## Contributing
See [Contributing](CONTRIBUTING.md) for details. Thanks to all the people who already contributed!
<a href="https://github.com/nushell/nushell/graphs/contributors">
<img src="https://contributors-img.web.app/image?repo=nushell/nushell&max=750&columns=20" />
<img src="https://contributors-img.web.app/image?repo=nushell/nushell&max=750" />
</a>
## License


@@ -1,6 +1,7 @@
use nu_cli::{eval_source, evaluate_commands};
use nu_plugin_core::{Encoder, EncodingType};
use nu_plugin_protocol::{PluginCallResponse, PluginOutput};
use nu_protocol::{
engine::{EngineState, Stack},
PipelineData, Signals, Span, Spanned, Value,
@@ -8,11 +9,12 @@ use nu_protocol::{
use nu_std::load_standard_library;
use nu_utils::{get_default_config, get_default_env};
use std::{
fmt::Write,
hint::black_box,
rc::Rc,
sync::{atomic::AtomicBool, Arc},
};
use std::hint::black_box;
use tango_bench::{benchmark_fn, tango_benchmarks, tango_main, IntoBenchmarks};
fn load_bench_commands() -> EngineState {
@@ -44,6 +46,9 @@ fn setup_stack_and_engine_from_command(command: &str) -> (Stack, EngineState) {
let mut stack = Stack::new();
// Support running benchmarks with IR mode
stack.use_ir = std::env::var_os("NU_USE_IR").is_some();
evaluate_commands(
&commands,
&mut engine,
@@ -139,16 +144,19 @@ fn bench_load_standard_lib() -> impl IntoBenchmarks {
})]
}
fn create_flat_record_string(n: usize) -> String {
let mut s = String::from("let record = { ");
fn create_flat_record_string(n: i32) -> String {
let mut s = String::from("let record = {");
for i in 0..n {
write!(s, "col_{i}: {i}, ").unwrap();
s.push_str(&format!("col_{}: {}", i, i));
if i < n - 1 {
s.push_str(", ");
}
}
s.push('}');
s
}
fn create_nested_record_string(depth: usize) -> String {
fn create_nested_record_string(depth: i32) -> String {
let mut s = String::from("let record = {");
for _ in 0..depth {
s.push_str("col: {");
@@ -161,7 +169,7 @@ fn create_nested_record_string(depth: usize) -> String {
s
}
fn create_example_table_nrows(n: usize) -> String {
fn create_example_table_nrows(n: i32) -> String {
let mut s = String::from("let table = [[foo bar baz]; ");
for i in 0..n {
s.push_str(&format!("[0, 1, {i}]"));
@@ -173,7 +181,7 @@ fn create_example_table_nrows(n: usize) -> String {
s
}
fn bench_record_create(n: usize) -> impl IntoBenchmarks {
fn bench_record_create(n: i32) -> impl IntoBenchmarks {
bench_command(
&format!("record_create_{n}"),
&create_flat_record_string(n),
@@ -182,7 +190,7 @@ fn bench_record_create(n: usize) -> impl IntoBenchmarks {
)
}
fn bench_record_flat_access(n: usize) -> impl IntoBenchmarks {
fn bench_record_flat_access(n: i32) -> impl IntoBenchmarks {
let setup_command = create_flat_record_string(n);
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
bench_command(
@@ -193,10 +201,10 @@ fn bench_record_flat_access(n: usize) -> impl IntoBenchmarks {
)
}
fn bench_record_nested_access(n: usize) -> impl IntoBenchmarks {
fn bench_record_nested_access(n: i32) -> impl IntoBenchmarks {
let setup_command = create_nested_record_string(n);
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
let nested_access = ".col".repeat(n);
let nested_access = ".col".repeat(n as usize);
bench_command(
&format!("record_nested_access_{n}"),
&format!("$record{} | ignore", nested_access),
@@ -205,18 +213,7 @@ fn bench_record_nested_access(n: usize) -> impl IntoBenchmarks {
)
}
fn bench_record_insert(n: usize, m: usize) -> impl IntoBenchmarks {
let setup_command = create_flat_record_string(n);
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
let mut insert = String::from("$record");
for i in n..(n + m) {
write!(insert, " | insert col_{i} {i}").unwrap();
}
insert.push_str(" | ignore");
bench_command(&format!("record_insert_{n}_{m}"), &insert, stack, engine)
}
fn bench_table_create(n: usize) -> impl IntoBenchmarks {
fn bench_table_create(n: i32) -> impl IntoBenchmarks {
bench_command(
&format!("table_create_{n}"),
&create_example_table_nrows(n),
@@ -225,7 +222,7 @@ fn bench_table_create(n: usize) -> impl IntoBenchmarks {
)
}
fn bench_table_get(n: usize) -> impl IntoBenchmarks {
fn bench_table_get(n: i32) -> impl IntoBenchmarks {
let setup_command = create_example_table_nrows(n);
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
bench_command(
@@ -236,7 +233,7 @@ fn bench_table_get(n: usize) -> impl IntoBenchmarks {
)
}
fn bench_table_select(n: usize) -> impl IntoBenchmarks {
fn bench_table_select(n: i32) -> impl IntoBenchmarks {
let setup_command = create_example_table_nrows(n);
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
bench_command(
@@ -247,29 +244,7 @@ fn bench_table_select(n: usize) -> impl IntoBenchmarks {
)
}
fn bench_table_insert_row(n: usize, m: usize) -> impl IntoBenchmarks {
let setup_command = create_example_table_nrows(n);
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
let mut insert = String::from("$table");
for i in n..(n + m) {
write!(insert, " | insert {i} {{ foo: 0, bar: 1, baz: {i} }}").unwrap();
}
insert.push_str(" | ignore");
bench_command(&format!("table_insert_row_{n}_{m}"), &insert, stack, engine)
}
fn bench_table_insert_col(n: usize, m: usize) -> impl IntoBenchmarks {
let setup_command = create_example_table_nrows(n);
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
let mut insert = String::from("$table");
for i in 0..m {
write!(insert, " | insert col_{i} {i}").unwrap();
}
insert.push_str(" | ignore");
bench_command(&format!("table_insert_col_{n}_{m}"), &insert, stack, engine)
}
fn bench_eval_interleave(n: usize) -> impl IntoBenchmarks {
fn bench_eval_interleave(n: i32) -> impl IntoBenchmarks {
let engine = setup_engine();
let stack = Stack::new();
bench_command(
@@ -280,7 +255,7 @@ fn bench_eval_interleave(n: usize) -> impl IntoBenchmarks {
)
}
fn bench_eval_interleave_with_interrupt(n: usize) -> impl IntoBenchmarks {
fn bench_eval_interleave_with_interrupt(n: i32) -> impl IntoBenchmarks {
let mut engine = setup_engine();
engine.set_signals(Signals::new(Arc::new(AtomicBool::new(false))));
let stack = Stack::new();
@@ -292,7 +267,7 @@ fn bench_eval_interleave_with_interrupt(n: usize) -> impl IntoBenchmarks {
)
}
fn bench_eval_for(n: usize) -> impl IntoBenchmarks {
fn bench_eval_for(n: i32) -> impl IntoBenchmarks {
let engine = setup_engine();
let stack = Stack::new();
bench_command(
@@ -303,7 +278,7 @@ fn bench_eval_for(n: usize) -> impl IntoBenchmarks {
)
}
fn bench_eval_each(n: usize) -> impl IntoBenchmarks {
fn bench_eval_each(n: i32) -> impl IntoBenchmarks {
let engine = setup_engine();
let stack = Stack::new();
bench_command(
@@ -314,7 +289,7 @@ fn bench_eval_each(n: usize) -> impl IntoBenchmarks {
)
}
fn bench_eval_par_each(n: usize) -> impl IntoBenchmarks {
fn bench_eval_par_each(n: i32) -> impl IntoBenchmarks {
let engine = setup_engine();
let stack = Stack::new();
bench_command(
@@ -455,14 +430,6 @@ tango_benchmarks!(
bench_record_nested_access(32),
bench_record_nested_access(64),
bench_record_nested_access(128),
bench_record_insert(1, 1),
bench_record_insert(10, 1),
bench_record_insert(100, 1),
bench_record_insert(1000, 1),
bench_record_insert(1, 10),
bench_record_insert(10, 10),
bench_record_insert(100, 10),
bench_record_insert(1000, 10),
// Table
bench_table_create(1),
bench_table_create(10),
@@ -476,22 +443,6 @@ tango_benchmarks!(
bench_table_select(10),
bench_table_select(100),
bench_table_select(1_000),
bench_table_insert_row(1, 1),
bench_table_insert_row(10, 1),
bench_table_insert_row(100, 1),
bench_table_insert_row(1000, 1),
bench_table_insert_row(1, 10),
bench_table_insert_row(10, 10),
bench_table_insert_row(100, 10),
bench_table_insert_row(1000, 10),
bench_table_insert_col(1, 1),
bench_table_insert_col(10, 1),
bench_table_insert_col(100, 1),
bench_table_insert_col(1000, 1),
bench_table_insert_col(1, 10),
bench_table_insert_col(10, 10),
bench_table_insert_col(100, 10),
bench_table_insert_col(1000, 10),
// Eval
// Interleave
bench_eval_interleave(100),


@@ -1,3 +0,0 @@
[[disallowed-types]]
path = "std::time::Instant"
reason = "WASM panics if used, use `web_time::Instant` instead"


@@ -5,43 +5,41 @@ repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cli"
edition = "2021"
license = "MIT"
name = "nu-cli"
version = "0.104.1"
version = "0.97.0"
[lib]
bench = false
[dev-dependencies]
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.104.1" }
nu-command = { path = "../nu-command", version = "0.104.1" }
nu-std = { path = "../nu-std", version = "0.104.1" }
nu-test-support = { path = "../nu-test-support", version = "0.104.1" }
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.97.0" }
nu-command = { path = "../nu-command", version = "0.97.0" }
nu-test-support = { path = "../nu-test-support", version = "0.97.0" }
rstest = { workspace = true, default-features = false }
tempfile = { workspace = true }
[dependencies]
nu-cmd-base = { path = "../nu-cmd-base", version = "0.104.1" }
nu-engine = { path = "../nu-engine", version = "0.104.1", features = ["os"] }
nu-glob = { path = "../nu-glob", version = "0.104.1" }
nu-path = { path = "../nu-path", version = "0.104.1" }
nu-parser = { path = "../nu-parser", version = "0.104.1" }
nu-plugin-engine = { path = "../nu-plugin-engine", version = "0.104.1", optional = true }
nu-protocol = { path = "../nu-protocol", version = "0.104.1", features = ["os"] }
nu-utils = { path = "../nu-utils", version = "0.104.1" }
nu-color-config = { path = "../nu-color-config", version = "0.104.1" }
nu-cmd-base = { path = "../nu-cmd-base", version = "0.97.0" }
nu-engine = { path = "../nu-engine", version = "0.97.0" }
nu-path = { path = "../nu-path", version = "0.97.0" }
nu-parser = { path = "../nu-parser", version = "0.97.0" }
nu-plugin-engine = { path = "../nu-plugin-engine", version = "0.97.0", optional = true }
nu-protocol = { path = "../nu-protocol", version = "0.97.0" }
nu-utils = { path = "../nu-utils", version = "0.97.0" }
nu-color-config = { path = "../nu-color-config", version = "0.97.0" }
nu-ansi-term = { workspace = true }
reedline = { workspace = true, features = ["bashisms", "sqlite"] }
chrono = { default-features = false, features = ["std"], workspace = true }
crossterm = { workspace = true }
fancy-regex = { workspace = true }
fuzzy-matcher = { workspace = true }
is_executable = { workspace = true }
log = { workspace = true }
lscolors = { workspace = true, default-features = false, features = ["nu-ansi-term"] }
miette = { workspace = true, features = ["fancy-no-backtrace"] }
nucleo-matcher = { workspace = true }
lscolors = { workspace = true, default-features = false, features = ["nu-ansi-term"] }
once_cell = { workspace = true }
percent-encoding = { workspace = true }
sysinfo = { workspace = true }
strum = { workspace = true }
unicode-segmentation = { workspace = true }
uuid = { workspace = true, features = ["v4"] }
which = { workspace = true }
@@ -49,6 +47,3 @@ which = { workspace = true }
[features]
plugin = ["nu-plugin-engine"]
system-clipboard = ["reedline/system_clipboard"]
[lints]
workspace = true


@@ -14,7 +14,7 @@ impl Command for Commandline {
.category(Category::Core)
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"View the current command line input buffer."
}


@@ -1,9 +1,9 @@
use nu_engine::command_prelude::*;
#[derive(Clone)]
pub struct CommandlineEdit;
pub struct SubCommand;
impl Command for CommandlineEdit {
impl Command for SubCommand {
fn name(&self) -> &str {
"commandline edit"
}
@@ -29,12 +29,12 @@ impl Command for CommandlineEdit {
.required(
"str",
SyntaxShape::String,
"The string to perform the operation with.",
"the string to perform the operation with",
)
.category(Category::Core)
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Modify the current command line input buffer."
}


@@ -2,9 +2,9 @@ use nu_engine::command_prelude::*;
use unicode_segmentation::UnicodeSegmentation;
#[derive(Clone)]
pub struct CommandlineGetCursor;
pub struct SubCommand;
impl Command for CommandlineGetCursor {
impl Command for SubCommand {
fn name(&self) -> &str {
"commandline get-cursor"
}
@@ -16,7 +16,7 @@ impl Command for CommandlineGetCursor {
.category(Category::Core)
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Get the current cursor position."
}


@@ -4,6 +4,6 @@ mod get_cursor;
mod set_cursor;
pub use commandline_::Commandline;
pub use edit::CommandlineEdit;
pub use get_cursor::CommandlineGetCursor;
pub use set_cursor::CommandlineSetCursor;
pub use edit::SubCommand as CommandlineEdit;
pub use get_cursor::SubCommand as CommandlineGetCursor;
pub use set_cursor::SubCommand as CommandlineSetCursor;


@@ -3,9 +3,9 @@ use nu_engine::command_prelude::*;
use unicode_segmentation::UnicodeSegmentation;
#[derive(Clone)]
pub struct CommandlineSetCursor;
pub struct SubCommand;
impl Command for CommandlineSetCursor {
impl Command for SubCommand {
fn name(&self) -> &str {
"commandline set-cursor"
}
@@ -18,11 +18,11 @@ impl Command for CommandlineSetCursor {
"set the current cursor position to the end of the buffer",
Some('e'),
)
.optional("pos", SyntaxShape::Int, "Cursor position to be set.")
.optional("pos", SyntaxShape::Int, "Cursor position to be set")
.category(Category::Core)
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Set the current cursor position."
}


@@ -17,7 +17,6 @@ pub fn add_cli_context(mut engine_state: EngineState) -> EngineState {
CommandlineGetCursor,
CommandlineSetCursor,
History,
HistoryImport,
HistorySession,
Keybindings,
KeybindingsDefault,


@@ -1,9 +0,0 @@
// Each const is named after a HistoryItem field, and the value is the field name to be displayed to
// the user (or accept during import).
pub const COMMAND_LINE: &str = "command";
pub const START_TIMESTAMP: &str = "start_timestamp";
pub const HOSTNAME: &str = "hostname";
pub const CWD: &str = "cwd";
pub const EXIT_STATUS: &str = "exit_status";
pub const DURATION: &str = "duration";
pub const SESSION_ID: &str = "session_id";


@@ -1,12 +1,10 @@
use nu_engine::command_prelude::*;
use nu_protocol::{shell_error::io::IoError, HistoryFileFormat};
use nu_protocol::HistoryFileFormat;
use reedline::{
FileBackedHistory, History as ReedlineHistory, HistoryItem, SearchDirection, SearchQuery,
SqliteBackedHistory,
};
use super::fields;
#[derive(Clone)]
pub struct History;
@@ -15,7 +13,7 @@ impl Command for History {
"history"
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Get the command history."
}
@@ -44,77 +42,91 @@
let Some(history) = engine_state.history_config() else {
return Ok(PipelineData::empty());
};
// todo for sqlite history this command should be an alias to `open ~/.config/nushell/history.sqlite3 | get history`
let Some(history_path) = history.file_path() else {
return Err(ShellError::ConfigDirNotFound { span: Some(head) });
};
if let Some(config_path) = nu_path::config_dir() {
let clear = call.has_flag(engine_state, stack, "clear")?;
let long = call.has_flag(engine_state, stack, "long")?;
let signals = engine_state.signals().clone();
if call.has_flag(engine_state, stack, "clear")? {
let _ = std::fs::remove_file(history_path);
// TODO: FIXME also clear the auxiliary files when using sqlite
return Ok(PipelineData::empty());
}
let mut history_path = config_path;
history_path.push("nushell");
match history.file_format {
HistoryFileFormat::Sqlite => {
history_path.push("history.sqlite3");
}
HistoryFileFormat::PlainText => {
history_path.push("history.txt");
}
}
let long = call.has_flag(engine_state, stack, "long")?;
let signals = engine_state.signals().clone();
let history_reader: Option<Box<dyn ReedlineHistory>> = match history.file_format {
HistoryFileFormat::Sqlite => {
SqliteBackedHistory::with_file(history_path.clone(), None, None)
if clear {
let _ = std::fs::remove_file(history_path);
// TODO: FIXME also clear the auxiliary files when using sqlite
Ok(PipelineData::empty())
} else {
let history_reader: Option<Box<dyn ReedlineHistory>> = match history.file_format {
HistoryFileFormat::Sqlite => {
SqliteBackedHistory::with_file(history_path.clone().into(), None, None)
.map(|inner| {
let boxed: Box<dyn ReedlineHistory> = Box::new(inner);
boxed
})
.ok()
}
HistoryFileFormat::PlainText => FileBackedHistory::with_file(
history.max_size as usize,
history_path.clone().into(),
)
.map(|inner| {
let boxed: Box<dyn ReedlineHistory> = Box::new(inner);
boxed
})
.ok()
.ok(),
};
match history.file_format {
HistoryFileFormat::PlainText => Ok(history_reader
.and_then(|h| {
h.search(SearchQuery::everything(SearchDirection::Forward, None))
.ok()
})
.map(move |entries| {
entries.into_iter().enumerate().map(move |(idx, entry)| {
Value::record(
record! {
"command" => Value::string(entry.command_line, head),
"index" => Value::int(idx as i64, head),
},
head,
)
})
})
.ok_or(ShellError::FileNotFound {
file: history_path.display().to_string(),
span: head,
})?
.into_pipeline_data(head, signals)),
HistoryFileFormat::Sqlite => Ok(history_reader
.and_then(|h| {
h.search(SearchQuery::everything(SearchDirection::Forward, None))
.ok()
})
.map(move |entries| {
entries.into_iter().enumerate().map(move |(idx, entry)| {
create_history_record(idx, entry, long, head)
})
})
.ok_or(ShellError::FileNotFound {
file: history_path.display().to_string(),
span: head,
})?
.into_pipeline_data(head, signals)),
}
}
HistoryFileFormat::Plaintext => {
FileBackedHistory::with_file(history.max_size as usize, history_path.clone())
.map(|inner| {
let boxed: Box<dyn ReedlineHistory> = Box::new(inner);
boxed
})
.ok()
}
};
match history.file_format {
HistoryFileFormat::Plaintext => Ok(history_reader
.and_then(|h| {
h.search(SearchQuery::everything(SearchDirection::Forward, None))
.ok()
})
.map(move |entries| {
entries.into_iter().enumerate().map(move |(idx, entry)| {
Value::record(
record! {
fields::COMMAND_LINE => Value::string(entry.command_line, head),
// TODO: This name is inconsistent with create_history_record.
"index" => Value::int(idx as i64, head),
},
head,
)
})
})
.ok_or(IoError::new(
std::io::ErrorKind::NotFound,
head,
history_path,
))?
.into_pipeline_data(head, signals)),
HistoryFileFormat::Sqlite => Ok(history_reader
.and_then(|h| {
h.search(SearchQuery::everything(SearchDirection::Forward, None))
.ok()
})
.map(move |entries| {
entries.into_iter().enumerate().map(move |(idx, entry)| {
create_sqlite_history_record(idx, entry, long, head)
})
})
.ok_or(IoError::new(
std::io::ErrorKind::NotFound,
head,
history_path,
))?
.into_pipeline_data(head, signals)),
} else {
Err(ShellError::ConfigDirNotFound { span: Some(head) })
}
}
@@ -139,7 +151,7 @@ impl Command for History {
}
}
fn create_sqlite_history_record(idx: usize, entry: HistoryItem, long: bool, head: Span) -> Value {
fn create_history_record(idx: usize, entry: HistoryItem, long: bool, head: Span) -> Value {
//1. Format all the values
//2. Create a record of either short or long columns and values
@@ -150,8 +162,11 @@ fn create_sqlite_history_record(idx: usize, entry: HistoryItem, long: bool, head
.unwrap_or_default(),
head,
);
let start_timestamp_value = Value::date(
entry.start_timestamp.unwrap_or_default().fixed_offset(),
let start_timestamp_value = Value::string(
entry
.start_timestamp
.map(|time| time.to_string())
.unwrap_or_default(),
head,
);
let command_value = Value::string(entry.command_line, head);
@ -177,13 +192,13 @@ fn create_sqlite_history_record(idx: usize, entry: HistoryItem, long: bool, head
Value::record(
record! {
"item_id" => item_id_value,
fields::START_TIMESTAMP => start_timestamp_value,
fields::COMMAND_LINE => command_value,
fields::SESSION_ID => session_id_value,
fields::HOSTNAME => hostname_value,
fields::CWD => cwd_value,
fields::DURATION => duration_value,
fields::EXIT_STATUS => exit_status_value,
"start_timestamp" => start_timestamp_value,
"command" => command_value,
"session_id" => session_id_value,
"hostname" => hostname_value,
"cwd" => cwd_value,
"duration" => duration_value,
"exit_status" => exit_status_value,
"idx" => index_value,
},
head,
@ -191,11 +206,11 @@ fn create_sqlite_history_record(idx: usize, entry: HistoryItem, long: bool, head
} else {
Value::record(
record! {
fields::START_TIMESTAMP => start_timestamp_value,
fields::COMMAND_LINE => command_value,
fields::CWD => cwd_value,
fields::DURATION => duration_value,
fields::EXIT_STATUS => exit_status_value,
"start_timestamp" => start_timestamp_value,
"command" => command_value,
"cwd" => cwd_value,
"duration" => duration_value,
"exit_status" => exit_status_value,
},
head,
)

View File

@ -1,441 +0,0 @@
use std::path::{Path, PathBuf};
use nu_engine::command_prelude::*;
use nu_protocol::{
shell_error::{self, io::IoError},
HistoryFileFormat,
};
use reedline::{
FileBackedHistory, History, HistoryItem, ReedlineError, SearchQuery, SqliteBackedHistory,
};
use super::fields;
#[derive(Clone)]
pub struct HistoryImport;
impl Command for HistoryImport {
fn name(&self) -> &str {
"history import"
}
fn description(&self) -> &str {
"Import command line history."
}
fn extra_description(&self) -> &str {
r#"Can import history from input, either successive command lines or more detailed records. If providing records, available fields are:
command, start_timestamp, hostname, cwd, duration, exit_status.
If no input is provided, all history items from the existing history in the other format will be imported: if the current history is stored in sqlite, items are read from the plain-text history, and vice versa.
Note that history item IDs are ignored when importing from file."#
}
fn signature(&self) -> nu_protocol::Signature {
Signature::build("history import")
.category(Category::History)
.input_output_types(vec![
(Type::Nothing, Type::Nothing),
(Type::String, Type::Nothing),
(Type::List(Box::new(Type::String)), Type::Nothing),
(Type::table(), Type::Nothing),
])
}
fn examples(&self) -> Vec<Example> {
vec![
Example {
example: "history import",
description:
"Append all items from history in the other format to the current history",
result: None,
},
Example {
example: "echo foo | history import",
description: "Append `foo` to the current history",
result: None,
},
Example {
example: "[[ command_line cwd ]; [ foo /home ]] | history import",
description: "Append `foo` ran from `/home` to the current history",
result: None,
},
]
}
fn run(
&self,
engine_state: &EngineState,
_stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let span = call.head;
let ok = Ok(Value::nothing(call.head).into_pipeline_data());
let Some(history) = engine_state.history_config() else {
return ok;
};
let Some(current_history_path) = history.file_path() else {
return Err(ShellError::ConfigDirNotFound { span: span.into() });
};
if let Some(bak_path) = backup(&current_history_path, span)? {
println!("Backed history to {}", bak_path.display());
}
match input {
PipelineData::Empty => {
let other_format = match history.file_format {
HistoryFileFormat::Sqlite => HistoryFileFormat::Plaintext,
HistoryFileFormat::Plaintext => HistoryFileFormat::Sqlite,
};
let src = new_backend(other_format, None)?;
let mut dst = new_backend(history.file_format, Some(current_history_path))?;
let items = src
.search(SearchQuery::everything(
reedline::SearchDirection::Forward,
None,
))
.map_err(error_from_reedline)?
.into_iter()
.map(Ok);
import(dst.as_mut(), items)
}
_ => {
let input = input.into_iter().map(item_from_value);
import(
new_backend(history.file_format, Some(current_history_path))?.as_mut(),
input,
)
}
}?;
ok
}
}
fn new_backend(
format: HistoryFileFormat,
path: Option<PathBuf>,
) -> Result<Box<dyn History>, ShellError> {
let path = match path {
Some(path) => path,
None => {
let Some(mut path) = nu_path::nu_config_dir() else {
return Err(ShellError::ConfigDirNotFound { span: None });
};
path.push(format.default_file_name());
path.into_std_path_buf()
}
};
fn map(
result: Result<impl History + 'static, ReedlineError>,
) -> Result<Box<dyn History>, ShellError> {
result
.map(|x| Box::new(x) as Box<dyn History>)
.map_err(error_from_reedline)
}
match format {
// Use a reasonably large value for maximum capacity.
HistoryFileFormat::Plaintext => map(FileBackedHistory::with_file(0xfffffff, path)),
HistoryFileFormat::Sqlite => map(SqliteBackedHistory::with_file(path, None, None)),
}
}
fn import(
dst: &mut dyn History,
src: impl Iterator<Item = Result<HistoryItem, ShellError>>,
) -> Result<(), ShellError> {
for item in src {
let mut item = item?;
item.id = None;
dst.save(item).map_err(error_from_reedline)?;
}
Ok(())
}
fn error_from_reedline(e: ReedlineError) -> ShellError {
// TODO: Should we add a new ShellError variant?
ShellError::GenericError {
error: "Reedline error".to_owned(),
msg: format!("{e}"),
span: None,
help: None,
inner: Vec::new(),
}
}
fn item_from_value(v: Value) -> Result<HistoryItem, ShellError> {
let span = v.span();
match v {
Value::Record { val, .. } => item_from_record(val.into_owned(), span),
Value::String { val, .. } => Ok(HistoryItem {
command_line: val,
id: None,
start_timestamp: None,
session_id: None,
hostname: None,
cwd: None,
duration: None,
exit_status: None,
more_info: None,
}),
_ => Err(ShellError::UnsupportedInput {
msg: "Only list and record inputs are supported".to_owned(),
input: v.get_type().to_string(),
msg_span: span,
input_span: span,
}),
}
}
fn item_from_record(mut rec: Record, span: Span) -> Result<HistoryItem, ShellError> {
let cmd = match rec.remove(fields::COMMAND_LINE) {
Some(v) => v.as_str()?.to_owned(),
None => {
return Err(ShellError::TypeMismatch {
err_message: format!("missing column: {}", fields::COMMAND_LINE),
span,
})
}
};
fn get<T>(
rec: &mut Record,
field: &'static str,
f: impl FnOnce(Value) -> Result<T, ShellError>,
) -> Result<Option<T>, ShellError> {
rec.remove(field).map(f).transpose()
}
let rec = &mut rec;
let item = HistoryItem {
command_line: cmd,
id: None,
start_timestamp: get(rec, fields::START_TIMESTAMP, |v| Ok(v.as_date()?.to_utc()))?,
hostname: get(rec, fields::HOSTNAME, |v| Ok(v.as_str()?.to_owned()))?,
cwd: get(rec, fields::CWD, |v| Ok(v.as_str()?.to_owned()))?,
exit_status: get(rec, fields::EXIT_STATUS, |v| v.as_int())?,
duration: get(rec, fields::DURATION, |v| duration_from_value(v, span))?,
more_info: None,
// TODO: Currently reedline doesn't let you create session IDs.
session_id: None,
};
if !rec.is_empty() {
let cols = rec.columns().map(|s| s.as_str()).collect::<Vec<_>>();
return Err(ShellError::TypeMismatch {
err_message: format!("unsupported column names: {}", cols.join(", ")),
span,
});
}
Ok(item)
}
fn duration_from_value(v: Value, span: Span) -> Result<std::time::Duration, ShellError> {
chrono::Duration::nanoseconds(v.as_duration()?)
.to_std()
.map_err(|_| ShellError::NeedsPositiveValue { span })
}
fn find_backup_path(path: &Path, span: Span) -> Result<PathBuf, ShellError> {
let Ok(mut bak_path) = path.to_path_buf().into_os_string().into_string() else {
// This isn't fundamentally a problem, but trying to work with OsString is a nightmare.
return Err(ShellError::GenericError {
error: "History path not UTF-8".to_string(),
msg: "History path must be representable as UTF-8".to_string(),
span: Some(span),
help: None,
inner: vec![],
});
};
bak_path.push_str(".bak");
if !Path::new(&bak_path).exists() {
return Ok(bak_path.into());
}
let base_len = bak_path.len();
for i in 1..100 {
use std::fmt::Write;
bak_path.truncate(base_len);
write!(&mut bak_path, ".{i}").unwrap();
if !Path::new(&bak_path).exists() {
return Ok(PathBuf::from(bak_path));
}
}
Err(ShellError::GenericError {
error: "Too many backup files".to_string(),
msg: "Found too many existing backup files".to_string(),
span: Some(span),
help: None,
inner: vec![],
})
}
fn backup(path: &Path, span: Span) -> Result<Option<PathBuf>, ShellError> {
match path.metadata() {
Ok(md) if md.is_file() => (),
Ok(_) => {
return Err(IoError::new_with_additional_context(
shell_error::io::ErrorKind::NotAFile,
span,
PathBuf::from(path),
"history path exists but is not a file",
)
.into())
}
Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(None),
Err(e) => {
return Err(IoError::new_internal(
e.kind(),
"Could not get metadata",
nu_protocol::location!(),
)
.into())
}
}
let bak_path = find_backup_path(path, span)?;
std::fs::copy(path, &bak_path).map_err(|err| {
IoError::new_internal(
err.kind(),
"Could not copy backup",
nu_protocol::location!(),
)
})?;
Ok(Some(bak_path))
}
#[cfg(test)]
mod tests {
use chrono::DateTime;
use rstest::rstest;
use super::*;
#[test]
fn test_item_from_value_string() -> Result<(), ShellError> {
let item = item_from_value(Value::string("foo", Span::unknown()))?;
assert_eq!(
item,
HistoryItem {
command_line: "foo".to_string(),
id: None,
start_timestamp: None,
session_id: None,
hostname: None,
cwd: None,
duration: None,
exit_status: None,
more_info: None
}
);
Ok(())
}
#[test]
fn test_item_from_value_record() {
let span = Span::unknown();
let rec = new_record(&[
("command", Value::string("foo", span)),
(
"start_timestamp",
Value::date(
DateTime::parse_from_rfc3339("1996-12-19T16:39:57-08:00").unwrap(),
span,
),
),
("hostname", Value::string("localhost", span)),
("cwd", Value::string("/home/test", span)),
("duration", Value::duration(100_000_000, span)),
("exit_status", Value::int(42, span)),
]);
let item = item_from_value(rec).unwrap();
assert_eq!(
item,
HistoryItem {
command_line: "foo".to_string(),
id: None,
start_timestamp: Some(
DateTime::parse_from_rfc3339("1996-12-19T16:39:57-08:00")
.unwrap()
.to_utc()
),
hostname: Some("localhost".to_string()),
cwd: Some("/home/test".to_string()),
duration: Some(std::time::Duration::from_nanos(100_000_000)),
exit_status: Some(42),
session_id: None,
more_info: None
}
);
}
#[test]
fn test_item_from_value_record_extra_field() {
let span = Span::unknown();
let rec = new_record(&[
("command_line", Value::string("foo", span)),
("id_nonexistent", Value::int(1, span)),
]);
assert!(item_from_value(rec).is_err());
}
#[test]
fn test_item_from_value_record_bad_type() {
let span = Span::unknown();
let rec = new_record(&[
("command_line", Value::string("foo", span)),
("id", Value::string("one".to_string(), span)),
]);
assert!(item_from_value(rec).is_err());
}
fn new_record(rec: &[(&'static str, Value)]) -> Value {
let span = Span::unknown();
let rec = Record::from_raw_cols_vals(
rec.iter().map(|(col, _)| col.to_string()).collect(),
rec.iter().map(|(_, val)| val.clone()).collect(),
span,
span,
)
.unwrap();
Value::record(rec, span)
}
#[rstest]
#[case::no_backup(&["history.dat"], "history.dat.bak")]
#[case::backup_exists(&["history.dat", "history.dat.bak"], "history.dat.bak.1")]
#[case::multiple_backups_exists( &["history.dat", "history.dat.bak", "history.dat.bak.1"], "history.dat.bak.2")]
fn test_find_backup_path(#[case] existing: &[&str], #[case] want: &str) {
let dir = tempfile::tempdir().unwrap();
for name in existing {
std::fs::File::create_new(dir.path().join(name)).unwrap();
}
let got = find_backup_path(&dir.path().join("history.dat"), Span::test_data()).unwrap();
assert_eq!(got, dir.path().join(want))
}
#[test]
fn test_backup() {
let dir = tempfile::tempdir().unwrap();
let mut history = std::fs::File::create_new(dir.path().join("history.dat")).unwrap();
use std::io::Write;
write!(&mut history, "123").unwrap();
let want_bak_path = dir.path().join("history.dat.bak");
assert_eq!(
backup(&dir.path().join("history.dat"), Span::test_data()),
Ok(Some(want_bak_path.clone()))
);
let got_data = String::from_utf8(std::fs::read(want_bak_path).unwrap()).unwrap();
assert_eq!(got_data, "123");
}
#[test]
fn test_backup_no_file() {
let dir = tempfile::tempdir().unwrap();
let bak_path = backup(&dir.path().join("history.dat"), Span::test_data()).unwrap();
assert!(bak_path.is_none());
}
}
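
To make the flow above concrete, here is a rough sketch of importing a single hand-built record, assuming only the reedline types and the `new_backend`/`import` helpers defined in this file; the destination path and field values are invented for illustration.

// Illustrative sketch, not part of the repository: build one HistoryItem and
// hand it to `import`, which clears the ID and saves it into the destination.
fn import_single_item_example() -> Result<(), ShellError> {
    // Hypothetical destination path, purely for illustration.
    let mut dst = new_backend(
        HistoryFileFormat::Plaintext,
        Some(PathBuf::from("history.txt")),
    )?;
    let item = HistoryItem {
        command_line: "ls -la".into(),
        id: None, // IDs are ignored on import, as noted in extra_description
        start_timestamp: None,
        session_id: None,
        hostname: Some("localhost".into()),
        cwd: Some("/home/test".into()),
        duration: None,
        exit_status: Some(0),
        more_info: None,
    };
    import(dst.as_mut(), std::iter::once(Ok(item)))
}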

View File

@ -8,7 +8,7 @@ impl Command for HistorySession {
"history session"
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Get the command history session."
}

View File

@ -1,8 +1,5 @@
mod fields;
mod history_;
mod history_import;
mod history_session;
pub use history_::History;
pub use history_import::HistoryImport;
pub use history_session::HistorySession;

View File

@ -14,11 +14,11 @@ impl Command for Keybindings {
.input_output_types(vec![(Type::Nothing, Type::String)])
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Keybindings related commands."
}
fn extra_description(&self) -> &str {
fn extra_usage(&self) -> &str {
r#"You must use one of the following subcommands. Using this command as-is will only produce this help message.
For more information on input and keybindings, check:

View File

@ -15,7 +15,7 @@ impl Command for KeybindingsDefault {
.input_output_types(vec![(Type::Nothing, Type::table())])
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"List default keybindings."
}

View File

@ -23,7 +23,7 @@ impl Command for KeybindingsList {
.category(Category::Platform)
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"List available options that can be used to create keybindings."
}

View File

@ -2,7 +2,6 @@ use crossterm::{
event::Event, event::KeyCode, event::KeyEvent, execute, terminal, QueueableCommand,
};
use nu_engine::command_prelude::*;
use nu_protocol::shell_error::io::IoError;
use std::io::{stdout, Write};
#[derive(Clone)]
@ -13,11 +12,11 @@ impl Command for KeybindingsListen {
"keybindings listen"
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Get input from the user."
}
fn extra_description(&self) -> &str {
fn extra_usage(&self) -> &str {
"This is an internal debugging tool. For better output, try `input listen --types [key]`"
}
@ -40,13 +39,7 @@ impl Command for KeybindingsListen {
match print_events(engine_state) {
Ok(v) => Ok(v.into_pipeline_data()),
Err(e) => {
terminal::disable_raw_mode().map_err(|err| {
IoError::new_internal(
err.kind(),
"Could not disable raw mode",
nu_protocol::location!(),
)
})?;
terminal::disable_raw_mode()?;
Err(ShellError::GenericError {
error: "Error with input".into(),
msg: "".into(),
@ -70,20 +63,8 @@ impl Command for KeybindingsListen {
pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
let config = engine_state.get_config();
stdout().flush().map_err(|err| {
IoError::new_internal(
err.kind(),
"Could not flush stdout",
nu_protocol::location!(),
)
})?;
terminal::enable_raw_mode().map_err(|err| {
IoError::new_internal(
err.kind(),
"Could not enable raw mode",
nu_protocol::location!(),
)
})?;
stdout().flush()?;
terminal::enable_raw_mode()?;
if config.use_kitty_protocol {
if let Ok(false) = crossterm::terminal::supports_keyboard_enhancement() {
@ -113,9 +94,7 @@ pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
let mut stdout = std::io::BufWriter::new(std::io::stderr());
loop {
let event = crossterm::event::read().map_err(|err| {
IoError::new_internal(err.kind(), "Could not read event", nu_protocol::location!())
})?;
let event = crossterm::event::read()?;
if event == Event::Key(KeyCode::Esc.into()) {
break;
}
@ -134,25 +113,9 @@ pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
_ => "".to_string(),
};
stdout.queue(crossterm::style::Print(o)).map_err(|err| {
IoError::new_internal(
err.kind(),
"Could not print output record",
nu_protocol::location!(),
)
})?;
stdout
.queue(crossterm::style::Print("\r\n"))
.map_err(|err| {
IoError::new_internal(
err.kind(),
"Could not print linebreak",
nu_protocol::location!(),
)
})?;
stdout.flush().map_err(|err| {
IoError::new_internal(err.kind(), "Could not flush", nu_protocol::location!())
})?;
stdout.queue(crossterm::style::Print(o))?;
stdout.queue(crossterm::style::Print("\r\n"))?;
stdout.flush()?;
}
if config.use_kitty_protocol {
@ -162,13 +125,7 @@ pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
);
}
terminal::disable_raw_mode().map_err(|err| {
IoError::new_internal(
err.kind(),
"Could not disable raw mode",
nu_protocol::location!(),
)
})?;
terminal::disable_raw_mode()?;
Ok(Value::nothing(Span::unknown()))
}

View File

@ -7,7 +7,7 @@ mod keybindings_list;
mod keybindings_listen;
pub use commandline::{Commandline, CommandlineEdit, CommandlineGetCursor, CommandlineSetCursor};
pub use history::{History, HistoryImport, HistorySession};
pub use history::{History, HistorySession};
pub use keybindings::Keybindings;
pub use keybindings_default::KeybindingsDefault;
pub use keybindings_list::KeybindingsList;

View File

@ -1,87 +0,0 @@
use super::{completion_options::NuMatcher, SemanticSuggestion};
use crate::{
completions::{Completer, CompletionOptions},
SuggestionKind,
};
use nu_protocol::{
engine::{Stack, StateWorkingSet},
Span,
};
use reedline::Suggestion;
pub struct AttributeCompletion;
pub struct AttributableCompletion;
impl Completer for AttributeCompletion {
fn fetch(
&mut self,
working_set: &StateWorkingSet,
_stack: &Stack,
prefix: impl AsRef<str>,
span: Span,
offset: usize,
options: &CompletionOptions,
) -> Vec<SemanticSuggestion> {
let mut matcher = NuMatcher::new(prefix, options);
let attr_commands =
working_set.find_commands_by_predicate(|s| s.starts_with(b"attr "), true);
for (decl_id, name, desc, ty) in attr_commands {
let name = name.strip_prefix(b"attr ").unwrap_or(&name);
matcher.add_semantic_suggestion(SemanticSuggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(name).into_owned(),
description: desc,
style: None,
extra: None,
span: reedline::Span {
start: span.start - offset,
end: span.end - offset,
},
append_whitespace: false,
},
kind: Some(SuggestionKind::Command(ty, Some(decl_id))),
});
}
matcher.results()
}
}
impl Completer for AttributableCompletion {
fn fetch(
&mut self,
working_set: &StateWorkingSet,
_stack: &Stack,
prefix: impl AsRef<str>,
span: Span,
offset: usize,
options: &CompletionOptions,
) -> Vec<SemanticSuggestion> {
let mut matcher = NuMatcher::new(prefix, options);
for s in ["def", "extern", "export def", "export extern"] {
let decl_id = working_set
.find_decl(s.as_bytes())
.expect("internal error, builtin declaration not found");
let cmd = working_set.get_decl(decl_id);
matcher.add_semantic_suggestion(SemanticSuggestion {
suggestion: Suggestion {
value: cmd.name().into(),
description: Some(cmd.description().into()),
style: None,
extra: None,
span: reedline::Span {
start: span.start - offset,
end: span.end - offset,
},
append_whitespace: false,
},
kind: Some(SuggestionKind::Command(cmd.command_type(), None)),
});
}
matcher.results()
}
}

View File

@ -1,7 +1,7 @@
use crate::completions::CompletionOptions;
use nu_protocol::{
engine::{Stack, StateWorkingSet},
DeclId, Span,
Span,
};
use reedline::Suggestion;
@ -12,9 +12,10 @@ pub trait Completer {
&mut self,
working_set: &StateWorkingSet,
stack: &Stack,
prefix: impl AsRef<str>,
prefix: Vec<u8>,
span: Span,
offset: usize,
pos: usize,
options: &CompletionOptions,
) -> Vec<SemanticSuggestion>;
}
@ -28,15 +29,8 @@ pub struct SemanticSuggestion {
// TODO: think about name: maybe suggestion context?
#[derive(Clone, Debug, PartialEq)]
pub enum SuggestionKind {
Command(nu_protocol::engine::CommandType, Option<DeclId>),
Value(nu_protocol::Type),
CellPath,
Directory,
File,
Flag,
Module,
Operator,
Variable,
Command(nu_protocol::engine::CommandType),
Type(nu_protocol::Type),
}
impl From<Suggestion> for SemanticSuggestion {

View File

@ -1,153 +0,0 @@
use std::borrow::Cow;
use crate::completions::{Completer, CompletionOptions, SemanticSuggestion, SuggestionKind};
use nu_engine::{column::get_columns, eval_variable};
use nu_protocol::{
ast::{Expr, Expression, FullCellPath, PathMember},
engine::{Stack, StateWorkingSet},
eval_const::eval_constant,
ShellError, Span, Value,
};
use reedline::Suggestion;
use super::completion_options::NuMatcher;
pub struct CellPathCompletion<'a> {
pub full_cell_path: &'a FullCellPath,
pub position: usize,
}
fn prefix_from_path_member(member: &PathMember, pos: usize) -> (String, Span) {
let (prefix_str, start) = match member {
PathMember::String { val, span, .. } => (val, span.start),
PathMember::Int { val, span, .. } => (&val.to_string(), span.start),
};
let prefix_str = prefix_str.get(..pos + 1 - start).unwrap_or(prefix_str);
// strip wrapping quotes
let quotations = ['"', '\'', '`'];
let prefix_str = prefix_str.strip_prefix(quotations).unwrap_or(prefix_str);
(prefix_str.to_string(), Span::new(start, pos + 1))
}
impl Completer for CellPathCompletion<'_> {
fn fetch(
&mut self,
working_set: &StateWorkingSet,
stack: &Stack,
_prefix: impl AsRef<str>,
_span: Span,
offset: usize,
options: &CompletionOptions,
) -> Vec<SemanticSuggestion> {
let mut prefix_str = String::new();
// position at dots, e.g. `$env.config.<TAB>`
let mut span = Span::new(self.position + 1, self.position + 1);
let mut path_member_num_before_pos = 0;
for member in self.full_cell_path.tail.iter() {
if member.span().end <= self.position {
path_member_num_before_pos += 1;
} else if member.span().contains(self.position) {
(prefix_str, span) = prefix_from_path_member(member, self.position);
break;
}
}
let current_span = reedline::Span {
start: span.start - offset,
end: span.end - offset,
};
let mut matcher = NuMatcher::new(prefix_str, options);
let path_members = self
.full_cell_path
.tail
.get(0..path_member_num_before_pos)
.unwrap_or_default();
let value = eval_cell_path(
working_set,
stack,
&self.full_cell_path.head,
path_members,
span,
)
.unwrap_or_default();
for suggestion in get_suggestions_by_value(&value, current_span) {
matcher.add_semantic_suggestion(suggestion);
}
matcher.results()
}
}
/// Follow cell path to get the value
/// NOTE: This is a relatively lightweight implementation,
/// so it may fail to get the exact value when the expression is complicated.
/// One failing example would be `[$foo].0`
pub(crate) fn eval_cell_path(
working_set: &StateWorkingSet,
stack: &Stack,
head: &Expression,
path_members: &[PathMember],
span: Span,
) -> Result<Value, ShellError> {
// evaluate the head expression to get its value
let head_value = if let Expr::Var(var_id) = head.expr {
working_set
.get_variable(var_id)
.const_val
.to_owned()
.map_or_else(
|| eval_variable(working_set.permanent_state, stack, var_id, span),
Ok,
)
} else {
eval_constant(working_set, head)
}?;
head_value
.follow_cell_path(path_members, false)
.map(Cow::into_owned)
}
fn get_suggestions_by_value(
value: &Value,
current_span: reedline::Span,
) -> Vec<SemanticSuggestion> {
let to_suggestion = |s: String, v: Option<&Value>| {
// Check if the string needs quoting
let value = if s.is_empty()
|| s.chars()
.any(|c: char| !(c.is_ascii_alphabetic() || ['_', '-'].contains(&c)))
{
format!("{:?}", s)
} else {
s
};
SemanticSuggestion {
suggestion: Suggestion {
value,
span: current_span,
description: v.map(|v| v.get_type().to_string()),
..Suggestion::default()
},
kind: Some(SuggestionKind::CellPath),
}
};
match value {
Value::Record { val, .. } => val
.columns()
.map(|s| to_suggestion(s.to_string(), val.get(s)))
.collect(),
Value::List { vals, .. } => get_columns(vals.as_slice())
.into_iter()
.map(|s| {
let sub_val = vals
.first()
.and_then(|v| v.as_record().ok())
.and_then(|rv| rv.get(&s));
to_suggestion(s, sub_val)
})
.collect(),
_ => vec![],
}
}

View File

@ -1,37 +1,47 @@
use std::collections::HashMap;
use crate::{
completions::{Completer, CompletionOptions},
completions::{Completer, CompletionOptions, MatchAlgorithm},
SuggestionKind,
};
use nu_parser::FlatShape;
use nu_protocol::{
engine::{CommandType, Stack, StateWorkingSet},
engine::{CachedFile, Stack, StateWorkingSet},
Span,
};
use reedline::Suggestion;
use super::{completion_options::NuMatcher, SemanticSuggestion};
use super::{completion_common::sort_suggestions, SemanticSuggestion};
pub struct CommandCompletion {
/// Whether to include internal commands
pub internals: bool,
/// Whether to include external commands
pub externals: bool,
flattened: Vec<(Span, FlatShape)>,
flat_shape: FlatShape,
force_completion_after_space: bool,
}
impl CommandCompletion {
pub fn new(
flattened: Vec<(Span, FlatShape)>,
flat_shape: FlatShape,
force_completion_after_space: bool,
) -> Self {
Self {
flattened,
flat_shape,
force_completion_after_space,
}
}
fn external_command_completion(
&self,
working_set: &StateWorkingSet,
sugg_span: reedline::Span,
matched_internal: impl Fn(&str) -> bool,
matcher: &mut NuMatcher<String>,
) -> HashMap<String, SemanticSuggestion> {
let mut suggs = HashMap::new();
prefix: &str,
match_algorithm: MatchAlgorithm,
) -> Vec<String> {
let mut executables = vec![];
let paths = working_set.permanent_state.get_env_var_insensitive("path");
// os agnostic way to get the PATH env var
let paths = working_set.permanent_state.get_path_env_var();
if let Some((_, paths)) = paths {
if let Some(paths) = paths {
if let Ok(paths) = paths.as_list() {
for path in paths {
let path = path.coerce_str().unwrap_or_default();
@ -41,46 +51,25 @@ impl CommandCompletion {
if working_set
.permanent_state
.config
.completions
.external
.max_results
<= suggs.len() as i64
.max_external_completion_results
> executables.len() as i64
&& !executables.contains(
&item
.path()
.file_name()
.map(|x| x.to_string_lossy().to_string())
.unwrap_or_default(),
)
&& matches!(
item.path().file_name().map(|x| match_algorithm
.matches_str(&x.to_string_lossy(), prefix)),
Some(true)
)
&& is_executable::is_executable(item.path())
{
break;
}
let Ok(name) = item.file_name().into_string() else {
continue;
};
let value = if matched_internal(&name) {
format!("^{}", name)
} else {
name.clone()
};
if suggs.contains_key(&value) {
continue;
}
// TODO: for performance, check name matching before the relatively heavy
// IO of `is_executable`; duplicated `match_aux` calls for already matched
// items should be avoided in the future
if matcher.matches(&name) && is_executable::is_executable(item.path()) {
// If there's an internal command with the same name, adds ^cmd to the
// matcher so that both the internal and external command are included
matcher.add(&name, value.clone());
suggs.insert(
value.clone(),
SemanticSuggestion {
suggestion: Suggestion {
value,
span: sugg_span,
append_whitespace: true,
..Default::default()
},
kind: Some(SuggestionKind::Command(
CommandType::External,
None,
)),
},
);
if let Ok(name) = item.file_name().into_string() {
executables.push(name);
}
}
}
}
@ -88,7 +77,77 @@ impl CommandCompletion {
}
}
suggs
executables
}
fn complete_commands(
&self,
working_set: &StateWorkingSet,
span: Span,
offset: usize,
find_externals: bool,
match_algorithm: MatchAlgorithm,
) -> Vec<SemanticSuggestion> {
let partial = working_set.get_span_contents(span);
let filter_predicate = |command: &[u8]| match_algorithm.matches_u8(command, partial);
let mut results = working_set
.find_commands_by_predicate(filter_predicate, true)
.into_iter()
.map(move |x| SemanticSuggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(&x.0).to_string(),
description: x.1,
span: reedline::Span::new(span.start - offset, span.end - offset),
append_whitespace: true,
..Suggestion::default()
},
kind: Some(SuggestionKind::Command(x.2)),
})
.collect::<Vec<_>>();
let partial = working_set.get_span_contents(span);
let partial = String::from_utf8_lossy(partial).to_string();
if find_externals {
let results_external = self
.external_command_completion(working_set, &partial, match_algorithm)
.into_iter()
.map(move |x| SemanticSuggestion {
suggestion: Suggestion {
value: x,
span: reedline::Span::new(span.start - offset, span.end - offset),
append_whitespace: true,
..Suggestion::default()
},
// TODO: is there a way to create a test?
kind: None,
});
let results_strings: Vec<String> =
results.iter().map(|x| x.suggestion.value.clone()).collect();
for external in results_external {
if results_strings.contains(&external.suggestion.value) {
results.push(SemanticSuggestion {
suggestion: Suggestion {
value: format!("^{}", external.suggestion.value),
span: external.suggestion.span,
append_whitespace: true,
..Suggestion::default()
},
kind: external.kind,
})
} else {
results.push(external)
}
}
results
} else {
results
}
}
}
@ -97,62 +156,175 @@ impl Completer for CommandCompletion {
&mut self,
working_set: &StateWorkingSet,
_stack: &Stack,
prefix: impl AsRef<str>,
prefix: Vec<u8>,
span: Span,
offset: usize,
pos: usize,
options: &CompletionOptions,
) -> Vec<SemanticSuggestion> {
let mut matcher = NuMatcher::new(prefix, options);
let last = self
.flattened
.iter()
.rev()
.skip_while(|x| x.0.end > pos)
.take_while(|x| {
matches!(
x.1,
FlatShape::InternalCall(_)
| FlatShape::External
| FlatShape::ExternalArg
| FlatShape::Literal
| FlatShape::String
)
})
.last();
let sugg_span = reedline::Span::new(span.start - offset, span.end - offset);
let mut internal_suggs = HashMap::new();
if self.internals {
let filtered_commands = working_set.find_commands_by_predicate(
|name| {
let name = String::from_utf8_lossy(name);
matcher.add(&name, name.to_string())
},
true,
);
for (decl_id, name, description, typ) in filtered_commands {
let name = String::from_utf8_lossy(&name);
internal_suggs.insert(
name.to_string(),
SemanticSuggestion {
suggestion: Suggestion {
value: name.to_string(),
description,
span: sugg_span,
append_whitespace: true,
..Suggestion::default()
},
kind: Some(SuggestionKind::Command(typ, Some(decl_id))),
},
);
}
}
let mut external_suggs = if self.externals {
self.external_command_completion(
// The last item here would be the earliest shape that could possibly be part of this subcommand
let subcommands = if let Some(last) = last {
self.complete_commands(
working_set,
sugg_span,
|name| internal_suggs.contains_key(name),
&mut matcher,
Span::new(last.0.start, pos),
offset,
false,
options.match_algorithm,
)
} else {
HashMap::new()
vec![]
};
let mut res = Vec::new();
for cmd_name in matcher.results() {
if let Some(sugg) = internal_suggs
.remove(&cmd_name)
.or_else(|| external_suggs.remove(&cmd_name))
{
res.push(sugg);
}
if !subcommands.is_empty() {
return sort_suggestions(&String::from_utf8_lossy(&prefix), subcommands, options);
}
let config = working_set.get_config();
let commands = if matches!(self.flat_shape, nu_parser::FlatShape::External)
|| matches!(self.flat_shape, nu_parser::FlatShape::InternalCall(_))
|| ((span.end - span.start) == 0)
|| is_passthrough_command(working_set.delta.get_file_contents())
{
// we're in a gap or at a command
if working_set.get_span_contents(span).is_empty() && !self.force_completion_after_space
{
return vec![];
}
self.complete_commands(
working_set,
span,
offset,
config.enable_external_completion,
options.match_algorithm,
)
} else {
vec![]
};
sort_suggestions(&String::from_utf8_lossy(&prefix), commands, options)
}
}
pub fn find_non_whitespace_index(contents: &[u8], start: usize) -> usize {
match contents.get(start..) {
Some(contents) => {
contents
.iter()
.take_while(|x| x.is_ascii_whitespace())
.count()
+ start
}
None => start,
}
}
pub fn is_passthrough_command(working_set_file_contents: &[CachedFile]) -> bool {
for cached_file in working_set_file_contents {
let contents = &cached_file.content;
let last_pipe_pos_rev = contents.iter().rev().position(|x| x == &b'|');
let last_pipe_pos = last_pipe_pos_rev.map(|x| contents.len() - x).unwrap_or(0);
let cur_pos = find_non_whitespace_index(contents, last_pipe_pos);
let result = match contents.get(cur_pos..) {
Some(contents) => contents.starts_with(b"sudo ") || contents.starts_with(b"doas "),
None => false,
};
if result {
return true;
}
}
false
}
#[cfg(test)]
mod command_completions_tests {
use super::*;
use nu_protocol::engine::EngineState;
use std::sync::Arc;
#[test]
fn test_find_non_whitespace_index() {
let commands = [
(" hello", 4),
("sudo ", 0),
(" sudo ", 2),
(" sudo ", 2),
(" hello ", 1),
(" hello ", 3),
(" hello | sudo ", 4),
(" sudo|sudo", 5),
("sudo | sudo ", 0),
(" hello sud", 1),
];
for (idx, ele) in commands.iter().enumerate() {
let index = find_non_whitespace_index(ele.0.as_bytes(), 0);
assert_eq!(index, ele.1, "Failed on index {}", idx);
}
}
#[test]
fn test_is_last_command_passthrough() {
let commands = [
(" hello", false),
(" sudo ", true),
("sudo ", true),
(" hello", false),
(" sudo", false),
(" sudo ", true),
(" sudo ", true),
(" sudo ", true),
(" hello ", false),
(" hello | sudo ", true),
(" sudo|sudo", false),
("sudo | sudo ", true),
(" hello sud", false),
(" sudo | sud ", false),
(" sudo|sudo ", true),
(" sudo | sudo ls | sudo ", true),
];
for (idx, ele) in commands.iter().enumerate() {
let input = ele.0.as_bytes();
let mut engine_state = EngineState::new();
engine_state.add_file("test.nu".into(), Arc::new([]));
let delta = {
let mut working_set = StateWorkingSet::new(&engine_state);
let _ = working_set.add_file("child.nu".into(), input);
working_set.render()
};
let result = engine_state.merge_delta(delta);
assert!(
result.is_ok(),
"Merge delta has failed: {}",
result.err().unwrap()
);
let is_passthrough_command = is_passthrough_command(engine_state.get_file_contents());
assert_eq!(
is_passthrough_command, ele.1,
"index for '{}': {}",
ele.0, idx
);
}
res
}
}

File diff suppressed because it is too large

View File

@ -1,20 +1,22 @@
use super::{completion_options::NuMatcher, MatchAlgorithm};
use crate::completions::CompletionOptions;
use crate::{
completions::{matches, CompletionOptions},
SemanticSuggestion,
};
use fuzzy_matcher::{skim::SkimMatcherV2, FuzzyMatcher};
use nu_ansi_term::Style;
use nu_engine::env_to_string;
use nu_path::dots::expand_ndots;
use nu_path::{expand_to_real_path, home_dir};
use nu_protocol::{
engine::{EngineState, Stack, StateWorkingSet},
Span,
CompletionSort, Span,
};
use nu_utils::get_ls_colors;
use nu_utils::IgnoreCaseExt;
use std::path::{is_separator, Component, Path, PathBuf, MAIN_SEPARATOR as SEP};
use super::MatchAlgorithm;
#[derive(Clone, Default)]
pub struct PathBuiltFromString {
cwd: PathBuf,
parts: Vec<String>,
isdir: bool,
}
@ -22,101 +24,82 @@ pub struct PathBuiltFromString {
/// Recursively goes through paths that match a given `partial`.
/// built: State struct for a valid matching path built so far.
///
/// `want_directory`: Whether we want only directories as completion matches.
/// Some commands like `cd` can only be run on directories whereas others
/// like `ls` can be run on regular files as well.
///
/// `isdir`: whether the current partial path has a trailing slash.
/// Parsing a path string into a pathbuf loses that bit of information.
///
/// `enable_exact_match`: Whether match algorithm is Prefix and all previous components
/// of the path matched a directory exactly.
fn complete_rec(
/// want_directory: Whether we want only directories as completion matches.
/// Some commands like `cd` can only be run on directories whereas others
/// like `ls` can be run on regular files as well.
pub fn complete_rec(
partial: &[&str],
built_paths: &[PathBuiltFromString],
built: &PathBuiltFromString,
cwd: &Path,
options: &CompletionOptions,
want_directory: bool,
isdir: bool,
enable_exact_match: bool,
) -> Vec<PathBuiltFromString> {
let mut completions = vec![];
if let Some((&base, rest)) = partial.split_first() {
if base.chars().all(|c| c == '.') && (isdir || !rest.is_empty()) {
let built_paths: Vec<_> = built_paths
.iter()
.map(|built| {
let mut built = built.clone();
built.parts.push(base.to_string());
built.isdir = true;
built
})
.collect();
return complete_rec(
rest,
&built_paths,
options,
want_directory,
isdir,
enable_exact_match,
);
if (base == "." || base == "..") && (isdir || !rest.is_empty()) {
let mut built = built.clone();
built.parts.push(base.to_string());
built.isdir = true;
return complete_rec(rest, &built, cwd, options, want_directory, isdir);
}
}
let mut built_path = cwd.to_path_buf();
for part in &built.parts {
built_path.push(part);
}
let Ok(result) = built_path.read_dir() else {
return completions;
};
let mut entries = Vec::new();
for entry in result.filter_map(|e| e.ok()) {
let entry_name = entry.file_name().to_string_lossy().into_owned();
let entry_isdir = entry.path().is_dir();
let mut built = built.clone();
built.parts.push(entry_name.clone());
built.isdir = entry_isdir;
if !want_directory || entry_isdir {
entries.push((entry_name, built));
}
}
let prefix = partial.first().unwrap_or(&"");
let mut matcher = NuMatcher::new(prefix, options);
let sorted_entries = sort_completions(prefix, entries, options, |(entry, _)| entry);
for built in built_paths {
let mut path = built.cwd.clone();
for part in &built.parts {
path.push(part);
}
let Ok(result) = path.read_dir() else {
continue;
};
for entry in result.filter_map(|e| e.ok()) {
let entry_name = entry.file_name().to_string_lossy().into_owned();
let entry_isdir = entry.path().is_dir();
let mut built = built.clone();
built.parts.push(entry_name.clone());
// Symlinks to directories shouldn't have a trailing slash (#13275)
built.isdir = entry_isdir && !entry.path().is_symlink();
if !want_directory || entry_isdir {
matcher.add(entry_name.clone(), (entry_name, built));
}
}
}
let mut completions = vec![];
for (entry_name, built) in matcher.results() {
for (entry_name, built) in sorted_entries {
match partial.split_first() {
Some((base, rest)) => {
// We use `isdir` to confirm that the current component has
// at least one next component or a slash.
// Serves as confirmation to ignore longer completions for
// components in between.
if !rest.is_empty() || isdir {
// Don't show longer completions if we have an exact match (#13204, #14794)
let exact_match = enable_exact_match
&& (if options.case_sensitive {
entry_name.eq(base)
} else {
entry_name.eq_ignore_case(base)
});
completions.extend(complete_rec(
rest,
&[built],
options,
want_directory,
isdir,
exact_match,
));
if exact_match {
break;
if matches(base, &entry_name, options) {
// We use `isdir` to confirm that the current component has
// at least one next component or a slash.
// Serves as confirmation to ignore longer completions for
// components in between.
if !rest.is_empty() || isdir {
completions.extend(complete_rec(
rest,
&built,
cwd,
options,
want_directory,
isdir,
));
} else {
completions.push(built);
}
} else {
completions.push(built);
}
if entry_name.eq(base)
&& matches!(options.match_algorithm, MatchAlgorithm::Prefix)
&& isdir
{
break;
}
}
None => {
@ -150,7 +133,7 @@ impl OriginalCwd {
}
}
pub fn surround_remove(partial: &str) -> String {
fn surround_remove(partial: &str) -> String {
for c in ['`', '"', '\''] {
if partial.starts_with(c) {
let ret = partial.strip_prefix(c).unwrap_or(partial);
@ -164,58 +147,37 @@ pub fn surround_remove(partial: &str) -> String {
partial.to_string()
}
pub struct FileSuggestion {
pub span: nu_protocol::Span,
pub path: String,
pub style: Option<Style>,
pub is_dir: bool,
}
/// # Parameters
/// * `cwds` - A list of directories in which to search. The only reason this isn't a single string
/// is that dotnu_completions searches in multiple directories at once
pub fn complete_item(
want_directory: bool,
span: nu_protocol::Span,
partial: &str,
cwds: &[impl AsRef<str>],
cwd: &str,
options: &CompletionOptions,
engine_state: &EngineState,
stack: &Stack,
) -> Vec<FileSuggestion> {
let cleaned_partial = surround_remove(partial);
let isdir = cleaned_partial.ends_with(is_separator);
let expanded_partial = expand_ndots(Path::new(&cleaned_partial));
let should_collapse_dots = expanded_partial != Path::new(&cleaned_partial);
let mut partial = expanded_partial.to_string_lossy().to_string();
) -> Vec<(nu_protocol::Span, String, Option<Style>)> {
let partial = surround_remove(partial);
let isdir = partial.ends_with(is_separator);
#[cfg(unix)]
let path_separator = SEP;
#[cfg(windows)]
let path_separator = cleaned_partial
let path_separator = partial
.chars()
.rfind(|c: &char| is_separator(*c))
.unwrap_or(SEP);
let cwd_pathbuf = Path::new(cwd).to_path_buf();
let ls_colors = (engine_state.config.use_ls_colors_completions
&& engine_state.config.use_ansi_coloring)
.then(|| {
let ls_colors_env_str = match stack.get_env_var(engine_state, "LS_COLORS") {
Some(v) => env_to_string("LS_COLORS", &v, engine_state, stack).ok(),
None => None,
};
get_ls_colors(ls_colors_env_str)
});
// Handle the trailing dot case
if cleaned_partial.ends_with(&format!("{path_separator}.")) {
partial.push_str(&format!("{path_separator}."));
}
let cwd_pathbufs: Vec<_> = cwds
.iter()
.map(|cwd| Path::new(cwd.as_ref()).to_path_buf())
.collect();
let ls_colors = (engine_state.config.completions.use_ls_colors
&& engine_state.config.use_ansi_coloring.get(engine_state))
.then(|| {
let ls_colors_env_str = stack
.get_env_var(engine_state, "LS_COLORS")
.and_then(|v| env_to_string("LS_COLORS", v, engine_state, stack).ok());
get_ls_colors(ls_colors_env_str)
});
let mut cwds = cwd_pathbufs.clone();
let mut cwd = cwd_pathbuf.clone();
let mut prefix_len = 0;
let mut original_cwd = OriginalCwd::None;
@ -223,21 +185,25 @@ pub fn complete_item(
match components.peek().cloned() {
Some(c @ Component::Prefix(..)) => {
// windows only by definition
cwds = vec![[c, Component::RootDir].iter().collect()];
components.next();
if let Some(Component::RootDir) = components.peek().cloned() {
components.next();
};
cwd = [c, Component::RootDir].iter().collect();
prefix_len = c.as_os_str().len();
original_cwd = OriginalCwd::Prefix(c.as_os_str().to_string_lossy().into_owned());
}
Some(c @ Component::RootDir) => {
components.next();
// This is kind of a hack. When joining an empty string with the rest,
// we add the slash automagically
cwds = vec![PathBuf::from(c.as_os_str())];
cwd = PathBuf::from(c.as_os_str());
prefix_len = 1;
original_cwd = OriginalCwd::Prefix(String::new());
}
Some(Component::Normal(home)) if home.to_string_lossy() == "~" => {
cwds = home_dir()
.map(|dir| vec![dir.into()])
.unwrap_or(cwd_pathbufs);
components.next();
cwd = home_dir().map(Into::into).unwrap_or(cwd_pathbuf);
prefix_len = 1;
original_cwd = OriginalCwd::Home;
}
@ -254,68 +220,52 @@ pub fn complete_item(
complete_rec(
partial.as_slice(),
&cwds
.into_iter()
.map(|cwd| PathBuiltFromString {
cwd,
parts: Vec::new(),
isdir: false,
})
.collect::<Vec<_>>(),
&PathBuiltFromString::default(),
&cwd,
options,
want_directory,
isdir,
options.match_algorithm == MatchAlgorithm::Prefix,
)
.into_iter()
.map(|mut p| {
if should_collapse_dots {
p = collapse_ndots(p);
}
let is_dir = p.isdir;
.map(|p| {
let path = original_cwd.apply(p, path_separator);
let real_path = expand_to_real_path(&path);
let metadata = std::fs::symlink_metadata(&real_path).ok();
let style = ls_colors.as_ref().map(|lsc| {
lsc.style_for_path_with_metadata(&real_path, metadata.as_ref())
.map(lscolors::Style::to_nu_ansi_term_style)
.unwrap_or_default()
lsc.style_for_path_with_metadata(
&path,
std::fs::symlink_metadata(expand_to_real_path(&path))
.ok()
.as_ref(),
)
.map(lscolors::Style::to_nu_ansi_term_style)
.unwrap_or_default()
});
FileSuggestion {
span,
path: escape_path(path),
style,
is_dir,
}
(span, escape_path(path, want_directory), style)
})
.collect()
}
// Fix files or folders with quotes or hashes
pub fn escape_path(path: String) -> String {
pub fn escape_path(path: String, dir: bool) -> String {
// make glob pattern have the highest priority.
if nu_glob::is_glob(path.as_str()) || path.contains('`') {
// expand home `~` for https://github.com/nushell/nushell/issues/13905
let pathbuf = nu_path::expand_tilde(path);
let path = pathbuf.to_string_lossy();
if path.contains('\'') {
// decide to use double quotes
// Path as Debug will do the escaping for `"`, `\`
format!("{:?}", path)
let glob_contaminated = path.contains(['[', '*', ']', '?']);
if glob_contaminated {
return if path.contains('\'') {
// use double quotes; `"` inside the path also needs escaping,
// or else users can't do anything with the completed path.
format!("\"{}\"", path.replace('"', r#"\""#))
} else {
format!("'{path}'")
}
};
}
let filename_contaminated = !dir && path.contains(['\'', '"', ' ', '#', '(', ')']);
let dirname_contaminated = dir && path.contains(['\'', '"', ' ', '#']);
let maybe_flag = path.starts_with('-');
let maybe_number = path.parse::<f64>().is_ok();
if filename_contaminated || dirname_contaminated || maybe_flag || maybe_number {
format!("`{path}`")
} else {
let contaminated =
path.contains(['\'', '"', ' ', '#', '(', ')', '{', '}', '[', ']', '|', ';']);
let maybe_flag = path.starts_with('-');
let maybe_variable = path.starts_with('$');
let maybe_number = path.parse::<f64>().is_ok();
if contaminated || maybe_flag || maybe_variable || maybe_number {
format!("`{path}`")
} else {
path
}
path
}
}
@ -326,12 +276,12 @@ pub struct AdjustView {
}
pub fn adjust_if_intermediate(
prefix: &str,
prefix: &[u8],
working_set: &StateWorkingSet,
mut span: nu_protocol::Span,
) -> AdjustView {
let span_contents = String::from_utf8_lossy(working_set.get_span_contents(span)).to_string();
let mut prefix = prefix.to_string();
let mut prefix = String::from_utf8_lossy(prefix).to_string();
// A difference of 1 because of the cursor's unicode code point in between.
// Using .chars().count() because unicode and Windows.
@ -352,37 +302,41 @@ pub fn adjust_if_intermediate(
}
}
/// Collapse multiple ".." components into n-dots.
///
/// It performs the reverse operation of `expand_ndots`, collapsing sequences of ".." into n-dots,
/// such as "..." and "....".
///
/// The resulting path will use platform-specific path separators, regardless of what path separators were used in the input.
fn collapse_ndots(path: PathBuiltFromString) -> PathBuiltFromString {
let mut result = PathBuiltFromString {
parts: Vec::with_capacity(path.parts.len()),
isdir: path.isdir,
cwd: path.cwd,
};
let mut dot_count = 0;
for part in path.parts {
if part == ".." {
dot_count += 1;
} else {
if dot_count > 0 {
result.parts.push(".".repeat(dot_count + 1));
dot_count = 0;
}
result.parts.push(part);
}
}
// Add any remaining dots
if dot_count > 0 {
result.parts.push(".".repeat(dot_count + 1));
}
result
/// Convenience function to sort suggestions using [`sort_completions`]
pub fn sort_suggestions(
prefix: &str,
items: Vec<SemanticSuggestion>,
options: &CompletionOptions,
) -> Vec<SemanticSuggestion> {
sort_completions(prefix, items, options, |it| &it.suggestion.value)
}
/// # Arguments
/// * `prefix` - What the user's typed, for sorting by fuzzy matcher score
pub fn sort_completions<T>(
prefix: &str,
mut items: Vec<T>,
options: &CompletionOptions,
get_value: fn(&T) -> &str,
) -> Vec<T> {
// Sort items
if options.sort == CompletionSort::Smart && options.match_algorithm == MatchAlgorithm::Fuzzy {
let mut matcher = SkimMatcherV2::default();
if options.case_sensitive {
matcher = matcher.respect_case();
} else {
matcher = matcher.ignore_case();
};
items.sort_by(|a, b| {
let a_str = get_value(a);
let b_str = get_value(b);
let a_score = matcher.fuzzy_match(a_str, prefix).unwrap_or_default();
let b_score = matcher.fuzzy_match(b_str, prefix).unwrap_or_default();
b_score.cmp(&a_score).then(a_str.cmp(b_str))
});
} else {
items.sort_by(|a, b| get_value(a).cmp(get_value(b)));
}
items
}
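
For a sense of the intended use, here is a rough sketch of sorting completion candidates with fuzzy matching and smart sort; the candidate strings and the function name are invented for illustration, and only types already imported in this file are assumed.

// Hypothetical example, not part of the repository: with Fuzzy matching and
// Smart sort, candidates are ordered by descending fuzzy score against the
// prefix, with ties broken alphabetically.
fn sort_example() -> Vec<String> {
    let options = CompletionOptions {
        match_algorithm: MatchAlgorithm::Fuzzy,
        sort: CompletionSort::Smart,
        ..Default::default()
    };
    let items = vec!["foo/bar".to_string(), "fob".to_string(), "foo bar".to_string()];
    // The exact match "fob" should score highest, so it is expected to come first.
    sort_completions("fob", items, &options, String::as_str)
}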

View File

@ -1,13 +1,7 @@
use fuzzy_matcher::{skim::SkimMatcherV2, FuzzyMatcher};
use nu_parser::trim_quotes_str;
use nu_protocol::{CompletionAlgorithm, CompletionSort};
use nu_utils::IgnoreCaseExt;
use nucleo_matcher::{
pattern::{Atom, AtomKind, CaseMatching, Normalization},
Config, Matcher, Utf32Str,
};
use std::{borrow::Cow, fmt::Display};
use super::SemanticSuggestion;
use std::fmt::Display;
/// Describes how suggestions should be matched.
#[derive(Copy, Clone, Debug, PartialEq)]
@ -18,12 +12,6 @@ pub enum MatchAlgorithm {
/// "git switch" is matched by "git sw"
Prefix,
/// Only show suggestions which have a substring matching with the given input
///
/// Example:
/// "git checkout" is matched by "checkout"
Substring,
/// Only show suggestions which contain the input chars at any place
///
/// Example:
@ -31,203 +19,39 @@ pub enum MatchAlgorithm {
Fuzzy,
}
pub struct NuMatcher<'a, T> {
options: &'a CompletionOptions,
needle: String,
state: State<T>,
}
enum State<T> {
Prefix {
/// Holds (haystack, item)
items: Vec<(String, T)>,
},
Substring {
/// Holds (haystack, item)
items: Vec<(String, T)>,
},
Fuzzy {
matcher: Matcher,
atom: Atom,
/// Holds (haystack, item, score)
items: Vec<(String, T, u16)>,
},
}
/// Filters and sorts suggestions
impl<T> NuMatcher<'_, T> {
/// # Arguments
///
/// * `needle` - The text to search for
pub fn new(needle: impl AsRef<str>, options: &CompletionOptions) -> NuMatcher<T> {
let needle = trim_quotes_str(needle.as_ref());
match options.match_algorithm {
MatchAlgorithm::Prefix => {
let lowercase_needle = if options.case_sensitive {
needle.to_owned()
} else {
needle.to_folded_case()
};
NuMatcher {
options,
needle: lowercase_needle,
state: State::Prefix { items: Vec::new() },
}
}
MatchAlgorithm::Substring => {
let lowercase_needle = if options.case_sensitive {
needle.to_owned()
} else {
needle.to_folded_case()
};
NuMatcher {
options,
needle: lowercase_needle,
state: State::Substring { items: Vec::new() },
}
}
MatchAlgorithm::Fuzzy => {
let atom = Atom::new(
needle,
if options.case_sensitive {
CaseMatching::Respect
} else {
CaseMatching::Ignore
},
Normalization::Smart,
AtomKind::Fuzzy,
false,
);
NuMatcher {
options,
needle: needle.to_owned(),
state: State::Fuzzy {
matcher: Matcher::new(Config::DEFAULT),
atom,
items: Vec::new(),
},
}
}
}
}
/// Returns whether or not the haystack matches the needle. If it does, `item` is added
/// to the list of matches (if given).
///
/// Helper to avoid code duplication between [NuMatcher::add] and [NuMatcher::matches].
fn matches_aux(&mut self, haystack: &str, item: Option<T>) -> bool {
impl MatchAlgorithm {
/// Returns whether the `needle` search text matches the given `haystack`.
pub fn matches_str(&self, haystack: &str, needle: &str) -> bool {
let haystack = trim_quotes_str(haystack);
match &mut self.state {
State::Prefix { items } => {
let haystack_folded = if self.options.case_sensitive {
Cow::Borrowed(haystack)
} else {
Cow::Owned(haystack.to_folded_case())
};
let matches = haystack_folded.starts_with(self.needle.as_str());
if matches {
if let Some(item) = item {
items.push((haystack.to_string(), item));
}
}
matches
}
State::Substring { items } => {
let haystack_folded = if self.options.case_sensitive {
Cow::Borrowed(haystack)
} else {
Cow::Owned(haystack.to_folded_case())
};
let matches = haystack_folded.contains(self.needle.as_str());
if matches {
if let Some(item) = item {
items.push((haystack.to_string(), item));
}
}
matches
}
State::Fuzzy {
matcher,
atom,
items,
} => {
let mut haystack_buf = Vec::new();
let haystack_utf32 = Utf32Str::new(trim_quotes_str(haystack), &mut haystack_buf);
let mut indices = Vec::new();
let Some(score) = atom.indices(haystack_utf32, matcher, &mut indices) else {
return false;
};
if let Some(item) = item {
items.push((haystack.to_string(), item, score));
}
true
let needle = trim_quotes_str(needle);
match *self {
MatchAlgorithm::Prefix => haystack.starts_with(needle),
MatchAlgorithm::Fuzzy => {
let matcher = SkimMatcherV2::default();
matcher.fuzzy_match(haystack, needle).is_some()
}
}
}
/// Add the given item if the given haystack matches the needle.
///
/// Returns whether the item was added.
pub fn add(&mut self, haystack: impl AsRef<str>, item: T) -> bool {
self.matches_aux(haystack.as_ref(), Some(item))
}
/// Returns whether the `needle` search text matches the given `haystack`.
pub fn matches_u8(&self, haystack: &[u8], needle: &[u8]) -> bool {
match *self {
MatchAlgorithm::Prefix => haystack.starts_with(needle),
MatchAlgorithm::Fuzzy => {
let haystack_str = String::from_utf8_lossy(haystack);
let needle_str = String::from_utf8_lossy(needle);
/// Returns whether the haystack matches the needle.
pub fn matches(&mut self, haystack: &str) -> bool {
self.matches_aux(haystack, None)
}
/// Get all the items that matched (sorted)
pub fn results(self) -> Vec<T> {
match self.state {
State::Prefix { mut items, .. } | State::Substring { mut items, .. } => {
items.sort_by(|(haystack1, _), (haystack2, _)| {
let cmp_sensitive = haystack1.cmp(haystack2);
if self.options.case_sensitive {
cmp_sensitive
} else {
haystack1
.to_folded_case()
.cmp(&haystack2.to_folded_case())
.then(cmp_sensitive)
}
});
items.into_iter().map(|(_, item)| item).collect::<Vec<_>>()
}
State::Fuzzy { mut items, .. } => {
match self.options.sort {
CompletionSort::Alphabetical => {
items.sort_by(|(haystack1, _, _), (haystack2, _, _)| {
haystack1.cmp(haystack2)
});
}
CompletionSort::Smart => {
items.sort_by(|(haystack1, _, score1), (haystack2, _, score2)| {
score2.cmp(score1).then(haystack1.cmp(haystack2))
});
}
}
items
.into_iter()
.map(|(_, item, _)| item)
.collect::<Vec<_>>()
let matcher = SkimMatcherV2::default();
matcher.fuzzy_match(&haystack_str, &needle_str).is_some()
}
}
}
}
impl NuMatcher<'_, SemanticSuggestion> {
pub fn add_semantic_suggestion(&mut self, sugg: SemanticSuggestion) -> bool {
let value = sugg.suggestion.value.to_string();
self.add(value, sugg)
}
}
impl From<CompletionAlgorithm> for MatchAlgorithm {
fn from(value: CompletionAlgorithm) -> Self {
match value {
CompletionAlgorithm::Prefix => MatchAlgorithm::Prefix,
CompletionAlgorithm::Substring => MatchAlgorithm::Substring,
CompletionAlgorithm::Fuzzy => MatchAlgorithm::Fuzzy,
}
}
@ -239,7 +63,6 @@ impl TryFrom<String> for MatchAlgorithm {
fn try_from(value: String) -> Result<Self, Self::Error> {
match value.as_str() {
"prefix" => Ok(Self::Prefix),
"substring" => Ok(Self::Substring),
"fuzzy" => Ok(Self::Fuzzy),
_ => Err(InvalidMatchAlgorithm::Unknown),
}
@ -264,6 +87,7 @@ impl std::error::Error for InvalidMatchAlgorithm {}
#[derive(Clone)]
pub struct CompletionOptions {
pub case_sensitive: bool,
pub positional: bool,
pub match_algorithm: MatchAlgorithm,
pub sort: CompletionSort,
}
@ -272,6 +96,7 @@ impl Default for CompletionOptions {
fn default() -> Self {
Self {
case_sensitive: true,
positional: true,
match_algorithm: MatchAlgorithm::Prefix,
sort: Default::default(),
}
@ -280,70 +105,35 @@ impl Default for CompletionOptions {
#[cfg(test)]
mod test {
use rstest::rstest;
use super::MatchAlgorithm;
use super::{CompletionOptions, MatchAlgorithm, NuMatcher};
#[test]
fn match_algorithm_prefix() {
let algorithm = MatchAlgorithm::Prefix;
#[rstest]
#[case(MatchAlgorithm::Prefix, "example text", "", true)]
#[case(MatchAlgorithm::Prefix, "example text", "examp", true)]
#[case(MatchAlgorithm::Prefix, "example text", "text", false)]
#[case(MatchAlgorithm::Substring, "example text", "", true)]
#[case(MatchAlgorithm::Substring, "example text", "text", true)]
#[case(MatchAlgorithm::Substring, "example text", "mplxt", false)]
#[case(MatchAlgorithm::Fuzzy, "example text", "", true)]
#[case(MatchAlgorithm::Fuzzy, "example text", "examp", true)]
#[case(MatchAlgorithm::Fuzzy, "example text", "ext", true)]
#[case(MatchAlgorithm::Fuzzy, "example text", "mplxt", true)]
#[case(MatchAlgorithm::Fuzzy, "example text", "mpp", false)]
fn match_algorithm_simple(
#[case] match_algorithm: MatchAlgorithm,
#[case] haystack: &str,
#[case] needle: &str,
#[case] should_match: bool,
) {
let options = CompletionOptions {
match_algorithm,
..Default::default()
};
let mut matcher = NuMatcher::new(needle, &options);
matcher.add(haystack, haystack);
if should_match {
assert_eq!(vec![haystack], matcher.results());
} else {
assert_ne!(vec![haystack], matcher.results());
}
assert!(algorithm.matches_str("example text", ""));
assert!(algorithm.matches_str("example text", "examp"));
assert!(!algorithm.matches_str("example text", "text"));
assert!(algorithm.matches_u8(&[1, 2, 3], &[]));
assert!(algorithm.matches_u8(&[1, 2, 3], &[1, 2]));
assert!(!algorithm.matches_u8(&[1, 2, 3], &[2, 3]));
}
#[test]
fn match_algorithm_fuzzy_sort_score() {
let options = CompletionOptions {
match_algorithm: MatchAlgorithm::Fuzzy,
..Default::default()
};
let mut matcher = NuMatcher::new("fob", &options);
for item in ["foo/bar", "fob", "foo bar"] {
matcher.add(item, item);
}
// Sort by score, then in alphabetical order
assert_eq!(vec!["fob", "foo bar", "foo/bar"], matcher.results());
}
fn match_algorithm_fuzzy() {
let algorithm = MatchAlgorithm::Fuzzy;
#[test]
fn match_algorithm_fuzzy_sort_strip() {
let options = CompletionOptions {
match_algorithm: MatchAlgorithm::Fuzzy,
..Default::default()
};
let mut matcher = NuMatcher::new("'love spaces' ", &options);
for item in [
"'i love spaces'",
"'i love spaces' so much",
"'lovespaces' ",
] {
matcher.add(item, item);
}
// Make sure the spaces are respected
assert_eq!(vec!["'i love spaces' so much"], matcher.results());
assert!(algorithm.matches_str("example text", ""));
assert!(algorithm.matches_str("example text", "examp"));
assert!(algorithm.matches_str("example text", "ext"));
assert!(algorithm.matches_str("example text", "mplxt"));
assert!(!algorithm.matches_str("example text", "mpp"));
assert!(algorithm.matches_u8(&[1, 2, 3], &[]));
assert!(algorithm.matches_u8(&[1, 2, 3], &[1, 2]));
assert!(algorithm.matches_u8(&[1, 2, 3], &[2, 3]));
assert!(algorithm.matches_u8(&[1, 2, 3], &[1, 3]));
assert!(!algorithm.matches_u8(&[1, 2, 3], &[2, 2]));
}
}
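
A rough, self-contained sketch of the behaviour these tests assert, not the actual `NuMatcher` implementation (which also scores candidates and honours `CompletionSort`): the three algorithms reduce to a prefix test, a substring test, and an in-order subsequence test.

```rust
// Illustrative only: `Algo` and `is_match` are stand-ins, not nushell types.
#[derive(Clone, Copy)]
enum Algo {
    Prefix,
    Substring,
    Fuzzy,
}

fn is_match(algo: Algo, haystack: &str, needle: &str) -> bool {
    match algo {
        Algo::Prefix => haystack.starts_with(needle),
        Algo::Substring => haystack.contains(needle),
        // Naive "fuzzy": every needle char appears in the haystack in order.
        Algo::Fuzzy => {
            let mut hay = haystack.chars();
            needle.chars().all(|n| hay.by_ref().any(|h| h == n))
        }
    }
}

fn main() {
    assert!(is_match(Algo::Prefix, "example text", "examp"));
    assert!(!is_match(Algo::Prefix, "example text", "text"));
    assert!(is_match(Algo::Substring, "example text", "text"));
    assert!(is_match(Algo::Fuzzy, "example text", "mplxt"));
    assert!(!is_match(Algo::Fuzzy, "example text", "mpp"));
}
```

The `match_algorithm_fuzzy_sort_score` test above additionally relies on score-based ranking, which this sketch does not model.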

View File

@ -6,81 +6,74 @@ use nu_engine::eval_call;
use nu_protocol::{
ast::{Argument, Call, Expr, Expression},
debugger::WithoutDebug,
engine::{EngineState, Stack, StateWorkingSet},
DeclId, PipelineData, Span, Type, Value,
engine::{Stack, StateWorkingSet},
CompletionSort, PipelineData, Span, Type, Value,
};
use nu_utils::IgnoreCaseExt;
use std::collections::HashMap;
use super::completion_options::NuMatcher;
use super::completion_common::sort_suggestions;
pub struct CustomCompletion<T: Completer> {
decl_id: DeclId,
pub struct CustomCompletion {
stack: Stack,
decl_id: usize,
line: String,
line_pos: usize,
fallback: T,
}
impl<T: Completer> CustomCompletion<T> {
pub fn new(decl_id: DeclId, line: String, line_pos: usize, fallback: T) -> Self {
impl CustomCompletion {
pub fn new(stack: Stack, decl_id: usize, line: String) -> Self {
Self {
stack,
decl_id,
line,
line_pos,
fallback,
}
}
}
impl<T: Completer> Completer for CustomCompletion<T> {
impl Completer for CustomCompletion {
fn fetch(
&mut self,
working_set: &StateWorkingSet,
stack: &Stack,
prefix: impl AsRef<str>,
_stack: &Stack,
prefix: Vec<u8>,
span: Span,
offset: usize,
orig_options: &CompletionOptions,
pos: usize,
completion_options: &CompletionOptions,
) -> Vec<SemanticSuggestion> {
// Call custom declaration
let mut stack_mut = stack.clone();
let mut eval = |engine_state: &EngineState| {
eval_call::<WithoutDebug>(
engine_state,
&mut stack_mut,
&Call {
decl_id: self.decl_id,
head: span,
arguments: vec![
Argument::Positional(Expression::new_unknown(
Expr::String(self.line.clone()),
Span::unknown(),
Type::String,
)),
Argument::Positional(Expression::new_unknown(
Expr::Int(self.line_pos as i64),
Span::unknown(),
Type::Int,
)),
],
parser_info: HashMap::new(),
},
PipelineData::empty(),
)
};
let result = if self.decl_id.get() < working_set.permanent_state.num_decls() {
eval(working_set.permanent_state)
} else {
let mut engine_state = working_set.permanent_state.clone();
let _ = engine_state.merge_delta(working_set.delta.clone());
eval(&engine_state)
};
// Line position
let line_pos = pos - offset;
let mut completion_options = orig_options.clone();
let mut should_sort = true;
// Call custom declaration
let result = eval_call::<WithoutDebug>(
working_set.permanent_state,
&mut self.stack,
&Call {
decl_id: self.decl_id,
head: span,
arguments: vec![
Argument::Positional(Expression::new_unknown(
Expr::String(self.line.clone()),
Span::unknown(),
Type::String,
)),
Argument::Positional(Expression::new_unknown(
Expr::Int(line_pos as i64),
Span::unknown(),
Type::Int,
)),
],
parser_info: HashMap::new(),
},
PipelineData::empty(),
);
let mut custom_completion_options = None;
// Parse result
let suggestions = match result.and_then(|data| data.into_value(span)) {
Ok(value) => match &value {
let suggestions = result
.and_then(|data| data.into_value(span))
.map(|value| match &value {
Value::Record { val, .. } => {
let completions = val
.get("completions")
@ -93,74 +86,78 @@ impl<T: Completer> Completer for CustomCompletion<T> {
let options = val.get("options");
if let Some(Value::Record { val: options, .. }) = &options {
if let Some(sort) = options.get("sort").and_then(|val| val.as_bool().ok()) {
should_sort = sort;
}
if let Some(case_sensitive) = options
.get("case_sensitive")
let should_sort = options
.get("sort")
.and_then(|val| val.as_bool().ok())
{
completion_options.case_sensitive = case_sensitive;
}
let positional =
options.get("positional").and_then(|val| val.as_bool().ok());
if positional.is_some() {
log::warn!("Use of the positional option is deprecated. Use the substring match algorithm instead.");
}
if let Some(algorithm) = options
.get("completion_algorithm")
.and_then(|option| option.coerce_string().ok())
.and_then(|option| option.try_into().ok())
{
completion_options.match_algorithm = algorithm;
if let Some(false) = positional {
if completion_options.match_algorithm == MatchAlgorithm::Prefix {
completion_options.match_algorithm = MatchAlgorithm::Substring
}
}
}
.unwrap_or(false);
custom_completion_options = Some(CompletionOptions {
case_sensitive: options
.get("case_sensitive")
.and_then(|val| val.as_bool().ok())
.unwrap_or(true),
positional: options
.get("positional")
.and_then(|val| val.as_bool().ok())
.unwrap_or(true),
match_algorithm: match options.get("completion_algorithm") {
Some(option) => option
.coerce_string()
.ok()
.and_then(|option| option.try_into().ok())
.unwrap_or(MatchAlgorithm::Prefix),
None => completion_options.match_algorithm,
},
sort: if should_sort {
CompletionSort::Alphabetical
} else {
CompletionSort::Smart
},
});
}
completions
}
Value::List { vals, .. } => map_value_completions(vals.iter(), span, offset),
Value::Nothing { .. } => {
return self.fallback.fetch(
working_set,
stack,
prefix,
span,
offset,
orig_options,
);
}
_ => {
log::error!(
"Custom completer returned invalid value of type {}",
value.get_type().to_string()
);
return vec![];
}
},
Err(e) => {
log::error!("Error getting custom completions: {e}");
return vec![];
}
};
_ => vec![],
})
.unwrap_or_default();
let mut matcher = NuMatcher::new(prefix, &completion_options);
if should_sort {
for sugg in suggestions {
matcher.add_semantic_suggestion(sugg);
}
matcher.results()
} else {
suggestions
.into_iter()
.filter(|sugg| matcher.matches(&sugg.suggestion.value))
.collect()
}
let options = custom_completion_options
.as_ref()
.unwrap_or(completion_options);
let suggestions = filter(&prefix, suggestions, completion_options);
sort_suggestions(&String::from_utf8_lossy(&prefix), suggestions, options)
}
}
fn filter(
prefix: &[u8],
items: Vec<SemanticSuggestion>,
options: &CompletionOptions,
) -> Vec<SemanticSuggestion> {
items
.into_iter()
.filter(|it| match options.match_algorithm {
MatchAlgorithm::Prefix => match (options.case_sensitive, options.positional) {
(true, true) => it.suggestion.value.as_bytes().starts_with(prefix),
(true, false) => it
.suggestion
.value
.contains(std::str::from_utf8(prefix).unwrap_or("")),
(false, positional) => {
let value = it.suggestion.value.to_folded_case();
let prefix = std::str::from_utf8(prefix).unwrap_or("").to_folded_case();
if positional {
value.starts_with(&prefix)
} else {
value.contains(&prefix)
}
}
},
MatchAlgorithm::Fuzzy => options
.match_algorithm
.matches_u8(it.suggestion.value.as_bytes(), prefix),
})
.collect()
}
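
For orientation, the hunk above merges the `options` record a custom completer may return (keys `sort`, `case_sensitive`, `positional`, `completion_algorithm`; `positional` is deprecated in the newer code) with the caller's defaults. The sketch below models only that merging step with plain Rust types; `ReturnedOptions` and `EffectiveOptions` are illustrative names, not nushell types.

```rust
// A hypothetical, simplified model of the option merging above.
#[derive(Debug, Default)]
struct ReturnedOptions {
    sort: Option<bool>,
    case_sensitive: Option<bool>,
    completion_algorithm: Option<String>, // "prefix", "substring", or "fuzzy"
}

#[derive(Debug, Clone)]
struct EffectiveOptions {
    case_sensitive: bool,
    match_algorithm: String,
    sort_alphabetically: bool,
}

fn resolve(returned: &ReturnedOptions, defaults: &EffectiveOptions) -> EffectiveOptions {
    EffectiveOptions {
        // Unset keys fall back to the options the completer was called with.
        case_sensitive: returned.case_sensitive.unwrap_or(defaults.case_sensitive),
        match_algorithm: returned
            .completion_algorithm
            .clone()
            .unwrap_or_else(|| defaults.match_algorithm.clone()),
        sort_alphabetically: returned.sort.unwrap_or(true),
    }
}

fn main() {
    let defaults = EffectiveOptions {
        case_sensitive: true,
        match_algorithm: "prefix".into(),
        sort_alphabetically: true,
    };
    let returned = ReturnedOptions {
        completion_algorithm: Some("fuzzy".into()),
        ..Default::default()
    };
    println!("{:?}", resolve(&returned, &defaults));
}
```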

View File

@ -2,6 +2,7 @@ use crate::completions::{
completion_common::{adjust_if_intermediate, complete_item, AdjustView},
Completer, CompletionOptions,
};
use nu_ansi_term::Style;
use nu_protocol::{
engine::{EngineState, Stack, StateWorkingSet},
Span,
@ -9,22 +10,29 @@ use nu_protocol::{
use reedline::Suggestion;
use std::path::Path;
use super::{completion_common::FileSuggestion, SemanticSuggestion, SuggestionKind};
use super::SemanticSuggestion;
pub struct DirectoryCompletion;
#[derive(Clone, Default)]
pub struct DirectoryCompletion {}
impl DirectoryCompletion {
pub fn new() -> Self {
Self::default()
}
}
impl Completer for DirectoryCompletion {
fn fetch(
&mut self,
working_set: &StateWorkingSet,
stack: &Stack,
prefix: impl AsRef<str>,
prefix: Vec<u8>,
span: Span,
offset: usize,
_pos: usize,
options: &CompletionOptions,
) -> Vec<SemanticSuggestion> {
let AdjustView { prefix, span, .. } =
adjust_if_intermediate(prefix.as_ref(), working_set, span);
let AdjustView { prefix, span, .. } = adjust_if_intermediate(&prefix, working_set, span);
// Filter only the folders
#[allow(deprecated)]
@ -39,15 +47,16 @@ impl Completer for DirectoryCompletion {
.into_iter()
.map(move |x| SemanticSuggestion {
suggestion: Suggestion {
value: x.path,
style: x.style,
value: x.1,
style: x.2,
span: reedline::Span {
start: x.span.start - offset,
end: x.span.end - offset,
start: x.0.start - offset,
end: x.0.end - offset,
},
..Suggestion::default()
},
kind: Some(SuggestionKind::Directory),
// TODO????
kind: None,
})
.collect();
@ -83,6 +92,6 @@ pub fn directory_completion(
options: &CompletionOptions,
engine_state: &EngineState,
stack: &Stack,
) -> Vec<FileSuggestion> {
complete_item(true, span, partial, &[cwd], options, engine_state, stack)
) -> Vec<(nu_protocol::Span, String, Option<Style>)> {
complete_item(true, span, partial, cwd, options, engine_state, stack)
}
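
One detail worth calling out in the conversion above: completion spans are absolute offsets into the parsed buffer, while reedline suggestions want offsets relative to the current line, hence the `- offset` on both ends. A minimal illustration:

```rust
// Hypothetical helper; the real code adjusts `reedline::Span` inline.
#[derive(Debug, PartialEq)]
struct LineSpan {
    start: usize,
    end: usize,
}

fn to_line_span(start: usize, end: usize, offset: usize) -> LineSpan {
    LineSpan {
        start: start - offset,
        end: end - offset,
    }
}

fn main() {
    // The replaced word occupies bytes 42..47 of the whole buffer, and the
    // current line starts at byte 40, so the suggestion replaces bytes 2..7.
    assert_eq!(to_line_span(42, 47, 40), LineSpan { start: 2, end: 7 });
}
```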

View File

@ -1,22 +1,20 @@
use crate::completions::{
completion_common::{surround_remove, FileSuggestion},
completion_options::NuMatcher,
file_path_completion, Completer, CompletionOptions, SemanticSuggestion, SuggestionKind,
};
use nu_path::expand_tilde;
use crate::completions::{file_path_completion, Completer, CompletionOptions};
use nu_protocol::{
engine::{Stack, StateWorkingSet, VirtualPath},
engine::{Stack, StateWorkingSet},
Span,
};
use reedline::Suggestion;
use std::{
collections::HashSet,
path::{is_separator, PathBuf, MAIN_SEPARATOR_STR},
};
use std::path::{is_separator, Path, MAIN_SEPARATOR as SEP, MAIN_SEPARATOR_STR};
pub struct DotNuCompletion {
/// e.g. use std/a<tab>
pub std_virtual_path: bool,
use super::{completion_common::sort_suggestions, SemanticSuggestion};
#[derive(Clone, Default)]
pub struct DotNuCompletion {}
impl DotNuCompletion {
pub fn new() -> Self {
Self::default()
}
}
impl Completer for DotNuCompletion {
@ -24,185 +22,114 @@ impl Completer for DotNuCompletion {
&mut self,
working_set: &StateWorkingSet,
stack: &Stack,
prefix: impl AsRef<str>,
prefix: Vec<u8>,
span: Span,
offset: usize,
_pos: usize,
options: &CompletionOptions,
) -> Vec<SemanticSuggestion> {
let prefix_str = prefix.as_ref();
let start_with_backquote = prefix_str.starts_with('`');
let end_with_backquote = prefix_str.ends_with('`');
let prefix_str = prefix_str.replace('`', "");
// e.g. `./`, `..\`, `/`
let not_lib_dirs = prefix_str
.chars()
.find(|c| *c != '.')
.is_some_and(is_separator);
let mut search_dirs: Vec<PathBuf> = vec![];
let prefix_str = String::from_utf8_lossy(&prefix).replace('`', "");
let mut search_dirs: Vec<String> = vec![];
let (base, partial) = if let Some((parent, remain)) = prefix_str.rsplit_once(is_separator) {
// If prefix_str is only a word we want to search in the current dir.
// "/xx" should be split to "/" and "xx".
if parent.is_empty() {
(MAIN_SEPARATOR_STR, remain)
} else {
(parent, remain)
}
} else {
(".", prefix_str.as_str())
};
// If prefix_str is only a word we want to search in the current dir
let (base, partial) = prefix_str
.rsplit_once(is_separator)
.unwrap_or((".", &prefix_str));
let base_dir = base.replace(is_separator, MAIN_SEPARATOR_STR);
let mut partial = partial.to_string();
// On windows, this standardizes paths to use \
let mut is_current_folder = false;
// Fetch the lib dirs
// NOTE: 2 ways to setup `NU_LIB_DIRS`
// 1. `const NU_LIB_DIRS = [paths]`, equal to `nu -I paths`
// 2. `$env.NU_LIB_DIRS = [paths]`
let const_lib_dirs = working_set
.find_variable(b"$NU_LIB_DIRS")
.and_then(|vid| working_set.get_variable(vid).const_val.as_ref());
let env_lib_dirs = working_set.get_env_var("NU_LIB_DIRS");
let lib_dirs: HashSet<PathBuf> = [const_lib_dirs, env_lib_dirs]
.into_iter()
.flatten()
.flat_map(|lib_dirs| {
lib_dirs
.as_list()
.into_iter()
.flat_map(|it| it.iter().filter_map(|x| x.to_path().ok()))
.map(expand_tilde)
})
.collect();
let lib_dirs: Vec<String> = if let Some(lib_dirs) = working_set.get_env_var("NU_LIB_DIRS") {
lib_dirs
.as_list()
.into_iter()
.flat_map(|it| {
it.iter().map(|x| {
x.to_path()
.expect("internal error: failed to convert lib path")
})
})
.map(|it| {
it.into_os_string()
.into_string()
.expect("internal error: failed to convert OS path")
})
.collect()
} else {
vec![]
};
// Check if the base_dir is a folder
let cwd = working_set.permanent_state.cwd(None);
// rsplit_once removes the separator
if base_dir != "." {
let expanded_base_dir = expand_tilde(&base_dir);
let is_base_dir_relative = expanded_base_dir.is_relative();
// Search in base_dir as well as lib_dirs.
// After expanded, base_dir can be a relative path or absolute path.
// If relative, we join "current working dir" with it to get subdirectory and add to search_dirs.
// If absolute, we add it to search_dirs.
if let Ok(mut cwd) = cwd {
if is_base_dir_relative {
cwd.push(&base_dir);
search_dirs.push(cwd.into_std_path_buf());
} else {
search_dirs.push(expanded_base_dir);
}
}
if !not_lib_dirs {
search_dirs.extend(lib_dirs.into_iter().map(|mut dir| {
dir.push(&base_dir);
dir
}));
}
// Add the base dir into the directories to be searched
search_dirs.push(base_dir.clone());
// Reset the partial adding the basic dir back
// in order to make the span replace work properly
let mut base_dir_partial = base_dir;
base_dir_partial.push_str(&partial);
partial = base_dir_partial;
} else {
if let Ok(cwd) = cwd {
search_dirs.push(cwd.into_std_path_buf());
}
if !not_lib_dirs {
search_dirs.extend(lib_dirs);
}
// Fetch the current folder
#[allow(deprecated)]
let current_folder = working_set.permanent_state.current_work_dir();
is_current_folder = true;
// Add the current folder and the lib dirs into the
// directories to be searched
search_dirs.push(current_folder);
search_dirs.extend(lib_dirs);
}
// Fetch the files filtering the ones that ends with .nu
// and transform them into suggestions
let mut completions = file_path_completion(
span,
partial,
&search_dirs
.iter()
.filter_map(|d| d.to_str())
.collect::<Vec<_>>(),
options,
working_set.permanent_state,
stack,
);
if self.std_virtual_path {
let mut matcher = NuMatcher::new(partial, options);
let base_dir = surround_remove(&base_dir);
if base_dir == "." {
let surround_prefix = partial
.chars()
.take_while(|c| "`'\"".contains(*c))
.collect::<String>();
for path in ["std", "std-rfc"] {
let path = format!("{}{}", surround_prefix, path);
matcher.add(
path.clone(),
FileSuggestion {
span,
path,
style: None,
is_dir: true,
},
);
}
} else if let Some(VirtualPath::Dir(sub_paths)) =
working_set.find_virtual_path(&base_dir)
{
for sub_vp_id in sub_paths {
let (path, sub_vp) = working_set.get_virtual_path(*sub_vp_id);
let path = path
.strip_prefix(&format!("{}/", base_dir))
.unwrap_or(path)
.to_string();
matcher.add(
path.clone(),
FileSuggestion {
path,
span,
style: None,
is_dir: matches!(sub_vp, VirtualPath::Dir(_)),
},
);
}
}
completions.extend(matcher.results());
}
completions
let output: Vec<SemanticSuggestion> = search_dirs
.into_iter()
// Different base dir, so we list the .nu files or folders
.filter(|it| {
// for paths with spaces in them
let path = it.path.trim_end_matches('`');
path.ends_with(".nu") || it.is_dir
.flat_map(|search_dir| {
let completions = file_path_completion(
span,
&partial,
&search_dir,
options,
working_set.permanent_state,
stack,
);
completions
.into_iter()
.filter(move |it| {
// Different base dir, so we list the .nu files or folders
if !is_current_folder {
it.1.ends_with(".nu") || it.1.ends_with(SEP)
} else {
// Lib dirs, so we filter only the .nu files or directory modules
if it.1.ends_with(SEP) {
Path::new(&search_dir).join(&it.1).join("mod.nu").exists()
} else {
it.1.ends_with(".nu")
}
}
})
.map(move |x| SemanticSuggestion {
suggestion: Suggestion {
value: x.1,
style: x.2,
span: reedline::Span {
start: x.0.start - offset,
end: x.0.end - offset,
},
append_whitespace: true,
..Suggestion::default()
},
// TODO????
kind: None,
})
})
.map(|x| {
let append_whitespace = !x.is_dir && (!start_with_backquote || end_with_backquote);
// Re-calculate the span to replace
let mut span_offset = 0;
let mut value = x.path.to_string();
// Complete only the last path component
if base_dir == MAIN_SEPARATOR_STR {
span_offset = base_dir.len()
} else if base_dir != "." {
span_offset = base_dir.len() + 1
}
// Retain only one '`'
if start_with_backquote {
value = value.trim_start_matches('`').to_string();
span_offset += 1;
}
// Add the backquote back
if end_with_backquote && !value.ends_with('`') {
value.push('`');
}
let end = x.span.end - offset;
let start = std::cmp::min(end, x.span.start - offset + span_offset);
SemanticSuggestion {
suggestion: Suggestion {
value,
style: x.style,
span: reedline::Span { start, end },
append_whitespace,
..Suggestion::default()
},
kind: Some(SuggestionKind::Module),
}
})
.collect::<Vec<_>>()
.collect();
sort_suggestions(&prefix_str, output, options)
}
}
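
The base/partial split used above is easy to get wrong at the filesystem root, which is why the newer code special-cases a leading separator. A self-contained sketch of that splitting rule:

```rust
use std::path::{is_separator, MAIN_SEPARATOR_STR};

// Split the typed prefix at the last path separator into the directory to
// search and the partial name to complete. A bare word searches ".", and a
// leading separator keeps the root as the base instead of an empty string.
fn split_prefix(prefix: &str) -> (&str, &str) {
    match prefix.rsplit_once(is_separator) {
        Some(("", remain)) => (MAIN_SEPARATOR_STR, remain),
        Some((parent, remain)) => (parent, remain),
        None => (".", prefix),
    }
}

fn main() {
    assert_eq!(split_prefix("std/ut"), ("std", "ut"));
    assert_eq!(split_prefix("/xx"), (MAIN_SEPARATOR_STR, "xx"));
    assert_eq!(split_prefix("utils"), (".", "utils"));
}
```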

View File

@ -1,112 +0,0 @@
use crate::completions::{
completion_common::surround_remove, completion_options::NuMatcher, Completer,
CompletionOptions, SemanticSuggestion, SuggestionKind,
};
use nu_protocol::{
engine::{Stack, StateWorkingSet},
ModuleId, Span,
};
use reedline::Suggestion;
pub struct ExportableCompletion<'a> {
pub module_id: ModuleId,
pub temp_working_set: Option<StateWorkingSet<'a>>,
}
/// If name contains space, wrap it in quotes
fn wrapped_name(name: String) -> String {
if !name.contains(' ') {
return name;
}
if name.contains('\'') {
format!("\"{}\"", name.replace('"', r#"\""#))
} else {
format!("'{name}'")
}
}
impl Completer for ExportableCompletion<'_> {
fn fetch(
&mut self,
working_set: &StateWorkingSet,
_stack: &Stack,
prefix: impl AsRef<str>,
span: Span,
offset: usize,
options: &CompletionOptions,
) -> Vec<SemanticSuggestion> {
let mut matcher = NuMatcher::<()>::new(surround_remove(prefix.as_ref()), options);
let mut results = Vec::new();
let span = reedline::Span {
start: span.start - offset,
end: span.end - offset,
};
// TODO: use matcher.add_lazy to lazy evaluate an item if it matches the prefix
let mut add_suggestion = |value: String,
description: Option<String>,
extra: Option<Vec<String>>,
kind: SuggestionKind| {
results.push(SemanticSuggestion {
suggestion: Suggestion {
value,
span,
description,
extra,
..Suggestion::default()
},
kind: Some(kind),
});
};
let working_set = self.temp_working_set.as_ref().unwrap_or(working_set);
let module = working_set.get_module(self.module_id);
for (name, decl_id) in &module.decls {
let name = String::from_utf8_lossy(name).to_string();
if matcher.matches(&name) {
let cmd = working_set.get_decl(*decl_id);
add_suggestion(
wrapped_name(name),
Some(cmd.description().to_string()),
None,
// `None` here avoids arguments being expanded by snippet edit style for lsp
SuggestionKind::Command(cmd.command_type(), None),
);
}
}
for (name, module_id) in &module.submodules {
let name = String::from_utf8_lossy(name).to_string();
if matcher.matches(&name) {
let comments = working_set.get_module_comments(*module_id).map(|spans| {
spans
.iter()
.map(|sp| {
String::from_utf8_lossy(working_set.get_span_contents(*sp)).into()
})
.collect::<Vec<String>>()
});
add_suggestion(
wrapped_name(name),
Some("Submodule".into()),
comments,
SuggestionKind::Module,
);
}
}
for (name, var_id) in &module.constants {
let name = String::from_utf8_lossy(name).to_string();
if matcher.matches(&name) {
let var = working_set.get_variable(*var_id);
add_suggestion(
wrapped_name(name),
var.const_val
.as_ref()
.and_then(|v| v.clone().coerce_into_string().ok()),
None,
SuggestionKind::Variable,
);
}
}
results
}
}
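
As a standalone illustration of the quoting rule in `wrapped_name` above: names containing spaces are wrapped in quotes so the completion stays a single token, preferring single quotes unless the name already contains one.

```rust
// Self-contained restatement of the quoting rule shown above (illustrative).
fn wrapped_name(name: &str) -> String {
    if !name.contains(' ') {
        return name.to_string();
    }
    if name.contains('\'') {
        // Fall back to double quotes, escaping any embedded double quotes.
        format!("\"{}\"", name.replace('"', "\\\""))
    } else {
        format!("'{name}'")
    }
}

fn main() {
    assert_eq!(wrapped_name("ls"), "ls");
    assert_eq!(wrapped_name("my command"), "'my command'");
    assert_eq!(wrapped_name("it's here"), "\"it's here\"");
}
```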

View File

@ -2,39 +2,49 @@ use crate::completions::{
completion_common::{adjust_if_intermediate, complete_item, AdjustView},
Completer, CompletionOptions,
};
use nu_ansi_term::Style;
use nu_protocol::{
engine::{EngineState, Stack, StateWorkingSet},
Span,
};
use nu_utils::IgnoreCaseExt;
use reedline::Suggestion;
use std::path::Path;
use super::{completion_common::FileSuggestion, SemanticSuggestion, SuggestionKind};
use super::SemanticSuggestion;
pub struct FileCompletion;
#[derive(Clone, Default)]
pub struct FileCompletion {}
impl FileCompletion {
pub fn new() -> Self {
Self::default()
}
}
impl Completer for FileCompletion {
fn fetch(
&mut self,
working_set: &StateWorkingSet,
stack: &Stack,
prefix: impl AsRef<str>,
prefix: Vec<u8>,
span: Span,
offset: usize,
_pos: usize,
options: &CompletionOptions,
) -> Vec<SemanticSuggestion> {
let AdjustView {
prefix,
span,
readjusted,
} = adjust_if_intermediate(prefix.as_ref(), working_set, span);
} = adjust_if_intermediate(&prefix, working_set, span);
#[allow(deprecated)]
let items: Vec<_> = complete_item(
readjusted,
span,
&prefix,
&[&working_set.permanent_state.current_work_dir()],
&working_set.permanent_state.current_work_dir(),
options,
working_set.permanent_state,
stack,
@ -42,19 +52,16 @@ impl Completer for FileCompletion {
.into_iter()
.map(move |x| SemanticSuggestion {
suggestion: Suggestion {
value: x.path,
style: x.style,
value: x.1,
style: x.2,
span: reedline::Span {
start: x.span.start - offset,
end: x.span.end - offset,
start: x.0.start - offset,
end: x.0.end - offset,
},
..Suggestion::default()
},
kind: Some(if x.is_dir {
SuggestionKind::Directory
} else {
SuggestionKind::File
}),
// TODO????
kind: None,
})
.collect();
@ -88,10 +95,21 @@ impl Completer for FileCompletion {
pub fn file_path_completion(
span: nu_protocol::Span,
partial: &str,
cwds: &[impl AsRef<str>],
cwd: &str,
options: &CompletionOptions,
engine_state: &EngineState,
stack: &Stack,
) -> Vec<FileSuggestion> {
complete_item(false, span, partial, cwds, options, engine_state, stack)
) -> Vec<(nu_protocol::Span, String, Option<Style>)> {
complete_item(false, span, partial, cwd, options, engine_state, stack)
}
pub fn matches(partial: &str, from: &str, options: &CompletionOptions) -> bool {
// Check for case sensitive
if !options.case_sensitive {
return options
.match_algorithm
.matches_str(&from.to_folded_case(), &partial.to_folded_case());
}
options.match_algorithm.matches_str(from, partial)
}
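
The `matches` helper above folds case on both sides before applying the match algorithm when the options are case-insensitive. A minimal sketch of that idea, with `to_lowercase` standing in for `to_folded_case` and a prefix test standing in for the configured algorithm:

```rust
fn candidate_matches(partial: &str, candidate: &str, case_sensitive: bool) -> bool {
    if case_sensitive {
        candidate.starts_with(partial)
    } else {
        // Fold both sides to a common case before comparing.
        candidate.to_lowercase().starts_with(&partial.to_lowercase())
    }
}

fn main() {
    assert!(candidate_matches("Doc", "Documents", true));
    assert!(!candidate_matches("doc", "Documents", true));
    assert!(candidate_matches("doc", "Documents", false));
}
```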

View File

@ -1,15 +1,22 @@
use crate::completions::{
completion_options::NuMatcher, Completer, CompletionOptions, SemanticSuggestion, SuggestionKind,
};
use crate::completions::{completion_common::sort_suggestions, Completer, CompletionOptions};
use nu_protocol::{
ast::{Expr, Expression},
engine::{Stack, StateWorkingSet},
DeclId, Span,
Span,
};
use reedline::Suggestion;
use super::SemanticSuggestion;
#[derive(Clone)]
pub struct FlagCompletion {
pub decl_id: DeclId,
expression: Expression,
}
impl FlagCompletion {
pub fn new(expression: Expression) -> Self {
Self { expression }
}
}
impl Completer for FlagCompletion {
@ -17,42 +24,73 @@ impl Completer for FlagCompletion {
&mut self,
working_set: &StateWorkingSet,
_stack: &Stack,
prefix: impl AsRef<str>,
prefix: Vec<u8>,
span: Span,
offset: usize,
_pos: usize,
options: &CompletionOptions,
) -> Vec<SemanticSuggestion> {
let mut matcher = NuMatcher::new(prefix, options);
let mut add_suggestion = |value: String, description: String| {
matcher.add_semantic_suggestion(SemanticSuggestion {
suggestion: Suggestion {
value,
description: Some(description),
span: reedline::Span {
start: span.start - offset,
end: span.end - offset,
},
append_whitespace: true,
..Suggestion::default()
},
kind: Some(SuggestionKind::Flag),
});
};
// Check if it's a flag
if let Expr::Call(call) = &self.expression.expr {
let decl = working_set.get_decl(call.decl_id);
let sig = decl.signature();
let decl = working_set.get_decl(self.decl_id);
let sig = decl.signature();
for named in &sig.named {
if let Some(short) = named.short {
let mut name = String::from("-");
name.push(short);
add_suggestion(name, named.desc.clone());
let mut output = vec![];
for named in &sig.named {
let flag_desc = &named.desc;
if let Some(short) = named.short {
let mut named = vec![0; short.len_utf8()];
short.encode_utf8(&mut named);
named.insert(0, b'-');
if options.match_algorithm.matches_u8(&named, &prefix) {
output.push(SemanticSuggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(&named).to_string(),
description: Some(flag_desc.to_string()),
span: reedline::Span {
start: span.start - offset,
end: span.end - offset,
},
append_whitespace: true,
..Suggestion::default()
},
// TODO????
kind: None,
});
}
}
if named.long.is_empty() {
continue;
}
let mut named = named.long.as_bytes().to_vec();
named.insert(0, b'-');
named.insert(0, b'-');
if options.match_algorithm.matches_u8(&named, &prefix) {
output.push(SemanticSuggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(&named).to_string(),
description: Some(flag_desc.to_string()),
span: reedline::Span {
start: span.start - offset,
end: span.end - offset,
},
append_whitespace: true,
..Suggestion::default()
},
// TODO????
kind: None,
});
}
}
if named.long.is_empty() {
continue;
}
add_suggestion(format!("--{}", named.long), named.desc.clone());
return sort_suggestions(&String::from_utf8_lossy(&prefix), output, options);
}
matcher.results()
vec![]
}
}
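
Both versions above generate one candidate per flag form: `-x` for a short flag and `--name` for a long flag, each paired with its description. A detached sketch of that generation, with `NamedArg` as a hypothetical stand-in for the signature's named arguments:

```rust
struct NamedArg {
    long: String,
    short: Option<char>,
    desc: String,
}

// Produce (flag, description) candidates for every named argument.
fn flag_candidates(named: &[NamedArg]) -> Vec<(String, String)> {
    let mut out = Vec::new();
    for arg in named {
        if let Some(short) = arg.short {
            out.push((format!("-{short}"), arg.desc.clone()));
        }
        if !arg.long.is_empty() {
            out.push((format!("--{}", arg.long), arg.desc.clone()));
        }
    }
    out
}

fn main() {
    let named = vec![NamedArg {
        long: "help".into(),
        short: Some('h'),
        desc: "Display the help message for this command".into(),
    }];
    for (flag, desc) in flag_candidates(&named) {
        println!("{flag}\t{desc}");
    }
}
```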

View File

@ -1,6 +1,4 @@
mod attribute_completions;
mod base;
mod cell_path_completions;
mod command_completions;
mod completer;
mod completion_common;
@ -8,23 +6,17 @@ mod completion_options;
mod custom_completions;
mod directory_completions;
mod dotnu_completions;
mod exportable_completions;
mod file_completions;
mod flag_completions;
mod operator_completions;
mod variable_completions;
pub use attribute_completions::{AttributableCompletion, AttributeCompletion};
pub use base::{Completer, SemanticSuggestion, SuggestionKind};
pub use cell_path_completions::CellPathCompletion;
pub use command_completions::CommandCompletion;
pub use completer::NuCompleter;
pub use completion_options::{CompletionOptions, MatchAlgorithm};
pub use custom_completions::CustomCompletion;
pub use directory_completions::DirectoryCompletion;
pub use dotnu_completions::DotNuCompletion;
pub use exportable_completions::ExportableCompletion;
pub use file_completions::{file_path_completion, FileCompletion};
pub use file_completions::{file_path_completion, matches, FileCompletion};
pub use flag_completions::FlagCompletion;
pub use operator_completions::OperatorCompletion;
pub use variable_completions::VariableCompletion;

View File

@ -1,277 +0,0 @@
use crate::completions::{
completion_options::NuMatcher, Completer, CompletionOptions, SemanticSuggestion, SuggestionKind,
};
use nu_protocol::{
ast::{self, Comparison, Expr, Expression},
engine::{Stack, StateWorkingSet},
Span, Type, Value, ENV_VARIABLE_ID,
};
use reedline::Suggestion;
use strum::{EnumMessage, IntoEnumIterator};
use super::cell_path_completions::eval_cell_path;
#[derive(Clone)]
pub struct OperatorCompletion<'a> {
pub left_hand_side: &'a Expression,
}
struct OperatorItem {
pub symbols: String,
pub description: String,
}
fn operator_to_item<T: EnumMessage + AsRef<str>>(op: T) -> OperatorItem {
OperatorItem {
symbols: op.as_ref().into(),
description: op.get_message().unwrap_or_default().into(),
}
}
fn common_comparison_ops() -> Vec<OperatorItem> {
vec![
operator_to_item(Comparison::In),
operator_to_item(Comparison::NotIn),
operator_to_item(Comparison::Equal),
operator_to_item(Comparison::NotEqual),
]
}
fn all_ops_for_immutable() -> Vec<OperatorItem> {
ast::Comparison::iter()
.map(operator_to_item)
.chain(ast::Math::iter().map(operator_to_item))
.chain(ast::Boolean::iter().map(operator_to_item))
.chain(ast::Bits::iter().map(operator_to_item))
.collect()
}
fn collection_comparison_ops() -> Vec<OperatorItem> {
let mut ops = common_comparison_ops();
ops.push(operator_to_item(Comparison::Has));
ops.push(operator_to_item(Comparison::NotHas));
ops
}
fn number_comparison_ops() -> Vec<OperatorItem> {
Comparison::iter()
.filter(|op| {
!matches!(
op,
Comparison::RegexMatch
| Comparison::NotRegexMatch
| Comparison::StartsWith
| Comparison::EndsWith
| Comparison::Has
| Comparison::NotHas
)
})
.map(operator_to_item)
.collect()
}
fn math_ops() -> Vec<OperatorItem> {
ast::Math::iter()
.filter(|op| !matches!(op, ast::Math::Concatenate | ast::Math::Pow))
.map(operator_to_item)
.collect()
}
fn bit_ops() -> Vec<OperatorItem> {
ast::Bits::iter().map(operator_to_item).collect()
}
fn all_assignment_ops() -> Vec<OperatorItem> {
ast::Assignment::iter().map(operator_to_item).collect()
}
fn numeric_assignment_ops() -> Vec<OperatorItem> {
ast::Assignment::iter()
.filter(|op| !matches!(op, ast::Assignment::ConcatenateAssign))
.map(operator_to_item)
.collect()
}
fn concat_assignment_ops() -> Vec<OperatorItem> {
vec![
operator_to_item(ast::Assignment::Assign),
operator_to_item(ast::Assignment::ConcatenateAssign),
]
}
fn valid_int_ops() -> Vec<OperatorItem> {
let mut ops = valid_float_ops();
ops.extend(bit_ops());
ops
}
fn valid_float_ops() -> Vec<OperatorItem> {
let mut ops = valid_value_with_unit_ops();
ops.push(operator_to_item(ast::Math::Pow));
ops
}
fn valid_string_ops() -> Vec<OperatorItem> {
let mut ops: Vec<OperatorItem> = Comparison::iter().map(operator_to_item).collect();
ops.push(operator_to_item(ast::Math::Concatenate));
ops.push(OperatorItem {
symbols: "like".into(),
description: Comparison::RegexMatch
.get_message()
.unwrap_or_default()
.into(),
});
ops.push(OperatorItem {
symbols: "not-like".into(),
description: Comparison::NotRegexMatch
.get_message()
.unwrap_or_default()
.into(),
});
ops
}
fn valid_list_ops() -> Vec<OperatorItem> {
let mut ops = collection_comparison_ops();
ops.push(operator_to_item(ast::Math::Concatenate));
ops
}
fn valid_binary_ops() -> Vec<OperatorItem> {
let mut ops = number_comparison_ops();
ops.extend(bit_ops());
ops.push(operator_to_item(ast::Math::Concatenate));
ops
}
fn valid_bool_ops() -> Vec<OperatorItem> {
let mut ops: Vec<OperatorItem> = ast::Boolean::iter().map(operator_to_item).collect();
ops.extend(common_comparison_ops());
ops
}
fn valid_value_with_unit_ops() -> Vec<OperatorItem> {
let mut ops = number_comparison_ops();
ops.extend(math_ops());
ops
}
fn ops_by_value(value: &Value, mutable: bool) -> Vec<OperatorItem> {
let mut ops = match value {
Value::Int { .. } => valid_int_ops(),
Value::Float { .. } => valid_float_ops(),
Value::String { .. } => valid_string_ops(),
Value::Binary { .. } => valid_binary_ops(),
Value::Bool { .. } => valid_bool_ops(),
Value::Date { .. } => number_comparison_ops(),
Value::Filesize { .. } | Value::Duration { .. } => valid_value_with_unit_ops(),
Value::Range { .. } | Value::Record { .. } => collection_comparison_ops(),
Value::List { .. } => valid_list_ops(),
_ => all_ops_for_immutable(),
};
if mutable {
ops.extend(match value {
Value::Int { .. }
| Value::Float { .. }
| Value::Filesize { .. }
| Value::Duration { .. } => numeric_assignment_ops(),
Value::String { .. } | Value::Binary { .. } | Value::List { .. } => {
concat_assignment_ops()
}
Value::Bool { .. }
| Value::Date { .. }
| Value::Range { .. }
| Value::Record { .. } => vec![operator_to_item(ast::Assignment::Assign)],
_ => all_assignment_ops(),
})
}
ops
}
fn is_expression_mutable(expr: &Expr, working_set: &StateWorkingSet) -> bool {
let Expr::FullCellPath(path) = expr else {
return false;
};
let Expr::Var(id) = path.head.expr else {
return false;
};
if id == ENV_VARIABLE_ID {
return true;
}
let var = working_set.get_variable(id);
var.mutable
}
impl Completer for OperatorCompletion<'_> {
fn fetch(
&mut self,
working_set: &StateWorkingSet,
stack: &Stack,
prefix: impl AsRef<str>,
span: Span,
offset: usize,
options: &CompletionOptions,
) -> Vec<SemanticSuggestion> {
let mut needs_assignment_ops = true;
// Complete according to the expression type
// TODO: type inference on self.left_hand_side to get more accurate completions
let mut possible_operations: Vec<OperatorItem> = match &self.left_hand_side.ty {
Type::Int | Type::Number => valid_int_ops(),
Type::Float => valid_float_ops(),
Type::String => valid_string_ops(),
Type::Binary => valid_binary_ops(),
Type::Bool => valid_bool_ops(),
Type::Date => number_comparison_ops(),
Type::Filesize | Type::Duration => valid_value_with_unit_ops(),
Type::Record(_) | Type::Range => collection_comparison_ops(),
Type::List(_) | Type::Table(_) => valid_list_ops(),
// Unknown type, resort to evaluated values
Type::Any => match &self.left_hand_side.expr {
Expr::FullCellPath(path) => {
// for `$ <tab>`
if matches!(path.head.expr, Expr::Garbage) {
return vec![];
}
let value =
eval_cell_path(working_set, stack, &path.head, &path.tail, path.head.span)
.unwrap_or_default();
let mutable = is_expression_mutable(&self.left_hand_side.expr, working_set);
// to avoid duplication
needs_assignment_ops = false;
ops_by_value(&value, mutable)
}
_ => all_ops_for_immutable(),
},
_ => common_comparison_ops(),
};
// If the left hand side is a variable, add assignment operators if mutable
if needs_assignment_ops && is_expression_mutable(&self.left_hand_side.expr, working_set) {
possible_operations.extend(match &self.left_hand_side.ty {
Type::Int | Type::Float | Type::Number => numeric_assignment_ops(),
Type::Filesize | Type::Duration => numeric_assignment_ops(),
Type::String | Type::Binary | Type::List(_) => concat_assignment_ops(),
Type::Any => all_assignment_ops(),
_ => vec![operator_to_item(ast::Assignment::Assign)],
});
}
let mut matcher = NuMatcher::new(prefix, options);
for OperatorItem {
symbols,
description,
} in possible_operations
{
matcher.add_semantic_suggestion(SemanticSuggestion {
suggestion: Suggestion {
value: symbols.to_owned(),
description: Some(description.to_owned()),
span: reedline::Span::new(span.start - offset, span.end - offset),
append_whitespace: true,
..Suggestion::default()
},
kind: Some(SuggestionKind::Operator),
});
}
matcher.results()
}
}
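
The completer above chooses which operators to offer from the type of the left-hand side. The sketch below shows only the dispatch shape; `Ty` and the operator lists are illustrative subsets, not the full tables built from nushell's operator enums.

```rust
enum Ty {
    Number,
    Str,
    Bool,
    List,
}

// Offer different operator candidates depending on the left-hand side's type.
fn operators_for(ty: &Ty) -> Vec<&'static str> {
    match ty {
        Ty::Number => vec!["==", "!=", "<", ">", "+", "-", "*", "/"],
        Ty::Str => vec!["==", "!=", "=~", "!~", "++", "starts-with", "ends-with"],
        Ty::Bool => vec!["and", "or", "xor", "==", "!="],
        Ty::List => vec!["in", "not-in", "has", "++"],
    }
}

fn main() {
    println!("string operators: {:?}", operators_for(&Ty::Str));
}
```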

View File

@ -1,55 +1,167 @@
use crate::completions::{Completer, CompletionOptions, SemanticSuggestion, SuggestionKind};
use crate::completions::{
Completer, CompletionOptions, MatchAlgorithm, SemanticSuggestion, SuggestionKind,
};
use nu_engine::{column::get_columns, eval_variable};
use nu_protocol::{
engine::{Stack, StateWorkingSet},
Span, VarId,
Span, Value,
};
use reedline::Suggestion;
use std::str;
use super::completion_options::NuMatcher;
use super::completion_common::sort_suggestions;
pub struct VariableCompletion;
#[derive(Clone)]
pub struct VariableCompletion {
var_context: (Vec<u8>, Vec<Vec<u8>>), // tuple with $var and the sublevels (.b.c.d)
}
impl VariableCompletion {
pub fn new(var_context: (Vec<u8>, Vec<Vec<u8>>)) -> Self {
Self { var_context }
}
}
impl Completer for VariableCompletion {
fn fetch(
&mut self,
working_set: &StateWorkingSet,
_stack: &Stack,
prefix: impl AsRef<str>,
stack: &Stack,
prefix: Vec<u8>,
span: Span,
offset: usize,
_pos: usize,
options: &CompletionOptions,
) -> Vec<SemanticSuggestion> {
let mut matcher = NuMatcher::new(prefix, options);
let mut output = vec![];
let builtins = ["$nu", "$in", "$env"];
let var_str = std::str::from_utf8(&self.var_context.0).unwrap_or("");
let var_id = working_set.find_variable(&self.var_context.0);
let current_span = reedline::Span {
start: span.start - offset,
end: span.end - offset,
};
let sublevels_count = self.var_context.1.len();
let prefix_str = String::from_utf8_lossy(&prefix);
// Variable completion (e.g: $en<tab> to complete $env)
let builtins = ["$nu", "$in", "$env"];
for builtin in builtins {
matcher.add_semantic_suggestion(SemanticSuggestion {
suggestion: Suggestion {
value: builtin.to_string(),
span: current_span,
description: Some("reserved".into()),
..Suggestion::default()
},
kind: Some(SuggestionKind::Variable),
});
// Completions for the given variable
if !var_str.is_empty() {
// Completion for $env.<tab>
if var_str == "$env" {
let env_vars = stack.get_env_vars(working_set.permanent_state);
// Return nested values
if sublevels_count > 0 {
// Extract the target var ($env.<target-var>)
let target_var = self.var_context.1[0].clone();
let target_var_str =
str::from_utf8(&target_var).unwrap_or_default().to_string();
// Everything after the target var is the nested level ($env.<target-var>.<nested_levels>...)
let nested_levels: Vec<Vec<u8>> =
self.var_context.1.clone().into_iter().skip(1).collect();
if let Some(val) = env_vars.get(&target_var_str) {
for suggestion in nested_suggestions(val, &nested_levels, current_span) {
if options.match_algorithm.matches_u8_insensitive(
options.case_sensitive,
suggestion.suggestion.value.as_bytes(),
&prefix,
) {
output.push(suggestion);
}
}
return sort_suggestions(&prefix_str, output, options);
}
} else {
// No nesting provided, return all env vars
for env_var in env_vars {
if options.match_algorithm.matches_u8_insensitive(
options.case_sensitive,
env_var.0.as_bytes(),
&prefix,
) {
output.push(SemanticSuggestion {
suggestion: Suggestion {
value: env_var.0,
span: current_span,
..Suggestion::default()
},
kind: Some(SuggestionKind::Type(env_var.1.get_type())),
});
}
}
return sort_suggestions(&prefix_str, output, options);
}
}
// Completions for $nu.<tab>
if var_str == "$nu" {
// Eval nu var
if let Ok(nuval) = eval_variable(
working_set.permanent_state,
stack,
nu_protocol::NU_VARIABLE_ID,
nu_protocol::Span::new(current_span.start, current_span.end),
) {
for suggestion in nested_suggestions(&nuval, &self.var_context.1, current_span)
{
if options.match_algorithm.matches_u8_insensitive(
options.case_sensitive,
suggestion.suggestion.value.as_bytes(),
&prefix,
) {
output.push(suggestion);
}
}
return sort_suggestions(&prefix_str, output, options);
}
}
// Completion other variable types
if let Some(var_id) = var_id {
// Extract the variable value from the stack
let var = stack.get_var(var_id, Span::new(span.start, span.end));
// If the value exists and it's of type Record
if let Ok(value) = var {
for suggestion in nested_suggestions(&value, &self.var_context.1, current_span)
{
if options.match_algorithm.matches_u8_insensitive(
options.case_sensitive,
suggestion.suggestion.value.as_bytes(),
&prefix,
) {
output.push(suggestion);
}
}
return sort_suggestions(&prefix_str, output, options);
}
}
}
let mut add_candidate = |name, var_id: &VarId| {
matcher.add_semantic_suggestion(SemanticSuggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(name).to_string(),
span: current_span,
description: Some(working_set.get_variable(*var_id).ty.to_string()),
..Suggestion::default()
},
kind: Some(SuggestionKind::Variable),
})
};
// Variable completion (e.g: $en<tab> to complete $env)
for builtin in builtins {
if options.match_algorithm.matches_u8_insensitive(
options.case_sensitive,
builtin.as_bytes(),
&prefix,
) {
output.push(SemanticSuggestion {
suggestion: Suggestion {
value: builtin.to_string(),
span: current_span,
..Suggestion::default()
},
// TODO is there a way to get the VarId to get the type???
kind: None,
});
}
}
// TODO: The following can be refactored (see find_commands_by_predicate() used in
// command_completions).
@ -57,11 +169,27 @@ impl Completer for VariableCompletion {
// Working set scope vars
for scope_frame in working_set.delta.scope.iter().rev() {
for overlay_frame in scope_frame.active_overlays(&mut removed_overlays).rev() {
for (name, var_id) in &overlay_frame.vars {
add_candidate(name, var_id);
for v in &overlay_frame.vars {
if options.match_algorithm.matches_u8_insensitive(
options.case_sensitive,
v.0,
&prefix,
) {
output.push(SemanticSuggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(v.0).to_string(),
span: current_span,
..Suggestion::default()
},
kind: Some(SuggestionKind::Type(
working_set.get_variable(*v.1).ty.clone(),
)),
});
}
}
}
}
// Permanent state vars
// for scope in &self.engine_state.scope {
for overlay_frame in working_set
@ -69,11 +197,118 @@ impl Completer for VariableCompletion {
.active_overlays(&removed_overlays)
.rev()
{
for (name, var_id) in &overlay_frame.vars {
add_candidate(name, var_id);
for v in &overlay_frame.vars {
if options.match_algorithm.matches_u8_insensitive(
options.case_sensitive,
v.0,
&prefix,
) {
output.push(SemanticSuggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(v.0).to_string(),
span: current_span,
..Suggestion::default()
},
kind: Some(SuggestionKind::Type(
working_set.get_variable(*v.1).ty.clone(),
)),
});
}
}
}
matcher.results()
output = sort_suggestions(&prefix_str, output, options);
output.dedup(); // TODO: Removes only consecutive duplicates, is it intended?
output
}
}
// Recursively find the values for the given sublevels;
// if no sublevels are set, the current value is used
fn nested_suggestions(
val: &Value,
sublevels: &[Vec<u8>],
current_span: reedline::Span,
) -> Vec<SemanticSuggestion> {
let mut output: Vec<SemanticSuggestion> = vec![];
let value = recursive_value(val, sublevels).unwrap_or_else(Value::nothing);
let kind = SuggestionKind::Type(value.get_type());
match value {
Value::Record { val, .. } => {
// Add all the columns as completion
for col in val.columns() {
output.push(SemanticSuggestion {
suggestion: Suggestion {
value: col.clone(),
span: current_span,
..Suggestion::default()
},
kind: Some(kind.clone()),
});
}
output
}
Value::List { vals, .. } => {
for column_name in get_columns(vals.as_slice()) {
output.push(SemanticSuggestion {
suggestion: Suggestion {
value: column_name,
span: current_span,
..Suggestion::default()
},
kind: Some(kind.clone()),
});
}
output
}
_ => output,
}
}
// Extracts the recursive value (e.g: $var.a.b.c)
fn recursive_value(val: &Value, sublevels: &[Vec<u8>]) -> Result<Value, Span> {
// Go to next sublevel
if let Some((sublevel, next_sublevels)) = sublevels.split_first() {
let span = val.span();
match val {
Value::Record { val, .. } => {
if let Some((_, value)) = val.iter().find(|(key, _)| key.as_bytes() == sublevel) {
// If matches try to fetch recursively the next
recursive_value(value, next_sublevels)
} else {
// Current sublevel value not found
Err(span)
}
}
Value::List { vals, .. } => {
for col in get_columns(vals.as_slice()) {
if col.as_bytes() == *sublevel {
let val = val.get_data_by_key(&col).ok_or(span)?;
return recursive_value(&val, next_sublevels);
}
}
// Current sublevel value not found
Err(span)
}
_ => Ok(val.clone()),
}
} else {
Ok(val.clone())
}
}
impl MatchAlgorithm {
pub fn matches_u8_insensitive(&self, sensitive: bool, haystack: &[u8], needle: &[u8]) -> bool {
if sensitive {
self.matches_u8(haystack, needle)
} else {
self.matches_u8(&haystack.to_ascii_lowercase(), &needle.to_ascii_lowercase())
}
}
}
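
The `recursive_value` walk above is the core of cell-path completion like `$config.db.<tab>`: follow the typed sublevels into the value, then offer the columns of whatever was reached. A self-contained sketch with a nested map standing in for `Value::Record`:

```rust
use std::collections::HashMap;

#[derive(Clone)]
enum Val {
    Str(String),
    Record(HashMap<String, Val>),
}

// Follow the cell path one key at a time; stop if a key is missing or the
// current value is not a record.
fn descend<'a>(val: &'a Val, path: &[&str]) -> Option<&'a Val> {
    match path.split_first() {
        None => Some(val),
        Some((key, rest)) => match val {
            Val::Record(map) => descend(map.get(*key)?, rest),
            _ => None,
        },
    }
}

// Suggest the keys of the record reached by the path, if any.
fn completions(val: &Val, path: &[&str]) -> Vec<String> {
    match descend(val, path) {
        Some(Val::Record(map)) => map.keys().cloned().collect(),
        _ => Vec::new(),
    }
}

fn main() {
    let mut inner = HashMap::new();
    inner.insert("host".to_string(), Val::Str("localhost".into()));
    let mut root = HashMap::new();
    root.insert("db".to_string(), Val::Record(inner));
    // Completing `$config.db.<tab>` would suggest "host".
    println!("{:?}", completions(&Val::Record(root), &["db"]));
}
```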

View File

@ -2,10 +2,10 @@ use crate::util::eval_source;
#[cfg(feature = "plugin")]
use nu_path::canonicalize_with;
#[cfg(feature = "plugin")]
use nu_protocol::{engine::StateWorkingSet, ParseError, PluginRegistryFile, Spanned};
use nu_protocol::{engine::StateWorkingSet, report_error, ParseError, PluginRegistryFile, Spanned};
use nu_protocol::{
engine::{EngineState, Stack},
report_shell_error, PipelineData,
report_error_new, HistoryFileFormat, PipelineData,
};
#[cfg(feature = "plugin")]
use nu_utils::perf;
@ -16,9 +16,16 @@ const PLUGIN_FILE: &str = "plugin.msgpackz";
#[cfg(feature = "plugin")]
const OLD_PLUGIN_FILE: &str = "plugin.nu";
const HISTORY_FILE_TXT: &str = "history.txt";
const HISTORY_FILE_SQLITE: &str = "history.sqlite3";
#[cfg(feature = "plugin")]
pub fn read_plugin_file(engine_state: &mut EngineState, plugin_file: Option<Spanned<String>>) {
use nu_protocol::{shell_error::io::IoError, ShellError};
pub fn read_plugin_file(
engine_state: &mut EngineState,
plugin_file: Option<Spanned<String>>,
storage_path: &str,
) {
use nu_protocol::ShellError;
use std::path::Path;
let span = plugin_file.as_ref().map(|s| s.span);
@ -29,7 +36,7 @@ pub fn read_plugin_file(engine_state: &mut EngineState, plugin_file: Option<Span
.and_then(|p| Path::new(&p.item).extension())
.is_some_and(|ext| ext == "nu")
{
report_shell_error(
report_error_new(
engine_state,
&ShellError::GenericError {
error: "Wrong plugin file format".into(),
@ -45,14 +52,11 @@ pub fn read_plugin_file(engine_state: &mut EngineState, plugin_file: Option<Span
let mut start_time = std::time::Instant::now();
// Reading signatures from plugin registry file
// The plugin.msgpackz file stores the parsed signature collected from each registered plugin
add_plugin_file(engine_state, plugin_file.clone());
add_plugin_file(engine_state, plugin_file.clone(), storage_path);
perf!(
"add plugin file to engine_state",
start_time,
engine_state
.get_config()
.use_ansi_coloring
.get(engine_state)
engine_state.get_config().use_ansi_coloring
);
start_time = std::time::Instant::now();
@ -66,7 +70,8 @@ pub fn read_plugin_file(engine_state: &mut EngineState, plugin_file: Option<Span
log::warn!("Plugin file not found: {}", plugin_path.display());
// Try migration of an old plugin file if this wasn't a custom plugin file
if plugin_file.is_none() && migrate_old_plugin_file(engine_state) {
if plugin_file.is_none() && migrate_old_plugin_file(engine_state, storage_path)
{
let Ok(file) = std::fs::File::open(&plugin_path) else {
log::warn!("Failed to load newly migrated plugin file");
return;
@ -76,14 +81,18 @@ pub fn read_plugin_file(engine_state: &mut EngineState, plugin_file: Option<Span
return;
}
} else {
report_shell_error(
report_error_new(
engine_state,
&ShellError::Io(IoError::new_internal_with_path(
err.kind(),
"Could not open plugin registry file",
nu_protocol::location!(),
plugin_path,
)),
&ShellError::GenericError {
error: format!(
"Error while opening plugin registry file: {}",
plugin_path.display()
),
msg: "plugin path defined here".into(),
span,
help: None,
inner: vec![err.into()],
},
);
return;
}
@ -104,7 +113,7 @@ pub fn read_plugin_file(engine_state: &mut EngineState, plugin_file: Option<Span
Ok(contents) => contents,
Err(err) => {
log::warn!("Failed to read plugin registry file: {err:?}");
report_shell_error(
report_error_new(
engine_state,
&ShellError::GenericError {
error: format!(
@ -128,10 +137,7 @@ pub fn read_plugin_file(engine_state: &mut EngineState, plugin_file: Option<Span
perf!(
&format!("read plugin file {}", plugin_path.display()),
start_time,
engine_state
.get_config()
.use_ansi_coloring
.get(engine_state)
engine_state.get_config().use_ansi_coloring
);
start_time = std::time::Instant::now();
@ -140,26 +146,27 @@ pub fn read_plugin_file(engine_state: &mut EngineState, plugin_file: Option<Span
nu_plugin_engine::load_plugin_file(&mut working_set, &contents, span);
if let Err(err) = engine_state.merge_delta(working_set.render()) {
report_shell_error(engine_state, &err);
report_error_new(engine_state, &err);
return;
}
perf!(
&format!("load plugin file {}", plugin_path.display()),
start_time,
engine_state
.get_config()
.use_ansi_coloring
.get(engine_state)
engine_state.get_config().use_ansi_coloring
);
}
}
#[cfg(feature = "plugin")]
pub fn add_plugin_file(engine_state: &mut EngineState, plugin_file: Option<Spanned<String>>) {
pub fn add_plugin_file(
engine_state: &mut EngineState,
plugin_file: Option<Spanned<String>>,
storage_path: &str,
) {
use std::path::Path;
use nu_protocol::report_parse_error;
let working_set = StateWorkingSet::new(engine_state);
if let Ok(cwd) = engine_state.cwd_as_string(None) {
if let Some(plugin_file) = plugin_file {
@ -174,16 +181,17 @@ pub fn add_plugin_file(engine_state: &mut EngineState, plugin_file: Option<Spann
engine_state.plugin_path = Some(path)
} else {
// It's an error if the directory for the plugin file doesn't exist.
report_parse_error(
&StateWorkingSet::new(engine_state),
report_error(
&working_set,
&ParseError::FileNotFound(
path_dir.to_string_lossy().into_owned(),
plugin_file.span,
),
);
}
} else if let Some(plugin_path) = nu_path::nu_config_dir() {
} else if let Some(mut plugin_path) = nu_path::config_dir() {
// Path to store plugins signatures
plugin_path.push(storage_path);
let mut plugin_path =
canonicalize_with(&plugin_path, &cwd).unwrap_or(plugin_path.into());
plugin_path.push(PLUGIN_FILE);
@ -206,8 +214,7 @@ pub fn eval_config_contents(
let prev_file = engine_state.file.take();
engine_state.file = Some(config_path.clone());
// TODO: ignore this error?
let _ = eval_source(
eval_source(
engine_state,
stack,
&contents,
@ -220,18 +227,36 @@ pub fn eval_config_contents(
engine_state.file = prev_file;
// Merge the environment in case env vars changed in the config
if let Err(e) = engine_state.merge_env(stack) {
report_shell_error(engine_state, &e);
match engine_state.cwd(Some(stack)) {
Ok(cwd) => {
if let Err(e) = engine_state.merge_env(stack, cwd) {
report_error_new(engine_state, &e);
}
}
Err(e) => {
report_error_new(engine_state, &e);
}
}
}
}
}
pub(crate) fn get_history_path(storage_path: &str, mode: HistoryFileFormat) -> Option<PathBuf> {
nu_path::config_dir().map(|mut history_path| {
history_path.push(storage_path);
history_path.push(match mode {
HistoryFileFormat::PlainText => HISTORY_FILE_TXT,
HistoryFileFormat::Sqlite => HISTORY_FILE_SQLITE,
});
history_path.into()
})
}
#[cfg(feature = "plugin")]
pub fn migrate_old_plugin_file(engine_state: &EngineState) -> bool {
pub fn migrate_old_plugin_file(engine_state: &EngineState, storage_path: &str) -> bool {
use nu_protocol::{
shell_error::io::IoError, PluginExample, PluginIdentity, PluginRegistryItem,
PluginRegistryItemData, PluginSignature, ShellError,
PluginExample, PluginIdentity, PluginRegistryItem, PluginRegistryItemData, PluginSignature,
ShellError,
};
use std::collections::BTreeMap;
@ -241,9 +266,10 @@ pub fn migrate_old_plugin_file(engine_state: &EngineState) -> bool {
return false;
};
let Some(config_dir) =
nu_path::nu_config_dir().and_then(|dir| nu_path::canonicalize_with(dir, &cwd).ok())
else {
let Some(config_dir) = nu_path::config_dir().and_then(|mut dir| {
dir.push(storage_path);
nu_path::canonicalize_with(dir, &cwd).ok()
}) else {
return false;
};
@ -254,7 +280,7 @@ pub fn migrate_old_plugin_file(engine_state: &EngineState) -> bool {
let old_contents = match std::fs::read(&old_plugin_file_path) {
Ok(old_contents) => old_contents,
Err(err) => {
report_shell_error(
report_error_new(
engine_state,
&ShellError::GenericError {
error: "Can't read old plugin file to migrate".into(),
@ -320,18 +346,10 @@ pub fn migrate_old_plugin_file(engine_state: &EngineState) -> bool {
// Write the new file
let new_plugin_file_path = config_dir.join(PLUGIN_FILE);
if let Err(err) = std::fs::File::create(&new_plugin_file_path)
.map_err(|err| {
IoError::new_internal_with_path(
err.kind(),
"Could not create new plugin file",
nu_protocol::location!(),
new_plugin_file_path.clone(),
)
})
.map_err(ShellError::from)
.map_err(|e| e.into())
.and_then(|file| contents.write_to(file, None))
{
report_shell_error(
report_error_new(
&engine_state,
&ShellError::GenericError {
error: "Failed to save migrated plugin file".into(),
@ -358,10 +376,7 @@ pub fn migrate_old_plugin_file(engine_state: &EngineState) -> bool {
perf!(
"migrate old plugin file",
start_time,
engine_state
.get_config()
.use_ansi_coloring
.get(&engine_state)
engine_state.get_config().use_ansi_coloring
);
true
}
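
For context, `get_history_path` above composes the history file location from the config directory, the shell's storage path, and a format-specific file name. A minimal sketch with `config_dir` passed in explicitly (standing in for `nu_path::config_dir()`):

```rust
use std::path::PathBuf;

enum HistoryFormat {
    PlainText,
    Sqlite,
}

fn history_path(
    config_dir: Option<PathBuf>,
    storage_path: &str,
    format: HistoryFormat,
) -> Option<PathBuf> {
    config_dir.map(|mut path| {
        path.push(storage_path);
        path.push(match format {
            HistoryFormat::PlainText => "history.txt",
            HistoryFormat::Sqlite => "history.sqlite3",
        });
        path
    })
}

fn main() {
    let p = history_path(
        Some(PathBuf::from("/home/user/.config")),
        "nushell",
        HistoryFormat::Sqlite,
    );
    assert_eq!(
        p,
        Some(PathBuf::from("/home/user/.config/nushell/history.sqlite3"))
    );
}
```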

View File

@ -1,16 +1,13 @@
use log::info;
use nu_engine::eval_block;
use nu_engine::{convert_env_values, eval_block};
use nu_parser::parse;
use nu_protocol::{
cli_error::report_compile_error,
debugger::WithoutDebug,
engine::{EngineState, Stack, StateWorkingSet},
report_parse_error, report_parse_warning, PipelineData, ShellError, Spanned, Value,
report_error, PipelineData, ShellError, Spanned, Value,
};
use std::sync::Arc;
use crate::util::print_pipeline;
#[derive(Default)]
pub struct EvaluateCommandsOpts {
pub table_mode: Option<Value>,
@ -50,10 +47,13 @@ pub fn evaluate_commands(
}
}
// Translate environment variables from Strings to Values
convert_env_values(engine_state, stack)?;
// Parse the source code
let (block, delta) = {
if let Some(ref t_mode) = table_mode {
Arc::make_mut(&mut engine_state.config).table.mode =
Arc::make_mut(&mut engine_state.config).table_mode =
t_mode.coerce_str()?.parse().unwrap_or_default();
}
@ -61,17 +61,17 @@ pub fn evaluate_commands(
let output = parse(&mut working_set, None, commands.item.as_bytes(), false);
if let Some(warning) = working_set.parse_warnings.first() {
report_parse_warning(&working_set, warning);
report_error(&working_set, warning);
}
if let Some(err) = working_set.parse_errors.first() {
report_parse_error(&working_set, err);
report_error(&working_set, err);
std::process::exit(1);
}
if let Some(err) = working_set.compile_errors.first() {
report_compile_error(&working_set, err);
std::process::exit(1);
report_error(&working_set, err);
// Not a fatal error, for now
}
(output, working_set.render())
@ -88,11 +88,15 @@ pub fn evaluate_commands(
}
if let Some(t_mode) = table_mode {
Arc::make_mut(&mut engine_state.config).table.mode =
Arc::make_mut(&mut engine_state.config).table_mode =
t_mode.coerce_str()?.parse().unwrap_or_default();
}
print_pipeline(engine_state, stack, pipeline, no_newline)?;
if let Some(status) = pipeline.print(engine_state, stack, no_newline, false)? {
if status.code() != 0 {
std::process::exit(status.code())
}
}
info!("evaluate {}:{}:{}", file!(), line!(), column!());

View File

@ -1,17 +1,14 @@
use crate::util::{eval_source, print_pipeline};
use crate::util::eval_source;
use log::{info, trace};
use nu_engine::eval_block;
use nu_engine::{convert_env_values, eval_block};
use nu_parser::parse;
use nu_path::canonicalize_with;
use nu_protocol::{
cli_error::report_compile_error,
debugger::WithoutDebug,
engine::{EngineState, Stack, StateWorkingSet},
report_parse_error, report_parse_warning,
shell_error::io::*,
PipelineData, ShellError, Span, Value,
report_error, PipelineData, ShellError, Span, Value,
};
use std::{path::PathBuf, sync::Arc};
use std::sync::Arc;
/// Entry point for evaluating a file.
///
@ -24,16 +21,16 @@ pub fn evaluate_file(
stack: &mut Stack,
input: PipelineData,
) -> Result<(), ShellError> {
// Convert environment variables from Strings to Values and store them in the engine state.
convert_env_values(engine_state, stack)?;
let cwd = engine_state.cwd_as_string(Some(stack))?;
let file_path = canonicalize_with(&path, cwd).map_err(|err| {
IoError::new_internal_with_path(
err.kind().not_found_as(NotFound::File),
"Could not access file",
nu_protocol::location!(),
PathBuf::from(&path),
)
})?;
let file_path =
canonicalize_with(&path, cwd).map_err(|err| ShellError::FileNotFoundCustom {
msg: format!("Could not access file '{path}': {err}"),
span: Span::unknown(),
})?;
let file_path_str = file_path
.to_str()
@ -45,24 +42,18 @@ pub fn evaluate_file(
span: Span::unknown(),
})?;
let file = std::fs::read(&file_path).map_err(|err| {
IoError::new_internal_with_path(
err.kind().not_found_as(NotFound::File),
"Could not read file",
nu_protocol::location!(),
file_path.clone(),
)
let file = std::fs::read(&file_path).map_err(|err| ShellError::FileNotFoundCustom {
msg: format!("Could not read file '{file_path_str}': {err}"),
span: Span::unknown(),
})?;
engine_state.file = Some(file_path.clone());
let parent = file_path.parent().ok_or_else(|| {
IoError::new_internal_with_path(
ErrorKind::DirectoryNotFound,
"The file path does not have a parent",
nu_protocol::location!(),
file_path.clone(),
)
})?;
let parent = file_path
.parent()
.ok_or_else(|| ShellError::FileNotFoundCustom {
msg: format!("The file path '{file_path_str}' does not have a parent"),
span: Span::unknown(),
})?;
stack.add_env_var(
"FILE_PWD".to_string(),
@ -86,18 +77,18 @@ pub fn evaluate_file(
let block = parse(&mut working_set, Some(file_path_str), &file, false);
if let Some(warning) = working_set.parse_warnings.first() {
report_parse_warning(&working_set, warning);
report_error(&working_set, warning);
}
// If any parse errors were found, report the first error and exit.
if let Some(err) = working_set.parse_errors.first() {
report_parse_error(&working_set, err);
report_error(&working_set, err);
std::process::exit(1);
}
if let Some(err) = working_set.compile_errors.first() {
report_compile_error(&working_set, err);
std::process::exit(1);
report_error(&working_set, err);
// Not a fatal error, for now
}
// Look for blocks whose name starts with "main" and replace it with the filename.
@ -127,7 +118,11 @@ pub fn evaluate_file(
};
// Print the pipeline output of the last command of the file.
print_pipeline(engine_state, stack, pipeline, true)?;
if let Some(status) = pipeline.print(engine_state, stack, true, false)? {
if status.code() != 0 {
std::process::exit(status.code())
}
}
// Invoke the main command with arguments.
// Arguments with whitespaces are quoted, thus can be safely concatenated by whitespace.
@ -145,7 +140,7 @@ pub fn evaluate_file(
};
if exit_code != 0 {
std::process::exit(exit_code);
std::process::exit(exit_code)
}
info!("evaluate {}:{}:{}", file!(), line!(), column!());
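
The setup above canonicalizes the script path, takes its parent directory, and exposes it to the script (stored as `FILE_PWD` on the evaluation stack). A rough standard-library-only sketch of that resolution step:

```rust
use std::io;
use std::path::{Path, PathBuf};

// Resolve the directory a script lives in, relative to the current directory.
fn file_pwd(cwd: &Path, script: &str) -> io::Result<PathBuf> {
    let full = cwd.join(script).canonicalize()?;
    let parent = full
        .parent()
        .ok_or_else(|| io::Error::new(io::ErrorKind::NotFound, "path has no parent"))?;
    Ok(parent.to_path_buf())
}

fn main() -> io::Result<()> {
    let cwd = std::env::current_dir()?;
    // "scripts/setup.nu" is purely illustrative; any existing file works.
    match file_pwd(&cwd, "scripts/setup.nu") {
        Ok(dir) => println!("FILE_PWD would be {}", dir.display()),
        Err(err) => println!("could not resolve script: {err}"),
    }
    Ok(())
}
```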

View File

@ -21,6 +21,7 @@ pub use config_files::eval_config_contents;
pub use eval_cmds::{evaluate_commands, EvaluateCommandsOpts};
pub use eval_file::evaluate_file;
pub use menus::NuHelpCompleter;
pub use nu_cmd_base::util::get_init_cwd;
pub use nu_highlight::NuHighlight;
pub use print::Print;
pub use prompt::NushellPrompt;

View File

@ -30,15 +30,12 @@ impl NuHelpCompleter {
.filter_map(|(_, decl_id)| {
let decl = self.engine_state.get_decl(decl_id);
(decl.name().to_folded_case().contains(&folded_line)
|| decl.description().to_folded_case().contains(&folded_line)
|| decl.usage().to_folded_case().contains(&folded_line)
|| decl
.search_terms()
.into_iter()
.any(|term| term.to_folded_case().contains(&folded_line))
|| decl
.extra_description()
.to_folded_case()
.contains(&folded_line))
|| decl.extra_usage().to_folded_case().contains(&folded_line))
.then_some(decl)
})
.collect::<Vec<_>>();
@ -50,15 +47,15 @@ impl NuHelpCompleter {
.map(|decl| {
let mut long_desc = String::new();
let description = decl.description();
if !description.is_empty() {
long_desc.push_str(description);
let usage = decl.usage();
if !usage.is_empty() {
long_desc.push_str(usage);
long_desc.push_str("\r\n\r\n");
}
let extra_desc = decl.extra_description();
if !extra_desc.is_empty() {
long_desc.push_str(extra_desc);
let extra_usage = decl.extra_usage();
if !extra_usage.is_empty() {
long_desc.push_str(extra_usage);
long_desc.push_str("\r\n\r\n");
}
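The completer above matches the query against command names, descriptions, and search terms after case folding. A rough standalone sketch of that filter, using `to_lowercase()` as a stand-in for `to_folded_case()` and a hypothetical `DeclInfo` struct:

```rust
struct DeclInfo {
    name: String,
    description: String,
    search_terms: Vec<String>,
}

// Case-insensitive substring match over the searchable parts of a declaration.
fn matches_query(decl: &DeclInfo, query: &str) -> bool {
    let q = query.to_lowercase();
    decl.name.to_lowercase().contains(&q)
        || decl.description.to_lowercase().contains(&q)
        || decl
            .search_terms
            .iter()
            .any(|term| term.to_lowercase().contains(&q))
}

fn main() {
    let decl = DeclInfo {
        name: "str upcase".into(),
        description: "Make text uppercase.".into(),
        search_terms: vec!["uppercase".into(), "caps".into()],
    };
    assert!(matches_query(&decl, "CAPS"));
    assert!(!matches_query(&decl, "lowercase"));
}
```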

View File

@ -2,7 +2,7 @@ use nu_engine::eval_block;
use nu_protocol::{
debugger::WithoutDebug,
engine::{EngineState, Stack},
BlockId, IntoPipelineData, Span, Value,
IntoPipelineData, Span, Value,
};
use reedline::{menu_functions::parse_selection_char, Completer, Suggestion};
use std::sync::Arc;
@ -10,7 +10,7 @@ use std::sync::Arc;
const SELECTION_CHAR: char = '!';
pub struct NuMenuCompleter {
block_id: BlockId,
block_id: usize,
span: Span,
stack: Stack,
engine_state: Arc<EngineState>,
@ -19,7 +19,7 @@ pub struct NuMenuCompleter {
impl NuMenuCompleter {
pub fn new(
block_id: BlockId,
block_id: usize,
span: Span,
stack: Stack,
engine_state: Arc<EngineState>,
@ -28,7 +28,7 @@ impl NuMenuCompleter {
Self {
block_id,
span,
stack: stack.reset_out_dest().collect_value(),
stack: stack.reset_out_dest().capture(),
engine_state,
only_buffer_difference,
}
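The change above swaps between a bare `usize` block id and a `BlockId` type. A minimal sketch of the newtype pattern involved (this `BlockId` is illustrative, not nu_protocol's exact definition):

```rust
// Hypothetical newtype: wraps the raw index so block ids cannot be mixed up
// with other usize-typed ids at compile time.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
struct BlockId(usize);

impl BlockId {
    fn new(raw: usize) -> Self {
        BlockId(raw)
    }
    fn get(self) -> usize {
        self.0
    }
}

fn main() {
    let id = BlockId::new(42);
    assert_eq!(id.get(), 42);
}
```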

View File

@ -17,7 +17,7 @@ impl Command for NuHighlight {
.input_output_types(vec![(Type::String, Type::String)])
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Syntax highlight the input string."
}

View File

@ -1,5 +1,4 @@
use nu_engine::command_prelude::*;
use nu_protocol::ByteStreamSource;
#[derive(Clone)]
pub struct Print;
@ -31,11 +30,11 @@ impl Command for Print {
.category(Category::Strings)
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Print the given values to stdout."
}
fn extra_description(&self) -> &str {
fn extra_usage(&self) -> &str {
r#"Unlike `echo`, this command does not return any value (`print | describe` will return "nothing").
Since this command has no output, there is no point in piping it with other commands.
@ -51,7 +50,7 @@ Since this command has no output, there is no point in piping it with other comm
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
mut input: PipelineData,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let args: Vec<Value> = call.rest(engine_state, stack, 0)?;
let no_newline = call.has_flag(engine_state, stack, "no-newline")?;
@ -65,24 +64,15 @@ Since this command has no output, there is no point in piping it with other comm
arg.into_pipeline_data()
.print_raw(engine_state, no_newline, to_stderr)?;
} else {
arg.into_pipeline_data().print_table(
engine_state,
stack,
no_newline,
to_stderr,
)?;
arg.into_pipeline_data()
.print(engine_state, stack, no_newline, to_stderr)?;
}
}
} else if !input.is_nothing() {
if let PipelineData::ByteStream(stream, _) = &mut input {
if let ByteStreamSource::Child(child) = stream.source_mut() {
child.ignore_error(true);
}
}
if raw {
input.print_raw(engine_state, no_newline, to_stderr)?;
} else {
input.print_table(engine_state, stack, no_newline, to_stderr)?;
input.print(engine_state, stack, no_newline, to_stderr)?;
}
}

View File

@ -1,7 +1,10 @@
use crate::prompt_update::{
POST_PROMPT_MARKER, PRE_PROMPT_MARKER, VSCODE_POST_PROMPT_MARKER, VSCODE_PRE_PROMPT_MARKER,
};
use nu_protocol::engine::{EngineState, Stack};
use nu_protocol::{
engine::{EngineState, Stack},
Value,
};
#[cfg(windows)]
use nu_utils::enable_vt_processing;
use reedline::{
@ -121,11 +124,8 @@ impl Prompt for NushellPrompt {
.replace('\n', "\r\n");
if self.shell_integration_osc633 {
if self
.stack
.get_env_var(&self.engine_state, "TERM_PROGRAM")
.and_then(|v| v.as_str().ok())
== Some("vscode")
if self.stack.get_env_var(&self.engine_state, "TERM_PROGRAM")
== Some(Value::test_string("vscode"))
{
// We're in vscode and we have osc633 enabled
format!("{VSCODE_PRE_PROMPT_MARKER}{prompt}{VSCODE_POST_PROMPT_MARKER}").into()

View File

@ -1,9 +1,9 @@
use crate::NushellPrompt;
use log::{trace, warn};
use log::trace;
use nu_engine::ClosureEvalOnce;
use nu_protocol::{
engine::{EngineState, Stack},
report_shell_error, Config, PipelineData, Value,
report_error_new, Config, PipelineData, Value,
};
use reedline::Prompt;
@ -30,21 +30,30 @@ pub(crate) const TRANSIENT_PROMPT_MULTILINE_INDICATOR: &str =
pub(crate) const PRE_PROMPT_MARKER: &str = "\x1b]133;A\x1b\\";
pub(crate) const POST_PROMPT_MARKER: &str = "\x1b]133;B\x1b\\";
pub(crate) const PRE_EXECUTION_MARKER: &str = "\x1b]133;C\x1b\\";
#[allow(dead_code)]
pub(crate) const POST_EXECUTION_MARKER_PREFIX: &str = "\x1b]133;D;";
#[allow(dead_code)]
pub(crate) const POST_EXECUTION_MARKER_SUFFIX: &str = "\x1b\\";
// OSC633 is the same as OSC133 but specifically for VSCode
pub(crate) const VSCODE_PRE_PROMPT_MARKER: &str = "\x1b]633;A\x1b\\";
pub(crate) const VSCODE_POST_PROMPT_MARKER: &str = "\x1b]633;B\x1b\\";
#[allow(dead_code)]
pub(crate) const VSCODE_PRE_EXECUTION_MARKER: &str = "\x1b]633;C\x1b\\";
#[allow(dead_code)]
//"\x1b]633;D;{}\x1b\\"
pub(crate) const VSCODE_POST_EXECUTION_MARKER_PREFIX: &str = "\x1b]633;D;";
#[allow(dead_code)]
pub(crate) const VSCODE_POST_EXECUTION_MARKER_SUFFIX: &str = "\x1b\\";
#[allow(dead_code)]
//"\x1b]633;E;{}\x1b\\"
pub(crate) const VSCODE_COMMANDLINE_MARKER_PREFIX: &str = "\x1b]633;E;";
#[allow(dead_code)]
pub(crate) const VSCODE_COMMANDLINE_MARKER_SUFFIX: &str = "\x1b\\";
#[allow(dead_code)]
// "\x1b]633;P;Cwd={}\x1b\\"
pub(crate) const VSCODE_CWD_PROPERTY_MARKER_PREFIX: &str = "\x1b]633;P;Cwd=";
#[allow(dead_code)]
pub(crate) const VSCODE_CWD_PROPERTY_MARKER_SUFFIX: &str = "\x1b\\";
pub(crate) const RESET_APPLICATION_MODE: &str = "\x1b[?1l";
@ -59,7 +68,7 @@ fn get_prompt_string(
.get_env_var(engine_state, prompt)
.and_then(|v| match v {
Value::Closure { val, .. } => {
let result = ClosureEvalOnce::new(engine_state, stack, val.as_ref().clone())
let result = ClosureEvalOnce::new(engine_state, stack, *val)
.run_with_input(PipelineData::Empty);
trace!(
@ -71,7 +80,7 @@ fn get_prompt_string(
result
.map_err(|err| {
report_shell_error(engine_state, &err);
report_error_new(engine_state, &err);
})
.ok()
}
@ -80,19 +89,18 @@ fn get_prompt_string(
})
.and_then(|pipeline_data| {
let output = pipeline_data.collect_string("", config).ok();
let ansi_output = output.map(|mut x| {
// Always reset the color at the start of the right prompt
// to ensure there is no ansi bleed over
if x.is_empty() && prompt == PROMPT_COMMAND_RIGHT {
x.insert_str(0, "\x1b[0m")
};
output.map(|mut x| {
// Just remove the very last newline.
if x.ends_with('\n') {
x.pop();
}
if x.ends_with('\r') {
x.pop();
}
x
});
// Let's keep this for debugging purposes with nu --log-level warn
warn!("{}:{}:{} {:?}", file!(), line!(), column!(), ansi_output);
ansi_output
})
})
}
@ -110,17 +118,13 @@ pub(crate) fn update_prompt(
// Now that we have the prompt string lets ansify it.
// <133 A><prompt><133 B><command><133 C><command output>
let left_prompt_string = if config.shell_integration.osc633 {
if stack
.get_env_var(engine_state, "TERM_PROGRAM")
.and_then(|v| v.as_str().ok())
== Some("vscode")
{
let left_prompt_string = if config.shell_integration_osc633 {
if stack.get_env_var(engine_state, "TERM_PROGRAM") == Some(Value::test_string("vscode")) {
// We're in vscode and we have osc633 enabled
Some(format!(
"{VSCODE_PRE_PROMPT_MARKER}{configured_left_prompt_string}{VSCODE_POST_PROMPT_MARKER}"
))
} else if config.shell_integration.osc133 {
} else if config.shell_integration_osc133 {
// If we're in VSCode but we don't find the env var, but we have osc133 set, then use it
Some(format!(
"{PRE_PROMPT_MARKER}{configured_left_prompt_string}{POST_PROMPT_MARKER}"
@ -128,7 +132,7 @@ pub(crate) fn update_prompt(
} else {
configured_left_prompt_string.into()
}
} else if config.shell_integration.osc133 {
} else if config.shell_integration_osc133 {
Some(format!(
"{PRE_PROMPT_MARKER}{configured_left_prompt_string}{POST_PROMPT_MARKER}"
))
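For reference, a minimal sketch of how the OSC 133 markers defined above wrap a rendered prompt before it reaches the terminal; the OSC 633 variants differ only in the `633` code:

```rust
const PRE_PROMPT_MARKER: &str = "\x1b]133;A\x1b\\";
const POST_PROMPT_MARKER: &str = "\x1b]133;B\x1b\\";

/// Wrap a rendered prompt in shell-integration markers so the terminal can
/// tell where the prompt starts (A) and where user input begins (B).
fn mark_prompt(prompt: &str) -> String {
    format!("{PRE_PROMPT_MARKER}{prompt}{POST_PROMPT_MARKER}")
}

fn main() {
    let marked = mark_prompt("~/src/nushell> ");
    assert!(marked.starts_with("\x1b]133;A"));
    assert!(marked.ends_with("\x1b\\"));
}
```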

File diff suppressed because it is too large.

View File

@ -16,17 +16,18 @@ use crate::{
use crossterm::cursor::SetCursorStyle;
use log::{error, trace, warn};
use miette::{ErrReport, IntoDiagnostic, Result};
use nu_cmd_base::util::get_editor;
use nu_cmd_base::{
hook::eval_hook,
util::{get_editor, get_guaranteed_cwd},
};
use nu_color_config::StyleComputer;
#[allow(deprecated)]
use nu_engine::env_to_strings;
use nu_engine::exit::cleanup_exit;
use nu_engine::{convert_env_values, current_dir_str, env_to_strings};
use nu_parser::{lex, parse, trim_quotes_str};
use nu_protocol::shell_error::io::IoError;
use nu_protocol::{
config::NuCursorShape,
engine::{EngineState, Stack, StateWorkingSet},
report_shell_error, HistoryConfig, HistoryFileFormat, PipelineData, ShellError, Span, Spanned,
report_error_new, HistoryConfig, HistoryFileFormat, PipelineData, ShellError, Span, Spanned,
Value,
};
use nu_utils::{
@ -37,7 +38,6 @@ use reedline::{
CursorConfig, CwdAwareHinter, DefaultCompleter, EditCommand, Emacs, FileBackedHistory,
HistorySessionId, Reedline, SqliteBackedHistory, Vi,
};
use std::sync::atomic::Ordering;
use std::{
collections::HashMap,
env::temp_dir,
@ -53,6 +53,7 @@ use sysinfo::System;
pub fn evaluate_repl(
engine_state: &mut EngineState,
stack: Stack,
nushell_path: &str,
prerun_command: Option<Spanned<String>>,
load_std_lib: Option<Spanned<String>>,
entire_start_time: Instant,
@ -64,16 +65,18 @@ pub fn evaluate_repl(
// from the Arc. This lets us avoid copying stack variables needlessly
let mut unique_stack = stack.clone();
let config = engine_state.get_config();
let use_color = config.use_ansi_coloring.get(engine_state);
let use_color = config.use_ansi_coloring;
confirm_stdin_is_terminal()?;
let mut entry_num = 0;
// Let's grab the shell_integration configs
let shell_integration_osc2 = config.shell_integration.osc2;
let shell_integration_osc7 = config.shell_integration.osc7;
let shell_integration_osc9_9 = config.shell_integration.osc9_9;
let shell_integration_osc133 = config.shell_integration.osc133;
let shell_integration_osc633 = config.shell_integration.osc633;
let shell_integration_osc2 = config.shell_integration_osc2;
let shell_integration_osc7 = config.shell_integration_osc7;
let shell_integration_osc9_9 = config.shell_integration_osc9_9;
let shell_integration_osc133 = config.shell_integration_osc133;
let shell_integration_osc633 = config.shell_integration_osc633;
let nu_prompt = NushellPrompt::new(
shell_integration_osc133,
@ -82,15 +85,22 @@ pub fn evaluate_repl(
stack.clone(),
);
let start_time = std::time::Instant::now();
// Translate environment variables from Strings to Values
if let Err(e) = convert_env_values(engine_state, &unique_stack) {
report_error_new(engine_state, &e);
}
perf!("translate env vars", start_time, use_color);
// seed env vars
unique_stack.add_env_var(
"CMD_DURATION_MS".into(),
Value::string("0823", Span::unknown()),
);
unique_stack.set_last_exit_code(0, Span::unknown());
unique_stack.add_env_var("LAST_EXIT_CODE".into(), Value::int(0, Span::unknown()));
let mut line_editor = get_line_editor(engine_state, use_color)?;
let mut line_editor = get_line_editor(engine_state, nushell_path, use_color)?;
let temp_file = temp_dir().join(format!("{}.nu", uuid::Uuid::new_v4()));
if let Some(s) = prerun_command {
@ -102,11 +112,10 @@ pub fn evaluate_repl(
PipelineData::empty(),
false,
);
engine_state.merge_env(&mut unique_stack)?;
let cwd = get_guaranteed_cwd(engine_state, &unique_stack);
engine_state.merge_env(&mut unique_stack, cwd)?;
}
confirm_stdin_is_terminal()?;
let hostname = System::host_name();
if shell_integration_osc2 {
run_shell_integration_osc2(None, engine_state, &mut unique_stack, use_color);
@ -127,7 +136,15 @@ pub fn evaluate_repl(
// https://code.visualstudio.com/docs/terminal/shell-integration#_vs-code-custom-sequences-osc-633-st
let cmd_text = line_editor.current_buffer_contents().to_string();
let replaced_cmd_text = escape_special_vscode_bytes(&cmd_text)?;
let replaced_cmd_text = cmd_text
.chars()
.map(|c| match c {
'\n' => '\x0a',
'\r' => '\x0d',
'\x1b' => '\x1b',
_ => c,
})
.collect();
run_shell_integration_osc633(
engine_state,
@ -142,30 +159,15 @@ pub fn evaluate_repl(
// Regenerate the $nu constant to contain the startup time and any other potential updates
engine_state.generate_nu_constant();
if load_std_lib.is_none() {
match engine_state.get_config().show_banner {
Value::Bool { val: false, .. } => {}
Value::String { ref val, .. } if val == "short" => {
eval_source(
engine_state,
&mut unique_stack,
r#"banner --short"#.as_bytes(),
"show short banner",
PipelineData::empty(),
false,
);
}
_ => {
eval_source(
engine_state,
&mut unique_stack,
r#"banner"#.as_bytes(),
"show_banner",
PipelineData::empty(),
false,
);
}
}
if load_std_lib.is_none() && engine_state.get_config().show_banner {
eval_source(
engine_state,
&mut unique_stack,
r#"use std banner; banner"#.as_bytes(),
"show_banner",
PipelineData::empty(),
false,
);
}
kitty_protocol_healthcheck(engine_state);
@ -218,7 +220,7 @@ pub fn evaluate_repl(
}
Err(_) => {
// line_editor is lost in the error case so reconstruct a new one
line_editor = get_line_editor(engine_state, use_color)?;
line_editor = get_line_editor(engine_state, nushell_path, use_color)?;
}
}
}
@ -226,44 +228,11 @@ pub fn evaluate_repl(
Ok(())
}
fn escape_special_vscode_bytes(input: &str) -> Result<String, ShellError> {
let bytes = input
.chars()
.flat_map(|c| {
let mut buf = [0; 4]; // Buffer to hold UTF-8 bytes of the character
let c_bytes = c.encode_utf8(&mut buf); // Get UTF-8 bytes for the character
if c_bytes.len() == 1 {
let byte = c_bytes.as_bytes()[0];
match byte {
// Escape bytes below 0x20
b if b < 0x20 => format!("\\x{:02X}", byte).into_bytes(),
// Escape semicolon as \x3B
b';' => "\\x3B".to_string().into_bytes(),
// Escape backslash as \\
b'\\' => "\\\\".to_string().into_bytes(),
// Otherwise, return the character unchanged
_ => vec![byte],
}
} else {
// pass through multi-byte characters unchanged
c_bytes.bytes().collect()
}
})
.collect();
String::from_utf8(bytes).map_err(|err| ShellError::CantConvert {
to_type: "string".to_string(),
from_type: "bytes".to_string(),
span: Span::unknown(),
help: Some(format!(
"Error {err}, Unable to convert {input} to escaped bytes"
)),
})
}
fn get_line_editor(engine_state: &mut EngineState, use_color: bool) -> Result<Reedline> {
fn get_line_editor(
engine_state: &mut EngineState,
nushell_path: &str,
use_color: bool,
) -> Result<Reedline> {
let mut start_time = std::time::Instant::now();
let mut line_editor = Reedline::create();
@ -274,7 +243,7 @@ fn get_line_editor(engine_state: &mut EngineState, use_color: bool) -> Result<Re
if let Some(history) = engine_state.history_config() {
start_time = std::time::Instant::now();
line_editor = setup_history(engine_state, line_editor, history)?;
line_editor = setup_history(nushell_path, engine_state, line_editor, history)?;
perf!("setup history", start_time, use_color);
}
@ -311,12 +280,17 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
hostname,
} = ctx;
let cwd = get_guaranteed_cwd(engine_state, &stack);
let mut start_time = std::time::Instant::now();
// Before doing anything, merge the environment from the previous REPL iteration into the
// permanent state.
if let Err(err) = engine_state.merge_env(&mut stack) {
report_shell_error(engine_state, &err);
if let Err(err) = engine_state.merge_env(&mut stack, cwd) {
report_error_new(engine_state, &err);
}
// Check whether $env.NU_USE_IR is set, so that the user can change it in the REPL
// Temporary while IR eval is optional
stack.use_ir = stack.has_env_var(engine_state, "NU_USE_IR");
perf!("merge env", start_time, use_color);
start_time = std::time::Instant::now();
@ -324,27 +298,21 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
perf!("reset signals", start_time, use_color);
start_time = std::time::Instant::now();
// Right before we start our prompt and take input from the user, fire the "pre_prompt" hook
if let Err(err) = hook::eval_hooks(
engine_state,
&mut stack,
vec![],
&engine_state.get_config().hooks.pre_prompt.clone(),
"pre_prompt",
) {
report_shell_error(engine_state, &err);
// Right before we start our prompt and take input from the user,
// fire the "pre_prompt" hook
if let Some(hook) = engine_state.get_config().hooks.pre_prompt.clone() {
if let Err(err) = eval_hook(engine_state, &mut stack, None, vec![], &hook, "pre_prompt") {
report_error_new(engine_state, &err);
}
}
perf!("pre-prompt hook", start_time, use_color);
start_time = std::time::Instant::now();
// Next, check all the environment variables they ask for
// fire the "env_change" hook
if let Err(error) = hook::eval_env_change_hook(
&engine_state.get_config().hooks.env_change.clone(),
engine_state,
&mut stack,
) {
report_shell_error(engine_state, &error)
let env_change = engine_state.get_config().hooks.env_change.clone();
if let Err(error) = hook::eval_env_change_hook(env_change, engine_state, &mut stack) {
report_error_new(engine_state, &error)
}
perf!("env-change hook", start_time, use_color);
@ -354,9 +322,9 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
start_time = std::time::Instant::now();
// Find the configured cursor shapes for each mode
let cursor_config = CursorConfig {
vi_insert: map_nucursorshape_to_cursorshape(config.cursor_shape.vi_insert),
vi_normal: map_nucursorshape_to_cursorshape(config.cursor_shape.vi_normal),
emacs: map_nucursorshape_to_cursorshape(config.cursor_shape.emacs),
vi_insert: map_nucursorshape_to_cursorshape(config.cursor_shape_vi_insert),
vi_normal: map_nucursorshape_to_cursorshape(config.cursor_shape_vi_normal),
emacs: map_nucursorshape_to_cursorshape(config.cursor_shape_emacs),
};
perf!("get config/cursor config", start_time, use_color);
@ -384,9 +352,9 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
// STACK-REFERENCE 2
stack_arc.clone(),
)))
.with_quick_completions(config.completions.quick)
.with_partial_completions(config.completions.partial)
.with_ansi_colors(config.use_ansi_coloring.get(engine_state))
.with_quick_completions(config.quick_completions)
.with_partial_completions(config.partial_completions)
.with_ansi_colors(config.use_ansi_coloring)
.with_cwd(Some(
engine_state
.cwd(None)
@ -395,18 +363,14 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
.to_string_lossy()
.to_string(),
))
.with_cursor_config(cursor_config)
.with_visual_selection_style(nu_ansi_term::Style {
is_reverse: true,
..Default::default()
});
.with_cursor_config(cursor_config);
perf!("reedline builder", start_time, use_color);
let style_computer = StyleComputer::from_config(engine_state, &stack_arc);
start_time = std::time::Instant::now();
line_editor = if config.use_ansi_coloring.get(engine_state) {
line_editor = if config.use_ansi_coloring {
line_editor.with_hinter(Box::new({
// As of Nov 2022, "hints" color_config closures only get `null` passed in.
let style = style_computer.compute("hints", &Value::nothing(Span::unknown()));
@ -422,7 +386,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
trace!("adding menus");
line_editor =
add_menus(line_editor, engine_reference, &stack_arc, config).unwrap_or_else(|e| {
report_shell_error(engine_state, &e);
report_error_new(engine_state, &e);
Reedline::create()
});
@ -493,12 +457,12 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
.with_completer(Box::<DefaultCompleter>::default());
// Let's grab the shell_integration configs
let shell_integration_osc2 = config.shell_integration.osc2;
let shell_integration_osc7 = config.shell_integration.osc7;
let shell_integration_osc9_9 = config.shell_integration.osc9_9;
let shell_integration_osc133 = config.shell_integration.osc133;
let shell_integration_osc633 = config.shell_integration.osc633;
let shell_integration_reset_application_mode = config.shell_integration.reset_application_mode;
let shell_integration_osc2 = config.shell_integration_osc2;
let shell_integration_osc7 = config.shell_integration_osc7;
let shell_integration_osc9_9 = config.shell_integration_osc9_9;
let shell_integration_osc133 = config.shell_integration_osc133;
let shell_integration_osc633 = config.shell_integration_osc633;
let shell_integration_reset_application_mode = config.shell_integration_reset_application_mode;
// TODO: we may clone the stack, this can lead to major performance issues
// so we should avoid it or making stack cheaper to clone.
@ -528,20 +492,21 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
// Right before we start running the code the user gave us, fire the `pre_execution`
// hook
{
if let Some(hook) = config.hooks.pre_execution.clone() {
// Set the REPL buffer to the current command for the "pre_execution" hook
let mut repl = engine_state.repl_state.lock().expect("repl state mutex");
repl.buffer = repl_cmd_line_text.to_string();
drop(repl);
if let Err(err) = hook::eval_hooks(
if let Err(err) = eval_hook(
engine_state,
&mut stack,
None,
vec![],
&engine_state.get_config().hooks.pre_execution.clone(),
&hook,
"pre_execution",
) {
report_shell_error(engine_state, &err);
report_error_new(engine_state, &err);
}
}
@ -553,10 +518,8 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
drop(repl);
if shell_integration_osc633 {
if stack
.get_env_var(engine_state, "TERM_PROGRAM")
.and_then(|v| v.as_str().ok())
== Some("vscode")
if stack.get_env_var(engine_state, "TERM_PROGRAM")
== Some(Value::test_string("vscode"))
{
start_time = Instant::now();
@ -694,11 +657,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
);
println!();
cleanup_exit((), engine_state, 0);
// if cleanup_exit didn't exit, we should keep running
return (true, stack, line_editor);
return (false, stack, line_editor);
}
Err(err) => {
let message = err.to_string();
@ -777,7 +736,7 @@ fn fill_in_result_related_history_metadata(
c.duration = Some(cmd_duration);
c.exit_status = stack
.get_env_var(engine_state, "LAST_EXIT_CODE")
.and_then(|e| e.as_int().ok());
.and_then(|e| e.as_i64().ok());
c
})
.into_diagnostic()?; // todo: don't stop repl if error here?
@ -816,10 +775,8 @@ fn parse_operation(
) -> Result<ReplOperation, ErrReport> {
let tokens = lex(s.as_bytes(), 0, &[], &[], false);
// Check if this is a single call to a directory, if so auto-cd
let cwd = engine_state
.cwd(Some(stack))
.map(|p| p.to_string_lossy().to_string())
.unwrap_or_default();
#[allow(deprecated)]
let cwd = nu_engine::env::current_dir_str(engine_state, stack).unwrap_or_default();
let mut orig = s.clone();
if orig.starts_with('`') {
orig = trim_quotes_str(&orig).to_string()
@ -851,28 +808,23 @@ fn do_auto_cd(
) {
let path = {
if !path.exists() {
report_shell_error(
report_error_new(
engine_state,
&ShellError::Io(IoError::new_with_additional_context(
std::io::ErrorKind::NotFound,
&ShellError::DirectoryNotFound {
dir: path.to_string_lossy().to_string(),
span,
PathBuf::from(&path),
"Cannot change directory",
)),
},
);
}
path.to_string_lossy().to_string()
};
if let PermissionResult::PermissionDenied = have_permission(path.clone()) {
report_shell_error(
if let PermissionResult::PermissionDenied(reason) = have_permission(path.clone()) {
report_error_new(
engine_state,
&ShellError::Io(IoError::new_with_additional_context(
std::io::ErrorKind::PermissionDenied,
span,
PathBuf::from(path),
"Cannot change directory",
)),
&ShellError::IOError {
msg: format!("Cannot change directory to {path}: {reason}"),
},
);
return;
}
@ -882,14 +834,14 @@ fn do_auto_cd(
//FIXME: this only changes the current scope, but instead this environment variable
//should probably be a block that loads the information from the state in the overlay
if let Err(err) = stack.set_cwd(&path) {
report_shell_error(engine_state, &err);
report_error_new(engine_state, &err);
return;
};
let cwd = Value::string(cwd, span);
let shells = stack.get_env_var(engine_state, "NUSHELL_SHELLS");
let mut shells = if let Some(v) = shells {
v.clone().into_list().unwrap_or_else(|_| vec![cwd])
v.into_list().unwrap_or_else(|_| vec![cwd])
} else {
vec![cwd]
};
@ -915,7 +867,7 @@ fn do_auto_cd(
"NUSHELL_LAST_SHELL".into(),
Value::int(last_shell as i64, span),
);
stack.set_last_exit_code(0, Span::unknown());
stack.add_env_var("LAST_EXIT_CODE".into(), Value::int(0, Span::unknown()));
}
///
@ -936,9 +888,6 @@ fn do_run_cmd(
trace!("eval source: {}", s);
let mut cmds = s.split_whitespace();
let had_warning_before = engine_state.exit_warning_given.load(Ordering::SeqCst);
if let Some("exit") = cmds.next() {
let mut working_set = StateWorkingSet::new(engine_state);
let _ = parse(&mut working_set, None, s.as_bytes(), false);
@ -947,11 +896,13 @@ fn do_run_cmd(
match cmds.next() {
Some(s) => {
if let Ok(n) = s.parse::<i32>() {
return cleanup_exit(line_editor, engine_state, n);
drop(line_editor);
std::process::exit(n);
}
}
None => {
return cleanup_exit(line_editor, engine_state, 0);
drop(line_editor);
std::process::exit(0);
}
}
}
@ -970,14 +921,6 @@ fn do_run_cmd(
false,
);
// if there was a warning before, and we got to this point, it means
// the possible call to cleanup_exit did not occur.
if had_warning_before && engine_state.is_interactive {
engine_state
.exit_warning_given
.store(false, Ordering::SeqCst);
}
line_editor
}
@ -992,7 +935,8 @@ fn run_shell_integration_osc2(
stack: &mut Stack,
use_color: bool,
) {
if let Ok(path) = engine_state.cwd_as_string(Some(stack)) {
#[allow(deprecated)]
if let Ok(path) = current_dir_str(engine_state, stack) {
let start_time = Instant::now();
// Try to abbreviate string for windows title
@ -1036,7 +980,8 @@ fn run_shell_integration_osc7(
stack: &mut Stack,
use_color: bool,
) {
if let Ok(path) = engine_state.cwd_as_string(Some(stack)) {
#[allow(deprecated)]
if let Ok(path) = current_dir_str(engine_state, stack) {
let start_time = Instant::now();
// Otherwise, communicate the path as OSC 7 (often used for spawning new tabs in the same dir)
@ -1059,7 +1004,8 @@ fn run_shell_integration_osc7(
}
fn run_shell_integration_osc9_9(engine_state: &EngineState, stack: &mut Stack, use_color: bool) {
if let Ok(path) = engine_state.cwd_as_string(Some(stack)) {
#[allow(deprecated)]
if let Ok(path) = current_dir_str(engine_state, stack) {
let start_time = Instant::now();
// Otherwise, communicate the path as OSC 9;9 from ConEmu (often used for spawning new tabs in the same dir)
@ -1083,14 +1029,11 @@ fn run_shell_integration_osc633(
use_color: bool,
repl_cmd_line_text: String,
) {
if let Ok(path) = engine_state.cwd_as_string(Some(stack)) {
#[allow(deprecated)]
if let Ok(path) = current_dir_str(engine_state, stack) {
// Supported escape sequences of Microsoft's Visual Studio Code (vscode)
// https://code.visualstudio.com/docs/terminal/shell-integration#_supported-escape-sequences
if stack
.get_env_var(engine_state, "TERM_PROGRAM")
.and_then(|v| v.as_str().ok())
== Some("vscode")
{
if stack.get_env_var(engine_state, "TERM_PROGRAM") == Some(Value::test_string("vscode")) {
let start_time = Instant::now();
// If we're in vscode, run their specific ansi escape sequence.
@ -1108,8 +1051,16 @@ fn run_shell_integration_osc633(
// escape a few things because this says so
// https://code.visualstudio.com/docs/terminal/shell-integration#_vs-code-custom-sequences-osc-633-st
let replaced_cmd_text =
escape_special_vscode_bytes(&repl_cmd_line_text).unwrap_or(repl_cmd_line_text);
let replaced_cmd_text: String = repl_cmd_line_text
.chars()
.map(|c| match c {
'\n' => '\x0a',
'\r' => '\x0d',
'\x1b' => '\x1b',
_ => c,
})
.collect();
//OSC 633 ; E ; <commandline> [; <nonce>] ST - Explicitly set the command line with an optional nonce.
run_ansi_sequence(&format!(
@ -1147,6 +1098,7 @@ fn flush_engine_state_repl_buffer(engine_state: &mut EngineState, line_editor: &
/// Setup history management for Reedline
///
fn setup_history(
nushell_path: &str,
engine_state: &mut EngineState,
line_editor: Reedline,
history: HistoryConfig,
@ -1158,7 +1110,7 @@ fn setup_history(
None
};
if let Some(path) = history.file_path() {
if let Some(path) = crate::config_files::get_history_path(nushell_path, history.file_format) {
return update_line_editor_history(
engine_state,
path,
@ -1174,7 +1126,7 @@ fn setup_history(
/// Setup Reedline keybindingds based on the provided config
///
fn setup_keybindings(engine_state: &EngineState, line_editor: Reedline) -> Reedline {
match create_keybindings(engine_state.get_config()) {
return match create_keybindings(engine_state.get_config()) {
Ok(keybindings) => match keybindings {
KeybindingsMode::Emacs(keybindings) => {
let edit_mode = Box::new(Emacs::new(keybindings));
@ -1189,10 +1141,10 @@ fn setup_keybindings(engine_state: &EngineState, line_editor: Reedline) -> Reedl
}
},
Err(e) => {
report_shell_error(engine_state, &e);
report_error_new(engine_state, &e);
line_editor
}
}
};
}
///
@ -1221,7 +1173,7 @@ fn update_line_editor_history(
history_session_id: Option<HistorySessionId>,
) -> Result<Reedline, ErrReport> {
let history: Box<dyn reedline::History> = match history.file_format {
HistoryFileFormat::Plaintext => Box::new(
HistoryFileFormat::PlainText => Box::new(
FileBackedHistory::with_file(history.max_size as usize, history_path)
.into_diagnostic()?,
),
@ -1259,10 +1211,10 @@ fn confirm_stdin_is_terminal() -> Result<()> {
fn map_nucursorshape_to_cursorshape(shape: NuCursorShape) -> Option<SetCursorStyle> {
match shape {
NuCursorShape::Block => Some(SetCursorStyle::SteadyBlock),
NuCursorShape::Underscore => Some(SetCursorStyle::SteadyUnderScore),
NuCursorShape::UnderScore => Some(SetCursorStyle::SteadyUnderScore),
NuCursorShape::Line => Some(SetCursorStyle::SteadyBar),
NuCursorShape::BlinkBlock => Some(SetCursorStyle::BlinkingBlock),
NuCursorShape::BlinkUnderscore => Some(SetCursorStyle::BlinkingUnderScore),
NuCursorShape::BlinkUnderScore => Some(SetCursorStyle::BlinkingUnderScore),
NuCursorShape::BlinkLine => Some(SetCursorStyle::BlinkingBar),
NuCursorShape::Inherit => None,
}
@ -1276,14 +1228,10 @@ fn get_command_finished_marker(
) -> String {
let exit_code = stack
.get_env_var(engine_state, "LAST_EXIT_CODE")
.and_then(|e| e.as_int().ok());
.and_then(|e| e.as_i64().ok());
if shell_integration_osc633 {
if stack
.get_env_var(engine_state, "TERM_PROGRAM")
.and_then(|v| v.as_str().ok())
== Some("vscode")
{
if stack.get_env_var(engine_state, "TERM_PROGRAM") == Some(Value::test_string("vscode")) {
// We're in vscode and we have osc633 enabled
format!(
"{}{}{}",
@ -1332,11 +1280,7 @@ fn run_finaliziation_ansi_sequence(
) {
if shell_integration_osc633 {
// Only run osc633 if we are in vscode
if stack
.get_env_var(engine_state, "TERM_PROGRAM")
.and_then(|v| v.as_str().ok())
== Some("vscode")
{
if stack.get_env_var(engine_state, "TERM_PROGRAM") == Some(Value::test_string("vscode")) {
let start_time = Instant::now();
run_ansi_sequence(&get_command_finished_marker(
@ -1387,9 +1331,10 @@ fn run_finaliziation_ansi_sequence(
// Absolute paths with a drive letter, like 'C:', 'D:\', 'E:\foo'
#[cfg(windows)]
static DRIVE_PATH_REGEX: std::sync::LazyLock<fancy_regex::Regex> = std::sync::LazyLock::new(|| {
fancy_regex::Regex::new(r"^[a-zA-Z]:[/\\]?").expect("Internal error: regex creation")
});
static DRIVE_PATH_REGEX: once_cell::sync::Lazy<fancy_regex::Regex> =
once_cell::sync::Lazy::new(|| {
fancy_regex::Regex::new(r"^[a-zA-Z]:[/\\]?").expect("Internal error: regex creation")
});
// A best-effort "does this string look kinda like a path?" function to determine whether to auto-cd
fn looks_like_path(orig: &str) -> bool {
@ -1433,7 +1378,8 @@ fn trailing_slash_looks_like_path() {
fn are_session_ids_in_sync() {
let engine_state = &mut EngineState::new();
let history = engine_state.history_config().unwrap();
let history_path = history.file_path().unwrap();
let history_path =
crate::config_files::get_history_path("nushell", history.file_format).unwrap();
let line_editor = reedline::Reedline::create();
let history_session_id = reedline::Reedline::create_history_session_id();
let line_editor = update_line_editor_history(
@ -1451,7 +1397,7 @@ fn are_session_ids_in_sync() {
#[cfg(test)]
mod test_auto_cd {
use super::{do_auto_cd, escape_special_vscode_bytes, parse_operation, ReplOperation};
use super::{do_auto_cd, parse_operation, ReplOperation};
use nu_path::AbsolutePath;
use nu_protocol::engine::{EngineState, Stack};
use tempfile::tempdir;
@ -1589,13 +1535,6 @@ mod test_auto_cd {
symlink(&dir, &link).unwrap();
let input = if cfg!(windows) { r".\link" } else { "./link" };
check(tempdir, input, link);
let dir = tempdir.join("foo").join("bar");
std::fs::create_dir_all(&dir).unwrap();
let link = tempdir.join("link2");
symlink(&dir, &link).unwrap();
let input = "..";
check(link, input, tempdir);
}
#[test]
@ -1608,43 +1547,4 @@ mod test_auto_cd {
let input = if cfg!(windows) { r"foo\" } else { "foo/" };
check(tempdir, input, dir);
}
#[test]
fn escape_vscode_semicolon_test() {
let input = r#"now;is"#;
let expected = r#"now\x3Bis"#;
let actual = escape_special_vscode_bytes(input).unwrap();
assert_eq!(expected, actual);
}
#[test]
fn escape_vscode_backslash_test() {
let input = r#"now\is"#;
let expected = r#"now\\is"#;
let actual = escape_special_vscode_bytes(input).unwrap();
assert_eq!(expected, actual);
}
#[test]
fn escape_vscode_linefeed_test() {
let input = "now\nis";
let expected = r#"now\x0Ais"#;
let actual = escape_special_vscode_bytes(input).unwrap();
assert_eq!(expected, actual);
}
#[test]
fn escape_vscode_tab_null_cr_test() {
let input = "now\t\0\ris";
let expected = r#"now\x09\x00\x0Dis"#;
let actual = escape_special_vscode_bytes(input).unwrap();
assert_eq!(expected, actual);
}
#[test]
fn escape_vscode_multibyte_ok() {
let input = "now🍪is";
let actual = escape_special_vscode_bytes(input).unwrap();
assert_eq!(input, actual);
}
}
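One small change in this file swaps between `once_cell::sync::Lazy` and the standard library's `std::sync::LazyLock` (stable since Rust 1.80) for the drive-letter regex. A minimal sketch of that pattern, using a plain string in place of the compiled regex:

```rust
use std::sync::LazyLock;

// Initialized at most once, on first access; later accesses reuse the value.
static DRIVE_PREFIX: LazyLock<String> = LazyLock::new(|| {
    // Stand-in for an expensive setup step such as compiling a regex.
    String::from(r"^[a-zA-Z]:[/\\]?")
});

fn main() {
    // Both uses observe the same lazily built string.
    println!("pattern: {}", &*DRIVE_PREFIX);
    assert!(DRIVE_PREFIX.starts_with('^'));
}
```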

View File

@ -144,6 +144,8 @@ impl Highlighter for NuHighlighter {
}
FlatShape::Flag => add_colored_token(&shape.1, next_token),
FlatShape::Pipe => add_colored_token(&shape.1, next_token),
FlatShape::And => add_colored_token(&shape.1, next_token),
FlatShape::Or => add_colored_token(&shape.1, next_token),
FlatShape::Redirection => add_colored_token(&shape.1, next_token),
FlatShape::Custom(..) => add_colored_token(&shape.1, next_token),
FlatShape::MatchPattern => add_colored_token(&shape.1, next_token),
@ -309,7 +311,6 @@ fn find_matching_block_end_in_expr(
.unwrap_or(expression.span.start);
return match &expression.expr {
// TODO: Can't these be handled with an `_ => None` branch? Refactor
Expr::Bool(_) => None,
Expr::Int(_) => None,
Expr::Float(_) => None,
@ -336,28 +337,6 @@ fn find_matching_block_end_in_expr(
Expr::Nothing => None,
Expr::Garbage => None,
Expr::AttributeBlock(ab) => ab
.attributes
.iter()
.find_map(|attr| {
find_matching_block_end_in_expr(
line,
working_set,
&attr.expr,
global_span_offset,
global_cursor_offset,
)
})
.or_else(|| {
find_matching_block_end_in_expr(
line,
working_set,
&ab.item,
global_span_offset,
global_cursor_offset,
)
}),
Expr::Table(table) => {
if expr_last == global_cursor_offset {
// cursor is at table end

View File

@ -1,18 +1,14 @@
#![allow(clippy::byte_char_slices)]
use nu_cmd_base::hook::eval_hook;
use nu_engine::{eval_block, eval_block_with_early_return};
use nu_parser::{lex, parse, unescape_unquote_string, Token, TokenContents};
use nu_parser::{escape_quote_string, lex, parse, unescape_unquote_string, Token, TokenContents};
use nu_protocol::{
cli_error::report_compile_error,
debugger::WithoutDebug,
engine::{EngineState, Stack, StateWorkingSet},
report_parse_error, report_parse_warning, report_shell_error, PipelineData, ShellError, Span,
Value,
report_error, report_error_new, PipelineData, ShellError, Span, Value,
};
#[cfg(windows)]
use nu_utils::enable_vt_processing;
use nu_utils::{escape_quote_string, perf};
use nu_utils::perf;
use std::path::Path;
// This will collect environment variables from std::env and adds them to a stack.
@ -43,7 +39,7 @@ fn gather_env_vars(
init_cwd: &Path,
) {
fn report_capture_error(engine_state: &EngineState, env_str: &str, msg: &str) {
report_shell_error(
report_error_new(
engine_state,
&ShellError::GenericError {
error: format!("Environment variable was not captured: {env_str}"),
@ -74,7 +70,7 @@ fn gather_env_vars(
}
None => {
// Could not capture current working directory
report_shell_error(
report_error_new(
engine_state,
&ShellError::GenericError {
error: "Current directory is not a valid utf-8 path".into(),
@ -132,7 +128,7 @@ fn gather_env_vars(
working_set.error(err);
}
if !working_set.parse_errors.is_empty() {
if working_set.parse_errors.first().is_some() {
report_capture_error(
engine_state,
&String::from_utf8_lossy(contents),
@ -176,7 +172,7 @@ fn gather_env_vars(
working_set.error(err);
}
if !working_set.parse_errors.is_empty() {
if working_set.parse_errors.first().is_some() {
report_capture_error(
engine_state,
&String::from_utf8_lossy(contents),
@ -203,35 +199,6 @@ fn gather_env_vars(
}
}
/// Print a pipeline with formatting applied based on display_output hook.
///
/// This function should be preferred when printing values resulting from a completed evaluation.
/// For values printed as part of a command's execution, such as values printed by the `print` command,
/// the `PipelineData::print_table` function should be preferred instead as it is not config-dependent.
///
/// `no_newline` controls if we need to attach newline character to output.
pub fn print_pipeline(
engine_state: &mut EngineState,
stack: &mut Stack,
pipeline: PipelineData,
no_newline: bool,
) -> Result<(), ShellError> {
if let Some(hook) = engine_state.get_config().hooks.display_output.clone() {
let pipeline = eval_hook(
engine_state,
stack,
Some(pipeline),
vec![],
&hook,
"display_output",
)?;
pipeline.print_raw(engine_state, no_newline, false)
} else {
// if display_output isn't set, we should still prefer to print with some formatting
pipeline.print_table(engine_state, stack, no_newline, false)
}
}
pub fn eval_source(
engine_state: &mut EngineState,
stack: &mut Stack,
@ -243,19 +210,18 @@ pub fn eval_source(
let start_time = std::time::Instant::now();
let exit_code = match evaluate_source(engine_state, stack, source, fname, input, allow_return) {
Ok(failed) => {
let code = failed.into();
stack.set_last_exit_code(code, Span::unknown());
code
}
Ok(code) => code.unwrap_or(0),
Err(err) => {
report_shell_error(engine_state, &err);
let code = err.exit_code();
stack.set_last_error(&err);
code.unwrap_or(0)
report_error_new(engine_state, &err);
1
}
};
stack.add_env_var(
"LAST_EXIT_CODE".to_string(),
Value::int(exit_code.into(), Span::unknown()),
);
// reset vt processing, aka ansi because illbehaved externals can break it
#[cfg(windows)]
{
@ -265,10 +231,7 @@ pub fn eval_source(
perf!(
&format!("eval_source {}", &fname),
start_time,
engine_state
.get_config()
.use_ansi_coloring
.get(engine_state)
engine_state.get_config().use_ansi_coloring
);
exit_code
@ -281,7 +244,7 @@ fn evaluate_source(
fname: &str,
input: PipelineData,
allow_return: bool,
) -> Result<bool, ShellError> {
) -> Result<Option<i32>, ShellError> {
let (block, delta) = {
let mut working_set = StateWorkingSet::new(engine_state);
let output = parse(
@ -291,17 +254,17 @@ fn evaluate_source(
false,
);
if let Some(warning) = working_set.parse_warnings.first() {
report_parse_warning(&working_set, warning);
report_error(&working_set, warning);
}
if let Some(err) = working_set.parse_errors.first() {
report_parse_error(&working_set, err);
return Ok(true);
report_error(&working_set, err);
return Ok(Some(1));
}
if let Some(err) = working_set.compile_errors.first() {
report_compile_error(&working_set, err);
return Ok(true);
report_error(&working_set, err);
// Not a fatal error, for now
}
(output, working_set.render())
@ -315,10 +278,25 @@ fn evaluate_source(
eval_block::<WithoutDebug>(engine_state, stack, &block, input)
}?;
let no_newline = matches!(&pipeline, &PipelineData::ByteStream(..));
print_pipeline(engine_state, stack, pipeline, no_newline)?;
let status = if let PipelineData::ByteStream(..) = pipeline {
pipeline.print(engine_state, stack, false, false)?
} else {
if let Some(hook) = engine_state.get_config().hooks.display_output.clone() {
let pipeline = eval_hook(
engine_state,
stack,
Some(pipeline),
vec![],
&hook,
"display_output",
)?;
pipeline.print(engine_state, stack, false, false)
} else {
pipeline.print(engine_state, stack, true, false)
}?
};
Ok(false)
Ok(status.map(|status| status.code()))
}
#[cfg(test)]
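The exit-code plumbing above differs between the two versions (a `bool` plus `stack.set_last_exit_code` on one side, an `Option<i32>` written to `LAST_EXIT_CODE` on the other). A much-simplified model of recording the last evaluation's exit code, with made-up parsing logic:

```rust
// Simplified model: record the exit code of the last evaluation so later
// code can inspect it, as nushell exposes via $env.LAST_EXIT_CODE.
struct Shell {
    last_exit_code: i32,
}

impl Shell {
    fn eval_source(&mut self, source: &str) -> i32 {
        // Stand-in "parser": treat blank input as an error.
        let result: Result<(), String> = if source.trim().is_empty() {
            Err("parse error: empty source".into())
        } else {
            Ok(())
        };
        let code = match result {
            Ok(()) => 0,
            Err(msg) => {
                eprintln!("{msg}");
                1
            }
        };
        self.last_exit_code = code;
        code
    }
}

fn main() {
    let mut shell = Shell { last_exit_code: 0 };
    assert_eq!(shell.eval_source("ls"), 0);
    assert_eq!(shell.eval_source("   "), 1);
    assert_eq!(shell.last_exit_code, 1);
}
```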

View File

@ -1,296 +0,0 @@
use nu_protocol::HistoryFileFormat;
use nu_test_support::{nu, Outcome};
use reedline::{
FileBackedHistory, History, HistoryItem, HistoryItemId, ReedlineError, SearchQuery,
SqliteBackedHistory,
};
use rstest::rstest;
use tempfile::TempDir;
struct Test {
cfg_dir: TempDir,
}
impl Test {
fn new(history_format: &'static str) -> Self {
let cfg_dir = tempfile::Builder::new()
.prefix("history_import_test")
.tempdir()
.unwrap();
// Assigning to $env.config.history.file_format seems to work only in startup
// configuration.
std::fs::write(
cfg_dir.path().join("env.nu"),
format!("$env.config.history.file_format = {history_format:?}"),
)
.unwrap();
Self { cfg_dir }
}
fn nu(&self, cmd: impl AsRef<str>) -> Outcome {
let env = [(
"XDG_CONFIG_HOME".to_string(),
self.cfg_dir.path().to_str().unwrap().to_string(),
)];
let env_config = self.cfg_dir.path().join("env.nu");
nu!(envs: env, env_config: env_config, cmd.as_ref())
}
fn open_plaintext(&self) -> Result<FileBackedHistory, ReedlineError> {
FileBackedHistory::with_file(
100,
self.cfg_dir
.path()
.join("nushell")
.join(HistoryFileFormat::Plaintext.default_file_name()),
)
}
fn open_sqlite(&self) -> Result<SqliteBackedHistory, ReedlineError> {
SqliteBackedHistory::with_file(
self.cfg_dir
.path()
.join("nushell")
.join(HistoryFileFormat::Sqlite.default_file_name()),
None,
None,
)
}
fn open_backend(&self, format: HistoryFileFormat) -> Result<Box<dyn History>, ReedlineError> {
fn boxed(be: impl History + 'static) -> Box<dyn History> {
Box::new(be)
}
use HistoryFileFormat::*;
match format {
Plaintext => self.open_plaintext().map(boxed),
Sqlite => self.open_sqlite().map(boxed),
}
}
}
enum HistorySource {
Vec(Vec<HistoryItem>),
Command(&'static str),
}
struct TestCase {
dst_format: HistoryFileFormat,
dst_history: Vec<HistoryItem>,
src_history: HistorySource,
want_history: Vec<HistoryItem>,
}
const EMPTY_TEST_CASE: TestCase = TestCase {
dst_format: HistoryFileFormat::Plaintext,
dst_history: Vec::new(),
src_history: HistorySource::Vec(Vec::new()),
want_history: Vec::new(),
};
impl TestCase {
fn run(self) {
use HistoryFileFormat::*;
let test = Test::new(match self.dst_format {
Plaintext => "plaintext",
Sqlite => "sqlite",
});
save_all(
&mut *test.open_backend(self.dst_format).unwrap(),
self.dst_history,
)
.unwrap();
let outcome = match self.src_history {
HistorySource::Vec(src_history) => {
let src_format = match self.dst_format {
Plaintext => Sqlite,
Sqlite => Plaintext,
};
save_all(&mut *test.open_backend(src_format).unwrap(), src_history).unwrap();
test.nu("history import")
}
HistorySource::Command(cmd) => {
let mut cmd = cmd.to_string();
cmd.push_str(" | history import");
test.nu(cmd)
}
};
assert!(outcome.status.success());
let got = query_all(&*test.open_backend(self.dst_format).unwrap()).unwrap();
// Compare just the commands first, for readability.
fn commands_only(items: &[HistoryItem]) -> Vec<&str> {
items
.iter()
.map(|item| item.command_line.as_str())
.collect()
}
assert_eq!(commands_only(&got), commands_only(&self.want_history));
// If commands match, compare full items.
assert_eq!(got, self.want_history);
}
}
fn query_all(history: &dyn History) -> Result<Vec<HistoryItem>, ReedlineError> {
history.search(SearchQuery::everything(
reedline::SearchDirection::Forward,
None,
))
}
fn save_all(history: &mut dyn History, items: Vec<HistoryItem>) -> Result<(), ReedlineError> {
for item in items {
history.save(item)?;
}
Ok(())
}
const EMPTY_ITEM: HistoryItem = HistoryItem {
command_line: String::new(),
id: None,
start_timestamp: None,
session_id: None,
hostname: None,
cwd: None,
duration: None,
exit_status: None,
more_info: None,
};
#[test]
fn history_import_pipe_string() {
TestCase {
dst_format: HistoryFileFormat::Plaintext,
src_history: HistorySource::Command("echo bar"),
want_history: vec![HistoryItem {
id: Some(HistoryItemId::new(0)),
command_line: "bar".to_string(),
..EMPTY_ITEM
}],
..EMPTY_TEST_CASE
}
.run();
}
#[test]
fn history_import_pipe_record() {
TestCase {
dst_format: HistoryFileFormat::Sqlite,
src_history: HistorySource::Command("[[cwd command]; [/tmp some_command]]"),
want_history: vec![HistoryItem {
id: Some(HistoryItemId::new(1)),
command_line: "some_command".to_string(),
cwd: Some("/tmp".to_string()),
..EMPTY_ITEM
}],
..EMPTY_TEST_CASE
}
.run();
}
#[test]
fn to_empty_plaintext() {
TestCase {
dst_format: HistoryFileFormat::Plaintext,
src_history: HistorySource::Vec(vec![
HistoryItem {
command_line: "foo".to_string(),
..EMPTY_ITEM
},
HistoryItem {
command_line: "bar".to_string(),
..EMPTY_ITEM
},
]),
want_history: vec![
HistoryItem {
id: Some(HistoryItemId::new(0)),
command_line: "foo".to_string(),
..EMPTY_ITEM
},
HistoryItem {
id: Some(HistoryItemId::new(1)),
command_line: "bar".to_string(),
..EMPTY_ITEM
},
],
..EMPTY_TEST_CASE
}
.run()
}
#[test]
fn to_empty_sqlite() {
TestCase {
dst_format: HistoryFileFormat::Sqlite,
src_history: HistorySource::Vec(vec![
HistoryItem {
command_line: "foo".to_string(),
..EMPTY_ITEM
},
HistoryItem {
command_line: "bar".to_string(),
..EMPTY_ITEM
},
]),
want_history: vec![
HistoryItem {
id: Some(HistoryItemId::new(1)),
command_line: "foo".to_string(),
..EMPTY_ITEM
},
HistoryItem {
id: Some(HistoryItemId::new(2)),
command_line: "bar".to_string(),
..EMPTY_ITEM
},
],
..EMPTY_TEST_CASE
}
.run()
}
#[rstest]
#[case::plaintext(HistoryFileFormat::Plaintext)]
#[case::sqlite(HistoryFileFormat::Sqlite)]
fn to_existing(#[case] dst_format: HistoryFileFormat) {
TestCase {
dst_format,
dst_history: vec![
HistoryItem {
id: Some(HistoryItemId::new(0)),
command_line: "original-1".to_string(),
..EMPTY_ITEM
},
HistoryItem {
id: Some(HistoryItemId::new(1)),
command_line: "original-2".to_string(),
..EMPTY_ITEM
},
],
src_history: HistorySource::Vec(vec![HistoryItem {
id: Some(HistoryItemId::new(1)),
command_line: "new".to_string(),
..EMPTY_ITEM
}]),
want_history: vec![
HistoryItem {
id: Some(HistoryItemId::new(0)),
command_line: "original-1".to_string(),
..EMPTY_ITEM
},
HistoryItem {
id: Some(HistoryItemId::new(1)),
command_line: "original-2".to_string(),
..EMPTY_ITEM
},
HistoryItem {
id: Some(HistoryItemId::new(2)),
command_line: "new".to_string(),
..EMPTY_ITEM
},
],
}
.run()
}
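The tests above drive both history backends through reedline's `History` trait. A minimal sketch of round-tripping one item through the plaintext backend, using the same calls as the tests; the file path and capacity are illustrative:

```rust
use reedline::{FileBackedHistory, History, HistoryItem, SearchQuery};

fn main() -> Result<(), reedline::ReedlineError> {
    // Illustrative path and capacity; the real tests build the path in a temp dir.
    let mut history =
        FileBackedHistory::with_file(100, std::path::PathBuf::from("/tmp/history_demo.txt"))?;
    history.save(HistoryItem {
        command_line: "ls -la".to_string(),
        id: None,
        start_timestamp: None,
        session_id: None,
        hostname: None,
        cwd: None,
        duration: None,
        exit_status: None,
        more_info: None,
    })?;
    let items = history.search(SearchQuery::everything(
        reedline::SearchDirection::Forward,
        None,
    ))?;
    for item in &items {
        println!("{}", item.command_line);
    }
    Ok(())
}
```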

View File

@ -1,3 +1,2 @@
mod history_import;
mod keybindings_list;
mod nu_highlight;

File diff suppressed because it is too large.

View File

@ -14,7 +14,7 @@ fn create_default_context() -> EngineState {
nu_command::add_shell_command_context(nu_cmd_lang::create_default_context())
}
/// creates a new engine with the current path into the completions fixtures folder
// creates a new engine with the current path into the completions fixtures folder
pub fn new_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
// Target folder inside assets
let dir = fs::fixtures().join("completions");
@ -63,32 +63,13 @@ pub fn new_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
);
// Merge environment into the permanent state
let merge_result = engine_state.merge_env(&mut stack);
let merge_result = engine_state.merge_env(&mut stack, &dir);
assert!(merge_result.is_ok());
(dir, dir_str, engine_state, stack)
}
/// Adds pseudo PATH env for external completion tests
pub fn new_external_engine() -> EngineState {
let mut engine = create_default_context();
let dir = fs::fixtures().join("external_completions").join("path");
let dir_str = dir.to_string_lossy().to_string();
let internal_span = nu_protocol::Span::new(0, dir_str.len());
engine.add_env_var(
"PATH".to_string(),
Value::List {
vals: vec![Value::String {
val: dir_str,
internal_span,
}],
internal_span,
},
);
engine
}
/// creates a new engine with the current path into the completions fixtures folder
// creates a new engine with the current path into the completions fixtures folder
pub fn new_dotnu_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
// Target folder inside assets
let dir = fs::fixtures().join("dotnu_completions");
@ -105,23 +86,6 @@ pub fn new_dotnu_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
// Add $nu
engine_state.generate_nu_constant();
// const $NU_LIB_DIRS
let mut working_set = StateWorkingSet::new(&engine_state);
let var_id = working_set.add_variable(
b"$NU_LIB_DIRS".into(),
Span::unknown(),
nu_protocol::Type::List(Box::new(nu_protocol::Type::String)),
false,
);
working_set.set_variable_const_val(
var_id,
Value::test_list(vec![
Value::string(file(dir.join("lib-dir1")), dir_span),
Value::string(file(dir.join("lib-dir3")), dir_span),
]),
);
let _ = engine_state.merge_delta(working_set.render());
// New stack
let mut stack = Stack::new();
@ -131,16 +95,21 @@ pub fn new_dotnu_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
"TEST".to_string(),
Value::string("NUSHELL".to_string(), dir_span),
);
stack.add_env_var(
"NU_LIB_DIRS".into(),
Value::test_list(vec![
Value::string(file(dir.join("lib-dir2")), dir_span),
Value::string(file(dir.join("lib-dir3")), dir_span),
]),
"NU_LIB_DIRS".to_string(),
Value::List {
vals: vec![
Value::string(file(dir.join("lib-dir1")), dir_span),
Value::string(file(dir.join("lib-dir2")), dir_span),
Value::string(file(dir.join("lib-dir3")), dir_span),
],
internal_span: dir_span,
},
);
// Merge environment into the permanent state
let merge_result = engine_state.merge_env(&mut stack);
let merge_result = engine_state.merge_env(&mut stack, &dir);
assert!(merge_result.is_ok());
(dir, dir_str, engine_state, stack)
@ -175,7 +144,7 @@ pub fn new_quote_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
);
// Merge environment into the permanent state
let merge_result = engine_state.merge_env(&mut stack);
let merge_result = engine_state.merge_env(&mut stack, &dir);
assert!(merge_result.is_ok());
(dir, dir_str, engine_state, stack)
@ -210,14 +179,14 @@ pub fn new_partial_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
);
// Merge environment into the permanent state
let merge_result = engine_state.merge_env(&mut stack);
let merge_result = engine_state.merge_env(&mut stack, &dir);
assert!(merge_result.is_ok());
(dir, dir_str, engine_state, stack)
}
/// match a list of suggestions with the expected values
pub fn match_suggestions(expected: &Vec<&str>, suggestions: &Vec<Suggestion>) {
// match a list of suggestions with the expected values
pub fn match_suggestions(expected: &Vec<String>, suggestions: &Vec<Suggestion>) {
let expected_len = expected.len();
let suggestions_len = suggestions.len();
if expected_len != suggestions_len {
@ -228,38 +197,33 @@ pub fn match_suggestions(expected: &Vec<&str>, suggestions: &Vec<Suggestion>) {
)
}
let suggestions_str = suggestions
let suggestoins_str = suggestions
.iter()
.map(|it| it.value.as_str())
.map(|it| it.value.clone())
.collect::<Vec<_>>();
assert_eq!(expected, &suggestions_str);
assert_eq!(expected, &suggestoins_str);
}
/// match a list of suggestions with the expected values
pub fn match_suggestions_by_string(expected: &[String], suggestions: &Vec<Suggestion>) {
let expected = expected.iter().map(|it| it.as_str()).collect::<Vec<_>>();
match_suggestions(&expected, suggestions);
}
/// append the separator to the converted path
// append the separator to the converted path
pub fn folder(path: impl Into<PathBuf>) -> String {
let mut converted_path = file(path);
converted_path.push(MAIN_SEPARATOR);
converted_path
}
/// convert a given path to string
// convert a given path to string
pub fn file(path: impl Into<PathBuf>) -> String {
path.into().into_os_string().into_string().unwrap()
}
/// merge_input executes the given input into the engine
/// and merges the state
// merge_input executes the given input into the engine
// and merges the state
pub fn merge_input(
input: &[u8],
engine_state: &mut EngineState,
stack: &mut Stack,
dir: AbsolutePathBuf,
) -> Result<(), ShellError> {
let (block, delta) = {
let mut working_set = StateWorkingSet::new(engine_state);
@ -282,5 +246,5 @@ pub fn merge_input(
.is_ok());
// Merge environment into the permanent state
engine_state.merge_env(stack)
engine_state.merge_env(stack, &dir)
}

View File

@ -1,5 +1,3 @@
pub mod completions_helpers;
pub use completions_helpers::{
file, folder, match_suggestions, match_suggestions_by_string, merge_input, new_engine,
};
pub use completions_helpers::{file, folder, match_suggestions, merge_input, new_engine};

View File

@ -5,18 +5,15 @@ edition = "2021"
license = "MIT"
name = "nu-cmd-base"
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-base"
version = "0.104.1"
version = "0.97.0"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lints]
workspace = true
[dependencies]
nu-engine = { path = "../nu-engine", version = "0.104.1", default-features = false }
nu-parser = { path = "../nu-parser", version = "0.104.1" }
nu-path = { path = "../nu-path", version = "0.104.1" }
nu-protocol = { path = "../nu-protocol", version = "0.104.1", default-features = false }
nu-engine = { path = "../nu-engine", version = "0.97.0" }
nu-parser = { path = "../nu-parser", version = "0.97.0" }
nu-path = { path = "../nu-path", version = "0.97.0" }
nu-protocol = { path = "../nu-protocol", version = "0.97.0" }
indexmap = { workspace = true }
miette = { workspace = true }

View File

@ -1,61 +1,61 @@
use crate::util::get_guaranteed_cwd;
use miette::Result;
use nu_engine::{eval_block, eval_block_with_early_return};
use nu_parser::parse;
use nu_protocol::{
cli_error::{report_parse_error, report_shell_error},
cli_error::{report_error, report_error_new},
debugger::WithoutDebug,
engine::{Closure, EngineState, Stack, StateWorkingSet},
PipelineData, PositionalArg, ShellError, Span, Type, Value, VarId,
};
use std::{collections::HashMap, sync::Arc};
use std::sync::Arc;
pub fn eval_env_change_hook(
env_change_hook: &HashMap<String, Vec<Value>>,
env_change_hook: Option<Value>,
engine_state: &mut EngineState,
stack: &mut Stack,
) -> Result<(), ShellError> {
for (env, hooks) in env_change_hook {
let before = engine_state.previous_env_vars.get(env);
let after = stack.get_env_var(engine_state, env);
if before != after {
let before = before.cloned().unwrap_or_default();
let after = after.cloned().unwrap_or_default();
if let Some(hook) = env_change_hook {
match hook {
Value::Record { val, .. } => {
for (env_name, hook_value) in &*val {
let before = engine_state
.previous_env_vars
.get(env_name)
.cloned()
.unwrap_or_default();
eval_hooks(
engine_state,
stack,
vec![("$before".into(), before), ("$after".into(), after.clone())],
hooks,
"env_change",
)?;
let after = stack
.get_env_var(engine_state, env_name)
.unwrap_or_default();
Arc::make_mut(&mut engine_state.previous_env_vars).insert(env.clone(), after);
if before != after {
eval_hook(
engine_state,
stack,
None,
vec![("$before".into(), before), ("$after".into(), after.clone())],
hook_value,
"env_change",
)?;
Arc::make_mut(&mut engine_state.previous_env_vars)
.insert(env_name.to_string(), after);
}
}
}
x => {
return Err(ShellError::TypeMismatch {
err_message: "record for the 'env_change' hook".to_string(),
span: x.span(),
});
}
}
}
Ok(())
}
pub fn eval_hooks(
engine_state: &mut EngineState,
stack: &mut Stack,
arguments: Vec<(String, Value)>,
hooks: &[Value],
hook_name: &str,
) -> Result<(), ShellError> {
for hook in hooks {
eval_hook(
engine_state,
stack,
None,
arguments.clone(),
hook,
&format!("{hook_name} list, recursive"),
)?;
}
Ok(())
}
pub fn eval_hook(
engine_state: &mut EngineState,
stack: &mut Stack,
@ -91,13 +91,12 @@ pub fn eval_hook(
false,
);
if let Some(err) = working_set.parse_errors.first() {
report_parse_error(&working_set, err);
return Err(ShellError::GenericError {
error: format!("Failed to run {hook_name} hook"),
msg: "source code has errors".into(),
span: Some(span),
help: None,
inner: Vec::new(),
report_error(&working_set, err);
return Err(ShellError::UnsupportedConfigValue {
expected: "valid source code".into(),
value: "source code with syntax errors".into(),
span,
});
}
@ -124,7 +123,7 @@ pub fn eval_hook(
output = pipeline_data;
}
Err(err) => {
report_shell_error(engine_state, &err);
report_error_new(engine_state, &err);
}
}
@ -133,7 +132,16 @@ pub fn eval_hook(
}
}
Value::List { vals, .. } => {
eval_hooks(engine_state, stack, arguments, vals, hook_name)?;
for val in vals {
eval_hook(
engine_state,
stack,
None,
arguments.clone(),
val,
&format!("{hook_name} list, recursive"),
)?;
}
}
Value::Record { val, .. } => {
// Hooks can optionally be a record in this form:
@ -159,10 +167,10 @@ pub fn eval_hook(
{
val
} else {
return Err(ShellError::RuntimeTypeMismatch {
expected: Type::Bool,
actual: pipeline_data.get_type(),
span: pipeline_data.span().unwrap_or(other_span),
return Err(ShellError::UnsupportedConfigValue {
expected: "boolean output".to_string(),
value: "other PipelineData variant".to_string(),
span: other_span,
});
}
}
@ -171,9 +179,9 @@ pub fn eval_hook(
}
}
} else {
return Err(ShellError::RuntimeTypeMismatch {
expected: Type::Closure,
actual: condition.get_type(),
return Err(ShellError::UnsupportedConfigValue {
expected: "block".to_string(),
value: format!("{}", condition.get_type()),
span: other_span,
});
}
@ -215,13 +223,12 @@ pub fn eval_hook(
false,
);
if let Some(err) = working_set.parse_errors.first() {
report_parse_error(&working_set, err);
return Err(ShellError::GenericError {
error: format!("Failed to run {hook_name} hook"),
msg: "source code has errors".into(),
span: Some(span),
help: None,
inner: Vec::new(),
report_error(&working_set, err);
return Err(ShellError::UnsupportedConfigValue {
expected: "valid source code".into(),
value: "source code with syntax errors".into(),
span: source_span,
});
}
@ -244,7 +251,7 @@ pub fn eval_hook(
output = pipeline_data;
}
Err(err) => {
report_shell_error(engine_state, &err);
report_error_new(engine_state, &err);
}
}
@ -256,9 +263,9 @@ pub fn eval_hook(
run_hook(engine_state, stack, val, input, arguments, source_span)?;
}
other => {
return Err(ShellError::RuntimeTypeMismatch {
expected: Type::custom("string or closure"),
actual: other.get_type(),
return Err(ShellError::UnsupportedConfigValue {
expected: "block or string".to_string(),
value: format!("{}", other.get_type()),
span: source_span,
});
}
@ -269,15 +276,16 @@ pub fn eval_hook(
output = run_hook(engine_state, stack, val, input, arguments, span)?;
}
other => {
return Err(ShellError::RuntimeTypeMismatch {
expected: Type::custom("string, closure, record, or list"),
actual: other.get_type(),
return Err(ShellError::UnsupportedConfigValue {
expected: "string, block, record, or list of commands".into(),
value: format!("{}", other.get_type()),
span: other.span(),
});
}
}
engine_state.merge_env(stack)?;
let cwd = get_guaranteed_cwd(engine_state, stack);
engine_state.merge_env(stack, cwd)?;
Ok(output)
}
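The hunk above reworks env-change hooks: each environment variable maps to a list of hook values, a hook only fires when the stored previous value differs from the current one, and the snapshot is updated afterwards. A minimal, self-contained sketch of that change-detection loop, with plain std types standing in for Value, EngineState, and Stack (the names below are illustrative, not the nu-protocol API):

use std::collections::HashMap;

/// Stand-in for the engine's `previous_env_vars` snapshot and the stack's
/// current environment; a "hook" here is just a callback over (before, after).
fn run_env_change_hooks(
    previous: &mut HashMap<String, String>,
    current: &HashMap<String, String>,
    hooks: &HashMap<String, Vec<fn(&str, &str)>>,
) {
    for (env, hook_list) in hooks {
        let before = previous.get(env).cloned().unwrap_or_default();
        let after = current.get(env).cloned().unwrap_or_default();
        if before != after {
            for hook in hook_list {
                // Each hook sees the old and new value, much like $before/$after above.
                hook(&before, &after);
            }
            // Record the new value only after the hooks have run.
            previous.insert(env.clone(), after);
        }
    }
}

fn main() {
    let mut previous = HashMap::from([("PWD".to_string(), "/old".to_string())]);
    let current = HashMap::from([("PWD".to_string(), "/new".to_string())]);
    let hooks = HashMap::from([(
        "PWD".to_string(),
        vec![(|before: &str, after: &str| println!("PWD changed: {before} -> {after}")) as fn(&str, &str)],
    )]);
    run_env_change_hooks(&mut previous, &current, &hooks);
    assert_eq!(previous["PWD"], "/new");
}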

View File

@ -1,9 +1,30 @@
use nu_path::AbsolutePathBuf;
use nu_protocol::{
engine::{EngineState, Stack},
Range, ShellError, Span, Value,
};
use std::ops::Bound;
pub fn get_init_cwd() -> AbsolutePathBuf {
std::env::current_dir()
.ok()
.and_then(|path| AbsolutePathBuf::try_from(path).ok())
.or_else(|| {
std::env::var("PWD")
.ok()
.and_then(|path| AbsolutePathBuf::try_from(path).ok())
})
.or_else(nu_path::home_dir)
.expect("Failed to get current working directory")
}
pub fn get_guaranteed_cwd(engine_state: &EngineState, stack: &Stack) -> AbsolutePathBuf {
engine_state
.cwd(Some(stack))
.ok()
.unwrap_or_else(get_init_cwd)
}
type MakeRangeError = fn(&str, Span) -> ShellError;
/// Returns an inclusive pair of boundaries for the given `range`.
@ -78,10 +99,10 @@ pub fn get_editor(
get_editor_commandline(&config.buffer_editor, "$env.config.buffer_editor")
{
Ok(buff_editor)
} else if let Some(value) = env_vars.get("VISUAL") {
get_editor_commandline(value, "$env.VISUAL")
} else if let Some(value) = env_vars.get("EDITOR") {
get_editor_commandline(value, "$env.EDITOR")
} else if let Some(value) = env_vars.get("VISUAL") {
get_editor_commandline(value, "$env.VISUAL")
} else {
Err(ShellError::GenericError {
error: "No editor configured".into(),

View File

@ -5,24 +5,21 @@ edition = "2021"
license = "MIT"
name = "nu-cmd-extra"
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-extra"
version = "0.104.1"
version = "0.97.0"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
bench = false
[lints]
workspace = true
[dependencies]
nu-cmd-base = { path = "../nu-cmd-base", version = "0.104.1" }
nu-engine = { path = "../nu-engine", version = "0.104.1", default-features = false }
nu-json = { version = "0.104.1", path = "../nu-json" }
nu-parser = { path = "../nu-parser", version = "0.104.1" }
nu-pretty-hex = { version = "0.104.1", path = "../nu-pretty-hex" }
nu-protocol = { path = "../nu-protocol", version = "0.104.1", default-features = false }
nu-utils = { path = "../nu-utils", version = "0.104.1", default-features = false }
nu-cmd-base = { path = "../nu-cmd-base", version = "0.97.0" }
nu-engine = { path = "../nu-engine", version = "0.97.0" }
nu-json = { version = "0.97.0", path = "../nu-json" }
nu-parser = { path = "../nu-parser", version = "0.97.0" }
nu-pretty-hex = { version = "0.97.0", path = "../nu-pretty-hex" }
nu-protocol = { path = "../nu-protocol", version = "0.97.0" }
nu-utils = { path = "../nu-utils", version = "0.97.0" }
# Potential dependencies for extras
heck = { workspace = true }
@ -34,9 +31,8 @@ serde = { workspace = true }
serde_urlencoded = { workspace = true }
v_htmlescape = { workspace = true }
itertools = { workspace = true }
mime = { workspace = true }
[dev-dependencies]
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.104.1" }
nu-command = { path = "../nu-command", version = "0.104.1" }
nu-test-support = { path = "../nu-test-support", version = "0.104.1" }
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.97.0" }
nu-command = { path = "../nu-command", version = "0.97.0" }
nu-test-support = { path = "../nu-test-support", version = "0.97.0" }

View File

@ -43,12 +43,7 @@ mod test_examples {
signature.operates_on_cell_paths(),
),
);
check_example_evaluates_to_expected_output(
cmd.name(),
&example,
cwd.as_path(),
&mut engine_state,
);
check_example_evaluates_to_expected_output(&example, cwd.as_path(), &mut engine_state);
}
check_all_signature_input_output_types_entries_have_examples(

View File

@ -26,7 +26,7 @@ impl Command for BitsAnd {
.required(
"target",
SyntaxShape::OneOf(vec![SyntaxShape::Binary, SyntaxShape::Int]),
"Right-hand side of the operation.",
"right-hand side of the operation",
)
.named(
"endian",
@ -37,7 +37,7 @@ impl Command for BitsAnd {
.category(Category::Bits)
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Performs bitwise and for ints or binary values."
}
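Most of the command files that follow show the same mechanical rename: the trait methods usage/extra_usage on one side appear as description/extra_description on the other, and argument help strings gain sentence capitalization. A toy trait (not the real nu_protocol::Command) showing the shape of that rename:

// Toy illustration only; the real Command trait has many more methods.
trait CommandDocs {
    // newer-style accessor
    fn description(&self) -> &str;
    // older-style accessor exposing the same text
    fn usage(&self) -> &str {
        self.description()
    }
}

struct BitsAndDocs;

impl CommandDocs for BitsAndDocs {
    fn description(&self) -> &str {
        "Performs bitwise and for ints or binary values."
    }
}

fn main() {
    let cmd = BitsAndDocs;
    assert_eq!(cmd.usage(), cmd.description());
}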

View File

@ -14,11 +14,11 @@ impl Command for Bits {
.input_output_types(vec![(Type::Nothing, Type::String)])
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Various commands for working with bits."
}
fn extra_description(&self) -> &str {
fn extra_usage(&self) -> &str {
"You must use one of the following subcommands. Using this command as-is will only produce this help message."
}

View File

@ -3,10 +3,10 @@ use std::io::{self, Read, Write};
use nu_cmd_base::input_handler::{operate, CmdArgument};
use nu_engine::command_prelude::*;
use nu_protocol::{shell_error::io::IoError, Signals};
use nu_protocol::Signals;
use num_traits::ToPrimitive;
struct Arguments {
pub struct Arguments {
cell_paths: Option<Vec<CellPath>>,
}
@ -17,15 +17,15 @@ impl CmdArgument for Arguments {
}
#[derive(Clone)]
pub struct FormatBits;
pub struct BitsInto;
impl Command for FormatBits {
impl Command for BitsInto {
fn name(&self) -> &str {
"format bits"
"into bits"
}
fn signature(&self) -> Signature {
Signature::build("format bits")
Signature::build("into bits")
.input_output_types(vec![
(Type::Binary, Type::String),
(Type::Int, Type::String),
@ -40,17 +40,17 @@ impl Command for FormatBits {
.rest(
"rest",
SyntaxShape::CellPath,
"For a data structure input, convert data at the given cell paths.",
"for a data structure input, convert data at the given cell paths",
)
.category(Category::Conversions)
}
fn description(&self) -> &str {
"Convert value to a string of binary data represented by 0 and 1."
fn usage(&self) -> &str {
"Convert value to a binary primitive."
}
fn search_terms(&self) -> Vec<&str> {
vec!["convert", "cast", "binary"]
vec!["convert", "cast"]
}
fn run(
@ -60,49 +60,49 @@ impl Command for FormatBits {
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
format_bits(engine_state, stack, call, input)
into_bits(engine_state, stack, call, input)
}
fn examples(&self) -> Vec<Example> {
vec![
Example {
description: "convert a binary value into a string, padded to 8 places with 0s",
example: "0x[1] | format bits",
example: "0x[1] | into bits",
result: Some(Value::string("00000001",
Span::test_data(),
)),
},
Example {
description: "convert an int into a string, padded to 8 places with 0s",
example: "1 | format bits",
example: "1 | into bits",
result: Some(Value::string("00000001",
Span::test_data(),
)),
},
Example {
description: "convert a filesize value into a string, padded to 8 places with 0s",
example: "1b | format bits",
example: "1b | into bits",
result: Some(Value::string("00000001",
Span::test_data(),
)),
},
Example {
description: "convert a duration value into a string, padded to 8 places with 0s",
example: "1ns | format bits",
example: "1ns | into bits",
result: Some(Value::string("00000001",
Span::test_data(),
)),
},
Example {
description: "convert a boolean value into a string, padded to 8 places with 0s",
example: "true | format bits",
example: "true | into bits",
result: Some(Value::string("00000001",
Span::test_data(),
)),
},
Example {
description: "convert a string into a raw binary string, padded with 0s to 8 places",
example: "'nushell.sh' | format bits",
example: "'nushell.sh' | into bits",
result: Some(Value::string("01101110 01110101 01110011 01101000 01100101 01101100 01101100 00101110 01110011 01101000",
Span::test_data(),
)),
@ -111,7 +111,7 @@ impl Command for FormatBits {
}
}
fn format_bits(
fn into_bits(
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
@ -141,11 +141,7 @@ fn byte_stream_to_bits(stream: ByteStream, head: Span) -> ByteStream {
ByteStreamType::String,
move |buffer| {
let mut byte = [0];
if reader
.read(&mut byte[..])
.map_err(|err| IoError::new(err.kind(), head, None))?
> 0
{
if reader.read(&mut byte[..]).err_span(head)? > 0 {
// Format the byte as bits
if is_first {
is_first = false;
@ -197,7 +193,7 @@ fn convert_to_smallest_number_type(num: i64, span: Span) -> Value {
}
}
fn action(input: &Value, _args: &Arguments, span: Span) -> Value {
pub fn action(input: &Value, _args: &Arguments, span: Span) -> Value {
match input {
Value::Binary { val, .. } => {
let mut raw_string = "".to_string();
@ -207,7 +203,7 @@ fn action(input: &Value, _args: &Arguments, span: Span) -> Value {
Value::string(raw_string.trim(), span)
}
Value::Int { val, .. } => convert_to_smallest_number_type(*val, span),
Value::Filesize { val, .. } => convert_to_smallest_number_type(val.get(), span),
Value::Filesize { val, .. } => convert_to_smallest_number_type(*val, span),
Value::Duration { val, .. } => convert_to_smallest_number_type(*val, span),
Value::String { val, .. } => {
let raw_bytes = val.as_bytes();
@ -243,6 +239,6 @@ mod test {
fn test_examples() {
use crate::test_examples;
test_examples(FormatBits {})
test_examples(BitsInto {})
}
}
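Whichever name it carries (into bits on one side, format bits on the other), the command renders its input as space-separated groups of eight binary digits, as the examples above show. A standalone sketch of that formatting step for a byte slice (std only; this is not the command's actual helper):

/// Format each byte as 8 binary digits separated by spaces,
/// e.g. b"nu" -> "01101110 01110101".
fn format_bits(bytes: &[u8]) -> String {
    bytes
        .iter()
        .map(|b| format!("{b:08b}"))
        .collect::<Vec<_>>()
        .join(" ")
}

fn main() {
    assert_eq!(format_bits(&[0x01]), "00000001");
    assert_eq!(format_bits(b"nu"), "01101110 01110101");
    println!("{}", format_bits(b"nushell.sh"));
}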

View File

@ -1,5 +1,6 @@
mod and;
mod bits_;
mod into;
mod not;
mod or;
mod rotate_left;
@ -10,6 +11,7 @@ mod xor;
pub use and::BitsAnd;
pub use bits_::Bits;
pub use into::BitsInto;
pub use not::BitsNot;
pub use or::BitsOr;
pub use rotate_left::BitsRol;
@ -42,25 +44,6 @@ enum InputNumType {
SignedEight,
}
impl InputNumType {
fn num_bits(self) -> u32 {
match self {
InputNumType::One => 8,
InputNumType::Two => 16,
InputNumType::Four => 32,
InputNumType::Eight => 64,
InputNumType::SignedOne => 8,
InputNumType::SignedTwo => 16,
InputNumType::SignedFour => 32,
InputNumType::SignedEight => 64,
}
}
fn is_permitted_bit_shift(self, bits: u32) -> bool {
bits < self.num_bits()
}
}
fn get_number_bytes(
number_bytes: Option<Spanned<usize>>,
head: Span,
@ -135,7 +118,7 @@ where
(min, max) => (rhs, lhs, max, min),
};
let pad = iter::repeat_n(0, max_len - min_len);
let pad = iter::repeat(0).take(max_len - min_len);
let mut a;
let mut b;
@ -159,10 +142,9 @@ where
}
(Value::Binary { .. }, Value::Int { .. }) | (Value::Int { .. }, Value::Binary { .. }) => {
Value::error(
ShellError::OnlySupportsThisInputType {
ShellError::PipelineMismatch {
exp_input_type: "input, and argument, to be both int or both binary"
.to_string(),
wrong_type: "int and binary".to_string(),
dst_span: rhs.span(),
src_span: span,
},
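The InputNumType helper added in the hunk above maps each numeric width to its bit count and permits a shift only when it is strictly smaller than that width. A condensed, self-contained version of that guard (unsigned variants only; the signed variants share the same widths):

#[derive(Clone, Copy)]
enum NumType {
    One,
    Two,
    Four,
    Eight,
}

impl NumType {
    fn num_bits(self) -> u32 {
        match self {
            NumType::One => 8,
            NumType::Two => 16,
            NumType::Four => 32,
            NumType::Eight => 64,
        }
    }

    // A shift is only valid strictly below the available width.
    fn is_permitted_bit_shift(self, bits: u32) -> bool {
        bits < self.num_bits()
    }
}

fn main() {
    assert!(NumType::One.is_permitted_bit_shift(7));
    assert!(!NumType::One.is_permitted_bit_shift(8)); // shifting a u8 by 8 is rejected
}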

View File

@ -51,7 +51,7 @@ impl Command for BitsNot {
.category(Category::Bits)
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Performs logical negation on each bit."
}

View File

@ -27,7 +27,7 @@ impl Command for BitsOr {
.required(
"target",
SyntaxShape::OneOf(vec![SyntaxShape::Binary, SyntaxShape::Int]),
"Right-hand side of the operation.",
"right-hand side of the operation",
)
.named(
"endian",
@ -38,7 +38,7 @@ impl Command for BitsOr {
.category(Category::Bits)
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Performs bitwise or for ints or binary values."
}

View File

@ -1,10 +1,11 @@
use super::{get_input_num_type, get_number_bytes, InputNumType, NumberBytes};
use itertools::Itertools;
use nu_cmd_base::input_handler::{operate, CmdArgument};
use nu_engine::command_prelude::*;
struct Arguments {
signed: bool,
bits: Spanned<usize>,
bits: usize,
number_size: NumberBytes,
}
@ -37,7 +38,7 @@ impl Command for BitsRol {
),
])
.allow_variants_without_examples(true)
.required("bits", SyntaxShape::Int, "Number of bits to rotate left.")
.required("bits", SyntaxShape::Int, "number of bits to rotate left")
.switch(
"signed",
"always treat input number as a signed number",
@ -52,7 +53,7 @@ impl Command for BitsRol {
.category(Category::Bits)
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Bitwise rotate left for ints or binary values."
}
@ -68,7 +69,7 @@ impl Command for BitsRol {
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let head = call.head;
let bits = call.req(engine_state, stack, 0)?;
let bits: usize = call.req(engine_state, stack, 0)?;
let signed = call.has_flag(engine_state, stack, "signed")?;
let number_bytes: Option<Spanned<usize>> =
call.get_flag(engine_state, stack, "number-bytes")?;
@ -118,8 +119,6 @@ fn action(input: &Value, args: &Arguments, span: Span) -> Value {
number_size,
bits,
} = *args;
let bits_span = bits.span;
let bits = bits.item;
match input {
Value::Int { val, .. } => {
@ -128,19 +127,6 @@ fn action(input: &Value, args: &Arguments, span: Span) -> Value {
let bits = bits as u32;
let input_num_type = get_input_num_type(val, signed, number_size);
if bits > input_num_type.num_bits() {
return Value::error(
ShellError::IncorrectValue {
msg: format!(
"Trying to rotate by more than the available bits ({})",
input_num_type.num_bits()
),
val_span: bits_span,
call_span: span,
},
span,
);
}
let int = match input_num_type {
One => (val as u8).rotate_left(bits) as i64,
Two => (val as u16).rotate_left(bits) as i64,
@ -171,28 +157,16 @@ fn action(input: &Value, args: &Arguments, span: Span) -> Value {
Value::int(int, span)
}
Value::Binary { val, .. } => {
let len = val.len();
if bits > len * 8 {
return Value::error(
ShellError::IncorrectValue {
msg: format!(
"Trying to rotate by more than the available bits ({})",
len * 8
),
val_span: bits_span,
call_span: span,
},
span,
);
}
let byte_shift = bits / 8;
let bit_rotate = bits % 8;
let bytes = if bit_rotate == 0 {
rotate_bytes_left(val, byte_shift)
} else {
rotate_bytes_and_bits_left(val, byte_shift, bit_rotate)
};
let mut bytes = val
.iter()
.copied()
.circular_tuple_windows::<(u8, u8)>()
.map(|(lhs, rhs)| (lhs << bit_rotate) | (rhs >> (8 - bit_rotate)))
.collect::<Vec<u8>>();
bytes.rotate_left(byte_shift);
Value::binary(bytes, span)
}
@ -210,34 +184,6 @@ fn action(input: &Value, args: &Arguments, span: Span) -> Value {
}
}
fn rotate_bytes_left(data: &[u8], byte_shift: usize) -> Vec<u8> {
let len = data.len();
let mut output = vec![0; len];
output[..len - byte_shift].copy_from_slice(&data[byte_shift..]);
output[len - byte_shift..].copy_from_slice(&data[..byte_shift]);
output
}
fn rotate_bytes_and_bits_left(data: &[u8], byte_shift: usize, bit_shift: usize) -> Vec<u8> {
debug_assert!(byte_shift < data.len());
debug_assert!(
(1..8).contains(&bit_shift),
"Bit shifts of 0 can't be handled by this impl and everything else should be part of the byteshift");
let mut bytes = Vec::with_capacity(data.len());
let mut next_index = byte_shift;
for _ in 0..data.len() {
let curr_byte = data[next_index];
next_index += 1;
if next_index == data.len() {
next_index = 0;
}
let next_byte = data[next_index];
let new_byte = (curr_byte << bit_shift) | (next_byte >> (8 - bit_shift));
bytes.push(new_byte);
}
bytes
}
#[cfg(test)]
mod test {
use super::*;
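For binary input, one side of the rol hunk above splits the rotation into a whole-byte rotation plus a bit-level carry from the following byte (rotate_bytes_left / rotate_bytes_and_bits_left), instead of the circular_tuple_windows pipeline on the other side. A standalone sketch of that combined rotation with a couple of checks (not the crate's exact helper):

/// Rotate a byte string left by `bits` (expected to be less than len * 8):
/// whole bytes move first, then each byte takes carry bits from its successor.
fn rotate_left(data: &[u8], bits: usize) -> Vec<u8> {
    let len = data.len();
    let byte_shift = (bits / 8) % len;
    let bit_shift = bits % 8;
    (0..len)
        .map(|i| {
            let curr = data[(i + byte_shift) % len];
            if bit_shift == 0 {
                curr
            } else {
                let next = data[(i + byte_shift + 1) % len];
                (curr << bit_shift) | (next >> (8 - bit_shift))
            }
        })
        .collect()
}

fn main() {
    // 0x01 0x02 rotated left by one bit becomes 0x02 0x04
    assert_eq!(rotate_left(&[0x01, 0x02], 1), vec![0x02, 0x04]);
    // rotating by a whole byte only reorders the bytes
    assert_eq!(rotate_left(&[0xAA, 0xBB, 0xCC], 8), vec![0xBB, 0xCC, 0xAA]);
}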

View File

@ -1,10 +1,11 @@
use super::{get_input_num_type, get_number_bytes, InputNumType, NumberBytes};
use itertools::Itertools;
use nu_cmd_base::input_handler::{operate, CmdArgument};
use nu_engine::command_prelude::*;
struct Arguments {
signed: bool,
bits: Spanned<usize>,
bits: usize,
number_size: NumberBytes,
}
@ -37,7 +38,7 @@ impl Command for BitsRor {
),
])
.allow_variants_without_examples(true)
.required("bits", SyntaxShape::Int, "Number of bits to rotate right.")
.required("bits", SyntaxShape::Int, "number of bits to rotate right")
.switch(
"signed",
"always treat input number as a signed number",
@ -52,7 +53,7 @@ impl Command for BitsRor {
.category(Category::Bits)
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Bitwise rotate right for ints or binary values."
}
@ -68,7 +69,7 @@ impl Command for BitsRor {
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let head = call.head;
let bits = call.req(engine_state, stack, 0)?;
let bits: usize = call.req(engine_state, stack, 0)?;
let signed = call.has_flag(engine_state, stack, "signed")?;
let number_bytes: Option<Spanned<usize>> =
call.get_flag(engine_state, stack, "number-bytes")?;
@ -122,8 +123,6 @@ fn action(input: &Value, args: &Arguments, span: Span) -> Value {
number_size,
bits,
} = *args;
let bits_span = bits.span;
let bits = bits.item;
match input {
Value::Int { val, .. } => {
@ -132,19 +131,6 @@ fn action(input: &Value, args: &Arguments, span: Span) -> Value {
let bits = bits as u32;
let input_num_type = get_input_num_type(val, signed, number_size);
if bits > input_num_type.num_bits() {
return Value::error(
ShellError::IncorrectValue {
msg: format!(
"Trying to rotate by more than the available bits ({})",
input_num_type.num_bits()
),
val_span: bits_span,
call_span: span,
},
span,
);
}
let int = match input_num_type {
One => (val as u8).rotate_right(bits) as i64,
Two => (val as u16).rotate_right(bits) as i64,
@ -175,28 +161,16 @@ fn action(input: &Value, args: &Arguments, span: Span) -> Value {
Value::int(int, span)
}
Value::Binary { val, .. } => {
let len = val.len();
if bits > len * 8 {
return Value::error(
ShellError::IncorrectValue {
msg: format!(
"Trying to rotate by more than the available bits ({})",
len * 8
),
val_span: bits_span,
call_span: span,
},
span,
);
}
let byte_shift = bits / 8;
let bit_rotate = bits % 8;
let bytes = if bit_rotate == 0 {
rotate_bytes_right(val, byte_shift)
} else {
rotate_bytes_and_bits_right(val, byte_shift, bit_rotate)
};
let mut bytes = val
.iter()
.copied()
.circular_tuple_windows::<(u8, u8)>()
.map(|(lhs, rhs)| (lhs >> bit_rotate) | (rhs << (8 - bit_rotate)))
.collect::<Vec<u8>>();
bytes.rotate_right(byte_shift);
Value::binary(bytes, span)
}
@ -214,35 +188,6 @@ fn action(input: &Value, args: &Arguments, span: Span) -> Value {
}
}
fn rotate_bytes_right(data: &[u8], byte_shift: usize) -> Vec<u8> {
let len = data.len();
let mut output = vec![0; len];
output[byte_shift..].copy_from_slice(&data[..len - byte_shift]);
output[..byte_shift].copy_from_slice(&data[len - byte_shift..]);
output
}
fn rotate_bytes_and_bits_right(data: &[u8], byte_shift: usize, bit_shift: usize) -> Vec<u8> {
debug_assert!(byte_shift < data.len());
debug_assert!(
(1..8).contains(&bit_shift),
"Bit shifts of 0 can't be handled by this impl and everything else should be part of the byteshift"
);
let mut bytes = Vec::with_capacity(data.len());
let mut previous_index = data.len() - byte_shift - 1;
for _ in 0..data.len() {
let previous_byte = data[previous_index];
previous_index += 1;
if previous_index == data.len() {
previous_index = 0;
}
let curr_byte = data[previous_index];
let rotated_byte = (curr_byte >> bit_shift) | (previous_byte << (8 - bit_shift));
bytes.push(rotated_byte);
}
bytes
}
#[cfg(test)]
mod test {
use super::*;
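The integer arm of bits ror (structurally shared with rol above) first narrows the value to the selected width and then relies on the primitive rotate_right. A condensed version of that match, with a plain byte count in place of the InputNumType enum and unsigned widths only:

// Illustrative only: the real code also handles signed variants and
// validates the shift amount against the width first.
fn rotate_right_int(val: i64, bits: u32, num_bytes: u32) -> i64 {
    match num_bytes {
        1 => (val as u8).rotate_right(bits) as i64,
        2 => (val as u16).rotate_right(bits) as i64,
        4 => (val as u32).rotate_right(bits) as i64,
        _ => (val as u64).rotate_right(bits) as i64,
    }
}

fn main() {
    // within 8 bits, 0b0000_0001 rotated right by one wraps to 0b1000_0000
    assert_eq!(rotate_right_int(1, 1, 1), 0b1000_0000);
}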

View File

@ -7,7 +7,7 @@ use std::iter;
struct Arguments {
signed: bool,
bits: Spanned<usize>,
bits: usize,
number_size: NumberBytes,
}
@ -40,7 +40,7 @@ impl Command for BitsShl {
),
])
.allow_variants_without_examples(true)
.required("bits", SyntaxShape::Int, "Number of bits to shift left.")
.required("bits", SyntaxShape::Int, "number of bits to shift left")
.switch(
"signed",
"always treat input number as a signed number",
@ -55,7 +55,7 @@ impl Command for BitsShl {
.category(Category::Bits)
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Bitwise shift left for ints or binary values."
}
@ -71,9 +71,7 @@ impl Command for BitsShl {
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let head = call.head;
// This restricts to a positive shift value (our underlying operations do not
// permit them)
let bits: Spanned<usize> = call.req(engine_state, stack, 0)?;
let bits: usize = call.req(engine_state, stack, 0)?;
let signed = call.has_flag(engine_state, stack, "signed")?;
let number_bytes: Option<Spanned<usize>> =
call.get_flag(engine_state, stack, "number-bytes")?;
@ -133,29 +131,14 @@ fn action(input: &Value, args: &Arguments, span: Span) -> Value {
number_size,
bits,
} = *args;
let bits_span = bits.span;
let bits = bits.item;
match input {
Value::Int { val, .. } => {
use InputNumType::*;
let val = *val;
let bits = bits as u32;
let bits = bits as u64;
let input_num_type = get_input_num_type(val, signed, number_size);
if !input_num_type.is_permitted_bit_shift(bits) {
return Value::error(
ShellError::IncorrectValue {
msg: format!(
"Trying to shift by more than the available bits (permitted < {})",
input_num_type.num_bits()
),
val_span: bits_span,
call_span: span,
},
span,
);
}
let int = match input_num_type {
One => ((val as u8) << bits) as i64,
Two => ((val as u16) << bits) as i64,
@ -164,14 +147,12 @@ fn action(input: &Value, args: &Arguments, span: Span) -> Value {
let Ok(i) = i64::try_from((val as u64) << bits) else {
return Value::error(
ShellError::GenericError {
error: "result out of range for int".into(),
error: "result out of range for specified number".into(),
msg: format!(
"shifting left by {bits} is out of range for the value {val}"
),
span: Some(span),
help: Some(
"Ensure the result fits in a 64-bit signed integer.".into(),
),
help: None,
inner: vec![],
},
span,
@ -191,26 +172,19 @@ fn action(input: &Value, args: &Arguments, span: Span) -> Value {
let byte_shift = bits / 8;
let bit_shift = bits % 8;
// This is purely for symmetry with the int case and the fact that the
// shift right implementation in its current form panicked with an overflow
if bits > val.len() * 8 {
return Value::error(
ShellError::IncorrectValue {
msg: format!(
"Trying to shift by more than the available bits ({})",
val.len() * 8
),
val_span: bits_span,
call_span: span,
},
span,
);
}
let bytes = if bit_shift == 0 {
shift_bytes_left(val, byte_shift)
} else {
shift_bytes_and_bits_left(val, byte_shift, bit_shift)
};
use itertools::Position::*;
let bytes = val
.iter()
.copied()
.skip(byte_shift)
.circular_tuple_windows::<(u8, u8)>()
.with_position()
.map(|(pos, (lhs, rhs))| match pos {
Last | Only => lhs << bit_shift,
_ => (lhs << bit_shift) | (rhs >> bit_shift),
})
.chain(iter::repeat(0).take(byte_shift))
.collect::<Vec<u8>>();
Value::binary(bytes, span)
}
@ -228,31 +202,6 @@ fn action(input: &Value, args: &Arguments, span: Span) -> Value {
}
}
fn shift_bytes_left(data: &[u8], byte_shift: usize) -> Vec<u8> {
let len = data.len();
let mut output = vec![0; len];
output[..len - byte_shift].copy_from_slice(&data[byte_shift..]);
output
}
fn shift_bytes_and_bits_left(data: &[u8], byte_shift: usize, bit_shift: usize) -> Vec<u8> {
use itertools::Position::*;
debug_assert!((1..8).contains(&bit_shift),
"Bit shifts of 0 can't be handled by this impl and everything else should be part of the byteshift"
);
data.iter()
.copied()
.skip(byte_shift)
.circular_tuple_windows::<(u8, u8)>()
.with_position()
.map(|(pos, (lhs, rhs))| match pos {
Last | Only => lhs << bit_shift,
_ => (lhs << bit_shift) | (rhs >> (8 - bit_shift)),
})
.chain(iter::repeat_n(0, byte_shift))
.collect::<Vec<u8>>()
}
#[cfg(test)]
mod test {
use super::*;
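For binary input, shl in the hunk above splits the shift into whole bytes plus a bit remainder, drops whatever is shifted out of the front, pads with zeros at the back, and rejects shifts larger than the available bits. A standalone sketch of that behaviour (returning None where the command reports IncorrectValue):

/// Shift a byte string left by `bits`; bits shifted out of the front are lost
/// and zeros enter at the back. None signals a shift beyond the available bits.
fn shift_left(data: &[u8], bits: usize) -> Option<Vec<u8>> {
    let len = data.len();
    if bits > len * 8 {
        return None;
    }
    let byte_shift = bits / 8;
    let bit_shift = bits % 8;
    let mut out = vec![0u8; len];
    for i in 0..len - byte_shift {
        let curr = data[i + byte_shift];
        let next = if i + byte_shift + 1 < len {
            data[i + byte_shift + 1]
        } else {
            0
        };
        out[i] = if bit_shift == 0 {
            curr
        } else {
            (curr << bit_shift) | (next >> (8 - bit_shift))
        };
    }
    Some(out)
}

fn main() {
    // 0x00 0xFF shifted left by four bits becomes 0x0F 0xF0
    assert_eq!(shift_left(&[0x00, 0xFF], 4), Some(vec![0x0F, 0xF0]));
    assert_eq!(shift_left(&[0x00, 0xFF], 17), None);
}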

View File

@ -1,10 +1,13 @@
use super::{get_input_num_type, get_number_bytes, InputNumType, NumberBytes};
use itertools::Itertools;
use nu_cmd_base::input_handler::{operate, CmdArgument};
use nu_engine::command_prelude::*;
use std::iter;
struct Arguments {
signed: bool,
bits: Spanned<usize>,
bits: usize,
number_size: NumberBytes,
}
@ -37,7 +40,7 @@ impl Command for BitsShr {
),
])
.allow_variants_without_examples(true)
.required("bits", SyntaxShape::Int, "Number of bits to shift right.")
.required("bits", SyntaxShape::Int, "number of bits to shift right")
.switch(
"signed",
"always treat input number as a signed number",
@ -52,7 +55,7 @@ impl Command for BitsShr {
.category(Category::Bits)
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Bitwise shift right for ints or binary values."
}
@ -68,9 +71,7 @@ impl Command for BitsShr {
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let head = call.head;
// This restricts to a positive shift value (our underlying operations do not
// permit them)
let bits: Spanned<usize> = call.req(engine_state, stack, 0)?;
let bits: usize = call.req(engine_state, stack, 0)?;
let signed = call.has_flag(engine_state, stack, "signed")?;
let number_bytes: Option<Spanned<usize>> =
call.get_flag(engine_state, stack, "number-bytes")?;
@ -120,8 +121,6 @@ fn action(input: &Value, args: &Arguments, span: Span) -> Value {
number_size,
bits,
} = *args;
let bits_span = bits.span;
let bits = bits.item;
match input {
Value::Int { val, .. } => {
@ -130,19 +129,6 @@ fn action(input: &Value, args: &Arguments, span: Span) -> Value {
let bits = bits as u32;
let input_num_type = get_input_num_type(val, signed, number_size);
if !input_num_type.is_permitted_bit_shift(bits) {
return Value::error(
ShellError::IncorrectValue {
msg: format!(
"Trying to shift by more than the available bits (permitted < {})",
input_num_type.num_bits()
),
val_span: bits_span,
call_span: span,
},
span,
);
}
let int = match input_num_type {
One => ((val as u8) >> bits) as i64,
Two => ((val as u16) >> bits) as i64,
@ -161,27 +147,21 @@ fn action(input: &Value, args: &Arguments, span: Span) -> Value {
let bit_shift = bits % 8;
let len = val.len();
// This check is done for symmetry with the int case and the previous
// implementation would overflow byte indices leading to unexpected output
// lengths
if bits > len * 8 {
return Value::error(
ShellError::IncorrectValue {
msg: format!(
"Trying to shift by more than the available bits ({})",
len * 8
),
val_span: bits_span,
call_span: span,
},
span,
);
}
let bytes = if bit_shift == 0 {
shift_bytes_right(val, byte_shift)
} else {
shift_bytes_and_bits_right(val, byte_shift, bit_shift)
};
use itertools::Position::*;
let bytes = iter::repeat(0)
.take(byte_shift)
.chain(
val.iter()
.copied()
.circular_tuple_windows::<(u8, u8)>()
.with_position()
.map(|(pos, (lhs, rhs))| match pos {
First | Only => lhs >> bit_shift,
_ => (lhs >> bit_shift) | (rhs << bit_shift),
})
.take(len - byte_shift),
)
.collect::<Vec<u8>>();
Value::binary(bytes, span)
}
@ -198,35 +178,6 @@ fn action(input: &Value, args: &Arguments, span: Span) -> Value {
),
}
}
fn shift_bytes_right(data: &[u8], byte_shift: usize) -> Vec<u8> {
let len = data.len();
let mut output = vec![0; len];
output[byte_shift..].copy_from_slice(&data[..len - byte_shift]);
output
}
fn shift_bytes_and_bits_right(data: &[u8], byte_shift: usize, bit_shift: usize) -> Vec<u8> {
debug_assert!(
bit_shift > 0 && bit_shift < 8,
"bit_shift should be in the range (0, 8)"
);
let len = data.len();
let mut output = vec![0; len];
for i in byte_shift..len {
let shifted_bits = data[i - byte_shift] >> bit_shift;
let carried_bits = if i > byte_shift {
data[i - byte_shift - 1] << (8 - bit_shift)
} else {
0
};
let shifted_byte = shifted_bits | carried_bits;
output[i] = shifted_byte;
}
output
}
#[cfg(test)]
mod test {
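shr mirrors shl: zeros enter at the front, whole bytes move right, and each remaining byte combines its own shifted bits with a carry from the byte to its left, which is what the shift_bytes_and_bits_right helper shown in the hunk above does. A condensed, self-contained version of that loop:

/// Shift a byte string right by `bits`: the leading `byte_shift` bytes become
/// zero and every other byte combines its own shifted bits with a carry from
/// the byte to its left.
fn shift_right(data: &[u8], bits: usize) -> Vec<u8> {
    let len = data.len();
    let byte_shift = bits / 8;
    let bit_shift = bits % 8;
    let mut out = vec![0u8; len];
    for i in byte_shift..len {
        let shifted = data[i - byte_shift] >> bit_shift;
        let carried = if bit_shift > 0 && i > byte_shift {
            data[i - byte_shift - 1] << (8 - bit_shift)
        } else {
            0
        };
        out[i] = shifted | carried;
    }
    out
}

fn main() {
    // 0xFF 0x00 shifted right by four bits becomes 0x0F 0xF0
    assert_eq!(shift_right(&[0xFF, 0x00], 4), vec![0x0F, 0xF0]);
}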

View File

@ -27,7 +27,7 @@ impl Command for BitsXor {
.required(
"target",
SyntaxShape::OneOf(vec![SyntaxShape::Binary, SyntaxShape::Int]),
"Right-hand side of the operation.",
"right-hand side of the operation",
)
.named(
"endian",
@ -38,7 +38,7 @@ impl Command for BitsXor {
.category(Category::Bits)
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Performs bitwise xor for ints or binary values."
}

View File

@ -2,31 +2,31 @@ use nu_cmd_base::input_handler::{operate, CellPathOnlyArgs};
use nu_engine::command_prelude::*;
#[derive(Clone)]
pub struct FormatNumber;
pub struct Fmt;
impl Command for FormatNumber {
impl Command for Fmt {
fn name(&self) -> &str {
"format number"
"fmt"
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Format a number."
}
fn signature(&self) -> nu_protocol::Signature {
Signature::build("format number")
Signature::build("fmt")
.input_output_types(vec![(Type::Number, Type::record())])
.category(Category::Conversions)
}
fn search_terms(&self) -> Vec<&str> {
vec!["display", "render", "fmt"]
vec!["display", "render", "format"]
}
fn examples(&self) -> Vec<Example> {
vec![Example {
description: "Get a record containing multiple formats for the number 42",
example: "42 | format number",
example: "42 | fmt",
result: Some(Value::test_record(record! {
"binary" => Value::test_string("0b101010"),
"debug" => Value::test_string("42"),
@ -47,11 +47,11 @@ impl Command for FormatNumber {
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
format_number(engine_state, stack, call, input)
fmt(engine_state, stack, call, input)
}
}
pub(crate) fn format_number(
fn fmt(
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
@ -64,9 +64,9 @@ pub(crate) fn format_number(
fn action(input: &Value, _args: &CellPathOnlyArgs, span: Span) -> Value {
match input {
Value::Float { val, .. } => format_f64(*val, span),
Value::Int { val, .. } => format_i64(*val, span),
Value::Filesize { val, .. } => format_i64(val.get(), span),
Value::Float { val, .. } => fmt_it_64(*val, span),
Value::Int { val, .. } => fmt_it(*val, span),
Value::Filesize { val, .. } => fmt_it(*val, span),
// Propagate errors by explicitly matching them before the final case.
Value::Error { .. } => input.clone(),
other => Value::error(
@ -81,7 +81,7 @@ fn action(input: &Value, _args: &CellPathOnlyArgs, span: Span) -> Value {
}
}
fn format_i64(num: i64, span: Span) -> Value {
fn fmt_it(num: i64, span: Span) -> Value {
Value::record(
record! {
"binary" => Value::string(format!("{num:#b}"), span),
@ -97,7 +97,7 @@ fn format_i64(num: i64, span: Span) -> Value {
)
}
fn format_f64(num: f64, span: Span) -> Value {
fn fmt_it_64(num: f64, span: Span) -> Value {
Value::record(
record! {
"binary" => Value::string(format!("{:b}", num.to_bits()), span),
@ -121,6 +121,6 @@ mod test {
fn test_examples() {
use crate::test_examples;
test_examples(FormatNumber {})
test_examples(Fmt {})
}
}
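The number-formatting command above (fmt on one side, format number on the other) returns a single record with the input rendered in several notations via Rust's alternate format specifiers; the example output shows the binary and debug fields. A standalone sketch of the integer case, with a plain map standing in for the record (keys other than binary and debug are assumptions here, not taken from the hunk):

use std::collections::BTreeMap;

/// Render an integer in a handful of notations, mirroring the record layout.
fn format_number(num: i64) -> BTreeMap<&'static str, String> {
    BTreeMap::from([
        ("binary", format!("{num:#b}")),
        ("debug", format!("{num:?}")),
        ("display", format!("{num}")),
        ("lowerhex", format!("{num:#x}")),
        ("octal", format!("{num:#o}")),
        ("upperhex", format!("{num:#X}")),
    ])
}

fn main() {
    let rendered = format_number(42);
    assert_eq!(rendered["binary"], "0b101010"); // matches the example above
    assert_eq!(rendered["lowerhex"], "0x2a");
    assert_eq!(rendered["upperhex"], "0x2A");
}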

View File

@ -0,0 +1,3 @@
mod fmt;
pub(crate) use fmt::Fmt;

View File

@ -9,7 +9,7 @@ impl Command for EachWhile {
"each while"
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Run a closure on each row of the input list until a null is found, then create a new list with the results."
}
@ -25,8 +25,8 @@ impl Command for EachWhile {
)])
.required(
"closure",
SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
"The closure to run.",
SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
"the closure to run",
)
.category(Category::Filters)
}

View File

@ -18,11 +18,11 @@ impl Command for Roll {
.input_output_types(vec![(Type::Nothing, Type::String)])
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Rolling commands for tables."
}
fn extra_description(&self) -> &str {
fn extra_usage(&self) -> &str {
"You must use one of the following subcommands. Using this command as-is will only produce this help message."
}

View File

@ -21,7 +21,7 @@ impl Command for RollDown {
.category(Category::Filters)
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Roll table rows down."
}

View File

@ -33,7 +33,7 @@ impl Command for RollLeft {
.category(Category::Filters)
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Roll record or table columns left."
}

View File

@ -33,7 +33,7 @@ impl Command for RollRight {
.category(Category::Filters)
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Roll table columns right."
}

View File

@ -21,7 +21,7 @@ impl Command for RollUp {
.category(Category::Filters)
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Roll table rows up."
}

View File

@ -13,20 +13,17 @@ impl Command for Rotate {
.input_output_types(vec![
(Type::record(), Type::table()),
(Type::table(), Type::table()),
(Type::list(Type::Any), Type::table()),
(Type::String, Type::table()),
])
.switch("ccw", "rotate counter clockwise", None)
.rest(
"rest",
SyntaxShape::String,
"The names to give columns once rotated.",
"the names to give columns once rotated",
)
.category(Category::Filters)
.allow_variants_without_examples(true)
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Rotates a table or record clockwise (default) or counter-clockwise (use --ccw flag)."
}

View File

@ -16,7 +16,7 @@ impl Command for UpdateCells {
.required(
"closure",
SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
"The closure to run an update for each cell.",
"the closure to run an update for each cell",
)
.named(
"columns",
@ -27,7 +27,7 @@ impl Command for UpdateCells {
.category(Category::Filters)
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Update the table cells."
}

View File

@ -14,7 +14,7 @@ impl Command for FromUrl {
.category(Category::Formats)
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Parse url-encoded string as a record."
}

View File

@ -2,4 +2,4 @@ mod from;
mod to;
pub(crate) use from::url::FromUrl;
pub use to::html::ToHtml;
pub(crate) use to::html::ToHtml;

View File

@ -138,11 +138,11 @@ impl Command for ToHtml {
]
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Convert table into simple HTML."
}
fn extra_description(&self) -> &str {
fn extra_usage(&self) -> &str {
"Screenshots of the themes can be browsed here: https://github.com/mbadolato/iTerm2-Color-Schemes."
}
@ -330,12 +330,7 @@ fn to_html(
output_string = run_regexes(&regex_hm, &output_string);
}
let metadata = PipelineMetadata {
data_source: nu_protocol::DataSource::None,
content_type: Some(mime::TEXT_HTML_UTF_8.to_string()),
};
Ok(Value::string(output_string, head).into_pipeline_data_with_metadata(metadata))
Ok(Value::string(output_string, head).into_pipeline_data())
}
fn theme_demo(span: Span) -> PipelineData {
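One side of the to html hunk above attaches pipeline metadata with content_type set to mime::TEXT_HTML_UTF_8 (which should render as "text/html; charset=utf-8") instead of returning the bare string. A std-only stand-in for that idea, pairing the generated markup with an optional content type rather than using the nu_protocol types:

/// Minimal stand-in for tagging command output with a content type.
struct Tagged {
    body: String,
    content_type: Option<String>,
}

fn to_html(rows: &[(&str, &str)]) -> Tagged {
    let mut body = String::from("<table>");
    for (key, value) in rows {
        body.push_str(&format!("<tr><td>{key}</td><td>{value}</td></tr>"));
    }
    body.push_str("</table>");
    Tagged {
        body,
        content_type: Some("text/html; charset=utf-8".to_string()),
    }
}

fn main() {
    let out = to_html(&[("name", "nushell")]);
    assert_eq!(out.content_type.as_deref(), Some("text/html; charset=utf-8"));
    println!("{}", out.body);
}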

View File

@ -1,9 +1,9 @@
use nu_engine::command_prelude::*;
#[derive(Clone)]
pub struct MathArcCos;
pub struct SubCommand;
impl Command for MathArcCos {
impl Command for SubCommand {
fn name(&self) -> &str {
"math arccos"
}
@ -22,7 +22,7 @@ impl Command for MathArcCos {
.category(Category::Math)
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Returns the arccosine of the number."
}
@ -114,6 +114,6 @@ mod test {
fn test_examples() {
use crate::test_examples;
test_examples(MathArcCos {})
test_examples(SubCommand {})
}
}

View File

@ -1,9 +1,9 @@
use nu_engine::command_prelude::*;
#[derive(Clone)]
pub struct MathArcCosH;
pub struct SubCommand;
impl Command for MathArcCosH {
impl Command for SubCommand {
fn name(&self) -> &str {
"math arccosh"
}
@ -21,7 +21,7 @@ impl Command for MathArcCosH {
.category(Category::Math)
}
fn description(&self) -> &str {
fn usage(&self) -> &str {
"Returns the inverse of the hyperbolic cosine function."
}
@ -100,6 +100,6 @@ mod test {
fn test_examples() {
use crate::test_examples;
test_examples(MathArcCosH {})
test_examples(SubCommand {})
}
}

Some files were not shown because too many files have changed in this diff.