From 1f916e65cff4459698d465b2f4558da1e1bf6e44 Mon Sep 17 00:00:00 2001 From: Michael Davis Date: Tue, 16 Jan 2024 13:59:48 -0500 Subject: Create helix-stdx crate for stdlib extensions helix-stdx is meant to carry extensions to the stdlib or low-level dependencies that are useful in all other crates. This commit starts with all of the path functions from helix-core and the CWD tracking that lived in helix-loader. The CWD tracking in helix-loader was previously unable to call the canonicalization functions in helix-core. Switching to our custom canonicalization code should make no noticeable difference though since `std::env::current_dir` returns a canonicalized path with symlinks resolved (at least on unix). --- Cargo.lock | 14 ++- Cargo.toml | 1 + helix-core/Cargo.toml | 2 +- helix-core/src/lib.rs | 1 - helix-core/src/path.rs | 181 ------------------------------- helix-core/tests/path.rs | 124 --------------------- helix-loader/src/lib.rs | 44 +------- helix-lsp/Cargo.toml | 1 + helix-lsp/src/client.rs | 7 +- helix-lsp/src/lib.rs | 12 +-- helix-stdx/Cargo.toml | 19 ++++ helix-stdx/src/env.rs | 48 +++++++++ helix-stdx/src/lib.rs | 2 + helix-stdx/src/path.rs | 185 ++++++++++++++++++++++++++++++++ helix-stdx/tests/path.rs | 124 +++++++++++++++++++++ helix-term/Cargo.toml | 1 + helix-term/src/application.rs | 3 +- helix-term/src/commands.rs | 10 +- helix-term/src/commands/dap.rs | 2 +- helix-term/src/commands/lsp.rs | 7 +- helix-term/src/commands/typed.rs | 16 +-- helix-term/src/main.rs | 6 +- helix-term/src/ui/mod.rs | 4 +- helix-term/src/ui/picker.rs | 2 +- helix-term/tests/test/commands/write.rs | 11 +- helix-term/tests/test/splits.rs | 8 +- helix-view/Cargo.toml | 1 + helix-view/src/document.rs | 6 +- helix-view/src/editor.rs | 2 +- 29 files changed, 448 insertions(+), 396 deletions(-) delete mode 100644 helix-core/src/path.rs delete mode 100644 helix-core/tests/path.rs create mode 100644 helix-stdx/Cargo.toml create mode 100644 helix-stdx/src/env.rs create mode 100644 helix-stdx/src/lib.rs create mode 100644 helix-stdx/src/path.rs create mode 100644 helix-stdx/tests/path.rs diff --git a/Cargo.lock b/Cargo.lock index da0dc361..09bec59f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1060,6 +1060,7 @@ dependencies = [ "etcetera", "hashbrown 0.14.3", "helix-loader", + "helix-stdx", "imara-diff", "indoc", "log", @@ -1074,7 +1075,6 @@ dependencies = [ "slotmap", "smallvec", "smartstring", - "tempfile", "textwrap", "toml", "tree-sitter", @@ -1136,6 +1136,7 @@ dependencies = [ "helix-core", "helix-loader", "helix-parsec", + "helix-stdx", "log", "lsp-types", "parking_lot", @@ -1151,6 +1152,15 @@ dependencies = [ name = "helix-parsec" version = "23.10.0" +[[package]] +name = "helix-stdx" +version = "23.10.0" +dependencies = [ + "dunce", + "etcetera", + "tempfile", +] + [[package]] name = "helix-term" version = "23.10.0" @@ -1169,6 +1179,7 @@ dependencies = [ "helix-event", "helix-loader", "helix-lsp", + "helix-stdx", "helix-tui", "helix-vcs", "helix-view", @@ -1241,6 +1252,7 @@ dependencies = [ "helix-event", "helix-loader", "helix-lsp", + "helix-stdx", "helix-tui", "helix-vcs", "libc", diff --git a/Cargo.toml b/Cargo.toml index f59896ec..91f6e7ca 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -11,6 +11,7 @@ members = [ "helix-loader", "helix-vcs", "helix-parsec", + "helix-stdx", "xtask", ] diff --git a/helix-core/Cargo.toml b/helix-core/Cargo.toml index 07c801b8..42c88f4b 100644 --- a/helix-core/Cargo.toml +++ b/helix-core/Cargo.toml @@ -16,6 +16,7 @@ unicode-lines = ["ropey/unicode_lines"] integration 
= [] [dependencies] +helix-stdx = { path = "../helix-stdx" } helix-loader = { path = "../helix-loader" } ropey = { version = "1.6.1", default-features = false, features = ["simd"] } @@ -55,4 +56,3 @@ parking_lot = "0.12" [dev-dependencies] quickcheck = { version = "1", default-features = false } indoc = "2.0.4" -tempfile = "3.9" diff --git a/helix-core/src/lib.rs b/helix-core/src/lib.rs index 0acdb238..94802eba 100644 --- a/helix-core/src/lib.rs +++ b/helix-core/src/lib.rs @@ -17,7 +17,6 @@ pub mod macros; pub mod match_brackets; pub mod movement; pub mod object; -pub mod path; mod position; pub mod search; pub mod selection; diff --git a/helix-core/src/path.rs b/helix-core/src/path.rs deleted file mode 100644 index 0cf6f812..00000000 --- a/helix-core/src/path.rs +++ /dev/null @@ -1,181 +0,0 @@ -use etcetera::home_dir; -use std::path::{Component, Path, PathBuf}; - -/// Replaces users home directory from `path` with tilde `~` if the directory -/// is available, otherwise returns the path unchanged. -pub fn fold_home_dir(path: &Path) -> PathBuf { - if let Ok(home) = home_dir() { - if let Ok(stripped) = path.strip_prefix(&home) { - return PathBuf::from("~").join(stripped); - } - } - - path.to_path_buf() -} - -/// Expands tilde `~` into users home directory if available, otherwise returns the path -/// unchanged. The tilde will only be expanded when present as the first component of the path -/// and only slash follows it. -pub fn expand_tilde(path: &Path) -> PathBuf { - let mut components = path.components().peekable(); - if let Some(Component::Normal(c)) = components.peek() { - if c == &"~" { - if let Ok(home) = home_dir() { - // it's ok to unwrap, the path starts with `~` - return home.join(path.strip_prefix("~").unwrap()); - } - } - } - - path.to_path_buf() -} - -/// Normalize a path without resolving symlinks. -// Strategy: start from the first component and move up. Cannonicalize previous path, -// join component, cannonicalize new path, strip prefix and join to the final result. -pub fn get_normalized_path(path: &Path) -> PathBuf { - let mut components = path.components().peekable(); - let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() { - components.next(); - PathBuf::from(c.as_os_str()) - } else { - PathBuf::new() - }; - - for component in components { - match component { - Component::Prefix(..) => unreachable!(), - Component::RootDir => { - ret.push(component.as_os_str()); - } - Component::CurDir => {} - #[cfg(not(windows))] - Component::ParentDir => { - ret.pop(); - } - #[cfg(windows)] - Component::ParentDir => { - if let Some(head) = ret.components().next_back() { - match head { - Component::Prefix(_) | Component::RootDir => {} - Component::CurDir => unreachable!(), - // If we left previous component as ".." it means we met a symlink before and we can't pop path. 
- Component::ParentDir => { - ret.push(".."); - } - Component::Normal(_) => { - if ret.is_symlink() { - ret.push(".."); - } else { - ret.pop(); - } - } - } - } - } - #[cfg(not(windows))] - Component::Normal(c) => { - ret.push(c); - } - #[cfg(windows)] - Component::Normal(c) => 'normal: { - use std::fs::canonicalize; - - let new_path = ret.join(c); - if new_path.is_symlink() { - ret = new_path; - break 'normal; - } - let (can_new, can_old) = (canonicalize(&new_path), canonicalize(&ret)); - match (can_new, can_old) { - (Ok(can_new), Ok(can_old)) => { - let striped = can_new.strip_prefix(can_old); - ret.push(striped.unwrap_or_else(|_| c.as_ref())); - } - _ => ret.push(c), - } - } - } - } - dunce::simplified(&ret).to_path_buf() -} - -/// Returns the canonical, absolute form of a path with all intermediate components normalized. -/// -/// This function is used instead of [`std::fs::canonicalize`] because we don't want to verify -/// here if the path exists, just normalize it's components. -pub fn get_canonicalized_path(path: &Path) -> PathBuf { - let path = expand_tilde(path); - let path = if path.is_relative() { - helix_loader::current_working_dir().join(path) - } else { - path - }; - - get_normalized_path(path.as_path()) -} - -pub fn get_relative_path(path: &Path) -> PathBuf { - let path = PathBuf::from(path); - let path = if path.is_absolute() { - let cwdir = get_normalized_path(&helix_loader::current_working_dir()); - get_normalized_path(&path) - .strip_prefix(cwdir) - .map(PathBuf::from) - .unwrap_or(path) - } else { - path - }; - fold_home_dir(&path) -} - -/// Returns a truncated filepath where the basepart of the path is reduced to the first -/// char of the folder and the whole filename appended. -/// -/// Also strip the current working directory from the beginning of the path. -/// Note that this function does not check if the truncated path is unambiguous. -/// -/// ``` -/// use helix_core::path::get_truncated_path; -/// use std::path::Path; -/// -/// assert_eq!( -/// get_truncated_path("/home/cnorris/documents/jokes.txt").as_path(), -/// Path::new("/h/c/d/jokes.txt") -/// ); -/// assert_eq!( -/// get_truncated_path("jokes.txt").as_path(), -/// Path::new("jokes.txt") -/// ); -/// assert_eq!( -/// get_truncated_path("/jokes.txt").as_path(), -/// Path::new("/jokes.txt") -/// ); -/// assert_eq!( -/// get_truncated_path("/h/c/d/jokes.txt").as_path(), -/// Path::new("/h/c/d/jokes.txt") -/// ); -/// assert_eq!(get_truncated_path("").as_path(), Path::new("")); -/// ``` -/// -pub fn get_truncated_path>(path: P) -> PathBuf { - let cwd = helix_loader::current_working_dir(); - let path = path - .as_ref() - .strip_prefix(cwd) - .unwrap_or_else(|_| path.as_ref()); - let file = path.file_name().unwrap_or_default(); - let base = path.parent().unwrap_or_else(|| Path::new("")); - let mut ret = PathBuf::new(); - for d in base { - ret.push( - d.to_string_lossy() - .chars() - .next() - .unwrap_or_default() - .to_string(), - ); - } - ret.push(file); - ret -} diff --git a/helix-core/tests/path.rs b/helix-core/tests/path.rs deleted file mode 100644 index cbda5e1a..00000000 --- a/helix-core/tests/path.rs +++ /dev/null @@ -1,124 +0,0 @@ -#![cfg(windows)] - -use std::{ - env::set_current_dir, - error::Error, - path::{Component, Path, PathBuf}, -}; - -use helix_core::path::get_normalized_path; -use tempfile::Builder; - -// Paths on Windows are almost always case-insensitive. -// Normalization should return the original path. -// E.g. mkdir `CaSe`, normalize(`case`) = `CaSe`. 
-#[test] -fn test_case_folding_windows() -> Result<(), Box> { - // tmp/root/case - let tmp_prefix = std::env::temp_dir(); - set_current_dir(&tmp_prefix)?; - - let root = Builder::new().prefix("root-").tempdir()?; - let case = Builder::new().prefix("CaSe-").tempdir_in(&root)?; - - let root_without_prefix = root.path().strip_prefix(&tmp_prefix)?; - - let lowercase_case = format!( - "case-{}", - case.path() - .file_name() - .unwrap() - .to_string_lossy() - .split_at(5) - .1 - ); - let test_path = root_without_prefix.join(lowercase_case); - assert_eq!( - get_normalized_path(&test_path), - case.path().strip_prefix(&tmp_prefix)? - ); - - Ok(()) -} - -#[test] -fn test_normalize_path() -> Result<(), Box> { - /* - tmp/root/ - ├── link -> dir1/orig_file - ├── dir1/ - │ └── orig_file - └── dir2/ - └── dir_link -> ../dir1/ - */ - - let tmp_prefix = std::env::temp_dir(); - set_current_dir(&tmp_prefix)?; - - // Create a tree structure as shown above - let root = Builder::new().prefix("root-").tempdir()?; - let dir1 = Builder::new().prefix("dir1-").tempdir_in(&root)?; - let orig_file = Builder::new().prefix("orig_file-").tempfile_in(&dir1)?; - let dir2 = Builder::new().prefix("dir2-").tempdir_in(&root)?; - - // Create path and delete existing file - let dir_link = Builder::new() - .prefix("dir_link-") - .tempfile_in(&dir2)? - .path() - .to_owned(); - let link = Builder::new() - .prefix("link-") - .tempfile_in(&root)? - .path() - .to_owned(); - - use std::os::windows; - windows::fs::symlink_dir(&dir1, &dir_link)?; - windows::fs::symlink_file(&orig_file, &link)?; - - // root/link - let path = link.strip_prefix(&tmp_prefix)?; - assert_eq!( - get_normalized_path(path), - path, - "input {:?} and symlink last component shouldn't be resolved", - path - ); - - // root/dir2/dir_link/orig_file/../.. - let path = dir_link - .strip_prefix(&tmp_prefix) - .unwrap() - .join(orig_file.path().file_name().unwrap()) - .join(Component::ParentDir) - .join(Component::ParentDir); - let expected = dir_link - .strip_prefix(&tmp_prefix) - .unwrap() - .join(Component::ParentDir); - assert_eq!( - get_normalized_path(&path), - expected, - "input {:?} and \"..\" should not erase the simlink that goes ahead", - &path - ); - - // root/link/.././../dir2/../ - let path = link - .strip_prefix(&tmp_prefix) - .unwrap() - .join(Component::ParentDir) - .join(Component::CurDir) - .join(Component::ParentDir) - .join(dir2.path().file_name().unwrap()) - .join(Component::ParentDir); - let expected = link - .strip_prefix(&tmp_prefix) - .unwrap() - .join(Component::ParentDir) - .join(Component::ParentDir); - assert_eq!(get_normalized_path(&path), expected, "input {:?}", &path); - - Ok(()) -} diff --git a/helix-loader/src/lib.rs b/helix-loader/src/lib.rs index 5337d602..991504fb 100644 --- a/helix-loader/src/lib.rs +++ b/helix-loader/src/lib.rs @@ -1,14 +1,13 @@ pub mod config; pub mod grammar; +use helix_stdx::{env::current_working_dir, path}; + use etcetera::base_strategy::{choose_base_strategy, BaseStrategy}; use std::path::{Path, PathBuf}; -use std::sync::RwLock; pub const VERSION_AND_GIT_HASH: &str = env!("VERSION_AND_GIT_HASH"); -static CWD: RwLock> = RwLock::new(None); - static RUNTIME_DIRS: once_cell::sync::Lazy> = once_cell::sync::Lazy::new(prioritize_runtime_dirs); @@ -16,31 +15,6 @@ static CONFIG_FILE: once_cell::sync::OnceCell = once_cell::sync::OnceCe static LOG_FILE: once_cell::sync::OnceCell = once_cell::sync::OnceCell::new(); -// Get the current working directory. 
-// This information is managed internally as the call to std::env::current_dir -// might fail if the cwd has been deleted. -pub fn current_working_dir() -> PathBuf { - if let Some(path) = &*CWD.read().unwrap() { - return path.clone(); - } - - let path = std::env::current_dir() - .and_then(dunce::canonicalize) - .expect("Couldn't determine current working directory"); - let mut cwd = CWD.write().unwrap(); - *cwd = Some(path.clone()); - - path -} - -pub fn set_current_working_dir(path: impl AsRef) -> std::io::Result<()> { - let path = dunce::canonicalize(path)?; - std::env::set_current_dir(&path)?; - let mut cwd = CWD.write().unwrap(); - *cwd = Some(path); - Ok(()) -} - pub fn initialize_config_file(specified_file: Option) { let config_file = specified_file.unwrap_or_else(default_config_file); ensure_parent_dir(&config_file); @@ -280,21 +254,9 @@ fn ensure_parent_dir(path: &Path) { mod merge_toml_tests { use std::str; - use super::{current_working_dir, merge_toml_values, set_current_working_dir}; + use super::merge_toml_values; use toml::Value; - #[test] - fn current_dir_is_set() { - let new_path = dunce::canonicalize(std::env::temp_dir()).unwrap(); - let cwd = current_working_dir(); - assert_ne!(cwd, new_path); - - set_current_working_dir(&new_path).expect("Couldn't set new path"); - - let cwd = current_working_dir(); - assert_eq!(cwd, new_path); - } - #[test] fn language_toml_map_merges() { const USER: &str = r#" diff --git a/helix-lsp/Cargo.toml b/helix-lsp/Cargo.toml index 851351e0..510be6ee 100644 --- a/helix-lsp/Cargo.toml +++ b/helix-lsp/Cargo.toml @@ -13,6 +13,7 @@ homepage.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] +helix-stdx = { path = "../helix-stdx" } helix-core = { path = "../helix-core" } helix-loader = { path = "../helix-loader" } helix-parsec = { path = "../helix-parsec" } diff --git a/helix-lsp/src/client.rs b/helix-lsp/src/client.rs index 682d4db6..1af27c1d 100644 --- a/helix-lsp/src/client.rs +++ b/helix-lsp/src/client.rs @@ -4,8 +4,9 @@ use crate::{ Call, Error, OffsetEncoding, Result, }; -use helix_core::{find_workspace, path, syntax::LanguageServerFeature, ChangeSet, Rope}; +use helix_core::{find_workspace, syntax::LanguageServerFeature, ChangeSet, Rope}; use helix_loader::{self, VERSION_AND_GIT_HASH}; +use helix_stdx::path; use lsp::{ notification::DidChangeWorkspaceFolders, CodeActionCapabilityResolveSupport, DidChangeWorkspaceFoldersParams, OneOf, PositionEncodingKind, WorkspaceFolder, @@ -68,7 +69,7 @@ impl Client { may_support_workspace: bool, ) -> bool { let (workspace, workspace_is_cwd) = find_workspace(); - let workspace = path::get_normalized_path(&workspace); + let workspace = path::normalize(workspace); let root = find_lsp_workspace( doc_path .and_then(|x| x.parent().and_then(|x| x.to_str())) @@ -204,7 +205,7 @@ impl Client { let (server_rx, server_tx, initialize_notify) = Transport::start(reader, writer, stderr, id, name.clone()); let (workspace, workspace_is_cwd) = find_workspace(); - let workspace = path::get_normalized_path(&workspace); + let workspace = path::normalize(workspace); let root = find_lsp_workspace( doc_path .and_then(|x| x.parent().and_then(|x| x.to_str())) diff --git a/helix-lsp/src/lib.rs b/helix-lsp/src/lib.rs index 83625897..c99ec217 100644 --- a/helix-lsp/src/lib.rs +++ b/helix-lsp/src/lib.rs @@ -11,10 +11,10 @@ pub use lsp::{Position, Url}; pub use lsp_types as lsp; use futures_util::stream::select_all::SelectAll; -use helix_core::{ - path, - 
syntax::{LanguageConfiguration, LanguageServerConfiguration, LanguageServerFeatures}, +use helix_core::syntax::{ + LanguageConfiguration, LanguageServerConfiguration, LanguageServerFeatures, }; +use helix_stdx::path; use tokio::sync::mpsc::UnboundedReceiver; use std::{ @@ -958,10 +958,10 @@ pub fn find_lsp_workspace( let mut file = if file.is_absolute() { file.to_path_buf() } else { - let current_dir = helix_loader::current_working_dir(); + let current_dir = helix_stdx::env::current_working_dir(); current_dir.join(file) }; - file = path::get_normalized_path(&file); + file = path::normalize(&file); if !file.starts_with(workspace) { return None; @@ -978,7 +978,7 @@ pub fn find_lsp_workspace( if root_dirs .iter() - .any(|root_dir| path::get_normalized_path(&workspace.join(root_dir)) == ancestor) + .any(|root_dir| path::normalize(workspace.join(root_dir)) == ancestor) { // if the worskapce is the cwd do not search any higher for workspaces // but specify diff --git a/helix-stdx/Cargo.toml b/helix-stdx/Cargo.toml new file mode 100644 index 00000000..216a3b40 --- /dev/null +++ b/helix-stdx/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "helix-stdx" +description = "Standard library extensions" +include = ["src/**/*", "README.md"] +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +rust-version.workspace = true +categories.workspace = true +repository.workspace = true +homepage.workspace = true + +[dependencies] +dunce = "1.0" +etcetera = "0.8" + +[dev-dependencies] +tempfile = "3.9" diff --git a/helix-stdx/src/env.rs b/helix-stdx/src/env.rs new file mode 100644 index 00000000..864ba828 --- /dev/null +++ b/helix-stdx/src/env.rs @@ -0,0 +1,48 @@ +use std::{ + path::{Path, PathBuf}, + sync::RwLock, +}; + +static CWD: RwLock> = RwLock::new(None); + +// Get the current working directory. +// This information is managed internally as the call to std::env::current_dir +// might fail if the cwd has been deleted. 
+pub fn current_working_dir() -> PathBuf { + if let Some(path) = &*CWD.read().unwrap() { + return path.clone(); + } + + let path = std::env::current_dir() + .map(crate::path::normalize) + .expect("Couldn't determine current working directory"); + let mut cwd = CWD.write().unwrap(); + *cwd = Some(path.clone()); + + path +} + +pub fn set_current_working_dir(path: impl AsRef) -> std::io::Result<()> { + let path = crate::path::canonicalize(path); + std::env::set_current_dir(&path)?; + let mut cwd = CWD.write().unwrap(); + *cwd = Some(path); + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::{current_working_dir, set_current_working_dir}; + + #[test] + fn current_dir_is_set() { + let new_path = dunce::canonicalize(std::env::temp_dir()).unwrap(); + let cwd = current_working_dir(); + assert_ne!(cwd, new_path); + + set_current_working_dir(&new_path).expect("Couldn't set new path"); + + let cwd = current_working_dir(); + assert_eq!(cwd, new_path); + } +} diff --git a/helix-stdx/src/lib.rs b/helix-stdx/src/lib.rs new file mode 100644 index 00000000..ae3c3a98 --- /dev/null +++ b/helix-stdx/src/lib.rs @@ -0,0 +1,2 @@ +pub mod env; +pub mod path; diff --git a/helix-stdx/src/path.rs b/helix-stdx/src/path.rs new file mode 100644 index 00000000..5746657c --- /dev/null +++ b/helix-stdx/src/path.rs @@ -0,0 +1,185 @@ +pub use etcetera::home_dir; + +use std::path::{Component, Path, PathBuf}; + +use crate::env::current_working_dir; + +/// Replaces users home directory from `path` with tilde `~` if the directory +/// is available, otherwise returns the path unchanged. +pub fn fold_home_dir(path: &Path) -> PathBuf { + if let Ok(home) = home_dir() { + if let Ok(stripped) = path.strip_prefix(&home) { + return PathBuf::from("~").join(stripped); + } + } + + path.to_path_buf() +} + +/// Expands tilde `~` into users home directory if available, otherwise returns the path +/// unchanged. The tilde will only be expanded when present as the first component of the path +/// and only slash follows it. +pub fn expand_tilde(path: impl AsRef) -> PathBuf { + let path = path.as_ref(); + let mut components = path.components().peekable(); + if let Some(Component::Normal(c)) = components.peek() { + if c == &"~" { + if let Ok(home) = home_dir() { + // it's ok to unwrap, the path starts with `~` + return home.join(path.strip_prefix("~").unwrap()); + } + } + } + + path.to_path_buf() +} + +/// Normalize a path without resolving symlinks. +// Strategy: start from the first component and move up. Cannonicalize previous path, +// join component, cannonicalize new path, strip prefix and join to the final result. +pub fn normalize(path: impl AsRef) -> PathBuf { + let mut components = path.as_ref().components().peekable(); + let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() { + components.next(); + PathBuf::from(c.as_os_str()) + } else { + PathBuf::new() + }; + + for component in components { + match component { + Component::Prefix(..) => unreachable!(), + Component::RootDir => { + ret.push(component.as_os_str()); + } + Component::CurDir => {} + #[cfg(not(windows))] + Component::ParentDir => { + ret.pop(); + } + #[cfg(windows)] + Component::ParentDir => { + if let Some(head) = ret.components().next_back() { + match head { + Component::Prefix(_) | Component::RootDir => {} + Component::CurDir => unreachable!(), + // If we left previous component as ".." it means we met a symlink before and we can't pop path. 
+ Component::ParentDir => { + ret.push(".."); + } + Component::Normal(_) => { + if ret.is_symlink() { + ret.push(".."); + } else { + ret.pop(); + } + } + } + } + } + #[cfg(not(windows))] + Component::Normal(c) => { + ret.push(c); + } + #[cfg(windows)] + Component::Normal(c) => 'normal: { + use std::fs::canonicalize; + + let new_path = ret.join(c); + if new_path.is_symlink() { + ret = new_path; + break 'normal; + } + let (can_new, can_old) = (canonicalize(&new_path), canonicalize(&ret)); + match (can_new, can_old) { + (Ok(can_new), Ok(can_old)) => { + let striped = can_new.strip_prefix(can_old); + ret.push(striped.unwrap_or_else(|_| c.as_ref())); + } + _ => ret.push(c), + } + } + } + } + dunce::simplified(&ret).to_path_buf() +} + +/// Returns the canonical, absolute form of a path with all intermediate components normalized. +/// +/// This function is used instead of [`std::fs::canonicalize`] because we don't want to verify +/// here if the path exists, just normalize it's components. +pub fn canonicalize(path: impl AsRef) -> PathBuf { + let path = expand_tilde(path); + let path = if path.is_relative() { + current_working_dir().join(path) + } else { + path + }; + + normalize(path) +} + +pub fn get_relative_path(path: impl AsRef) -> PathBuf { + let path = PathBuf::from(path.as_ref()); + let path = if path.is_absolute() { + let cwdir = normalize(current_working_dir()); + normalize(&path) + .strip_prefix(cwdir) + .map(PathBuf::from) + .unwrap_or(path) + } else { + path + }; + fold_home_dir(&path) +} + +/// Returns a truncated filepath where the basepart of the path is reduced to the first +/// char of the folder and the whole filename appended. +/// +/// Also strip the current working directory from the beginning of the path. +/// Note that this function does not check if the truncated path is unambiguous. +/// +/// ``` +/// use helix_stdx::path::get_truncated_path; +/// use std::path::Path; +/// +/// assert_eq!( +/// get_truncated_path("/home/cnorris/documents/jokes.txt").as_path(), +/// Path::new("/h/c/d/jokes.txt") +/// ); +/// assert_eq!( +/// get_truncated_path("jokes.txt").as_path(), +/// Path::new("jokes.txt") +/// ); +/// assert_eq!( +/// get_truncated_path("/jokes.txt").as_path(), +/// Path::new("/jokes.txt") +/// ); +/// assert_eq!( +/// get_truncated_path("/h/c/d/jokes.txt").as_path(), +/// Path::new("/h/c/d/jokes.txt") +/// ); +/// assert_eq!(get_truncated_path("").as_path(), Path::new("")); +/// ``` +/// +pub fn get_truncated_path(path: impl AsRef) -> PathBuf { + let cwd = current_working_dir(); + let path = path + .as_ref() + .strip_prefix(cwd) + .unwrap_or_else(|_| path.as_ref()); + let file = path.file_name().unwrap_or_default(); + let base = path.parent().unwrap_or_else(|| Path::new("")); + let mut ret = PathBuf::new(); + for d in base { + ret.push( + d.to_string_lossy() + .chars() + .next() + .unwrap_or_default() + .to_string(), + ); + } + ret.push(file); + ret +} diff --git a/helix-stdx/tests/path.rs b/helix-stdx/tests/path.rs new file mode 100644 index 00000000..cc3c15cb --- /dev/null +++ b/helix-stdx/tests/path.rs @@ -0,0 +1,124 @@ +#![cfg(windows)] + +use std::{ + env::set_current_dir, + error::Error, + path::{Component, Path, PathBuf}, +}; + +use helix_stdx::path; +use tempfile::Builder; + +// Paths on Windows are almost always case-insensitive. +// Normalization should return the original path. +// E.g. mkdir `CaSe`, normalize(`case`) = `CaSe`. 
+#[test] +fn test_case_folding_windows() -> Result<(), Box> { + // tmp/root/case + let tmp_prefix = std::env::temp_dir(); + set_current_dir(&tmp_prefix)?; + + let root = Builder::new().prefix("root-").tempdir()?; + let case = Builder::new().prefix("CaSe-").tempdir_in(&root)?; + + let root_without_prefix = root.path().strip_prefix(&tmp_prefix)?; + + let lowercase_case = format!( + "case-{}", + case.path() + .file_name() + .unwrap() + .to_string_lossy() + .split_at(5) + .1 + ); + let test_path = root_without_prefix.join(lowercase_case); + assert_eq!( + path::normalize(&test_path), + case.path().strip_prefix(&tmp_prefix)? + ); + + Ok(()) +} + +#[test] +fn test_normalize_path() -> Result<(), Box> { + /* + tmp/root/ + ├── link -> dir1/orig_file + ├── dir1/ + │ └── orig_file + └── dir2/ + └── dir_link -> ../dir1/ + */ + + let tmp_prefix = std::env::temp_dir(); + set_current_dir(&tmp_prefix)?; + + // Create a tree structure as shown above + let root = Builder::new().prefix("root-").tempdir()?; + let dir1 = Builder::new().prefix("dir1-").tempdir_in(&root)?; + let orig_file = Builder::new().prefix("orig_file-").tempfile_in(&dir1)?; + let dir2 = Builder::new().prefix("dir2-").tempdir_in(&root)?; + + // Create path and delete existing file + let dir_link = Builder::new() + .prefix("dir_link-") + .tempfile_in(&dir2)? + .path() + .to_owned(); + let link = Builder::new() + .prefix("link-") + .tempfile_in(&root)? + .path() + .to_owned(); + + use std::os::windows; + windows::fs::symlink_dir(&dir1, &dir_link)?; + windows::fs::symlink_file(&orig_file, &link)?; + + // root/link + let path = link.strip_prefix(&tmp_prefix)?; + assert_eq!( + path::normalize(path), + path, + "input {:?} and symlink last component shouldn't be resolved", + path + ); + + // root/dir2/dir_link/orig_file/../.. 
+ let path = dir_link + .strip_prefix(&tmp_prefix) + .unwrap() + .join(orig_file.path().file_name().unwrap()) + .join(Component::ParentDir) + .join(Component::ParentDir); + let expected = dir_link + .strip_prefix(&tmp_prefix) + .unwrap() + .join(Component::ParentDir); + assert_eq!( + path::normalize(&path), + expected, + "input {:?} and \"..\" should not erase the simlink that goes ahead", + &path + ); + + // root/link/.././../dir2/../ + let path = link + .strip_prefix(&tmp_prefix) + .unwrap() + .join(Component::ParentDir) + .join(Component::CurDir) + .join(Component::ParentDir) + .join(dir2.path().file_name().unwrap()) + .join(Component::ParentDir); + let expected = link + .strip_prefix(&tmp_prefix) + .unwrap() + .join(Component::ParentDir) + .join(Component::ParentDir); + assert_eq!(path::normalize(&path), expected, "input {:?}", &path); + + Ok(()) +} diff --git a/helix-term/Cargo.toml b/helix-term/Cargo.toml index 80bda2b6..21c35553 100644 --- a/helix-term/Cargo.toml +++ b/helix-term/Cargo.toml @@ -23,6 +23,7 @@ name = "hx" path = "src/main.rs" [dependencies] +helix-stdx = { path = "../helix-stdx" } helix-core = { path = "../helix-core" } helix-event = { path = "../helix-event" } helix-view = { path = "../helix-view" } diff --git a/helix-term/src/application.rs b/helix-term/src/application.rs index 1b0a06dd..290441b4 100644 --- a/helix-term/src/application.rs +++ b/helix-term/src/application.rs @@ -1,11 +1,12 @@ use arc_swap::{access::Map, ArcSwap}; use futures_util::Stream; -use helix_core::{path::get_relative_path, pos_at_coords, syntax, Selection}; +use helix_core::{pos_at_coords, syntax, Selection}; use helix_lsp::{ lsp::{self, notification::Notification}, util::lsp_range_to_range, LspProgressMap, }; +use helix_stdx::path::get_relative_path; use helix_view::{ align_view, document::DocumentSavedEventResult, diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs index 937326f6..53783e4e 100644 --- a/helix-term/src/commands.rs +++ b/helix-term/src/commands.rs @@ -2169,7 +2169,7 @@ fn global_search(cx: &mut Context) { type Data = Option; fn format(&self, current_path: &Self::Data) -> Row { - let relative_path = helix_core::path::get_relative_path(&self.path) + let relative_path = helix_stdx::path::get_relative_path(&self.path) .to_string_lossy() .into_owned(); if current_path @@ -2218,7 +2218,7 @@ fn global_search(cx: &mut Context) { .case_smart(smart_case) .build(regex.as_str()) { - let search_root = helix_loader::current_working_dir(); + let search_root = helix_stdx::env::current_working_dir(); if !search_root.exists() { cx.editor .set_error("Current working directory does not exist"); @@ -2731,7 +2731,7 @@ fn file_picker_in_current_buffer_directory(cx: &mut Context) { } fn file_picker_in_current_directory(cx: &mut Context) { - let cwd = helix_loader::current_working_dir(); + let cwd = helix_stdx::env::current_working_dir(); if !cwd.exists() { cx.editor .set_error("Current working directory does not exist"); @@ -2759,7 +2759,7 @@ fn buffer_picker(cx: &mut Context) { let path = self .path .as_deref() - .map(helix_core::path::get_relative_path); + .map(helix_stdx::path::get_relative_path); let path = match path.as_deref().and_then(Path::to_str) { Some(path) => path, None => SCRATCH_BUFFER_NAME, @@ -2826,7 +2826,7 @@ fn jumplist_picker(cx: &mut Context) { let path = self .path .as_deref() - .map(helix_core::path::get_relative_path); + .map(helix_stdx::path::get_relative_path); let path = match path.as_deref().and_then(Path::to_str) { Some(path) => path, None => 
SCRATCH_BUFFER_NAME, diff --git a/helix-term/src/commands/dap.rs b/helix-term/src/commands/dap.rs index dec25cbd..d62b0a4e 100644 --- a/helix-term/src/commands/dap.rs +++ b/helix-term/src/commands/dap.rs @@ -217,7 +217,7 @@ pub fn dap_start_impl( } } - args.insert("cwd", to_value(helix_loader::current_working_dir())?); + args.insert("cwd", to_value(helix_stdx::env::current_working_dir())?); let args = to_value(args).unwrap(); diff --git a/helix-term/src/commands/lsp.rs b/helix-term/src/commands/lsp.rs index 0096e6aa..051cdcd3 100644 --- a/helix-term/src/commands/lsp.rs +++ b/helix-term/src/commands/lsp.rs @@ -17,9 +17,8 @@ use tui::{ use super::{align_view, push_jump, Align, Context, Editor, Open}; -use helix_core::{ - path, syntax::LanguageServerFeature, text_annotations::InlineAnnotation, Selection, -}; +use helix_core::{syntax::LanguageServerFeature, text_annotations::InlineAnnotation, Selection}; +use helix_stdx::path; use helix_view::{ document::{DocumentInlayHints, DocumentInlayHintsId, Mode}, editor::Action, @@ -1018,7 +1017,7 @@ fn goto_impl( locations: Vec, offset_encoding: OffsetEncoding, ) { - let cwdir = helix_loader::current_working_dir(); + let cwdir = helix_stdx::env::current_working_dir(); match locations.as_slice() { [location] => { diff --git a/helix-term/src/commands/typed.rs b/helix-term/src/commands/typed.rs index eb88e041..ee02a7d2 100644 --- a/helix-term/src/commands/typed.rs +++ b/helix-term/src/commands/typed.rs @@ -7,7 +7,7 @@ use super::*; use helix_core::fuzzy::fuzzy_match; use helix_core::indent::MAX_INDENT; -use helix_core::{encoding, line_ending, path::get_canonicalized_path, shellwords::Shellwords}; +use helix_core::{encoding, line_ending, shellwords::Shellwords}; use helix_lsp::{OffsetEncoding, Url}; use helix_view::document::DEFAULT_LANGUAGE_NAME; use helix_view::editor::{Action, CloseError, ConfigEvent}; @@ -111,7 +111,7 @@ fn open(cx: &mut compositor::Context, args: &[Cow], event: PromptEvent) -> ensure!(!args.is_empty(), "wrong argument count"); for arg in args { let (path, pos) = args::parse_file(arg); - let path = helix_core::path::expand_tilde(&path); + let path = helix_stdx::path::expand_tilde(&path); // If the path is a directory, open a file picker on that directory and update the status // message if let Ok(true) = std::fs::canonicalize(&path).map(|p| p.is_dir()) { @@ -1079,18 +1079,17 @@ fn change_current_directory( return Ok(()); } - let dir = helix_core::path::expand_tilde( + let dir = helix_stdx::path::expand_tilde( args.first() .context("target directory not provided")? - .as_ref() .as_ref(), ); - helix_loader::set_current_working_dir(dir)?; + helix_stdx::env::set_current_working_dir(dir)?; cx.editor.set_status(format!( "Current working directory is now {}", - helix_loader::current_working_dir().display() + helix_stdx::env::current_working_dir().display() )); Ok(()) } @@ -1104,7 +1103,7 @@ fn show_current_directory( return Ok(()); } - let cwd = helix_loader::current_working_dir(); + let cwd = helix_stdx::env::current_working_dir(); let message = format!("Current working directory is {}", cwd.display()); if cwd.exists() { @@ -2409,7 +2408,8 @@ fn move_buffer( ensure!(args.len() == 1, format!(":move takes one argument")); let doc = doc!(cx.editor); - let new_path = get_canonicalized_path(&PathBuf::from(args.first().unwrap().to_string())); + let new_path = + helix_stdx::path::canonicalize(&PathBuf::from(args.first().unwrap().to_string())); let old_path = doc .path() .ok_or_else(|| anyhow!("Scratch buffer cannot be moved. Use :write instead"))? 
diff --git a/helix-term/src/main.rs b/helix-term/src/main.rs index a62c54a4..132ee796 100644 --- a/helix-term/src/main.rs +++ b/helix-term/src/main.rs @@ -118,16 +118,16 @@ FLAGS: // Before setting the working directory, resolve all the paths in args.files for (path, _) in args.files.iter_mut() { - *path = helix_core::path::get_canonicalized_path(path); + *path = helix_stdx::path::canonicalize(&path); } // NOTE: Set the working directory early so the correct configuration is loaded. Be aware that // Application::new() depends on this logic so it must be updated if this changes. if let Some(path) = &args.working_directory { - helix_loader::set_current_working_dir(path)?; + helix_stdx::env::set_current_working_dir(path)?; } else if let Some((path, _)) = args.files.first().filter(|p| p.0.is_dir()) { // If the first file is a directory, it will be the working directory unless -w was specified - helix_loader::set_current_working_dir(path)?; + helix_stdx::env::set_current_working_dir(path)?; } let config = match Config::load_default() { diff --git a/helix-term/src/ui/mod.rs b/helix-term/src/ui/mod.rs index 660bbfea..efa2473e 100644 --- a/helix-term/src/ui/mod.rs +++ b/helix-term/src/ui/mod.rs @@ -409,7 +409,7 @@ pub mod completers { use std::path::Path; let is_tilde = input == "~"; - let path = helix_core::path::expand_tilde(Path::new(input)); + let path = helix_stdx::path::expand_tilde(Path::new(input)); let (dir, file_name) = if input.ends_with(std::path::MAIN_SEPARATOR) { (path, None) @@ -430,7 +430,7 @@ pub mod completers { match path.parent() { Some(path) if !path.as_os_str().is_empty() => path.to_path_buf(), // Path::new("h")'s parent is Some("")... - _ => helix_loader::current_working_dir(), + _ => helix_stdx::env::current_working_dir(), } }; diff --git a/helix-term/src/ui/picker.rs b/helix-term/src/ui/picker.rs index 08a367ba..4be5a11e 100644 --- a/helix-term/src/ui/picker.rs +++ b/helix-term/src/ui/picker.rs @@ -63,7 +63,7 @@ impl PathOrId { fn get_canonicalized(self) -> Self { use PathOrId::*; match self { - Path(path) => Path(helix_core::path::get_canonicalized_path(&path)), + Path(path) => Path(helix_stdx::path::canonicalize(path)), Id(id) => Id(id), } } diff --git a/helix-term/tests/test/commands/write.rs b/helix-term/tests/test/commands/write.rs index 376ba5e7..adc721c5 100644 --- a/helix-term/tests/test/commands/write.rs +++ b/helix-term/tests/test/commands/write.rs @@ -3,7 +3,8 @@ use std::{ ops::RangeInclusive, }; -use helix_core::{diagnostic::Severity, path::get_normalized_path}; +use helix_core::diagnostic::Severity; +use helix_stdx::path; use helix_view::doc; use super::*; @@ -23,7 +24,7 @@ async fn test_write_quit_fail() -> anyhow::Result<()> { assert_eq!(1, docs.len()); let doc = docs.pop().unwrap(); - assert_eq!(Some(&get_normalized_path(file.path())), doc.path()); + assert_eq!(Some(&path::normalize(file.path())), doc.path()); assert_eq!(&Severity::Error, app.editor.get_status().unwrap().1); }), false, @@ -269,7 +270,7 @@ async fn test_write_scratch_to_new_path() -> anyhow::Result<()> { assert_eq!(1, docs.len()); let doc = docs.pop().unwrap(); - assert_eq!(Some(&get_normalized_path(file.path())), doc.path()); + assert_eq!(Some(&path::normalize(file.path())), doc.path()); }), false, ) @@ -341,7 +342,7 @@ async fn test_write_new_path() -> anyhow::Result<()> { Some(&|app| { let doc = doc!(app.editor); assert!(!app.editor.is_err()); - assert_eq!(&get_normalized_path(file1.path()), doc.path().unwrap()); + assert_eq!(&path::normalize(file1.path()), doc.path().unwrap()); }), ), ( 
@@ -349,7 +350,7 @@ async fn test_write_new_path() -> anyhow::Result<()> { Some(&|app| { let doc = doc!(app.editor); assert!(!app.editor.is_err()); - assert_eq!(&get_normalized_path(file2.path()), doc.path().unwrap()); + assert_eq!(&path::normalize(file2.path()), doc.path().unwrap()); assert!(app.editor.document_by_path(file1.path()).is_none()); }), ), diff --git a/helix-term/tests/test/splits.rs b/helix-term/tests/test/splits.rs index f010c86b..3b66c048 100644 --- a/helix-term/tests/test/splits.rs +++ b/helix-term/tests/test/splits.rs @@ -1,6 +1,6 @@ use super::*; -use helix_core::path::get_normalized_path; +use helix_stdx::path; #[tokio::test(flavor = "multi_thread")] async fn test_split_write_quit_all() -> anyhow::Result<()> { @@ -27,21 +27,21 @@ async fn test_split_write_quit_all() -> anyhow::Result<()> { let doc1 = docs .iter() - .find(|doc| doc.path().unwrap() == &get_normalized_path(file1.path())) + .find(|doc| doc.path().unwrap() == &path::normalize(file1.path())) .unwrap(); assert_eq!("hello1", doc1.text().to_string()); let doc2 = docs .iter() - .find(|doc| doc.path().unwrap() == &get_normalized_path(file2.path())) + .find(|doc| doc.path().unwrap() == &path::normalize(file2.path())) .unwrap(); assert_eq!("hello2", doc2.text().to_string()); let doc3 = docs .iter() - .find(|doc| doc.path().unwrap() == &get_normalized_path(file3.path())) + .find(|doc| doc.path().unwrap() == &path::normalize(file3.path())) .unwrap(); assert_eq!("hello3", doc3.text().to_string()); diff --git a/helix-view/Cargo.toml b/helix-view/Cargo.toml index db53b54c..0dc18b37 100644 --- a/helix-view/Cargo.toml +++ b/helix-view/Cargo.toml @@ -15,6 +15,7 @@ default = [] term = ["crossterm"] [dependencies] +helix-stdx = { path = "../helix-stdx" } helix-core = { path = "../helix-core" } helix-event = { path = "../helix-event" } helix-loader = { path = "../helix-loader" } diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index 0de0cd17..6473c2d1 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -855,7 +855,7 @@ impl Document { let text = self.text().clone(); let path = match path { - Some(path) => helix_core::path::get_canonicalized_path(&path), + Some(path) => helix_stdx::path::canonicalize(path), None => { if self.path.is_none() { bail!("Can't save with no path set!"); @@ -1049,7 +1049,7 @@ impl Document { } pub fn set_path(&mut self, path: Option<&Path>) { - let path = path.map(helix_core::path::get_canonicalized_path); + let path = path.map(helix_stdx::path::canonicalize); // if parent doesn't exist we still want to open the document // and error out when document is saved @@ -1672,7 +1672,7 @@ impl Document { pub fn relative_path(&self) -> Option { self.path .as_deref() - .map(helix_core::path::get_relative_path) + .map(helix_stdx::path::get_relative_path) } pub fn display_name(&self) -> Cow<'static, str> { diff --git a/helix-view/src/editor.rs b/helix-view/src/editor.rs index f13df213..0ab4be8b 100644 --- a/helix-view/src/editor.rs +++ b/helix-view/src/editor.rs @@ -1464,7 +1464,7 @@ impl Editor { // ??? possible use for integration tests pub fn open(&mut self, path: &Path, action: Action) -> Result { - let path = helix_core::path::get_canonicalized_path(path); + let path = helix_stdx::path::canonicalize(path); let id = self.document_by_path(&path).map(|doc| doc.id); let id = if let Some(id) = id { -- cgit v1.2.3-70-g09d2
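Usage note (not part of the patch): a minimal sketch of how downstream code calls the relocated path helpers after this change. The renames are get_normalized_path -> helix_stdx::path::normalize and get_canonicalized_path -> helix_stdx::path::canonicalize, while expand_tilde, fold_home_dir, get_relative_path, and get_truncated_path keep their names. The literal paths and the main() wrapper below are illustrative only.

    // Sketch only: exercises the path API added in helix-stdx/src/path.rs.
    // The example paths are made up for illustration.
    use helix_stdx::path;

    fn main() {
        // Lexical normalization: "." and ".." components are folded without
        // requiring the path to exist (symlinks are still respected on Windows).
        let normalized = path::normalize("src/./ui/../commands.rs");

        // canonicalize() expands a leading `~`, joins relative paths onto the
        // tracked working directory, then normalizes; unlike
        // std::fs::canonicalize it does not fail for missing paths.
        let absolute = path::canonicalize("~/projects/helix/Cargo.toml");

        // Helpers used for display in the editor UI.
        let relative = path::get_relative_path(&absolute);
        let truncated = path::get_truncated_path("/home/cnorris/documents/jokes.txt");

        println!("{}", normalized.display());
        println!("{}", absolute.display());
        println!("{}", relative.display());  // home directory folded back to `~`
        println!("{}", truncated.display()); // "/h/c/d/jokes.txt"
    }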
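Similarly, a short sketch of the CWD tracking that moved from helix-loader into helix_stdx::env; the temp-dir target here is illustrative.

    // Sketch only: the cached working-directory API from helix-stdx/src/env.rs.
    use helix_stdx::env::{current_working_dir, set_current_working_dir};

    fn main() -> std::io::Result<()> {
        // Canonicalizes the target, calls std::env::set_current_dir, and
        // updates the cached value.
        set_current_working_dir(std::env::temp_dir())?;

        // Returns the cached path, so this keeps working even if the
        // directory is deleted after startup (the reason the cache exists).
        let cwd = current_working_dir();
        println!("cwd: {}", cwd.display());
        Ok(())
    }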