about | summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
authorjazzfool2023-03-30 16:21:40 +0000
committerGitHub2023-03-30 16:21:40 +0000
commitd04288e0f3d292ce47fc0246bcbdc50a9d57ad5e (patch)
treeca103d419254e48e0043fc4acdd610643e126f16
parent5b3dd6a678ba138ea21d7d5dd8d3c8a53c7a6d3b (diff)
Canonicalize paths before stripping current dir as prefix (#6290)
Co-authored-by: jazzfool <shamoslover69@gmail.com>
-rw-r--r--Cargo.lock1
-rw-r--r--helix-core/Cargo.toml1
-rw-r--r--helix-core/src/path.rs29
-rw-r--r--helix-term/tests/integration.rs2
-rw-r--r--helix-term/tests/test/commands/write.rs10
-rw-r--r--helix-term/tests/test/splits.rs8
6 files changed, 37 insertions, 14 deletions
diff --git a/Cargo.lock b/Cargo.lock
index e6ee9d54..278ab535 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1082,6 +1082,7 @@ dependencies = [
"arc-swap",
"bitflags 2.0.2",
"chrono",
+ "dunce",
"encoding_rs",
"etcetera",
"hashbrown 0.13.2",
diff --git a/helix-core/Cargo.toml b/helix-core/Cargo.toml
index 9dfef9ae..e5c5f8f1 100644
--- a/helix-core/Cargo.toml
+++ b/helix-core/Cargo.toml
@@ -32,6 +32,7 @@ regex = "1"
bitflags = "2.0"
ahash = "0.8.3"
hashbrown = { version = "0.13.2", features = ["raw"] }
+dunce = "1.0"
log = "0.4"
serde = { version = "1.0", features = ["derive"] }
diff --git a/helix-core/src/path.rs b/helix-core/src/path.rs
index d59a6baa..efa46c46 100644
--- a/helix-core/src/path.rs
+++ b/helix-core/src/path.rs
@@ -40,6 +40,21 @@ pub fn expand_tilde(path: &Path) -> PathBuf {
/// needs to improve on.
/// Copied from cargo: <https://github.com/rust-lang/cargo/blob/070e459c2d8b79c5b2ac5218064e7603329c92ae/crates/cargo-util/src/paths.rs#L81>
pub fn get_normalized_path(path: &Path) -> PathBuf {
+ // normalization strategy is to canonicalize first ancestor path that exists (i.e., canonicalize as much as possible),
+ // then run handrolled normalization on the non-existent remainder
+ let (base, path) = path
+ .ancestors()
+ .find_map(|base| {
+ let canonicalized_base = dunce::canonicalize(base).ok()?;
+ let remainder = path.strip_prefix(base).ok()?.into();
+ Some((canonicalized_base, remainder))
+ })
+ .unwrap_or_else(|| (PathBuf::new(), PathBuf::from(path)));
+
+ if path.as_os_str().is_empty() {
+ return base;
+ }
+
let mut components = path.components().peekable();
let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() {
components.next();
@@ -63,7 +78,7 @@ pub fn get_normalized_path(path: &Path) -> PathBuf {
}
}
}
- ret
+ base.join(ret)
}
/// Returns the canonical, absolute form of a path with all intermediate components normalized.
@@ -82,13 +97,19 @@ pub fn get_canonicalized_path(path: &Path) -> std::io::Result<PathBuf> {
}
pub fn get_relative_path(path: &Path) -> PathBuf {
+ let path = PathBuf::from(path);
let path = if path.is_absolute() {
- let cwdir = std::env::current_dir().expect("couldn't determine current directory");
- path.strip_prefix(cwdir).unwrap_or(path)
+ let cwdir = std::env::current_dir()
+ .map(|path| get_normalized_path(&path))
+ .expect("couldn't determine current directory");
+ get_normalized_path(&path)
+ .strip_prefix(cwdir)
+ .map(PathBuf::from)
+ .unwrap_or(path)
} else {
path
};
- fold_home_dir(path)
+ fold_home_dir(&path)
}
/// Returns a truncated filepath where the basepart of the path is reduced to the first
diff --git a/helix-term/tests/integration.rs b/helix-term/tests/integration.rs
index cec374af..d77eefed 100644
--- a/helix-term/tests/integration.rs
+++ b/helix-term/tests/integration.rs
@@ -2,8 +2,6 @@
mod test {
mod helpers;
- use std::path::PathBuf;
-
use helix_core::{syntax::AutoPairConfig, Selection};
use helix_term::config::Config;
diff --git a/helix-term/tests/test/commands/write.rs b/helix-term/tests/test/commands/write.rs
index 0ea66a12..26515b7a 100644
--- a/helix-term/tests/test/commands/write.rs
+++ b/helix-term/tests/test/commands/write.rs
@@ -3,7 +3,7 @@ use std::{
ops::RangeInclusive,
};
-use helix_core::diagnostic::Severity;
+use helix_core::{diagnostic::Severity, path::get_normalized_path};
use helix_view::doc;
use super::*;
@@ -23,7 +23,7 @@ async fn test_write_quit_fail() -> anyhow::Result<()> {
assert_eq!(1, docs.len());
let doc = docs.pop().unwrap();
- assert_eq!(Some(file.path()), doc.path().map(PathBuf::as_path));
+ assert_eq!(Some(&get_normalized_path(file.path())), doc.path());
assert_eq!(&Severity::Error, app.editor.get_status().unwrap().1);
}),
false,
@@ -269,7 +269,7 @@ async fn test_write_scratch_to_new_path() -> anyhow::Result<()> {
assert_eq!(1, docs.len());
let doc = docs.pop().unwrap();
- assert_eq!(Some(&file.path().to_path_buf()), doc.path());
+ assert_eq!(Some(&get_normalized_path(file.path())), doc.path());
}),
false,
)
@@ -341,7 +341,7 @@ async fn test_write_new_path() -> anyhow::Result<()> {
Some(&|app| {
let doc = doc!(app.editor);
assert!(!app.editor.is_err());
- assert_eq!(file1.path(), doc.path().unwrap());
+ assert_eq!(&get_normalized_path(file1.path()), doc.path().unwrap());
}),
),
(
@@ -349,7 +349,7 @@ async fn test_write_new_path() -> anyhow::Result<()> {
Some(&|app| {
let doc = doc!(app.editor);
assert!(!app.editor.is_err());
- assert_eq!(file2.path(), doc.path().unwrap());
+ assert_eq!(&get_normalized_path(file2.path()), doc.path().unwrap());
assert!(app.editor.document_by_path(file1.path()).is_none());
}),
),
diff --git a/helix-term/tests/test/splits.rs b/helix-term/tests/test/splits.rs
index 96ced21a..1d70f24a 100644
--- a/helix-term/tests/test/splits.rs
+++ b/helix-term/tests/test/splits.rs
@@ -1,5 +1,7 @@
use super::*;
+use helix_core::path::get_normalized_path;
+
#[tokio::test(flavor = "multi_thread")]
async fn test_split_write_quit_all() -> anyhow::Result<()> {
let mut file1 = tempfile::NamedTempFile::new()?;
@@ -25,21 +27,21 @@ async fn test_split_write_quit_all() -> anyhow::Result<()> {
let doc1 = docs
.iter()
- .find(|doc| doc.path().unwrap() == file1.path())
+ .find(|doc| doc.path().unwrap() == &get_normalized_path(file1.path()))
.unwrap();
assert_eq!("hello1", doc1.text().to_string());
let doc2 = docs
.iter()
- .find(|doc| doc.path().unwrap() == file2.path())
+ .find(|doc| doc.path().unwrap() == &get_normalized_path(file2.path()))
.unwrap();
assert_eq!("hello2", doc2.text().to_string());
let doc3 = docs
.iter()
- .find(|doc| doc.path().unwrap() == file3.path())
+ .find(|doc| doc.path().unwrap() == &get_normalized_path(file3.path()))
.unwrap();
assert_eq!("hello3", doc3.text().to_string());