diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 09f6010bc30..e5a84914612 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -38,7 +38,6 @@ jobs: - name: Setup dependencies run: sudo apt-get install tree - - run: git config --global protocol.file.allow always # workaround for https://bugs.launchpad.net/ubuntu/+source/git/+bug/1993586 - name: test env: CI: true diff --git a/Cargo.lock b/Cargo.lock index f3fd6d20b3d..a3ec58587cc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1694,7 +1694,6 @@ dependencies = [ "serial_test", "signal-hook", "smallvec", - "tempfile", "thiserror", "unicode-normalization", "walkdir", @@ -2781,9 +2780,9 @@ dependencies = [ [[package]] name = "prodash" -version = "20.2.0" +version = "21.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd4e8b029f29b4eb8f95315957fb7ac8a8fd1924405fadf885b0e208fe34ba39" +checksum = "d27f6a3ef883aaea624a6ad91c88452e5df05430a79fd880c12673a7bc1648d6" dependencies = [ "async-io", "atty", diff --git a/Cargo.toml b/Cargo.toml index bebce28c9d3..9b54b94200d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -86,7 +86,7 @@ git-repository = { version = "^0.25.0", path = "git-repository", default-feature git-transport-for-configuration-only = { package = "git-transport", optional = true, version = "^0.21.0", path = "git-transport" } clap = { version = "3.2.5", features = ["derive", "cargo"] } -prodash = { version = "20.2.0", optional = true, default-features = false } +prodash = { version = "21", optional = true, default-features = false } atty = { version = "0.2.14", optional = true, default-features = false } env_logger = { version = "0.9.0", default-features = false } crosstermion = { version = "0.10.1", optional = true, default-features = false } diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md index 2a11a4e4f26..d3fbb7622ed 100644 --- a/DEVELOPMENT.md +++ b/DEVELOPMENT.md @@ -125,8 +125,9 @@ A bunch of notes collected to keep track of what's needed to eventually support ## `Options` vs `Context` -- Use `Options` whenever there is something to configure in terms of branching behaviour. -- Use `Context` when potential optional data is required to perform an operation at all. See `git_config::path::Context` as reference. +- Use `Options` whenever there is something to configure in terms of branching behaviour. It can be defaulted, and if it can't these fields should be parameters. +- Use `Context` when potential optional data is required to perform an operation at all. See `git_config::path::Context` as reference. It can't be defaulted and the + fields could also be parameters. ## Examples, Experiments, Porcelain CLI and Plumbing CLI - which does what? diff --git a/README.md b/README.md index 3620566c01d..b9f324932e2 100644 --- a/README.md +++ b/README.md @@ -54,8 +54,8 @@ Please see _'Development Status'_ for a listing of all crates and their capabili * [x] **ref-map** - show how remote references relate to their local tracking branches as mapped by refspecs. * [x] **fetch** - fetch the current remote or the given one, optionally just as dry-run. * **clone** - * [ ] initialize a new **bare** repository and fetch all objects. - * [ ] initialize a new repository, fetch all objects and checkout the main worktree. + * [x] initialize a new **bare** repository and fetch all objects. + * [x] initialize a new repository, fetch all objects and checkout the main worktree. 
* **credential** * [x] **fill/approve/reject** - The same as `git credential`, but implemented in Rust, calling helpers only when from trusted configuration. * **free** - no git repository necessary @@ -313,11 +313,13 @@ For additional details, also take a look at the [collaboration guide]. Provide a CLI to for the most basic user journey: * [x] initialize a repository +* [x] fetch + * [ ] and update worktree * clone a repository - [ ] bare - [ ] with working tree -* [ ] create a commit -* [ ] add a remote +* [ ] create a commit after adding worktree files +* [x] add a remote * [ ] push * [x] create (thin) pack diff --git a/crate-status.md b/crate-status.md index 48c7bc335e5..9ebdc10d16a 100644 --- a/crate-status.md +++ b/crate-status.md @@ -110,7 +110,7 @@ Check out the [performance discussion][git-diff-performance] as well. * There are various ways to generate a patch from two blobs. * [ ] any * **lines** - * [ ] Simple line-by-line diffs powered by the `similar` crate. + * [x] Simple line-by-line diffs powered by the `imara-diff` crate. * diffing, merging, working with hunks of data * find differences between various states, i.e. index, working tree, commit-tree * [x] API documentation @@ -143,8 +143,6 @@ Check out the [performance discussion][git-traverse-performance] as well. * [x] convert URL to string * [x] API documentation * [ ] Some examples -- **deviation** - * URLs may not contain passwords, which cannot be represent here and if present, will be ignored. ### git-protocol * _abstract over protocol versions to allow delegates to deal only with a single way of doing things_ @@ -467,7 +465,7 @@ See its [README.md](https://github.com/Byron/gitoxide/blob/main/git-lock/README. * [x] access to refs and objects * **credentials** * [x] run `git credential` directly - * [x] use credential helper configuration and to obtain credentials with `git_credential::helper::Cascade` + * [x] use credential helper configuration and to obtain credentials with `git_credentials::helper::Cascade` * **config** * [ ] facilities to apply the [url-match](https://git-scm.com/docs/git-config#Documentation/git-config.txt-httplturlgt) algorithm and to [normalize urls](https://github.com/git/git/blob/be1a02a17ede4082a86dfbfee0f54f345e8b43ac/urlmatch.c#L109:L109) before comparison. @@ -504,11 +502,12 @@ See its [README.md](https://github.com/Byron/gitoxide/blob/main/git-lock/README. 
* **remotes** * [ ] clone * [ ] shallow - * [ ] fetch + * [ ] [bundles](https://git-scm.com/docs/git-bundle) + * [x] fetch * [ ] push * [x] ls-refs - * [ ] ls-refs with ref-spec filter - * [ ] list, find by name + * [x] ls-refs with ref-spec filter + * [x] list, find by name * [x] create in memory * [ ] groups * [ ] [remote and branch files](https://github.com/git/git/blob/master/remote.c#L300) diff --git a/etc/check-package-size.sh b/etc/check-package-size.sh index 0b7749c717a..23f529f2c38 100755 --- a/etc/check-package-size.sh +++ b/etc/check-package-size.sh @@ -57,6 +57,6 @@ echo "in root: gitoxide CLI" (enter git-odb && indent cargo diet -n --package-size-limit 120KB) (enter git-protocol && indent cargo diet -n --package-size-limit 55KB) (enter git-packetline && indent cargo diet -n --package-size-limit 35KB) -(enter git-repository && indent cargo diet -n --package-size-limit 200KB) +(enter git-repository && indent cargo diet -n --package-size-limit 210KB) (enter git-transport && indent cargo diet -n --package-size-limit 60KB) (enter gitoxide-core && indent cargo diet -n --package-size-limit 90KB) diff --git a/git-config/src/file/access/comfort.rs b/git-config/src/file/access/comfort.rs index e9ac2c13ca9..ad334c7dff8 100644 --- a/git-config/src/file/access/comfort.rs +++ b/git-config/src/file/access/comfort.rs @@ -2,7 +2,7 @@ use std::{borrow::Cow, convert::TryFrom}; use bstr::BStr; -use crate::{file::MetadataFilter, lookup, parse::section, value, File}; +use crate::{file::MetadataFilter, value, File}; /// Comfortable API for accessing values impl<'event> File<'event> { @@ -81,19 +81,22 @@ impl<'event> File<'event> { filter: &mut MetadataFilter, ) -> Option> { let section_name = section_name.as_ref(); + let section_ids = self + .section_ids_by_name_and_subname(section_name, subsection_name) + .ok()?; let key = key.as_ref(); - match self.raw_value_filter(section_name, subsection_name, key, filter) { - Ok(v) => Some(crate::Boolean::try_from(v).map(|b| b.into())), - Err(lookup::existing::Error::KeyMissing) => { - let section = self - .section_filter(section_name, subsection_name, filter) - .ok() - .flatten()?; - let key = section::Key::try_from(key).ok()?; - section.key_and_value_range_by(&key).map(|_| Ok(true)) + for section_id in section_ids.rev() { + let section = self.sections.get(§ion_id).expect("known section id"); + if !filter(section.meta()) { + continue; + } + match section.value_implicit(key) { + Some(Some(v)) => return Some(crate::Boolean::try_from(v).map(|b| b.into())), + Some(None) => return Some(Ok(true)), + None => continue, } - Err(_err) => None, } + None } /// Like [`value()`][File::value()], but returning an `Option` if the integer wasn't found. 
diff --git a/git-config/src/file/mutable/section.rs b/git-config/src/file/mutable/section.rs index 26b1abacc76..225777e08b9 100644 --- a/git-config/src/file/mutable/section.rs +++ b/git-config/src/file/mutable/section.rs @@ -37,9 +37,12 @@ impl<'a, 'event> SectionMut<'a, 'event> { } body.push(Event::SectionKey(key)); - if let Some(value) = value { - body.extend(self.whitespace.key_value_separators()); - body.push(Event::Value(escape_value(value).into())); + match value { + Some(value) => { + body.extend(self.whitespace.key_value_separators()); + body.push(Event::Value(escape_value(value).into())); + } + None => body.push(Event::Value(Cow::Borrowed("".into()))), } if self.implicit_newline { body.push(Event::Newline(BString::from(self.newline.to_vec()).into())); diff --git a/git-config/src/file/section/body.rs b/git-config/src/file/section/body.rs index 9f904fa64ea..91ed6c5a383 100644 --- a/git-config/src/file/section/body.rs +++ b/git-config/src/file/section/body.rs @@ -18,22 +18,32 @@ impl<'event> Body<'event> { /// Note that we consider values without key separator `=` non-existing. #[must_use] pub fn value(&self, key: impl AsRef) -> Option> { + self.value_implicit(key).flatten() + } + + /// Retrieves the last matching value in a section with the given key, if present, and indicates an implicit value with `Some(None)`, + /// and a non-existing one as `None` + #[must_use] + pub fn value_implicit(&self, key: impl AsRef) -> Option>> { let key = Key::from_str_unchecked(key.as_ref()); let (_key_range, range) = self.key_and_value_range_by(&key)?; - let range = range?; + let range = match range { + None => return Some(None), + Some(range) => range, + }; let mut concatenated = BString::default(); for event in &self.0[range] { match event { Event::Value(v) => { - return Some(normalize_bstr(v.as_ref())); + return Some(Some(normalize_bstr(v.as_ref()))); } Event::ValueNotDone(v) => { concatenated.push_str(v.as_ref()); } Event::ValueDone(v) => { concatenated.push_str(v.as_ref()); - return Some(normalize_bstring(concatenated)); + return Some(Some(normalize_bstring(concatenated))); } _ => (), } diff --git a/git-config/src/file/util.rs b/git-config/src/file/util.rs index 9b8405b3cae..daa2462a171 100644 --- a/git-config/src/file/util.rs +++ b/git-config/src/file/util.rs @@ -124,9 +124,6 @@ impl<'event> File<'event> { .get(§ion_name) .ok_or(lookup::existing::Error::SectionMissing)?; let mut maybe_ids = None; - // Don't simplify if and matches here -- the for loop currently needs - // `n + 1` checks, while the if and matches will result in the for loop - // needing `2n` checks. 
if let Some(subsection_name) = subsection_name { let subsection_name: &BStr = subsection_name.into(); for node in section_ids { @@ -152,16 +149,17 @@ impl<'event> File<'event> { ) -> Result + '_, lookup::existing::Error> { let section_name = section::Name::from_str_unchecked(section_name); match self.section_lookup_tree.get(§ion_name) { - Some(lookup) => Ok(lookup.iter().flat_map({ - let section_order = &self.section_order; - move |node| match node { - SectionBodyIdsLut::Terminal(v) => Box::new(v.iter().copied()) as Box>, - SectionBodyIdsLut::NonTerminal(v) => Box::new({ - let v: Vec<_> = v.values().flatten().copied().collect(); - section_order.iter().filter(move |a| v.contains(a)).copied() - }), + Some(lookup) => { + let mut lut = Vec::with_capacity(self.section_order.len()); + for node in lookup { + match node { + SectionBodyIdsLut::Terminal(v) => lut.extend(v.iter().copied()), + SectionBodyIdsLut::NonTerminal(v) => lut.extend(v.values().flatten().copied()), + } } - })), + + Ok(self.section_order.iter().filter(move |a| lut.contains(a)).copied()) + } None => Err(lookup::existing::Error::SectionMissing), } } diff --git a/git-config/src/parse/nom/mod.rs b/git-config/src/parse/nom/mod.rs index 1a2da7b8516..cc9048bf0c5 100644 --- a/git-config/src/parse/nom/mod.rs +++ b/git-config/src/parse/nom/mod.rs @@ -290,7 +290,7 @@ fn config_value<'a>(i: &'a [u8], dispatch: &mut impl FnMut(Event<'a>)) -> IResul } else { // This is a special way of denoting 'empty' values which a lot of code depends on. // Hence, rather to fix this everywhere else, leave it here and fix it where it matters, namely - // when it's about differentiating between a missing key-vaue separator, and one followed by emptiness. + // when it's about differentiating between a missing key-value separator, and one followed by emptiness. dispatch(Event::Value(Cow::Borrowed("".into()))); Ok((i, 0)) } diff --git a/git-config/tests/file/access/read_only.rs b/git-config/tests/file/access/read_only.rs index 50941ac77ed..8af11d9adae 100644 --- a/git-config/tests/file/access/read_only.rs +++ b/git-config/tests/file/access/read_only.rs @@ -42,11 +42,21 @@ fn get_value_for_all_provided_values() -> crate::Result { assert!( config.value::("core", None, "bool-implicit").is_err(), - "this cannot work like in git as the value isn't there for us" + "this cannot work like in git as the original value isn't there for us" ); assert!( config.boolean("core", None, "bool-implicit").expect("present")?, - "this should work" + "implicit booleans resolve to being true" + ); + assert_eq!( + config.string("core", None, "bool-implicit"), + None, + "unset values are not present" + ); + assert_eq!( + config.strings("core", None, "bool-implicit").expect("present"), + &[cow_str("")], + "unset values show up as empty within a string array" ); assert_eq!(config.string("doesnt", None, "exist"), None); @@ -146,8 +156,6 @@ fn get_value_for_all_provided_values() -> crate::Result { Ok(()) } -/// There was a regression where lookup would fail because we only checked the -/// last section entry for any given section and subsection #[test] fn get_value_looks_up_all_sections_before_failing() -> crate::Result { let config = r#" @@ -163,14 +171,17 @@ fn get_value_looks_up_all_sections_before_failing() -> crate::Result { // Checks that we check the last entry first still assert!( !file.value::("core", None, "bool-implicit")?.0, - "this one can't do it, needs special handling" + "implicit bool is invisible to `value` and boolean is the only value we want. 
Would have to special case it." ); assert!( - !file.boolean("core", None, "bool-implicit").expect("present")?, - "this should work, but doesn't yet" + file.boolean("core", None, "bool-implicit").expect("present")?, + "correct handling of booleans is implemented specifically" ); - assert!(!file.value::("core", None, "bool-explicit")?.0); + assert!( + !file.value::("core", None, "bool-explicit")?.0, + "explicit values always work" + ); Ok(()) } @@ -313,3 +324,34 @@ fn multi_line_value_outer_quotes_escaped_inner_quotes() { let expected = r#"!f() { git status; git add -A; git commit -m "$1"; git push -f; git log -1; }; f; unset f"#; assert_eq!(config.raw_value("alias", None, "save").unwrap().as_ref(), expected); } + +#[test] +fn overrides_with_implicit_booleans_work_in_single_section() { + let config = r#" + [a] + b = false + b + "#; + let config = File::try_from(config).unwrap(); + assert_eq!( + config.boolean("a", None, "b"), + Some(Ok(true)), + "empty implicit booleans " + ); +} + +#[test] +fn overrides_with_implicit_booleans_work_across_sections() { + let config = r#" + [a] + b = false + [a] + b + "#; + let config = File::try_from(config).unwrap(); + assert_eq!( + config.boolean("a", None, "b"), + Some(Ok(true)), + "empty implicit booleans " + ); +} diff --git a/git-config/tests/file/init/from_paths/includes/conditional/gitdir/util.rs b/git-config/tests/file/init/from_paths/includes/conditional/gitdir/util.rs index 064ce420163..25d6ebf637f 100644 --- a/git-config/tests/file/init/from_paths/includes/conditional/gitdir/util.rs +++ b/git-config/tests/file/init/from_paths/includes/conditional/gitdir/util.rs @@ -9,6 +9,7 @@ use std::{ use bstr::{BString, ByteSlice}; use git_config::file::init::{self}; +use crate::file::init::from_paths::includes::conditional::git_init; use crate::file::{ cow_str, init::from_paths::{escape_backslashes, includes::conditional::options_with_git_dir}, @@ -216,7 +217,7 @@ fn write_main_config( env: &GitEnv, overwrite_config_location: ConfigLocation, ) -> crate::Result { - git_repository::init(env.worktree_dir())?; + git_init(env.worktree_dir(), false)?; if overwrite_config_location == ConfigLocation::Repo { write_append_config_value(env.git_dir().join("config"), "base-value")?; diff --git a/git-config/tests/file/init/from_paths/includes/conditional/mod.rs b/git-config/tests/file/init/from_paths/includes/conditional/mod.rs index 772cb4c4f21..376686a9276 100644 --- a/git-config/tests/file/init/from_paths/includes/conditional/mod.rs +++ b/git-config/tests/file/init/from_paths/includes/conditional/mod.rs @@ -4,6 +4,7 @@ use git_config::{ file::{includes, init}, path, File, }; +use git_repository as git; use tempfile::tempdir; use crate::file::{cow_str, init::from_paths::escape_backslashes}; @@ -138,6 +139,17 @@ fn options_with_git_dir(git_dir: &Path) -> init::Options<'_> { } } +fn git_init(path: impl AsRef, bare: bool) -> crate::Result { + Ok(git::ThreadSafeRepository::init_opts( + path, + bare.then(|| git::create::Kind::Bare) + .unwrap_or(git::create::Kind::WithWorktree), + git::create::Options::default(), + git::open::Options::isolated(), + )? 
+ .to_thread_local()) +} + fn create_symlink(from: impl AsRef, to: impl AsRef) { std::fs::create_dir_all(from.as_ref().parent().unwrap()).unwrap(); #[cfg(not(windows))] diff --git a/git-config/tests/file/init/from_paths/includes/conditional/onbranch.rs b/git-config/tests/file/init/from_paths/includes/conditional/onbranch.rs index 38a972ccb7e..80ab38d00bc 100644 --- a/git-config/tests/file/init/from_paths/includes/conditional/onbranch.rs +++ b/git-config/tests/file/init/from_paths/includes/conditional/onbranch.rs @@ -17,6 +17,7 @@ use git_repository as git; use tempfile::tempdir; use crate::file::cow_str; +use crate::file::init::from_paths::includes::conditional::git_init; type Result = crate::Result; @@ -188,7 +189,7 @@ struct GitEnv { impl GitEnv { fn new() -> crate::Result { let dir = tempdir()?; - let repo = git_repository::init_bare(dir.path())?; + let repo = git_init(dir.path(), true)?; Ok(GitEnv { repo, dir }) } } @@ -284,8 +285,8 @@ value = branch-override-by-include }, deref: false, }), - git_repository::lock::acquire::Fail::Immediately, - git_repository::lock::acquire::Fail::Immediately, + git::lock::acquire::Fail::Immediately, + git::lock::acquire::Fail::Immediately, )? .commit(repo.committer_or_default())?; diff --git a/git-config/tests/file/init/from_paths/mod.rs b/git-config/tests/file/init/from_paths/mod.rs index 8c807e687b8..27aab932388 100644 --- a/git-config/tests/file/init/from_paths/mod.rs +++ b/git-config/tests/file/init/from_paths/mod.rs @@ -71,7 +71,7 @@ fn frontmatter_is_maintained_in_multiple_files() -> crate::Result { fs::write(a_path.as_path(), b";before a\n[core]\na = true")?; let b_path = dir.path().join("b"); - fs::write(b_path.as_path(), b";before b\n [core]\nb = true")?; + fs::write(b_path.as_path(), b";before b\n [core]\nb")?; let c_path = dir.path().join("c"); fs::write(c_path.as_path(), b"# nothing in c")?; @@ -84,15 +84,35 @@ fn frontmatter_is_maintained_in_multiple_files() -> crate::Result { assert_eq!( config.to_string(), - ";before a\n[core]\na = true\n;before b\n [core]\nb = true\n# nothing in c\n; nothing in d\n" + ";before a\n[core]\na = true\n;before b\n [core]\nb\n# nothing in c\n; nothing in d\n" + ); + assert_eq!( + config.strings("core", None, "a").expect("present").len(), + 1, + "precondition" + ); + assert_eq!( + config.strings("core", None, "b").expect("present").len(), + 1, + "precondition" ); config.append(config.clone()); assert_eq!( config.to_string(), - ";before a\n[core]\na = true\n;before b\n [core]\nb = true\n# nothing in c\n; nothing in d\n;before a\n[core]\na = true\n;before b\n [core]\nb = true\n# nothing in c\n; nothing in d\n", + ";before a\n[core]\na = true\n;before b\n [core]\nb\n# nothing in c\n; nothing in d\n;before a\n[core]\na = true\n;before b\n [core]\nb\n# nothing in c\n; nothing in d\n", "other files post-section matter works as well, adding newlines as needed" ); + assert_eq!( + config.strings("core", None, "a").expect("present").len(), + 2, + "the same value is now present twice" + ); + assert_eq!( + config.strings("core", None, "b").expect("present").len(), + 2, + "the same value is now present twice" + ); assert_eq!( config diff --git a/git-config/tests/file/mutable/section.rs b/git-config/tests/file/mutable/section.rs index b8e890480d5..c1f78a70ac9 100644 --- a/git-config/tests/file/mutable/section.rs +++ b/git-config/tests/file/mutable/section.rs @@ -123,6 +123,7 @@ mod set { mod push { use std::convert::{TryFrom, TryInto}; + use crate::file::cow_str; use git_config::parse::section::Key; #[test] @@ -131,6 
+132,12 @@ mod push { let mut section = file.section_mut_or_create_new("a", Some("sub"))?; section.push("key".try_into()?, None); let expected = format!("[a \"sub\"]{nl}\tkey{nl}", nl = section.newline()); + assert_eq!(section.value("key"), None, "single value counts as None"); + assert_eq!( + section.values("key"), + &[cow_str("")], + "multi-value counts as empty value" + ); assert_eq!(file.to_bstring(), expected); Ok(()) } diff --git a/git-diff/src/text.rs b/git-diff/src/text.rs index 2951744ab98..6179eaa3179 100644 --- a/git-diff/src/text.rs +++ b/git-diff/src/text.rs @@ -3,24 +3,24 @@ use git_object::bstr::BStr; pub use imara_diff as imara; pub use imara_diff::Algorithm; -/// Create a diff yielding the changes to turn `old` into `new` with `algorithm`. `make_input` obtains the `old` and `new` -/// byte buffers and produces an interner, which is then passed to `make_sink` for creating a processor over the changes. +/// Create a diff yielding the changes to turn `old` into `new` with `algorithm`. `new_input` obtains the `old` and `new` +/// byte buffers and produces an interner, which is then passed to `new_sink` for creating a processor over the changes. /// /// See [the `imara-diff` crate documentation][imara] for information on how to implement a [`Sink`][imara::Sink]. pub fn with<'a, FnI, FnS, S>( old: &'a BStr, new: &'a BStr, algorithm: Algorithm, - make_input: FnI, - make_sink: FnS, + new_input: FnI, + new_sink: FnS, ) -> (imara::intern::InternedInput<&'a [u8]>, S::Out) where FnI: FnOnce(&'a [u8], &'a [u8]) -> imara::intern::InternedInput<&'a [u8]>, FnS: FnOnce(&imara::intern::InternedInput<&'a [u8]>) -> S, S: imara_diff::Sink, { - let input = make_input(old.as_ref(), new.as_ref()); - let sink = make_sink(&input); + let input = new_input(old.as_ref(), new.as_ref()); + let sink = new_sink(&input); let out = imara::diff(algorithm, &input, sink); (input, out) } diff --git a/git-features/Cargo.toml b/git-features/Cargo.toml index f79e53aba1b..84183b39e35 100644 --- a/git-features/Cargo.toml +++ b/git-features/Cargo.toml @@ -118,7 +118,7 @@ crc32fast = { version = "1.2.1", optional = true } sha1 = { version = "0.10.0", optional = true } # progress -prodash = { version = "20.2.0", optional = true, default-features = false, features = ["unit-bytes", "unit-human"] } +prodash = { version = "21", optional = true, default-features = false, features = ["unit-bytes", "unit-human"] } # pipe bytes = { version = "1.0.0", optional = true } diff --git a/git-features/src/fs.rs b/git-features/src/fs.rs index 2121eabe901..33f69b099de 100644 --- a/git-features/src/fs.rs +++ b/git-features/src/fs.rs @@ -88,6 +88,15 @@ mod snapshot { modified: std::time::SystemTime, } + impl Clone for Snapshot { + fn clone(&self) -> Self { + Self { + value: self.value.clone(), + modified: self.modified, + } + } + } + /// A snapshot of a resource which is up-to-date in the moment it is retrieved. pub type SharedSnapshot = OwnShared>; diff --git a/git-index/src/lib.rs b/git-index/src/lib.rs index 7fc52a3a039..c0167a7a153 100644 --- a/git-index/src/lib.rs +++ b/git-index/src/lib.rs @@ -61,6 +61,7 @@ pub struct Entry { } /// An index file whose state was read from a file on disk. +#[derive(Clone)] pub struct File { /// The state containing the actual index data. 
pub(crate) state: State, diff --git a/git-pack/src/bundle/write/mod.rs b/git-pack/src/bundle/write/mod.rs index 43e559959dd..b79fd480218 100644 --- a/git-pack/src/bundle/write/mod.rs +++ b/git-pack/src/bundle/write/mod.rs @@ -150,15 +150,19 @@ impl crate::Bundle { /// As it sends portions of the input to a thread it requires the 'static lifetime for the interrupt flags. This can only /// be satisfied by a static AtomicBool which is only suitable for programs that only run one of these operations at a time /// or don't mind that all of them abort when the flag is set. - pub fn write_to_directory_eagerly( + pub fn write_to_directory_eagerly
<P>
( pack: impl io::Read + Send + 'static, pack_size: Option, directory: Option>, - mut progress: impl Progress, + mut progress: P, should_interrupt: &'static AtomicBool, thin_pack_base_object_lookup_fn: Option, options: Options, - ) -> Result { + ) -> Result + where + P: Progress, + P::SubProgress: 'static, + { let mut read_progress = progress.add_child("read pack"); read_progress.init(pack_size.map(|s| s as usize), progress::bytes()); let pack = progress::Read { diff --git a/git-pack/src/data/output/entry/iter_from_counts.rs b/git-pack/src/data/output/entry/iter_from_counts.rs index 37725b99d7c..4d03326efa6 100644 --- a/git-pack/src/data/output/entry/iter_from_counts.rs +++ b/git-pack/src/data/output/entry/iter_from_counts.rs @@ -36,7 +36,7 @@ use crate::data::output; pub fn iter_from_counts( mut counts: Vec, db: Find, - mut progress: impl Progress, + mut progress: impl Progress + 'static, Options { version, mode, diff --git a/git-pack/src/index/traverse/mod.rs b/git-pack/src/index/traverse/mod.rs index 6f6de9e7d41..955a3a37940 100644 --- a/git-pack/src/index/traverse/mod.rs +++ b/git-pack/src/index/traverse/mod.rs @@ -95,7 +95,7 @@ impl index::File { git_object::Kind, &[u8], &index::Entry, - &mut <
<P as Progress>
::SubProgress as Progress>::SubProgress, + &mut ::SubProgress, ) -> Result<(), E>, F: Fn() -> C + Send + Clone, { diff --git a/git-pack/src/index/traverse/with_index.rs b/git-pack/src/index/traverse/with_index.rs index 06af9951032..a85eaf991d0 100644 --- a/git-pack/src/index/traverse/with_index.rs +++ b/git-pack/src/index/traverse/with_index.rs @@ -38,7 +38,7 @@ impl index::File { git_object::Kind, &[u8], &index::Entry, - &mut <
<P as Progress>
::SubProgress as Progress>::SubProgress, + &mut ::SubProgress, ) -> Result<(), E>, E: std::error::Error + Send + Sync + 'static, { diff --git a/git-pack/src/index/traverse/with_lookup.rs b/git-pack/src/index/traverse/with_lookup.rs index a450264ae99..7f7d209a5b2 100644 --- a/git-pack/src/index/traverse/with_lookup.rs +++ b/git-pack/src/index/traverse/with_lookup.rs @@ -59,7 +59,7 @@ impl index::File { git_object::Kind, &[u8], &index::Entry, - &mut <
<P as Progress>
::SubProgress as Progress>::SubProgress, + &mut ::SubProgress, ) -> Result<(), E>, F: Fn() -> C + Send + Clone, { diff --git a/git-protocol/src/fetch/command.rs b/git-protocol/src/fetch/command.rs index 3efa2fddc0c..0a82d6ec2f3 100644 --- a/git-protocol/src/fetch/command.rs +++ b/git-protocol/src/fetch/command.rs @@ -31,7 +31,7 @@ mod with_io { /// Only V2 fn all_argument_prefixes(&self) -> &'static [&'static str] { match self { - Command::LsRefs => &["symrefs", "peel", "ref-prefix "], + Command::LsRefs => &["symrefs", "peel", "ref-prefix ", "unborn"], Command::Fetch => &[ "want ", // hex oid "have ", // hex oid diff --git a/git-protocol/src/fetch/refs/function.rs b/git-protocol/src/fetch/refs/function.rs index e4d2ad7df95..83899ad153a 100644 --- a/git-protocol/src/fetch/refs/function.rs +++ b/git-protocol/src/fetch/refs/function.rs @@ -32,6 +32,13 @@ pub async fn refs( let ls_refs = Command::LsRefs; let mut ls_features = ls_refs.default_features(protocol_version, capabilities); let mut ls_args = ls_refs.initial_arguments(&ls_features); + if capabilities + .capability("ls-refs") + .and_then(|cap| cap.supports("unborn")) + .unwrap_or_default() + { + ls_args.push("unborn".into()); + } let refs = match prepare_ls_refs(capabilities, &mut ls_args, &mut ls_features) { Ok(LsRefsAction::Skip) => Vec::new(), Ok(LsRefsAction::Continue) => { diff --git a/git-protocol/src/fetch/refs/mod.rs b/git-protocol/src/fetch/refs/mod.rs index 122df7f644b..7c8f4c4c9be 100644 --- a/git-protocol/src/fetch/refs/mod.rs +++ b/git-protocol/src/fetch/refs/mod.rs @@ -66,32 +66,47 @@ pub enum Ref { }, /// A symbolic ref pointing to `target` ref, which in turn points to an `object` Symbolic { - /// The name at which the symbolic ref is located, like `refs/heads/main`. + /// The name at which the symbolic ref is located, like `HEAD`. full_ref_name: BString, - /// The path of the ref the symbolic ref points to, see issue [#205] for details + /// The path of the ref the symbolic ref points to, like `refs/heads/main`. + /// + /// See issue [#205] for details /// /// [#205]: https://github.com/Byron/gitoxide/issues/205 target: BString, /// The hash of the object the `target` ref points to. object: git_hash::ObjectId, }, + /// A ref is unborn on the remote and just points to the initial, unborn branch, as is the case in a newly initialized repository + /// or dangling symbolic refs. + Unborn { + /// The name at which the ref is located, typically `HEAD`. + full_ref_name: BString, + /// The path of the ref the symbolic ref points to, like `refs/heads/main`, even though the `target` does not yet exist. + target: BString, + }, } impl Ref { /// Provide shared fields referring to the ref itself, namely `(name, target, [peeled])`. /// In case of peeled refs, the tag object itself is returned as it is what the ref directly refers to, and target of the tag is returned /// as `peeled`. - pub fn unpack(&self) -> (&BStr, &git_hash::oid, Option<&git_hash::oid>) { + /// If `unborn`, the first object id will be the null oid. + pub fn unpack(&self) -> (&BStr, Option<&git_hash::oid>, Option<&git_hash::oid>) { match self { Ref::Direct { full_ref_name, object } | Ref::Symbolic { full_ref_name, object, .. 
- } => (full_ref_name.as_ref(), object, None), + } => (full_ref_name.as_ref(), Some(object), None), Ref::Peeled { full_ref_name, tag: object, object: peeled, - } => (full_ref_name.as_ref(), object, Some(peeled)), + } => (full_ref_name.as_ref(), Some(object), Some(peeled)), + Ref::Unborn { + full_ref_name, + target: _, + } => (full_ref_name.as_ref(), None, None), } } } diff --git a/git-protocol/src/fetch/refs/shared.rs b/git-protocol/src/fetch/refs/shared.rs index 87e0541d49a..38ae4995000 100644 --- a/git-protocol/src/fetch/refs/shared.rs +++ b/git-protocol/src/fetch/refs/shared.rs @@ -171,7 +171,11 @@ pub(in crate::fetch::refs) fn parse_v2(line: &str) -> Result { let mut tokens = trimmed.splitn(3, ' '); match (tokens.next(), tokens.next()) { (Some(hex_hash), Some(path)) => { - let id = git_hash::ObjectId::from_hex(hex_hash.as_bytes())?; + let id = if hex_hash == "unborn" { + None + } else { + Some(git_hash::ObjectId::from_hex(hex_hash.as_bytes())?) + }; if path.is_empty() { return Err(Error::MalformedV2RefLine(trimmed.to_owned())); } @@ -186,17 +190,27 @@ pub(in crate::fetch::refs) fn parse_v2(line: &str) -> Result { "peeled" => Ref::Peeled { full_ref_name: path.into(), object: git_hash::ObjectId::from_hex(value.as_bytes())?, - tag: id, + tag: id.ok_or(Error::InvariantViolation { + message: "got 'unborn' as tag target", + })?, }, "symref-target" => match value { "(null)" => Ref::Direct { full_ref_name: path.into(), - object: id, + object: id.ok_or(Error::InvariantViolation { + message: "got 'unborn' while (null) was a symref target", + })?, }, - name => Ref::Symbolic { - full_ref_name: path.into(), - object: id, - target: name.into(), + name => match id { + Some(id) => Ref::Symbolic { + full_ref_name: path.into(), + object: id, + target: name.into(), + }, + None => Ref::Unborn { + full_ref_name: path.into(), + target: name.into(), + }, }, }, _ => { @@ -211,7 +225,9 @@ pub(in crate::fetch::refs) fn parse_v2(line: &str) -> Result { } } else { Ref::Direct { - object: id, + object: id.ok_or(Error::InvariantViolation { + message: "got 'unborn' as object name of direct reference", + })?, full_ref_name: path.into(), } }) diff --git a/git-protocol/src/fetch/tests/refs.rs b/git-protocol/src/fetch/tests/refs.rs index e6ca670ba59..4f4259df55b 100644 --- a/git-protocol/src/fetch/tests/refs.rs +++ b/git-protocol/src/fetch/tests/refs.rs @@ -7,6 +7,8 @@ use crate::fetch::{refs, refs::shared::InternalRef, Ref}; async fn extract_references_from_v2_refs() { let input = &mut "808e50d724f604f69ab93c6da2919c014667bedb HEAD symref-target:refs/heads/main 808e50d724f604f69ab93c6da2919c014667bedb MISSING_NAMESPACE_TARGET symref-target:(null) +unborn HEAD symref-target:refs/heads/main +unborn refs/heads/symbolic symref-target:refs/heads/target 808e50d724f604f69ab93c6da2919c014667bedb refs/heads/main 7fe1b98b39423b71e14217aa299a03b7c937d656 refs/tags/foo peeled:808e50d724f604f69ab93c6da2919c014667bedb 7fe1b98b39423b71e14217aa299a03b7c937d6ff refs/tags/blaz @@ -27,6 +29,14 @@ async fn extract_references_from_v2_refs() { full_ref_name: "MISSING_NAMESPACE_TARGET".into(), object: oid("808e50d724f604f69ab93c6da2919c014667bedb") }, + Ref::Unborn { + full_ref_name: "HEAD".into(), + target: "refs/heads/main".into(), + }, + Ref::Unborn { + full_ref_name: "refs/heads/symbolic".into(), + target: "refs/heads/target".into(), + }, Ref::Direct { full_ref_name: "refs/heads/main".into(), object: oid("808e50d724f604f69ab93c6da2919c014667bedb") diff --git a/git-protocol/src/fetch_fn.rs b/git-protocol/src/fetch_fn.rs index 
e19d78deca8..172261bff13 100644 --- a/git-protocol/src/fetch_fn.rs +++ b/git-protocol/src/fetch_fn.rs @@ -44,17 +44,19 @@ impl Default for FetchConnection { /// /// _Note_ that depending on the `delegate`, the actual action performed can be `ls-refs`, `clone` or `fetch`. #[maybe_async] -pub async fn fetch( +pub async fn fetch( mut transport: T, mut delegate: D, authenticate: F, - mut progress: impl Progress, + mut progress: P, fetch_mode: FetchConnection, ) -> Result<(), Error> where F: FnMut(credentials::helper::Action) -> credentials::protocol::Result, D: Delegate, T: client::Transport, + P: Progress, + P::SubProgress: 'static, { let handshake::Outcome { server_protocol_version: protocol_version, @@ -141,10 +143,11 @@ where Ok(()) } -fn setup_remote_progress( - progress: &mut impl Progress, - reader: &mut Box, -) { +fn setup_remote_progress
<P>
(progress: &mut P, reader: &mut Box) +where + P: Progress, + P::SubProgress: 'static, +{ reader.set_progress_handler(Some(Box::new({ let mut remote_progress = progress.add_child("remote"); move |is_err: bool, data: &[u8]| { diff --git a/git-protocol/tests/fetch/mod.rs b/git-protocol/tests/fetch/mod.rs index dfdcdbdf309..644df5ab125 100644 --- a/git-protocol/tests/fetch/mod.rs +++ b/git-protocol/tests/fetch/mod.rs @@ -41,7 +41,9 @@ impl fetch::DelegateBlocking for CloneDelegate { _previous_response: Option<&Response>, ) -> io::Result { for r in refs { - arguments.want(r.unpack().1); + if let Some(id) = r.unpack().1 { + arguments.want(id); + } } Ok(Action::Cancel) } diff --git a/git-ref/src/store/file/mod.rs b/git-ref/src/store/file/mod.rs index a71a38a707a..af4be110529 100644 --- a/git-ref/src/store/file/mod.rs +++ b/git-ref/src/store/file/mod.rs @@ -62,11 +62,11 @@ mod access { } /// A transaction on a file store -pub struct Transaction<'s> { +pub struct Transaction<'s, 'p> { store: &'s Store, packed_transaction: Option, updates: Option>, - packed_refs: transaction::PackedRefs, + packed_refs: transaction::PackedRefs<'p>, } pub(in crate::store_impl::file) fn path_to_name<'a>(path: impl Into>) -> Cow<'a, BStr> { diff --git a/git-ref/src/store/file/transaction/commit.rs b/git-ref/src/store/file/transaction/commit.rs index f76ccb7f538..2db1f5e07d3 100644 --- a/git-ref/src/store/file/transaction/commit.rs +++ b/git-ref/src/store/file/transaction/commit.rs @@ -4,7 +4,7 @@ use crate::{ Target, }; -impl<'s> Transaction<'s> { +impl<'s, 'p> Transaction<'s, 'p> { /// Make all [prepared][Transaction::prepare()] permanent and return the performed edits which represent the current /// state of the affected refs in the ref store in that instant. Please note that the obtained edits may have been /// adjusted to contain more dependent edits or additional information. @@ -42,26 +42,38 @@ impl<'s> Transaction<'s> { RefLog::AndReference => (true, true), }; if update_reflog { - match new { - Target::Symbolic(_) => {} // no reflog for symref changes + let log_update = match new { + Target::Symbolic(_) => { + // no reflog for symref changes, unless the ref is new and we can obtain a peeled id + // identified by the expectation of what could be there, as is the case when cloning. 
+ match expected { + PreviousValue::ExistingMustMatch(Target::Peeled(oid)) => { + Some((Some(git_hash::ObjectId::null(oid.kind())), oid)) + } + _ => None, + } + } Target::Peeled(new_oid) => { let previous = match expected { PreviousValue::MustExistAndMatch(Target::Peeled(oid)) => Some(oid.to_owned()), _ => None, } .or(change.leaf_referent_previous_oid); - let do_update = previous.as_ref().map_or(true, |previous| previous != new_oid); - if do_update { - self.store.reflog_create_or_append( - change.update.name.as_ref(), - &lock, - previous, - new_oid, - committer, - log.message.as_ref(), - log.force_create_reflog, - )?; - } + Some((previous, new_oid)) + } + }; + if let Some((previous, new_oid)) = log_update { + let do_update = previous.as_ref().map_or(true, |previous| previous != new_oid); + if do_update { + self.store.reflog_create_or_append( + change.update.name.as_ref(), + &lock, + previous, + new_oid, + committer, + log.message.as_ref(), + log.force_create_reflog, + )?; } } } diff --git a/git-ref/src/store/file/transaction/mod.rs b/git-ref/src/store/file/transaction/mod.rs index e1b0af63844..5a1c7267bcb 100644 --- a/git-ref/src/store/file/transaction/mod.rs +++ b/git-ref/src/store/file/transaction/mod.rs @@ -10,24 +10,24 @@ use crate::{ /// used to obtain the peeled object ids for storage in packed-refs files. /// /// Resolution means to follow tag objects until the end of the chain. -pub type FindObjectFn = - dyn FnMut( +pub type FindObjectFn<'a> = dyn FnMut( git_hash::ObjectId, &mut Vec, - ) -> Result, Box>; + ) -> Result, Box> + + 'a; /// How to handle packed refs during a transaction -pub enum PackedRefs { +pub enum PackedRefs<'a> { /// Only propagate deletions of references. This is the default DeletionsOnly, /// Propagate deletions as well as updates to references which are peeled, that is contain an object id - DeletionsAndNonSymbolicUpdates(Box), + DeletionsAndNonSymbolicUpdates(Box>), /// Propagate deletions as well as updates to references which are peeled, that is contain an object id. Furthermore delete the /// reference which is originally updated if it exists. If it doesn't, the new value will be written into the packed ref right away. - DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(Box), + DeletionsAndNonSymbolicUpdatesRemoveLooseSourceReference(Box>), } -impl Default for PackedRefs { +impl Default for PackedRefs<'_> { fn default() -> Self { PackedRefs::DeletionsOnly } @@ -71,7 +71,7 @@ impl file::Store { /// will never have been altered. /// /// The transaction inherits the parent namespace. 
- pub fn transaction(&self) -> Transaction<'_> { + pub fn transaction(&self) -> Transaction<'_, '_> { Transaction { store: self, packed_transaction: None, @@ -81,9 +81,9 @@ impl file::Store { } } -impl<'s> Transaction<'s> { +impl<'s, 'p> Transaction<'s, 'p> { /// Configure the way packed refs are handled during the transaction - pub fn packed_refs(mut self, packed_refs: PackedRefs) -> Self { + pub fn packed_refs(mut self, packed_refs: PackedRefs<'p>) -> Self { self.packed_refs = packed_refs; self } diff --git a/git-ref/src/store/file/transaction/prepare.rs b/git-ref/src/store/file/transaction/prepare.rs index 2f38dad6ea3..c74c694a3f9 100644 --- a/git-ref/src/store/file/transaction/prepare.rs +++ b/git-ref/src/store/file/transaction/prepare.rs @@ -12,7 +12,7 @@ use crate::{ FullName, FullNameRef, Reference, Target, }; -impl<'s> Transaction<'s> { +impl<'s, 'p> Transaction<'s, 'p> { fn lock_ref_and_apply_change( store: &file::Store, lock_fail_mode: git_lock::acquire::Fail, @@ -167,7 +167,7 @@ impl<'s> Transaction<'s> { } } -impl<'s> Transaction<'s> { +impl<'s, 'p> Transaction<'s, 'p> { /// Prepare for calling [`commit(…)`][Transaction::commit()] in a way that can be rolled back perfectly. /// /// If the operation succeeds, the transaction can be committed or dropped to cause a rollback automatically. diff --git a/git-ref/src/store/packed/transaction.rs b/git-ref/src/store/packed/transaction.rs index 19f942a629c..91cb8eb78d3 100644 --- a/git-ref/src/store/packed/transaction.rs +++ b/git-ref/src/store/packed/transaction.rs @@ -38,7 +38,7 @@ impl packed::Transaction { pub fn prepare( mut self, edits: impl IntoIterator, - find: &mut FindObjectFn, + find: &mut FindObjectFn<'_>, ) -> Result { assert!(self.edits.is_none(), "BUG: cannot call prepare(…) more than once"); let buffer = &self.buffer; diff --git a/git-ref/tests/file/transaction/prepare_and_commit/create_or_update.rs b/git-ref/tests/file/transaction/prepare_and_commit/create_or_update.rs index 352cf53c4cf..8621c0c7b11 100644 --- a/git-ref/tests/file/transaction/prepare_and_commit/create_or_update.rs +++ b/git-ref/tests/file/transaction/prepare_and_commit/create_or_update.rs @@ -405,6 +405,54 @@ fn cancellation_after_preparation_leaves_no_change() -> crate::Result { Ok(()) } +#[test] +fn symbolic_reference_writes_reflog_if_previous_value_is_set() -> crate::Result { + let (_keep, store) = empty_store()?; + let referent = "refs/heads/alt-main"; + assert!( + store.try_find_loose(referent)?.is_none(), + "the reference does not exist" + ); + let log = LogChange { + mode: RefLog::AndReference, + force_create_reflog: false, + message: "message".into(), + }; + let new_head_value = Target::Symbolic(referent.try_into().unwrap()); + let new_oid = hex_to_id("28ce6a8b26aa170e1de65536fe8abe1832bd3242"); + let edits = store + .transaction() + .prepare( + Some(RefEdit { + change: Change::Update { + log, + new: new_head_value, + expected: PreviousValue::ExistingMustMatch(Target::Peeled(new_oid)), + }, + name: "refs/heads/symbolic".try_into()?, + deref: false, + }), + Fail::Immediately, + Fail::Immediately, + )? 
+ .commit(committer().to_ref())?; + assert_eq!(edits.len(), 1, "no split was performed"); + let head = store.find_loose(&edits[0].name)?; + assert_eq!(head.name.as_bstr(), "refs/heads/symbolic"); + assert_eq!(head.kind(), git_ref::Kind::Symbolic); + assert_eq!( + head.target.to_ref().try_name().map(|n| n.as_bstr()), + Some(referent.as_bytes().as_bstr()) + ); + assert!( + head.log_exists(&store), + "reflog is written for new symbolic ref with information about the peeled target id" + ); + assert!(store.try_find_loose(referent)?.is_none(), "referent wasn't created"); + + Ok(()) +} + #[test] fn symbolic_head_missing_referent_then_update_referent() -> crate::Result { for reflog_writemode in &[WriteReflog::Normal, WriteReflog::Disable, WriteReflog::Always] { diff --git a/git-refspec/src/instruction.rs b/git-refspec/src/instruction.rs index c37ceebcd35..990d0debc6f 100644 --- a/git-refspec/src/instruction.rs +++ b/git-refspec/src/instruction.rs @@ -29,8 +29,10 @@ pub enum Push<'a> { /// Push a single ref or refspec to a known destination ref. Matching { /// The source ref or refspec to push. If pattern, it contains a single `*`. + /// Examples are refnames like `HEAD` or `refs/heads/main`, or patterns like `refs/heads/*`. src: &'a BStr, /// The ref to update with the object from `src`. If `src` is a pattern, this is a pattern too. + /// Examples are refnames like `HEAD` or `refs/heads/main`, or patterns like `refs/heads/*`. dst: &'a BStr, /// If true, allow non-fast-forward updates of `dest`. allow_non_fast_forward: bool, diff --git a/git-refspec/src/match_group/util.rs b/git-refspec/src/match_group/util.rs index 1199c6029a5..09bb3755420 100644 --- a/git-refspec/src/match_group/util.rs +++ b/git-refspec/src/match_group/util.rs @@ -69,28 +69,10 @@ impl<'a> Needle<'a> { Match::None } } - Needle::PartialName(name) => { - let mut buf = BString::from(Vec::with_capacity(128)); - for (base, append_head) in [ - ("", false), - ("refs/", false), - ("refs/tags/", false), - ("refs/heads/", false), - ("refs/remotes/", false), - ("refs/remotes/", true), - ] { - buf.clear(); - buf.push_str(base); - buf.push_str(name); - if append_head { - buf.push_str("/HEAD"); - } - if buf == item.full_ref_name { - return Match::Normal; - } - } - Match::None - } + Needle::PartialName(name) => crate::spec::expand_partial_name(name, |expanded| { + (expanded == item.full_ref_name).then(|| Match::Normal) + }) + .unwrap_or(Match::None), Needle::Glob { name, asterisk_pos } => { match item.full_ref_name.get(..*asterisk_pos) { Some(full_name_portion) if full_name_portion != name[..*asterisk_pos] => { diff --git a/git-refspec/src/spec.rs b/git-refspec/src/spec.rs index efd97abd3fe..a609e41dc6f 100644 --- a/git-refspec/src/spec.rs +++ b/git-refspec/src/spec.rs @@ -1,4 +1,4 @@ -use bstr::{BStr, ByteSlice}; +use bstr::{BStr, BString, ByteSlice}; use crate::{ instruction::{Fetch, Push}, @@ -122,17 +122,57 @@ impl<'a> RefSpecRef<'a> { } } - /// Derive the prefix from the `source` side of this spec, if possible. + /// Derive the prefix from the [`source`][Self::source()] side of this spec if this is a fetch spec, + /// or the [`destination`][Self::destination()] side if it is a push spec, if it is possible to do so without ambiguity. /// /// This means it starts with `refs/`. 
Note that it won't contain more than two components, like `refs/heads/` pub fn prefix(&self) -> Option<&BStr> { - let source = self.source()?; + if self.mode == Mode::Negative { + return None; + } + let source = match self.op { + Operation::Fetch => self.source(), + Operation::Push => self.destination(), + }?; + if source == "HEAD" { + return source.into(); + } let suffix = source.strip_prefix(b"refs/")?; let slash_pos = suffix.find_byte(b'/')?; let prefix = source[..="refs/".len() + slash_pos].as_bstr(); (!prefix.contains(&b'*')).then(|| prefix) } + /// As opposed to [`prefix()`][Self::prefix], if the latter is `None` it will expand to all possible prefixes and place them in `out`. + /// + /// Note that only the `source` side is considered. + pub fn expand_prefixes(&self, out: &mut Vec) { + match self.prefix() { + Some(prefix) => out.push(prefix.into()), + None => { + let source = match match self.op { + Operation::Fetch => self.source(), + Operation::Push => self.destination(), + } { + Some(source) => source, + None => return, + }; + if let Some(rest) = source.strip_prefix(b"refs/") { + if !rest.contains(&b'/') { + out.push(source.into()); + } + return; + } else if git_hash::ObjectId::from_hex(source).is_ok() { + return; + } + expand_partial_name(source, |expanded| { + out.push(expanded.into()); + None::<()> + }); + } + } + } + /// Transform the state of the refspec into an instruction making clear what to do with it. pub fn instruction(&self) -> Instruction<'a> { match self.op { @@ -191,3 +231,27 @@ impl RefSpecRef<'_> { } } } + +pub(crate) fn expand_partial_name(name: &BStr, mut cb: impl FnMut(&BStr) -> Option) -> Option { + use bstr::ByteVec; + let mut buf = BString::from(Vec::with_capacity(128)); + for (base, append_head) in [ + ("", false), + ("refs/", false), + ("refs/tags/", false), + ("refs/heads/", false), + ("refs/remotes/", false), + ("refs/remotes/", true), + ] { + buf.clear(); + buf.push_str(base); + buf.push_str(name); + if append_head { + buf.push_str("/HEAD"); + } + if let Some(res) = cb(buf.as_ref()) { + return Some(res); + } + } + None +} diff --git a/git-refspec/tests/spec/mod.rs b/git-refspec/tests/spec/mod.rs index 329f8689eae..891cd805398 100644 --- a/git-refspec/tests/spec/mod.rs +++ b/git-refspec/tests/spec/mod.rs @@ -1,16 +1,37 @@ mod prefix { use git_refspec::{parse::Operation, RefSpec}; + #[test] + fn head_is_specifically_known() { + assert_eq!(parse("HEAD").to_ref().prefix().unwrap(), "HEAD"); + } + #[test] fn partial_refs_have_no_prefix() { assert_eq!(parse("main").to_ref().prefix(), None); } + #[test] + fn negative_specs_have_no_prefix() { + assert_eq!(parse("^refs/heads/main").to_ref().prefix(), None); + } + #[test] fn short_absolute_refs_have_no_prefix() { assert_eq!(parse("refs/short").to_ref().prefix(), None); } + #[test] + fn push_specs_use_the_destination() { + assert_eq!( + git_refspec::parse("refs/local/main:refs/remote/main".into(), Operation::Push) + .unwrap() + .prefix() + .unwrap(), + "refs/remote/" + ); + } + #[test] fn full_names_have_a_prefix() { assert_eq!(parse("refs/heads/main").to_ref().prefix().unwrap(), "refs/heads/"); @@ -38,3 +59,73 @@ mod prefix { git_refspec::parse(spec.into(), Operation::Fetch).unwrap().to_owned() } } + +mod expand_prefixes { + use git_refspec::parse::Operation; + + #[test] + fn head_is_specifically_known() { + assert_eq!(parse("HEAD"), ["HEAD"]); + } + + #[test] + fn partial_refs_have_many_prefixes() { + assert_eq!( + parse("main"), + [ + "main", + "refs/main", + "refs/tags/main", + "refs/heads/main", + 
"refs/remotes/main", + "refs/remotes/main/HEAD" + ] + ); + } + + #[test] + fn negative_specs_have_no_prefix() { + assert_eq!(parse("^refs/heads/main").len(), 0); + } + + #[test] + fn short_absolute_refs_expand_to_themselves() { + assert_eq!(parse("refs/short"), ["refs/short"]); + } + + #[test] + fn full_names_expand_to_their_prefix() { + assert_eq!(parse("refs/heads/main"), ["refs/heads/"]); + assert_eq!(parse("refs/foo/bar"), ["refs/foo/"]); + assert_eq!(parse("refs/heads/*:refs/remotes/origin/*"), ["refs/heads/"]); + } + + #[test] + fn push_specs_use_the_destination() { + let mut out = Vec::new(); + git_refspec::parse("refs/local/main:refs/remote/main".into(), Operation::Push) + .unwrap() + .expand_prefixes(&mut out); + assert_eq!(out, ["refs/remote/"]); + } + + #[test] + fn strange_glob_patterns_expand_to_nothing() { + assert_eq!(parse("refs/*/main:refs/*/main").len(), 0); + } + + #[test] + fn object_names_expand_to_nothing() { + assert_eq!(parse("e69de29bb2d1d6434b8b29ae775ad8c2e48c5391").len(), 0); + } + + fn parse(spec: &str) -> Vec { + let mut out = Vec::new(); + git_refspec::parse(spec.into(), Operation::Fetch) + .unwrap() + .to_owned() + .to_ref() + .expand_prefixes(&mut out); + out.into_iter().map(|b| b.to_string()).collect() + } +} diff --git a/git-repository/Cargo.toml b/git-repository/Cargo.toml index 060e9429443..0cd14841e1e 100644 --- a/git-repository/Cargo.toml +++ b/git-repository/Cargo.toml @@ -135,7 +135,6 @@ unicode-normalization = { version = "0.1.19", default-features = false } git-testtools = { path = "../tests/tools" } is_ci = "1.1.1" anyhow = "1" -tempfile = "3.2.0" walkdir = "2.3.2" serial_test = "0.9.0" diff --git a/git-repository/src/clone.rs b/git-repository/src/clone.rs deleted file mode 100644 index 9e7c9db904f..00000000000 --- a/git-repository/src/clone.rs +++ /dev/null @@ -1,207 +0,0 @@ -type ConfigureRemoteFn = Box) -> Result, crate::remote::init::Error>>; - -/// A utility to collect configuration on how to fetch from a remote and possibly create a working tree locally. -pub struct Prepare { - /// A freshly initialized repository which is owned by us, or `None` if it was handed to the user - repo: Option, - /// The name of the remote, which defaults to `origin` if not overridden. - remote_name: Option, - /// A function to configure a remote prior to fetching a pack. - configure_remote: Option, - /// Options for preparing a fetch operation. - #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))] - fetch_options: crate::remote::ref_map::Options, - /// The url to clone from - #[allow(dead_code)] - url: git_url::Url, -} - -/// -#[cfg(feature = "blocking-network-client")] -pub mod fetch { - /// The error returned by [`Prepare::fetch_only()`][super::Prepare::fetch_only()]. 
- #[derive(Debug, thiserror::Error)] - #[allow(missing_docs)] - pub enum Error { - #[error(transparent)] - Connect(#[from] crate::remote::connect::Error), - #[error(transparent)] - PrepareFetch(#[from] crate::remote::fetch::prepare::Error), - #[error(transparent)] - Fetch(#[from] crate::remote::fetch::Error), - #[error(transparent)] - RemoteConfiguration(#[from] crate::remote::init::Error), - #[error("Default remote configured at `clone.defaultRemoteName` is invalid")] - RemoteName(#[from] crate::remote::name::Error), - #[error("Failed to load repo-local git configuration before writing")] - LoadConfig(#[from] git_config::file::init::from_paths::Error), - #[error("Failed to store configured remote in memory")] - SaveConfig(#[from] crate::remote::save::AsError), - #[error("Failed to write repository configuration to disk")] - SaveConfigIo(#[from] std::io::Error), - } -} - -/// -pub mod prepare { - use std::convert::TryInto; - - use crate::{clone::Prepare, Repository}; - - /// The error returned by [`Prepare::new()`]. - #[derive(Debug, thiserror::Error)] - #[allow(missing_docs)] - pub enum Error { - #[error(transparent)] - Init(#[from] crate::init::Error), - #[error(transparent)] - UrlParse(#[from] git_url::parse::Error), - } - - /// Instantiation - impl Prepare { - /// Create a new repository at `path` with `crate_opts` which is ready to clone from `url`, possibly after making additional adjustments to - /// configuration and settings. - /// - /// Note that this is merely a handle to perform the actual connection to the remote, and if any of it fails the freshly initialized repository - /// will be removed automatically as soon as this instance drops. - pub fn new( - url: Url, - path: impl AsRef, - create_opts: crate::create::Options, - open_opts: crate::open::Options, - ) -> Result - where - Url: TryInto, - git_url::parse::Error: From, - { - let url = url.try_into().map_err(git_url::parse::Error::from)?; - let repo = crate::ThreadSafeRepository::init_opts(path, create_opts, open_opts)?.to_thread_local(); - Ok(Prepare { - url, - #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))] - fetch_options: Default::default(), - repo: Some(repo), - remote_name: None, - configure_remote: None, - }) - } - } - - /// Modification - impl Prepare { - /// Fetch a pack and update local branches according to refspecs, providing `progress` and checking `should_interrupt` to stop - /// the operation. - /// On success, the persisted repository is returned, and this method must not be called again to avoid a **panic**. - /// On error, the method may be called again to retry as often as needed. - /// - /// Note that all data we created will be removed once this instance drops if the operation wasn't successful. - #[cfg(feature = "blocking-network-client")] - pub fn fetch_only( - &mut self, - progress: impl crate::Progress, - should_interrupt: &std::sync::atomic::AtomicBool, - ) -> Result<(Repository, crate::remote::fetch::Outcome), super::fetch::Error> { - let repo = self - .repo - .as_mut() - .expect("user error: multiple calls are allowed only until it succeeds"); - - let remote_name = match self.remote_name.as_deref() { - Some(name) => name.to_owned(), - None => repo - .config - .resolved - .string("clone", None, "defaultRemoteName") - .map(|n| crate::remote::name::validated(n.to_string())) - .unwrap_or_else(|| Ok("origin".into()))?, - }; - - let mut remote = repo - .remote_at(self.url.clone())? 
- .with_refspec("+refs/heads/*:refs/remotes/origin/*", crate::remote::Direction::Fetch) - .expect("valid static spec"); - if let Some(f) = self.configure_remote.as_mut() { - remote = f(remote)?; - } - - let mut metadata = git_config::file::Metadata::from(git_config::Source::Local); - let config_path = repo.git_dir().join("config"); - metadata.path = Some(config_path.clone()); - let mut config = - git_config::File::from_paths_metadata(Some(metadata), Default::default())?.expect("one file to load"); - remote.save_as_to(remote_name, &mut config)?; - std::fs::write(config_path, config.to_bstring())?; - - let outcome = remote - .connect(crate::remote::Direction::Fetch, progress)? - .prepare_fetch(self.fetch_options.clone())? - .receive(should_interrupt)?; - - let repo_config = git_features::threading::OwnShared::make_mut(&mut repo.config.resolved); - let ids_to_remove: Vec<_> = repo_config - .sections_and_ids() - .filter_map(|(s, id)| (s.meta().source == git_config::Source::Local).then(|| id)) - .collect(); - for id in ids_to_remove { - repo_config.remove_section_by_id(id); - } - repo_config.append(config); - - Ok((self.repo.take().expect("still present"), outcome)) - } - } - - /// Builder - impl Prepare { - /// Set additional options to adjust parts of the fetch operation that are not affected by the git configuration. - #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))] - pub fn with_fetch_options(mut self, opts: crate::remote::ref_map::Options) -> Self { - self.fetch_options = opts; - self - } - /// Use `f` to apply arbitrary changes to the remote that is about to be used to fetch a pack. - /// - /// The passed in `remote` will be un-named and pre-configured to be a default remote as we know it from git-clone. - /// It is not yet present in the configuration of the repository, - /// but each change it will eventually be written to the configuration prior to performing a the fetch operation. - pub fn configure_remote( - mut self, - f: impl FnMut(crate::Remote<'_>) -> Result, crate::remote::init::Error> + 'static, - ) -> Self { - self.configure_remote = Some(Box::new(f)); - self - } - - /// Set the remote's name to the given value after it was configured using the function provided via - /// [`configure_remote()`][Self::configure_remote()]. - /// - /// If not set here, it defaults to `origin` or the value of `clone.defaultRemoteName`. - pub fn with_remote_name(mut self, name: impl Into) -> Result { - self.remote_name = Some(crate::remote::name::validated(name)?); - Ok(self) - } - } - - /// Consumption - impl Prepare { - /// Persist the contained repository as is even if an error may have occurred when interacting with the remote or checking out the main working tree. 
- pub fn persist(mut self) -> Repository { - self.repo.take().expect("present and consumed once") - } - } - - impl Drop for Prepare { - fn drop(&mut self) { - if let Some(repo) = self.repo.take() { - std::fs::remove_dir_all(repo.work_dir().unwrap_or_else(|| repo.path())).ok(); - } - } - } - - impl From for Repository { - fn from(prep: Prepare) -> Self { - prep.persist() - } - } -} diff --git a/git-repository/src/clone/checkout.rs b/git-repository/src/clone/checkout.rs new file mode 100644 index 00000000000..58141ed7817 --- /dev/null +++ b/git-repository/src/clone/checkout.rs @@ -0,0 +1,142 @@ +use crate::clone::PrepareCheckout; +use crate::Repository; + +/// +pub mod main_worktree { + use crate::clone::PrepareCheckout; + use crate::{Progress, Repository}; + use git_odb::FindExt; + use std::path::PathBuf; + use std::sync::atomic::AtomicBool; + + /// The error returned by [`PrepareCheckout::main_worktree()`]. + #[derive(Debug, thiserror::Error)] + #[allow(missing_docs)] + pub enum Error { + #[error("Repository at \"{}\" is a bare repository and cannot have a main worktree checkout", git_dir.display())] + BareRepository { git_dir: PathBuf }, + #[error("The object pointed to by HEAD is not a treeish")] + NoHeadTree(#[from] crate::object::peel::to_kind::Error), + #[error("Could not create index from tree at {id}")] + IndexFromTree { + id: git_hash::ObjectId, + source: git_traverse::tree::breadthfirst::Error, + }, + #[error(transparent)] + WriteIndex(#[from] git_index::file::write::Error), + #[error(transparent)] + CheckoutOptions(#[from] crate::config::checkout_options::Error), + #[error(transparent)] + IndexCheckout( + #[from] + git_worktree::index::checkout::Error>, + ), + #[error("Failed to reopen object database as Arc (only if thread-safety wasn't compiled in)")] + OpenArcOdb(#[from] std::io::Error), + #[error("The HEAD reference could not be located")] + FindHead(#[from] crate::reference::find::existing::Error), + #[error("The HEAD reference could not be located")] + PeelHeadToId(#[from] crate::head::peel::Error), + } + + /// Modification + impl PrepareCheckout { + /// Checkout the main worktree, determining how many threads to use by looking at `checkout.workers`, defaulting to using + /// on thread per logical core. + /// + /// Note that this is a no-op if the remote was empty, leaving this repository empty as well. This can be validated by checking + /// if the `head()` of the returned repository is not unborn. + pub fn main_worktree( + &mut self, + mut progress: impl crate::Progress, + should_interrupt: &AtomicBool, + ) -> Result<(Repository, git_worktree::index::checkout::Outcome), Error> { + let repo = self + .repo + .as_ref() + .expect("still present as we never succeeded the worktree checkout yet"); + let workdir = repo.work_dir().ok_or_else(|| Error::BareRepository { + git_dir: repo.git_dir().to_owned(), + })?; + let root_tree = match repo.head()?.peel_to_id_in_place().transpose()? 
{ + Some(id) => id.object().expect("downloaded from remote").peel_to_tree()?.id, + None => { + return Ok(( + self.repo.take().expect("still present"), + git_worktree::index::checkout::Outcome::default(), + )) + } + }; + let index = git_index::State::from_tree(&root_tree, |oid, buf| repo.objects.find_tree_iter(oid, buf).ok()) + .map_err(|err| Error::IndexFromTree { + id: root_tree, + source: err, + })?; + let mut index = git_index::File::from_state(index, repo.index_path()); + + let mut opts = repo.config.checkout_options(repo.git_dir())?; + opts.destination_is_initially_empty = true; + + let mut files = progress.add_child("checkout"); + let mut bytes = progress.add_child("writing"); + + files.init(Some(index.entries().len()), crate::progress::count("files")); + bytes.init(None, crate::progress::bytes()); + + let start = std::time::Instant::now(); + let outcome = git_worktree::index::checkout( + &mut index, + workdir, + { + let objects = repo.objects.clone().into_arc()?; + move |oid, buf| objects.find_blob(oid, buf) + }, + &mut files, + &mut bytes, + should_interrupt, + opts, + )?; + files.show_throughput(start); + bytes.show_throughput(start); + + index.write(Default::default())?; + Ok((self.repo.take().expect("still present"), outcome)) + } + } +} + +/// Access +impl PrepareCheckout { + /// Get access to the repository while the checkout isn't yet completed. + /// + /// # Panics + /// + /// If the checkout is completed and the [`Repository`] was already passed on to the caller. + pub fn repo(&self) -> &Repository { + self.repo + .as_ref() + .expect("present as checkout operation isn't complete") + } +} + +/// Consumption +impl PrepareCheckout { + /// Persist the contained repository as is even if an error may have occurred when checking out the main working tree. + pub fn persist(mut self) -> Repository { + self.repo.take().expect("present and consumed once") + } +} + +impl Drop for PrepareCheckout { + fn drop(&mut self) { + if let Some(repo) = self.repo.take() { + std::fs::remove_dir_all(repo.work_dir().unwrap_or_else(|| repo.path())).ok(); + } + } +} + +impl From for Repository { + fn from(prep: PrepareCheckout) -> Self { + prep.persist() + } +} diff --git a/git-repository/src/clone/fetch/mod.rs b/git-repository/src/clone/fetch/mod.rs new file mode 100644 index 00000000000..39a9ed02e0e --- /dev/null +++ b/git-repository/src/clone/fetch/mod.rs @@ -0,0 +1,193 @@ +use crate::{clone::PrepareFetch, Repository}; + +/// The error returned by [`PrepareFetch::fetch_only()`]. 
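An editorial aside on the checkout half just added: `PrepareCheckout::main_worktree()` is all a caller needs once the pack was fetched. A minimal sketch of driving it, assuming `prepare` came from `PrepareFetch::fetch_then_checkout()` and that `git_repository::progress::Discard` (the no-op progress type re-exported from `git-features`) is available:

```rust
use std::sync::atomic::AtomicBool;

use git_repository::{clone::PrepareCheckout, progress, Repository};

/// Check out the main worktree and, on failure, keep the fetched repository
/// instead of letting `Drop` delete it again.
fn checkout_or_keep(mut prepare: PrepareCheckout) -> Repository {
    let should_interrupt = AtomicBool::new(false);
    match prepare.main_worktree(progress::Discard, &should_interrupt) {
        Ok((repo, _outcome)) => repo,
        // The object database and refs are already valid at this point; `persist()`
        // hands the repository back to us despite the failed checkout.
        Err(_err) => prepare.persist(),
    }
}
```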
+#[derive(Debug, thiserror::Error)] +#[allow(missing_docs)] +#[cfg(feature = "blocking-network-client")] +pub enum Error { + #[error(transparent)] + Connect(#[from] crate::remote::connect::Error), + #[error(transparent)] + PrepareFetch(#[from] crate::remote::fetch::prepare::Error), + #[error(transparent)] + Fetch(#[from] crate::remote::fetch::Error), + #[error(transparent)] + RemoteConfiguration(#[from] crate::remote::init::Error), + #[error("Default remote configured at `clone.defaultRemoteName` is invalid")] + RemoteName(#[from] crate::remote::name::Error), + #[error("Failed to load repo-local git configuration before writing")] + LoadConfig(#[from] git_config::file::init::from_paths::Error), + #[error("Failed to store configured remote in memory")] + SaveConfig(#[from] crate::remote::save::AsError), + #[error("Failed to write repository configuration to disk")] + SaveConfigIo(#[from] std::io::Error), + #[error("The remote HEAD points to a reference named {head_ref_name:?} which is invalid.")] + InvalidHeadRef { + source: git_validate::refname::Error, + head_ref_name: crate::bstr::BString, + }, + #[error("Failed to update HEAD with values from remote")] + HeadUpdate(#[from] crate::reference::edit::Error), +} + +/// Modification +impl PrepareFetch { + /// Fetch a pack and update local branches according to refspecs, providing `progress` and checking `should_interrupt` to stop + /// the operation. + /// On success, the persisted repository is returned, and this method must not be called again to avoid a **panic**. + /// On error, the method may be called again to retry as often as needed. + /// + /// If the remote repository was empty, that is newly initialized, the returned repository will also be empty and like + /// it was newly initialized. + /// + /// Note that all data we created will be removed once this instance drops if the operation wasn't successful. + #[cfg(feature = "blocking-network-client")] + pub fn fetch_only
<P>
( + &mut self, + progress: P, + should_interrupt: &std::sync::atomic::AtomicBool, + ) -> Result<(Repository, crate::remote::fetch::Outcome), Error> + where + P: crate::Progress, + P::SubProgress: 'static, + { + use crate::bstr::ByteVec; + use crate::remote::fetch::RefLogMessage; + + let repo = self + .repo + .as_mut() + .expect("user error: multiple calls are allowed only until it succeeds"); + + let remote_name = match self.remote_name.as_deref() { + Some(name) => name.to_owned(), + None => repo + .config + .resolved + .string("clone", None, "defaultRemoteName") + .map(|n| crate::remote::name::validated(n.to_string())) + .unwrap_or_else(|| Ok("origin".into()))?, + }; + + let mut remote = repo + .remote_at(self.url.clone())? + .with_refspec( + format!("+refs/heads/*:refs/remotes/{remote_name}/*").as_str(), + crate::remote::Direction::Fetch, + ) + .expect("valid static spec"); + if let Some(f) = self.configure_remote.as_mut() { + remote = f(remote)?; + } + + let config = util::write_remote_to_local_config_file(&mut remote, remote_name.clone())?; + + // Add HEAD after the remote was written to config, we need it to know what to checkout later, and assure + // the ref that HEAD points to is present no matter what. + remote.fetch_specs.push( + git_refspec::parse( + format!("HEAD:refs/remotes/{remote_name}/HEAD").as_str().into(), + git_refspec::parse::Operation::Fetch, + ) + .expect("valid") + .to_owned(), + ); + let pending_pack: crate::remote::fetch::Prepare<'_, '_, _, _> = remote + .connect(crate::remote::Direction::Fetch, progress)? + .prepare_fetch(self.fetch_options.clone())?; + if pending_pack.ref_map().object_hash != repo.object_hash() { + unimplemented!("configure repository to expect a different object hash as advertised by the server") + } + let reflog_message = { + let mut b = self.url.to_bstring(); + b.insert_str(0, "clone: from "); + b + }; + let outcome = pending_pack + .with_reflog_message(RefLogMessage::Override { + message: reflog_message.clone(), + }) + .receive(should_interrupt)?; + + util::replace_changed_local_config_file(repo, config); + util::update_head( + repo, + &outcome.ref_map.remote_refs, + reflog_message.as_ref(), + &remote_name, + )?; + + Ok((self.repo.take().expect("still present"), outcome)) + } + + /// Similar to [`fetch_only()`][Self::fetch_only()`], but passes ownership to a utility type to configure a checkout operation. + #[cfg(feature = "blocking-network-client")] + pub fn fetch_then_checkout
<P>
( + &mut self, + progress: P, + should_interrupt: &std::sync::atomic::AtomicBool, + ) -> Result<(crate::clone::PrepareCheckout, crate::remote::fetch::Outcome), Error> + where + P: crate::Progress, + P::SubProgress: 'static, + { + let (repo, fetch_outcome) = self.fetch_only(progress, should_interrupt)?; + Ok((crate::clone::PrepareCheckout { repo: repo.into() }, fetch_outcome)) + } +} + +/// Builder +impl PrepareFetch { + /// Set additional options to adjust parts of the fetch operation that are not affected by the git configuration. + #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))] + pub fn with_fetch_options(mut self, opts: crate::remote::ref_map::Options) -> Self { + self.fetch_options = opts; + self + } + /// Use `f` to apply arbitrary changes to the remote that is about to be used to fetch a pack. + /// + /// The passed in `remote` will be un-named and pre-configured to be a default remote as we know it from git-clone. + /// It is not yet present in the configuration of the repository, + /// but each change it will eventually be written to the configuration prior to performing a the fetch operation. + pub fn configure_remote( + mut self, + f: impl FnMut(crate::Remote<'_>) -> Result, crate::remote::init::Error> + 'static, + ) -> Self { + self.configure_remote = Some(Box::new(f)); + self + } + + /// Set the remote's name to the given value after it was configured using the function provided via + /// [`configure_remote()`][Self::configure_remote()]. + /// + /// If not set here, it defaults to `origin` or the value of `clone.defaultRemoteName`. + pub fn with_remote_name(mut self, name: impl Into) -> Result { + self.remote_name = Some(crate::remote::name::validated(name)?); + Ok(self) + } +} + +/// Consumption +impl PrepareFetch { + /// Persist the contained repository as is even if an error may have occurred when fetching from the remote. 
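Taken together with the checkout utility above, `fetch_then_checkout()` covers the whole clone journey. A hedged end-to-end sketch, assuming the `blocking-network-client` feature, the `prepare_clone()` convenience function added to `lib.rs` further below, and that a `&str` converts into `git_url::Url`:

```rust
use std::{path::Path, sync::atomic::AtomicBool};

use git_repository::{progress, Repository};

fn clone_with_worktree(url: &str, destination: &Path) -> Result<Repository, Box<dyn std::error::Error>> {
    let should_interrupt = AtomicBool::new(false);
    // A freshly initialized repository at `destination`, ready to fetch from `url`.
    let mut prepare = git_repository::prepare_clone(url, destination)?;
    // Fetch the pack, write the remote to the local configuration and set up HEAD ...
    let (mut checkout, _fetch_outcome) =
        prepare.fetch_then_checkout(progress::Discard, &should_interrupt)?;
    // ... then populate the main worktree from it.
    let (repo, _checkout_outcome) = checkout.main_worktree(progress::Discard, &should_interrupt)?;
    Ok(repo)
}
```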
+ pub fn persist(mut self) -> Repository { + self.repo.take().expect("present and consumed once") + } +} + +impl Drop for PrepareFetch { + fn drop(&mut self) { + if let Some(repo) = self.repo.take() { + std::fs::remove_dir_all(repo.work_dir().unwrap_or_else(|| repo.path())).ok(); + } + } +} + +impl From for Repository { + fn from(prep: PrepareFetch) -> Self { + prep.persist() + } +} + +#[cfg(feature = "blocking-network-client")] +mod util; diff --git a/git-repository/src/clone/fetch/util.rs b/git-repository/src/clone/fetch/util.rs new file mode 100644 index 00000000000..8e16b90acb1 --- /dev/null +++ b/git-repository/src/clone/fetch/util.rs @@ -0,0 +1,211 @@ +use super::Error; +use crate::bstr::{BStr, ByteSlice}; +use crate::Repository; +use git_odb::Find; +use git_ref::transaction::{LogChange, RefLog}; +use git_ref::FullNameRef; +use std::borrow::Cow; +use std::convert::TryInto; + +pub fn write_remote_to_local_config_file( + remote: &mut crate::Remote<'_>, + remote_name: String, +) -> Result, Error> { + let mut metadata = git_config::file::Metadata::from(git_config::Source::Local); + let config_path = remote.repo.git_dir().join("config"); + metadata.path = Some(config_path.clone()); + let mut config = + git_config::File::from_paths_metadata(Some(metadata), Default::default())?.expect("one file to load"); + remote.save_as_to(remote_name, &mut config)?; + std::fs::write(config_path, config.to_bstring())?; + Ok(config) +} + +pub fn replace_changed_local_config_file(repo: &mut Repository, mut config: git_config::File<'static>) { + let repo_config = git_features::threading::OwnShared::make_mut(&mut repo.config.resolved); + let ids_to_remove: Vec<_> = repo_config + .sections_and_ids() + .filter_map(|(s, id)| { + matches!(s.meta().source, git_config::Source::Local | git_config::Source::Api).then(|| id) + }) + .collect(); + for id in ids_to_remove { + repo_config.remove_section_by_id(id); + } + crate::config::overrides::apply(&mut config, &repo.options.config_overrides, git_config::Source::Api) + .expect("applied once and can be applied again"); + repo_config.append(config); + repo.reread_values_and_clear_caches() + .expect("values could be read once and can be read again"); +} + +/// HEAD cannot be written by means of refspec by design, so we have to do it manually here. Also create the pointed-to ref +/// if we have to, as it might not have been naturally included in the ref-specs. 
+pub fn update_head( + repo: &mut Repository, + remote_refs: &[git_protocol::fetch::Ref], + reflog_message: &BStr, + remote_name: &str, +) -> Result<(), Error> { + use git_ref::transaction::{PreviousValue, RefEdit}; + use git_ref::Target; + let (head_peeled_id, head_ref) = match remote_refs.iter().find_map(|r| { + Some(match r { + git_protocol::fetch::Ref::Symbolic { + full_ref_name, + target, + object, + } if full_ref_name == "HEAD" => (Some(object.as_ref()), Some(target)), + git_protocol::fetch::Ref::Direct { full_ref_name, object } if full_ref_name == "HEAD" => { + (Some(object.as_ref()), None) + } + git_protocol::fetch::Ref::Unborn { full_ref_name, target } if full_ref_name == "HEAD" => { + (None, Some(target)) + } + _ => return None, + }) + }) { + Some(t) => t, + None => return Ok(()), + }; + + let head: git_ref::FullName = "HEAD".try_into().expect("valid"); + let reflog_message = || LogChange { + mode: RefLog::AndReference, + force_create_reflog: false, + message: reflog_message.to_owned(), + }; + match head_ref { + Some(referent) => { + let referent: git_ref::FullName = referent.try_into().map_err(|err| Error::InvalidHeadRef { + head_ref_name: referent.to_owned(), + source: err, + })?; + repo.refs + .transaction() + .packed_refs(git_ref::file::transaction::PackedRefs::DeletionsAndNonSymbolicUpdates( + Box::new(|oid, buf| { + repo.objects + .try_find(oid, buf) + .map(|obj| obj.map(|obj| obj.kind)) + .map_err(|err| Box::new(err) as Box) + }), + )) + .prepare( + { + let mut edits = vec![RefEdit { + change: git_ref::transaction::Change::Update { + log: reflog_message(), + expected: PreviousValue::Any, + new: Target::Symbolic(referent.clone()), + }, + name: head.clone(), + deref: false, + }]; + if let Some(head_peeled_id) = head_peeled_id { + edits.push(RefEdit { + change: git_ref::transaction::Change::Update { + log: reflog_message(), + expected: PreviousValue::Any, + new: Target::Peeled(head_peeled_id.to_owned()), + }, + name: referent.clone(), + deref: false, + }); + }; + edits + }, + git_lock::acquire::Fail::Immediately, + git_lock::acquire::Fail::Immediately, + ) + .map_err(crate::reference::edit::Error::from)? + .commit(repo.committer_or_default()) + .map_err(crate::reference::edit::Error::from)?; + + if let Some(head_peeled_id) = head_peeled_id { + let mut log = reflog_message(); + log.mode = RefLog::Only; + repo.edit_reference(RefEdit { + change: git_ref::transaction::Change::Update { + log, + expected: PreviousValue::Any, + new: Target::Peeled(head_peeled_id.to_owned()), + }, + name: head, + deref: false, + })?; + } + + setup_branch_config(repo, referent.as_ref(), head_peeled_id, remote_name)?; + } + None => { + repo.edit_reference(RefEdit { + change: git_ref::transaction::Change::Update { + log: reflog_message(), + expected: PreviousValue::Any, + new: Target::Peeled( + head_peeled_id + .expect("detached heads always point to something") + .to_owned(), + ), + }, + name: head, + deref: false, + })?; + } + }; + Ok(()) +} + +/// Setup the remote configuration for `branch` so that it points to itself, but on the remote, if an only if currently saved refspec +/// is able to match it. +/// For that we reload the remote of `remote_name` and use its ref_specs for match. 
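To illustrate the combined effect of `update_head()` above and `setup_branch_config()` that follows (an editorial example, not part of the change): when the remote's `HEAD` advertises `refs/heads/main`, the local repository ends up with a symbolic `HEAD` pointing at `refs/heads/main`, that branch set to the advertised commit, and a `clone: from <url>` reflog entry on both, while the written `.git/config` looks roughly like this (URL, remote and branch names are placeholders):

```
[remote "origin"]
	url = https://example.com/repo.git
	fetch = +refs/heads/*:refs/remotes/origin/*
[branch "main"]
	remote = origin
	merge = refs/heads/main
```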
+fn setup_branch_config( + repo: &mut Repository, + branch: &FullNameRef, + branch_id: Option<&git_hash::oid>, + remote_name: &str, +) -> Result<(), Error> { + let short_name = match branch.category_and_short_name() { + Some((cat, shortened)) if cat == git_ref::Category::LocalBranch => match shortened.to_str() { + Ok(s) => s, + Err(_) => return Ok(()), + }, + _ => return Ok(()), + }; + let remote = repo + .find_remote(remote_name) + .expect("remote was just created and must be visible in config"); + let group = git_refspec::MatchGroup::from_fetch_specs(remote.fetch_specs.iter().map(|s| s.to_ref())); + let null = git_hash::ObjectId::null(repo.object_hash()); + let res = group.match_remotes( + Some(git_refspec::match_group::Item { + full_ref_name: branch.as_bstr(), + target: branch_id.unwrap_or(&null), + object: None, + }) + .into_iter(), + ); + if !res.mappings.is_empty() { + let mut metadata = git_config::file::Metadata::from(git_config::Source::Local); + let config_path = remote.repo.git_dir().join("config"); + metadata.path = Some(config_path.clone()); + let mut config = + git_config::File::from_paths_metadata(Some(metadata), Default::default())?.expect("one file to load"); + + let mut section = config + .new_section("branch", Some(Cow::Owned(short_name.into()))) + .expect("section header name is always valid per naming rules, our input branch name is valid"); + section.push( + "remote".try_into().expect("valid at compile time"), + Some(remote_name.into()), + ); + section.push( + "merge".try_into().expect("valid at compile time"), + Some(branch.as_bstr()), + ); + std::fs::write(config_path, config.to_bstring())?; + replace_changed_local_config_file(repo, config); + } + Ok(()) +} diff --git a/git-repository/src/clone/mod.rs b/git-repository/src/clone/mod.rs new file mode 100644 index 00000000000..075681996b2 --- /dev/null +++ b/git-repository/src/clone/mod.rs @@ -0,0 +1,86 @@ +use std::convert::TryInto; + +type ConfigureRemoteFn = Box) -> Result, crate::remote::init::Error>>; + +/// A utility to collect configuration on how to fetch from a remote and initiate a fetch operation. It will delete the newly +/// created repository on when dropped without successfully finishing a fetch. +#[must_use] +pub struct PrepareFetch { + /// A freshly initialized repository which is owned by us, or `None` if it was handed to the user + repo: Option, + /// The name of the remote, which defaults to `origin` if not overridden. + remote_name: Option, + /// A function to configure a remote prior to fetching a pack. + configure_remote: Option, + /// Options for preparing a fetch operation. + #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))] + fetch_options: crate::remote::ref_map::Options, + /// The url to clone from + #[cfg_attr(not(feature = "blocking-network-client"), allow(dead_code))] + url: git_url::Url, +} + +/// The error returned by [`PrepareFetch::new()`]. +#[derive(Debug, thiserror::Error)] +#[allow(missing_docs)] +pub enum Error { + #[error(transparent)] + Init(#[from] crate::init::Error), + #[error(transparent)] + UrlParse(#[from] git_url::parse::Error), + #[error("Failed to turn a the relative file url \"{}\" into an absolute one", url.to_bstring())] + CanonicalizeUrl { + url: git_url::Url, + source: git_path::realpath::Error, + }, +} + +/// Instantiation +impl PrepareFetch { + /// Create a new repository at `path` with `crate_opts` which is ready to clone from `url`, possibly after making additional adjustments to + /// configuration and settings. 
+ /// + /// Note that this is merely a handle to perform the actual connection to the remote, and if any of it fails the freshly initialized repository + /// will be removed automatically as soon as this instance drops. + pub fn new( + url: Url, + path: impl AsRef, + kind: crate::create::Kind, + mut create_opts: crate::create::Options, + open_opts: crate::open::Options, + ) -> Result + where + Url: TryInto, + git_url::parse::Error: From, + { + let mut url = url.try_into().map_err(git_url::parse::Error::from)?; + url.canonicalize().map_err(|err| Error::CanonicalizeUrl { + url: url.clone(), + source: err, + })?; + create_opts.destination_must_be_empty = true; + let repo = crate::ThreadSafeRepository::init_opts(path, kind, create_opts, open_opts)?.to_thread_local(); + Ok(PrepareFetch { + url, + #[cfg(any(feature = "async-network-client", feature = "blocking-network-client"))] + fetch_options: Default::default(), + repo: Some(repo), + remote_name: None, + configure_remote: None, + }) + } +} + +/// A utility to collect configuration on how to perform a checkout into a working tree, and when dropped without checking out successfully +/// the fetched repository will be dropped. +#[must_use] +pub struct PrepareCheckout { + /// A freshly initialized repository which is owned by us, or `None` if it was handed to the user + pub(self) repo: Option, +} + +/// +pub mod fetch; + +/// +pub mod checkout; diff --git a/git-repository/src/config/cache/access.rs b/git-repository/src/config/cache/access.rs index 62790e0c8a8..37ef9d76e0b 100644 --- a/git-repository/src/config/cache/access.rs +++ b/git-repository/src/config/cache/access.rs @@ -1,11 +1,48 @@ +use std::borrow::Cow; use std::{convert::TryInto, path::PathBuf, time::Duration}; use git_lock::acquire::Fail; +use crate::config::cache::util::check_lenient_default; +use crate::config::checkout_options; use crate::{config::Cache, remote, repository::identity}; /// Access impl Cache { + pub(crate) fn diff_algorithm(&self) -> Result { + use crate::config::diff::algorithm::Error; + self.diff_algorithm + .get_or_try_init(|| { + let res = { + let name = self + .resolved + .string("diff", None, "algorithm") + .unwrap_or_else(|| Cow::Borrowed("myers".into())); + if name.eq_ignore_ascii_case(b"myers") || name.eq_ignore_ascii_case(b"default") { + Ok(git_diff::text::Algorithm::Myers) + } else if name.eq_ignore_ascii_case(b"minimal") { + Ok(git_diff::text::Algorithm::MyersMinimal) + } else if name.eq_ignore_ascii_case(b"histogram") { + Ok(git_diff::text::Algorithm::Histogram) + } else if name.eq_ignore_ascii_case(b"patience") { + if self.lenient_config { + Ok(git_diff::text::Algorithm::Histogram) + } else { + Err(Error::Unimplemented { + name: name.into_owned(), + }) + } + } else { + Err(Error::Unknown { + name: name.into_owned(), + }) + } + }; + check_lenient_default(res, self.lenient_config, || git_diff::text::Algorithm::Myers) + }) + .copied() + } + pub(crate) fn personas(&self) -> &identity::Personas { self.personas .get_or_init(|| identity::Personas::from_config_and_env(&self.resolved, self.git_prefix)) @@ -59,32 +96,128 @@ impl Cache { } /// The path to the user-level excludes file to ignore certain files in the worktree. - pub(crate) fn excludes_file(&self) -> Result, git_config::path::interpolate::Error> { - let home = self.home_dir(); + pub(crate) fn excludes_file(&self) -> Option> { + self.trusted_file_path("core", None, "excludesFile")? 
+ .map(|p| p.into_owned()) + .into() + } + + /// A helper to obtain a file from trusted configuration at `section_name`, `subsection_name`, and `key`, which is interpolated + /// if present. + pub(crate) fn trusted_file_path( + &self, + section_name: impl AsRef, + subsection_name: Option<&str>, + key: impl AsRef, + ) -> Option, git_config::path::interpolate::Error>> { + let path = self.resolved.path_filter( + section_name, + subsection_name, + key, + &mut self.filter_config_section.clone(), + )?; + let install_dir = crate::path::install_dir().ok(); + let home = self.home_dir(); let ctx = crate::config::cache::interpolate_context(install_dir.as_deref(), home.as_deref()); - match self - .resolved - .path_filter("core", None, "excludesFile", &mut self.filter_config_section.clone()) - .map(|p| p.interpolate(ctx).map(|p| p.into_owned())) - .transpose() - { - Ok(f) => Ok(f), - Err(_err) if self.lenient_config => Ok(None), - Err(err) => Err(err), + Some(path.interpolate(ctx)) + } + + pub(crate) fn apply_leniency(&self, res: Option>) -> Result, E> { + match res { + Some(Ok(v)) => Ok(Some(v)), + Some(Err(_err)) if self.lenient_config => Ok(None), + Some(Err(err)) => Err(err), + None => Ok(None), } } - /// Return a path by using the `$XDF_CONFIG_HOME` or `$HOME/.config/…` environment variables locations. - pub fn xdg_config_path( + /// Collect everything needed to checkout files into a worktree. + /// Note that some of the options being returned will be defaulted so safe settings, the caller might have to override them + /// depending on the use-case. + pub(crate) fn checkout_options( + &self, + git_dir: &std::path::Path, + ) -> Result { + fn checkout_thread_limit_from_config( + config: &git_config::File<'static>, + ) -> Option> { + config.integer("checkout", None, "workers").map(|val| match val { + Ok(v) if v < 0 => Ok(0), + Ok(v) => Ok(v.try_into().expect("positive i64 can always be usize on 64 bit")), + Err(err) => Err(checkout_options::Error::Configuration { + key: "checkout.workers", + source: err, + }), + }) + } + + fn boolean(me: &Cache, full_key: &'static str, default: bool) -> Result { + let mut tokens = full_key.split('.'); + let section = tokens.next().expect("section"); + let key = tokens.next().expect("key"); + assert!(tokens.next().is_none(), "core."); + Ok(me + .apply_leniency(me.resolved.boolean(section, None, key)) + .map_err(|err| checkout_options::Error::Configuration { + key: full_key, + source: err, + })? + .unwrap_or(default)) + } + + fn assemble_attribute_globals( + me: &Cache, + _git_dir: &std::path::Path, + ) -> Result { + let _attributes_file = match me.trusted_file_path("core", None, "attributesFile").transpose()? { + Some(attributes) => Some(attributes.into_owned()), + None => me.xdg_config_path("attributes").ok().flatten(), + }; + // TODO: implement git_attributes::MatchGroup::::from_git_dir(), similar to what's done for `Ignore`. 
+ Ok(Default::default()) + } + + let thread_limit = self.apply_leniency(checkout_thread_limit_from_config(&self.resolved))?; + Ok(git_worktree::index::checkout::Options { + fs: git_worktree::fs::Capabilities { + precompose_unicode: boolean(self, "core.precomposeUnicode", false)?, + ignore_case: boolean(self, "core.ignoreCase", false)?, + executable_bit: boolean(self, "core.fileMode", true)?, + symlink: boolean(self, "core.symlinks", true)?, + }, + thread_limit, + destination_is_initially_empty: false, + overwrite_existing: false, + keep_going: false, + trust_ctime: boolean(self, "core.trustCTime", true)?, + check_stat: self + .resolved + .string("core", None, "checkStat") + .map_or(true, |v| v.as_ref() != "minimal"), + attribute_globals: assemble_attribute_globals(self, git_dir)?, + }) + } + pub(crate) fn xdg_config_path( &self, resource_file_name: &str, ) -> Result, git_sec::permission::Error> { std::env::var_os("XDG_CONFIG_HOME") - .map(|path| (path, &self.xdg_config_home_env)) - .or_else(|| std::env::var_os("HOME").map(|path| (path, &self.home_env))) + .map(|path| (PathBuf::from(path), &self.xdg_config_home_env)) + .or_else(|| { + std::env::var_os("HOME").map(|path| { + ( + { + let mut p = PathBuf::from(path); + p.push(".config"); + p + }, + &self.home_env, + ) + }) + }) .and_then(|(base, permission)| { - let resource = std::path::PathBuf::from(base).join("git").join(resource_file_name); + let resource = base.join("git").join(resource_file_name); permission.check(resource).transpose() }) .transpose() @@ -94,7 +227,7 @@ impl Cache { /// /// We never fail for here even if the permission is set to deny as we `git-config` will fail later /// if it actually wants to use the home directory - we don't want to fail prematurely. - pub fn home_dir(&self) -> Option { + pub(crate) fn home_dir(&self) -> Option { std::env::var_os("HOME") .map(PathBuf::from) .and_then(|path| self.home_env.check_opt(path)) diff --git a/git-repository/src/config/cache/incubate.rs b/git-repository/src/config/cache/incubate.rs index f8048a535ab..233b4c0f3e6 100644 --- a/git-repository/src/config/cache/incubate.rs +++ b/git-repository/src/config/cache/incubate.rs @@ -57,7 +57,7 @@ impl StageOne { .transpose()? 
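For reference (an editorial aside), the configuration consulted by the new `checkout_options()` above boils down to these keys, shown here with example values:

```
[checkout]
	workers = 8              # thread limit for writing files
[core]
	fileMode = true          # -> fs::Capabilities::executable_bit
	symlinks = true          # -> fs::Capabilities::symlink
	precomposeUnicode = false
	ignoreCase = false
	trustCTime = true
	checkStat = default      # anything but "minimal" keeps full stat checks
	attributesFile = ~/.gitattributes   # read, but attribute handling is still a TODO
```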
.unwrap_or(git_hash::Kind::Sha1); - let reflog = util::query_refupdates(&config); + let reflog = util::query_refupdates(&config, lenient)?; Ok(StageOne { git_dir_config: config, buf, diff --git a/git-repository/src/config/cache/init.rs b/git-repository/src/config/cache/init.rs index 01442d25d09..67b4d818c98 100644 --- a/git-repository/src/config/cache/init.rs +++ b/git-repository/src/config/cache/init.rs @@ -1,4 +1,5 @@ use super::{interpolate_context, util, Error, StageOne}; +use crate::bstr::BString; use crate::{config::Cache, repository}; /// Initialization @@ -33,6 +34,7 @@ impl Cache { includes: use_includes, }: repository::permissions::Config, lenient_config: bool, + config_overrides: &[BString], ) -> Result { let options = git_config::file::init::Options { includes: if use_includes { @@ -108,13 +110,16 @@ impl Cache { if use_env { globals.append(git_config::File::from_env(options)?.unwrap_or_default()); } + if !config_overrides.is_empty() { + crate::config::overrides::apply(&mut globals, config_overrides, git_config::Source::Api)?; + } globals }; let hex_len = util::check_lenient(util::parse_core_abbrev(&config, object_hash), lenient_config)?; use util::config_bool; - let reflog = util::query_refupdates(&config); + let reflog = util::query_refupdates(&config, lenient_config)?; let ignore_case = config_bool(&config, "core.ignoreCase", false, lenient_config)?; let use_multi_pack_index = config_bool(&config, "core.multiPackIndex", true, lenient_config)?; let object_kind_hint = util::disambiguate_hint(&config); @@ -136,6 +141,7 @@ impl Cache { url_rewrite: Default::default(), #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))] url_scheme: Default::default(), + diff_algorithm: Default::default(), git_prefix, }) } @@ -145,25 +151,41 @@ impl Cache { /// However, those that are lazily read won't be re-evaluated right away and might thus pass now but fail later. /// /// Note that we unconditionally re-read all values. - pub fn reread_values_and_clear_caches(&mut self, config: crate::Config) -> Result<(), Error> { - let hex_len = util::check_lenient(util::parse_core_abbrev(&config, self.object_hash), self.lenient_config)?; + pub fn reread_values_and_clear_caches_replacing_config(&mut self, config: crate::Config) -> Result<(), Error> { + let prev = std::mem::replace(&mut self.resolved, config); + match self.reread_values_and_clear_caches() { + Err(err) => { + drop(std::mem::replace(&mut self.resolved, prev)); + Err(err) + } + Ok(()) => Ok(()), + } + } + + /// Similar to `reread_values_and_clear_caches_replacing_config()`, but works on the existing configuration instead of a passed + /// in one that it them makes the default. 
+ pub fn reread_values_and_clear_caches(&mut self) -> Result<(), Error> { + let config = &self.resolved; + let hex_len = util::check_lenient(util::parse_core_abbrev(config, self.object_hash), self.lenient_config)?; use util::config_bool; - let ignore_case = config_bool(&config, "core.ignoreCase", false, self.lenient_config)?; - let object_kind_hint = util::disambiguate_hint(&config); + let ignore_case = config_bool(config, "core.ignoreCase", false, self.lenient_config)?; + let object_kind_hint = util::disambiguate_hint(config); + let reflog = util::query_refupdates(config, self.lenient_config)?; + + self.hex_len = hex_len; + self.ignore_case = ignore_case; + self.object_kind_hint = object_kind_hint; + self.reflog = reflog; self.personas = Default::default(); self.url_rewrite = Default::default(); + self.diff_algorithm = Default::default(); #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))] { self.url_scheme = Default::default(); } - self.resolved = config; - self.hex_len = hex_len; - self.ignore_case = ignore_case; - self.object_kind_hint = object_kind_hint; - Ok(()) } } diff --git a/git-repository/src/config/cache/util.rs b/git-repository/src/config/cache/util.rs index 0442f3d4f4a..9f618601a34 100644 --- a/git-repository/src/config/cache/util.rs +++ b/git-repository/src/config/cache/util.rs @@ -41,17 +41,27 @@ pub(crate) fn config_bool( } } -pub(crate) fn query_refupdates(config: &git_config::File<'static>) -> Option { - config.string("core", None, "logallrefupdates").map(|val| { - (val.eq_ignore_ascii_case(b"always")) - .then(|| git_ref::store::WriteReflog::Always) - .or_else(|| { - git_config::Boolean::try_from(val) - .ok() - .and_then(|b| b.is_true().then(|| git_ref::store::WriteReflog::Normal)) - }) - .unwrap_or(git_ref::store::WriteReflog::Disable) - }) +pub(crate) fn query_refupdates( + config: &git_config::File<'static>, + lenient_config: bool, +) -> Result, Error> { + match config + .boolean("core", None, "logAllRefUpdates") + .and_then(|b| b.ok()) + .map(|b| { + b.then(|| git_ref::store::WriteReflog::Normal) + .unwrap_or(git_ref::store::WriteReflog::Disable) + }) { + Some(val) => Ok(Some(val)), + None => match config.string("core", None, "logAllRefUpdates") { + Some(val) if val.eq_ignore_ascii_case(b"always") => Ok(Some(git_ref::store::WriteReflog::Always)), + Some(_val) if lenient_config => Ok(None), + Some(val) => Err(Error::LogAllRefUpdates { + value: val.into_owned(), + }), + None => Ok(None), + }, + } } pub(crate) fn check_lenient(v: Result, E>, lenient: bool) -> Result, E> { @@ -62,6 +72,14 @@ pub(crate) fn check_lenient(v: Result, E>, lenient: bool) -> Res } } +pub(crate) fn check_lenient_default(v: Result, lenient: bool, default: impl FnOnce() -> T) -> Result { + match v { + Ok(v) => Ok(v), + Err(_) if lenient => Ok(default()), + Err(err) => Err(err), + } +} + pub(crate) fn parse_core_abbrev( config: &git_config::File<'static>, object_hash: git_hash::Kind, diff --git a/git-repository/src/config/mod.rs b/git-repository/src/config/mod.rs index bdfe59a62b0..7be534a791b 100644 --- a/git-repository/src/config/mod.rs +++ b/git-repository/src/config/mod.rs @@ -5,7 +5,10 @@ use crate::{bstr::BString, remote, repository::identity, revision::spec, Reposit pub(crate) mod cache; mod snapshot; -pub use snapshot::{apply_cli_overrides, credential_helpers}; +pub use snapshot::credential_helpers; + +/// +pub mod overrides; /// A platform to access configuration values as read from disk. 
/// @@ -41,6 +44,8 @@ pub(crate) mod section { } /// The error returned when failing to initialize the repository configuration. +/// +/// This configuration is on the critical path when opening a repository. #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { @@ -62,6 +67,44 @@ pub enum Error { DecodeBoolean { key: String, value: BString }, #[error(transparent)] PathInterpolation(#[from] git_config::path::interpolate::Error), + #[error("Configuration overrides at open or init time could not be applied.")] + ConfigOverrides(#[from] overrides::Error), + #[error("Invalid value for 'core.logAllRefUpdates': \"{value}\"")] + LogAllRefUpdates { value: BString }, +} + +/// +pub mod diff { + /// + pub mod algorithm { + use crate::bstr::BString; + + /// The error produced when obtaining `diff.algorithm`. + #[derive(Debug, thiserror::Error)] + #[allow(missing_docs)] + pub enum Error { + #[error("Unknown diff algorithm named '{name}'")] + Unknown { name: BString }, + #[error("The '{name}' algorithm is not yet implemented")] + Unimplemented { name: BString }, + } + } +} + +/// +pub mod checkout_options { + /// The error produced when collecting all information needed for checking out files into a worktree. + #[derive(Debug, thiserror::Error)] + #[allow(missing_docs)] + pub enum Error { + #[error("{key} could not be decoded")] + Configuration { + key: &'static str, + source: git_config::value::Error, + }, + #[error("Failed to interpolate the attribute file configured at `core.attributesFile`")] + AttributesFileInterpolation(#[from] git_config::path::interpolate::Error), + } } /// Utility type to keep pre-obtained configuration values, only for those required during initial setup @@ -82,12 +125,14 @@ pub(crate) struct Cache { /// The representation of `core.logallrefupdates`, or `None` if the variable wasn't set. pub reflog: Option, /// identities for later use, lazy initialization. - pub personas: OnceCell, + pub(crate) personas: OnceCell, /// A lazily loaded rewrite list for remote urls - pub url_rewrite: OnceCell, + pub(crate) url_rewrite: OnceCell, /// A lazily loaded mapping to know which url schemes to allow #[cfg(any(feature = "blocking-network-client", feature = "async-network-client"))] - pub url_scheme: OnceCell, + pub(crate) url_scheme: OnceCell, + /// The algorithm to use when diffing blobs + pub(crate) diff_algorithm: OnceCell, /// The config section filter from the options used to initialize this instance. Keep these in sync! filter_config_section: fn(&git_config::file::Metadata) -> bool, /// The object kind to pick if a prefix is ambiguous. diff --git a/git-repository/src/config/overrides.rs b/git-repository/src/config/overrides.rs new file mode 100644 index 00000000000..888cbad7be8 --- /dev/null +++ b/git-repository/src/config/overrides.rs @@ -0,0 +1,43 @@ +use crate::bstr::{BStr, BString, ByteSlice}; +use std::convert::TryFrom; + +/// The error returned by [SnapshotMut::apply_cli_overrides()][crate::config::SnapshotMut::apply_cli_overrides()]. +#[derive(Debug, thiserror::Error)] +#[allow(missing_docs)] +pub enum Error { + #[error("{input:?} is not a valid configuration key. 
Examples are 'core.abbrev' or 'remote.origin.url'")] + InvalidKey { input: BString }, + #[error("Key {key:?} could not be parsed")] + SectionKey { + key: BString, + source: git_config::parse::section::key::Error, + }, + #[error(transparent)] + SectionHeader(#[from] git_config::parse::section::header::Error), +} + +pub(crate) fn apply( + config: &mut git_config::File<'static>, + values: impl IntoIterator>, + source: git_config::Source, +) -> Result<(), Error> { + let mut file = git_config::File::new(git_config::file::Metadata::from(source)); + for key_value in values { + let key_value = key_value.as_ref(); + let mut tokens = key_value.splitn(2, |b| *b == b'=').map(|v| v.trim()); + let key = tokens.next().expect("always one value").as_bstr(); + let value = tokens.next(); + let key = git_config::parse::key(key.to_str().map_err(|_| Error::InvalidKey { input: key.into() })?) + .ok_or_else(|| Error::InvalidKey { input: key.into() })?; + let mut section = file.section_mut_or_create_new(key.section_name, key.subsection_name)?; + section.push( + git_config::parse::section::Key::try_from(key.value_name.to_owned()).map_err(|err| Error::SectionKey { + source: err, + key: key.value_name.into(), + })?, + value.map(|v| v.as_bstr()), + ); + } + config.append(file); + Ok(()) +} diff --git a/git-repository/src/config/snapshot/access.rs b/git-repository/src/config/snapshot/access.rs index d177bb024b9..3f4e268b6dd 100644 --- a/git-repository/src/config/snapshot/access.rs +++ b/git-repository/src/config/snapshot/access.rs @@ -4,7 +4,7 @@ use git_features::threading::OwnShared; use crate::{ bstr::BStr, - config::{cache::interpolate_context, CommitAutoRollback, Snapshot, SnapshotMut}, + config::{CommitAutoRollback, Snapshot, SnapshotMut}, }; /// Access configuration values, frozen in time, using a `key` which is a `.` separated string of up to @@ -70,16 +70,9 @@ impl<'repo> Snapshot<'repo> { key: &str, ) -> Option, git_config::path::interpolate::Error>> { let key = git_config::parse::key(key)?; - let path = self.repo.config.resolved.path_filter( - key.section_name, - key.subsection_name, - key.value_name, - &mut self.repo.filter_config_section(), - )?; - - let install_dir = self.repo.install_dir().ok(); - let home = self.repo.config.home_dir(); - Some(path.interpolate(interpolate_context(install_dir.as_deref(), home.as_deref()))) + self.repo + .config + .trusted_file_path(key.section_name, key.subsection_name, key.value_name) } } @@ -95,6 +88,15 @@ impl<'repo> Snapshot<'repo> { /// Utilities impl<'repo> SnapshotMut<'repo> { + /// Apply configuration values of the form `core.abbrev=5` or `remote.origin.url = foo` or `core.bool-implicit-true` + /// to the repository configuration, marked with [source CLI][git_config::Source::Cli]. + pub fn apply_cli_overrides( + &mut self, + values: impl IntoIterator>, + ) -> Result<&mut Self, crate::config::overrides::Error> { + crate::config::overrides::apply(&mut self.config, values, git_config::Source::Cli)?; + Ok(self) + } /// Apply all changes made to this instance. 
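A sketch of the relocated override machinery from the caller's perspective, assuming `Repository::config_snapshot_mut()` as the entry point and that changes are applied when the snapshot is committed or dropped:

```rust
use git_repository::Repository;

/// Apply `git -c` style overrides such as `user.name=Jane` to the repository's
/// in-memory configuration; they are recorded with `git_config::Source::Cli`.
fn apply_overrides(
    repo: &mut Repository,
    overrides: &[&str],
) -> Result<(), git_repository::config::overrides::Error> {
    let mut snapshot = repo.config_snapshot_mut();
    snapshot.apply_cli_overrides(overrides.iter().copied())?;
    Ok(())
}
```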
/// /// Note that this would also happen once this instance is dropped, but using this method may be more intuitive and won't squelch errors @@ -108,8 +110,7 @@ impl<'repo> SnapshotMut<'repo> { &mut self, repo: &'repo mut crate::Repository, ) -> Result<&'repo mut crate::Repository, crate::config::Error> { - repo.config - .reread_values_and_clear_caches(std::mem::take(&mut self.config).into())?; + repo.reread_values_and_clear_caches_replacing_config(std::mem::take(&mut self.config).into())?; Ok(repo) } @@ -143,8 +144,7 @@ impl<'repo> CommitAutoRollback<'repo> { &mut self, repo: &'repo mut crate::Repository, ) -> Result<&'repo mut crate::Repository, crate::config::Error> { - repo.config - .reread_values_and_clear_caches(OwnShared::clone(&self.prev_config))?; + repo.reread_values_and_clear_caches_replacing_config(OwnShared::clone(&self.prev_config))?; Ok(repo) } } diff --git a/git-repository/src/config/snapshot/apply_cli_overrides.rs b/git-repository/src/config/snapshot/apply_cli_overrides.rs index 91aa1c54c8b..b81d84edfbf 100644 --- a/git-repository/src/config/snapshot/apply_cli_overrides.rs +++ b/git-repository/src/config/snapshot/apply_cli_overrides.rs @@ -5,48 +5,4 @@ use crate::{ config::SnapshotMut, }; -/// The error returned by [SnapshotMut::apply_cli_overrides()][crate::config::SnapshotMut::apply_cli_overrides()]. -#[derive(Debug, thiserror::Error)] -#[allow(missing_docs)] -pub enum Error { - #[error("{input:?} is not a valid configuration key. Examples are 'core.abbrev' or 'remote.origin.url'")] - InvalidKey { input: BString }, - #[error("Key {key:?} could not be parsed")] - SectionKey { - key: BString, - source: git_config::parse::section::key::Error, - }, - #[error(transparent)] - SectionHeader(#[from] git_config::parse::section::header::Error), -} - -impl SnapshotMut<'_> { - /// Apply configuration values of the form `core.abbrev=5` or `remote.origin.url = foo` or `core.bool-implicit-true` - /// to the repository configuration, marked with [source CLI][git_config::Source::Cli]. - pub fn apply_cli_overrides( - &mut self, - values: impl IntoIterator>, - ) -> Result<&mut Self, Error> { - let mut file = git_config::File::new(git_config::file::Metadata::from(git_config::Source::Cli)); - for key_value in values { - let key_value = key_value.as_ref(); - let mut tokens = key_value.splitn(2, |b| *b == b'=').map(|v| v.trim()); - let key = tokens.next().expect("always one value").as_bstr(); - let value = tokens.next(); - let key = git_config::parse::key(key.to_str().map_err(|_| Error::InvalidKey { input: key.into() })?) 
- .ok_or_else(|| Error::InvalidKey { input: key.into() })?; - let mut section = file.section_mut_or_create_new(key.section_name, key.subsection_name)?; - section.push( - git_config::parse::section::Key::try_from(key.value_name.to_owned()).map_err(|err| { - Error::SectionKey { - source: err, - key: key.value_name.into(), - } - })?, - value.map(|v| v.as_bstr()), - ); - } - self.config.append(file); - Ok(self) - } -} +impl SnapshotMut<'_> {} diff --git a/git-repository/src/config/snapshot/credential_helpers.rs b/git-repository/src/config/snapshot/credential_helpers.rs index 0414dd3757e..d6946e841c4 100644 --- a/git-repository/src/config/snapshot/credential_helpers.rs +++ b/git-repository/src/config/snapshot/credential_helpers.rs @@ -19,6 +19,8 @@ mod error { section: BString, source: git_config::value::Error, }, + #[error("core.askpass could not be read")] + CoreAskpass(#[from] git_config::path::interpolate::Error), } } @@ -122,8 +124,14 @@ impl Snapshot<'_> { let allow_git_env = self.repo.options.permissions.env.git_prefix.is_allowed(); let allow_ssh_env = self.repo.options.permissions.env.ssh_prefix.is_allowed(); - let prompt_options = - git_prompt::Options::default().apply_environment(allow_git_env, allow_ssh_env, allow_git_env); + let prompt_options = git_prompt::Options { + askpass: self + .trusted_path("core.askpass") + .transpose()? + .map(|c| Cow::Owned(c.into_owned())), + ..Default::default() + } + .apply_environment(allow_git_env, allow_ssh_env, allow_git_env); Ok(( git_credentials::helper::Cascade { programs, diff --git a/git-repository/src/config/snapshot/mod.rs b/git-repository/src/config/snapshot/mod.rs index 2f44ef851f9..80ec6f94889 100644 --- a/git-repository/src/config/snapshot/mod.rs +++ b/git-repository/src/config/snapshot/mod.rs @@ -1,8 +1,5 @@ mod _impls; mod access; -/// -pub mod apply_cli_overrides; - /// pub mod credential_helpers; diff --git a/git-repository/src/create.rs b/git-repository/src/create.rs index 14050eb79e8..95f490406b7 100644 --- a/git-repository/src/create.rs +++ b/git-repository/src/create.rs @@ -24,6 +24,15 @@ pub enum Error { CreateDirectory { source: std::io::Error, path: PathBuf }, } +/// The kind of repository to create. +#[derive(Debug, Copy, Clone)] +pub enum Kind { + /// An empty repository with a `.git` folder, setup to contain files in its worktree. + WithWorktree, + /// A bare repository without a worktree. + Bare, +} + const TPL_INFO_EXCLUDE: &[u8] = include_bytes!("assets/baseline-init/info/exclude"); const TPL_HOOKS_APPLYPATCH_MSG: &[u8] = include_bytes!("assets/baseline-init/hooks/applypatch-msg.sample"); const TPL_HOOKS_COMMIT_MSG: &[u8] = include_bytes!("assets/baseline-init/hooks/commit-msg.sample"); @@ -98,11 +107,12 @@ fn create_dir(p: &Path) -> Result<(), Error> { } /// Options for use in [`into()`]; -#[derive(Copy, Clone)] +#[derive(Copy, Clone, Default)] pub struct Options { - /// If true, the repository will be a bare repository without a worktree. - pub bare: bool, - + /// If true, and the kind of repository to create has a worktree, then the destination directory must be empty. + /// + /// By default repos with worktree can be initialized into a non-empty repository as long as there is no `.git` directory. + pub destination_must_be_empty: bool, /// If set, use these filesystem capabilities to populate the respective git-config fields. /// If `None`, the directory will be probed. 
pub fs_capabilities: Option, @@ -110,24 +120,40 @@ pub struct Options { /// Create a new `.git` repository of `kind` within the possibly non-existing `directory` /// and return its path. +/// Note that this is a simple template-based initialization routine which should be accompanied with additional corrections +/// to respect git configuration, which is accomplished by [its callers][crate::ThreadSafeRepository::init_opts()] +/// that return a [Repository][crate::Repository]. pub fn into( directory: impl Into, - Options { bare, fs_capabilities }: Options, + kind: Kind, + Options { + fs_capabilities, + destination_must_be_empty, + }: Options, ) -> Result { let mut dot_git = directory.into(); - - if bare { - if fs::read_dir(&dot_git) + let bare = matches!(kind, Kind::Bare); + + if bare || destination_must_be_empty { + let num_entries_in_dot_git = fs::read_dir(&dot_git) + .or_else(|err| { + if err.kind() == std::io::ErrorKind::NotFound { + fs::create_dir(&dot_git).and_then(|_| fs::read_dir(&dot_git)) + } else { + Err(err) + } + }) .map_err(|err| Error::IoOpen { source: err, path: dot_git.clone(), })? - .count() - != 0 - { + .count(); + if num_entries_in_dot_git != 0 { return Err(Error::DirectoryNotEmpty { path: dot_git }); } - } else { + } + + if !bare { dot_git.push(DOT_GIT_DIR); if dot_git.is_dir() { diff --git a/git-repository/src/head/mod.rs b/git-repository/src/head/mod.rs index 349e8b93a6c..048a98547de 100644 --- a/git-repository/src/head/mod.rs +++ b/git-repository/src/head/mod.rs @@ -54,11 +54,19 @@ impl<'repo> Head<'repo> { Kind::Detached { .. } => return None, }) } + /// Returns true if this instance is detached, and points to an object directly. pub fn is_detached(&self) -> bool { matches!(self.kind, Kind::Detached { .. }) } + /// Returns true if this instance is not yet born, hence it points to a ref that doesn't exist yet. + /// + /// This is the case in a newly initialized repository. + pub fn is_unborn(&self) -> bool { + matches!(self.kind, Kind::Unborn(_)) + } + // TODO: tests /// Returns the id the head points to, which isn't possible on unborn heads. pub fn id(&self) -> Option> { diff --git a/git-repository/src/init.rs b/git-repository/src/init.rs new file mode 100644 index 00000000000..9553f7993a7 --- /dev/null +++ b/git-repository/src/init.rs @@ -0,0 +1,97 @@ +use crate::bstr::BString; +use git_ref::store::WriteReflog; +use git_ref::transaction::{PreviousValue, RefEdit}; +use git_ref::{FullName, Target}; +use std::borrow::Cow; +use std::convert::TryInto; +use std::path::Path; + +use crate::ThreadSafeRepository; + +/// The name of the branch to use if non is configured via git configuration. +/// +/// # Deviation +/// +/// We use `main` instead of `master`. +pub const DEFAULT_BRANCH_NAME: &str = "main"; + +/// The error returned by [`crate::init()`]. +#[derive(Debug, thiserror::Error)] +#[allow(missing_docs)] +pub enum Error { + #[error(transparent)] + Init(#[from] crate::create::Error), + #[error(transparent)] + Open(#[from] crate::open::Error), + #[error("Invalid default branch name: {name:?}")] + InvalidBranchName { + name: BString, + source: git_validate::refname::Error, + }, + #[error("Could not edit HEAD reference with new default name")] + EditHeadForDefaultBranch(#[from] crate::reference::edit::Error), +} + +impl ThreadSafeRepository { + /// Create a repository with work-tree within `directory`, creating intermediate directories as needed. 
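The new `Head::is_unborn()` accessor above pairs with the clone code's handling of empty remotes; a small sketch:

```rust
use git_repository::Repository;

/// A freshly cloned empty remote leaves HEAD unborn, i.e. pointing to a branch
/// that doesn't exist yet.
fn has_commits(repo: &Repository) -> Result<bool, git_repository::reference::find::existing::Error> {
    Ok(!repo.head()?.is_unborn())
}
```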
+ /// + /// Fails without action if there is already a `.git` repository inside of `directory`, but + /// won't mind if the `directory` otherwise is non-empty. + pub fn init( + directory: impl AsRef, + kind: crate::create::Kind, + options: crate::create::Options, + ) -> Result { + use git_sec::trust::DefaultForLevel; + let open_options = crate::open::Options::default_for_level(git_sec::Trust::Full); + Self::init_opts(directory, kind, options, open_options) + } + + /// Similar to [`init`][Self::init()], but allows to determine how exactly to open the newly created repository. + /// + /// # Deviation + /// + /// Instead of naming the default branch `master`, we name it `main` unless configured explicitly using the `init.defaultBranch` + /// configuration key. + pub fn init_opts( + directory: impl AsRef, + kind: crate::create::Kind, + create_options: crate::create::Options, + mut open_options: crate::open::Options, + ) -> Result { + let path = crate::create::into(directory.as_ref(), kind, create_options)?; + let (git_dir, worktree_dir) = path.into_repository_and_work_tree_directories(); + open_options.git_dir_trust = Some(git_sec::Trust::Full); + let repo = ThreadSafeRepository::open_from_paths(git_dir, worktree_dir, open_options)?; + + let branch_name = repo + .config + .resolved + .string("init", None, "defaultBranch") + .unwrap_or_else(|| Cow::Borrowed(DEFAULT_BRANCH_NAME.into())); + if branch_name.as_ref() != DEFAULT_BRANCH_NAME { + let sym_ref: FullName = + format!("refs/heads/{branch_name}") + .try_into() + .map_err(|err| Error::InvalidBranchName { + name: branch_name.into_owned(), + source: err, + })?; + let mut repo = repo.to_thread_local(); + let prev_write_reflog = repo.refs.write_reflog; + repo.refs.write_reflog = WriteReflog::Disable; + repo.edit_reference(RefEdit { + change: git_ref::transaction::Change::Update { + log: Default::default(), + expected: PreviousValue::Any, + new: Target::Symbolic(sym_ref), + }, + name: "HEAD".try_into().expect("valid"), + deref: false, + })?; + repo.refs.write_reflog = prev_write_reflog; + } + + Ok(repo) + } +} diff --git a/git-repository/src/lib.rs b/git-repository/src/lib.rs index 1ac52181064..a8f332957a1 100644 --- a/git-repository/src/lib.rs +++ b/git-repository/src/lib.rs @@ -200,47 +200,31 @@ pub fn discover(directory: impl AsRef) -> Result) -> Result { - ThreadSafeRepository::init( - directory, - create::Options { - bare: false, - fs_capabilities: None, - }, - ) - .map(Into::into) + ThreadSafeRepository::init(directory, create::Kind::WithWorktree, create::Options::default()).map(Into::into) } /// See [ThreadSafeRepository::init()], but returns a [`Repository`] instead. pub fn init_bare(directory: impl AsRef) -> Result { - ThreadSafeRepository::init( - directory, - create::Options { - bare: true, - fs_capabilities: None, - }, - ) - .map(Into::into) + ThreadSafeRepository::init(directory, create::Kind::Bare, create::Options::default()).map(Into::into) } /// Create a platform for configuring a bare clone from `url` to the local `path`, using default options for opening it (but /// amended with using configuration from the git installation to ensure all authentication options are honored). /// -/// See [`clone::Prepare::new()] for a function to take full control over all options. +/// See [`clone::PrepareFetch::new()] for a function to take full control over all options. 
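The `create::Kind` split shows up in the public API as sketched below (paths are placeholders, and `init.defaultBranch` is honored as described above):

```rust
use git_repository::create;

fn init_examples() -> Result<(), Box<dyn std::error::Error>> {
    // The convenience functions pick the kind for the caller ...
    let _worktree_repo = git_repository::init("/tmp/example")?;
    let _bare_repo = git_repository::init_bare("/tmp/example-bare.git")?;

    // ... while the lower-level entry point spells it out explicitly.
    let _explicit = git_repository::ThreadSafeRepository::init(
        "/tmp/example-explicit",
        create::Kind::WithWorktree,
        create::Options::default(),
    )?
    .to_thread_local();
    Ok(())
}
```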
pub fn prepare_clone_bare( url: Url, path: impl AsRef, -) -> Result +) -> Result where Url: std::convert::TryInto, git_url::parse::Error: From, { - clone::Prepare::new( + clone::PrepareFetch::new( url, path, - create::Options { - bare: true, - fs_capabilities: None, - }, + create::Kind::Bare, + create::Options::default(), open_opts_with_git_binary_config(), ) } @@ -248,22 +232,17 @@ where /// Create a platform for configuring a clone with main working tree from `url` to the local `path`, using default options for opening it /// (but amended with using configuration from the git installation to ensure all authentication options are honored). /// -/// See [`clone::Prepare::new()] for a function to take full control over all options. -pub fn prepare_clone( - url: Url, - path: impl AsRef, -) -> Result +/// See [`clone::PrepareFetch::new()] for a function to take full control over all options. +pub fn prepare_clone(url: Url, path: impl AsRef) -> Result where Url: std::convert::TryInto, git_url::parse::Error: From, { - clone::Prepare::new( + clone::PrepareFetch::new( url, path, - create::Options { - bare: false, - fs_capabilities: None, - }, + crate::create::Kind::WithWorktree, + create::Options::default(), open_opts_with_git_binary_config(), ) } @@ -323,45 +302,7 @@ pub mod revision; pub mod remote; /// -pub mod init { - use std::path::Path; - - use crate::ThreadSafeRepository; - - /// The error returned by [`crate::init()`]. - #[derive(Debug, thiserror::Error)] - #[allow(missing_docs)] - pub enum Error { - #[error(transparent)] - Init(#[from] crate::create::Error), - #[error(transparent)] - Open(#[from] crate::open::Error), - } - - impl ThreadSafeRepository { - /// Create a repository with work-tree within `directory`, creating intermediate directories as needed. - /// - /// Fails without action if there is already a `.git` repository inside of `directory`, but - /// won't mind if the `directory` otherwise is non-empty. - pub fn init(directory: impl AsRef, options: crate::create::Options) -> Result { - use git_sec::trust::DefaultForLevel; - let open_options = crate::open::Options::default_for_level(git_sec::Trust::Full); - Self::init_opts(directory, options, open_options) - } - - /// Similar to [`init`][Self::init()], but allows to determine how exactly to open the newly created repository. - pub fn init_opts( - directory: impl AsRef, - create_options: crate::create::Options, - mut open_options: crate::open::Options, - ) -> Result { - let path = crate::create::into(directory.as_ref(), create_options)?; - let (git_dir, worktree_dir) = path.into_repository_and_work_tree_directories(); - open_options.git_dir_trust = Some(git_sec::Trust::Full); - ThreadSafeRepository::open_from_paths(git_dir, worktree_dir, open_options).map_err(Into::into) - } - } -} +pub mod init; /// Not to be confused with 'status'. pub mod state { diff --git a/git-repository/src/object/tree/diff.rs b/git-repository/src/object/tree/diff.rs deleted file mode 100644 index fd391f39475..00000000000 --- a/git-repository/src/object/tree/diff.rs +++ /dev/null @@ -1,317 +0,0 @@ -use std::collections::VecDeque; - -use git_object::TreeRefIter; -use git_odb::FindExt; - -use crate::{ - bstr::{BStr, BString, ByteSlice, ByteVec}, - ext::ObjectIdExt, - Repository, Tree, -}; - -/// The error return by methods on the [diff platform][Platform]. 
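Returning to the clone entry points above: a bare, fetch-only clone is the same flow minus the checkout step. A sketch, again assuming the `blocking-network-client` feature:

```rust
use std::{path::Path, sync::atomic::AtomicBool};

use git_repository::{progress, Repository};

fn clone_bare(url: &str, destination: &Path) -> Result<Repository, Box<dyn std::error::Error>> {
    let should_interrupt = AtomicBool::new(false);
    let mut prepare = git_repository::prepare_clone_bare(url, destination)?;
    // Without a worktree, fetching the pack and writing refs plus configuration is all there is to do.
    let (repo, _fetch_outcome) = prepare.fetch_only(progress::Discard, &should_interrupt)?;
    Ok(repo)
}
```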
-#[derive(Debug, thiserror::Error)] -#[allow(missing_docs)] -pub enum Error { - #[error(transparent)] - Diff(#[from] git_diff::tree::changes::Error), - #[error("The user-provided callback failed")] - ForEach(#[source] Box), -} - -/// Returned by the `for_each` function to control flow. -#[derive(Clone, Copy, PartialOrd, PartialEq, Ord, Eq, Hash)] -pub enum Action { - /// Continue the traversal of changes. - Continue, - /// Stop the traversal of changes and stop calling this function. - Cancel, -} - -impl Default for Action { - fn default() -> Self { - Action::Continue - } -} - -/// Represents any possible change in order to turn one tree into another. -#[derive(Debug, Clone, Copy)] -pub struct Change<'a, 'old, 'new> { - /// The location of the file or directory described by `event`, if tracking was enabled. - /// - /// Otherwise this value is always an empty path. - pub location: &'a BStr, - /// The diff event itself to provide information about what would need to change. - pub event: change::Event<'old, 'new>, -} - -/// -pub mod change { - use git_object::tree::EntryMode; - - use crate::{bstr::ByteSlice, Id}; - - /// An event emitted when finding differences between two trees. - #[derive(Debug, Clone, Copy)] - pub enum Event<'old, 'new> { - /// An entry was added, like the addition of a file or directory. - Addition { - /// The mode of the added entry. - entry_mode: git_object::tree::EntryMode, - /// The object id of the added entry. - id: Id<'new>, - }, - /// An entry was deleted, like the deletion of a file or directory. - Deletion { - /// The mode of the deleted entry. - entry_mode: git_object::tree::EntryMode, - /// The object id of the deleted entry. - id: Id<'old>, - }, - /// An entry was modified, e.g. changing the contents of a file adjusts its object id and turning - /// a file into a symbolic link adjusts its mode. - Modification { - /// The mode of the entry before the modification. - previous_entry_mode: git_object::tree::EntryMode, - /// The object id of the entry before the modification. - previous_id: Id<'old>, - - /// The mode of the entry after the modification. - entry_mode: git_object::tree::EntryMode, - /// The object id after the modification. - id: Id<'new>, - }, - } - - /// A platform to keep temporary information to perform line diffs. - pub struct DiffPlatform<'old, 'new> { - old: crate::Object<'old>, - new: crate::Object<'new>, - } - - impl<'old, 'new> Event<'old, 'new> { - /// Produce a platform for performing a line-diff, or `None` if this is not a [`Modification`][Event::Modification] - /// or one of the entries to compare is not a blob. - pub fn diff(&self) -> Option, crate::object::find::existing::Error>> { - match self { - Event::Modification { - previous_entry_mode: EntryMode::BlobExecutable | EntryMode::Blob, - previous_id, - entry_mode: EntryMode::BlobExecutable | EntryMode::Blob, - id, - } => match previous_id.object().and_then(|old| id.object().map(|new| (old, new))) { - Ok((old, new)) => Some(Ok(DiffPlatform { old, new })), - Err(err) => Some(Err(err)), - }, - _ => None, - } - } - } - - impl<'old, 'new> DiffPlatform<'old, 'new> { - /// Perform a diff on lines between the old and the new version of a blob. - /// Note that the [`Sink`][git_diff::text::imara::Sink] implementation is - /// what makes the diff usable and relies heavily on what the caller requires, as created by `make_sink`. 
- pub fn lines(&self, algorithm: git_diff::text::Algorithm, make_sink: FnS) -> S::Out - where - FnS: for<'a> FnOnce(&git_diff::text::imara::intern::InternedInput<&'a [u8]>) -> S, - S: git_diff::text::imara::Sink, - { - git_diff::text::with( - self.old.data.as_bstr(), - self.new.data.as_bstr(), - algorithm, - // TODO: make use of `core.eol` and/or filters to do line-counting correctly. It's probably - // OK to just know how these objects are saved to know what constitutes a line. - git_diff::text::imara::intern::InternedInput::new, - make_sink, - ) - .1 - } - } -} - -/// Diffing -impl<'repo> Tree<'repo> { - /// Return a platform to see the changes needed to create other trees, for instance. - /// - /// # Performance - /// - /// It's highly recommended to set an object cache to avoid extracting the same object multiple times. - pub fn changes<'a>(&'a self) -> Platform<'a, 'repo> { - Platform { - state: Default::default(), - lhs: self, - tracking: None, - } - } -} - -/// The diffing platform returned by [`Tree::changes()`]. -#[derive(Clone)] -pub struct Platform<'a, 'repo> { - state: git_diff::tree::State, - lhs: &'a Tree<'repo>, - tracking: Option, -} - -#[derive(Clone, Copy)] -enum Tracking { - FileName, - Path, -} - -/// Configuration -impl<'a, 'repo> Platform<'a, 'repo> { - /// Keep track of file-names, which makes the [`location`][Change::location] field usable with the filename of the changed item. - pub fn track_filename(&mut self) -> &mut Self { - self.tracking = Some(Tracking::FileName); - self - } - - /// Keep track of the entire path of a change, relative to the repository. - /// - /// This makes the [`location`][Change::location] field usable. - pub fn track_path(&mut self) -> &mut Self { - self.tracking = Some(Tracking::Path); - self - } -} - -/// Add the item to compare to. -impl<'a, 'old> Platform<'a, 'old> { - /// Call `for_each` repeatedly with all changes that are needed to convert the source of the diff to the tree to `other`. 
- pub fn for_each_to_obtain_tree<'new, E>( - &mut self, - other: &Tree<'new>, - for_each: impl FnMut(Change<'_, 'old, 'new>) -> Result, - ) -> Result<(), Error> - where - E: std::error::Error + Sync + Send + 'static, - { - let repo = self.lhs.repo; - let mut delegate = Delegate { - repo: self.lhs.repo, - other_repo: other.repo, - tracking: self.tracking, - location: BString::default(), - path_deque: Default::default(), - visit: for_each, - err: None, - }; - git_diff::tree::Changes::from(TreeRefIter::from_bytes(&self.lhs.data)).needed_to_obtain( - TreeRefIter::from_bytes(&other.data), - &mut self.state, - |oid, buf| repo.objects.find_tree_iter(oid, buf), - &mut delegate, - )?; - match delegate.err { - Some(err) => Err(Error::ForEach(Box::new(err))), - None => Ok(()), - } - } -} - -struct Delegate<'old, 'new, VisitFn, E> { - repo: &'old Repository, - other_repo: &'new Repository, - tracking: Option, - location: BString, - path_deque: VecDeque, - visit: VisitFn, - err: Option, -} - -impl Delegate<'_, '_, A, B> { - fn pop_element(&mut self) { - if let Some(pos) = self.location.rfind_byte(b'/') { - self.location.resize(pos, 0); - } else { - self.location.clear(); - } - } - - fn push_element(&mut self, name: &BStr) { - if !self.location.is_empty() { - self.location.push(b'/'); - } - self.location.push_str(name); - } -} - -impl<'old, 'new, VisitFn, E> git_diff::tree::Visit for Delegate<'old, 'new, VisitFn, E> -where - VisitFn: for<'delegate> FnMut(Change<'delegate, 'old, 'new>) -> Result, - E: std::error::Error + Sync + Send + 'static, -{ - fn pop_front_tracked_path_and_set_current(&mut self) { - if let Some(Tracking::Path) = self.tracking { - self.location = self - .path_deque - .pop_front() - .expect("every call is matched with push_tracked_path_component"); - } - } - - fn push_back_tracked_path_component(&mut self, component: &BStr) { - if let Some(Tracking::Path) = self.tracking { - self.push_element(component); - self.path_deque.push_back(self.location.clone()); - } - } - - fn push_path_component(&mut self, component: &BStr) { - match self.tracking { - Some(Tracking::FileName) => { - self.location.clear(); - self.location.push_str(component); - } - Some(Tracking::Path) => { - self.push_element(component); - } - None => {} - } - } - - fn pop_path_component(&mut self) { - if let Some(Tracking::Path) = self.tracking { - self.pop_element(); - } - } - - fn visit(&mut self, change: git_diff::tree::visit::Change) -> git_diff::tree::visit::Action { - use git_diff::tree::visit::Change::*; - let event = match change { - Addition { entry_mode, oid } => change::Event::Addition { - entry_mode, - id: oid.attach(self.other_repo), - }, - Deletion { entry_mode, oid } => change::Event::Deletion { - entry_mode, - id: oid.attach(self.repo), - }, - Modification { - previous_entry_mode, - previous_oid, - entry_mode, - oid, - } => change::Event::Modification { - previous_entry_mode, - entry_mode, - previous_id: previous_oid.attach(self.repo), - id: oid.attach(self.other_repo), - }, - }; - match (self.visit)(Change { - event, - location: self.location.as_ref(), - }) { - Ok(Action::Cancel) => git_diff::tree::visit::Action::Cancel, - Ok(Action::Continue) => git_diff::tree::visit::Action::Continue, - Err(err) => { - self.err = Some(err); - git_diff::tree::visit::Action::Cancel - } - } - } -} diff --git a/git-repository/src/object/tree/diff/change.rs b/git-repository/src/object/tree/diff/change.rs new file mode 100644 index 00000000000..49b72fea5a6 --- /dev/null +++ b/git-repository/src/object/tree/diff/change.rs @@ 
-0,0 +1,116 @@ +use git_object::tree::EntryMode; + +use crate::{bstr::ByteSlice, Id, Repository}; + +/// An event emitted when finding differences between two trees. +#[derive(Debug, Clone, Copy)] +pub enum Event<'old, 'new> { + /// An entry was added, like the addition of a file or directory. + Addition { + /// The mode of the added entry. + entry_mode: git_object::tree::EntryMode, + /// The object id of the added entry. + id: Id<'new>, + }, + /// An entry was deleted, like the deletion of a file or directory. + Deletion { + /// The mode of the deleted entry. + entry_mode: git_object::tree::EntryMode, + /// The object id of the deleted entry. + id: Id<'old>, + }, + /// An entry was modified, e.g. changing the contents of a file adjusts its object id and turning + /// a file into a symbolic link adjusts its mode. + Modification { + /// The mode of the entry before the modification. + previous_entry_mode: git_object::tree::EntryMode, + /// The object id of the entry before the modification. + previous_id: Id<'old>, + + /// The mode of the entry after the modification. + entry_mode: git_object::tree::EntryMode, + /// The object id after the modification. + id: Id<'new>, + }, +} + +/// A platform to keep temporary information to perform line diffs. +pub struct DiffPlatform<'old, 'new> { + old: crate::Object<'old>, + new: crate::Object<'new>, + algo: git_diff::text::Algorithm, +} + +impl<'old, 'new> Event<'old, 'new> { + fn repo(&self) -> &Repository { + match self { + Event::Addition { id, .. } => id.repo, + Event::Deletion { id, .. } => id.repo, + Event::Modification { id, .. } => id.repo, + } + } +} + +/// +pub mod event { + /// + pub mod diff { + /// The error returned by [`Event::diff()`][super::super::Event::diff()]. + #[derive(Debug, thiserror::Error)] + #[allow(missing_docs)] + pub enum Error { + #[error("Could not find the previous object to diff against")] + FindPrevious(#[from] crate::object::find::existing::Error), + #[error("Could not obtain diff algorithm from configuration")] + DiffAlgorithm(#[from] crate::config::diff::algorithm::Error), + } + } +} + +impl<'old, 'new> Event<'old, 'new> { + /// Produce a platform for performing a line-diff, or `None` if this is not a [`Modification`][Event::Modification] + /// or one of the entries to compare is not a blob. + pub fn diff(&self) -> Option, event::diff::Error>> { + match self { + Event::Modification { + previous_entry_mode: EntryMode::BlobExecutable | EntryMode::Blob, + previous_id, + entry_mode: EntryMode::BlobExecutable | EntryMode::Blob, + id, + } => match previous_id.object().and_then(|old| id.object().map(|new| (old, new))) { + Ok((old, new)) => { + let algo = match self.repo().config.diff_algorithm() { + Ok(algo) => algo, + Err(err) => return Some(Err(err.into())), + }; + Some(Ok(DiffPlatform { old, new, algo })) + } + Err(err) => Some(Err(err.into())), + }, + _ => None, + } + } +} + +impl<'old, 'new> DiffPlatform<'old, 'new> { + /// Perform a diff on lines between the old and the new version of a blob. + /// The algorithm is determined by the `diff.algorithm` configuration. + /// Note that the [`Sink`][git_diff::text::imara::Sink] implementation is + /// what makes the diff usable and relies heavily on what the caller requires, as created by `make_sink`. 
+ pub fn lines(&self, new_sink: FnS) -> S::Out + where + FnS: for<'a> FnOnce(&git_diff::text::imara::intern::InternedInput<&'a [u8]>) -> S, + S: git_diff::text::imara::Sink, + { + git_diff::text::with( + self.old.data.as_bstr(), + self.new.data.as_bstr(), + self.algo, + // TODO: make use of `core.eol` and/or filters to do line-counting correctly. It's probably + // OK to just know how these objects are saved to know what constitutes a line. + git_diff::text::imara::intern::InternedInput::new, + new_sink, + ) + .1 + } +} diff --git a/git-repository/src/object/tree/diff/for_each.rs b/git-repository/src/object/tree/diff/for_each.rs new file mode 100644 index 00000000000..6ece1089034 --- /dev/null +++ b/git-repository/src/object/tree/diff/for_each.rs @@ -0,0 +1,155 @@ +use super::{change, Action, Change, Platform, Tracking}; +use crate::bstr::{BStr, BString, ByteSlice, ByteVec}; +use crate::ext::ObjectIdExt; +use crate::{Repository, Tree}; +use git_object::TreeRefIter; +use git_odb::FindExt; +use std::collections::VecDeque; + +/// The error return by methods on the [diff platform][Platform]. +#[derive(Debug, thiserror::Error)] +#[allow(missing_docs)] +pub enum Error { + #[error(transparent)] + Diff(#[from] git_diff::tree::changes::Error), + #[error("The user-provided callback failed")] + ForEach(#[source] Box), +} + +/// Add the item to compare to. +impl<'a, 'old> Platform<'a, 'old> { + /// Call `for_each` repeatedly with all changes that are needed to convert the source of the diff to the tree to `other`. + pub fn for_each_to_obtain_tree<'new, E>( + &mut self, + other: &Tree<'new>, + for_each: impl FnMut(Change<'_, 'old, 'new>) -> Result, + ) -> Result<(), Error> + where + E: std::error::Error + Sync + Send + 'static, + { + let repo = self.lhs.repo; + let mut delegate = Delegate { + repo: self.lhs.repo, + other_repo: other.repo, + tracking: self.tracking, + location: BString::default(), + path_deque: Default::default(), + visit: for_each, + err: None, + }; + git_diff::tree::Changes::from(TreeRefIter::from_bytes(&self.lhs.data)).needed_to_obtain( + TreeRefIter::from_bytes(&other.data), + &mut self.state, + |oid, buf| repo.objects.find_tree_iter(oid, buf), + &mut delegate, + )?; + match delegate.err { + Some(err) => Err(Error::ForEach(Box::new(err))), + None => Ok(()), + } + } +} + +struct Delegate<'old, 'new, VisitFn, E> { + repo: &'old Repository, + other_repo: &'new Repository, + tracking: Option, + location: BString, + path_deque: VecDeque, + visit: VisitFn, + err: Option, +} + +impl Delegate<'_, '_, A, B> { + fn pop_element(&mut self) { + if let Some(pos) = self.location.rfind_byte(b'/') { + self.location.resize(pos, 0); + } else { + self.location.clear(); + } + } + + fn push_element(&mut self, name: &BStr) { + if !self.location.is_empty() { + self.location.push(b'/'); + } + self.location.push_str(name); + } +} + +impl<'old, 'new, VisitFn, E> git_diff::tree::Visit for Delegate<'old, 'new, VisitFn, E> +where + VisitFn: for<'delegate> FnMut(Change<'delegate, 'old, 'new>) -> Result, + E: std::error::Error + Sync + Send + 'static, +{ + fn pop_front_tracked_path_and_set_current(&mut self) { + if let Some(Tracking::Path) = self.tracking { + self.location = self + .path_deque + .pop_front() + .expect("every call is matched with push_tracked_path_component"); + } + } + + fn push_back_tracked_path_component(&mut self, component: &BStr) { + if let Some(Tracking::Path) = self.tracking { + self.push_element(component); + self.path_deque.push_back(self.location.clone()); + } + } + + fn 
push_path_component(&mut self, component: &BStr) { + match self.tracking { + Some(Tracking::FileName) => { + self.location.clear(); + self.location.push_str(component); + } + Some(Tracking::Path) => { + self.push_element(component); + } + None => {} + } + } + + fn pop_path_component(&mut self) { + if let Some(Tracking::Path) = self.tracking { + self.pop_element(); + } + } + + fn visit(&mut self, change: git_diff::tree::visit::Change) -> git_diff::tree::visit::Action { + use git_diff::tree::visit::Change::*; + let event = match change { + Addition { entry_mode, oid } => change::Event::Addition { + entry_mode, + id: oid.attach(self.other_repo), + }, + Deletion { entry_mode, oid } => change::Event::Deletion { + entry_mode, + id: oid.attach(self.repo), + }, + Modification { + previous_entry_mode, + previous_oid, + entry_mode, + oid, + } => change::Event::Modification { + previous_entry_mode, + entry_mode, + previous_id: previous_oid.attach(self.repo), + id: oid.attach(self.other_repo), + }, + }; + match (self.visit)(Change { + event, + location: self.location.as_ref(), + }) { + Ok(Action::Cancel) => git_diff::tree::visit::Action::Cancel, + Ok(Action::Continue) => git_diff::tree::visit::Action::Continue, + Err(err) => { + self.err = Some(err); + git_diff::tree::visit::Action::Cancel + } + } + } +} diff --git a/git-repository/src/object/tree/diff/mod.rs b/git-repository/src/object/tree/diff/mod.rs new file mode 100644 index 00000000000..4cc05c50f3e --- /dev/null +++ b/git-repository/src/object/tree/diff/mod.rs @@ -0,0 +1,80 @@ +use crate::{bstr::BStr, Tree}; + +/// Returned by the `for_each` function to control flow. +#[derive(Clone, Copy, PartialOrd, PartialEq, Ord, Eq, Hash)] +pub enum Action { + /// Continue the traversal of changes. + Continue, + /// Stop the traversal of changes and stop calling this function. + Cancel, +} + +impl Default for Action { + fn default() -> Self { + Action::Continue + } +} + +/// Represents any possible change in order to turn one tree into another. +#[derive(Debug, Clone, Copy)] +pub struct Change<'a, 'old, 'new> { + /// The location of the file or directory described by `event`, if tracking was enabled. + /// + /// Otherwise this value is always an empty path. + pub location: &'a BStr, + /// The diff event itself to provide information about what would need to change. + pub event: change::Event<'old, 'new>, +} + +/// +pub mod change; + +/// Diffing +impl<'repo> Tree<'repo> { + /// Return a platform to see the changes needed to create other trees, for instance. + /// + /// # Performance + /// + /// It's highly recommended to set an object cache to avoid extracting the same object multiple times. + pub fn changes<'a>(&'a self) -> Platform<'a, 'repo> { + Platform { + state: Default::default(), + lhs: self, + tracking: None, + } + } +} + +/// The diffing platform returned by [`Tree::changes()`]. +#[derive(Clone)] +pub struct Platform<'a, 'repo> { + state: git_diff::tree::State, + lhs: &'a Tree<'repo>, + tracking: Option, +} + +#[derive(Clone, Copy)] +enum Tracking { + FileName, + Path, +} + +/// Configuration +impl<'a, 'repo> Platform<'a, 'repo> { + /// Keep track of file-names, which makes the [`location`][Change::location] field usable with the filename of the changed item. + pub fn track_filename(&mut self) -> &mut Self { + self.tracking = Some(Tracking::FileName); + self + } + + /// Keep track of the entire path of a change, relative to the repository. + /// + /// This makes the [`location`][Change::location] field usable. 
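The configuration methods here combine with `for_each_to_obtain_tree()` (defined in `for_each.rs` above) into the usual diff loop. A minimal sketch, assuming `tree` and `other` are `Tree` values obtained from this crate and the callback itself cannot fail:

    use git_repository::object::tree::diff::Action;

    fn print_changes(
        tree: &git_repository::Tree<'_>,
        other: &git_repository::Tree<'_>,
    ) -> Result<(), Box<dyn std::error::Error>> {
        tree.changes()
            .track_path()
            .for_each_to_obtain_tree(other, |change| {
                // `location` is only populated because `track_path()` was called above
                println!("{}: {:?}", change.location, change.event);
                Ok::<_, std::convert::Infallible>(Action::Continue)
            })?;
        Ok(())
    }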
+ pub fn track_path(&mut self) -> &mut Self { + self.tracking = Some(Tracking::Path); + self + } +} + +/// +pub mod for_each; diff --git a/git-repository/src/open.rs b/git-repository/src/open.rs index 132ae8c8224..fb949bd5b42 100644 --- a/git-repository/src/open.rs +++ b/git-repository/src/open.rs @@ -2,7 +2,8 @@ use std::path::PathBuf; use git_features::threading::OwnShared; -use crate::{config, config::cache::interpolate_context, permission, Permissions, ThreadSafeRepository}; +use crate::bstr::BString; +use crate::{config, config::cache::interpolate_context, permission, Permissions, Repository, ThreadSafeRepository}; /// A way to configure the usage of replacement objects, see `git replace`. #[derive(Debug, Clone)] @@ -72,6 +73,7 @@ pub struct Options { pub(crate) lossy_config: Option, pub(crate) lenient_config: bool, pub(crate) bail_if_untrusted: bool, + pub(crate) config_overrides: Vec, } impl Default for Options { @@ -85,6 +87,7 @@ impl Default for Options { lossy_config: None, lenient_config: true, bail_if_untrusted: false, + config_overrides: Vec::new(), } } } @@ -126,6 +129,14 @@ impl Options { /// Builder methods impl Options { + /// Apply the given configuration `values` like `init.defaultBranch=special` or `core.bool-implicit-true` in memory to as early + /// as the configuration is initialized to allow affecting the repository instantiation phase, both on disk or when opening. + /// The configuration is marked with [source API][git_config::Source::Api]. + pub fn config_overrides(mut self, values: impl IntoIterator>) -> Self { + self.config_overrides = values.into_iter().map(Into::into).collect(); + self + } + /// Set the amount of slots to use for the object database. It's a value that doesn't need changes on the client, typically, /// but should be controlled on the server. pub fn object_store_slots(mut self, slots: git_odb::store::init::Slots) -> Self { @@ -225,6 +236,7 @@ impl git_sec::trust::DefaultForLevel for Options { lossy_config: None, bail_if_untrusted: false, lenient_config: true, + config_overrides: Vec::new(), }, git_sec::Trust::Reduced => Options { object_store_slots: git_odb::store::init::Slots::Given(32), // limit resource usage @@ -235,6 +247,7 @@ impl git_sec::trust::DefaultForLevel for Options { bail_if_untrusted: false, lenient_config: true, lossy_config: None, + config_overrides: Vec::new(), }, } } @@ -330,6 +343,7 @@ impl ThreadSafeRepository { lenient_config, bail_if_untrusted, permissions: Permissions { ref env, config }, + ref config_overrides, } = options; let git_dir_trust = git_dir_trust.expect("trust must be been determined by now"); @@ -368,6 +382,7 @@ impl ThreadSafeRepository { env.clone(), config, lenient_config, + config_overrides, )?; if bail_if_untrusted && git_dir_trust != git_sec::Trust::Full { @@ -401,14 +416,7 @@ impl ThreadSafeRepository { None => {} } - refs.write_reflog = config.reflog.unwrap_or_else(|| { - if worktree_dir.is_none() { - git_ref::store::WriteReflog::Disable - } else { - git_ref::store::WriteReflog::Normal - } - }); - + refs.write_reflog = reflog_or_default(config.reflog, worktree_dir.is_some()); let replacements = replacement_objects .clone() .refs_prefix() @@ -450,6 +458,44 @@ impl ThreadSafeRepository { } } +impl Repository { + /// Causes our configuration to re-read cached values which will also be applied to the repository in-memory state if applicable. 
+ /// + /// Similar to `reread_values_and_clear_caches_replacing_config()`, but works on the existing instance instead of a passed + /// in one that it them makes the default. + #[cfg(feature = "blocking-network-client")] + pub(crate) fn reread_values_and_clear_caches(&mut self) -> Result<(), config::Error> { + self.config.reread_values_and_clear_caches()?; + self.apply_changed_values(); + Ok(()) + } + + /// Replace our own configuration with `config` and re-read all cached values, and apply them to select in-memory instances. + pub(crate) fn reread_values_and_clear_caches_replacing_config( + &mut self, + config: crate::Config, + ) -> Result<(), config::Error> { + self.config.reread_values_and_clear_caches_replacing_config(config)?; + self.apply_changed_values(); + Ok(()) + } + + fn apply_changed_values(&mut self) { + self.refs.write_reflog = reflog_or_default(self.config.reflog, self.work_dir().is_some()); + } +} + +fn reflog_or_default( + config_reflog: Option, + has_worktree: bool, +) -> git_ref::store::WriteReflog { + config_reflog.unwrap_or_else(|| { + has_worktree + .then(|| git_ref::store::WriteReflog::Normal) + .unwrap_or(git_ref::store::WriteReflog::Disable) + }) +} + fn check_safe_directories( git_dir: &std::path::Path, git_install_dir: Option<&std::path::Path>, @@ -505,7 +551,7 @@ mod tests { fn size_of_options() { assert_eq!( std::mem::size_of::(), - 72, + 96, "size shouldn't change without us knowing" ); } diff --git a/git-repository/src/reference/remote.rs b/git-repository/src/reference/remote/mod.rs similarity index 70% rename from git-repository/src/reference/remote.rs rename to git-repository/src/reference/remote/mod.rs index 79600bd5238..9176c48b1de 100644 --- a/git-repository/src/reference/remote.rs +++ b/git-repository/src/reference/remote/mod.rs @@ -1,7 +1,8 @@ use std::borrow::Cow; +use std::convert::TryInto; use crate::{ - bstr::{BStr, ByteSlice, ByteVec}, + bstr::{BStr, ByteSlice}, remote, Reference, }; @@ -14,27 +15,11 @@ pub enum Name<'repo> { Url(Cow<'repo, BStr>), } -impl Name<'_> { - /// Return this instance as a symbolic name, if it is one. - pub fn as_symbol(&self) -> Option<&str> { - match self { - Name::Symbol(n) => n.as_ref().into(), - Name::Url(_) => None, - } - } - - /// Return this instance as url, if it is one. - pub fn as_url(&self) -> Option<&BStr> { - match self { - Name::Url(n) => n.as_ref().into(), - Name::Symbol(_) => None, - } - } -} +mod name; /// Remotes impl<'repo> Reference<'repo> { - /// Find the name of our remote for `direction` as configured in `branch..remote|pushRemote` respectively. + /// Find the unvalidated name of our remote for `direction` as configured in `branch..remote|pushRemote` respectively. /// If `Some()` it can be used in [`Repository::find_remote(…)`][crate::Repository::find_remote()], or if `None` then /// [Repository::remote_default_name()][crate::Repository::remote_default_name()] could be used in its place. 
/// @@ -56,17 +41,7 @@ impl<'repo> Reference<'repo> { }) .flatten() .or_else(|| config.string("branch", Some(name), "remote")) - .and_then(|name| { - if name.contains(&b'/') { - Some(Name::Url(name)) - } else { - match name { - Cow::Borrowed(n) => n.to_str().ok().map(Cow::Borrowed), - Cow::Owned(n) => Vec::from(n).into_string().ok().map(Cow::Owned), - } - .map(Name::Symbol) - } - }) + .and_then(|name| name.try_into().ok()) } /// Like [`remote_name(…)`][Self::remote_name()], but configures the returned `Remote` with additional information like diff --git a/git-repository/src/reference/remote/name.rs b/git-repository/src/reference/remote/name.rs new file mode 100644 index 00000000000..49bfba0c89d --- /dev/null +++ b/git-repository/src/reference/remote/name.rs @@ -0,0 +1,55 @@ +use super::Name; +use crate::bstr::{BStr, ByteSlice, ByteVec}; +use std::borrow::Cow; +use std::convert::TryFrom; + +impl Name<'_> { + /// Obtain the name as string representation. + pub fn as_bstr(&self) -> &BStr { + match self { + Name::Symbol(v) => v.as_ref().into(), + Name::Url(v) => v.as_ref(), + } + } + + /// Return this instance as a symbolic name, if it is one. + pub fn as_symbol(&self) -> Option<&str> { + match self { + Name::Symbol(n) => n.as_ref().into(), + Name::Url(_) => None, + } + } + + /// Return this instance as url, if it is one. + pub fn as_url(&self) -> Option<&BStr> { + match self { + Name::Url(n) => n.as_ref().into(), + Name::Symbol(_) => None, + } + } +} + +impl<'a> TryFrom> for Name<'a> { + type Error = Cow<'a, BStr>; + + fn try_from(name: Cow<'a, BStr>) -> Result { + if name.contains(&b'/') || name.as_ref() == "." { + Ok(Name::Url(name)) + } else { + match name { + Cow::Borrowed(n) => n.to_str().ok().map(Cow::Borrowed).ok_or(name), + Cow::Owned(n) => Vec::from(n) + .into_string() + .map_err(|err| Cow::Owned(err.into_vec().into())) + .map(Cow::Owned), + } + .map(Name::Symbol) + } + } +} + +impl<'a> AsRef for Name<'a> { + fn as_ref(&self) -> &BStr { + self.as_bstr() + } +} diff --git a/git-repository/src/remote/connection/fetch/error.rs b/git-repository/src/remote/connection/fetch/error.rs new file mode 100644 index 00000000000..19956bcb6e7 --- /dev/null +++ b/git-repository/src/remote/connection/fetch/error.rs @@ -0,0 +1,31 @@ +/// The error returned by [`receive()`](super::Prepare::receive()). 
+#[derive(Debug, thiserror::Error)] +#[allow(missing_docs)] +pub enum Error { + #[error("{message}{}", desired.map(|n| format!(" (got {})", n)).unwrap_or_default())] + Configuration { + message: &'static str, + desired: Option, + source: Option, + }, + #[error("Could not decode server reply")] + FetchResponse(#[from] git_protocol::fetch::response::Error), + #[error("Cannot fetch from a remote that uses {remote} while local repository uses {local} for object hashes")] + IncompatibleObjectHash { + local: git_hash::Kind, + remote: git_hash::Kind, + }, + #[error(transparent)] + Negotiate(#[from] super::negotiate::Error), + #[error(transparent)] + Client(#[from] git_protocol::transport::client::Error), + #[error(transparent)] + WritePack(#[from] git_pack::bundle::write::Error), + #[error(transparent)] + UpdateRefs(#[from] super::refs::update::Error), + #[error("Failed to remove .keep file at \"{}\"", path.display())] + RemovePackKeepFile { + path: std::path::PathBuf, + source: std::io::Error, + }, +} diff --git a/git-repository/src/remote/connection/fetch/mod.rs b/git-repository/src/remote/connection/fetch/mod.rs index f9a84ce2936..7d2ed0737d5 100644 --- a/git-repository/src/remote/connection/fetch/mod.rs +++ b/git-repository/src/remote/connection/fetch/mod.rs @@ -1,47 +1,40 @@ -use std::sync::atomic::AtomicBool; - -use git_odb::FindExt; +use crate::bstr::BString; use git_protocol::transport::client::Transport; use crate::{ remote, remote::{ - fetch, fetch::{DryRun, RefMap}, ref_map, Connection, }, Progress, }; -mod error { - /// The error returned by [`receive()`](super::Prepare::receive()). - #[derive(Debug, thiserror::Error)] - #[allow(missing_docs)] - pub enum Error { - #[error("{message}{}", desired.map(|n| format!(" (got {})", n)).unwrap_or_default())] - Configuration { - message: &'static str, - desired: Option, - source: Option, - }, - #[error("Could not decode server reply")] - FetchResponse(#[from] git_protocol::fetch::response::Error), - #[error(transparent)] - Negotiate(#[from] super::negotiate::Error), - #[error(transparent)] - Client(#[from] git_protocol::transport::client::Error), - #[error(transparent)] - WritePack(#[from] git_pack::bundle::write::Error), - #[error(transparent)] - UpdateRefs(#[from] super::refs::update::Error), - #[error("Failed to remove .keep file at \"{}\"", path.display())] - RemovePackKeepFile { - path: std::path::PathBuf, - source: std::io::Error, - }, +mod error; +pub use error::Error; + +/// The way reflog messages should be composed whenever a ref is written with recent objects from a remote. +pub enum RefLogMessage { + /// Prefix the log with `action` and generate the typical suffix as `git` would. + Prefixed { + /// The action to use, like `fetch` or `pull`. + action: String, + }, + /// Control the entire message, using `message` verbatim. + Override { + /// The complete reflog message. 
+ message: BString, + }, +} + +impl RefLogMessage { + pub(crate) fn compose(&self, context: &str) -> BString { + match self { + RefLogMessage::Prefixed { action } => format!("{}: {}", action, context).into(), + RefLogMessage::Override { message } => message.to_owned(), + } } } -pub use error::Error; /// The status of the repository after the fetch operation #[derive(Debug, Clone)] @@ -113,6 +106,7 @@ where con: Some(self), ref_map, dry_run: DryRun::No, + reflog_message: None, }) } } @@ -120,171 +114,15 @@ where impl<'remote, 'repo, T, P> Prepare<'remote, 'repo, T, P> where T: Transport, - P: Progress, { - /// Receive the pack and perform the operation as configured by git via `git-config` or overridden by various builder methods. - /// Return `Ok(None)` if there was nothing to do because all remote refs are at the same state as they are locally, or `Ok(Some(outcome))` - /// to inform about all the changes that were made. - /// - /// ### Negotiation - /// - /// "fetch.negotiationAlgorithm" describes algorithms `git` uses currently, with the default being `consecutive` and `skipping` being - /// experimented with. We currently implement something we could call 'naive' which works for now. - /// - /// ### Pack `.keep` files - /// - /// That packs that are freshly written to the object database are vulnerable to garbage collection for the brief time that it takes between - /// them being placed and the respective references to be written to disk which binds their objects to the commit graph, making them reachable. - /// - /// To circumvent this issue, a `.keep` file is created before any pack related file (i.e. `.pack` or `.idx`) is written, which indicates the - /// garbage collector (like `git maintenance`, `git gc`) to leave the corresponding pack file alone. - /// - /// If there were any ref updates or the received pack was empty, the `.keep` file will be deleted automatically leaving in its place at - /// `write_pack_bundle.keep_path` a `None`. - /// However, if no ref-update happened the path will still be present in `write_pack_bundle.keep_path` and is expected to be handled by the caller. - /// A known application for this behaviour is in `remote-helper` implementations which should send this path via `lock ` to stdout - /// to inform git about the file that it will remove once it updated the refs accordingly. - /// - /// ### Deviation - /// - /// When **updating refs**, the `git-fetch` docs state that the following: - /// - /// > Unlike when pushing with git-push, any updates outside of refs/{tags,heads}/* will be accepted without + in the refspec (or --force), whether that’s swapping e.g. a tree object for a blob, or a commit for another commit that’s doesn’t have the previous commit as an ancestor etc. - /// - /// We explicitly don't special case those refs and expect the user to take control. Note that by its nature, - /// force only applies to refs pointing to commits and if they don't, they will be updated either way in our - /// implementation as well. 
- pub fn receive(mut self, should_interrupt: &AtomicBool) -> Result { - let mut con = self.con.take().expect("receive() can only be called once"); - - let handshake = &self.ref_map.handshake; - let protocol_version = handshake.server_protocol_version; - - let fetch = git_protocol::fetch::Command::Fetch; - let fetch_features = fetch.default_features(protocol_version, &handshake.capabilities); - - git_protocol::fetch::Response::check_required_features(protocol_version, &fetch_features)?; - let sideband_all = fetch_features.iter().any(|(n, _)| *n == "sideband-all"); - let mut arguments = git_protocol::fetch::Arguments::new(protocol_version, fetch_features); - let mut previous_response = None::; - let mut round = 1; - let progress = &mut con.progress; - let repo = con.remote.repo; - - let reader = 'negotiation: loop { - progress.step(); - progress.set_name(format!("negotiate (round {})", round)); - - let is_done = match negotiate::one_round( - negotiate::Algorithm::Naive, - round, - repo, - &self.ref_map, - &mut arguments, - previous_response.as_ref(), - ) { - Ok(_) if arguments.is_empty() => { - git_protocol::fetch::indicate_end_of_interaction(&mut con.transport).ok(); - return Ok(Outcome { - ref_map: std::mem::take(&mut self.ref_map), - status: Status::NoChange, - }); - } - Ok(is_done) => is_done, - Err(err) => { - git_protocol::fetch::indicate_end_of_interaction(&mut con.transport).ok(); - return Err(err.into()); - } - }; - round += 1; - let mut reader = arguments.send(&mut con.transport, is_done)?; - if sideband_all { - setup_remote_progress(progress, &mut reader); - } - let response = git_protocol::fetch::Response::from_line_reader(protocol_version, &mut reader)?; - if response.has_pack() { - progress.step(); - progress.set_name("receiving pack"); - if !sideband_all { - setup_remote_progress(progress, &mut reader); - } - break 'negotiation reader; - } else { - previous_response = Some(response); - } - }; - - let options = git_pack::bundle::write::Options { - thread_limit: config::index_threads(repo)?, - index_version: config::pack_index_version(repo)?, - iteration_mode: git_pack::data::input::Mode::Verify, - object_hash: con.remote.repo.object_hash(), - }; - - let mut write_pack_bundle = if matches!(self.dry_run, fetch::DryRun::No) { - Some(git_pack::Bundle::write_to_directory( - reader, - Some(repo.objects.store_ref().path().join("pack")), - con.progress, - should_interrupt, - Some(Box::new({ - let repo = repo.clone(); - move |oid, buf| repo.objects.find(oid, buf).ok() - })), - options, - )?) - } else { - drop(reader); - None - }; - - if matches!(protocol_version, git_protocol::transport::Protocol::V2) { - git_protocol::fetch::indicate_end_of_interaction(&mut con.transport).ok(); - } - - let update_refs = refs::update( - repo, - "fetch", - &self.ref_map.mappings, - con.remote.refspecs(remote::Direction::Fetch), - self.dry_run, - )?; - - if let Some(bundle) = write_pack_bundle.as_mut() { - if !update_refs.edits.is_empty() || bundle.index.num_objects == 0 { - if let Some(path) = bundle.keep_path.take() { - std::fs::remove_file(&path).map_err(|err| Error::RemovePackKeepFile { path, source: err })?; - } - } - } - - Ok(Outcome { - ref_map: std::mem::take(&mut self.ref_map), - status: match write_pack_bundle { - Some(write_pack_bundle) => Status::Change { - write_pack_bundle, - update_refs, - }, - None => Status::DryRun { update_refs }, - }, - }) + /// Return the ref_map (that includes the server handshake) which was part of listing refs prior to fetching a pack. 
+ pub fn ref_map(&self) -> &RefMap { + &self.ref_map } } -fn setup_remote_progress( - progress: &mut impl Progress, - reader: &mut Box, -) { - use git_protocol::transport::client::ExtendedBufRead; - reader.set_progress_handler(Some(Box::new({ - let mut remote_progress = progress.add_child("remote"); - move |is_err: bool, data: &[u8]| { - git_protocol::RemoteProgress::translate_to_progress(is_err, data, &mut remote_progress) - } - }) as git_protocol::transport::client::HandleProgress)); -} - mod config; +mod receive_pack; /// #[path = "update_refs/mod.rs"] pub mod refs; @@ -297,6 +135,7 @@ where con: Option>, ref_map: RefMap, dry_run: DryRun, + reflog_message: Option, } /// Builder @@ -311,6 +150,12 @@ where self.dry_run = enabled.then(|| DryRun::Yes).unwrap_or(DryRun::No); self } + + /// Set the reflog message to use when updating refs after fetching a pack. + pub fn with_reflog_message(mut self, reflog_message: RefLogMessage) -> Self { + self.reflog_message = reflog_message.into(); + self + } } impl<'remote, 'repo, T, P> Drop for Prepare<'remote, 'repo, T, P> diff --git a/git-repository/src/remote/connection/fetch/negotiate.rs b/git-repository/src/remote/connection/fetch/negotiate.rs index 51bb25ff294..e7132c2f8db 100644 --- a/git-repository/src/remote/connection/fetch/negotiate.rs +++ b/git-repository/src/remote/connection/fetch/negotiate.rs @@ -35,14 +35,19 @@ pub(crate) fn one_round( .and_then(|r| r.target().try_id().map(ToOwned::to_owned)) }); match have_id { - Some(have_id) if mapping.remote.as_id() != have_id => { - arguments.want(mapping.remote.as_id()); - arguments.have(have_id); + Some(have_id) => { + if let Some(want_id) = mapping.remote.as_id() { + if want_id != have_id { + arguments.want(want_id); + arguments.have(have_id); + } + } } - Some(_) => {} None => { - arguments.want(mapping.remote.as_id()); - has_missing_tracking_branch = true; + if let Some(have_id) = mapping.remote.as_id() { + arguments.want(have_id); + has_missing_tracking_branch = true; + } } } } diff --git a/git-repository/src/remote/connection/fetch/receive_pack.rs b/git-repository/src/remote/connection/fetch/receive_pack.rs new file mode 100644 index 00000000000..7d2d8645854 --- /dev/null +++ b/git-repository/src/remote/connection/fetch/receive_pack.rs @@ -0,0 +1,193 @@ +use crate::remote::connection::fetch::config; +use crate::remote::fetch::{negotiate, refs, Prepare, RefLogMessage}; +use crate::{ + remote, + remote::{ + fetch, + fetch::{Error, Outcome, Status}, + }, + Progress, +}; +use git_odb::FindExt; +use git_protocol::transport::client::Transport; +use std::sync::atomic::AtomicBool; + +impl<'remote, 'repo, T, P> Prepare<'remote, 'repo, T, P> +where + T: Transport, + P: Progress, + P::SubProgress: 'static, +{ + /// Receive the pack and perform the operation as configured by git via `git-config` or overridden by various builder methods. + /// Return `Ok(None)` if there was nothing to do because all remote refs are at the same state as they are locally, or `Ok(Some(outcome))` + /// to inform about all the changes that were made. + /// + /// ### Negotiation + /// + /// "fetch.negotiationAlgorithm" describes algorithms `git` uses currently, with the default being `consecutive` and `skipping` being + /// experimented with. We currently implement something we could call 'naive' which works for now. 
+ /// + /// ### Pack `.keep` files + /// + /// Packs that are freshly written to the object database are vulnerable to garbage collection for the brief time it takes between + /// them being placed and the respective references being written to disk, which binds their objects to the commit graph and makes them reachable. + /// + /// To circumvent this issue, a `.keep` file is created before any pack-related file (i.e. `.pack` or `.idx`) is written, which indicates to the + /// garbage collector (like `git maintenance`, `git gc`) that it should leave the corresponding pack file alone. + /// + /// If there were any ref updates or the received pack was empty, the `.keep` file will be deleted automatically, leaving `None` in its place at + /// `write_pack_bundle.keep_path`. + /// However, if no ref-update happened, the path will still be present in `write_pack_bundle.keep_path` and is expected to be handled by the caller. + /// A known application for this behaviour is in `remote-helper` implementations, which should send this path, prefixed with `lock `, to stdout + /// to inform git about the file that it will remove once it has updated the refs accordingly. + /// + /// ### Deviation + /// + /// When **updating refs**, the `git-fetch` docs state the following: + /// + /// > Unlike when pushing with git-push, any updates outside of refs/{tags,heads}/* will be accepted without + in the refspec (or --force), whether that's swapping e.g. a tree object for a blob, or a commit for another commit that doesn't have the previous commit as an ancestor etc. + /// + /// We explicitly don't special-case those refs and expect the user to take control. Note that by its nature, + /// force only applies to refs pointing to commits; if they don't, they will be updated either way in our + /// implementation as well.
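A sketch of how the pieces added in this patch fit together once a fetch is prepared; `Prepare`, `with_reflog_message()` and `receive()` come from this patch, while the `git_repository::protocol` re-export and the exact trait bounds are assumptions based on the surrounding code (a blocking networking feature is assumed to be enabled):

    use std::sync::atomic::AtomicBool;
    use git_repository::remote::fetch::{Prepare, RefLogMessage, Status};
    use git_repository::Progress;

    fn receive_with_custom_reflog<T, P>(
        prepare: Prepare<'_, '_, T, P>,
    ) -> Result<(), git_repository::remote::fetch::Error>
    where
        T: git_repository::protocol::transport::client::Transport,
        P: Progress,
        P::SubProgress: 'static,
    {
        let interrupt = AtomicBool::new(false);
        let outcome = prepare
            .with_reflog_message(RefLogMessage::Prefixed { action: "fetch".into() })
            .receive(&interrupt)?;
        match outcome.status {
            Status::NoChange => {}
            Status::DryRun { update_refs } | Status::Change { update_refs, .. } => {
                // one edit per ref that was (or would be) updated
                eprintln!("{} ref edit(s)", update_refs.edits.len());
            }
        }
        Ok(())
    }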
+ pub fn receive(mut self, should_interrupt: &AtomicBool) -> Result { + let mut con = self.con.take().expect("receive() can only be called once"); + + let handshake = &self.ref_map.handshake; + let protocol_version = handshake.server_protocol_version; + + let fetch = git_protocol::fetch::Command::Fetch; + let fetch_features = fetch.default_features(protocol_version, &handshake.capabilities); + + git_protocol::fetch::Response::check_required_features(protocol_version, &fetch_features)?; + let sideband_all = fetch_features.iter().any(|(n, _)| *n == "sideband-all"); + let mut arguments = git_protocol::fetch::Arguments::new(protocol_version, fetch_features); + let mut previous_response = None::; + let mut round = 1; + let progress = &mut con.progress; + let repo = con.remote.repo; + + if self.ref_map.object_hash != repo.object_hash() { + return Err(Error::IncompatibleObjectHash { + local: repo.object_hash(), + remote: self.ref_map.object_hash, + }); + } + + let reader = 'negotiation: loop { + progress.step(); + progress.set_name(format!("negotiate (round {})", round)); + + let is_done = match negotiate::one_round( + negotiate::Algorithm::Naive, + round, + repo, + &self.ref_map, + &mut arguments, + previous_response.as_ref(), + ) { + Ok(_) if arguments.is_empty() => { + git_protocol::fetch::indicate_end_of_interaction(&mut con.transport).ok(); + return Ok(Outcome { + ref_map: std::mem::take(&mut self.ref_map), + status: Status::NoChange, + }); + } + Ok(is_done) => is_done, + Err(err) => { + git_protocol::fetch::indicate_end_of_interaction(&mut con.transport).ok(); + return Err(err.into()); + } + }; + round += 1; + let mut reader = arguments.send(&mut con.transport, is_done)?; + if sideband_all { + setup_remote_progress(progress, &mut reader); + } + let response = git_protocol::fetch::Response::from_line_reader(protocol_version, &mut reader)?; + if response.has_pack() { + progress.step(); + progress.set_name("receiving pack"); + if !sideband_all { + setup_remote_progress(progress, &mut reader); + } + break 'negotiation reader; + } else { + previous_response = Some(response); + } + }; + + let options = git_pack::bundle::write::Options { + thread_limit: config::index_threads(repo)?, + index_version: config::pack_index_version(repo)?, + iteration_mode: git_pack::data::input::Mode::Verify, + object_hash: con.remote.repo.object_hash(), + }; + + let mut write_pack_bundle = if matches!(self.dry_run, fetch::DryRun::No) { + Some(git_pack::Bundle::write_to_directory( + reader, + Some(repo.objects.store_ref().path().join("pack")), + con.progress, + should_interrupt, + Some(Box::new({ + let repo = repo.clone(); + move |oid, buf| repo.objects.find(oid, buf).ok() + })), + options, + )?) 
+ } else { + drop(reader); + None + }; + + if matches!(protocol_version, git_protocol::transport::Protocol::V2) { + git_protocol::fetch::indicate_end_of_interaction(&mut con.transport).ok(); + } + + let update_refs = refs::update( + repo, + self.reflog_message + .take() + .unwrap_or_else(|| RefLogMessage::Prefixed { action: "fetch".into() }), + &self.ref_map.mappings, + con.remote.refspecs(remote::Direction::Fetch), + self.dry_run, + )?; + + if let Some(bundle) = write_pack_bundle.as_mut() { + if !update_refs.edits.is_empty() || bundle.index.num_objects == 0 { + if let Some(path) = bundle.keep_path.take() { + std::fs::remove_file(&path).map_err(|err| Error::RemovePackKeepFile { path, source: err })?; + } + } + } + + Ok(Outcome { + ref_map: std::mem::take(&mut self.ref_map), + status: match write_pack_bundle { + Some(write_pack_bundle) => Status::Change { + write_pack_bundle, + update_refs, + }, + None => Status::DryRun { update_refs }, + }, + }) + } +} + +fn setup_remote_progress
<P>
( + progress: &mut P, + reader: &mut Box, +) where + P: Progress, + P::SubProgress: 'static, +{ + use git_protocol::transport::client::ExtendedBufRead; + reader.set_progress_handler(Some(Box::new({ + let mut remote_progress = progress.add_child("remote"); + move |is_err: bool, data: &[u8]| { + git_protocol::RemoteProgress::translate_to_progress(is_err, data, &mut remote_progress) + } + }) as git_protocol::transport::client::HandleProgress)); +} diff --git a/git-repository/src/remote/connection/fetch/update_refs/mod.rs b/git-repository/src/remote/connection/fetch/update_refs/mod.rs index ecd8ccbacf8..9d2623a417b 100644 --- a/git-repository/src/remote/connection/fetch/update_refs/mod.rs +++ b/git-repository/src/remote/connection/fetch/update_refs/mod.rs @@ -1,12 +1,12 @@ use std::{collections::BTreeMap, convert::TryInto, path::PathBuf}; -use git_odb::FindExt; -use git_pack::Find; +use git_odb::{Find, FindExt}; use git_ref::{ transaction::{Change, LogChange, PreviousValue, RefEdit, RefLog}, Target, TargetRef, }; +use crate::remote::fetch::{RefLogMessage, Source}; use crate::{ ext::ObjectIdExt, remote::{fetch, fetch::refs::update::Mode}, @@ -41,7 +41,7 @@ impl From for Update { /// It can be used to produce typical information that one is used to from `git fetch`. pub(crate) fn update( repo: &Repository, - action: &str, + message: RefLogMessage, mappings: &[fetch::Mapping], refspecs: &[git_refspec::RefSpec], dry_run: fetch::DryRun, @@ -49,13 +49,17 @@ pub(crate) fn update( let mut edits = Vec::new(); let mut updates = Vec::new(); - for fetch::Mapping { - remote, - local, - spec_index, - } in mappings - { - let remote_id = remote.as_id(); + for (remote, local, spec) in mappings.iter().filter_map( + |fetch::Mapping { + remote, + local, + spec_index, + }| refspecs.get(*spec_index).map(|spec| (remote, local, spec)), + ) { + let remote_id = match remote.as_id() { + Some(id) => id, + None => continue, + }; if dry_run == fetch::DryRun::No && !repo.objects.contains(remote_id) { updates.push(update::Mode::RejectedSourceObjectNotFound { id: remote_id.into() }.into()); continue; @@ -83,14 +87,14 @@ pub(crate) fn update( let (mode, reflog_message) = if local_id == remote_id { (update::Mode::NoChangeNeeded, "no update will be performed") } else if let Some(git_ref::Category::Tag) = existing.name().category() { - if refspecs[*spec_index].allow_non_fast_forward() { + if spec.allow_non_fast_forward() { (update::Mode::Forced, "updating tag") } else { updates.push(update::Mode::RejectedTagUpdate.into()); continue; } } else { - let mut force = refspecs[*spec_index].allow_non_fast_forward(); + let mut force = spec.allow_non_fast_forward(); let is_fast_forward = match dry_run { fetch::DryRun::No => { let ancestors = repo @@ -160,10 +164,30 @@ pub(crate) fn update( log: LogChange { mode: RefLog::AndReference, force_create_reflog: false, - message: format!("{}: {}", action, reflog_message).into(), + message: message.compose(reflog_message), }, expected: previous_value, - new: Target::Peeled(remote_id.into()), + new: if let Source::Ref(git_protocol::fetch::Ref::Symbolic { target, .. 
}) = &remote { + match mappings.iter().find_map(|m| { + m.remote.as_name().and_then(|name| { + (name == target) + .then(|| m.local.as_ref().and_then(|local| local.try_into().ok())) + .flatten() + }) + }) { + Some(local_branch) => { + // This is always safe because… + // - the reference may exist already + // - if it doesn't exist it will be created - we are here because it's in the list of mappings after all + // - if it exists and is updated, and the update is rejected due to non-fastforward for instance, the + // target reference still exists and we can point to it. + Target::Symbolic(local_branch) + } + None => Target::Peeled(remote_id.into()), + } + } else { + Target::Peeled(remote_id.into()) + }, }, name, deref: false, @@ -178,7 +202,26 @@ pub(crate) fn update( } let edits = match dry_run { - fetch::DryRun::No => repo.edit_references(edits)?, + fetch::DryRun::No => { + let (file_lock_fail, packed_refs_lock_fail) = repo + .config + .lock_timeout() + .map_err(crate::reference::edit::Error::from)?; + repo.refs + .transaction() + .packed_refs(git_ref::file::transaction::PackedRefs::DeletionsAndNonSymbolicUpdates( + Box::new(|oid, buf| { + repo.objects + .try_find(oid, buf) + .map(|obj| obj.map(|obj| obj.kind)) + .map_err(|err| Box::new(err) as Box) + }), + )) + .prepare(edits, file_lock_fail, packed_refs_lock_fail) + .map_err(crate::reference::edit::Error::from)? + .commit(repo.committer_or_default()) + .map_err(crate::reference::edit::Error::from)? + } fetch::DryRun::Yes => edits, }; diff --git a/git-repository/src/remote/connection/fetch/update_refs/tests.rs b/git-repository/src/remote/connection/fetch/update_refs/tests.rs index 14ec2f28ecc..20d69965935 100644 --- a/git-repository/src/remote/connection/fetch/update_refs/tests.rs +++ b/git-repository/src/remote/connection/fetch/update_refs/tests.rs @@ -1,5 +1,6 @@ mod update { use git_testtools::{hex_to_id, Result}; + use std::convert::TryInto; use crate as git; @@ -29,9 +30,11 @@ mod update { let repo = git::open_opts(dir.path().join(name), git::open::Options::isolated()).unwrap(); (repo, dir) } + use crate::bstr::BString; use git_ref::{transaction::Change, TargetRef}; use crate::remote::fetch; + use crate::remote::fetch::{Mapping, RefLogMessage, Source}; #[test] fn various_valid_updates() { @@ -117,7 +120,7 @@ mod update { let (mapping, specs) = mapping_from_spec(spec, &repo); let out = fetch::refs::update( &repo, - "action", + prefixed("action"), &mapping, &specs, reflog_message.map(|_| fetch::DryRun::Yes).unwrap_or(fetch::DryRun::No), @@ -176,7 +179,7 @@ mod update { ] { let spec = format!("refs/heads/main:refs/heads/{}", branch); let (mappings, specs) = mapping_from_spec(&spec, &repo); - let out = fetch::refs::update(&repo, "action", &mappings, &specs, fetch::DryRun::Yes)?; + let out = fetch::refs::update(&repo, prefixed("action"), &mappings, &specs, fetch::DryRun::Yes)?; assert_eq!( out.updates, @@ -194,27 +197,171 @@ mod update { } #[test] - fn symbolic_refs_are_never_written() { + fn local_symbolic_refs_are_never_written() { let repo = repo("two-origins"); - let (mappings, specs) = mapping_from_spec("refs/heads/main:refs/heads/symbolic", &repo); - let out = fetch::refs::update(&repo, "action", &mappings, &specs, fetch::DryRun::Yes).unwrap(); + for source in ["refs/heads/main", "refs/heads/symbolic", "HEAD"] { + let (mappings, specs) = mapping_from_spec(&format!("{source}:refs/heads/symbolic"), &repo); + let out = fetch::refs::update(&repo, prefixed("action"), &mappings, &specs, fetch::DryRun::Yes).unwrap(); + 
assert_eq!(out.edits.len(), 0); + assert_eq!( + out.updates, + vec![fetch::refs::Update { + mode: fetch::refs::update::Mode::RejectedSymbolic, + edit_index: None + }], + "we don't overwrite these as the checked-out check needs to consider much more than it currently does, we are playing it safe" + ); + } + } + + #[test] + fn remote_symbolic_refs_can_always_be_set_as_there_is_no_scenario_where_it_could_be_nonexisting_and_rejected() { + let repo = repo("two-origins"); + let (mut mappings, specs) = mapping_from_spec("refs/heads/symbolic:refs/remotes/origin/new", &repo); + mappings.push(Mapping { + remote: Source::Ref(git_protocol::fetch::Ref::Direct { + full_ref_name: "refs/heads/main".try_into().unwrap(), + object: hex_to_id("f99771fe6a1b535783af3163eba95a927aae21d5"), + }), + local: Some("refs/heads/symbolic".into()), + spec_index: 0, + }); + let out = fetch::refs::update(&repo, prefixed("action"), &mappings, &specs, fetch::DryRun::Yes).unwrap(); + + assert_eq!(out.edits.len(), 1); + assert_eq!( + out.updates, + vec![ + fetch::refs::Update { + mode: fetch::refs::update::Mode::New, + edit_index: Some(0) + }, + fetch::refs::Update { + mode: fetch::refs::update::Mode::RejectedSymbolic, + edit_index: None + } + ], + ); + let edit = &out.edits[0]; + match &edit.change { + Change::Update { log, new, .. } => { + assert_eq!(log.message, "action: storing ref"); + assert!( + new.try_name().is_some(), + "remote falls back to peeled id as it's the only thing we seem to have locally, it won't refer to a non-existing local ref" + ); + } + _ => unreachable!("only updates"), + } + } + + #[test] + fn local_direct_refs_are_never_written_with_symbolic_ones_but_see_only_the_destination() { + let repo = repo("two-origins"); + let (mappings, specs) = mapping_from_spec("refs/heads/symbolic:refs/heads/not-currently-checked-out", &repo); + let out = fetch::refs::update(&repo, prefixed("action"), &mappings, &specs, fetch::DryRun::Yes).unwrap(); + + assert_eq!(out.edits.len(), 1); assert_eq!( out.updates, vec![fetch::refs::Update { - mode: fetch::refs::update::Mode::RejectedSymbolic, - edit_index: None, + mode: fetch::refs::update::Mode::NoChangeNeeded, + edit_index: Some(0) }], - "this also protects from writing HEAD, which should in theory be impossible to get from a refspec as it normalizes partial ref names" ); - assert_eq!(out.edits.len(), 0); + } + + #[test] + fn remote_refs_cannot_map_to_local_head() { + let repo = repo("two-origins"); + let (mappings, specs) = mapping_from_spec("refs/heads/main:HEAD", &repo); + let out = fetch::refs::update(&repo, prefixed("action"), &mappings, &specs, fetch::DryRun::Yes).unwrap(); + + assert_eq!(out.edits.len(), 1); + assert_eq!( + out.updates, + vec![fetch::refs::Update { + mode: fetch::refs::update::Mode::New, + edit_index: Some(0), + }], + ); + let edit = &out.edits[0]; + match &edit.change { + Change::Update { log, new, .. 
} => { + assert_eq!(log.message, "action: storing head"); + assert!( + new.try_id().is_some(), + "remote is peeled, so local will be peeled as well" + ); + } + _ => unreachable!("only updates"), + } + assert_eq!( + edit.name.as_bstr(), + "refs/heads/HEAD", + "it's not possible to refer to the local HEAD with refspecs" + ); + } + + #[test] + fn remote_symbolic_refs_can_be_written_locally_and_point_to_tracking_branch() { + let repo = repo("two-origins"); + let (mut mappings, specs) = mapping_from_spec("HEAD:refs/remotes/origin/new-HEAD", &repo); + mappings.push(Mapping { + remote: Source::Ref(git_protocol::fetch::Ref::Direct { + full_ref_name: "refs/heads/main".try_into().unwrap(), + object: hex_to_id("f99771fe6a1b535783af3163eba95a927aae21d5"), + }), + local: Some("refs/remotes/origin/main".into()), + spec_index: 0, + }); + let out = fetch::refs::update(&repo, prefixed("action"), &mappings, &specs, fetch::DryRun::Yes).unwrap(); + + assert_eq!( + out.updates, + vec![ + fetch::refs::Update { + mode: fetch::refs::update::Mode::New, + edit_index: Some(0), + }, + fetch::refs::Update { + mode: fetch::refs::update::Mode::NoChangeNeeded, + edit_index: Some(1), + } + ], + ); + assert_eq!(out.edits.len(), 2); + let edit = &out.edits[0]; + match &edit.change { + Change::Update { log, new, .. } => { + assert_eq!(log.message, "action: storing ref"); + assert_eq!( + new.try_name().expect("symbolic ref").as_bstr(), + "refs/remotes/origin/main", + "remote is symbolic, so local will be symbolic as well, but is rewritten to tracking branch" + ); + } + _ => unreachable!("only updates"), + } + assert_eq!(edit.name.as_bstr(), "refs/remotes/origin/new-HEAD",); } #[test] fn non_fast_forward_is_rejected_but_appears_to_be_fast_forward_in_dryrun_mode() { let repo = repo("two-origins"); let (mappings, specs) = mapping_from_spec("refs/heads/main:refs/remotes/origin/g", &repo); - let out = fetch::refs::update(&repo, "action", &mappings, &specs, fetch::DryRun::Yes).unwrap(); + let reflog_message: BString = "very special".into(); + let out = fetch::refs::update( + &repo, + RefLogMessage::Override { + message: reflog_message.clone(), + }, + &mappings, + &specs, + fetch::DryRun::Yes, + ) + .unwrap(); assert_eq!( out.updates, @@ -225,13 +372,20 @@ mod update { "The caller has to be aware and note that dry-runs can't know about fast-forwards as they don't have remote objects" ); assert_eq!(out.edits.len(), 1); + let edit = &out.edits[0]; + match &edit.change { + Change::Update { log, .. 
} => { + assert_eq!(log.message, reflog_message); + } + _ => unreachable!("only updates"), + } } #[test] fn non_fast_forward_is_rejected_if_dry_run_is_disabled() { let (repo, _tmp) = repo_rw("two-origins"); let (mappings, specs) = mapping_from_spec("refs/remotes/origin/g:refs/heads/not-currently-checked-out", &repo); - let out = fetch::refs::update(&repo, "action", &mappings, &specs, fetch::DryRun::No).unwrap(); + let out = fetch::refs::update(&repo, prefixed("action"), &mappings, &specs, fetch::DryRun::No).unwrap(); assert_eq!( out.updates, @@ -243,7 +397,7 @@ mod update { assert_eq!(out.edits.len(), 0); let (mappings, specs) = mapping_from_spec("refs/heads/main:refs/remotes/origin/g", &repo); - let out = fetch::refs::update(&repo, "prefix", &mappings, &specs, fetch::DryRun::No).unwrap(); + let out = fetch::refs::update(&repo, prefixed("prefix"), &mappings, &specs, fetch::DryRun::No).unwrap(); assert_eq!( out.updates, @@ -266,7 +420,7 @@ mod update { fn fast_forwards_are_called_out_even_if_force_is_given() { let (repo, _tmp) = repo_rw("two-origins"); let (mappings, specs) = mapping_from_spec("+refs/heads/main:refs/remotes/origin/g", &repo); - let out = fetch::refs::update(&repo, "prefix", &mappings, &specs, fetch::DryRun::No).unwrap(); + let out = fetch::refs::update(&repo, prefixed("prefix"), &mappings, &specs, fetch::DryRun::No).unwrap(); assert_eq!( out.updates, @@ -289,7 +443,8 @@ mod update { let spec = git_refspec::parse(spec.into(), git_refspec::parse::Operation::Fetch).unwrap(); let group = git_refspec::MatchGroup::from_fetch_specs(Some(spec)); let references = repo.references().unwrap(); - let references: Vec<_> = references.all().unwrap().map(|r| into_remote_ref(r.unwrap())).collect(); + let mut references: Vec<_> = references.all().unwrap().map(|r| into_remote_ref(r.unwrap())).collect(); + references.push(into_remote_ref(repo.find_reference("HEAD").unwrap())); let mappings = group .match_remotes(references.iter().map(remote_ref_to_item)) .mappings @@ -332,8 +487,12 @@ mod update { let (full_ref_name, target, object) = r.unpack(); git_refspec::match_group::Item { full_ref_name, - target, + target: target.expect("no unborn HEAD"), object, } } + + fn prefixed(action: &str) -> RefLogMessage { + RefLogMessage::Prefixed { action: action.into() } + } } diff --git a/git-repository/src/remote/connection/fetch/update_refs/update.rs b/git-repository/src/remote/connection/fetch/update_refs/update.rs index 3b612625e6b..ddebc733b80 100644 --- a/git-repository/src/remote/connection/fetch/update_refs/update.rs +++ b/git-repository/src/remote/connection/fetch/update_refs/update.rs @@ -94,6 +94,9 @@ impl std::fmt::Display for Mode { impl Outcome { /// Produce an iterator over all information used to produce this outcome, ref-update by ref-update, using the `mappings` /// used when producing the ref update. + /// + /// Note that for mappings that don't have a corresponding entry in `refspecs`, the returned refspec will be `None` even though that should never be the case. + /// This can happen if the `refspecs` passed in aren't the refspecs used to create the `mapping`, and it's up to the caller to sort it out.
pub fn iter_mapping_updates<'a, 'b>( &self, mappings: &'a [fetch::Mapping], @@ -102,7 +105,7 @@ impl Outcome { Item = ( &super::Update, &'a fetch::Mapping, - &'b git_refspec::RefSpec, + Option<&'b git_refspec::RefSpec>, Option<&git_ref::transaction::RefEdit>, ), > { @@ -110,7 +113,7 @@ impl Outcome { ( update, mapping, - &refspecs[mapping.spec_index], + refspecs.get(mapping.spec_index), update.edit_index.and_then(|idx| self.edits.get(idx)), ) }) diff --git a/git-repository/src/remote/connection/ref_map.rs b/git-repository/src/remote/connection/ref_map.rs index fc43a332096..316b3705443 100644 --- a/git-repository/src/remote/connection/ref_map.rs +++ b/git-repository/src/remote/connection/ref_map.rs @@ -4,6 +4,7 @@ use git_features::progress::Progress; use git_protocol::transport::client::Transport; use crate::{ + bstr, bstr::{BString, ByteVec}, remote::{connection::HandshakeWithRefs, fetch, Connection, Direction}, }; @@ -14,6 +15,8 @@ use crate::{ pub enum Error { #[error(transparent)] Handshake(#[from] git_protocol::fetch::handshake::Error), + #[error("The object format {format:?} as used by the remote is unsupported")] + UnknownObjectFormat { format: BString }, #[error(transparent)] ListRefs(#[from] git_protocol::fetch::refs::Error), #[error(transparent)] @@ -79,6 +82,7 @@ where handshake_parameters, }: Options, ) -> Result { + let null = git_hash::ObjectId::null(git_hash::Kind::Sha1); // OK to hardcode Sha1, it's not supposed to match, ever. let remote = self .fetch_refs(prefix_from_spec_as_filter_on_remote, handshake_parameters) .await?; @@ -88,7 +92,7 @@ where let (full_ref_name, target, object) = r.unpack(); git_refspec::match_group::Item { full_ref_name, - target, + target: target.unwrap_or(&null), object, } })) @@ -110,11 +114,14 @@ where spec_index: m.spec_index, }) .collect(); + + let object_hash = extract_object_format(self.remote.repo, &remote.outcome)?; Ok(fetch::RefMap { mappings, fixes, remote_refs: remote.refs, handshake: remote.outcome, + object_hash, }) } #[git_protocol::maybe_async::maybe_async] @@ -156,10 +163,11 @@ where for spec in specs { let spec = spec.to_ref(); if seen.insert(spec.instruction()) { - if let Some(prefix) = spec.prefix() { - let mut arg: BString = "ref-prefix ".into(); - arg.push_str(prefix); - arguments.push(arg) + let mut prefixes = Vec::with_capacity(1); + spec.expand_prefixes(&mut prefixes); + for mut prefix in prefixes { + prefix.insert_str(0, "ref-prefix "); + arguments.push(prefix); } } } @@ -174,3 +182,24 @@ where Ok(HandshakeWithRefs { outcome, refs }) } } + +/// Assume sha1 if server says nothing, otherwise configure anything beyond sha1 in the local repo configuration +fn extract_object_format( + _repo: &crate::Repository, + outcome: &git_protocol::fetch::handshake::Outcome, +) -> Result { + use bstr::ByteSlice; + let object_hash = + if let Some(object_format) = outcome.capabilities.capability("object-format").and_then(|c| c.value()) { + let object_format = object_format.to_str().map_err(|_| Error::UnknownObjectFormat { + format: object_format.into(), + })?; + match object_format { + "sha1" => git_hash::Kind::Sha1, + unknown => return Err(Error::UnknownObjectFormat { format: unknown.into() }), + } + } else { + git_hash::Kind::Sha1 + }; + Ok(object_hash) +} diff --git a/git-repository/src/remote/fetch.rs b/git-repository/src/remote/fetch.rs index b23cad887fe..80dfd39146a 100644 --- a/git-repository/src/remote/fetch.rs +++ b/git-repository/src/remote/fetch.rs @@ -1,4 +1,4 @@ -use crate::bstr::BString; +use crate::bstr::{BStr, BString}; /// If 
`Yes`, don't really make changes but do as much as possible to get an idea of what would be done. #[derive(Debug, Copy, Clone, PartialEq, Eq)] @@ -23,6 +23,10 @@ pub struct RefMap { /// /// Note that the `refs` field is always `None` as the refs are placed in `remote_refs`. pub handshake: git_protocol::fetch::handshake::Outcome, + /// The kind of hash used for all data sent by the server, if understood by this client implementation. + /// + /// It was extracted from the `handshake` as advertised by the server. + pub object_hash: git_hash::Kind, } /// Either an object id that the remote has or the matched remote ref itself. @@ -36,12 +40,27 @@ pub enum Source { impl Source { /// Return either the direct object id we refer to or the direct target that a reference refers to. - pub fn as_id(&self) -> &git_hash::oid { + /// The latter may be a direct or a symbolic reference, and we degenerate this to the peeled object id. + /// If unborn, `None` is returned. + pub fn as_id(&self) -> Option<&git_hash::oid> { match self { - Source::ObjectId(id) => id, + Source::ObjectId(id) => Some(id), Source::Ref(r) => r.unpack().1, } } + + /// Return ourselves as the full name of the reference we represent, or `None` if this source isn't a reference but an object. + pub fn as_name(&self) -> Option<&BStr> { + match self { + Source::ObjectId(_) => None, + Source::Ref(r) => match r { + git_protocol::fetch::Ref::Unborn { full_ref_name, .. } + | git_protocol::fetch::Ref::Symbolic { full_ref_name, .. } + | git_protocol::fetch::Ref::Direct { full_ref_name, .. } + | git_protocol::fetch::Ref::Peeled { full_ref_name, .. } => Some(full_ref_name.as_ref()), + }, + } + } } /// A mapping between a single remote reference and its advertised objects to a local destination which may or may not exist. @@ -56,4 +75,4 @@ pub struct Mapping { } #[cfg(feature = "blocking-network-client")] -pub use super::connection::fetch::{negotiate, prepare, refs, Error, Outcome, Prepare, Status}; +pub use super::connection::fetch::{negotiate, prepare, refs, Error, Outcome, Prepare, RefLogMessage, Status}; diff --git a/git-repository/src/repository/config.rs b/git-repository/src/repository/config.rs index 3a2de9298ea..c0565f74073 100644 --- a/git-repository/src/repository/config.rs +++ b/git-repository/src/repository/config.rs @@ -83,8 +83,6 @@ mod branch { use git_ref::FullNameRef; use git_validate::reference::name::Error as ValidateNameError; - use crate::bstr::BStr; - impl crate::Repository { /// Return a set of unique short branch names for which custom configuration exists in the configuration, /// if we deem them [trustworthy][crate::open::Options::filter_config_section()]. @@ -92,10 +90,10 @@ mod branch { self.subsection_names_of("branch") } - /// Returns a reference to the remote associated with the given `short_branch_name`, + /// Returns the validated reference on the remote associated with the given `short_branch_name`, /// always `main` instead of `refs/heads/main`. /// - /// The remote-ref is the one we track on the remote side for merging and pushing. + /// The returned reference is the one we track on the remote side for merging and pushing. /// Returns `None` if the remote reference was not found. /// May return an error if the reference is invalid. pub fn branch_remote_ref( @@ -111,11 +109,18 @@ mod branch { }) } - /// Returns the name of the remote associated with the given `short_branch_name`, typically `main` instead of `refs/heads/main`. 
+ /// Returns the unvalidated name of the remote associated with the given `short_branch_name`, + /// typically `main` instead of `refs/heads/main`. /// In some cases, the returned name will be an URL. - /// Returns `None` if the remote was not found. - pub fn branch_remote_name(&self, short_branch_name: &str) -> Option> { - self.config.resolved.string("branch", Some(short_branch_name), "remote") + /// Returns `None` if the remote was not found or if the name contained illformed UTF-8. + /// + /// See also [Reference::remote_name()][crate::Reference::remote_name()] for a more typesafe version + /// to be used when a `Reference` is available. + pub fn branch_remote_name(&self, short_branch_name: &str) -> Option> { + self.config + .resolved + .string("branch", Some(short_branch_name), "remote") + .and_then(|name| name.try_into().ok()) } } } diff --git a/git-repository/src/worktree/mod.rs b/git-repository/src/worktree/mod.rs index 771036e1fb0..00036ad0d88 100644 --- a/git-repository/src/worktree/mod.rs +++ b/git-repository/src/worktree/mod.rs @@ -133,14 +133,15 @@ pub mod excludes { .then(|| git_glob::pattern::Case::Fold) .unwrap_or_default(); let mut buf = Vec::with_capacity(512); + let excludes_file = match repo.config.excludes_file().transpose()? { + Some(user_path) => Some(user_path), + None => repo.config.xdg_config_path("ignore")?, + }; let state = git_worktree::fs::cache::State::IgnoreStack(git_worktree::fs::cache::state::Ignore::new( overrides.unwrap_or_default(), git_attributes::MatchGroup::::from_git_dir( repo.git_dir(), - match repo.config.excludes_file()?.as_ref() { - Some(user_path) => Some(user_path.to_owned()), - None => repo.config.xdg_config_path("ignore")?, - }, + excludes_file, &mut buf, )?, None, diff --git a/git-repository/tests/clone/mod.rs b/git-repository/tests/clone/mod.rs index e65ecd17eeb..2fd49a9882b 100644 --- a/git-repository/tests/clone/mod.rs +++ b/git-repository/tests/clone/mod.rs @@ -2,13 +2,29 @@ use git_repository as git; use crate::remote; -#[test] #[cfg(feature = "blocking-network-client")] -fn fetch_only_with_configuration() -> crate::Result { - let tmp = git_testtools::tempfile::TempDir::new()?; - let called_configure_remote = std::sync::Arc::new(std::sync::atomic::AtomicBool::default()); - let remote_name = "special"; - let mut prepare = git::prepare_clone_bare(remote::repo("base").path(), tmp.path())? +mod blocking_io { + use git_object::bstr::ByteSlice; + use git_ref::TargetRef; + use git_repository as git; + + use crate::remote; + + #[test] + fn fetch_only_with_configuration() -> crate::Result { + let tmp = git_testtools::tempfile::TempDir::new()?; + let called_configure_remote = std::sync::Arc::new(std::sync::atomic::AtomicBool::default()); + let remote_name = "special"; + let mut prepare = git::clone::PrepareFetch::new( + remote::repo("base").path(), + tmp.path(), + git::create::Kind::Bare, + Default::default(), + git::open::Options::isolated().config_overrides([ + "init.defaultBranch=unused-as-overridden-by-remote", + "core.logAllRefUpdates", + ]), + )? .with_remote_name(remote_name)? 
.configure_remote({ let called_configure_remote = called_configure_remote.clone(); @@ -20,54 +36,223 @@ fn fetch_only_with_configuration() -> crate::Result { ) } }); - let (repo, out) = prepare.fetch_only(git::progress::Discard, &std::sync::atomic::AtomicBool::default())?; - drop(prepare); - - assert!( - called_configure_remote.load(std::sync::atomic::Ordering::Relaxed), - "custom remote configuration is called" - ); - assert_eq!(repo.remote_names().len(), 1, "only ever one remote"); - let remote = repo.find_remote(remote_name)?; - assert_eq!( - remote.refspecs(git::remote::Direction::Fetch).len(), - 2, - "our added spec was stored as well" - ); - - assert_eq!(out.ref_map.mappings.len(), 13); - match out.status { - git_repository::remote::fetch::Status::Change { update_refs, .. } => { - for edit in &update_refs.edits { - use git_odb::Find; - assert!( - repo.objects.contains(edit.change.new_value().expect("always set").id()), - "part of the fetched pack" - ); + let (repo, out) = prepare.fetch_only(git::progress::Discard, &std::sync::atomic::AtomicBool::default())?; + drop(prepare); + + assert!( + called_configure_remote.load(std::sync::atomic::Ordering::Relaxed), + "custom remote configuration is called" + ); + assert_eq!(repo.remote_names().len(), 1, "only ever one remote"); + let remote = repo.find_remote(remote_name)?; + assert_eq!( + remote.refspecs(git::remote::Direction::Fetch).len(), + 2, + "our added spec was stored as well" + ); + assert!( + git::path::from_bstr( + remote + .url(git::remote::Direction::Fetch) + .expect("present") + .path + .as_ref() + ) + .is_absolute(), + "file urls can't be relative paths" + ); + + assert_eq!(out.ref_map.mappings.len(), 14); + match out.status { + git_repository::remote::fetch::Status::Change { update_refs, .. } => { + for edit in &update_refs.edits { + use git_odb::Find; + match edit.change.new_value().expect("always set/no deletion") { + TargetRef::Symbolic(referent) => { + assert!( + repo.find_reference(referent).is_ok(), + "if we set up a symref, the target should exist by now" + ) + } + TargetRef::Peeled(id) => { + assert!(repo.objects.contains(id), "part of the fetched pack"); + } + } + let r = repo.find_reference(edit.name.as_ref()).expect("created"); + if r.name().category().expect("known") != git_ref::Category::Tag { + assert!(r + .name() + .category_and_short_name() + .expect("computable") + .1 + .starts_with_str(remote_name)); + let mut logs = r.log_iter(); + assert_reflog(logs.all()); + } + } } + _ => unreachable!("clones are always causing changes and dry-runs aren't possible"), + } + + let remote_head = repo + .find_reference(&format!("refs/remotes/{remote_name}/HEAD")) + .expect("remote HEAD present"); + assert_eq!( + remote_head + .target() + .try_name() + .expect("remote HEAD is symbolic") + .as_bstr(), + format!("refs/remotes/{remote_name}/main"), + "it points to the local tracking branch of what the remote actually points to" + ); + + let packed_refs = repo + .refs + .cached_packed_buffer()? 
+ .expect("packed refs should be present"); + assert_eq!( + packed_refs.iter()?.count(), + 14, + "all non-symbolic refs should be stored" + ); + + let head = repo.head()?; + { + let mut logs = head.log_iter(); + assert_reflog(logs.all()); } - _ => unreachable!("clones are always causing changes and dry-runs aren't possible"), + + let referent = head.try_into_referent().expect("symbolic ref is present"); + assert!( + referent.id().object().is_ok(), + "the object pointed to by HEAD was fetched as well" + ); + assert_eq!( + referent.name().as_bstr(), + remote::repo("base").head_name()?.expect("symbolic").as_bstr(), + "local clone always adopts the name of the remote" + ); + + let short_name = referent.name().shorten().to_str_lossy(); + assert_eq!( + repo.branch_remote_name(short_name.as_ref()) + .expect("remote is set") + .as_ref(), + remote_name, + "the remote branch information is fully configured" + ); + assert_eq!( + repo.branch_remote_ref(short_name.as_ref()).expect("present")?.as_bstr(), + "refs/heads/main" + ); + + { + let mut logs = referent.log_iter(); + assert_reflog(logs.all()); + } + Ok(()) } - Ok(()) -} -#[test] -#[cfg(feature = "blocking-network-client")] -fn fetch_only_without_configuration() -> crate::Result { - let tmp = git_testtools::tempfile::TempDir::new()?; - let (repo, out) = git::prepare_clone_bare(remote::repo("base").path(), tmp.path())? - .fetch_only(git::progress::Discard, &std::sync::atomic::AtomicBool::default())?; - assert!(repo.find_remote("origin").is_ok(), "default remote name is 'origin'"); - match out.status { - git_repository::remote::fetch::Status::Change { write_pack_bundle, .. } => { - assert!( - write_pack_bundle.keep_path.is_none(), - "keep files aren't kept if refs are written" + fn assert_reflog(log: std::io::Result>>) { + let lines = log + .unwrap() + .expect("log present") + .collect::, _>>() + .unwrap(); + assert_eq!(lines.len(), 1, "just created"); + let line = &lines[0]; + assert!( + line.message.starts_with(b"clone: from "), + "{:?} unexpected", + line.message + ); + let path = git_path::from_bstr(line.message.rsplit(|b| *b == b' ').next().expect("path").as_bstr()); + assert!(path.is_absolute(), "{:?} must be absolute", path); + } + + #[test] + fn fetch_and_checkout() -> crate::Result { + let tmp = git_testtools::tempfile::TempDir::new()?; + let mut prepare = git::prepare_clone(remote::repo("base").path(), tmp.path())?; + let (mut checkout, _out) = + prepare.fetch_then_checkout(git::progress::Discard, &std::sync::atomic::AtomicBool::default())?; + let (repo, _) = checkout.main_worktree(git::progress::Discard, &std::sync::atomic::AtomicBool::default())?; + + let index = repo.index()?; + assert_eq!(index.entries().len(), 1, "All entries are known as per HEAD tree"); + + let work_dir = repo.work_dir().expect("non-bare"); + for entry in index.entries() { + let entry_path = work_dir.join(git_path::from_bstr(entry.path(&index))); + assert!(entry_path.is_file(), "{:?} not found on disk", entry_path) + } + Ok(()) + } + + #[test] + fn fetch_and_checkout_empty_remote_repo() -> crate::Result { + let tmp = git_testtools::tempfile::TempDir::new()?; + let mut prepare = git::prepare_clone( + git_testtools::scripted_fixture_repo_read_only("make_empty_repo.sh")?, + tmp.path(), + )?; + let (mut checkout, out) = prepare + .fetch_then_checkout(git::progress::Discard, &std::sync::atomic::AtomicBool::default()) + .unwrap(); + let (repo, _) = checkout.main_worktree(git::progress::Discard, &std::sync::atomic::AtomicBool::default())?; + + 
assert!(!repo.index_path().is_file(), "newly initialized repos have no index"); + let head = repo.head()?; + assert!(head.is_unborn()); + + assert!( + head.log_iter().all()?.is_none(), + "no reflog for unborn heads (as it needs non-null destination hash)" + ); + + if out + .ref_map + .handshake + .capabilities + .capability("ls-refs") + .expect("has ls-refs") + .supports("unborn") + == Some(true) + { + assert_eq!( + head.referent_name().expect("present").as_bstr(), + "refs/heads/special", + "we pick up the name as present on the server, not the one we default to" + ); + } else { + assert_eq!( + head.referent_name().expect("present").as_bstr(), + "refs/heads/main", + "we simply keep our own post-init HEAD which defaults to the branch name we configured locally" ); } - _ => unreachable!("a clone always carries a change"), + + Ok(()) + } + + #[test] + fn fetch_only_without_configuration() -> crate::Result { + let tmp = git_testtools::tempfile::TempDir::new()?; + let (repo, out) = git::prepare_clone_bare(remote::repo("base").path(), tmp.path())? + .fetch_only(git::progress::Discard, &std::sync::atomic::AtomicBool::default())?; + assert!(repo.find_remote("origin").is_ok(), "default remote name is 'origin'"); + match out.status { + git_repository::remote::fetch::Status::Change { write_pack_bundle, .. } => { + assert!( + write_pack_bundle.keep_path.is_none(), + "keep files aren't kept if refs are written" + ); + } + _ => unreachable!("a clone always carries a change"), + } + Ok(()) } - Ok(()) } #[test] @@ -78,6 +263,19 @@ fn clone_and_early_persist_without_receive() -> crate::Result { Ok(()) } +#[test] +fn clone_and_destination_must_be_empty() -> crate::Result { + let tmp = git_testtools::tempfile::TempDir::new()?; + std::fs::write(tmp.path().join("file"), b"hello")?; + match git::prepare_clone_bare(remote::repo("base").path(), tmp.path()) { + Ok(_) => unreachable!("this should fail as the directory isn't empty"), + Err(err) => assert!(err + .to_string() + .starts_with("Refusing to initialize the non-empty directory as ")), + } + Ok(()) +} + #[test] fn clone_bare_into_empty_directory_and_early_drop() -> crate::Result { let tmp = git_testtools::tempfile::TempDir::new()?; diff --git a/git-repository/tests/fixtures/generated-archives/make_empty_repo.tar.xz b/git-repository/tests/fixtures/generated-archives/make_empty_repo.tar.xz new file mode 100644 index 00000000000..7a1592d166d --- /dev/null +++ b/git-repository/tests/fixtures/generated-archives/make_empty_repo.tar.xz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:875f0aa4a343935bc881d6c595efffeb34063a460239178cf2264e5e44163c2d +size 9068 diff --git a/git-repository/tests/fixtures/make_empty_repo.sh b/git-repository/tests/fixtures/make_empty_repo.sh new file mode 100644 index 00000000000..2ca9da4d632 --- /dev/null +++ b/git-repository/tests/fixtures/make_empty_repo.sh @@ -0,0 +1,4 @@ +#!/bin/bash +set -eu -o pipefail + +git -c init.defaultBranch=special init -q diff --git a/git-repository/tests/fixtures/make_fetch_repos.sh b/git-repository/tests/fixtures/make_fetch_repos.sh index d26e3a46660..65807432aa3 100644 --- a/git-repository/tests/fixtures/make_fetch_repos.sh +++ b/git-repository/tests/fixtures/make_fetch_repos.sh @@ -8,6 +8,7 @@ git clone --shared base clone-as-base-with-changes git add new-file git commit -m "add new-file" git tag -m "new-file introduction" v1.0 + git symbolic-ref refs/heads/symbolic refs/tags/v1.0 ) git clone --shared base two-origins diff --git 
a/git-repository/tests/fixtures/make_remote_repos.sh b/git-repository/tests/fixtures/make_remote_repos.sh index 2f05bac7822..b7aed46ceaa 100644 --- a/git-repository/tests/fixtures/make_remote_repos.sh +++ b/git-repository/tests/fixtures/make_remote_repos.sh @@ -126,6 +126,12 @@ git clone --shared base branch-push-remote git config branch.main.pushRemote myself ) +git clone --shared base branch-dot-remote +(cd branch-dot-remote + + git config branch.main.remote . +) + git init --bare url-rewriting (cd url-rewriting diff --git a/git-repository/tests/init/mod.rs b/git-repository/tests/init/mod.rs index aff094a9f96..09af75a25c9 100644 --- a/git-repository/tests/init/mod.rs +++ b/git-repository/tests/init/mod.rs @@ -1,6 +1,27 @@ mod bare { + use git_testtools::tempfile; + #[test] - fn init_into_empty_directory_creates_a_dot_git_dir() -> crate::Result { + fn init_into_non_existing_directory_creates_it() -> crate::Result { + let tmp = tempfile::tempdir()?; + let git_dir = tmp.path().join("bare.git"); + let repo = git_repository::init_bare(&git_dir)?; + assert_eq!(repo.kind(), git_repository::Kind::Bare); + assert!( + repo.work_dir().is_none(), + "a worktree isn't present in bare repositories" + ); + assert_eq!( + repo.git_dir(), + git_dir, + "the repository is placed into the given directory without added sub-directories" + ); + assert_eq!(git_repository::open(repo.git_dir())?, repo); + Ok(()) + } + + #[test] + fn init_into_empty_directory_uses_it_directly() -> crate::Result { let tmp = tempfile::tempdir()?; let repo = git_repository::init_bare(tmp.path())?; assert_eq!(repo.kind(), git_repository::Kind::Bare); @@ -31,31 +52,74 @@ mod bare { } mod non_bare { + use git_repository as git; + use git_testtools::tempfile; + + #[test] + fn init_bare_with_custom_branch_name() -> crate::Result { + let tmp = tempfile::tempdir()?; + let repo: git::Repository = git::ThreadSafeRepository::init_opts( + tmp.path(), + git::create::Kind::Bare, + git::create::Options::default(), + git::open::Options::isolated().config_overrides(Some("init.defaultBranch=special")), + )? 
+ .into(); + assert_eq!( + repo.head()?.referent_name().expect("name").as_bstr(), + "refs/heads/special" + ); + Ok(()) + } #[test] fn init_into_empty_directory_creates_a_dot_git_dir() -> crate::Result { let tmp = tempfile::tempdir()?; - let repo = git_repository::init(tmp.path())?; - assert_eq!(repo.kind(), git_repository::Kind::WorkTree { is_linked: false }); + let repo = git::init(tmp.path())?; + assert_eq!(repo.kind(), git::Kind::WorkTree { is_linked: false }); assert_eq!(repo.work_dir(), Some(tmp.path()), "there is a work tree by default"); assert_eq!( repo.git_dir(), tmp.path().join(".git"), "there is a work tree by default" ); - assert_eq!(git_repository::open(repo.git_dir())?, repo); - assert_eq!( - git_repository::open(repo.work_dir().as_ref().expect("non-bare repo"))?, - repo - ); + assert_eq!(git::open(repo.git_dir())?, repo); + assert_eq!(git::open(repo.work_dir().as_ref().expect("non-bare repo"))?, repo); Ok(()) } #[test] - fn init_into_non_empty_directory_is_allowed() -> crate::Result { + fn init_into_non_empty_directory_is_not_allowed_if_option_is_set_as_used_for_clone() -> crate::Result { let tmp = tempfile::tempdir()?; std::fs::write(tmp.path().join("existing.txt"), b"I was here before you")?; - git_repository::init(tmp.path())?; + let err = git::ThreadSafeRepository::init_opts( + tmp.path(), + git::create::Kind::WithWorktree, + git::create::Options { + destination_must_be_empty: true, + ..Default::default() + }, + git::open::Options::isolated(), + ) + .unwrap_err(); + assert!(err + .to_string() + .starts_with("Refusing to initialize the non-empty directory as")); + Ok(()) + } + + #[test] + fn init_into_non_empty_directory_is_allowed_by_default() -> crate::Result { + let tmp = tempfile::tempdir()?; + std::fs::write(tmp.path().join("existing.txt"), b"I was here before you")?; + + let repo = git_repository::init(tmp.path())?; + assert_eq!(repo.work_dir().expect("present"), tmp.path()); + assert_eq!( + repo.git_dir(), + tmp.path().join(".git"), + "gitdir is inside of the workdir" + ); Ok(()) } } diff --git a/git-repository/tests/object/tree.rs b/git-repository/tests/object/tree.rs index 2f1c497f8aa..56719a3764b 100644 --- a/git-repository/tests/object/tree.rs +++ b/git-repository/tests/object/tree.rs @@ -35,9 +35,7 @@ mod diff { .diff() .expect("changed file") .expect("objects available") - .lines(git::diff::text::Algorithm::Myers, |_| { - git_diff::text::imara::sink::Counter::default() - }); + .lines(|_| git_diff::text::imara::sink::Counter::default()); assert_eq!(count.insertions, 1); assert_eq!(count.removals, 0); Ok(Default::default()) diff --git a/git-repository/tests/reference/remote.rs b/git-repository/tests/reference/remote.rs index aa0511c4699..fc2fea7a9a3 100644 --- a/git-repository/tests/reference/remote.rs +++ b/git-repository/tests/reference/remote.rs @@ -82,6 +82,40 @@ fn not_configured() -> crate::Result { Ok(()) } +#[test] +fn dot_remote_behind_symbol() -> crate::Result { + let repo = remote::repo("branch-dot-remote"); + let head = repo.head()?; + let branch = head.clone().try_into_referent().expect("history"); + + assert_eq!( + branch + .remote_name(git::remote::Direction::Push) + .expect("derived push") + .as_url(), + Some(".".into()) + ); + assert_eq!( + branch + .remote_name(git::remote::Direction::Fetch) + .expect("fetch") + .as_url(), + Some(".".into()) + ); + + { + let remote = branch + .remote(git::remote::Direction::Push) + .transpose()? 
+ .expect("present"); + assert_eq!(remote.name(), None, "It's a url after all, anonymous"); + assert_eq!(remote.url(git::remote::Direction::Push).unwrap().path, "."); + assert_eq!(remote.url(git::remote::Direction::Fetch).unwrap().path, "."); + } + + Ok(()) +} + #[test] fn url_as_remote_name() -> crate::Result { let repo = remote::repo("remote-as-url"); diff --git a/git-repository/tests/remote/fetch.rs b/git-repository/tests/remote/fetch.rs index 5db8778934b..729661c9d7a 100644 --- a/git-repository/tests/remote/fetch.rs +++ b/git-repository/tests/remote/fetch.rs @@ -83,8 +83,8 @@ mod blocking_io { ), ( Some(git::protocol::transport::Protocol::V1), - 3, - "c75114f60ab2c9389916f3de1082bbaa47491e3b", + 4, + "d07c527cf14e524a8494ce6d5d08e28079f5c6ea", // TODO: if these are the same, remove them ), ] { let (mut repo, _tmp) = repo_rw("two-origins"); @@ -128,7 +128,7 @@ mod blocking_io { } => { assert_eq!(write_pack_bundle.pack_version, git::odb::pack::data::Version::V2); assert_eq!(write_pack_bundle.object_hash, repo.object_hash()); - assert_eq!(write_pack_bundle.index.num_objects, expected_objects, "this value is 4 when git does it with 'consecutive' negotiation style, but could be 33 if completely naive."); + assert_eq!(write_pack_bundle.index.num_objects, expected_objects, "{dry_run}: this value is 4 when git does it with 'consecutive' negotiation style, but could be 33 if completely naive."); assert_eq!( write_pack_bundle.index.index_version, git::odb::pack::index::Version::V2 @@ -136,7 +136,21 @@ mod blocking_io { assert_eq!(write_pack_bundle.index.index_hash, hex_to_id(expected_hash)); assert!(write_pack_bundle.data_path.map_or(false, |f| f.is_file())); assert!(write_pack_bundle.index_path.map_or(false, |f| f.is_file())); - assert_eq!(update_refs.edits.len(), 1); + assert_eq!(update_refs.edits.len(), 2); + + let edit = &update_refs.edits[0]; + assert_eq!(edit.name.as_bstr(), "refs/remotes/changes-on-top-of-origin/main"); + assert!( + edit.change.new_value().expect("no deletion").try_id().is_some(), + "a simple peeled ref" + ); + let edit = &update_refs.edits[1]; + assert_eq!(edit.name.as_bstr(), "refs/remotes/changes-on-top-of-origin/symbolic"); + assert!( + edit.change.new_value().expect("no deletion").try_id().is_some(), + "on the remote this is a symbolic ref, we just write its destination object id though" + ); + assert!( !write_pack_bundle.keep_path.map_or(false, |f| f.is_file()), ".keep files are deleted if there is one edit" @@ -150,10 +164,16 @@ mod blocking_io { assert_eq!( refs.updates, - vec![fetch::refs::Update { - mode: fetch::refs::update::Mode::New, - edit_index: Some(0), - }] + vec![ + fetch::refs::Update { + mode: fetch::refs::update::Mode::New, + edit_index: Some(0), + }, + fetch::refs::Update { + mode: fetch::refs::update::Mode::New, + edit_index: Some(1), + } + ] ); for (_update, mapping, _spec, edit) in refs.iter_mapping_updates(&outcome.ref_map.mappings, remote.refspecs(Fetch)) @@ -162,7 +182,7 @@ mod blocking_io { if dry_run { assert_eq!( edit.change.new_value().expect("no deletions").id(), - mapping.remote.as_id() + mapping.remote.as_id().expect("no unborn") ); assert!( repo.try_find_reference(edit.name.as_ref())?.is_none(), @@ -172,7 +192,7 @@ mod blocking_io { let r = repo.find_reference(edit.name.as_ref()).unwrap(); assert_eq!( r.id(), - *mapping.remote.as_id(), + *mapping.remote.as_id().expect("no unborn"), "local reference should point to remote id" ); } diff --git a/git-repository/tests/repository/mod.rs b/git-repository/tests/repository/mod.rs index 
89c00f76a20..c0e24b69051 100644 --- a/git-repository/tests/repository/mod.rs +++ b/git-repository/tests/repository/mod.rs @@ -10,11 +10,12 @@ mod worktree; #[test] fn size_in_memory() { - let expected = [728, 744, 784]; let actual_size = std::mem::size_of::(); + let limit = 850; assert!( - expected.contains(&actual_size), - "size of Repository shouldn't change without us noticing, it's meant to be cloned: should have been within {:?}, was {}", - expected, actual_size + actual_size < limit, + "size of Repository shouldn't change without us noticing, it's meant to be cloned: should have been below {:?}, was {}", + limit, + actual_size ); } diff --git a/git-repository/tests/repository/object.rs b/git-repository/tests/repository/object.rs index 8809770d872..ff6b99d68c9 100644 --- a/git-repository/tests/repository/object.rs +++ b/git-repository/tests/repository/object.rs @@ -1,4 +1,5 @@ use git_repository as git; +use git_testtools::tempfile; mod write_object { use crate::repository::object::empty_bare_repo; @@ -148,7 +149,7 @@ mod tag { mod commit { use git_repository as git; - use git_testtools::hex_to_id; + use git_testtools::{hex_to_id, tempfile}; use crate::{freeze_time, restricted_and_git}; @@ -270,10 +271,8 @@ fn empty_bare_repo() -> crate::Result<(tempfile::TempDir, git::Repository)> { let tmp = tempfile::tempdir()?; let repo = git::ThreadSafeRepository::init_opts( tmp.path(), - git::create::Options { - bare: true, - fs_capabilities: None, - }, + git::create::Kind::Bare, + git::create::Options::default(), git::open::Options::isolated(), )? .into(); diff --git a/git-repository/tests/repository/reference.rs b/git-repository/tests/repository/reference.rs index b9ec7c17543..6189a9fea34 100644 --- a/git-repository/tests/repository/reference.rs +++ b/git-repository/tests/repository/reference.rs @@ -1,6 +1,7 @@ mod set_namespace { use git_repository as git; use git_repository::refs::transaction::PreviousValue; + use git_testtools::tempfile; fn easy_repo_rw() -> crate::Result<(git::Repository, tempfile::TempDir)> { crate::repo_rw("make_references_repo.sh") diff --git a/git-repository/tests/util/mod.rs b/git-repository/tests/util/mod.rs index e51af863ddb..8058f3e0032 100644 --- a/git-repository/tests/util/mod.rs +++ b/git-repository/tests/util/mod.rs @@ -1,4 +1,5 @@ use git_repository::{open, Repository, ThreadSafeRepository}; +use git_testtools::tempfile; pub type Result = std::result::Result>; diff --git a/git-transport/src/client/capabilities.rs b/git-transport/src/client/capabilities.rs index 241bf11b3f7..79c9422b0c4 100644 --- a/git-transport/src/client/capabilities.rs +++ b/git-transport/src/client/capabilities.rs @@ -37,7 +37,7 @@ impl<'a> Capability<'a> { /// Returns the name of the capability. /// /// Most capabilities only consist of a name, making them appear like a feature toggle. - pub fn name(&self) -> &BStr { + pub fn name(&self) -> &'a BStr { self.0 .splitn(2, |b| *b == b'=') .next() @@ -48,11 +48,11 @@ impl<'a> Capability<'a> { /// /// Note that the caller must know whether a single or multiple values are expected, in which /// case [`values()`][Capability::values()] should be called. - pub fn value(&self) -> Option<&BStr> { + pub fn value(&self) -> Option<&'a BStr> { self.0.splitn(2, |b| *b == b'=').nth(1).map(|s| s.as_bstr()) } /// Returns the values of a capability if its [`value()`][Capability::value()] is space separated. 
- pub fn values(&self) -> Option> { + pub fn values(&self) -> Option> { self.value().map(|v| v.split(|b| *b == b' ').map(|s| s.as_bstr())) } /// Returns true if its space-separated [`value()`][Capability::value()] contains the given `want`ed capability. diff --git a/git-url/src/lib.rs b/git-url/src/lib.rs index ed3deedd2c3..f9bd4344a0c 100644 --- a/git-url/src/lib.rs +++ b/git-url/src/lib.rs @@ -28,6 +28,8 @@ pub use scheme::Scheme; /// Additionally there is support for [deserialization][Url::from_bytes()] and serialization /// (_see the `Display::fmt()` implementation_). /// +/// # Deviation +/// /// Note that we do not support passing the password using the URL as it's likely leading to accidents. #[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)] #[cfg_attr(feature = "serde1", derive(serde::Serialize, serde::Deserialize))] @@ -91,6 +93,16 @@ impl Url { self.serialize_alternative_form = use_alternate_form; self } + + /// Turn a file url like `file://relative` into `file:///root/relative`, hence it assures the url's path component is absolute. + pub fn canonicalize(&mut self) -> Result<(), git_path::realpath::Error> { + if self.scheme == Scheme::File { + let path = git_path::from_bstr(self.path.as_ref()); + let abs_path = git_path::realpath(path)?; + self.path = git_path::into_bstr(abs_path).into_owned(); + } + Ok(()) + } } /// Access @@ -123,6 +135,16 @@ impl Url { } } +/// Transformation +impl Url { + /// Turn a file url like `file://relative` into `file:///root/relative`, hence it assures the url's path component is absolute. + pub fn canonicalized(&self) -> Result { + let mut res = self.clone(); + res.canonicalize()?; + Ok(res) + } +} + /// Serialization impl Url { /// Write this URL losslessly to `out`, ready to be parsed again. diff --git a/git-url/tests/access/mod.rs b/git-url/tests/access/mod.rs new file mode 100644 index 00000000000..2e5eeabc408 --- /dev/null +++ b/git-url/tests/access/mod.rs @@ -0,0 +1,26 @@ +mod canonicalized { + #[test] + fn non_file_scheme_is_noop() -> crate::Result { + let url = git_url::parse("https://github.com/byron/gitoxide".into())?; + assert_eq!(url.canonicalized()?, url); + Ok(()) + } + + #[test] + fn absolute_file_url_does_nothing() -> crate::Result { + #[cfg(not(windows))] + let url = git_url::parse("/this/path/does/not/exist".into())?; + #[cfg(windows)] + let url = git_url::parse("C:\\non\\existing".into())?; + assert_eq!(url.canonicalized()?, url); + Ok(()) + } + + #[test] + fn file_that_is_current_dir_is_absolutized() -> crate::Result { + let url = git_url::parse(".".into())?; + assert!(git_path::from_bstr(url.path.as_ref()).is_relative()); + assert!(git_path::from_bstr(url.canonicalized()?.path.as_ref()).is_absolute()); + Ok(()) + } +} diff --git a/git-url/tests/url.rs b/git-url/tests/url.rs index 83a948a6722..9efd0163648 100644 --- a/git-url/tests/url.rs +++ b/git-url/tests/url.rs @@ -1,5 +1,6 @@ pub type Error = Box; pub type Result = std::result::Result<(), Error>; +mod access; mod expand_path; mod parse; diff --git a/git-worktree/src/index/checkout.rs b/git-worktree/src/index/checkout.rs index e5923332cbd..6f094777896 100644 --- a/git-worktree/src/index/checkout.rs +++ b/git-worktree/src/index/checkout.rs @@ -17,6 +17,7 @@ pub struct ErrorRecord { pub error: Box, } +#[derive(Default)] pub struct Outcome { /// The amount of files updated, or created. pub files_updated: usize, @@ -53,7 +54,7 @@ pub struct Options { /// Default true. 
pub trust_ctime: bool, /// If true, all stat fields will be used when checking for up-to-date'ness of the entry. Otherwise - /// nano-second parts of mtime and ctime,uid, gid, inode and device number won't be used, leaving only + /// nano-second parts of mtime and ctime,uid, gid, inode and device number _will not_ be used, leaving only /// the whole-second part of ctime and mtime and the file size to be checked. /// /// Default true. diff --git a/git-worktree/src/index/entry.rs b/git-worktree/src/index/entry.rs index c5ba0093929..76ba6fe0901 100644 --- a/git-worktree/src/index/entry.rs +++ b/git-worktree/src/index/entry.rs @@ -19,7 +19,7 @@ pub fn checkout( entry_path: &BStr, Context { find, path_cache, buf }: Context<'_, '_, Find>, index::checkout::Options { - fs: crate::fs::Capabilities { + fs: fs::Capabilities { symlink, executable_bit, .. @@ -85,9 +85,7 @@ where .map_err(|_| index::checkout::Error::IllformedUtf8 { path: obj.data.into() })?; if symlink { - try_write_or_unlink(dest, overwrite_existing, |p| { - crate::os::create_symlink(symlink_destination, p) - })?; + try_write_or_unlink(dest, overwrite_existing, |p| os::create_symlink(symlink_destination, p))?; } else { let mut file = try_write_or_unlink(dest, overwrite_existing, |p| { open_options(p, destination_is_initially_empty, overwrite_existing).open(&dest) diff --git a/gitoxide-core/src/hours/mod.rs b/gitoxide-core/src/hours/mod.rs index cd77aeaa7bb..6471eeb514e 100644 --- a/gitoxide-core/src/hours/mod.rs +++ b/gitoxide-core/src/hours/mod.rs @@ -145,7 +145,7 @@ where let mut repo = repo.clone(); repo.object_cache_size_if_unset(4 * 1024 * 1024); let rx = rx.clone(); - move || -> Result<_, git::object::tree::diff::Error> { + move || -> Result<_, git::object::tree::diff::for_each::Error> { let mut out = Vec::new(); for (commit_idx, parent_commit, commit) in rx { if let Some(c) = commit_counter.as_ref() { @@ -229,12 +229,9 @@ where is_text_file.then(|| change.event.diff()).flatten() { let mut nl = 0; - let counts = diff.lines( - git::diff::text::Algorithm::Myers, - |_| { - git::diff::text::imara::sink::Counter::default() - }, - ); + let counts = diff.lines(|_| { + git::diff::text::imara::sink::Counter::default() + }); nl += counts.insertions as usize + counts.removals as usize; lines.added += counts.insertions as usize; diff --git a/gitoxide-core/src/organize.rs b/gitoxide-core/src/organize.rs index 65f3b79e4d8..61fa2f9e12b 100644 --- a/gitoxide-core/src/organize.rs +++ b/gitoxide-core/src/organize.rs @@ -24,7 +24,7 @@ fn find_git_repository_workdirs( debug: bool, ) -> impl Iterator where -
<P as Progress>
::SubProgress: Sync, + P::SubProgress: Sync, { progress.init(None, progress::count("filesystem items")); fn is_repository(path: &Path) -> Option { @@ -223,7 +223,7 @@ pub fn discover( debug: bool, ) -> anyhow::Result<()> where - <
<P as Progress>
::SubProgress as Progress>::SubProgress: Sync, + ::SubProgress: Sync, { for (git_workdir, _kind) in find_git_repository_workdirs(source_dir, progress.add_child("Searching repositories"), debug) @@ -240,7 +240,7 @@ pub fn run( mut progress: P, ) -> anyhow::Result<()> where - <
<P as Progress>
::SubProgress as Progress>::SubProgress: Sync, + ::SubProgress: Sync, { let mut num_errors = 0usize; let destination = destination.as_ref().canonicalize()?; diff --git a/gitoxide-core/src/pack/create.rs b/gitoxide-core/src/pack/create.rs index 4cf676ee174..c8cb435eb71 100644 --- a/gitoxide-core/src/pack/create.rs +++ b/gitoxide-core/src/pack/create.rs @@ -96,12 +96,12 @@ pub struct Context { pub out: W, } -pub fn create( +pub fn create( repository_path: impl AsRef, tips: impl IntoIterator>, input: Option, output_directory: Option>, - mut progress: impl Progress, + mut progress: P, Context { expansion, nondeterministic_thread_count, @@ -115,6 +115,8 @@ pub fn create( ) -> anyhow::Result<()> where W: std::io::Write, + P: Progress, + P::SubProgress: 'static, { let repo = git::discover(repository_path)?.into_sync(); progress.init(Some(2), progress::steps()); diff --git a/gitoxide-core/src/pack/index.rs b/gitoxide-core/src/pack/index.rs index b346039424e..f347d7b04f9 100644 --- a/gitoxide-core/src/pack/index.rs +++ b/gitoxide-core/src/pack/index.rs @@ -75,12 +75,16 @@ pub enum PathOrRead { Read(Box), } -pub fn from_pack( +pub fn from_pack
<P>
( pack: PathOrRead, directory: Option, - progress: impl Progress, + progress: P, ctx: Context<'static, impl io::Write>, -) -> anyhow::Result<()> { +) -> anyhow::Result<()> +where + P: Progress, + P::SubProgress: 'static, +{ use anyhow::Context; let options = pack::bundle::write::Options { thread_limit: ctx.thread_limit, diff --git a/gitoxide-core/src/pack/receive.rs b/gitoxide-core/src/pack/receive.rs index 294cb2780c3..869a6a98eaf 100644 --- a/gitoxide-core/src/pack/receive.rs +++ b/gitoxide-core/src/pack/receive.rs @@ -91,13 +91,15 @@ impl protocol::fetch::DelegateBlocking for CloneDelegate { if self.wanted_refs.is_empty() { for r in refs { let (path, id, _) = r.unpack(); - match self.ref_filter { - Some(ref_prefixes) => { - if ref_prefixes.iter().any(|prefix| path.starts_with_str(prefix)) { - arguments.want(id); + if let Some(id) = id { + match self.ref_filter { + Some(ref_prefixes) => { + if ref_prefixes.iter().any(|prefix| path.starts_with_str(prefix)) { + arguments.want(id); + } } + None => arguments.want(id), } - None => arguments.want(id), } } } else { @@ -142,7 +144,7 @@ mod blocking_io { } } - pub fn receive( + pub fn receive( protocol: Option, url: &str, directory: Option, @@ -150,7 +152,12 @@ mod blocking_io { wanted_refs: Vec, progress: P, ctx: Context, - ) -> anyhow::Result<()> { + ) -> anyhow::Result<()> + where + W: std::io::Write, + P: Progress, + P::SubProgress: 'static, + { let transport = net::connect(url, protocol.unwrap_or_default().into())?; let delegate = CloneDelegate { ctx, @@ -210,7 +217,7 @@ mod async_io { } } - pub async fn receive( + pub async fn receive( protocol: Option, url: &str, directory: Option, @@ -218,7 +225,11 @@ mod async_io { wanted_refs: Vec, progress: P, ctx: Context, - ) -> anyhow::Result<()> { + ) -> anyhow::Result<()> + where + P: Progress + 'static, + W: io::Write + Send + 'static, + { let transport = net::connect(url.to_string(), protocol.unwrap_or_default().into()).await?; let mut delegate = CloneDelegate { ctx, @@ -310,6 +321,9 @@ fn write_raw_refs(refs: &[Ref], directory: PathBuf) -> std::io::Result<()> { }; for r in refs { let (path, content) = match r { + Ref::Unborn { full_ref_name, target } => { + (assure_dir_exists(full_ref_name)?, format!("unborn HEAD: {}", target)) + } Ref::Symbolic { full_ref_name: path, target, diff --git a/gitoxide-core/src/repository/clone.rs b/gitoxide-core/src/repository/clone.rs new file mode 100644 index 00000000000..9647ccfd9d4 --- /dev/null +++ b/gitoxide-core/src/repository/clone.rs @@ -0,0 +1,108 @@ +use crate::OutputFormat; + +pub struct Options { + pub format: OutputFormat, + pub bare: bool, + pub handshake_info: bool, +} + +pub const PROGRESS_RANGE: std::ops::RangeInclusive = 1..=3; + +pub(crate) mod function { + use anyhow::bail; + use git_repository as git; + use git_repository::bstr::BString; + use git_repository::remote::fetch::Status; + use git_repository::Progress; + use std::ffi::OsStr; + + use super::Options; + use crate::repository::fetch::function::print_updates; + use crate::OutputFormat; + + pub fn clone
<P>
( + remote: impl AsRef, + directory: impl AsRef, + overrides: Vec, + mut progress: P, + mut out: impl std::io::Write, + mut err: impl std::io::Write, + Options { + format, + handshake_info, + bare, + }: Options, + ) -> anyhow::Result<()> + where + P: Progress, + P::SubProgress: 'static, + { + if format != OutputFormat::Human { + bail!("JSON output isn't yet supported for fetching."); + } + + let mut prepare = git::clone::PrepareFetch::new( + remote.as_ref(), + directory, + bare.then(|| git::create::Kind::Bare) + .unwrap_or(git::create::Kind::WithWorktree), + git::create::Options::default(), + { + let mut opts = git::open::Options::default().config_overrides(overrides); + opts.permissions.config.git_binary = true; + opts + }, + )?; + let (mut checkout, fetch_outcome) = + prepare.fetch_then_checkout(&mut progress, &git::interrupt::IS_INTERRUPTED)?; + + let (repo, outcome) = if bare { + (checkout.persist(), None) + } else { + let (repo, outcome) = checkout.main_worktree(progress, &git::interrupt::IS_INTERRUPTED)?; + (repo, Some(outcome)) + }; + + if handshake_info { + writeln!(out, "Handshake Information")?; + writeln!(out, "\t{:?}", fetch_outcome.ref_map.handshake)?; + } + + match fetch_outcome.status { + Status::NoChange => { + unreachable!("clone always has changes") + } + Status::DryRun { .. } => unreachable!("dry-run unsupported"), + Status::Change { update_refs, .. } => { + let remote = repo + .find_default_remote(git::remote::Direction::Fetch) + .expect("one origin remote")?; + let ref_specs = remote.refspecs(git::remote::Direction::Fetch); + print_updates(&repo, update_refs, ref_specs, fetch_outcome.ref_map, &mut out, &mut err)?; + } + }; + + if let Some(git::worktree::index::checkout::Outcome { collisions, errors, .. }) = outcome { + if !(collisions.is_empty() && errors.is_empty()) { + let mut messages = Vec::new(); + if !errors.is_empty() { + messages.push(format!("kept going through {} errors(s)", errors.len())); + for record in errors { + writeln!(err, "{}: {}", record.path, record.error).ok(); + } + } + if !collisions.is_empty() { + messages.push(format!("encountered {} collision(s)", collisions.len())); + for col in collisions { + writeln!(err, "{}: collision ({:?})", col.path, col.error_kind).ok(); + } + } + bail!( + "One or more errors occurred - checkout is incomplete: {}", + messages.join(", ") + ); + } + } + Ok(()) + } +} diff --git a/gitoxide-core/src/repository/fetch.rs b/gitoxide-core/src/repository/fetch.rs index 0fe450845f8..9775e3bd945 100644 --- a/gitoxide-core/src/repository/fetch.rs +++ b/gitoxide-core/src/repository/fetch.rs @@ -22,9 +22,9 @@ pub(crate) mod function { use super::Options; use crate::OutputFormat; - pub fn fetch( + pub fn fetch
<P>
( repo: git::Repository, - progress: impl git::Progress, + progress: P, mut out: impl std::io::Write, err: impl std::io::Write, Options { @@ -34,7 +34,11 @@ pub(crate) mod function { handshake_info, ref_specs, }: Options, - ) -> anyhow::Result<()> { + ) -> anyhow::Result<()> + where + P: git::Progress, + P::SubProgress: 'static, + { if format != OutputFormat::Human { bail!("JSON output isn't yet supported for fetching."); } @@ -57,7 +61,15 @@ pub(crate) mod function { let ref_specs = remote.refspecs(git::remote::Direction::Fetch); match res.status { Status::NoChange => { - crate::repository::remote::refs::print_refmap(&repo, ref_specs, res.ref_map, &mut out, err) + let show_unmapped = false; + crate::repository::remote::refs::print_refmap( + &repo, + ref_specs, + res.ref_map, + show_unmapped, + &mut out, + err, + ) } Status::DryRun { update_refs } => print_updates(&repo, update_refs, ref_specs, res.ref_map, &mut out, err), Status::Change { @@ -91,6 +103,7 @@ pub(crate) mod function { let mut last_spec_index = usize::MAX; let mut updates = update_refs .iter_mapping_updates(&map.mappings, refspecs) + .filter_map(|(update, mapping, spec, edit)| spec.map(|spec| (update, mapping, spec, edit))) .collect::>(); updates.sort_by_key(|t| t.2); for (update, mapping, spec, edit) in updates { diff --git a/gitoxide-core/src/repository/mod.rs b/gitoxide-core/src/repository/mod.rs index c966f05fa23..7ce6b6b25a3 100644 --- a/gitoxide-core/src/repository/mod.rs +++ b/gitoxide-core/src/repository/mod.rs @@ -4,12 +4,10 @@ use anyhow::{Context as AnyhowContext, Result}; use git_repository as git; pub fn init(directory: Option) -> Result { - git_repository::create::into( + git::create::into( directory.unwrap_or_default(), - git::create::Options { - bare: false, - fs_capabilities: None, - }, + git::create::Kind::WithWorktree, + git::create::Options::default(), ) .with_context(|| "Repository initialization failed") } @@ -18,10 +16,14 @@ pub mod commit; pub mod config; mod credential; pub use credential::function as credential; +#[cfg(feature = "blocking-client")] +pub mod clone; pub mod exclude; #[cfg(feature = "blocking-client")] pub mod fetch; #[cfg(feature = "blocking-client")] +pub use clone::function::clone; +#[cfg(feature = "blocking-client")] pub use fetch::function::fetch; pub mod index; pub mod mailmap; diff --git a/gitoxide-core/src/repository/remote.rs b/gitoxide-core/src/repository/remote.rs index 2543b6103e7..227deeee737 100644 --- a/gitoxide-core/src/repository/remote.rs +++ b/gitoxide-core/src/repository/remote.rs @@ -2,6 +2,7 @@ mod refs_impl { use anyhow::bail; use git_repository as git; + use git_repository::remote::fetch::Source; use git_repository::{ protocol::fetch, refspec::{match_group::validate::Fix, RefSpec}, @@ -19,7 +20,10 @@ mod refs_impl { pub enum Kind { Remote, - Tracking { ref_specs: Vec }, + Tracking { + ref_specs: Vec, + show_unmapped_remote_refs: bool, + }, } pub struct Options { @@ -46,14 +50,21 @@ mod refs_impl { ) -> anyhow::Result<()> { use anyhow::Context; let mut remote = by_name_or_url(&repo, name_or_url.as_deref())?; - if let refs::Kind::Tracking { ref_specs, .. 
} = &kind { + let show_unmapped = if let refs::Kind::Tracking { + ref_specs, + show_unmapped_remote_refs, + } = &kind + { if format != OutputFormat::Human { bail!("JSON output isn't yet supported for listing ref-mappings."); } if !ref_specs.is_empty() { remote.replace_refspecs(ref_specs.iter(), git::remote::Direction::Fetch)?; } - } + *show_unmapped_remote_refs + } else { + false + }; progress.info(format!( "Connecting to {:?}", remote @@ -75,9 +86,14 @@ mod refs_impl { writeln!(out, "\t{:?}", map.handshake)?; } match kind { - refs::Kind::Tracking { .. } => { - print_refmap(&repo, remote.refspecs(git::remote::Direction::Fetch), map, out, err) - } + refs::Kind::Tracking { .. } => print_refmap( + &repo, + remote.refspecs(git::remote::Direction::Fetch), + map, + show_unmapped, + out, + err, + ), refs::Kind::Remote => { match format { OutputFormat::Human => drop(print(out, &map.remote_refs)), @@ -96,6 +112,7 @@ mod refs_impl { repo: &git::Repository, refspecs: &[RefSpec], mut map: git::remote::fetch::RefMap, + show_unmapped_remotes: bool, mut out: impl std::io::Write, mut err: impl std::io::Write, ) -> anyhow::Result<()> { @@ -169,6 +186,18 @@ mod refs_impl { map.remote_refs.len() - map.mappings.len(), refspecs.len() )?; + if show_unmapped_remotes { + writeln!(&mut out, "\nFiltered: ")?; + for remote_ref in map.remote_refs.iter().filter(|r| { + !map.mappings.iter().any(|m| match &m.remote { + Source::Ref(other) => other == *r, + Source::ObjectId(_) => false, + }) + }) { + print_ref(&mut out, remote_ref)?; + writeln!(&mut out)?; + } + } } if refspecs.is_empty() { bail!("Without ref-specs there is nothing to show here. Add ref-specs as arguments or configure them in git-config.") @@ -187,6 +216,10 @@ mod refs_impl { path: String, object: String, }, + Unborn { + path: String, + target: String, + }, Symbolic { path: String, target: String, @@ -197,6 +230,10 @@ mod refs_impl { impl From for JsonRef { fn from(value: fetch::Ref) -> Self { match value { + fetch::Ref::Unborn { full_ref_name, target } => JsonRef::Unborn { + path: full_ref_name.to_string(), + target: target.to_string(), + }, fetch::Ref::Direct { full_ref_name: path, object, @@ -242,6 +279,10 @@ mod refs_impl { target, object, } => write!(&mut out, "{} {} symref-target:{}", object, path, target).map(|_| object.as_ref()), + fetch::Ref::Unborn { full_ref_name, target } => { + static NULL: git::hash::ObjectId = git::hash::ObjectId::null(git::hash::Kind::Sha1); + write!(&mut out, "unborn {} symref-target:{}", full_ref_name, target).map(|_| NULL.as_ref()) + } } } diff --git a/src/plumbing/main.rs b/src/plumbing/main.rs index dae2ad0eca6..24633d306c2 100644 --- a/src/plumbing/main.rs +++ b/src/plumbing/main.rs @@ -112,6 +112,27 @@ pub fn main() -> Result<()> { })?; match cmd { + #[cfg(feature = "gitoxide-core-blocking-client")] + Subcommands::Clone(crate::plumbing::options::clone::Platform { + handshake_info, + bare, + remote, + directory, + }) => { + let opts = core::repository::clone::Options { + format, + bare, + handshake_info, + }; + prepare_and_run( + "clone", + verbose, + progress, + progress_keep_open, + core::repository::clone::PROGRESS_RANGE, + move |progress, out, err| core::repository::clone(remote, directory, config, progress, out, err, opts), + ) + } #[cfg(feature = "gitoxide-core-blocking-client")] Subcommands::Fetch(crate::plumbing::options::fetch::Platform { dry_run, @@ -157,9 +178,13 @@ pub fn main() -> Result<()> { remote::Subcommands::Refs | remote::Subcommands::RefMap { .. 
} => { let kind = match cmd { remote::Subcommands::Refs => core::repository::remote::refs::Kind::Remote, - remote::Subcommands::RefMap { ref_spec } => { - core::repository::remote::refs::Kind::Tracking { ref_specs: ref_spec } - } + remote::Subcommands::RefMap { + ref_spec, + show_unmapped_remote_refs, + } => core::repository::remote::refs::Kind::Tracking { + ref_specs: ref_spec, + show_unmapped_remote_refs, + }, }; let context = core::repository::remote::refs::Options { name_or_url: name, diff --git a/src/plumbing/options/mod.rs b/src/plumbing/options/mod.rs index 25906a241fd..a0971cc40eb 100644 --- a/src/plumbing/options/mod.rs +++ b/src/plumbing/options/mod.rs @@ -84,6 +84,8 @@ pub enum Subcommands { /// Fetch data from remotes and store it in the repository #[cfg(feature = "gitoxide-core-blocking-client")] Fetch(fetch::Platform), + #[cfg(feature = "gitoxide-core-blocking-client")] + Clone(clone::Platform), /// Interact with the mailmap. #[clap(subcommand)] Mailmap(mailmap::Subcommands), @@ -142,6 +144,29 @@ pub mod fetch { } } +#[cfg(feature = "gitoxide-core-blocking-client")] +pub mod clone { + use std::ffi::OsString; + use std::path::PathBuf; + + #[derive(Debug, clap::Parser)] + pub struct Platform { + /// Output additional typically information provided by the server as part of the connection handshake. + #[clap(long, short = 'H')] + pub handshake_info: bool, + + /// If set, the clone will be bare and a working tree checkout won't be available. + #[clap(long)] + pub bare: bool, + + /// The url of the remote to connect to, like `https://github.com/byron/gitoxide`. + pub remote: OsString, + + /// The directory to initialize with the new repository and to which all data should be written. + pub directory: PathBuf, + } +} + #[cfg(any(feature = "gitoxide-core-async-client", feature = "gitoxide-core-blocking-client"))] pub mod remote { use git_repository as git; @@ -170,6 +195,9 @@ pub mod remote { Refs, /// Print all references available on the remote as filtered through ref-specs. RefMap { + /// Also display remote references that were sent by the server, but filtered by the refspec locally. + #[clap(long, short = 'u')] + show_unmapped_remote_refs: bool, /// Override the built-in and configured ref-specs with one or more of the given ones. #[clap(parse(try_from_os_str = git::env::os_str_to_bstring))] ref_spec: Vec, diff --git a/src/plumbing/progress.rs b/src/plumbing/progress.rs index 80526e6e5f6..fb51766e143 100644 --- a/src/plumbing/progress.rs +++ b/src/plumbing/progress.rs @@ -6,7 +6,9 @@ use tabled::{Style, TableIteratorExt, Tabled}; #[derive(Clone)] enum Usage { - NotApplicable, + NotApplicable { + reason: &'static str, + }, Planned { note: Option<&'static str>, }, @@ -26,7 +28,7 @@ impl Display for Usage { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { Puzzled => f.write_str("❓")?, - NotApplicable => f.write_str("not applicable")?, + NotApplicable { reason } => write!(f, "not applicable: {reason}")?, NotPlanned { reason } => { write!(f, "{}", "not planned".blink())?; write!(f, " ℹ {} ℹ", reason.bright_white())?; @@ -52,7 +54,7 @@ impl Usage { pub fn icon(&self) -> &'static str { match self { Puzzled => "?", - NotApplicable => "❌", + NotApplicable { .. } => "❌", Planned { .. } => "🕒", NotPlanned { .. } => "🤔", InModule { deviation, .. 
@@ -83,6 +85,161 @@ impl Tabled for Record {
 }
 static GIT_CONFIG: &[Record] = &[
+    Record {
+        config: "core.safeCRLF",
+        usage: Planned { note: Some("safety is not optional") },
+    },
+    Record {
+        config: "core.fileMode",
+        usage: InModule {name: "config", deviation: None},
+    },
+    Record {
+        config: "core.hideDotFiles",
+        usage: Planned {note: Some("Seems useful, but needs demand from windows users")}
+    },
+    Record {
+        config: "core.trustCTime",
+        usage: Planned { note: Some("Needed for checkout - read from config but not used yet") },
+    },
+    Record {
+        config: "core.checkStat",
+        usage: Planned { note: Some("Needed for checkout - read from config but not used yet further down") },
+    },
+    Record {
+        config: "core.symlinks",
+        usage: InModule {name: "config", deviation: None},
+    },
+    Record {
+        config: "core.packedGitWindowSize",
+        usage: NotPlanned { reason: "an optimization for handling many large packs more efficiently seems unnecessary" }
+    },
+    Record {
+        config: "core.packedGitLimit",
+        usage: NotApplicable { reason: "we target 32bit systems only and don't use a windowing mechanism" }
+    },
+    Record {
+        config: "core.deltaBaseCacheLimit",
+        usage: NotApplicable { reason: "we use a small 64 slot pack delta cache by default, which can be replaced with larger caches as determined by the algorithm. This keeps memory usage low and is fast enough" }
+    },
+    Record {
+        config: "core.bigFileThreshold",
+        usage: Planned { note: Some("unfortunately we can't stream packed files yet, even if not delta-compressed, but respecting the threshold for other operations is definitely a must") }
+    },
+    Record {
+        config: "core.compression",
+        usage: Planned { note: Some("Allow to remove similar hardcoded value - passing it through will be some effort") },
+    },
+    Record {
+        config: "core.loosecompression",
+        usage: Planned { note: None },
+    },
+    Record {
+        config: "core.ignorecase",
+        usage: InModule {name: "config", deviation: None}
+    },
+    Record {
+        config: "core.precomposeUnicode",
+        usage: InModule {name: "config", deviation: Some("This must be explicitly handled when data is coming into the program to fully work")}
+    },
+    Record {
+        config: "core.protectHFS",
+        usage: Planned { note: Some("relevant for checkout on MacOS") },
+    },
+    Record {
+        config: "core.protectNTFS",
+        usage: NotPlanned { reason: "lack of demand"},
+    },
+    Record {
+        config: "core.sparseCheckout",
+        usage: Planned { note: Some("we want to support huge repos and be the fastest in doing so") },
+    },
+    Record {
+        config: "core.sparseCheckoutCone",
+        usage: Planned { note: Some("this is a nice improvement over sparseCheckout alone and should one day be available too") },
+    },
+    Record {
+        config: "checkout.defaultRemote",
+        usage: Planned { note: Some("needed for correct checkout behaviour, similar to what git does") },
+    },
+    Record {
+        config: "core.untrackedCache",
+        usage: Planned { note: Some("needed for fast worktree operation") },
+    },
+    Record {
+        config: "checkout.guess",
+        usage: Planned { note: None },
+    },
+    Record {
+        config: "checkout.workers",
+        usage: InModule {name: "clone::checkout", deviation: Some("if unset, uses all cores instead of just one")},
+    },
+    Record {
+        config: "checkout.thresholdForParallelism",
+        usage: NotApplicable {reason: "parallelism is efficient enough to always run with benefit"},
+    },
+    Record {
+        config: "feature.manyFiles",
+        usage: Planned {note: Some("big repositories are on the roadmap")},
+    },
+    Record {
+        config: "core.preloadIndex",
+        usage: Planned {note: Some("it's enabled by default and allows parallel stat checks - it's using a lot of CPU for just minor performance boosts though")},
+    },
+    Record {
+        config: "index.sparse",
+        usage: Planned {note: Some("we can read sparse indices and support for it will be added early on")},
+    },
+    Record {
+        config: "merge.renormalize",
+        usage: Planned {note: Some("once merging is being implemented, renormalization should be respected")},
+    },
+    Record {
+        config: "sparse.expectFilesOutsideOfPatterns",
+        usage: Planned {note: Some("a feature definitely worth having")},
+    },
+    Record {
+        config: "submodule.recurse",
+        usage: Planned {note: Some("very relevant for doing the right thing during checkouts")},
+    },
+    Record {
+        config: "branch.autoSetupRebase",
+        usage: Planned {
+            note: Some("for when we allow setting up upstream branches")
+        },
+    },
+    Record {
+        config: "branch.<name>.remote",
+        usage: InModule {
+            name: "reference::remote",
+            deviation: None
+        },
+    },
+    Record {
+        config: "branch.<name>.pushRemote",
+        usage: InModule {
+            name: "reference::remote",
+            deviation: None
+        },
+    },
+    Record {
+        config: "branch.<name>.merge",
+        usage: InModule {
+            name: "repository::config",
+            deviation: None
+        },
+    },
+    Record {
+        config: "branch.<name>.rebase",
+        usage: Planned {
+            note: Some("for when we can merge, rebase should be supported")
+        },
+    },
+    Record {
+        config: "branch.<name>.description",
+        usage: NotPlanned {
+            reason: "no plan to implement format-patch or request-pull summary"
+        },
+    },
     Record {
         config: "core.bare",
         usage: InModule {
@@ -97,6 +254,10 @@ static GIT_CONFIG: &[Record] = &[
             deviation: None,
         },
     },
+    Record {
+        config: "core.attributesFile",
+        usage: Planned {note: Some("for checkout - it's already queried but needs building of attributes group, and of course support during checkout")},
+    },
     Record {
         config: "core.abbrev",
         usage: InModule {
@@ -104,6 +265,13 @@ static GIT_CONFIG: &[Record] = &[
             deviation: None,
         },
     },
+    Record {
+        config: "core.askPass",
+        usage: InModule {
+            name: "config::snapshot::credential_helpers",
+            deviation: None,
+        },
+    },
     Record {
         config: "core.ignoreCase",
         usage: InModule {
@@ -151,6 +319,10 @@ static GIT_CONFIG: &[Record] = &[
             deviation: None,
         },
     },
+    Record {
+        config: "diff.algorithm",
+        usage: InModule {name: "config::cache::access", deviation: Some("'patience' diff is not implemented and can default to 'histogram' if lenient config is used")},
+    },
     Record {
         config: "extensions.objectFormat",
         usage: InModule {
@@ -261,11 +433,11 @@ static GIT_CONFIG: &[Record] = &[
     },
     Record {
         config: "fetch.showForcedUpdates",
-        usage: NotApplicable,
+        usage: NotApplicable {reason: "we don't support advices"},
     },
     Record {
         config: "fetch.output",
-        usage: NotApplicable,
+        usage: NotPlanned {reason: "'gix' might support it, but there is no intention on copying the 'git' CLI"},
     },
     Record {
         config: "fetch.negotiationAlgorithm",
@@ -273,6 +445,13 @@ static GIT_CONFIG: &[Record] = &[
             note: Some("Implements our own 'naive' algorithm, only"),
         },
     },
+    Record {
+        config: "init.defaultBranch",
+        usage: InModule {
+            name: "init",
+            deviation: Some("If unset, we default to 'main' instead of 'master'")
+        },
+    },
     Record {
         config: "pack.threads",
         usage: InModule {
@@ -332,6 +511,29 @@ pub fn show_progress() -> anyhow::Result<()> {
     };
     println!("{}", sorted.table().with(Style::blank()));
-    println!("\nTotal records: {}", GIT_CONFIG.len());
+    println!(
+        "\nTotal records: {} ({perfect_icon} = {perfect}, {deviation_icon} = {deviation}, {planned_icon} = {planned})",
+        GIT_CONFIG.len(),
+        perfect_icon = InModule {
+            name: "",
+            deviation: None
+        }
+        .icon(),
+        deviation_icon = InModule {
+            name: "",
+            deviation: Some("")
+        }
+        .icon(),
+        planned_icon = Planned { note: None }.icon(),
+        planned = GIT_CONFIG.iter().filter(|e| matches!(e.usage, Planned { .. })).count(),
+        perfect = GIT_CONFIG
+            .iter()
+            .filter(|e| matches!(e.usage, InModule { deviation, .. } if deviation.is_none()))
+            .count(),
+        deviation = GIT_CONFIG
+            .iter()
+            .filter(|e| matches!(e.usage, InModule { deviation, .. } if deviation.is_some()))
+            .count()
+    );
     Ok(())
 }
diff --git a/src/shared.rs b/src/shared.rs
index d9b5ae05af9..f0765646ce1 100644
--- a/src/shared.rs
+++ b/src/shared.rs
@@ -121,16 +121,18 @@ pub mod pretty {
             run(progress::DoOrDiscard::from(None), &mut stdout_lock, &mut stderr_lock)
         }
         (true, false) => {
-            let progress = crate::shared::progress_tree();
+            use crate::shared::{self, STANDARD_RANGE};
+            let progress = shared::progress_tree();
             let sub_progress = progress.add_child(name);
-            use crate::shared::{self, STANDARD_RANGE};
             let handle = shared::setup_line_renderer_range(&progress, range.into().unwrap_or(STANDARD_RANGE));
             let mut out = Vec::<u8>::new();
-            let res = run(progress::DoOrDiscard::from(Some(sub_progress)), &mut out, &mut stderr());
+            let mut err = Vec::<u8>::new();
+            let res = run(progress::DoOrDiscard::from(Some(sub_progress)), &mut out, &mut err);
             handle.shutdown_and_wait();
             std::io::Write::write_all(&mut stdout(), &out)?;
+            std::io::Write::write_all(&mut stderr(), &err)?;
             res
         }
         #[cfg(not(feature = "prodash-render-tui"))]
diff --git a/tests/tools/src/lib.rs b/tests/tools/src/lib.rs
index 9a7ee410db9..b7081fd430f 100644
--- a/tests/tools/src/lib.rs
+++ b/tests/tools/src/lib.rs
@@ -327,11 +327,13 @@ fn scripted_fixture_repo_read_only_with_args_inner(
         .env("GIT_COMMITTER_DATE", "2000-01-02 00:00:00 +0000")
         .env("GIT_COMMITTER_EMAIL", "committer@example.com")
         .env("GIT_COMMITTER_NAME", "committer")
-        .env("GIT_CONFIG_COUNT", "2")
+        .env("GIT_CONFIG_COUNT", "3")
         .env("GIT_CONFIG_KEY_0", "commit.gpgsign")
         .env("GIT_CONFIG_VALUE_0", "false")
         .env("GIT_CONFIG_KEY_1", "init.defaultBranch")
         .env("GIT_CONFIG_VALUE_1", "main")
+        .env("GIT_CONFIG_KEY_2", "protocol.file.allow")
+        .env("GIT_CONFIG_VALUE_2", "always")
         .output()?;
     if !output.status.success() {
         write_failure_marker(&failure_marker);
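Note on the fixture change above: the extra `protocol.file.allow` entry is injected through git's one-off configuration override, where `GIT_CONFIG_COUNT` announces how many numbered `GIT_CONFIG_KEY_<n>`/`GIT_CONFIG_VALUE_<n>` pairs follow (supported by reasonably recent git versions). A minimal, self-contained sketch of that mechanism outside the test helper; the `git config --list --show-origin` invocation is purely illustrative and not part of this patch:

use std::process::Command;

fn main() -> std::io::Result<()> {
    // Inject two one-off configuration entries via environment variables.
    // GIT_CONFIG_COUNT must match the number of KEY/VALUE pairs, with
    // contiguous indices starting at 0, otherwise git complains.
    let output = Command::new("git")
        .args(["config", "--list", "--show-origin"])
        .env("GIT_CONFIG_COUNT", "2")
        .env("GIT_CONFIG_KEY_0", "init.defaultBranch")
        .env("GIT_CONFIG_VALUE_0", "main")
        .env("GIT_CONFIG_KEY_1", "protocol.file.allow")
        .env("GIT_CONFIG_VALUE_1", "always")
        .output()?;

    // The injected entries are listed as command-line configuration.
    print!("{}", String::from_utf8_lossy(&output.stdout));
    Ok(())
}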