[PATCH v1 0/4] lnk clone
This patchset implements a `lnk clone` command, as well as a few other bits
and pieces to make the integration with gitd easier.
The most interesting thing here is the addition of
`librad::git::identities::project::heads` which contains functions for
working with the default branch of a project identity. This is used by
`lnk clone` to allow checking out a project without having to know which
peer you want to check out.
I also updated lnk-identities to correctly set up the include file for
checked out and created identities (this logic looked to have gotten a
little lost in translation from the daemon). While we were here, Fintan
and I also modified `gitd` so that it accepts URLs in the same form as
those in the include file, so users can use `url.<base>.insteadOf` to
point rad remotes at a local gitd.
Published-At: https://github.com/alexjg/radicle-link/tree/patches/lnk-clone/v1
Alex Good (4):
Add default_branch_head and set_default_branch
lnk-identities: update path logic and set up include
Make gitd accept the URL format of include files
Add lnk clone
bins/Cargo.lock | 3 +
cli/gitd-lib/src/git_subprocess.rs | 12 +-
cli/gitd-lib/src/git_subprocess/command.rs | 12 +-
cli/gitd-lib/src/lib.rs | 1 +
cli/gitd-lib/src/processes.rs | 20 +-
cli/gitd-lib/src/server.rs | 5 +-
cli/gitd-lib/src/ssh_service.rs | 50 +++
cli/lnk-exe/src/cli/args.rs | 1 +
cli/lnk-identities/Cargo.toml | 3 +
cli/lnk-identities/src/cli/args.rs | 10 +-
cli/lnk-identities/src/cli/eval/person.rs | 7 +-
cli/lnk-identities/src/cli/eval/project.rs | 7 +-
cli/lnk-identities/src/git/checkout.rs | 70 ++--
cli/lnk-identities/src/git/existing.rs | 16 +-
cli/lnk-identities/src/git/new.rs | 20 +-
cli/lnk-identities/src/identity_dir.rs | 38 ++
cli/lnk-identities/src/lib.rs | 1 +
cli/lnk-identities/src/person.rs | 5 +-
cli/lnk-identities/src/project.rs | 32 +-
.../t/src/tests/git/checkout.rs | 5 +-
.../t/src/tests/git/existing.rs | 6 +-
cli/lnk-identities/t/src/tests/git/new.rs | 5 +-
cli/lnk-sync/Cargo.toml | 10 +-
cli/lnk-sync/src/cli/args.rs | 23 +-
cli/lnk-sync/src/cli/main.rs | 46 ++-
cli/lnk-sync/src/forked.rs | 87 ++++
cli/lnk-sync/src/lib.rs | 3 +
librad/src/git/identities/project.rs | 2 +
librad/src/git/identities/project/heads.rs | 305 ++++++++++++++
librad/t/src/integration/scenario.rs | 1 +
.../scenario/default_branch_head.rs | 387 ++++++++++++++++++
test/it-helpers/Cargo.toml | 3 +
test/it-helpers/src/lib.rs | 1 +
test/it-helpers/src/working_copy.rs | 291 +++++++++++++
34 files changed, 1358 insertions(+), 130 deletions(-)
create mode 100644 cli/gitd-lib/src/ssh_service.rs
create mode 100644 cli/lnk-identities/src/identity_dir.rs
create mode 100644 cli/lnk-sync/src/forked.rs
create mode 100644 librad/src/git/identities/project/heads.rs
create mode 100644 librad/t/src/integration/scenario/default_branch_head.rs
create mode 100644 test/it-helpers/src/working_copy.rs
--
2.36.1
[PATCH v1 1/4] Add default_branch_head and set_default_branch
When checking out projects from the monorepo it is useful to set the
`refs/namespaces/<urn>/refs/HEAD` reference to the default branch of the
project so that the resulting working copy is in a useful state (namely
pointing at the latest commit for the default branch).
In general this is not possible because delegates may have diverging
views of the project, but often they do not disagree. Add
`librad::git::identities::project::heads::default_branch_head` to
determine whether there is an agreed-upon default branch commit and
`librad::git::identities::project::heads::set_default_head` to set the
local `HEAD` ref where possible.
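A rough sketch of how a caller might drive the new API (assuming an
already verified project; the use of `anyhow` here is illustrative and
not part of this patch):

    use librad::{
        git::{
            identities::project::heads::{self, error},
            storage::Storage,
        },
        identities::git::VerifiedProject,
    };

    fn set_head_if_unambiguous(
        storage: &Storage,
        project: VerifiedProject,
    ) -> anyhow::Result<()> {
        match heads::set_default_head(storage, project) {
            // All delegates agree on an ancestry tree: HEAD now points at the
            // most recent commit of the default branch.
            Ok(oid) => println!("HEAD set to {}", oid),
            // The delegates have diverged: fall back to asking the user for an
            // explicit peer to check out.
            Err(error::SetDefaultBranch::Forked(forks)) => {
                println!("delegates have forked into {} histories", forks.len())
            },
            Err(e) => return Err(e.into()),
        }
        Ok(())
    }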
Signed-off-by: Alex Good <alex@memoryandthought.me>
---
librad/src/git/identities/project.rs | 2 +
librad/src/git/identities/project/heads.rs | 305 ++++++++++++++
librad/t/src/integration/scenario.rs | 1 +
.../scenario/default_branch_head.rs | 387 ++++++++++++++++++
test/it-helpers/Cargo.toml | 3 +
test/it-helpers/src/lib.rs | 1 +
test/it-helpers/src/working_copy.rs | 291 +++++++++++++
7 files changed, 990 insertions(+)
create mode 100644 librad/src/git/identities/project/heads.rs
create mode 100644 librad/t/src/integration/scenario/default_branch_head.rs
create mode 100644 test/it-helpers/src/working_copy.rs
diff --git a/librad/src/git/identities/project.rs b/librad/src/git/identities/project.rs
index 753358bf..ed9f003d 100644
--- a/librad/src/git/identities/project.rs
+++ b/librad/src/git/identities/project.rs
@@ -8,6 +8,8 @@ use std::{convert::TryFrom, fmt::Debug};
use either::Either;
use git_ext::{is_not_found_err, OneLevel};
+ pub mod heads;
+
use super::{
super::{
refs::Refs as Sigrefs,
diff --git a/librad/src/git/identities/project/heads.rs b/librad/src/git/identities/project/heads.rs
new file mode 100644
index 00000000..447e352e
--- /dev/null
+++ b/librad/src/git/identities/project/heads.rs
@@ -0,0 +1,305 @@
+ use std::{collections::BTreeSet, convert::TryFrom, fmt::Debug};
+
+ use crate::{
+ git::{
+ storage::{self, ReadOnlyStorage},
+ Urn,
+ },
+ identities::git::VerifiedProject,
+ PeerId,
+ };
+ use git_ext::RefLike;
+ use git_ref_format::{lit, name, Namespaced, Qualified, RefStr, RefString};
+
+ #[derive(Clone, Debug, PartialEq)]
+ pub enum DefaultBranchHead {
+ /// Not all delegates agreed on an ancestry tree. Each set of diverging
+ /// delegates is included as a `Fork`
+ Forked(BTreeSet<Fork>),
+ /// All the delegates agreed on an ancestry tree
+ Head {
+ /// The most recent commit for the tree
+ target: git2::Oid,
+ /// The branch name which is the default branch
+ branch: RefString,
+ },
+ }
+
+ #[derive(Clone, Debug, std::hash::Hash, PartialEq, Eq, PartialOrd, Ord)]
+ pub struct Fork {
+ /// Peers which are in the ancestry set of this fork but not the tips. This
+ /// means that these peers can appear in multiple forks
+ pub ancestor_peers: BTreeSet<PeerId>,
+ /// The peers pointing at the tip of this fork
+ pub tip_peers: BTreeSet<PeerId>,
+ /// The most recent tip
+ pub tip: git2::Oid,
+ }
+
+ pub mod error {
+ use git_ref_format as ref_format;
+ use std::collections::BTreeSet;
+
+ use crate::git::storage::read;
+
+ #[derive(thiserror::Error, Debug)]
+ pub enum FindDefaultBranch {
+ #[error("the project payload does not define a default branch")]
+ NoDefaultBranch,
+ #[error("no peers had published anything for the default branch")]
+ NoTips,
+ #[error(transparent)]
+ RefFormat(#[from] ref_format::Error),
+ #[error(transparent)]
+ Read(#[from] read::Error),
+ }
+
+ #[derive(thiserror::Error, Debug)]
+ pub enum SetDefaultBranch {
+ #[error(transparent)]
+ Find(#[from] FindDefaultBranch),
+ #[error(transparent)]
+ Git(#[from] git2::Error),
+ #[error("the delegates have forked")]
+ Forked(BTreeSet<super::Fork>),
+ }
+ }
+
+ /// Find the head of the default branch of `project`
+ ///
+ /// In general there can be a different view of the default branch of a project
+ /// for each peer ID of each delegate and there is no reason that these would
+ /// all be compatible. It's quite possible that two peers publish entirely
+ /// unrelated ancestry trees for a given branch. In this case this function will
+ /// return [`DefaultBranchHead::Forked`].
+ ///
+ /// However, often it's the case that delegates do agree on an ancestry tree for
+ /// a particular branch and the difference between peers is just that some are
+ /// ahead of others. In this case this function will return
+ /// [`DefaultBranchHead::Head`].
+ ///
+ /// # Errors
+ ///
+ /// * If the project contains no default branch definition
+ /// * No peers had published anything for the default branch
+ pub fn default_branch_head(
+ storage: &storage::Storage,
+ project: VerifiedProject,
+ ) -> Result<DefaultBranchHead, error::FindDefaultBranch> {
+ if let Some(default_branch) = &project.payload().subject.default_branch {
+ let local = storage.peer_id();
+ let branch_refstring = RefString::try_from(default_branch.to_string())?;
+ let mut multiverse = Multiverse::new(branch_refstring.clone());
+ let peers =
+ project
+ .delegations()
+ .into_iter()
+ .flat_map(|d| -> Box<dyn Iterator<Item = PeerId>> {
+ use either::Either::*;
+ match d {
+ Left(key) => Box::new(std::iter::once(PeerId::from(*key))),
+ Right(person) => Box::new(
+ person
+ .delegations()
+ .into_iter()
+ .map(|key| PeerId::from(*key)),
+ ),
+ }
+ });
+ for peer_id in peers {
+ let tip = peer_commit(storage, project.urn(), peer_id, local, &branch_refstring)?;
+ if let Some(tip) = tip {
+ multiverse.add_peer(storage, peer_id, tip)?;
+ }
+ }
+ multiverse.finish()
+ } else {
+ Err(error::FindDefaultBranch::NoDefaultBranch)
+ }
+ }
+
+ /// Determine the default branch for a project and set the local HEAD to this
+ /// branch
+ ///
+ /// In more detail, this function determines the local head using
+ /// [`default_branch_head`] and then sets the following references to the
+ /// `DefaultBranchHead::target` returned:
+ ///
+ /// * `refs/namespaces/<URN>/refs/HEAD`
+ /// * `refs/namespaces/<URN>/refs/<default branch name>`
+ ///
+ /// # Why do this?
+ ///
+ /// When cloning from a namespace representing a project to a working copy we
+ /// would like, if possible, to omit the specification of which particular peer
+ /// we want to clone. Specifically we would like to clone
+ /// `refs/namespaces/<URN>/`. This does work, but the working copy we end up
+ /// with does not have any contents because git uses `refs/HEAD` of the source
+ /// repository to figure out what branch to set the new working copy to.
+ /// Therefore, by setting `refs/HEAD` and `refs/<default branch name>` of the
+ /// namespace `git clone` (and any other clone based workflows) does something
+ /// sensible and we end up with a working copy which is looking at the default
+ /// branch of the project.
+ ///
+ /// # Errors
+ ///
+ /// * If no default branch could be determined
+ pub fn set_default_head(
+ storage: &storage::Storage,
+ project: VerifiedProject,
+ ) -> Result<git2::Oid, error::SetDefaultBranch> {
+ let urn = project.urn();
+ let default_head = default_branch_head(storage, project)?;
+ match default_head {
+ DefaultBranchHead::Head { target, branch } => {
+ // Note that we can't use `Namespaced` because `refs/HEAD` is not a `Qualified`
+ let head =
+ RefString::try_from(format!("refs/namespaces/{}/refs/HEAD", urn.encode_id()))
+ .expect("urn is valid namespace");
+ let branch_head = Namespaced::from(lit::refs_namespaces(
+ &urn,
+ Qualified::from(lit::refs_heads(branch)),
+ ));
+
+ let repo = storage.as_raw();
+ repo.reference(&head, target, true, "set head")?;
+ repo.reference(&branch_head.into_qualified(), target, true, "set head")?;
+ Ok(target)
+ },
+ DefaultBranchHead::Forked(forks) => Err(error::SetDefaultBranch::Forked(forks)),
+ }
+ }
+
+ fn peer_commit(
+ storage: &storage::Storage,
+ urn: Urn,
+ peer_id: PeerId,
+ local: &PeerId,
+ branch: &RefStr,
+ ) -> Result<Option<git2::Oid>, error::FindDefaultBranch> {
+ let remote_name = RefString::try_from(peer_id.default_encoding())?;
+ let reference = if local == &peer_id {
+ RefString::from(Qualified::from(lit::refs_heads(branch)))
+ } else {
+ RefString::from(Qualified::from(lit::refs_remotes(remote_name)))
+ .join(name::HEADS)
+ .join(branch)
+ };
+ let urn = urn.with_path(Some(RefLike::from(reference)));
+ let tip = storage.tip(&urn, git2::ObjectType::Commit)?;
+ Ok(tip.map(|c| c.id()))
+ }
+
+ #[derive(Debug)]
+ struct Multiverse {
+ branch: RefString,
+ histories: Vec<History>,
+ }
+
+ impl Multiverse {
+ fn new(branch: RefString) -> Multiverse {
+ Multiverse {
+ branch,
+ histories: Vec::new(),
+ }
+ }
+
+ fn add_peer(
+ &mut self,
+ storage: &storage::Storage,
+ peer: PeerId,
+ tip: git2::Oid,
+ ) -> Result<(), error::FindDefaultBranch> {
+ // If this peer's tip is in the ancestors of any existing histories then we just
+ // add the peer to those histories
+ let mut found_descendant = false;
+ for history in &mut self.histories {
+ if history.ancestors.contains(&tip) {
+ found_descendant = true;
+ history.ancestor_peers.insert(peer);
+ } else if history.tip == tip {
+ found_descendant = true;
+ history.tip_peers.insert(peer);
+ }
+ }
+ if found_descendant {
+ return Ok(());
+ }
+
+ // Otherwise we load a new history
+ let mut history = History::load(storage, peer, tip)?;
+
+ // Then we go through existing histories and check if any of them are ancestors
+ // of the new history. If they are then we incorporate them as ancestors
+ // of the new history and remove them from the multiverse
+ let mut i = 0;
+ while i < self.histories.len() {
+ let other_history = &self.histories[i];
+ if history.ancestors.contains(&other_history.tip) {
+ let other_history = self.histories.remove(i);
+ history.ancestor_peers.extend(other_history.ancestor_peers);
+ history.ancestor_peers.extend(other_history.tip_peers);
+ } else {
+ i += 1;
+ }
+ }
+ self.histories.push(history);
+
+ Ok(())
+ }
+
+ fn finish(self) -> Result<DefaultBranchHead, error::FindDefaultBranch> {
+ if self.histories.is_empty() {
+ Err(error::FindDefaultBranch::NoTips)
+ } else if self.histories.len() == 1 {
+ Ok(DefaultBranchHead::Head {
+ target: self.histories[0].tip,
+ branch: self.branch,
+ })
+ } else {
+ Ok(DefaultBranchHead::Forked(
+ self.histories
+ .into_iter()
+ .map(|h| Fork {
+ ancestor_peers: h.ancestor_peers,
+ tip_peers: h.tip_peers,
+ tip: h.tip,
+ })
+ .collect(),
+ ))
+ }
+ }
+ }
+
+ #[derive(Debug)]
+ struct History {
+ tip: git2::Oid,
+ tip_peers: BTreeSet<PeerId>,
+ ancestor_peers: BTreeSet<PeerId>,
+ ancestors: BTreeSet<git2::Oid>,
+ }
+
+ impl History {
+ fn load(
+ storage: &storage::Storage,
+ peer: PeerId,
+ tip: git2::Oid,
+ ) -> Result<History, storage::Error> {
+ let repo = storage.as_raw();
+ let mut walk = repo.revwalk()?;
+ walk.set_sorting(git2::Sort::TOPOLOGICAL)?;
+ walk.push(tip)?;
+ let mut ancestors = walk.collect::<Result<BTreeSet<git2::Oid>, _>>()?;
+ ancestors.remove(&tip);
+ let mut peers = BTreeSet::new();
+ peers.insert(peer);
+ let mut tip_peers = BTreeSet::new();
+ tip_peers.insert(peer);
+ Ok(History {
+ tip,
+ tip_peers,
+ ancestors,
+ ancestor_peers: BTreeSet::new(),
+ })
+ }
+ }
diff --git a/librad/t/src/integration/scenario.rs b/librad/t/src/integration/scenario.rs
index 9bfdd2ad..c47720a0 100644
--- a/librad/t/src/integration/scenario.rs
+++ b/librad/t/src/integration/scenario.rs
@@ -5,6 +5,7 @@
mod collaboration;
mod collaborative_objects;
+ mod default_branch_head;
mod menage;
mod passive_replication;
#[cfg(feature = "replication-v3")]
diff --git a/librad/t/src/integration/scenario/default_branch_head.rs b/librad/t/src/integration/scenario/default_branch_head.rs
new file mode 100644
index 00000000..2e7048c2
--- /dev/null
+++ b/librad/t/src/integration/scenario/default_branch_head.rs
@@ -0,0 +1,387 @@
+ // Copyright © 2019-2020 The Radicle Foundation <hello@radicle.foundation>
+ //
+ // This file is part of radicle-link, distributed under the GPLv3 with Radicle
+ // Linking Exception. For full terms see the included LICENSE file.
+
+ use std::{convert::TryFrom, ops::Index as _};
+
+ use tempfile::tempdir;
+
+ use git_ref_format::{lit, name, Namespaced, Qualified, RefString};
+ use it_helpers::{
+ fixed::{TestPerson, TestProject},
+ testnet::{self, RunningTestPeer},
+ working_copy::{WorkingCopy, WorkingRemote as Remote},
+ };
+ use librad::git::{
+ identities::{self, local, project::heads},
+ storage::ReadOnlyStorage,
+ tracking,
+ types::{Namespace, Reference},
+ Urn,
+ };
+ use link_identities::payload;
+ use test_helpers::logging;
+
+ fn config() -> testnet::Config {
+ testnet::Config {
+ num_peers: nonzero!(2usize),
+ min_connected: 2,
+ bootstrap: testnet::Bootstrap::from_env(),
+ }
+ }
+
+ /// This test checks that the logic of `librad::git::identities::project::heads`
+ /// is correct. To do this we need to set up various scenarios where the
+ /// delegates of a project agree or disagree on the default branch of a project.
+ #[test]
+ fn default_branch_head() {
+ logging::init();
+
+ let net = testnet::run(config()).unwrap();
+ net.enter(async {
+ // Setup a testnet with two peers and create a `Person` on each peer
+ let peer1 = net.peers().index(0);
+ let peer2 = net.peers().index(1);
+
+ let id1 = peer1
+ .using_storage::<_, anyhow::Result<TestPerson>>(|s| {
+ let person = TestPerson::create(s)?;
+ let local = local::load(s, person.owner.urn()).unwrap();
+ s.config()?.set_user(local)?;
+ Ok(person)
+ })
+ .await
+ .unwrap()
+ .unwrap();
+
+ let id2 = peer2
+ .using_storage::<_, anyhow::Result<TestPerson>>(|s| {
+ let person = TestPerson::create(s)?;
+ let local = local::load(s, person.owner.urn()).unwrap();
+ s.config()?.set_user(local)?;
+ Ok(person)
+ })
+ .await
+ .unwrap()
+ .unwrap();
+
+ id2.pull(peer2, peer1).await.unwrap();
+ id1.pull(peer1, peer2).await.unwrap();
+
+ // Create a project on peer1 with both `Person`s as delegates
+ let proj = peer1
+ .using_storage({
+ let owner = id1.owner.clone();
+ move |s| {
+ TestProject::from_project_payload(
+ s,
+ owner,
+ payload::Project {
+ name: "venus".into(),
+ description: None,
+ default_branch: Some(name::MASTER.to_string().into()),
+ },
+ )
+ }
+ })
+ .await
+ .unwrap()
+ .unwrap();
+
+ // Track peer2 on peer1
+ peer1
+ .using_storage::<_, anyhow::Result<()>>({
+ let urn = proj.project.urn();
+ let peer2_id = peer2.peer_id();
+ move |s| {
+ tracking::track(
+ s,
+ &urn,
+ Some(peer2_id),
+ tracking::Config::default(),
+ tracking::policy::Track::Any,
+ )??;
+ Ok(())
+ }
+ })
+ .await
+ .unwrap()
+ .unwrap();
+
+ proj.pull(peer1, peer2).await.unwrap();
+
+ // Add peer2
+ peer1
+ .using_storage({
+ let urn = proj.project.urn();
+ let owner1 = id1.owner.clone();
+ let owner2 = id2.owner.clone();
+ move |storage| -> Result<(), anyhow::Error> {
+ identities::project::update(
+ storage,
+ &urn,
+ None,
+ None,
+ librad::identities::delegation::Indirect::try_from_iter(
+ vec![either::Either::Right(owner1), either::Either::Right(owner2)]
+ .into_iter(),
+ )
+ .unwrap(),
+ )?;
+ identities::project::verify(storage, &urn)?;
+ Ok(())
+ }
+ })
+ .await
+ .unwrap()
+ .unwrap();
+
+ proj.pull(peer1, peer2).await.unwrap();
+
+ // Sign the project document using peer2
+ peer2
+ .using_storage({
+ let urn = proj.project.urn();
+ let peer_id = peer1.peer_id();
+ let rad =
+ Urn::try_from(Reference::rad_id(Namespace::from(&urn)).with_remote(peer_id))
+ .unwrap();
+ move |storage| -> Result<Option<identities::VerifiedProject>, anyhow::Error> {
+ let project = identities::project::get(&storage, &rad)?.unwrap();
+ identities::project::update(
+ storage,
+ &urn,
+ None,
+ None,
+ project.delegations().clone(),
+ )?;
+ identities::project::merge(storage, &urn, peer_id)?;
+ Ok(identities::project::verify(storage, &urn)?)
+ }
+ })
+ .await
+ .unwrap()
+ .unwrap();
+
+ proj.pull(peer2, peer1).await.unwrap();
+
+ // Merge the signed update into peer1
+ peer1
+ .using_storage({
+ let urn = proj.project.urn();
+ let peer_id = peer2.peer_id();
+ move |storage| -> Result<Option<identities::VerifiedProject>, anyhow::Error> {
+ identities::project::merge(storage, &urn, peer_id)?;
+ Ok(identities::project::verify(storage, &urn)?)
+ }
+ })
+ .await
+ .unwrap()
+ .unwrap();
+
+ id2.pull(peer2, peer1).await.unwrap();
+
+ // Okay, now we have a running testnet with two Peers, each of which has a
+ // `Person` who is a delegate on the `TestProject`
+
+ // Create a commit on peer1 and pull it into peer2, then on peer2 create a
+ // new commit on top of it and pull that back to peer1. Then on peer1 fetch
+ // the new commit, fast forward to it, and push.
+ let tmp = tempdir().unwrap();
+ let tip = {
+ let mut working_copy1 =
+ WorkingCopy::new(&proj, tmp.path().join("peer1"), peer1).unwrap();
+ let mut working_copy2 =
+ WorkingCopy::new(&proj, tmp.path().join("peer2"), peer2).unwrap();
+
+ let mastor = Qualified::from(lit::refs_heads(name::MASTER));
+ working_copy1
+ .commit("peer 1 initial", mastor.clone())
+ .unwrap();
+ working_copy1.push().unwrap();
+ proj.pull(peer1, peer2).await.unwrap();
+
+ working_copy2.fetch(Remote::Peer(peer1.peer_id())).unwrap();
+ working_copy2
+ .create_remote_tracking_branch(Remote::Peer(peer1.peer_id()), name::MASTER)
+ .unwrap();
+ let tip = working_copy2
+ .commit("peer 2 initial", mastor.clone())
+ .unwrap();
+ working_copy2.push().unwrap();
+ proj.pull(peer2, peer1).await.unwrap();
+
+ working_copy1.fetch(Remote::Peer(peer2.peer_id())).unwrap();
+ working_copy1
+ .fast_forward_to(Remote::Peer(peer2.peer_id()), name::MASTER)
+ .unwrap();
+ working_copy1.push().unwrap();
+ tip
+ };
+
+ let default_branch = branch_head(peer1, &proj).await.unwrap();
+ // The two peers should have the same view of the default branch
+ assert_eq!(
+ default_branch,
+ identities::project::heads::DefaultBranchHead::Head {
+ target: tip,
+ branch: name::MASTER.to_owned(),
+ }
+ );
+
+ // now update peer1 and push to peer1's monorepo; we should get the tip of peer1
+ // as the head (because peer2 can be fast forwarded)
+ let tmp = tempdir().unwrap();
+ let tip = {
+ let mut working_copy1 =
+ WorkingCopy::new(&proj, tmp.path().join("peer1"), peer1).unwrap();
+ working_copy1
+ .create_remote_tracking_branch(Remote::Rad, name::MASTER)
+ .unwrap();
+
+ let mastor = Qualified::from(lit::refs_heads(name::MASTER));
+ let tip = working_copy1.commit("peer 1 fork", mastor.clone()).unwrap();
+ working_copy1.push().unwrap();
+
+ tip
+ };
+
+ let default_branch_peer1 = branch_head(peer1, &proj).await.unwrap();
+ assert_eq!(
+ default_branch_peer1,
+ identities::project::heads::DefaultBranchHead::Head {
+ target: tip,
+ branch: name::MASTER.to_owned(),
+ }
+ );
+
+ // now create an alternate commit on peer2 and sync with peer1, on peer1 we
+ // should get a fork
+ let tmp = tempdir().unwrap();
+ let forked_tip = {
+ let mut working_copy2 =
+ WorkingCopy::new(&proj, tmp.path().join("peer2"), peer2).unwrap();
+ working_copy2
+ .create_remote_tracking_branch(Remote::Rad, name::MASTER)
+ .unwrap();
+
+ let mastor = Qualified::from(lit::refs_heads(name::MASTER));
+ let forked_tip = working_copy2.commit("peer 2 fork", mastor.clone()).unwrap();
+ working_copy2.push().unwrap();
+
+ forked_tip
+ };
+
+ proj.pull(peer2, peer1).await.unwrap();
+
+ let default_branch_peer1 = branch_head(peer1, &proj).await.unwrap();
+ assert_eq!(
+ default_branch_peer1,
+ identities::project::heads::DefaultBranchHead::Forked(
+ vec![
+ identities::project::heads::Fork {
+ ancestor_peers: std::collections::BTreeSet::new(),
+ tip_peers: std::iter::once(peer1.peer_id()).collect(),
+ tip,
+ },
+ identities::project::heads::Fork {
+ ancestor_peers: std::collections::BTreeSet::new(),
+ tip_peers: std::iter::once(peer2.peer_id()).collect(),
+ tip: forked_tip,
+ }
+ ]
+ .into_iter()
+ .collect()
+ )
+ );
+
+ // now update peer1 to match peer2
+ let tmp = tempdir().unwrap();
+ let fixed_tip = {
+ let mut working_copy1 =
+ WorkingCopy::new(&proj, tmp.path().join("peer1"), peer1).unwrap();
+ working_copy1.fetch(Remote::Peer(peer2.peer_id())).unwrap();
+ working_copy1
+ .create_remote_tracking_branch(Remote::Peer(peer2.peer_id()), name::MASTER)
+ .unwrap();
+
+ working_copy1.fetch(Remote::Peer(peer2.peer_id())).unwrap();
+ let tip = working_copy1
+ .merge_remote(peer2.peer_id(), name::MASTER)
+ .unwrap();
+ working_copy1.push().unwrap();
+ tip
+ };
+
+ let default_branch_peer1 = branch_head(peer1, &proj).await.unwrap();
+ assert_eq!(
+ default_branch_peer1,
+ identities::project::heads::DefaultBranchHead::Head {
+ target: fixed_tip,
+ branch: name::MASTER.to_owned(),
+ }
+ );
+
+ // now set the head in the monorepo and check that the HEAD reference exists
+ let updated_tip = peer1
+ .using_storage::<_, anyhow::Result<_>>({
+ let urn = proj.project.urn();
+ move |s| {
+ let vp = identities::project::verify(s, &urn)?.ok_or_else(|| {
+ anyhow::anyhow!("failed to get project for default branch")
+ })?;
+ identities::project::heads::set_default_head(s, vp).map_err(anyhow::Error::from)
+ }
+ })
+ .await
+ .unwrap()
+ .unwrap();
+ assert_eq!(updated_tip, fixed_tip);
+
+ let head_ref = RefString::try_from(format!(
+ "refs/namespaces/{}/refs/HEAD",
+ proj.project.urn().encode_id()
+ ))
+ .unwrap();
+ let master_ref = Namespaced::from(lit::refs_namespaces(
+ &proj.project.urn(),
+ Qualified::from(lit::refs_heads(name::MASTER)),
+ ));
+ let (master_oid, head_oid) = peer1
+ .using_storage::<_, anyhow::Result<_>>(move |s| {
+ let master_oid = s
+ .reference(&master_ref.into_qualified().into_refstring())?
+ .ok_or_else(|| anyhow::anyhow!("master ref not found"))?
+ .peel_to_commit()?
+ .id();
+ let head_oid = s
+ .reference(&head_ref)?
+ .ok_or_else(|| anyhow::anyhow!("head ref not found"))?
+ .peel_to_commit()?
+ .id();
+ Ok((master_oid, head_oid))
+ })
+ .await
+ .unwrap()
+ .unwrap();
+ assert_eq!(master_oid, updated_tip);
+ assert_eq!(head_oid, updated_tip);
+ });
+ }
+
+ async fn branch_head(
+ peer: &RunningTestPeer,
+ proj: &TestProject,
+ ) -> anyhow::Result<heads::DefaultBranchHead> {
+ peer.using_storage::<_, anyhow::Result<_>>({
+ let urn = proj.project.urn();
+ move |s| {
+ let vp = identities::project::verify(s, &urn)?
+ .ok_or_else(|| anyhow::anyhow!("failed to get project for default branch"))?;
+ heads::default_branch_head(s, vp).map_err(anyhow::Error::from)
+ }
+ })
+ .await?
+ }
diff --git a/test/it-helpers/Cargo.toml b/test/it-helpers/Cargo.toml
index 32c789cd..119d5aa1 100644
--- a/test/it-helpers/Cargo.toml
+++ b/test/it-helpers/Cargo.toml
@@ -40,5 +40,8 @@ path = "../../link-async"
[dependencies.lnk-clib]
path = "../../cli/lnk-clib"
+ [dependencies.radicle-git-ext]
+ path = "../../git-ext"
+
[dependencies.test-helpers]
path = "../test-helpers"
diff --git a/test/it-helpers/src/lib.rs b/test/it-helpers/src/lib.rs
index 981b922d..5012de39 100644
--- a/test/it-helpers/src/lib.rs
+++ b/test/it-helpers/src/lib.rs
@@ -7,3 +7,4 @@ pub mod layout;
pub mod ssh;
pub mod testnet;
pub mod tmp;
+ pub mod working_copy;
diff --git a/test/it-helpers/src/working_copy.rs b/test/it-helpers/src/working_copy.rs
new file mode 100644
index 00000000..5fbef0dd
--- /dev/null
+++ b/test/it-helpers/src/working_copy.rs
@@ -0,0 +1,291 @@
+ use std::path::Path;
+
+ use git_ref_format::{lit, name, refspec, Qualified, RefStr, RefString};
+
+ use librad::{
+ git::{
+ local::url::LocalUrl,
+ types::{
+ remote::{LocalFetchspec, LocalPushspec},
+ Fetchspec,
+ Force,
+ Refspec,
+ Remote,
+ },
+ },
+ git_ext as ext,
+ net::{peer::Peer, protocol::RequestPullGuard},
+ refspec_pattern,
+ PeerId,
+ Signer,
+ };
+
+ use crate::fixed::TestProject;
+
+ /// A remote in the working copy
+ pub enum WorkingRemote {
+ /// A remote representing a remote peer, named `PeerId::encode_id`
+ Peer(PeerId),
+ /// A remote representing the local peer, named "rad"
+ Rad,
+ }
+
+ impl From<PeerId> for WorkingRemote {
+ fn from(p: PeerId) -> Self {
+ WorkingRemote::Peer(p)
+ }
+ }
+
+ impl WorkingRemote {
+ fn fetchspec(&self) -> Fetchspec {
+ match self {
+ Self::Peer(peer_id) => {
+ let name = RefString::try_from(format!("{}", peer_id)).expect("peer is refstring");
+ let dst = RefString::from(Qualified::from(lit::refs_remotes(name.clone())))
+ .with_pattern(refspec::STAR);
+ let src = RefString::from(Qualified::from(lit::refs_remotes(name)))
+ .and(name::HEADS)
+ .with_pattern(refspec::STAR);
+ let refspec = Refspec {
+ src,
+ dst,
+ force: Force::True,
+ };
+ refspec.into_fetchspec()
+ },
+ Self::Rad => {
+ let name = RefString::try_from("rad").unwrap();
+ let src =
+ RefString::from_iter([name::REFS, name::HEADS]).with_pattern(refspec::STAR);
+ Refspec {
+ src,
+ dst: RefString::from(Qualified::from(lit::refs_remotes(name)))
+ .with_pattern(refspec::STAR),
+ force: Force::True,
+ }
+ .into_fetchspec()
+ },
+ }
+ }
+
+ fn remote_ref(&self, branch: &RefStr) -> RefString {
+ let name = match self {
+ Self::Rad => name::RAD.to_owned(),
+ Self::Peer(peer_id) => {
+ RefString::try_from(peer_id.to_string()).expect("peer id is refstring")
+ },
+ };
+ RefString::from(Qualified::from(lit::refs_remotes(name))).join(branch)
+ }
+ }
+
+ /// A `WorkingCopy` for test driving interactions with the monorepo where one
+ /// needs to update the tree of a project.
+ ///
+ /// Remotes are named after the peer ID, except in the case of the remote
+ /// representing the local Peer ID - which is called "rad".
+ pub struct WorkingCopy<'a, S, G> {
+ repo: git2::Repository,
+ peer: &'a Peer<S, G>,
+ project: &'a TestProject,
+ }
+
+ impl<'a, S, G> WorkingCopy<'a, S, G>
+ where
+ S: Signer + Clone,
+ G: RequestPullGuard,
+ {
+ /// Create a new working copy. This initializes a git repository and then
+ /// fetches the state of the local peer into `refs/remotes/rad/*`.
+ pub fn new<P: AsRef<Path>>(
+ project: &'a TestProject,
+ repo_path: P,
+ peer: &'a Peer<S, G>,
+ ) -> Result<WorkingCopy<'a, S, G>, anyhow::Error> {
+ let repo = git2::Repository::init(repo_path.as_ref())?;
+
+ let mut copy = WorkingCopy {
+ peer,
+ project,
+ repo,
+ };
+ copy.fetch(WorkingRemote::Rad)?;
+ Ok(copy)
+ }
+
+ /// Fetch changes from the monorepo into the working copy. The fetchspec
+ /// used depends on the peer ID.
+ ///
+ /// * If `from` is `WorkingRemote::Peer` then `refs/remotes/<peer
+ ///   ID>/heads/*:refs/remotes/<peer ID>/*`
+ /// * If `from` is `WorkingRemote::Rad` then
+ /// `refs/heads/*:refs/remotes/rad/*`
+ ///
+ /// I.e. changes from remote peers end up in a remote called
+ /// `PeerId::encode_id` whilst changes from the local peer end up in a
+ /// remote called "rad".
+ pub fn fetch(&mut self, from: WorkingRemote) -> Result<(), anyhow::Error> {
+ let fetchspec = from.fetchspec();
+ let url = LocalUrl::from(self.project.project.urn());
+ let mut remote = Remote::rad_remote(url, fetchspec);
+ let _ = remote.fetch(self.peer.clone(), &self.repo, LocalFetchspec::Configured)?;
+ Ok(())
+ }
+
+ /// Push changes from `refs/heads/*` to the local peer
+ pub fn push(&mut self) -> Result<(), anyhow::Error> {
+ let url = LocalUrl::from(self.project.project.urn());
+ let name = RefString::try_from("rad").unwrap();
+ let fetchspec = Refspec {
+ src: RefString::from_iter([name::REFS, name::HEADS]).with_pattern(refspec::STAR),
+ dst: RefString::from(Qualified::from(lit::refs_remotes(name)))
+ .with_pattern(refspec::STAR),
+ force: Force::True,
+ }
+ .into_fetchspec();
+ let mut remote = Remote::rad_remote(url, fetchspec);
+ let _ = remote.push(
+ self.peer.clone(),
+ &self.repo,
+ LocalPushspec::Matching {
+ pattern: refspec_pattern!("refs/heads/*"),
+ force: Force::True,
+ },
+ )?;
+ Ok(())
+ }
+
+ /// Create a new commit on top of whichever commit is the head of
+ /// `on_branch`. If the branch does not exist this will create it.
+ pub fn commit(
+ &mut self,
+ message: &str,
+ on_branch: Qualified,
+ ) -> Result<git2::Oid, anyhow::Error> {
+ let branch_name = on_branch.non_empty_components().2;
+ let parent = match self.repo.find_branch(&branch_name, git2::BranchType::Local) {
+ Ok(b) => b.get().target().and_then(|o| self.repo.find_commit(o).ok()),
+ Err(e) if ext::error::is_not_found_err(&e) => None,
+ Err(e) => return Err(anyhow::Error::from(e)),
+ };
+ let empty_tree = {
+ let mut index = self.repo.index()?;
+ let oid = index.write_tree()?;
+ self.repo.find_tree(oid).unwrap()
+ };
+ let author = git2::Signature::now("The Animal", "animal@muppets.com").unwrap();
+ let parents = match &parent {
+ Some(p) => vec![p],
+ None => Vec::new(),
+ };
+ self.repo
+ .commit(
+ Some(&on_branch),
+ &author,
+ &author,
+ message,
+ &empty_tree,
+ &parents,
+ )
+ .map_err(anyhow::Error::from)
+ }
+
+ /// Create a branch at `refs/heads/<branch>` which tracks the given remote.
+ /// The remote branch name depends on `from`.
+ ///
+ /// * If `from` is `WorkingRemote::Rad` then `refs/remotes/rad/<branch>`
+ /// * If `from` is `WorkingRemote::Peer(peer_id)` then `refs/remotes/<peer
+ ///   id>/<branch>`
+ pub fn create_remote_tracking_branch(
+ &self,
+ from: WorkingRemote,
+ branch: &RefStr,
+ ) -> Result<(), anyhow::Error> {
+ let target = self
+ .repo
+ .find_reference(from.remote_ref(branch).as_str())?
+ .target()
+ .ok_or_else(|| anyhow::anyhow!("remote ref is not a direct reference"))?;
+ let commit = self.repo.find_commit(target)?;
+ self.repo.branch(branch.as_str(), &commit, false)?;
+ Ok(())
+ }
+
+ /// Fast forward the local branch `refs/heads/<branch>` to whatever is
+ /// pointed to by `refs/remotes/<remote>/<branch>`
+ ///
+ /// * If `from` is `WorkingRemote::Peer(peer_id)` then `remote` is
+ /// `peer_id.encode_id()`
+ /// * If `from` is `WorkingRemote::Rad` then `remote` is `"rad"`
+ ///
+ /// # Errors
+ ///
+ /// * If the local branch does not exist
+ /// * If the remote branch does not exist
+ /// * If either of the branches does not point at a commit
+ /// * If the remote branch is not a descendant of the local branch
+ pub fn fast_forward_to(&self, from: WorkingRemote, branch: &RefStr) -> anyhow::Result<()> {
+ let remote_ref = from.remote_ref(branch);
+ let remote_target = self
+ .repo
+ .find_reference(&remote_ref)?
+ .target()
+ .ok_or_else(|| anyhow::anyhow!("remote ref had no target"))?;
+ let local_ref = RefString::from(Qualified::from(lit::refs_heads(branch)));
+ let local_target = self
+ .repo
+ .find_reference(&local_ref)?
+ .target()
+ .ok_or_else(|| anyhow::anyhow!("local ref had no target"))?;
+ if !self.repo.graph_descendant_of(remote_target, local_target)? {
+ anyhow::bail!("remote ref was not a descendant of local ref");
+ } else {
+ self.repo
+ .reference(&local_ref, remote_target, true, "fast forward")?;
+ }
+ Ok(())
+ }
+
+ /// Create a new commit which merges `refs/heads/<branch>` and
+ /// `refs/remotes/<remote>/<branch>`
+ ///
+ /// this will create a new commit with two parents, one for the remote
+ /// branch and one for the local branch
+ ///
+ /// # Errors
+ ///
+ /// * If the remote branch does not exist
+ /// * If the local branch does not exist
+ /// * If either of the references does not point to a commit
+ pub fn merge_remote(&self, remote: PeerId, branch: &RefStr) -> anyhow::Result<git2::Oid> {
+ let peer_branch = WorkingRemote::Peer(remote).remote_ref(branch);
+ let peer_commit = self
+ .repo
+ .find_reference(&peer_branch.to_string())?
+ .peel_to_commit()?;
+ let local_branch = Qualified::from(lit::refs_heads(branch));
+ let local_commit = self
+ .repo
+ .find_reference(&local_branch.to_string())?
+ .peel_to_commit()?;
+
+ let message = format!("merge {} into {}", peer_branch, local_branch);
+ let empty_tree = {
+ let mut index = self.repo.index()?;
+ let oid = index.write_tree()?;
+ self.repo.find_tree(oid).unwrap()
+ };
+ let author = git2::Signature::now("The Animal", "animal@muppets.com").unwrap();
+ let parents = vec![&peer_commit, &local_commit];
+ self.repo
+ .commit(
+ Some(&local_branch),
+ &author,
+ &author,
+ &message,
+ &empty_tree,
+ &parents,
+ )
+ .map_err(anyhow::Error::from)
+ }
+ }
--
2.36.1
[PATCH v1 2/4] lnk-identities: update path logic and set up include
The existing logic for checking out an identity in lnk-identities places
the checked out repository in `<selected path>/<identity name>` where
`selected path` is either the working directory or a specified
directory. This is not usually what people expect when checking out a
repository. Here we modify the logic so that if a directory is not
specified we place the checked out repository in `$PWD/<identity name>`,
but if a directory is specified we place the checkout directly in the
specified directory.
While we're here we implement some missing logic to set the include path
in the newly created or updated repository.
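As a rough illustration of the new rules (written as if inside
lnk-identities, since `IdentityDir::resolve` is crate-private; paths and
names are purely illustrative):

    use std::path::PathBuf;

    use crate::identity_dir::IdentityDir;

    fn where_the_checkout_goes() -> std::io::Result<()> {
        // `--path /tmp/checkouts` was given: use that directory directly.
        let explicit = IdentityDir::at_or_current_dir(Some("/tmp/checkouts"))?;
        assert_eq!(explicit.resolve("venus"), PathBuf::from("/tmp/checkouts"));

        // `--path` was omitted: place the working copy in `$PWD/<identity name>`.
        let implicit = IdentityDir::at_or_current_dir(None::<PathBuf>)?;
        assert_eq!(
            implicit.resolve("venus"),
            std::env::current_dir()?.join("venus")
        );

        Ok(())
    }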
Signed-off-by: Alex Good <alex@memoryandthought.me>
---
cli/lnk-identities/Cargo.toml | 3 +
cli/lnk-identities/src/cli/args.rs | 10 +--
cli/lnk-identities/src/cli/eval/person.rs | 7 +-
cli/lnk-identities/src/cli/eval/project.rs | 7 +-
cli/lnk-identities/src/git/checkout.rs | 70 +++++++++++--------
cli/lnk-identities/src/git/existing.rs | 16 +----
cli/lnk-identities/src/git/new.rs | 20 ++----
cli/lnk-identities/src/identity_dir.rs | 38 ++++++++++
cli/lnk-identities/src/lib.rs | 1 +
cli/lnk-identities/src/person.rs | 5 +-
cli/lnk-identities/src/project.rs | 32 ++-----
.../t/src/tests/git/checkout.rs | 5 +-
.../t/src/tests/git/existing.rs | 6 +-
cli/lnk-identities/t/src/tests/git/new.rs | 5 +-
14 files changed, 130 insertions(+), 95 deletions(-)
create mode 100644 cli/lnk-identities/src/identity_dir.rs
diff --git a/cli/lnk-identities/Cargo.toml b/cli/lnk-identities/Cargo.toml
index 1ee6bafb..b6a42736 100644
--- a/cli/lnk-identities/Cargo.toml
+++ b/cli/lnk-identities/Cargo.toml
@@ -45,6 +45,9 @@ default-features = false
[dependencies.radicle-git-ext]
path = "../../git-ext"
+ [dependencies.git-ref-format]
+ path = "../../git-ref-format"
+
[dependencies.radicle-std-ext]
path = "../../std-ext"
diff --git a/cli/lnk-identities/src/cli/args.rs b/cli/lnk-identities/src/cli/args.rs
index 7ad234ca..1a281256 100644
--- a/cli/lnk-identities/src/cli/args.rs
+++ b/cli/lnk-identities/src/cli/args.rs
@@ -255,9 +255,10 @@ pub mod project {
#[clap(long)]
pub urn: Urn,
- /// the location for creating the working copy in
+ /// the location for creating the working copy in. If not specified will
+ /// clone into <working directory>/<identity name>
#[clap(long)]
- pub path: PathBuf,
+ pub path: Option<PathBuf>,
/// the peer for which the initial working copy is based off. Note that
/// if this value is not provided, or the value that is provided is the
@@ -360,7 +361,8 @@ pub mod person {
#[clap(long, parse(try_from_str = direct_delegation))]
pub delegations: Vec<PublicKey>,
- /// the path where the working copy should be created
+ /// the path where the working copy should be created. If not specified
+ /// will clone into <working directory>/<identity name>
#[clap(long)]
pub path: Option<PathBuf>,
}
@@ -444,7 +446,7 @@ pub mod person {
/// the location for creating the working copy in
#[clap(long)]
- pub path: PathBuf,
+ pub path: Option<PathBuf>,
/// the peer for which the initial working copy is based off. Note that
/// if this value is not provided, or the value that is provided is the
diff --git a/cli/lnk-identities/src/cli/eval/person.rs b/cli/lnk-identities/src/cli/eval/person.rs
index 8eaa54b0..78791b4a 100644
--- a/cli/lnk-identities/src/cli/eval/person.rs
+++ b/cli/lnk-identities/src/cli/eval/person.rs
@@ -24,7 +24,7 @@ use lnk_clib::{
storage::{self, ssh},
};
- use crate::{cli::args::person::*, display, person};
+ use crate::{cli::args::person::*, display, identity_dir::IdentityDir, person};
pub fn eval(profile: &Profile, sock: SshAuthSock, opts: Options) -> anyhow::Result<()> {
match opts {
@@ -138,12 +138,13 @@ fn eval_checkout(
profile: &Profile,
sock: SshAuthSock,
urn: Urn,
- path: PathBuf,
+ path: Option<PathBuf>,
peer: Option<PeerId>,
) -> anyhow::Result<()> {
let paths = profile.paths();
let (signer, storage) = ssh::storage(profile, sock)?;
- let repo = person::checkout(&storage, paths.clone(), signer, &urn, peer, path)?;
+ let checkout_path = IdentityDir::at_or_current_dir(path)?;
+ let repo = person::checkout(&storage, paths.clone(), signer, &urn, peer, checkout_path)?;
println!("working copy created at `{}`", repo.path().display());
Ok(())
}
diff --git a/cli/lnk-identities/src/cli/eval/project.rs b/cli/lnk-identities/src/cli/eval/project.rs
index b5ae1636..455c0d72 100644
--- a/cli/lnk-identities/src/cli/eval/project.rs
+++ b/cli/lnk-identities/src/cli/eval/project.rs
@@ -26,7 +26,7 @@ use lnk_clib::{
storage::{self, ssh},
};
- use crate::{cli::args::project::*, display, project};
+ use crate::{cli::args::project::*, display, identity_dir::IdentityDir, project};
pub fn eval(profile: &Profile, sock: SshAuthSock, opts: Options) -> anyhow::Result<()> {
match opts {
@@ -143,12 +143,13 @@ fn eval_checkout(
profile: &Profile,
sock: SshAuthSock,
urn: Urn,
- path: PathBuf,
+ path: Option<PathBuf>,
peer: Option<PeerId>,
) -> anyhow::Result<()> {
let (signer, storage) = ssh::storage(profile, sock)?;
let paths = profile.paths();
- let repo = project::checkout(&storage, paths.clone(), signer, &urn, peer, path)?;
+ let checkout_path = IdentityDir::at_or_current_dir(path)?;
+ let repo = project::checkout(&storage, paths.clone(), signer, &urn, peer, checkout_path)?;
println!("working copy created at `{}`", repo.path().display());
Ok(())
}
diff --git a/cli/lnk-identities/src/git/checkout.rs b/cli/lnk-identities/src/git/checkout.rs
index 5a949465..756020ee 100644
--- a/cli/lnk-identities/src/git/checkout.rs
+++ b/cli/lnk-identities/src/git/checkout.rs
@@ -3,7 +3,7 @@
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
// Linking Exception. For full terms see the included LICENSE file.
- use std::{convert::TryFrom, ffi, path::PathBuf};
+ use std::{convert::TryFrom, path::PathBuf};
use either::Either;
@@ -21,13 +21,18 @@ use librad::{
},
},
git_ext::{self, OneLevel, Qualified, RefLike},
+ paths::Paths,
refspec_pattern,
PeerId,
};
+ use git_ref_format as ref_format;
+
use crate::{
field::{HasBranch, HasName, HasUrn, MissingDefaultBranch},
git,
+ git::include,
+ identity_dir::IdentityDir,
};
#[derive(Debug, thiserror::Error)]
@@ -46,6 +51,15 @@ pub enum Error {
#[error(transparent)]
Transport(#[from] librad::git::local::transport::Error),
+
+ #[error(transparent)]
+ Include(Box<include::Error>),
+
+ #[error(transparent)]
+ SetInclude(#[from] librad::git::include::Error),
+
+ #[error(transparent)]
+ OpenStorage(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl From<identities::Error> for Error {
@@ -89,7 +103,9 @@ impl From<identities::Error> for Error {
/// merge = refs/heads/master
/// [include]
/// path = /home/user/.config/radicle-link/git-includes/hwd1yrerzpjbmtshsqw6ajokqtqrwaswty6p7kfeer3yt1n76t46iqggzcr.inc
+ /// ```
pub fn checkout<F, I>(
+ paths: &Paths,
open_storage: F,
identity: &I,
from: Either<Local, Peer>,
@@ -101,10 +117,20 @@ where
let default_branch = identity.branch_or_die(identity.urn())?;
let (repo, rad) = match from {
- Either::Left(local) => local.checkout(open_storage)?,
- Either::Right(peer) => peer.checkout(open_storage)?,
+ Either::Left(local) => local.checkout(open_storage.clone())?,
+ Either::Right(peer) => peer.checkout(open_storage.clone())?,
};
+ {
+ let _box = open_storage
+ .open_storage()
+ .map_err(|e| Error::OpenStorage(e))?;
+ let storage = _box.as_ref();
+ let include_path = include::update(storage.as_ref(), paths, identity)
+ .map_err(|e| Error::Include(Box::new(e)))?;
+ librad::git::include::set_include_path(&repo, include_path)?;
+ }
+
// Set configurations
git::set_upstream(&repo, &rad, default_branch.clone())?;
repo.set_head(Qualified::from(default_branch).as_str())
@@ -123,7 +149,6 @@ impl Local {
where
I: HasName + HasUrn,
{
- let path = resolve_path(identity, path);
Self {
url: LocalUrl::from(identity.urn()),
path,
@@ -160,7 +185,6 @@ impl Peer {
{
let urn = identity.urn();
let default_branch = identity.branch_or_die(urn.clone())?;
- let path = resolve_path(identity, path);
Ok(Self {
url: LocalUrl::from(urn),
remote,
@@ -175,12 +199,16 @@ impl Peer {
{
let (person, peer) = self.remote;
let handle = &person.subject().name;
- let name =
- RefLike::try_from(format!("{}@{}", handle, peer)).expect("failed to parse remote name");
- let remote = Remote::new(self.url.clone(), name.clone()).with_fetchspecs(vec![Refspec {
+ let name = ref_format::RefString::try_from(format!("{}@{}", handle, peer))
+ .expect("handle and peer are reflike");
+ let dst = ref_format::RefString::from(ref_format::Qualified::from(
+ ref_format::lit::refs_remotes(name.clone()),
+ ))
+ .with_pattern(ref_format::refspec::STAR);
+ let remote = Remote::new(self.url.clone(), name).with_fetchspecs(vec![Refspec {
src: Reference::heads(Flat, peer),
- dst: GenericRef::heads(Flat, name),
+ dst,
force: Force::True,
}]);
@@ -216,34 +244,14 @@ impl Peer {
pub fn from_whom<I>(
identity: &I,
remote: Option<(Person, PeerId)>,
- path: PathBuf,
+ path: IdentityDir,
) -> Result<Either<Local, Peer>, Error>
where
I: HasBranch + HasName + HasUrn,
{
+ let path = path.resolve(identity.name());
Ok(match remote {
None => Either::Left(Local::new(identity, path)),
Some(remote) => Either::Right(Peer::new(identity, remote, path)?),
})
}
-
- fn resolve_path<I>(identity: &I, path: PathBuf) -> PathBuf
- where
- I: HasName,
- {
- let name = identity.name();
-
- // Check if the path provided ends in the 'directory_name' provided. If not we
- // create the full path to that name.
- path.components()
- .next_back()
- .map_or(path.join(&**name), |destination| {
- let destination: &ffi::OsStr = destination.as_ref();
- let name: &ffi::OsStr = name.as_ref();
- if destination == name {
- path.to_path_buf()
- } else {
- path.join(name)
- }
- })
- }
diff --git a/cli/lnk-identities/src/git/existing.rs b/cli/lnk-identities/src/git/existing.rs
index 702ec727..c828ab5a 100644
--- a/cli/lnk-identities/src/git/existing.rs
+++ b/cli/lnk-identities/src/git/existing.rs
@@ -8,17 +8,13 @@ use std::{fmt, marker::PhantomData, path::PathBuf};
use serde::{Deserialize, Serialize};
use librad::{
- canonical::Cstring,
git::local::{transport::CanOpenStorage, url::LocalUrl},
git_ext,
std_ext::result::ResultExt as _,
};
use std_ext::Void;
- use crate::{
- field::{HasBranch, HasName},
- git,
- };
+ use crate::{field::HasBranch, git};
#[derive(Debug, thiserror::Error)]
pub enum Error {
@@ -47,20 +43,13 @@ pub struct Existing<V, P> {
valid: V,
}
- impl<V, P: HasName> Existing<V, P> {
- pub fn name(&self) -> &Cstring {
- self.payload.name()
- }
- }
-
type Invalid = PhantomData<Void>;
- impl<P: HasName + HasBranch> Existing<Invalid, P> {
+ impl<P: HasBranch> Existing<Invalid, P> {
pub fn new(payload: P, path: PathBuf) -> Self {
// Note(finto): The current behaviour in Upstream is that an existing repository
// is initialised with the suffix of the path as the name of the project.
// Perhaps this should just be done upstream and no assumptions made here.
- let path = path.join(payload.name().as_str());
Self {
payload,
path,
@@ -116,6 +105,7 @@ impl<P: HasBranch> Existing<Valid, P> {
);
let _remote = git::validation::remote(&repo, &url)?;
git::setup_remote(&repo, open_storage, url, &self.payload.branch_or_default())?;
+
Ok(repo)
}
}
diff --git a/cli/lnk-identities/src/git/new.rs b/cli/lnk-identities/src/git/new.rs
index 758af792..c8c620f6 100644
--- a/cli/lnk-identities/src/git/new.rs
+++ b/cli/lnk-identities/src/git/new.rs
@@ -41,12 +41,6 @@ pub struct New<V, P> {
valid: V,
}
- impl<V, P: HasName> New<V, P> {
- pub fn path(&self) -> PathBuf {
- self.path.join(self.payload.name().as_str())
- }
- }
-
pub type Invalid = PhantomData<Void>;
pub type Valid = PhantomData<Void>;
@@ -64,14 +58,12 @@ impl<P> New<Invalid, P> {
where
P: HasName,
{
- let repo_path = self.path();
-
- if repo_path.is_file() {
- return Err(Error::AlreadyExists(repo_path));
+ if self.path.is_file() {
+ return Err(Error::AlreadyExists(self.path));
}
- if repo_path.exists() && repo_path.is_dir() && repo_path.read_dir()?.next().is_some() {
- return Err(Error::AlreadyExists(repo_path));
+ if self.path.exists() && self.path.is_dir() && self.path.read_dir()?.next().is_some() {
+ return Err(Error::AlreadyExists(self.path));
}
Ok(Self {
@@ -87,7 +79,7 @@ impl New<Valid, payload::ProjectPayload> {
where
F: CanOpenStorage + Clone + 'static,
{
- let path = self.path();
+ let path = self.path;
let default = self.payload.branch_or_default();
init(
path,
@@ -104,7 +96,7 @@ impl New<Valid, payload::PersonPayload> {
where
F: CanOpenStorage + Clone + 'static,
{
- let path = self.path();
+ let path = self.path;
let default = self.payload.branch_or_default();
init(path, default, &None, url, open_storage)
}
diff --git a/cli/lnk-identities/src/identity_dir.rs b/cli/lnk-identities/src/identity_dir.rs
new file mode 100644
index 00000000..bbf87bdb
--- /dev/null
+++ b/cli/lnk-identities/src/identity_dir.rs
@@ -0,0 +1,38 @@
+ use std::path::{Path, PathBuf};
+
+ /// Where to checkout or create an identity
+ pub enum IdentityDir {
+ /// A directory within this directory named after the identity
+ Within(PathBuf),
+ /// Directly at the given path, which must be a directory
+ At(PathBuf),
+ }
+
+ impl IdentityDir {
+ /// If `at` is `Some` then return `IdentityDir::At(at)`, otherwise
+ /// `IdentityDir::Within(current directory)`.
+ pub fn at_or_current_dir<P: AsRef<Path>>(at: Option<P>) -> Result<IdentityDir, std::io::Error> {
+ match at {
+ Some(p) => Ok(IdentityDir::At(p.as_ref().to_path_buf())),
+ None => Ok(IdentityDir::Within(std::env::current_dir()?)),
+ }
+ }
+ }
+
+ impl std::fmt::Display for IdentityDir {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ IdentityDir::At(p) => p.display().fmt(f),
+ IdentityDir::Within(p) => write!(f, "{}/<name>", p.display()),
+ }
+ }
+ }
+
+ impl IdentityDir {
+ pub(crate) fn resolve(&self, identity_name: &str) -> PathBuf {
+ match self {
+ Self::At(p) => p.clone(),
+ Self::Within(p) => p.join(identity_name),
+ }
+ }
+ }
diff --git a/cli/lnk-identities/src/lib.rs b/cli/lnk-identities/src/lib.rs
index d3cdcfd3..4ca9e261 100644
--- a/cli/lnk-identities/src/lib.rs
+++ b/cli/lnk-identities/src/lib.rs
@@ -15,6 +15,7 @@ use thiserror::Error;
pub mod cli;
pub mod any;
+ pub mod identity_dir;
pub mod local;
pub mod person;
pub mod project;
diff --git a/cli/lnk-identities/src/person.rs b/cli/lnk-identities/src/person.rs
index df1b588e..8381f0ce 100644
--- a/cli/lnk-identities/src/person.rs
+++ b/cli/lnk-identities/src/person.rs
@@ -27,6 +27,7 @@ use librad::{
use crate::{
display,
git::{self, checkout, include},
+ identity_dir::IdentityDir,
};
pub type Display = display::Display<PersonPayload>;
@@ -172,7 +173,7 @@ pub fn checkout<S>(
signer: BoxedSigner,
urn: &Urn,
peer: Option<PeerId>,
- path: PathBuf,
+ path: IdentityDir,
) -> Result<git2::Repository, Error>
where
S: AsRef<ReadOnly>,
@@ -199,7 +200,7 @@ where
paths: paths.clone(),
signer,
};
- let repo = git::checkout::checkout(settings, &person, from)?;
+ let repo = git::checkout::checkout(&paths, settings, &person, from)?;
include::update(&storage, &paths, &person)?;
Ok(repo)
}
diff --git a/cli/lnk-identities/src/project.rs b/cli/lnk-identities/src/project.rs
index 65dc76e2..07378708 100644
--- a/cli/lnk-identities/src/project.rs
+++ b/cli/lnk-identities/src/project.rs
@@ -30,6 +30,7 @@ use librad::{
use crate::{
display,
git::{self, checkout, include},
+ identity_dir::IdentityDir,
MissingDefaultIdentity,
};
@@ -74,12 +75,6 @@ impl From<identities::Error> for Error {
}
}
- impl From<include::Error> for Error {
- fn from(err: include::Error) -> Self {
- Self::Include(Box::new(err))
- }
- }
-
pub enum Creation {
New { path: Option<PathBuf> },
Existing { path: PathBuf },
@@ -136,26 +131,32 @@ where
signer,
};
- let project = match creation {
+ let (project, maybe_repo) = match creation {
Creation::New { path } => {
if let Some(path) = path {
let valid = git::new::New::new(payload.clone(), path).validate()?;
let project = project::create(storage, whoami, payload, delegations)?;
- valid.init(url, settings)?;
- project
+ let repo = valid.init(url, settings)?;
+ (project, Some(repo))
} else {
- project::create(storage, whoami, payload, delegations)?
+ (
+ project::create(storage, whoami, payload, delegations)?,
+ None,
+ )
}
},
Creation::Existing { path } => {
let valid = git::existing::Existing::new(payload.clone(), path).validate()?;
let project = project::create(storage, whoami, payload, delegations)?;
- valid.init(url, settings)?;
- project
+ let repo = valid.init(url, settings)?;
+ (project, Some(repo))
},
};
- include::update(storage, &paths, &project)?;
+ let include_path = include::update(storage, &paths, &project)?;
+ if let Some(repo) = maybe_repo {
+ librad::git::include::set_include_path(&repo, include_path)?;
+ }
Ok(project)
}
@@ -242,7 +243,7 @@ pub fn checkout<S>(
signer: BoxedSigner,
urn: &Urn,
peer: Option<PeerId>,
- path: PathBuf,
+ path: IdentityDir,
) -> Result<git2::Repository, Error>
where
S: AsRef<ReadOnly>,
@@ -269,8 +270,7 @@ where
paths: paths.clone(),
signer,
};
- let repo = git::checkout::checkout(settings, &project, from)?;
- include::update(&storage, &paths, &project)?;
+ let repo = git::checkout::checkout(&paths, settings, &project, from)?;
Ok(repo)
}
diff --git a/cli/lnk-identities/t/src/tests/git/checkout.rs b/cli/lnk-identities/t/src/tests/git/checkout.rs
index c4fa2642..c0e919f4 100644
--- a/cli/lnk-identities/t/src/tests/git/checkout.rs
+++ b/cli/lnk-identities/t/src/tests/git/checkout.rs
@@ -48,7 +48,7 @@ fn local_checkout() -> anyhow::Result<()> {
};
let local = Local::new(&proj.project, temp.path().to_path_buf());
- let repo = checkout(settings, &proj.project, Either::Left(local))?;
+ let repo = checkout(&paths, settings, &proj.project, Either::Left(local))?;
let branch = proj.project.subject().default_branch.as_ref().unwrap();
assert_head(&repo, branch)?;
assert_remote(&repo, branch, &LocalUrl::from(proj.project.urn()))?;
@@ -102,9 +102,10 @@ fn remote_checkout() {
signer: peer2.signer().clone().into(),
};
+ let paths = peer2.protocol_config().paths.clone();
let remote = (proj.owner.clone(), peer1.peer_id());
let peer = Peer::new(&proj.project, remote, temp.path().to_path_buf()).unwrap();
- let repo = checkout(settings, &proj.project, Either::Right(peer)).unwrap();
+ let repo = checkout(&paths, settings, &proj.project, Either::Right(peer)).unwrap();
let branch = proj.project.subject().default_branch.as_ref().unwrap();
assert_head(&repo, branch).unwrap();
assert_remote(&repo, branch, &LocalUrl::from(proj.project.urn())).unwrap();
diff --git a/cli/lnk-identities/t/src/tests/git/existing.rs b/cli/lnk-identities/t/src/tests/git/existing.rs
index 4bbea636..f3fbc2e8 100644
--- a/cli/lnk-identities/t/src/tests/git/existing.rs
+++ b/cli/lnk-identities/t/src/tests/git/existing.rs
@@ -51,7 +51,7 @@ fn validation_path_is_not_a_repo() -> anyhow::Result<()> {
fn validation_default_branch_is_missing() -> anyhow::Result<()> {
let payload = TestProject::default_payload();
let temp = tempdir()?;
- let dir = temp.path().join(payload.name.as_str());
+ let dir = temp.path();
let _repo = git2::Repository::init(dir)?;
let existing = Existing::new(ProjectPayload::new(payload), temp.path().to_path_buf());
let result = existing.validate();
@@ -68,7 +68,7 @@ fn validation_default_branch_is_missing() -> anyhow::Result<()> {
fn validation_different_remote_exists() -> anyhow::Result<()> {
let payload = TestProject::default_payload();
let temp = tempdir()?;
- let dir = temp.path().join(payload.name.as_str());
+ let dir = temp.path();
let _repo = {
let branch = payload.default_branch.as_ref().unwrap();
let mut opts = git2::RepositoryInitOptions::new();
@@ -153,7 +153,7 @@ fn validation_remote_exists() -> anyhow::Result<()> {
fn creation() -> anyhow::Result<()> {
let payload = TestProject::default_payload();
let temp = tempdir()?;
- let dir = temp.path().join(payload.name.as_str());
+ let dir = temp.path();
let _repo = {
let branch = payload.default_branch.as_ref().unwrap();
let mut opts = git2::RepositoryInitOptions::new();
diff --git a/cli/lnk-identities/t/src/tests/git/new.rs b/cli/lnk-identities/t/src/tests/git/new.rs
index dde7d44b..edb85d8b 100644
--- a/cli/lnk-identities/t/src/tests/git/new.rs
+++ b/cli/lnk-identities/t/src/tests/git/new.rs
@@ -72,10 +72,7 @@ fn creation() -> anyhow::Result<()> {
let branch = payload.default_branch.unwrap();
assert_eq!(
repo.path().canonicalize()?,
- temp.path()
- .join(payload.name.as_str())
- .join(".git")
- .canonicalize()?
+ temp.path().join(".git").canonicalize()?
);
assert_head(&repo, &branch)?;
assert_remote(&repo, &branch, &url)?;
--
2.36.1
[PATCH v1 3/4] Make gitd accept the URL format of include files
The include files generated by librad create remotes with URLs of the
form `rad://rad:git:<base32-z multihash>.git`. Modify gitd to accept URLs with
a path component of `rad:git:<base32-z multihash>.git`. This allows
using git's `url.rad.insteadOf` config to point all such URLs at a local
`gitd`.
Signed-off-by: Alex Good <alex@memoryandthought.me>
---
cli/gitd-lib/src/git_subprocess.rs | 12 +++---
cli/gitd-lib/src/git_subprocess/command.rs | 12 +++---
cli/gitd-lib/src/lib.rs | 1 +
cli/gitd-lib/src/processes.rs | 20 +++----
cli/gitd-lib/src/server.rs | 5 +--
cli/gitd-lib/src/ssh_service.rs | 50 ++++++++++++++++++++++
6 files changed, 77 insertions(+), 23 deletions(-)
create mode 100644 cli/gitd-lib/src/ssh_service.rs
diff --git a/cli/gitd-lib/src/git_subprocess.rs b/cli/gitd-lib/src/git_subprocess.rs
index 27fedb11..731db465 100644
--- a/cli/gitd-lib/src/git_subprocess.rs
+++ b/cli/gitd-lib/src/git_subprocess.rs
@@ -20,13 +20,13 @@ use tokio::{
process::Child,
};
- use librad::git::{storage, Urn};
+ use librad::git::storage;
use link_async::Spawner;
- use link_git::service::SshService;
use crate::{
hooks::{self, Hooks},
processes::ProcessReply,
+ ssh_service,
};
pub mod command;
@@ -51,7 +51,7 @@ pub(crate) async fn run_git_subprocess<Replier, S>(
pool: Arc<storage::Pool<storage::Storage>>,
incoming: tokio::sync::mpsc::Receiver<Message>,
mut out: Replier,
- service: SshService<Urn>,
+ service: ssh_service::SshService,
hooks: Hooks<S>,
) -> Result<(), Error<Replier::Error>>
where
@@ -74,7 +74,7 @@ async fn run_git_subprocess_inner<Replier, S>(
pool: Arc<storage::Pool<storage::Storage>>,
mut incoming: tokio::sync::mpsc::Receiver<Message>,
out: &mut Replier,
- service: SshService<Urn>,
+ service: ssh_service::SshService,
hooks: Hooks<S>,
) -> Result<(), Error<Replier::Error>>
where
@@ -87,7 +87,7 @@ where
if service.is_upload() {
match hooks
- .pre_upload(&mut progress_reporter, service.path.clone())
+ .pre_upload(&mut progress_reporter, service.path.clone().into())
.await
{
Ok(()) => {},
@@ -260,7 +260,7 @@ where
// Run hooks
if service.service == GitService::ReceivePack.into() {
if let Err(e) = hooks
- .post_receive(&mut progress_reporter, service.path.clone())
+ .post_receive(&mut progress_reporter, service.path.into())
.await
{
match e {
diff --git a/cli/gitd-lib/src/git_subprocess/command.rs b/cli/gitd-lib/src/git_subprocess/command.rs
index 0c89b70e..215d2263 100644
--- a/cli/gitd-lib/src/git_subprocess/command.rs
+++ b/cli/gitd-lib/src/git_subprocess/command.rs
@@ -16,9 +16,10 @@ use librad::{
},
reflike,
};
- use link_git::service::SshService;
use radicle_git_ext as ext;
+ use crate::ssh_service;
+
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("no such URN {0}")]
@@ -44,13 +45,14 @@ pub enum Error {
// crate.
pub(super) fn create_command(
storage: &storage::Storage,
- service: SshService<Urn>,
+ service: ssh_service::SshService,
) -> Result<tokio::process::Command, Error> {
- guard_has_urn(storage, &service.path)?;
+ let urn = service.path.into();
+ guard_has_urn(storage, &urn)?;
let mut git = tokio::process::Command::new("git");
git.current_dir(&storage.path()).args(&[
- &format!("--namespace={}", Namespace::from(&service.path)),
+ &format!("--namespace={}", Namespace::from(&urn)),
"-c",
"transfer.hiderefs=refs/remotes",
"-c",
@@ -62,7 +64,7 @@ pub(super) fn create_command(
match service.service.0 {
GitService::UploadPack | GitService::UploadPackLs => {
// Fetching remotes is ok, pushing is not
- visible_remotes(storage, &service.path)?.for_each(|remote_ref| {
+ visible_remotes(storage, &urn)?.for_each(|remote_ref| {
git.arg("-c")
.arg(format!("uploadpack.hiderefs=!^{}", remote_ref));
});
diff --git a/cli/gitd-lib/src/lib.rs b/cli/gitd-lib/src/lib.rs
index a4461f97..9163016e 100644
--- a/cli/gitd-lib/src/lib.rs
+++ b/cli/gitd-lib/src/lib.rs
@@ -31,6 +31,7 @@ pub mod git_subprocess;
pub mod hooks;
mod processes;
mod server;
+ mod ssh_service;
#[derive(thiserror::Error, Debug)]
pub enum RunError {
diff --git a/cli/gitd-lib/src/processes.rs b/cli/gitd-lib/src/processes.rs
index da40593e..2324d9f9 100644
--- a/cli/gitd-lib/src/processes.rs
+++ b/cli/gitd-lib/src/processes.rs
@@ -25,15 +25,11 @@ use futures::{
stream::{FuturesUnordered, StreamExt},
FutureExt,
};
- use librad::git::{
- storage::{pool::Pool, Storage},
- Urn,
- };
+ use librad::git::storage::{pool::Pool, Storage};
use link_async::{Spawner, Task};
- use link_git::service::SshService;
use tracing::instrument;
- use crate::{git_subprocess, hooks::Hooks};
+ use crate::{git_subprocess, hooks::Hooks, ssh_service};
const MAX_IN_FLIGHT_GITS: usize = 10;
@@ -69,7 +65,7 @@ enum Message<Id> {
/// sent on a separate channel, which allows us to exert backpressure on
/// incoming exec requests.
struct ExecGit<Id, Reply, Signer> {
- service: SshService<Urn>,
+ service: ssh_service::SshService,
channel: Id,
handle: Reply,
hooks: Hooks<Signer>,
@@ -112,7 +108,7 @@ where
&self,
channel: Id,
handle: Reply,
- service: SshService<Urn>,
+ service: ssh_service::SshService,
hooks: Hooks<Signer>,
) -> Result<(), ProcessesLoopGone> {
self.exec_git_send
@@ -215,7 +211,13 @@ where
}
#[instrument(skip(self, handle, hooks))]
- fn exec_git(&mut self, id: Id, handle: Reply, service: SshService<Urn>, hooks: Hooks<S>) {
+ fn exec_git(
+ &mut self,
+ id: Id,
+ handle: Reply,
+ service: ssh_service::SshService,
+ hooks: Hooks<S>,
+ ) {
let (tx, rx) = tokio::sync::mpsc::channel(1);
let task = self.spawner.spawn({
let spawner = self.spawner.clone();
diff --git a/cli/gitd-lib/src/server.rs b/cli/gitd-lib/src/server.rs
index 87019468..0cfb5120 100644
--- a/cli/gitd-lib/src/server.rs
+++ b/cli/gitd-lib/src/server.rs
@@ -13,9 +13,8 @@ use rand::Rng;
use tokio::net::{TcpListener, TcpStream};
use tracing::instrument;
- use librad::{git::Urn, PeerId};
+ use librad::PeerId;
use link_async::{incoming::TcpListenerExt, Spawner};
- use link_git::service;
use crate::{
hooks::Hooks,
@@ -268,7 +267,7 @@ where
) -> Self::FutureUnit {
let exec_str = String::from_utf8_lossy(data);
tracing::debug!(?exec_str, "received exec_request");
- let ssh_service: service::SshService<Urn> = match exec_str.parse() {
+ let ssh_service: crate::ssh_service::SshService = match exec_str.parse() {
Ok(s) => s,
Err(e) => {
tracing::error!(err=?e, ?exec_str, "unable to parse exec str for exec_request");
diff --git a/cli/gitd-lib/src/ssh_service.rs b/cli/gitd-lib/src/ssh_service.rs
new file mode 100644
index 00000000..4a6ec444
--- /dev/null
+++ b/cli/gitd-lib/src/ssh_service.rs
@@ -0,0 +1,50 @@
+ use std::str::FromStr;
+
+ use librad::{git::Urn, git_ext};
+
+ /// A wrapper around Urn which parses strings of the form "rad:git:<id>.git",
+ /// this is used as the path parameter of `link_git::SshService`.
+ #[derive(Debug, Clone)]
+ pub(crate) struct UrnPath(Urn);
+
+ pub(crate) type SshService = link_git::service::SshService<UrnPath>;
+
+ #[derive(thiserror::Error, Debug)]
+ pub(crate) enum Error {
+ #[error("path component of remote should end with '.git'")]
+ MissingSuffix,
+ #[error(transparent)]
+ Urn(#[from] librad::identities::urn::error::FromStr<git_ext::oid::FromMultihashError>),
+ }
+
+ impl std::fmt::Display for UrnPath {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "{}.git", self.0)
+ }
+ }
+
+ impl AsRef<Urn> for UrnPath {
+ fn as_ref(&self) -> &Urn {
+ &self.0
+ }
+ }
+
+ impl FromStr for UrnPath {
+ type Err = Error;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ match s.strip_suffix(".git") {
+ Some(prefix) => {
+ let urn = Urn::from_str(prefix)?;
+ Ok(Self(urn))
+ },
+ None => Err(Error::MissingSuffix),
+ }
+ }
+ }
+
+ impl From<UrnPath> for Urn {
+ fn from(u: UrnPath) -> Self {
+ u.0
+ }
+ }
--
2.36.1
[PATCH v1 4/4] Add lnk clone
lnk clone first syncs the local monorepo state with configured seeds for
the given URN, then checks out a working copy of the URN.
If a peer ID is given `lnk clone` checks out the given peer's copy. If
not, `lnk clone` attempts to determine whether there is a head the
project delegates agree on; if there is, it sets
`refs/namespaces/<urn>/HEAD` to this reference and then checks it out
into the working copy. If the delegates have forked, `lnk clone` prints
an error message describing which peers are pointing at which tips so
the user can decide for themselves which peer to check out.
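Example invocations (the URN, peer ID and path are placeholders):

    lnk clone --urn rad:git:<id>
    lnk clone --urn rad:git:<id> --peer <peer id> --path <path>

The first form checks out the agreed default branch into
`$PWD/<project name>`; the second checks out the given peer's view into
`<path>`.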
Signed-off-by: Alex Good <alex@memoryandthought.me>
---
bins/Cargo.lock | 3 ++
cli/lnk-exe/src/cli/args.rs | 1 +
cli/lnk-sync/Cargo.toml | 10 ++++-
cli/lnk-sync/src/cli/args.rs | 23 +++---
cli/lnk-sync/src/cli/main.rs | 46 +++++--
cli/lnk-sync/src/forked.rs | 87 ++++++++++++++++++++++++++++++++++++
cli/lnk-sync/src/lib.rs | 3 ++
7 files changed, 161 insertions(+), 12 deletions(-)
create mode 100644 cli/lnk-sync/src/forked.rs
diff --git a/bins/Cargo.lock b/bins/Cargo.lock
index 4cb8dd64..5f9b3f4d 100644
--- a/bins/Cargo.lock
+++ b/bins/Cargo.lock
@@ -2289,6 +2289,7 @@ dependencies = [
"anyhow",
"clap",
"either",
+ "git-ref-format",
"git2",
"lazy_static",
"libgit2-sys",
@@ -2349,10 +2350,12 @@ dependencies = [
"either",
"futures",
"git-ref-format",
+ "git2",
"librad",
"link-async",
"link-replication",
"lnk-clib",
+ "lnk-identities",
"serde",
"serde_json",
"thiserror",
diff --git a/cli/lnk-exe/src/cli/args.rs b/cli/lnk-exe/src/cli/args.rs
index 6bac13d9..494404de 100644
--- a/cli/lnk-exe/src/cli/args.rs
+++ b/cli/lnk-exe/src/cli/args.rs
@@ -56,5 +56,6 @@ pub enum Command {
/// Manage your Radicle profiles
Profile(lnk_profile::cli::args::Args),
/// Sync with your configured seeds
+ #[clap(flatten)]
Sync(lnk_sync::cli::args::Args),
}
diff --git a/cli/lnk-sync/Cargo.toml b/cli/lnk-sync/Cargo.toml
index 6e2b20ce..ff9dffed 100644
--- a/cli/lnk-sync/Cargo.toml
+++ b/cli/lnk-sync/Cargo.toml
@@ -21,6 +21,11 @@ tracing = "0.1"
version = "3.1"
features = ["derive"]
+ [dependencies.git2]
+ version = "0.13.24"
+ default-features = false
+ features = ["vendored-libgit2"]
+
[dependencies.git-ref-format]
path = "../../git-ref-format"
features = ["serde"]
@@ -38,10 +43,13 @@ path = "../../link-async"
[dependencies.lnk-clib]
path = "../lnk-clib"
+ [dependencies.lnk-identities]
+ path = "../lnk-identities"
+
[dependencies.serde]
version = "1"
features = ["derive"]
[dependencies.tokio]
version = "1.17"
- features = ["rt"]
\ No newline at end of file
+ features = ["rt"]
diff --git a/cli/lnk-sync/src/cli/args.rs b/cli/lnk-sync/src/cli/args.rs
index 38f054d0..4d338279 100644
--- a/cli/lnk-sync/src/cli/args.rs
+++ b/cli/lnk-sync/src/cli/args.rs
@@ -1,15 +1,24 @@
// Copyright © 2022 The Radicle Link Contributors
// SPDX-License-Identifier: GPL-3.0-or-later
- use clap::Parser;
use librad::git::Urn;
use crate::Mode;
- #[derive(Clone, Debug, Parser)]
- pub struct Args {
- #[clap(long)]
- pub urn: Urn,
- #[clap(long, default_value_t)]
- pub mode: Mode,
+ #[derive(Clone, Debug, clap::Subcommand)]
+ pub enum Args {
+ Sync {
+ #[clap(long)]
+ urn: Urn,
+ #[clap(long, default_value_t)]
+ mode: Mode,
+ },
+ Clone {
+ #[clap(long)]
+ urn: Urn,
+ #[clap(long)]
+ path: Option<std::path::PathBuf>,
+ #[clap(long)]
+ peer: Option<librad::PeerId>,
+ },
}
diff --git a/cli/lnk-sync/src/cli/main.rs b/cli/lnk-sync/src/cli/main.rs
index 1d2193e0..c51a0ce2 100644
--- a/cli/lnk-sync/src/cli/main.rs
+++ b/cli/lnk-sync/src/cli/main.rs
@@ -3,9 +3,11 @@
use std::sync::Arc;
+ use lnk_identities::identity_dir::IdentityDir;
use tokio::runtime::Runtime;
use librad::{
+ git::identities::project::heads,
net::{
self,
peer::{client, Client},
@@ -20,7 +22,7 @@ use lnk_clib::{
seed::{self, Seeds},
};
- use crate::{cli::args::Args, sync};
+ use crate::{cli::args::Args, forked, sync};
pub fn main(
args: Args,
@@ -48,7 +50,7 @@ pub fn main(
user_storage: client::config::Storage::default(),
network: Network::default(),
};
- let endpoint = quic::SendOnly::new(signer, Network::default()).await?;
+ let endpoint = quic::SendOnly::new(signer.clone(), Network::default()).await?;
let client = Client::new(config, spawner, endpoint)?;
let seeds = {
let seeds_file = profile.paths().seeds_file();
@@ -70,8 +72,44 @@ pub fn main(
seeds
};
- let synced = sync(&client, args.urn, seeds, args.mode).await;
- println!("{}", serde_json::to_string(&synced)?);
+ match args {
+ Args::Sync { urn, mode } => {
+ let synced = sync(&client, urn, seeds, mode).await;
+ println!("{}", serde_json::to_string(&synced)?);
+ },
+ Args::Clone { urn, path, peer } => {
+ let path = IdentityDir::at_or_current_dir(path)?;
+ println!("cloning urn {} into {}", urn, path);
+ println!("syncing monorepo with seeds");
+ sync(&client, urn.clone(), seeds, crate::Mode::Fetch).await;
+
+ let storage = librad::git::Storage::open(paths, signer.clone())?;
+
+ let vp = librad::git::identities::project::verify(&storage, &urn)?
+ .ok_or_else(|| anyhow::anyhow!("no such project"))?;
+
+ if peer.is_none() {
+ match heads::set_default_head(&storage, vp) {
+ Ok(_) => {},
+ Err(heads::error::SetDefaultBranch::Forked(forks)) => {
+ let error = forked::ForkError::from_forked(&storage, forks);
+ println!("{}", error);
+ return Ok(());
+ },
+ Err(e) => anyhow::bail!("error setting HEAD for project: {}", e),
+ }
+ }
+ let repo = lnk_identities::project::checkout(
+ &storage,
+ paths.clone(),
+ signer,
+ &urn,
+ peer,
+ path,
+ )?;
+ println!("working copy created at `{}`", repo.path().display());
+ },
+ }
Ok(())
})
}
diff --git a/cli/lnk-sync/src/forked.rs b/cli/lnk-sync/src/forked.rs
new file mode 100644
index 00000000..11e4b43b
--- /dev/null
+++ b/cli/lnk-sync/src/forked.rs
@@ -0,0 +1,87 @@
+ use std::collections::BTreeSet;
+
+ use librad::git::{identities::project::heads, storage::ReadOnlyStorage};
+
+ /// A nicely formatted error message describing the forks in a forked project
+ pub struct ForkError(Vec<ForkDescription>);
+
+ impl ForkError {
+ pub(crate) fn from_forked<S>(storage: &S, forked: BTreeSet<heads::Fork>) -> Self
+ where
+ S: ReadOnlyStorage,
+ {
+ ForkError(
+ forked
+ .into_iter()
+ .map(|f| ForkDescription::from_fork(storage, f))
+ .collect(),
+ )
+ }
+ }
+
+ impl std::fmt::Display for ForkError {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ writeln!(f, "the delegates for this project have forked")?;
+ writeln!(f, "you must choose a specific peer to clone")?;
+ writeln!(f, "you can do this using the --peer <peer id> argument")?;
+ writeln!(f, "and one of the peers listed below")?;
+ writeln!(f)?;
+ writeln!(f, "There are {} different forks", self.0.len())?;
+ writeln!(f)?;
+ for fork in &self.0 {
+ fork.fmt(f)?;
+ writeln!(f)?;
+ }
+ Ok(())
+ }
+ }
+
+ struct ForkDescription {
+ fork: heads::Fork,
+ tip_commit_message: Option<String>,
+ }
+
+ impl ForkDescription {
+ fn from_fork<S>(storage: &S, fork: heads::Fork) -> Self
+ where
+ S: ReadOnlyStorage,
+ {
+ let tip = std::rc::Rc::new(fork.tip);
+ let tip_commit_message = storage
+ .find_object(&tip)
+ .ok()
+ .and_then(|o| o.and_then(|o| o.as_commit().map(|c| c.summary().map(|m| m.to_string()))))
+ .unwrap_or(None);
+ ForkDescription {
+ fork,
+ tip_commit_message,
+ }
+ }
+ }
+
+ impl std::fmt::Display for ForkDescription {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ writeln!(
+ f,
+ "{} peers pointing at {}",
+ self.fork.tip_peers.len(),
+ self.fork.tip
+ )?;
+ match &self.tip_commit_message {
+ Some(m) => {
+ writeln!(f, "Commit message:")?;
+ writeln!(f, " {}", m)?;
+ },
+ None => {
+ writeln!(f)?;
+ writeln!(f, "unable to determine commit message")?;
+ writeln!(f)?;
+ },
+ }
+ writeln!(f, "Peers:")?;
+ for peer in &self.fork.tip_peers {
+ writeln!(f, " {}", peer)?;
+ }
+ Ok(())
+ }
+ }
diff --git a/cli/lnk-sync/src/lib.rs b/cli/lnk-sync/src/lib.rs
index aa3d26fd..b9038ea1 100644
--- a/cli/lnk-sync/src/lib.rs
+++ b/cli/lnk-sync/src/lib.rs
@@ -17,6 +17,7 @@ use librad::{
use lnk_clib::seed::{Seed, Seeds};
pub mod cli;
+ mod forked;
pub mod replication;
pub mod request_pull;
@@ -146,3 +147,5 @@ where
}
syncs
}
+
+ pub async fn clone() {}
--
2.36.1
Re: [PATCH v1 1/4] Add default_branch_head and set_default_branch
Does it indeed work to set HEAD to an oid? I was under the impression that
`git-clone` would expect it to be a symref; if it is not, it doesn't know which
branch to check out. I believe it will leave the cloned repo in a detached head
state, which is marginally less scary than not checking out anything at all.
Re: [PATCH v1 4/4] Add lnk clone
> +pub async fn clone() {}
This function summarises Rust.
Re: [PATCH v1 4/4] Add lnk clone
On 27/05/22 09:16pm, Kim Altintop wrote:
> > +pub async fn clone() {}
>
> This function summarises Rust.
🤣 I have no idea how that got there.
Re: [PATCH v1 1/4] Add default_branch_head and set_default_branch
On 27/05/22 09:12pm, Kim Altintop wrote:
> Does it indeed work to set HEAD to an oid? I was under the impression that
> `git-clone` would expect it to be a symref; if it is not, it doesn't know which
> branch to check out. I believe it will leave the cloned repo in a detached head
> state, which is marginally less scary than not checking out anything at all.
Ah, yes, this should be a symref. In my experiments setting an OID
worked fine, but there's no reason not to set a symref, will fix.
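Something like this, I think (untested, reusing the names from
`set_default_head`):

    let branch_ref = branch_head.into_qualified();
    repo.reference(&branch_ref, target, true, "set default branch")?;
    // make HEAD a symref to the branch rather than pointing it at the oid
    repo.reference_symbolic(&head, &branch_ref, true, "set head")?;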
Re: [PATCH v1 1/4] Add default_branch_head and set_default_branch
On Fri May 27, 2022 at 6:10 PM IST, Alex Good wrote:
> When checking out projects from the monorepo it is useful to set the
> `refs/namespaces/<urn>/HEAD` reference to the default branch of the
> project so that the resulting working copy is in a useful state (namely
> pointing at the latest commit for the default branch).
>
> In general this is not possible because delegates may have diverging
> views of the project, but often they do not disagree. Add
> `librad::git::identities::project::heads::default_branch_head` to
> determine if there is an agreed on default branch commit and
> `librad::git::identities::project::heads::set_default_branch` to set the
> local `HEAD` ref where possible.
>
> Signed-off-by: Alex Good <alex@memoryandthought.me>
> ---
> librad/src/git/identities/project.rs | 2 +
> librad/src/git/identities/project/heads.rs | 305 ++++++++++++++
> librad/t/src/integration/scenario.rs | 1 +
> .../scenario/default_branch_head.rs | 387 ++++++++++++++++++
> test/it-helpers/Cargo.toml | 3 +
> test/it-helpers/src/lib.rs | 1 +
> test/it-helpers/src/working_copy.rs | 291 +++++++++++++
> 7 files changed, 990 insertions(+)
> create mode 100644 librad/src/git/identities/project/heads.rs
> create mode 100644 librad/t/src/integration/scenario/default_branch_head.rs
> create mode 100644 test/it-helpers/src/working_copy.rs
>
> diff --git a/librad/src/git/identities/project.rs b/librad/src/git/identities/project.rs
> index 753358bf..ed9f003d 100644
> --- a/librad/src/git/identities/project.rs
> +++ b/librad/src/git/identities/project.rs
> @@ -8,6 +8,8 @@ use std::{convert::TryFrom, fmt::Debug};
> use either::Either;
> use git_ext::{is_not_found_err, OneLevel};
>
> +pub mod heads;
> +
> use super::{
> super::{
> refs::Refs as Sigrefs,
> diff --git a/librad/src/git/identities/project/heads.rs b/librad/src/git/identities/project/heads.rs
> new file mode 100644
> index 00000000..447e352e
> --- /dev/null
> +++ b/librad/src/git/identities/project/heads.rs
> @@ -0,0 +1,305 @@
> +use std::{collections::BTreeSet, convert::TryFrom, fmt::Debug};
> +
> +use crate::{
> + git::{
> + storage::{self, ReadOnlyStorage},
> + Urn,
> + },
> + identities::git::VerifiedProject,
> + PeerId,
> +};
> +use git_ext::RefLike;
> +use git_ref_format::{lit, name, Namespaced, Qualified, RefStr, RefString};
> +
> +#[derive(Clone, Debug, PartialEq)]
> +pub enum DefaultBranchHead {
> + /// Not all delegates agreed on an ancestry tree. Each set of diverging
> + /// delegates is included as a `Fork`
> + Forked(BTreeSet<Fork>),
> + /// All the delegates agreed on an ancestry tree
> + Head {
> + /// The most recent commit for the tree
> + target: git2::Oid,
> + /// The branch name which is the default branch
> + branch: RefString,
> + },
> +}
> +
> +#[derive(Clone, Debug, std::hash::Hash, PartialEq, Eq, PartialOrd, Ord)]
> +pub struct Fork {
> + /// Peers which are in the ancestry set of this fork but not the tips. This
> + /// means that these peers can appear in multiple forks
> + pub ancestor_peers: BTreeSet<PeerId>,
> + /// The peers pointing at the tip of this fork
> + pub tip_peers: BTreeSet<PeerId>,
> + /// The most recent tip
> + pub tip: git2::Oid,
> +}
> +
> +pub mod error {
> + use git_ref_format as ref_format;
> + use std::collections::BTreeSet;
> +
> + use crate::git::storage::read;
> +
> + #[derive(thiserror::Error, Debug)]
> + pub enum FindDefaultBranch {
> + #[error("the project payload does not define a default branch")]
> + NoDefaultBranch,
> + #[error("no peers had published anything for the default branch")]
> + NoTips,
> + #[error(transparent)]
> + RefFormat(#[from] ref_format::Error),
> + #[error(transparent)]
> + Read(#[from] read::Error),
> + }
> +
> + #[derive(thiserror::Error, Debug)]
> + pub enum SetDefaultBranch {
> + #[error(transparent)]
> + Find(#[from] FindDefaultBranch),
> + #[error(transparent)]
> + Git(#[from] git2::Error),
> + #[error("the delegates have forked")]
> + Forked(BTreeSet<super::Fork>),
> + }
> +}
> +
> +/// Find the head of the default branch of `project`
> +///
> +/// In general there can be a different view of the default branch of a project
> +/// for each peer ID of each delegate and there is no reason that these would
> +/// all be compatible. It's quite possible that two peers publish entirely
> +/// unrelated ancestry trees for a given branch. In this case this function will
> +/// return [`DefaultBranchHead::Forked`].
> +///
> +/// However, often it's the case that delegates do agree on an ancestry tree for
> +/// a particular branch and the difference between peers is just that some are
> +/// ahead of others. In this case this function will return
> +/// [`DefaultBranchHead::Head`].
> +///
> +/// # Errors
> +///
> +/// * If the project contains no default branch definition
> +/// * No peers had published anything for the default branch
> +pub fn default_branch_head(
> + storage: &storage::Storage,
> + project: VerifiedProject,
> +) -> Result<DefaultBranchHead, error::FindDefaultBranch> {
> + if let Some(default_branch) = &project.payload().subject.default_branch {
> + let local = storage.peer_id();
> + let branch_refstring = RefString::try_from(default_branch.to_string())?;
> + let mut multiverse = Multiverse::new(branch_refstring.clone());
> + let peers =
> + project
> + .delegations()
> + .into_iter()
> + .flat_map(|d| -> Box<dyn Iterator<Item = PeerId>> {
> + use either::Either::*;
> + match d {
> + Left(key) => Box::new(std::iter::once(PeerId::from(*key))),
> + Right(person) => Box::new(
> + person
> + .delegations()
> + .into_iter()
> + .map(|key| PeerId::from(*key)),
> + ),
> + }
> + });
tip: `Either<impl Iterator, impl Iterator>` is also `Iterator`, so you
could use `map_left` and `map_right` instead of match and `Box`. But
this is just as good :)
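Roughly (untested):

    .flat_map(|d| {
        d.map_left(|key| std::iter::once(PeerId::from(*key)))
            .map_right(|person| {
                person
                    .delegations()
                    .into_iter()
                    .map(|key| PeerId::from(*key))
            })
    });

Both arms yield `PeerId`s, so the resulting `Either` is itself an
`Iterator`.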
> + for peer_id in peers {
> + let tip = peer_commit(storage, project.urn(), peer_id, local, &branch_refstring)?;
> + if let Some(tip) = tip {
> + multiverse.add_peer(storage, peer_id, tip)?;
> + }
It would be a bit worrying if a peer didn't have a commit, but maybe
this isn't the place to worry about? Maybe a warning log at the least?
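E.g. something like (assuming `tracing` is available in this module):

    if let Some(tip) = tip {
        multiverse.add_peer(storage, peer_id, tip)?;
    } else {
        tracing::warn!(peer = %peer_id, "delegate has no tip for the default branch");
    }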
> + }
> + multiverse.finish()
> + } else {
> + Err(error::FindDefaultBranch::NoDefaultBranch)
> + }
> +}
> +
> +/// Determine the default branch for a project and set the local HEAD to this
> +/// branch
> +///
> +/// In more detail, this function determines the local head using
> +/// [`default_branch_head`] and then sets the following references to the
> +/// `DefaultBranchHead::target` returned:
> +///
> +/// * `refs/namespaces/<URN>/refs/HEAD`
> +/// * `refs/namespaces/<URN>/refs/<default branch name>`
> +///
> +/// # Why do this?
> +///
> +/// When cloning from a namespace representing a project to a working copy we
> +/// would like, if possible, to omit the specification of which particular peer
> +/// we want to clone. Specifically we would like to clone
> +/// `refs/namespaces/<URN>/`. This does work, but the working copy we end up
> +/// with does not have any contents because git uses `refs/HEAD` of the source
> +/// repository to figure out what branch to set the new working copy to.
> +/// Therefore, by setting `refs/HEAD` and `refs/<default branch name>` of the
> +/// namespace `git clone` (and any other clone based workflows) does something
> +/// sensible and we end up with a working copy which is looking at the default
> +/// branch of the project.
> +///
> +/// # Errors
> +///
> +/// * If no default branch could be determined
> +pub fn set_default_head(
> + storage: &storage::Storage,
> + project: VerifiedProject,
> +) -> Result<git2::Oid, error::SetDefaultBranch> {
> + let urn = project.urn();
> + let default_head = default_branch_head(storage, project)?;
> + match default_head {
> + DefaultBranchHead::Head { target, branch } => {
> + // Note that we can't use `Namespaced` because `refs/HEAD` is not a `Qualified`
> + let head =
> + RefString::try_from(format!("refs/namespaces/{}/refs/HEAD", urn.encode_id()))
> + .expect("urn is valid namespace");
> + let branch_head = Namespaced::from(lit::refs_namespaces(
> + &urn,
> + Qualified::from(lit::refs_heads(branch)),
> + ));
> +
> + let repo = storage.as_raw();
> + repo.reference(&head, target, true, "set head")?;
> + repo.reference(&branch_head.into_qualified(), target, true, "set head")?;
> + Ok(target)
> + },
> + DefaultBranchHead::Forked(forks) => Err(error::SetDefaultBranch::Forked(forks)),
> + }
> +}
> +
> +fn peer_commit(
> + storage: &storage::Storage,
> + urn: Urn,
> + peer_id: PeerId,
> + local: &PeerId,
> + branch: &RefStr,
> +) -> Result<Option<git2::Oid>, error::FindDefaultBranch> {
> + let remote_name = RefString::try_from(peer_id.default_encoding())?;
> + let reference = if local == &peer_id {
> + RefString::from(Qualified::from(lit::refs_heads(branch)))
> + } else {
> + RefString::from(Qualified::from(lit::refs_remotes(remote_name)))
> + .join(name::HEADS)
> + .join(branch)
> + };
> + let urn = urn.with_path(Some(RefLike::from(reference)));
> + let tip = storage.tip(&urn, git2::ObjectType::Commit)?;
> + Ok(tip.map(|c| c.id()))
> +}
> +
> +#[derive(Debug)]
> +struct Multiverse {
> + branch: RefString,
> + histories: Vec<History>,
> +}
Love the naming :D
> +
> +impl Multiverse {
> + fn new(branch: RefString) -> Multiverse {
> + Multiverse {
> + branch,
> + histories: Vec::new(),
> + }
> + }
> +
> + fn add_peer(
> + &mut self,
> + storage: &storage::Storage,
> + peer: PeerId,
> + tip: git2::Oid,
> + ) -> Result<(), error::FindDefaultBranch> {
> + // If this peer's tip is in the ancestors of any existing histories then we just
> + // add the peer to those histories
> + let mut found_descendant = false;
> + for history in &mut self.histories {
> + if history.ancestors.contains(&tip) {
> + found_descendant = true;
> + history.ancestor_peers.insert(peer);
> + } else if history.tip == tip {
> + found_descendant = true;
> + history.tip_peers.insert(peer);
> + }
> + }
> + if found_descendant {
> + return Ok(());
> + }
> +
> + // Otherwise we load a new history
> + let mut history = History::load(storage, peer, tip)?;
> +
> + // Then we go through existing histories and check if any of them are ancestors
> + // of the new history. If they are then we incorporate them as ancestors
> + // of the new history and remove them from the multiverse
> + let mut i = 0;
> + while i < self.histories.len() {
> + let other_history = &self.histories[i];
> + if history.ancestors.contains(&other_history.tip) {
> + let other_history = self.histories.remove(i);
> + history.ancestor_peers.extend(other_history.ancestor_peers);
> + history.ancestor_peers.extend(other_history.tip_peers);
> + } else {
> + i += 1;
> + }
> + }
> + self.histories.push(history);
> +
> + Ok(())
> + }
> +
> + fn finish(self) -> Result<DefaultBranchHead, error::FindDefaultBranch> {
> + if self.histories.is_empty() {
> + Err(error::FindDefaultBranch::NoTips)
> + } else if self.histories.len() == 1 {
> + Ok(DefaultBranchHead::Head {
> + target: self.histories[0].tip,
> + branch: self.branch,
> + })
> + } else {
> + Ok(DefaultBranchHead::Forked(
> + self.histories
> + .into_iter()
> + .map(|h| Fork {
> + ancestor_peers: h.ancestor_peers,
> + tip_peers: h.tip_peers,
> + tip: h.tip,
> + })
> + .collect(),
> + ))
> + }
> + }
> +}
> +
> +#[derive(Debug)]
> +struct History {
> + tip: git2::Oid,
> + tip_peers: BTreeSet<PeerId>,
> + ancestor_peers: BTreeSet<PeerId>,
> + ancestors: BTreeSet<git2::Oid>,
> +}
> +
> +impl History {
> + fn load(
> + storage: &storage::Storage,
> + peer: PeerId,
> + tip: git2::Oid,
> + ) -> Result<History, storage::Error> {
> + let repo = storage.as_raw();
> + let mut walk = repo.revwalk()?;
> + walk.set_sorting(git2::Sort::TOPOLOGICAL)?;
> + walk.push(tip)?;
> + let mut ancestors = walk.collect::<Result<BTreeSet<git2::Oid>, _>>()?;
> + ancestors.remove(&tip);
> + let mut peers = BTreeSet::new();
> + peers.insert(peer);
> + let mut tip_peers = BTreeSet::new();
> + tip_peers.insert(peer);
> + Ok(History {
> + tip,
> + tip_peers,
> + ancestors,
> + ancestor_peers: BTreeSet::new(),
> + })
> + }
> +}
> diff --git a/librad/t/src/integration/scenario.rs b/librad/t/src/integration/scenario.rs
> index 9bfdd2ad..c47720a0 100644
> --- a/librad/t/src/integration/scenario.rs
> +++ b/librad/t/src/integration/scenario.rs
> @@ -5,6 +5,7 @@
>
> mod collaboration;
> mod collaborative_objects;
> +mod default_branch_head;
> mod menage;
> mod passive_replication;
> #[cfg(feature = "replication-v3")]
> diff --git a/librad/t/src/integration/scenario/default_branch_head.rs b/librad/t/src/integration/scenario/default_branch_head.rs
> new file mode 100644
> index 00000000..2e7048c2
> --- /dev/null
> +++ b/librad/t/src/integration/scenario/default_branch_head.rs
> @@ -0,0 +1,387 @@
> +// Copyright © 2019-2020 The Radicle Foundation <hello@radicle.foundation>
> +//
> +// This file is part of radicle-link, distributed under the GPLv3 with Radicle
> +// Linking Exception. For full terms see the included LICENSE file.
> +
> +use std::{convert::TryFrom, ops::Index as _};
> +
> +use tempfile::tempdir;
> +
> +use git_ref_format::{lit, name, Namespaced, Qualified, RefString};
> +use it_helpers::{
> + fixed::{TestPerson, TestProject},
> + testnet::{self, RunningTestPeer},
> + working_copy::{WorkingCopy, WorkingRemote as Remote},
> +};
> +use librad::git::{
> + identities::{self, local, project::heads},
> + storage::ReadOnlyStorage,
> + tracking,
> + types::{Namespace, Reference},
> + Urn,
> +};
> +use link_identities::payload;
> +use test_helpers::logging;
> +
> +fn config() -> testnet::Config {
> + testnet::Config {
> + num_peers: nonzero!(2usize),
> + min_connected: 2,
> + bootstrap: testnet::Bootstrap::from_env(),
> + }
> +}
> +
> +/// This test checks that the logic of `librad::git::identities::project::heads`
> +/// is correct. To do this we need to set up various scenarios where the
> +/// delegates of a project agree or disagree on the default branch of a project.
> +#[test]
> +fn default_branch_head() {
> + logging::init();
> +
> + let net = testnet::run(config()).unwrap();
> + net.enter(async {
> + // Setup a testnet with two peers and create a `Person` on each peer
> + let peer1 = net.peers().index(0);
> + let peer2 = net.peers().index(1);
> +
> + let id1 = peer1
> + .using_storage::<_, anyhow::Result<TestPerson>>(|s| {
> + let person = TestPerson::create(s)?;
> + let local = local::load(s, person.owner.urn()).unwrap();
> + s.config()?.set_user(local)?;
> + Ok(person)
> + })
> + .await
> + .unwrap()
> + .unwrap();
You could change this to be `TestProject` to combine the creation of
the `Person` and `Project` in one go.
> +
> + let id2 = peer2
> + .using_storage::<_, anyhow::Result<TestPerson>>(|s| {
> + let person = TestPerson::create(s)?;
> + let local = local::load(s, person.owner.urn()).unwrap();
> + s.config()?.set_user(local)?;
> + Ok(person)
> + })
> + .await
> + .unwrap()
> + .unwrap();
> +
> + id2.pull(peer2, peer1).await.unwrap();
> + id1.pull(peer1, peer2).await.unwrap();
> +
> + // Create a project on peer1 with both `Person`s as delegates
> + let proj = peer1
> + .using_storage({
> + let owner = id1.owner.clone();
> + move |s| {
> + TestProject::from_project_payload(
> + s,
> + owner,
> + payload::Project {
> + name: "venus".into(),
> + description: None,
> + default_branch: Some(name::MASTER.to_string().into()),
> + },
> + )
> + }
> + })
> + .await
> + .unwrap()
> + .unwrap();
> +
> + // Track peer2 on peer1
> + peer1
> + .using_storage::<_, anyhow::Result<()>>({
> + let urn = proj.project.urn();
> + let peer2_id = peer2.peer_id();
> + move |s| {
> + tracking::track(
> + s,
> + &urn,
> + Some(peer2_id),
> + tracking::Config::default(),
> + tracking::policy::Track::Any,
> + )??;
> + Ok(())
> + }
> + })
> + .await
> + .unwrap()
> + .unwrap();
> +
> + proj.pull(peer1, peer2).await.unwrap();
> +
> + // Add peer2
> + peer1
> + .using_storage({
> + let urn = proj.project.urn();
> + let owner1 = id1.owner.clone();
> + let owner2 = id2.owner.clone();
> + move |storage| -> Result<(), anyhow::Error> {
> + identities::project::update(
> + storage,
> + &urn,
> + None,
> + None,
> + librad::identities::delegation::Indirect::try_from_iter(
> + vec![either::Either::Right(owner1), either::Either::Right(owner2)]
> + .into_iter(),
> + )
> + .unwrap(),
> + )?;
> + identities::project::verify(storage, &urn)?;
> + Ok(())
> + }
> + })
> + .await
> + .unwrap()
> + .unwrap();
> +
> + proj.pull(peer1, peer2).await.unwrap();
> +
> + // Sign the project document using peer2
> + peer2
> + .using_storage({
> + let urn = proj.project.urn();
> + let peer_id = peer1.peer_id();
> + let rad =
> + Urn::try_from(Reference::rad_id(Namespace::from(&urn)).with_remote(peer_id))
> + .unwrap();
> + move |storage| -> Result<Option<identities::VerifiedProject>, anyhow::Error> {
> + let project = identities::project::get(&storage, &rad)?.unwrap();
> + identities::project::update(
> + storage,
> + &urn,
> + None,
> + None,
> + project.delegations().clone(),
> + )?;
> + identities::project::merge(storage, &urn, peer_id)?;
> + Ok(identities::project::verify(storage, &urn)?)
> + }
> + })
> + .await
> + .unwrap()
> + .unwrap();
> +
> + proj.pull(peer2, peer1).await.unwrap();
I think this exchange of x-signing might be useful as a utility method
on `TestProject`. I get the feeling it'll come up a bit as we begin to
want to test these delegate scenarios.
It'll also allow this test to focus on the details instead of
following through all the setup :)
> +
> + // Merge the signed update into peer1
> + peer1
> + .using_storage({
> + let urn = proj.project.urn();
> + let peer_id = peer2.peer_id();
> + move |storage| -> Result<Option<identities::VerifiedProject>, anyhow::Error> {
> + identities::project::merge(storage, &urn, peer_id)?;
> + Ok(identities::project::verify(storage, &urn)?)
> + }
> + })
> + .await
> + .unwrap()
> + .unwrap();
> +
> + id2.pull(peer2, peer1).await.unwrap();
> +
> + // Okay, now we have a running testnet with two Peers, each of which has a
> + // `Person` who is a delegate on the `TestProject`
> +
> + // Create a commit in peer1 and pull it to peer2. Then in peer2 create a
> + // new commit on top of the original commit and pull that back to peer1.
> + // Then in peer1 fetch the commit, fast forward, and push.
> + let tmp = tempdir().unwrap();
> + let tip = {
> + let mut working_copy1 =
> + WorkingCopy::new(&proj, tmp.path().join("peer1"), peer1).unwrap();
> + let mut working_copy2 =
> + WorkingCopy::new(&proj, tmp.path().join("peer2"), peer2).unwrap();
> +
> + let mastor = Qualified::from(lit::refs_heads(name::MASTER));
> + working_copy1
> + .commit("peer 1 initial", mastor.clone())
> + .unwrap();
> + working_copy1.push().unwrap();
> + proj.pull(peer1, peer2).await.unwrap();
> +
> + working_copy2.fetch(Remote::Peer(peer1.peer_id())).unwrap();
> + working_copy2
> + .create_remote_tracking_branch(Remote::Peer(peer1.peer_id()), name::MASTER)
> + .unwrap();
> + let tip = working_copy2
> + .commit("peer 2 initial", mastor.clone())
> + .unwrap();
> + working_copy2.push().unwrap();
> + proj.pull(peer2, peer1).await.unwrap();
> +
> + working_copy1.fetch(Remote::Peer(peer2.peer_id())).unwrap();
> + working_copy1
> + .fast_forward_to(Remote::Peer(peer2.peer_id()), name::MASTER)
> + .unwrap();
> + working_copy1.push().unwrap();
> + tip
> + };
> +
> + let default_branch = branch_head(peer1, &proj).await.unwrap();
> + // The two peers should have the same view of the default branch
> + assert_eq!(
> + default_branch,
> + identities::project::heads::DefaultBranchHead::Head {
> + target: tip,
> + branch: name::MASTER.to_owned(),
> + }
> + );
> +
> + // now update peer1 and push to peer1's monorepo; we should get the tip of peer1
> + // as the head (because peer2 can be fast forwarded)
> + let tmp = tempdir().unwrap();
> + let tip = {
> + let mut working_copy1 =
> + WorkingCopy::new(&proj, tmp.path().join("peer1"), peer1).unwrap();
> + working_copy1
> + .create_remote_tracking_branch(Remote::Rad, name::MASTER)
> + .unwrap();
> +
> + let mastor = Qualified::from(lit::refs_heads(name::MASTER));
> + let tip = working_copy1.commit("peer 1 fork", mastor.clone()).unwrap();
> + working_copy1.push().unwrap();
> +
> + tip
> + };
> +
> + let default_branch_peer1 = branch_head(peer1, &proj).await.unwrap();
> + assert_eq!(
> + default_branch_peer1,
> + identities::project::heads::DefaultBranchHead::Head {
> + target: tip,
> + branch: name::MASTER.to_owned(),
> + }
> + );
> +
> + // now create an alternate commit on peer2 and sync with peer1, on peer1 we
> + // should get a fork
> + let tmp = tempdir().unwrap();
> + let forked_tip = {
> + let mut working_copy2 =
> + WorkingCopy::new(&proj, tmp.path().join("peer2"), peer2).unwrap();
> + working_copy2
> + .create_remote_tracking_branch(Remote::Rad, name::MASTER)
> + .unwrap();
> +
> + let mastor = Qualified::from(lit::refs_heads(name::MASTER));
> + let forked_tip = working_copy2.commit("peer 2 fork", mastor.clone()).unwrap();
> + working_copy2.push().unwrap();
> +
> + forked_tip
> + };
> +
> + proj.pull(peer2, peer1).await.unwrap();
> +
> + let default_branch_peer1 = branch_head(peer1, &proj).await.unwrap();
> + assert_eq!(
> + default_branch_peer1,
> + identities::project::heads::DefaultBranchHead::Forked(
> + vec![
> + identities::project::heads::Fork {
> + ancestor_peers: std::collections::BTreeSet::new(),
> + tip_peers: std::iter::once(peer1.peer_id()).collect(),
> + tip,
> + },
> + identities::project::heads::Fork {
> + ancestor_peers: std::collections::BTreeSet::new(),
> + tip_peers: std::iter::once(peer2.peer_id()).collect(),
> + tip: forked_tip,
> + }
> + ]
> + .into_iter()
> + .collect()
> + )
> + );
> +
> + // now update peer1 to match peer2
> + let tmp = tempdir().unwrap();
> + let fixed_tip = {
> + let mut working_copy1 =
> + WorkingCopy::new(&proj, tmp.path().join("peer1"), peer1).unwrap();
> + working_copy1.fetch(Remote::Peer(peer2.peer_id())).unwrap();
> + working_copy1
> + .create_remote_tracking_branch(Remote::Peer(peer2.peer_id()), name::MASTER)
> + .unwrap();
> +
> + working_copy1.fetch(Remote::Peer(peer2.peer_id())).unwrap();
> + let tip = working_copy1
> + .merge_remote(peer2.peer_id(), name::MASTER)
> + .unwrap();
> + working_copy1.push().unwrap();
> + tip
> + };
> +
> + let default_branch_peer1 = branch_head(peer1, &proj).await.unwrap();
> + assert_eq!(
> + default_branch_peer1,
> + identities::project::heads::DefaultBranchHead::Head {
> + target: fixed_tip,
> + branch: name::MASTER.to_owned(),
> + }
> + );
> +
> + // now set the head in the monorepo and check that the HEAD reference exists
> + let updated_tip = peer1
> + .using_storage::<_, anyhow::Result<_>>({
> + let urn = proj.project.urn();
> + move |s| {
> + let vp = identities::project::verify(s, &urn)?.ok_or_else(|| {
> + anyhow::anyhow!("failed to get project for default branch")
> + })?;
> + identities::project::heads::set_default_head(s, vp).map_err(anyhow::Error::from)
> + }
> + })
> + .await
> + .unwrap()
> + .unwrap();
> + assert_eq!(updated_tip, fixed_tip);
> +
> + let head_ref = RefString::try_from(format!(
> + "refs/namespaces/{}/refs/HEAD",
> + proj.project.urn().encode_id()
> + ))
> + .unwrap();
> + let master_ref = Namespaced::from(lit::refs_namespaces(
> + &proj.project.urn(),
> + Qualified::from(lit::refs_heads(name::MASTER)),
> + ));
> + let (master_oid, head_oid) = peer1
> + .using_storage::<_, anyhow::Result<_>>(move |s| {
> + let master_oid = s
> + .reference(&master_ref.into_qualified().into_refstring())?
> + .ok_or_else(|| anyhow::anyhow!("master ref not found"))?
> + .peel_to_commit()?
> + .id();
> + let head_oid = s
> + .reference(&head_ref)?
> + .ok_or_else(|| anyhow::anyhow!("head ref not found"))?
> + .peel_to_commit()?
> + .id();
> + Ok((master_oid, head_oid))
> + })
> + .await
> + .unwrap()
> + .unwrap();
> + assert_eq!(master_oid, updated_tip);
> + assert_eq!(head_oid, updated_tip);
> + });
> +}
> +
> +async fn branch_head(
> + peer: &RunningTestPeer,
> + proj: &TestProject,
> +) -> anyhow::Result<heads::DefaultBranchHead> {
> + peer.using_storage::<_, anyhow::Result<_>>({
> + let urn = proj.project.urn();
> + move |s| {
> + let vp = identities::project::verify(s, &urn)?
> + .ok_or_else(|| anyhow::anyhow!("failed to get project for default branch"))?;
> + heads::default_branch_head(s, vp).map_err(anyhow::Error::from)
> + }
> + })
> + .await?
> +}
> diff --git a/test/it-helpers/Cargo.toml b/test/it-helpers/Cargo.toml
> index 32c789cd..119d5aa1 100644
> --- a/test/it-helpers/Cargo.toml
> +++ b/test/it-helpers/Cargo.toml
> @@ -40,5 +40,8 @@ path = "../../link-async"
> [dependencies.lnk-clib]
> path = "../../cli/lnk-clib"
>
> +[dependencies.radicle-git-ext]
> +path = "../../git-ext"
> +
> [dependencies.test-helpers]
> path = "../test-helpers"
> diff --git a/test/it-helpers/src/lib.rs b/test/it-helpers/src/lib.rs
> index 981b922d..5012de39 100644
> --- a/test/it-helpers/src/lib.rs
> +++ b/test/it-helpers/src/lib.rs
> @@ -7,3 +7,4 @@ pub mod layout;
> pub mod ssh;
> pub mod testnet;
> pub mod tmp;
> +pub mod working_copy;
> diff --git a/test/it-helpers/src/working_copy.rs b/test/it-helpers/src/working_copy.rs
> new file mode 100644
> index 00000000..5fbef0dd
> --- /dev/null
> +++ b/test/it-helpers/src/working_copy.rs
> @@ -0,0 +1,291 @@
This is cool! I wrote some code under `fixed::repository` to have a
`TmpRepository` that you can commit to. This was used in the menage
and tracking policy tests. I wonder if we could switch to using this
code instead? It seems like we could, but maybe you could check :)
> +use std::path::Path;
> +
> +use git_ref_format::{lit, name, refspec, Qualified, RefStr, RefString};
> +
> +use librad::{
> + git::{
> + local::url::LocalUrl,
> + types::{
> + remote::{LocalFetchspec, LocalPushspec},
> + Fetchspec,
> + Force,
> + Refspec,
> + Remote,
> + },
> + },
> + git_ext as ext,
> + net::{peer::Peer, protocol::RequestPullGuard},
> + refspec_pattern,
> + PeerId,
> + Signer,
> +};
> +
> +use crate::fixed::TestProject;
> +
> +/// A remote in the working copy
> +pub enum WorkingRemote {
> + /// A remote representing a remote peer, named `PeerId::encode_id`
> + Peer(PeerId),
> + /// A remote representing the local peer, named "rad"
> + Rad,
> +}
> +
> +impl From<PeerId> for WorkingRemote {
> + fn from(p: PeerId) -> Self {
> + WorkingRemote::Peer(p)
> + }
> +}
> +
> +impl WorkingRemote {
> + fn fetchspec(&self) -> Fetchspec {
> + match self {
> + Self::Peer(peer_id) => {
> + let name = RefString::try_from(format!("{}", peer_id)).expect("peer is refstring");
> + let dst = RefString::from(Qualified::from(lit::refs_remotes(name.clone())))
> + .with_pattern(refspec::STAR);
> + let src = RefString::from(Qualified::from(lit::refs_remotes(name)))
> + .and(name::HEADS)
> + .with_pattern(refspec::STAR);
> + let refspec = Refspec {
> + src,
> + dst,
> + force: Force::True,
> + };
> + refspec.into_fetchspec()
> + },
> + Self::Rad => {
> + let name = RefString::try_from("rad").unwrap();
> + let src =
> + RefString::from_iter([name::REFS, name::HEADS]).with_pattern(refspec::STAR);
> + Refspec {
> + src,
> + dst: RefString::from(Qualified::from(lit::refs_remotes(name)))
> + .with_pattern(refspec::STAR),
> + force: Force::True,
> + }
> + .into_fetchspec()
> + },
> + }
> + }
> +
> + fn remote_ref(&self, branch: &RefStr) -> RefString {
> + let name = match self {
> + Self::Rad => name::RAD.to_owned(),
> + Self::Peer(peer_id) => {
> + RefString::try_from(peer_id.to_string()).expect("peer id is refstring")
> + },
> + };
> + RefString::from(Qualified::from(lit::refs_remotes(name))).join(branch)
> + }
> +}
> +
> +/// A `WorkingCopy` for test driving interactions with the monorepo where one
> +/// needs to update the tree of a project.
> +///
> +/// Remotes are named after the peer ID, except in the case of the remote
> +/// representing the local Peer ID - which is called "rad".
> +pub struct WorkingCopy<'a, S, G> {
> + repo: git2::Repository,
> + peer: &'a Peer<S, G>,
> + project: &'a TestProject,
> +}
> +
> +impl<'a, S, G> WorkingCopy<'a, S, G>
> +where
> + S: Signer + Clone,
> + G: RequestPullGuard,
> +{
> + /// Create a new working copy. This initializes a git repository and then
> + /// fetches the state of the local peer into `refs/remotes/rad/*`.
> + pub fn new<P: AsRef<Path>>(
> + project: &'a TestProject,
> + repo_path: P,
> + peer: &'a Peer<S, G>,
> + ) -> Result<WorkingCopy<'a, S, G>, anyhow::Error> {
> + let repo = git2::Repository::init(repo_path.as_ref())?;
> +
> + let mut copy = WorkingCopy {
> + peer,
> + project,
> + repo,
> + };
> + copy.fetch(WorkingRemote::Rad)?;
> + Ok(copy)
> + }
> +
> + /// Fetch changes from the monorepo into the working copy. The fetchspec
> + /// used depends on the peer ID.
> + ///
> + /// * If `from` is `WorkingRemote::Peer` then `refs/remotes/<peer
> + /// ID>/refs/*:refs/remotes/<peer ID>/heads/*`
> + /// * If `from` is `WorkingRemote::Rad` then
> + /// `refs/heads/*:refs/remotes/rad/*`
> + ///
> + /// I.e. changes from remote peers end up in a remote called
> + /// `PeerId::encode_id` whilst changes from the local peer end up in a
> + /// remote called "rad".
> + pub fn fetch(&mut self, from: WorkingRemote) -> Result<(), anyhow::Error> {
> + let fetchspec = from.fetchspec();
> + let url = LocalUrl::from(self.project.project.urn());
> + let mut remote = Remote::rad_remote(url, fetchspec);
> + let _ = remote.fetch(self.peer.clone(), &self.repo, LocalFetchspec::Configured)?;
> + Ok(())
> + }
> +
> + /// Push changes from `refs/heads/*` to the local peer
> + pub fn push(&mut self) -> Result<(), anyhow::Error> {
> + let url = LocalUrl::from(self.project.project.urn());
> + let name = RefString::try_from("rad").unwrap();
> + let fetchspec = Refspec {
> + src: RefString::from_iter([name::REFS, name::HEADS]).with_pattern(refspec::STAR),
> + dst: RefString::from(Qualified::from(lit::refs_remotes(name)))
> + .with_pattern(refspec::STAR),
> + force: Force::True,
> + }
> + .into_fetchspec();
> + let mut remote = Remote::rad_remote(url, fetchspec);
> + let _ = remote.push(
> + self.peer.clone(),
> + &self.repo,
> + LocalPushspec::Matching {
> + pattern: refspec_pattern!("refs/heads/*"),
> + force: Force::True,
> + },
> + )?;
> + Ok(())
> + }
> +
> + /// Create a new commit on top of whichever commit is the head of
> + /// `on_branch`. If the branch does not exist this will create it.
> + pub fn commit(
> + &mut self,
> + message: &str,
> + on_branch: Qualified,
> + ) -> Result<git2::Oid, anyhow::Error> {
> + let branch_name = on_branch.non_empty_components().2;
> + let parent = match self.repo.find_branch(&branch_name, git2::BranchType::Local) {
> + Ok(b) => b.get().target().and_then(|o| self.repo.find_commit(o).ok()),
> + Err(e) if ext::error::is_not_found_err(&e) => None,
> + Err(e) => return Err(anyhow::Error::from(e)),
> + };
> + let empty_tree = {
> + let mut index = self.repo.index()?;
> + let oid = index.write_tree()?;
> + self.repo.find_tree(oid).unwrap()
> + };
> + let author = git2::Signature::now("The Animal", "animal@muppets.com").unwrap();
> + let parents = match &parent {
> + Some(p) => vec![p],
> + None => Vec::new(),
> + };
> + self.repo
> + .commit(
> + Some(&on_branch),
> + &author,
> + &author,
> + message,
> + &empty_tree,
> + &parents,
> + )
> + .map_err(anyhow::Error::from)
> + }
> +
> + /// Create a branch at `refs/heads/<branch>` which tracks the given remote.
> + /// The remote branch name depends on `from`.
> + ///
> + /// * If `from` is `WorkingCopy::Rad` then `refs/remotes/rad/<branch>`
> + /// * If `from` is `WorkingCopy::Peer(peer_id)` then `refs/remotes/<peer
> + /// id>/<branch>`
> + pub fn create_remote_tracking_branch(
> + &self,
> + from: WorkingRemote,
> + branch: &RefStr,
> + ) -> Result<(), anyhow::Error> {
> + let target = self
> + .repo
> + .find_reference(from.remote_ref(branch).as_str())?
> + .target()
> + .ok_or_else(|| anyhow::anyhow!("remote ref is not a direct reference"))?;
> + let commit = self.repo.find_commit(target)?;
> + self.repo.branch(branch.as_str(), &commit, false)?;
> + Ok(())
> + }
> +
> + /// Fast forward the local branch `refs/heads/<branch>` to whatever is
> + /// pointed to by `refs/remotes/<remote>/<branch>`
> + ///
> + /// * If `from` is `WorkingRemote::Peer(peer_id)` then `remote` is
> + /// `peer_id.encode_id()`
> + /// * If `from` is `WorkingRemote::Rad` then `remote` is `"rad"`
> + ///
> + /// # Errors
> + ///
> + /// * If the local branch does not exist
> + /// * If the remote branch does not exist
> + /// * If either of the branches does not point at a commit
> + /// * If the remote branch is not a descendant of the local branch
> + pub fn fast_forward_to(&self, from: WorkingRemote, branch: &RefStr) -> anyhow::Result<()> {
> + let remote_ref = from.remote_ref(branch);
> + let remote_target = self
> + .repo
> + .find_reference(&remote_ref)?
> + .target()
> + .ok_or_else(|| anyhow::anyhow!("remote ref had no target"))?;
> + let local_ref = RefString::from(Qualified::from(lit::refs_heads(branch)));
> + let local_target = self
> + .repo
> + .find_reference(&local_ref)?
> + .target()
> + .ok_or_else(|| anyhow::anyhow!("local ref had no target"))?;
> + if !self.repo.graph_descendant_of(remote_target, local_target)? {
> + anyhow::bail!("remote ref was not a descendant of local ref");
> + } else {
> + self.repo
> + .reference(&local_ref, remote_target, true, "fast forward")?;
> + }
> + Ok(())
> + }
> +
> + /// Create a new commit which merges `refs/heads/<branch>` and
> + /// `refs/remotes/<remote>/<branch>`
> + ///
> + /// this will create a new commit with two parents, one for the remote
> + /// branch and one for the local branch
> + ///
> + /// # Errors
> + ///
> + /// * If the remote branch does not exist
> + /// * If the local branch does not exist
> + /// * If either of the references does not point to a commit
> + pub fn merge_remote(&self, remote: PeerId, branch: &RefStr) -> anyhow::Result<git2::Oid> {
> + let peer_branch = WorkingRemote::Peer(remote).remote_ref(branch);
> + let peer_commit = self
> + .repo
> + .find_reference(&peer_branch.to_string())?
> + .peel_to_commit()?;
> + let local_branch = Qualified::from(lit::refs_heads(branch));
> + let local_commit = self
> + .repo
> + .find_reference(&local_branch.to_string())?
> + .peel_to_commit()?;
> +
> + let message = format!("merge {} into {}", peer_branch, local_branch);
> + let empty_tree = {
> + let mut index = self.repo.index()?;
> + let oid = index.write_tree()?;
> + self.repo.find_tree(oid).unwrap()
> + };
> + let author = git2::Signature::now("The Animal", "animal@muppets.com").unwrap();
> + let parents = vec![&peer_commit, &local_commit];
> + self.repo
> + .commit(
> + Some(&local_branch),
> + &author,
> + &author,
> + &message,
> + &empty_tree,
> + &parents,
> + )
> + .map_err(anyhow::Error::from)
> + }
> +}
> --
> 2.36.1
Re: [PATCH v1 1/4] Add default_branch_head and set_default_branch
> +#[derive(Debug)]
> +struct History {
> + tip: git2::Oid,
> + tip_peers: BTreeSet<PeerId>,
> + ancestor_peers: BTreeSet<PeerId>,
> + ancestors: BTreeSet<git2::Oid>,
> +}
> +
> +impl History {
> + fn load(
> + storage: &storage::Storage,
> + peer: PeerId,
> + tip: git2::Oid,
> + ) -> Result<History, storage::Error> {
nit: Result<Self, storage::Error>
> + let repo = storage.as_raw();
> + let mut walk = repo.revwalk()?;
> + walk.set_sorting(git2::Sort::TOPOLOGICAL)?;
> + walk.push(tip)?;
> + let mut ancestors = walk.collect::<Result<BTreeSet<git2::Oid>, _>>()?;
> + ancestors.remove(&tip);
> + let mut peers = BTreeSet::new();
> + peers.insert(peer);
> + let mut tip_peers = BTreeSet::new();
> + tip_peers.insert(peer);
> + Ok(History {
nit: Ok(Self {
> + tip,
> + tip_peers,
> + ancestors,
> + ancestor_peers: BTreeSet::new(),
> + })
> + }
> +}
Re: [PATCH v1 2/4] lnk-identities: update path logic and set up include
On Fri May 27, 2022 at 6:10 PM IST, Alex Good wrote:
> The existing logic for checking out an identity in lnk-identities places
> the checked out repository in `<selected path>/<identity name>` where
> `selected path` is either the working directory or a specified
> directory. This is not usually what people expect when checking out a
> repository. Here we modify the logic so that if a directory is not
> specified then we place the checked out repository in `$PWD/<identity
> name>` but if the directory is specified then we place the checkout in
> the specified directory directly.
>
> While we're here we implement some missing logic to set the include path
> in the newly created or updated repository.
>
> Signed-off-by: Alex Good <alex@memoryandthought.me>
> ---
> cli/lnk-identities/Cargo.toml | 3 +
> cli/lnk-identities/src/cli/args.rs | 10 +--
> cli/lnk-identities/src/cli/eval/person.rs | 7 +-
> cli/lnk-identities/src/cli/eval/project.rs | 7 +-
> cli/lnk-identities/src/git/checkout.rs | 70 +++++++++++--------
> cli/lnk-identities/src/git/existing.rs | 16 +----
> cli/lnk-identities/src/git/new.rs | 20 ++----
> cli/lnk-identities/src/identity_dir.rs | 38 ++++++++++
> cli/lnk-identities/src/lib.rs | 1 +
> cli/lnk-identities/src/person.rs | 5 +-
> cli/lnk-identities/src/project.rs | 32 ++++-----
> .../t/src/tests/git/checkout.rs | 5 +-
> .../t/src/tests/git/existing.rs | 6 +-
> cli/lnk-identities/t/src/tests/git/new.rs | 5 +-
> 14 files changed, 130 insertions(+), 95 deletions(-)
> create mode 100644 cli/lnk-identities/src/identity_dir.rs
>
> diff --git a/cli/lnk-identities/Cargo.toml b/cli/lnk-identities/Cargo.toml
> index 1ee6bafb..b6a42736 100644
> --- a/cli/lnk-identities/Cargo.toml
> +++ b/cli/lnk-identities/Cargo.toml
> @@ -45,6 +45,9 @@ default-features = false
> [dependencies.radicle-git-ext]
> path = "../../git-ext"
>
> +[dependencies.git-ref-format]
> +path = "../../git-ref-format"
> +
> [dependencies.radicle-std-ext]
> path = "../../std-ext"
>
> diff --git a/cli/lnk-identities/src/cli/args.rs b/cli/lnk-identities/src/cli/args.rs
> index 7ad234ca..1a281256 100644
> --- a/cli/lnk-identities/src/cli/args.rs
> +++ b/cli/lnk-identities/src/cli/args.rs
> @@ -255,9 +255,10 @@ pub mod project {
> #[clap(long)]
> pub urn: Urn,
>
> - /// the location for creating the working copy in
> + /// the location for creating the working copy in. If not specified will
> + /// clone into <working directory>/<identity name>
> #[clap(long)]
> - pub path: PathBuf,
> + pub path: Option<PathBuf>,
>
> /// the peer for which the initial working copy is based off. Note that
> /// if this value is not provided, or the value that is provided is the
> @@ -360,7 +361,8 @@ pub mod person {
> #[clap(long, parse(try_from_str = direct_delegation))]
> pub delegations: Vec<PublicKey>,
>
> - /// the path where the working copy should be created
> + /// the path where the working copy should be created. If not specified
> + /// will clone into <working directory>/<identity name>
> #[clap(long)]
> pub path: Option<PathBuf>,
> }
> @@ -444,7 +446,7 @@ pub mod person {
>
> /// the location for creating the working copy in
> #[clap(long)]
> - pub path: PathBuf,
> + pub path: Option<PathBuf>,
>
> /// the peer for which the initial working copy is based off. Note that
> /// if this value is not provided, or the value that is provided is the
> diff --git a/cli/lnk-identities/src/cli/eval/person.rs b/cli/lnk-identities/src/cli/eval/person.rs
> index 8eaa54b0..78791b4a 100644
> --- a/cli/lnk-identities/src/cli/eval/person.rs
> +++ b/cli/lnk-identities/src/cli/eval/person.rs
> @@ -24,7 +24,7 @@ use lnk_clib::{
> storage::{self, ssh},
> };
>
> -use crate::{cli::args::person::*, display, person};
> +use crate::{cli::args::person::*, display, identity_dir::IdentityDir, person};
>
> pub fn eval(profile: &Profile, sock: SshAuthSock, opts: Options) -> anyhow::Result<()> {
> match opts {
> @@ -138,12 +138,13 @@ fn eval_checkout(
> profile: &Profile,
> sock: SshAuthSock,
> urn: Urn,
> - path: PathBuf,
> + path: Option<PathBuf>,
> peer: Option<PeerId>,
> ) -> anyhow::Result<()> {
> let paths = profile.paths();
> let (signer, storage) = ssh::storage(profile, sock)?;
> - let repo = person::checkout(&storage, paths.clone(), signer, &urn, peer, path)?;
> + let checkout_path = IdentityDir::at_or_current_dir(path)?;
> + let repo = person::checkout(&storage, paths.clone(), signer, &urn, peer, checkout_path)?;
> println!("working copy created at `{}`", repo.path().display());
> Ok(())
> }
> diff --git a/cli/lnk-identities/src/cli/eval/project.rs b/cli/lnk-identities/src/cli/eval/project.rs
> index b5ae1636..455c0d72 100644
> --- a/cli/lnk-identities/src/cli/eval/project.rs
> +++ b/cli/lnk-identities/src/cli/eval/project.rs
> @@ -26,7 +26,7 @@ use lnk_clib::{
> storage::{self, ssh},
> };
>
> -use crate::{cli::args::project::*, display, project};
> +use crate::{cli::args::project::*, display, identity_dir::IdentityDir, project};
>
> pub fn eval(profile: &Profile, sock: SshAuthSock, opts: Options) -> anyhow::Result<()> {
> match opts {
> @@ -143,12 +143,13 @@ fn eval_checkout(
> profile: &Profile,
> sock: SshAuthSock,
> urn: Urn,
> - path: PathBuf,
> + path: Option<PathBuf>,
> peer: Option<PeerId>,
> ) -> anyhow::Result<()> {
> let (signer, storage) = ssh::storage(profile, sock)?;
> let paths = profile.paths();
> - let repo = project::checkout(&storage, paths.clone(), signer, &urn, peer, path)?;
> + let checkout_path = IdentityDir::at_or_current_dir(path)?;
> + let repo = project::checkout(&storage, paths.clone(), signer, &urn, peer, checkout_path)?;
> println!("working copy created at `{}`", repo.path().display());
> Ok(())
> }
> diff --git a/cli/lnk-identities/src/git/checkout.rs b/cli/lnk-identities/src/git/checkout.rs
> index 5a949465..756020ee 100644
> --- a/cli/lnk-identities/src/git/checkout.rs
> +++ b/cli/lnk-identities/src/git/checkout.rs
> @@ -3,7 +3,7 @@
> // This file is part of radicle-link, distributed under the GPLv3 with Radicle
> // Linking Exception. For full terms see the included LICENSE file.
>
> -use std::{convert::TryFrom, ffi, path::PathBuf};
> +use std::{convert::TryFrom, path::PathBuf};
>
> use either::Either;
>
> @@ -21,13 +21,18 @@ use librad::{
> },
> },
> git_ext::{self, OneLevel, Qualified, RefLike},
> + paths::Paths,
> refspec_pattern,
> PeerId,
> };
>
> +use git_ref_format as ref_format;
> +
> use crate::{
> field::{HasBranch, HasName, HasUrn, MissingDefaultBranch},
> git,
> + git::include,
> + identity_dir::IdentityDir,
> };
>
> #[derive(Debug, thiserror::Error)]
> @@ -46,6 +51,15 @@ pub enum Error {
>
> #[error(transparent)]
> Transport(#[from] librad::git::local::transport::Error),
> +
> + #[error(transparent)]
> + Include(Box<include::Error>),
> +
> + #[error(transparent)]
> + SetInclude(#[from] librad::git::include::Error),
> +
> + #[error(transparent)]
> + OpenStorage(Box<dyn std::error::Error + Send + Sync + 'static>),
> }
>
> impl From<identities::Error> for Error {
> @@ -89,7 +103,9 @@ impl From<identities::Error> for Error {
> /// merge = refs/heads/master
> /// [include]
> /// path = /home/user/.config/radicle-link/git-includes/hwd1yrerzpjbmtshsqw6ajokqtqrwaswty6p7kfeer3yt1n76t46iqggzcr.inc
> +/// ```
> pub fn checkout<F, I>(
> + paths: &Paths,
> open_storage: F,
> identity: &I,
> from: Either<Local, Peer>,
> @@ -101,10 +117,20 @@ where
> let default_branch = identity.branch_or_die(identity.urn())?;
>
> let (repo, rad) = match from {
> - Either::Left(local) => local.checkout(open_storage)?,
> - Either::Right(peer) => peer.checkout(open_storage)?,
> + Either::Left(local) => local.checkout(open_storage.clone())?,
> + Either::Right(peer) => peer.checkout(open_storage.clone())?,
> };
>
> + {
> + let _box = open_storage
> + .open_storage()
> + .map_err(|e| Error::OpenStorage(e))?;
> + let storage = _box.as_ref();
> + let include_path = include::update(storage.as_ref(), paths, identity)
> + .map_err(|e| Error::Include(Box::new(e)))?;
> + librad::git::include::set_include_path(&repo, include_path)?;
> + }
I think we should just pass the `AsRef<Storage>` here, since we would
have a `Storage` in scope when calling this.
> +
> // Set configurations
> git::set_upstream(&repo, &rad, default_branch.clone())?;
> repo.set_head(Qualified::from(default_branch).as_str())
> @@ -123,7 +149,6 @@ impl Local {
> where
> I: HasName + HasUrn,
> {
> - let path = resolve_path(identity, path);
> Self {
> url: LocalUrl::from(identity.urn()),
> path,
> @@ -160,7 +185,6 @@ impl Peer {
> {
> let urn = identity.urn();
> let default_branch = identity.branch_or_die(urn.clone())?;
> - let path = resolve_path(identity, path);
> Ok(Self {
> url: LocalUrl::from(urn),
> remote,
> @@ -175,12 +199,16 @@ impl Peer {
> {
> let (person, peer) = self.remote;
> let handle = &person.subject().name;
> - let name =
> - RefLike::try_from(format!("{}@{}", handle, peer)).expect("failed to parse remote name");
>
> - let remote = Remote::new(self.url.clone(), name.clone()).with_fetchspecs(vec![Refspec {
> + let name = ref_format::RefString::try_from(format!("{}@{}", handle, peer))
> + .expect("handle and peer are reflike");
> + let dst = ref_format::RefString::from(ref_format::Qualified::from(
> + ref_format::lit::refs_remotes(name.clone()),
> + ))
> + .with_pattern(ref_format::refspec::STAR);
> + let remote = Remote::new(self.url.clone(), name).with_fetchspecs(vec![Refspec {
> src: Reference::heads(Flat, peer),
> - dst: GenericRef::heads(Flat, name),
> + dst,
> force: Force::True,
> }]);
>
> @@ -216,34 +244,14 @@ impl Peer {
> pub fn from_whom<I>(
> identity: &I,
> remote: Option<(Person, PeerId)>,
> - path: PathBuf,
> + path: IdentityDir,
> ) -> Result<Either<Local, Peer>, Error>
> where
> I: HasBranch + HasName + HasUrn,
> {
> + let path = path.resolve(identity.name());
> Ok(match remote {
> None => Either::Left(Local::new(identity, path)),
> Some(remote) => Either::Right(Peer::new(identity, remote, path)?),
> })
> }
> -
> -fn resolve_path<I>(identity: &I, path: PathBuf) -> PathBuf
> -where
> - I: HasName,
> -{
> - let name = identity.name();
> -
> - // Check if the path provided ends in the 'directory_name' provided. If not we
> - // create the full path to that name.
> - path.components()
> - .next_back()
> - .map_or(path.join(&**name), |destination| {
> - let destination: &ffi::OsStr = destination.as_ref();
> - let name: &ffi::OsStr = name.as_ref();
> - if destination == name {
> - path.to_path_buf()
> - } else {
> - path.join(name)
> - }
> - })
> -}
> diff --git a/cli/lnk-identities/src/git/existing.rs b/cli/lnk-identities/src/git/existing.rs
> index 702ec727..c828ab5a 100644
> --- a/cli/lnk-identities/src/git/existing.rs
> +++ b/cli/lnk-identities/src/git/existing.rs
> @@ -8,17 +8,13 @@ use std::{fmt, marker::PhantomData, path::PathBuf};
> use serde::{Deserialize, Serialize};
>
> use librad::{
> - canonical::Cstring,
> git::local::{transport::CanOpenStorage, url::LocalUrl},
> git_ext,
> std_ext::result::ResultExt as _,
> };
> use std_ext::Void;
>
> -use crate::{
> - field::{HasBranch, HasName},
> - git,
> -};
> +use crate::{field::HasBranch, git};
>
> #[derive(Debug, thiserror::Error)]
> pub enum Error {
> @@ -47,20 +43,13 @@ pub struct Existing<V, P> {
> valid: V,
> }
>
> -impl<V, P: HasName> Existing<V, P> {
> - pub fn name(&self) -> &Cstring {
> - self.payload.name()
> - }
> -}
> -
> type Invalid = PhantomData<Void>;
>
> -impl<P: HasName + HasBranch> Existing<Invalid, P> {
> +impl<P: HasBranch> Existing<Invalid, P> {
> pub fn new(payload: P, path: PathBuf) -> Self {
> // Note(finto): The current behaviour in Upstream is that an existing repository
> // is initialised with the suffix of the path is the name of the project.
> // Perhaps this should just be done upstream and no assumptions made here.
> - let path = path.join(payload.name().as_str());
The note can be removed now, right?
> Self {
> payload,
> path,
> @@ -116,6 +105,7 @@ impl<P: HasBranch> Existing<Valid, P> {
> );
> let _remote = git::validation::remote(&repo, &url)?;
> git::setup_remote(&repo, open_storage, url, &self.payload.branch_or_default())?;
> +
> Ok(repo)
> }
> }
> diff --git a/cli/lnk-identities/src/git/new.rs b/cli/lnk-identities/src/git/new.rs
> index 758af792..c8c620f6 100644
> --- a/cli/lnk-identities/src/git/new.rs
> +++ b/cli/lnk-identities/src/git/new.rs
> @@ -41,12 +41,6 @@ pub struct New<V, P> {
> valid: V,
> }
>
> -impl<V, P: HasName> New<V, P> {
> - pub fn path(&self) -> PathBuf {
> - self.path.join(self.payload.name().as_str())
> - }
> -}
> -
> pub type Invalid = PhantomData<Void>;
> pub type Valid = PhantomData<Void>;
>
> @@ -64,14 +58,12 @@ impl<P> New<Invalid, P> {
> where
> P: HasName,
> {
> - let repo_path = self.path();
> -
> - if repo_path.is_file() {
> - return Err(Error::AlreadyExists(repo_path));
> + if self.path.is_file() {
> + return Err(Error::AlreadyExists(self.path));
> }
>
> - if repo_path.exists() && repo_path.is_dir() && repo_path.read_dir()?.next().is_some() {
> - return Err(Error::AlreadyExists(repo_path));
> + if self.path.exists() && self.path.is_dir() && self.path.read_dir()?.next().is_some() {
> + return Err(Error::AlreadyExists(self.path));
> }
>
> Ok(Self {
> @@ -87,7 +79,7 @@ impl New<Valid, payload::ProjectPayload> {
> where
> F: CanOpenStorage + Clone + 'static,
> {
> - let path = self.path();
> + let path = self.path;
> let default = self.payload.branch_or_default();
> init(
> path,
> @@ -104,7 +96,7 @@ impl New<Valid, payload::PersonPayload> {
> where
> F: CanOpenStorage + Clone + 'static,
> {
> - let path = self.path();
> + let path = self.path;
> let default = self.payload.branch_or_default();
> init(path, default, &None, url, open_storage)
> }
> diff --git a/cli/lnk-identities/src/identity_dir.rs b/cli/lnk-identities/src/identity_dir.rs
> new file mode 100644
> index 00000000..bbf87bdb
> --- /dev/null
> +++ b/cli/lnk-identities/src/identity_dir.rs
> @@ -0,0 +1,38 @@
> +use std::path::{Path, PathBuf};
> +
> +/// Where to checkout or create an identity
> +pub enum IdentityDir {
> + /// A directory within this directory named after the identity
> + Within(PathBuf),
> + /// Directly at the given path, which must be a directory
> + At(PathBuf),
> +}
bikeshed: would this be better named `WorkingCopy` or something like that?
Bit of a nit, but I'd prefer if the data was given at construction,
instead of at `resolve`:
---
pub enum IdentityDir {
/// A directory within this `path` named after the `identity`
Within {
path: PathBuf,
identity: String,
},
/// Directly at the given path, which must be a directory
At(PathBuf),
}
impl IdentityDir {
pub fn new(at: Option<PathBuf>, identity: String) -> Result<Self, std::io::Error> {
match at {
Some(at) => Ok(Self::At(at)),
None => Ok(IdentityDir::Within {
path: std::env::current_dir()?,
identity,
})
}
}
}
---
Not sure if this plays well with how it's passed around now though.
> +
> +impl IdentityDir {
> + /// If `at` is `Some` then return `CheckoutPath::At(at)`, otherwise
> + /// `CheckoutPath::Within(current directory)`.
> + pub fn at_or_current_dir<P: AsRef<Path>>(at: Option<P>) -> Result<IdentityDir, std::io::Error> {
> + match at {
> + Some(p) => Ok(IdentityDir::At(p.as_ref().to_path_buf())),
> + None => Ok(IdentityDir::Within(std::env::current_dir()?)),
> + }
> + }
> +}
> +
> +impl std::fmt::Display for IdentityDir {
> + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
> + match self {
> + IdentityDir::At(p) => p.display().fmt(f),
> + IdentityDir::Within(p) => write!(f, "{}/<name>", p.display()),
> + }
> + }
> +}
> +
> +impl IdentityDir {
> + pub(crate) fn resolve(&self, identity_name: &str) -> PathBuf {
> + match self {
> + Self::At(p) => p.clone(),
> + Self::Within(p) => p.join(identity_name),
> + }
> + }
> +}
> diff --git a/cli/lnk-identities/src/lib.rs b/cli/lnk-identities/src/lib.rs
> index d3cdcfd3..4ca9e261 100644
> --- a/cli/lnk-identities/src/lib.rs
> +++ b/cli/lnk-identities/src/lib.rs
> @@ -15,6 +15,7 @@ use thiserror::Error;
> pub mod cli;
>
> pub mod any;
> +pub mod identity_dir;
> pub mod local;
> pub mod person;
> pub mod project;
> diff --git a/cli/lnk-identities/src/person.rs b/cli/lnk-identities/src/person.rs
> index df1b588e..8381f0ce 100644
> --- a/cli/lnk-identities/src/person.rs
> +++ b/cli/lnk-identities/src/person.rs
> @@ -27,6 +27,7 @@ use librad::{
> use crate::{
> display,
> git::{self, checkout, include},
> + identity_dir::IdentityDir,
> };
>
> pub type Display = display::Display<PersonPayload>;
> @@ -172,7 +173,7 @@ pub fn checkout<S>(
> signer: BoxedSigner,
> urn: &Urn,
> peer: Option<PeerId>,
> - path: PathBuf,
> + path: IdentityDir,
> ) -> Result<git2::Repository, Error>
> where
> S: AsRef<ReadOnly>,
> @@ -199,7 +200,7 @@ where
> paths: paths.clone(),
> signer,
> };
> - let repo = git::checkout::checkout(settings, &person, from)?;
> + let repo = git::checkout::checkout(&paths, settings, &person, from)?;
> include::update(&storage, &paths, &person)?;
> Ok(repo)
> }
> diff --git a/cli/lnk-identities/src/project.rs b/cli/lnk-identities/src/project.rs
> index 65dc76e2..07378708 100644
> --- a/cli/lnk-identities/src/project.rs
> +++ b/cli/lnk-identities/src/project.rs
> @@ -30,6 +30,7 @@ use librad::{
> use crate::{
> display,
> git::{self, checkout, include},
> + identity_dir::IdentityDir,
> MissingDefaultIdentity,
> };
>
> @@ -74,12 +75,6 @@ impl From<identities::Error> for Error {
> }
> }
>
> -impl From<include::Error> for Error {
> - fn from(err: include::Error) -> Self {
> - Self::Include(Box::new(err))
> - }
> -}
> -
> pub enum Creation {
> New { path: Option<PathBuf> },
> Existing { path: PathBuf },
> @@ -136,26 +131,32 @@ where
> signer,
> };
>
> - let project = match creation {
> + let (project, maybe_repo) = match creation {
> Creation::New { path } => {
> if let Some(path) = path {
> let valid = git::new::New::new(payload.clone(), path).validate()?;
> let project = project::create(storage, whoami, payload, delegations)?;
> - valid.init(url, settings)?;
> - project
> + let repo = valid.init(url, settings)?;
> + (project, Some(repo))
> } else {
> - project::create(storage, whoami, payload, delegations)?
> + (
> + project::create(storage, whoami, payload, delegations)?,
> + None,
> + )
> }
> },
> Creation::Existing { path } => {
> let valid = git::existing::Existing::new(payload.clone(), path).validate()?;
> let project = project::create(storage, whoami, payload, delegations)?;
> - valid.init(url, settings)?;
> - project
> + let repo = valid.init(url, settings)?;
> + (project, Some(repo))
> },
> };
>
> - include::update(storage, &paths, &project)?;
> + let include_path = include::update(storage, &paths, &project)?;
> + if let Some(repo) = maybe_repo {
> + librad::git::include::set_include_path(&repo, include_path)?;
> + }
Could this be done in the `valid.init` calls instead?
>
> Ok(project)
> }
> @@ -242,7 +243,7 @@ pub fn checkout<S>(
> signer: BoxedSigner,
> urn: &Urn,
> peer: Option<PeerId>,
> - path: PathBuf,
> + path: IdentityDir,
> ) -> Result<git2::Repository, Error>
> where
> S: AsRef<ReadOnly>,
> @@ -269,8 +270,7 @@ where
> paths: paths.clone(),
> signer,
> };
> - let repo = git::checkout::checkout(settings, &project, from)?;
> - include::update(&storage, &paths, &project)?;
> + let repo = git::checkout::checkout(&paths, settings, &project, from)?;
> Ok(repo)
> }
>
> diff --git a/cli/lnk-identities/t/src/tests/git/checkout.rs b/cli/lnk-identities/t/src/tests/git/checkout.rs
> index c4fa2642..c0e919f4 100644
> --- a/cli/lnk-identities/t/src/tests/git/checkout.rs
> +++ b/cli/lnk-identities/t/src/tests/git/checkout.rs
> @@ -48,7 +48,7 @@ fn local_checkout() -> anyhow::Result<()> {
> };
>
> let local = Local::new(&proj.project, temp.path().to_path_buf());
> - let repo = checkout(settings, &proj.project, Either::Left(local))?;
> + let repo = checkout(&paths, settings, &proj.project, Either::Left(local))?;
> let branch = proj.project.subject().default_branch.as_ref().unwrap();
> assert_head(&repo, branch)?;
> assert_remote(&repo, branch, &LocalUrl::from(proj.project.urn()))?;
> @@ -102,9 +102,10 @@ fn remote_checkout() {
> signer: peer2.signer().clone().into(),
> };
>
> + let paths = peer2.protocol_config().paths.clone();
> let remote = (proj.owner.clone(), peer1.peer_id());
> let peer = Peer::new(&proj.project, remote, temp.path().to_path_buf()).unwrap();
> - let repo = checkout(settings, &proj.project, Either::Right(peer)).unwrap();
> + let repo = checkout(&paths, settings, &proj.project, Either::Right(peer)).unwrap();
> let branch = proj.project.subject().default_branch.as_ref().unwrap();
> assert_head(&repo, branch).unwrap();
> assert_remote(&repo, branch, &LocalUrl::from(proj.project.urn())).unwrap();
> diff --git a/cli/lnk-identities/t/src/tests/git/existing.rs b/cli/lnk-identities/t/src/tests/git/existing.rs
> index 4bbea636..f3fbc2e8 100644
> --- a/cli/lnk-identities/t/src/tests/git/existing.rs
> +++ b/cli/lnk-identities/t/src/tests/git/existing.rs
> @@ -51,7 +51,7 @@ fn validation_path_is_not_a_repo() -> anyhow::Result<()> {
> fn validation_default_branch_is_missing() -> anyhow::Result<()> {
> let payload = TestProject::default_payload();
> let temp = tempdir()?;
> - let dir = temp.path().join(payload.name.as_str());
> + let dir = temp.path();
> let _repo = git2::Repository::init(dir)?;
> let existing = Existing::new(ProjectPayload::new(payload), temp.path().to_path_buf());
> let result = existing.validate();
> @@ -68,7 +68,7 @@ fn validation_default_branch_is_missing() -> anyhow::Result<()> {
> fn validation_different_remote_exists() -> anyhow::Result<()> {
> let payload = TestProject::default_payload();
> let temp = tempdir()?;
> - let dir = temp.path().join(payload.name.as_str());
> + let dir = temp.path();
> let _repo = {
> let branch = payload.default_branch.as_ref().unwrap();
> let mut opts = git2::RepositoryInitOptions::new();
> @@ -153,7 +153,7 @@ fn validation_remote_exists() -> anyhow::Result<()> {
> fn creation() -> anyhow::Result<()> {
> let payload = TestProject::default_payload();
> let temp = tempdir()?;
> - let dir = temp.path().join(payload.name.as_str());
> + let dir = temp.path();
> let _repo = {
> let branch = payload.default_branch.as_ref().unwrap();
> let mut opts = git2::RepositoryInitOptions::new();
> diff --git a/cli/lnk-identities/t/src/tests/git/new.rs b/cli/lnk-identities/t/src/tests/git/new.rs
> index dde7d44b..edb85d8b 100644
> --- a/cli/lnk-identities/t/src/tests/git/new.rs
> +++ b/cli/lnk-identities/t/src/tests/git/new.rs
> @@ -72,10 +72,7 @@ fn creation() -> anyhow::Result<()> {
> let branch = payload.default_branch.unwrap();
> assert_eq!(
> repo.path().canonicalize()?,
> - temp.path()
> - .join(payload.name.as_str())
> - .join(".git")
> - .canonicalize()?
> + temp.path().join(".git").canonicalize()?
> );
> assert_head(&repo, &branch)?;
> assert_remote(&repo, &branch, &url)?;
> --
> 2.36.1
Re: [PATCH v1 4/4] Add lnk clone
On Fri May 27, 2022 at 6:10 PM IST, Alex Good wrote:
> lnk clone first syncs the local monorepo state with configured seeds for
> the given URN, then checks out a working copy of the URN.
>
> If a peer ID is given `lnk clone` checks out the given peer's copy. If
> not, `lnk clone` will attempt to determine if there is a head the project
> delegates agree on and set `refs/namespaces/<urn>/HEAD` to this
> reference and then check this reference out to the working copy; if the
> delegates have forked, `lnk clone` will print an error message with
> information on which peers are pointing at what so the user can decide
> for themselves which peer to check out.
>
> Signed-of-by: Alex Good <alex@memoryandthought.me >
lol-wut?
> Signed-off-by: Alex Good <alex@memoryandthought.me >
> ---
> bins/Cargo.lock | 3 ++
> cli/lnk-exe/src/cli/args.rs | 1 +
> cli/lnk-sync/Cargo.toml | 10 ++++-
> cli/lnk-sync/src/cli/args.rs | 23 +++++++---
> cli/lnk-sync/src/cli/main.rs | 46 +++++++++++++++++--
> cli/lnk-sync/src/forked.rs | 87 ++++++++++++++++++++++++++++++++++++
> cli/lnk-sync/src/lib.rs | 3 ++
> 7 files changed, 161 insertions(+), 12 deletions(-)
> create mode 100644 cli/lnk-sync/src/forked.rs
>
> diff --git a/bins/Cargo.lock b/bins/Cargo.lock
> index 4cb8dd64..5f9b3f4d 100644
> --- a/bins/Cargo.lock
> +++ b/bins/Cargo.lock
> @@ -2289,6 +2289,7 @@ dependencies = [
> "anyhow",
> "clap",
> "either",
> + "git-ref-format",
> "git2",
> "lazy_static",
> "libgit2-sys",
> @@ -2349,10 +2350,12 @@ dependencies = [
> "either",
> "futures",
> "git-ref-format",
> + "git2",
> "librad",
> "link-async",
> "link-replication",
> "lnk-clib",
> + "lnk-identities",
> "serde",
> "serde_json",
> "thiserror",
> diff --git a/cli/lnk-exe/src/cli/args.rs b/cli/lnk-exe/src/cli/args.rs
> index 6bac13d9..494404de 100644
> --- a/cli/lnk-exe/src/cli/args.rs
> +++ b/cli/lnk-exe/src/cli/args.rs
> @@ -56,5 +56,6 @@ pub enum Command {
> /// Manage your Radicle profiles
> Profile(lnk_profile::cli::args::Args),
> /// Sync with your configured seeds
> + #[clap(flatten)]
> Sync(lnk_sync::cli::args::Args),
> }
> diff --git a/cli/lnk-sync/Cargo.toml b/cli/lnk-sync/Cargo.toml
> index 6e2b20ce..ff9dffed 100644
> --- a/cli/lnk-sync/Cargo.toml
> +++ b/cli/lnk-sync/Cargo.toml
> @@ -21,6 +21,11 @@ tracing = "0.1"
> version = "3.1"
> features = ["derive"]
>
> +[dependencies.git2]
> +version = "0.13.24"
> +default-features = false
> +features = ["vendored-libgit2"]
> +
> [dependencies.git-ref-format]
> path = "../../git-ref-format"
> features = ["serde"]
> @@ -38,10 +43,13 @@ path = "../../link-async"
> [dependencies.lnk-clib]
> path = "../lnk-clib"
>
> +[dependencies.lnk-identities]
> +path = "../lnk-identities"
> +
> [dependencies.serde]
> version = "1"
> features = ["derive"]
>
> [dependencies.tokio]
> version = "1.17"
> -features = ["rt"]
> \ No newline at end of file
> +features = ["rt"]
> diff --git a/cli/lnk-sync/src/cli/args.rs b/cli/lnk-sync/src/cli/args.rs
> index 38f054d0..4d338279 100644
> --- a/cli/lnk-sync/src/cli/args.rs
> +++ b/cli/lnk-sync/src/cli/args.rs
> @@ -1,15 +1,24 @@
> // Copyright © 2022 The Radicle Link Contributors
> // SPDX-License-Identifier: GPL-3.0-or-later
>
> -use clap::Parser;
> use librad::git::Urn;
>
> use crate::Mode;
>
> -#[derive(Clone, Debug, Parser)]
> -pub struct Args {
> - #[clap(long)]
> - pub urn: Urn,
> - #[clap(long, default_value_t)]
> - pub mode: Mode,
> +#[derive(Clone, Debug, clap::Subcommand)]
> +pub enum Args {
> + Sync {
> + #[clap(long)]
> + urn: Urn,
> + #[clap(long, default_value_t)]
> + mode: Mode,
> + },
> + Clone {
> + #[clap(long)]
> + urn: Urn,
> + #[clap(long)]
> + path: Option<std::path::PathBuf>,
nit: let's import this above.
> + #[clap(long)]
> + peer: Option<librad::PeerId>,
nit: same.
> + },
> }
I'm just seeing I never gave any documentation, whoops! Can we remedy
this now for both?
> diff --git a/cli/lnk-sync/src/cli/main.rs b/cli/lnk-sync/src/cli/main.rs
> index 1d2193e0..c51a0ce2 100644
> --- a/cli/lnk-sync/src/cli/main.rs
> +++ b/cli/lnk-sync/src/cli/main.rs
> @@ -3,9 +3,11 @@
>
> use std::sync::Arc;
>
> +use lnk_identities::identity_dir::IdentityDir;
> use tokio::runtime::Runtime;
>
> use librad::{
> + git::identities::project::heads,
> net::{
> self,
> peer::{client, Client},
> @@ -20,7 +22,7 @@ use lnk_clib::{
> seed::{self, Seeds},
> };
>
> -use crate::{cli::args::Args, sync};
> +use crate::{cli::args::Args, forked, sync};
>
> pub fn main(
> args: Args,
> @@ -48,7 +50,7 @@ pub fn main(
> user_storage: client::config::Storage::default(),
> network: Network::default(),
> };
> - let endpoint = quic::SendOnly::new(signer, Network::default()).await?;
> + let endpoint = quic::SendOnly::new(signer.clone(), Network::default()).await?;
> let client = Client::new(config, spawner, endpoint)?;
> let seeds = {
> let seeds_file = profile.paths().seeds_file();
> @@ -70,8 +72,44 @@ pub fn main(
>
> seeds
> };
> - let synced = sync(&client, args.urn, seeds, args.mode).await;
> - println!("{}", serde_json::to_string(&synced)?);
> + match args {
> + Args::Sync { urn, mode } => {
> + let synced = sync(&client, urn, seeds, mode).await;
> + println!("{}", serde_json::to_string(&synced)?);
> + },
> + Args::Clone { urn, path, peer } => {
> + let path = IdentityDir::at_or_current_dir(path)?;
> + println!("cloning urn {} into {}", urn, path);
> + println!("syncing monorepo with seeds");
> + sync(&client, urn.clone(), seeds, crate::Mode::Fetch).await;
> +
> + let storage = librad::git::Storage::open(paths, signer.clone())?;
> +
> + let vp = librad::git::identities::project::verify(&storage, &urn)?
> + .ok_or_else(|| anyhow::anyhow!("no such project"))?;
> +
> + if peer.is_none() {
> + match heads::set_default_head(&storage, vp) {
> + Ok(_) => {},
> + Err(heads::error::SetDefaultBranch::Forked(forks)) => {
> + let error = forked::ForkError::from_forked(&storage, forks);
> + println!("{}", error);
> + return Ok(());
> + },
> + Err(e) => anyhow::bail!("error setting HEAD for project: {}", e),
> + }
> + }
> + let repo = lnk_identities::project::checkout(
> + &storage,
> + paths.clone(),
> + signer,
> + &urn,
> + peer,
> + path,
> + )?;
> + println!("working copy created at `{}`", repo.path().display());
> + },
> + }
> Ok(())
> })
> }
> diff --git a/cli/lnk-sync/src/forked.rs b/cli/lnk-sync/src/forked.rs
> new file mode 100644
> index 00000000..11e4b43b
> --- /dev/null
> +++ b/cli/lnk-sync/src/forked.rs
> @@ -0,0 +1,87 @@
> +use std::collections::BTreeSet;
> +
> +use librad::git::{identities::project::heads, storage::ReadOnlyStorage};
> +
> +/// A nicely formatted error message describing the forks in a forked project
> +pub struct ForkError(Vec<ForkDescription>);
> +
> +impl ForkError {
> + pub(crate) fn from_forked<S>(storage: &S, forked: BTreeSet<heads::Fork>) -> Self
> + where
> + S: ReadOnlyStorage,
> + {
> + ForkError(
> + forked
> + .into_iter()
> + .map(|f| ForkDescription::from_fork(storage, f))
> + .collect(),
> + )
> + }
> +}
> +
> +impl std::fmt::Display for ForkError {
> + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
> + writeln!(f, "the delegates for this project have forked")?;
> + writeln!(f, "you must choose a specific peer to clone")?;
> + writeln!(f, "you can do this using the --peer <peer id> argument")?;
> + writeln!(f, "and one of the peers listed below")?;
> + writeln!(f)?;
> + writeln!(f, "There are {} different forks", self.0.len())?;
> + writeln!(f)?;
> + for fork in &self.0 {
> + fork.fmt(f)?;
> + writeln!(f)?;
> + }
> + Ok(())
> + }
> +}
> +
> +struct ForkDescription {
> + fork: heads::Fork,
> + tip_commit_message: Option<String>,
> +}
> +
> +impl ForkDescription {
> + fn from_fork<S>(storage: &S, fork: heads::Fork) -> Self
> + where
> + S: ReadOnlyStorage,
> + {
> + let tip = std::rc::Rc::new(fork.tip);
> + let tip_commit_message = storage
> + .find_object(&tip)
> + .ok()
> + .and_then(|o| o.and_then(|o| o.as_commit().map(|c| c.summary().map(|m| m.to_string()))))
> + .unwrap_or(None);
> + ForkDescription {
> + fork,
> + tip_commit_message,
> + }
> + }
> +}
> +
> +impl std::fmt::Display for ForkDescription {
> + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
> + writeln!(
> + f,
> + "{} peers pointing at {}",
> + self.fork.tip_peers.len(),
> + self.fork.tip
> + )?;
> + match &self.tip_commit_message {
> + Some(m) => {
> + writeln!(f, "Commit message:")?;
> + writeln!(f, " {}", m)?;
> + },
> + None => {
> + writeln!(f)?;
> + writeln!(f, "unable to determine commit message")?;
> + writeln!(f)?;
> + },
> + }
> + writeln!(f, "Peers:")?;
> + for peer in &self.fork.tip_peers {
> + writeln!(f, " {}", peer)?;
> + }
> + Ok(())
> + }
> +}
> diff --git a/cli/lnk-sync/src/lib.rs b/cli/lnk-sync/src/lib.rs
> index aa3d26fd..b9038ea1 100644
> --- a/cli/lnk-sync/src/lib.rs
> +++ b/cli/lnk-sync/src/lib.rs
> @@ -17,6 +17,7 @@ use librad::{
> use lnk_clib::seed::{Seed, Seeds};
>
> pub mod cli;
> +mod forked;
> pub mod replication;
> pub mod request_pull;
>
> @@ -146,3 +147,5 @@ where
> }
> syncs
> }
> +
> +pub async fn clone() {}
> --
> 2.36.1
Re: [PATCH v1 2/4] lnk-identities: update path logic and set up include
On 30/05/22 10:32am, Fintan Halpenny wrote:
> On Fri May 27, 2022 at 6:10 PM IST, Alex Good wrote:
> > diff --git a/cli/lnk-identities/src/git/checkout.rs b/cli/lnk-identities/src/git/checkout.rs
> > index 5a949465..756020ee 100644
> > --- a/cli/lnk-identities/src/git/checkout.rs
> > +++ b/cli/lnk-identities/src/git/checkout.rs
> > @@ -3,7 +3,7 @@
> > // This file is part of radicle-link, distributed under the GPLv3 with Radicle
> > // Linking Exception. For full terms see the included LICENSE file.
> >
> > -use std::{convert::TryFrom, ffi, path::PathBuf};
> > +use std::{convert::TryFrom, path::PathBuf};
> >
> > use either::Either;
> >
> > @@ -21,13 +21,18 @@ use librad::{
> > },
> > },
> > git_ext::{self, OneLevel, Qualified, RefLike},
> > + paths::Paths,
> > refspec_pattern,
> > PeerId,
> > };
> >
> > +use git_ref_format as ref_format;
> > +
> > use crate::{
> > field::{HasBranch, HasName, HasUrn, MissingDefaultBranch},
> > git,
> > + git::include,
> > + identity_dir::IdentityDir,
> > };
> >
> > #[derive(Debug, thiserror::Error)]
> > @@ -46,6 +51,15 @@ pub enum Error {
> >
> > #[error(transparent)]
> > Transport(#[from] librad::git::local::transport::Error),
> > +
> > + #[error(transparent)]
> > + Include(Box<include::Error>),
> > +
> > + #[error(transparent)]
> > + SetInclude(#[from] librad::git::include::Error),
> > +
> > + #[error(transparent)]
> > + OpenStorage(Box<dyn std::error::Error + Send + Sync + 'static>),
> > }
> >
> > impl From<identities::Error> for Error {
> > @@ -89,7 +103,9 @@ impl From<identities::Error> for Error {
> > /// merge = refs/heads/master
> > /// [include]
> > /// path = /home/user/.config/radicle-link/git-includes/hwd1yrerzpjbmtshsqw6ajokqtqrwaswty6p7kfeer3yt1n76t46iqggzcr.inc
> > +/// ```
> > pub fn checkout<F, I>(
> > + paths: &Paths,
> > open_storage: F,
> > identity: &I,
> > from: Either<Local, Peer>,
> > @@ -101,10 +117,20 @@ where
> > let default_branch = identity.branch_or_die(identity.urn())?;
> >
> > let (repo, rad) = match from {
> > - Either::Left(local) => local.checkout(open_storage)?,
> > - Either::Right(peer) => peer.checkout(open_storage)?,
> > + Either::Left(local) => local.checkout(open_storage.clone())?,
> > + Either::Right(peer) => peer.checkout(open_storage.clone())?,
> > };
> >
> > + {
> > + let _box = open_storage
> > + .open_storage()
> > + .map_err(|e| Error::OpenStorage(e))?;
> > + let storage = _box.as_ref();
> > + let include_path = include::update(storage.as_ref(), paths, identity)
> > + .map_err(|e| Error::Include(Box::new(e)))?;
> > + librad::git::include::set_include_path(&repo, include_path)?;
> > + }
>
> I think we should just pass the `AsRef<Storage>` here, since we would
> have a `Storage` in scope when calling this.
You mean as the argument to `checkout`? The difficulty with that would
be that the fetch logic in `Peer::checkout` and `Local::checkout` needs
a `CanOpenStorage`.
> > diff --git a/cli/lnk-identities/src/identity_dir.rs b/cli/lnk-identities/src/identity_dir.rs
> > new file mode 100644
> > index 00000000..bbf87bdb
> > --- /dev/null
> > +++ b/cli/lnk-identities/src/identity_dir.rs
> > @@ -0,0 +1,38 @@
> > +use std::path::{Path, PathBuf};
> > +
> > +/// Where to checkout or create an identity
> > +pub enum IdentityDir {
> > + /// A directory within this directory named after the identity
> > + Within(PathBuf),
> > + /// Directly at the given path, which must be a directory
> > + At(PathBuf),
> > +}
>
> bikeshed: would this be better named `WorkingCopy` or something like that?
`WorkingCopyDir` sounds good to me, you?
>
> Bit of a nit, but I'd prefer if the data was given at construction,
> instead of at `resolve`:
>
> ---
> pub enum IdentityDir {
> /// A directory within this `path` named after the `identity`
> Within {
> path: PathBuf,
> identity: String,
> },
> /// Directly at the given path, which must be a directory
> At(PathBuf),
> }
>
> impl IdentityDir {
> pub fn new(at: Option<PathBuf>, identity: String) -> Result<Self, std::io::Error> {
> match at {
> Some(at) => Ok(Self::At(at)),
> None => Ok(IdentityDir::Within {
> path: std::env::current_dir()?,
> identity,
> })
> }
> }
> }
> ---
>
> Not sure if this plays well with how it's passed around now though.
>
The main reason for the split as it stands is that the `IdentityDir` is
constructed before we have a verified identity to resolve the name of.
Doing resolution at construction time would mean restructuring quite a
bit of the checkout code to resolve the URN earlier on in the control
flow (we would need to know the identity in `{person,
project}::eval_checkout`). On that basis I left it in this slightly
awkward state. Happy to do more surgery to fix it if you think it's
worth it.
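To illustrate, the split as it currently stands looks roughly like this
(a sketch using the names from this patch; error handling elided):
---
// constructed up front, before we have a verified identity to take a name from
let dir = IdentityDir::at_or_current_dir(path)?;
// ... later, once the identity has been resolved and verified:
let checkout_path = dir.resolve(identity.name()); // either <path> or <cwd>/<name>
---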
> > diff --git a/cli/lnk-identities/src/project.rs b/cli/lnk-identities/src/project.rs
> > index 65dc76e2..07378708 100644
> > --- a/cli/lnk-identities/src/project.rs
> > +++ b/cli/lnk-identities/src/project.rs
> > @@ -30,6 +30,7 @@ use librad::{
> > use crate::{
> > display,
> > git::{self, checkout, include},
> > + identity_dir::IdentityDir,
> > MissingDefaultIdentity,
> > };
> >
> > @@ -74,12 +75,6 @@ impl From<identities::Error> for Error {
> > }
> > }
> >
> > -impl From<include::Error> for Error {
> > - fn from(err: include::Error) -> Self {
> > - Self::Include(Box::new(err))
> > - }
> > -}
> > -
> > pub enum Creation {
> > New { path: Option<PathBuf> },
> > Existing { path: PathBuf },
> > @@ -136,26 +131,32 @@ where
> > signer,
> > };
> >
> > - let project = match creation {
> > + let (project, maybe_repo) = match creation {
> > Creation::New { path } => {
> > if let Some(path) = path {
> > let valid = git::new::New::new(payload.clone(), path).validate()?;
> > let project = project::create(storage, whoami, payload, delegations)?;
> > - valid.init(url, settings)?;
> > - project
> > + let repo = valid.init(url, settings)?;
> > + (project, Some(repo))
> > } else {
> > - project::create(storage, whoami, payload, delegations)?
> > + (
> > + project::create(storage, whoami, payload, delegations)?,
> > + None,
> > + )
> > }
> > },
> > Creation::Existing { path } => {
> > let valid = git::existing::Existing::new(payload.clone(), path).validate()?;
> > let project = project::create(storage, whoami, payload, delegations)?;
> > - valid.init(url, settings)?;
> > - project
> > + let repo = valid.init(url, settings)?;
> > + (project, Some(repo))
> > },
> > };
> >
> > - include::update(storage, &paths, &project)?;
> > + let include_path = include::update(storage, &paths, &project)?;
> > + if let Some(repo) = maybe_repo {
> > + librad::git::include::set_include_path(&repo, include_path)?;
> > + }
>
> Could this be done in the `valid.init` calls instead?
>
Yes but then we end up with duplicated code because the `init` calls are
on separate types (i.e. `git::existing::Existing::init` and
`git::new::New::init`). I suppose we could pass a callback into `init`?
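Roughly what I have in mind, as a sketch only -- the closure parameter and
its bounds below are made up, they are not part of this patch:
---
pub fn init<F, G>(self, url: LocalUrl, open_storage: F, post: G) -> Result<git2::Repository, Error>
where
    F: CanOpenStorage + Clone + 'static,
    G: FnOnce(&git2::Repository) -> Result<(), Error>,
{
    let repo: git2::Repository = todo!("existing init logic, unchanged");
    // the caller-supplied hook runs once per init, so the include set-up
    // lives in one place instead of being repeated in New and Existing
    post(&repo)?;
    Ok(repo)
}
---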
Re: [PATCH v1 2/4] lnk-identities: update path logic and set up include
On Mon May 30, 2022 at 11:09 AM IST, Alex Good wrote:
> On 30/05/22 10:32am, Fintan Halpenny wrote:
> > On Fri May 27, 2022 at 6:10 PM IST, Alex Good wrote:
> > > diff --git a/cli/lnk-identities/src/git/checkout.rs b/cli/lnk-identities/src/git/checkout.rs
> > > index 5a949465..756020ee 100644
> > > --- a/cli/lnk-identities/src/git/checkout.rs
> > > +++ b/cli/lnk-identities/src/git/checkout.rs
> > > @@ -3,7 +3,7 @@
> > > // This file is part of radicle-link, distributed under the GPLv3 with Radicle
> > > // Linking Exception. For full terms see the included LICENSE file.
> > >
> > > -use std::{convert::TryFrom, ffi, path::PathBuf};
> > > +use std::{convert::TryFrom, path::PathBuf};
> > >
> > > use either::Either;
> > >
> > > @@ -21,13 +21,18 @@ use librad::{
> > > },
> > > },
> > > git_ext::{self, OneLevel, Qualified, RefLike},
> > > + paths::Paths,
> > > refspec_pattern,
> > > PeerId,
> > > };
> > >
> > > +use git_ref_format as ref_format;
> > > +
> > > use crate::{
> > > field::{HasBranch, HasName, HasUrn, MissingDefaultBranch},
> > > git,
> > > + git::include,
> > > + identity_dir::IdentityDir,
> > > };
> > >
> > > #[derive(Debug, thiserror::Error)]
> > > @@ -46,6 +51,15 @@ pub enum Error {
> > >
> > > #[error(transparent)]
> > > Transport(#[from] librad::git::local::transport::Error),
> > > +
> > > + #[error(transparent)]
> > > + Include(Box<include::Error>),
> > > +
> > > + #[error(transparent)]
> > > + SetInclude(#[from] librad::git::include::Error),
> > > +
> > > + #[error(transparent)]
> > > + OpenStorage(Box<dyn std::error::Error + Send + Sync + 'static>),
> > > }
> > >
> > > impl From<identities::Error> for Error {
> > > @@ -89,7 +103,9 @@ impl From<identities::Error> for Error {
> > > /// merge = refs/heads/master
> > > /// [include]
> > > /// path = /home/user/.config/radicle-link/git-includes/hwd1yrerzpjbmtshsqw6ajokqtqrwaswty6p7kfeer3yt1n76t46iqggzcr.inc
> > > +/// ```
> > > pub fn checkout<F, I>(
> > > + paths: &Paths,
> > > open_storage: F,
> > > identity: &I,
> > > from: Either<Local, Peer>,
> > > @@ -101,10 +117,20 @@ where
> > > let default_branch = identity.branch_or_die(identity.urn())?;
> > >
> > > let (repo, rad) = match from {
> > > - Either::Left(local) => local.checkout(open_storage)?,
> > > - Either::Right(peer) => peer.checkout(open_storage)?,
> > > + Either::Left(local) => local.checkout(open_storage.clone())?,
> > > + Either::Right(peer) => peer.checkout(open_storage.clone())?,
> > > };
> > >
> > > + {
> > > + let _box = open_storage
> > > + .open_storage()
> > > + .map_err(|e| Error::OpenStorage(e))?;
> > > + let storage = _box.as_ref();
> > > + let include_path = include::update(storage.as_ref(), paths, identity)
> > > + .map_err(|e| Error::Include(Box::new(e)))?;
> > > + librad::git::include::set_include_path(&repo, include_path)?;
> > > + }
> >
> > I think we should just pass the `AsRef<Storage>` here, since we would
> > have a `Storage` in scope when calling this.
>
> You mean as the argument to `checkout`? The difficulty with that would
> be that the fetch logic in `Peer::checkout` and `Local::checkout` needs
> a `CanOpenStorage`.
Ya, so we could pass `Storage` and `open_storage`.
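i.e. something along these lines -- a sketch only, the exact bound and
parameter order are illustrative:
---
pub fn checkout<S, F, I>(
    storage: &S,       // already-open storage, used for the include update
    paths: &Paths,
    open_storage: F,   // still needed by Local::checkout / Peer::checkout for fetching
    identity: &I,
    from: Either<Local, Peer>,
) -> Result<git2::Repository, Error>
where
    S: AsRef<Storage>,
    F: CanOpenStorage + Clone + 'static,
    I: HasBranch + HasName + HasUrn,
{
    // body as in the patch, except include::update can use `storage`
    // directly instead of opening storage a second time
    todo!()
}
---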
>
> > > diff --git a/cli/lnk-identities/src/identity_dir.rs b/cli/lnk-identities/src/identity_dir.rs
> > > new file mode 100644
> > > index 00000000..bbf87bdb
> > > --- /dev/null
> > > +++ b/cli/lnk-identities/src/identity_dir.rs
> > > @@ -0,0 +1,38 @@
> > > +use std::path::{Path, PathBuf};
> > > +
> > > +/// Where to checkout or create an identity
> > > +pub enum IdentityDir {
> > > + /// A directory within this directory named after the identity
> > > + Within(PathBuf),
> > > + /// Directly at the given path, which must be a directory
> > > + At(PathBuf),
> > > +}
> >
> > bikeshed: would this be better named `WorkingCopy` or something like that?
>
> `WorkingCopyDir` sounds good to me, you?
Perfect!
>
> >
> > Bit of a nit, but I'd prefer if the data was given at construction,
> > instead of at `resolve`:
> >
> > ---
> > pub enum IdentityDir {
> > /// A directory within this `path` named after the `identity`
> > Within {
> > path: PathBuf,
> > identity: String,
> > },
> > /// Directly at the given path, which must be a directory
> > At(PathBuf),
> > }
> >
> > impl IdentityDir {
> > pub fn new(at: Option<PathBuf>, identity: String) -> Result<Self, std::io::Error> {
> > match at {
> > Some(at) => Ok(Self::At(at)),
> > None => Ok(IdentityDir::Within {
> > path: std::env::current_dir()?,
> > identity,
> > })
> > }
> > }
> > }
> > ---
> >
> > Not sure if this plays well with how it's passed around now though.
> >
>
> The main reason for the split as it stands is that the `IdentityDir` is
> constructed before we have a verified identity to resolve the name of.
> Doing resolution at construction time would mean restructuring quite a
> bit of the checkout code to resolve the URN earlier on in the control
> flow (we would need to know the identity in `{person,
> project}::eval_checkout`). On that basis I left it in this slightly
> awkward state. Happy to do more surgery to fix it if you think it's
> worth it.
Nah, that's fine. I thought that might have been the case :)
>
> > > diff --git a/cli/lnk-identities/src/project.rs b/cli/lnk-identities/src/project.rs
> > > index 65dc76e2..07378708 100644
> > > --- a/cli/lnk-identities/src/project.rs
> > > +++ b/cli/lnk-identities/src/project.rs
> > > @@ -30,6 +30,7 @@ use librad::{
> > > use crate::{
> > > display,
> > > git::{self, checkout, include},
> > > + identity_dir::IdentityDir,
> > > MissingDefaultIdentity,
> > > };
> > >
> > > @@ -74,12 +75,6 @@ impl From<identities::Error> for Error {
> > > }
> > > }
> > >
> > > -impl From<include::Error> for Error {
> > > - fn from(err: include::Error) -> Self {
> > > - Self::Include(Box::new(err))
> > > - }
> > > -}
> > > -
> > > pub enum Creation {
> > > New { path: Option<PathBuf> },
> > > Existing { path: PathBuf },
> > > @@ -136,26 +131,32 @@ where
> > > signer,
> > > };
> > >
> > > - let project = match creation {
> > > + let (project, maybe_repo) = match creation {
> > > Creation::New { path } => {
> > > if let Some(path) = path {
> > > let valid = git::new::New::new(payload.clone(), path).validate()?;
> > > let project = project::create(storage, whoami, payload, delegations)?;
> > > - valid.init(url, settings)?;
> > > - project
> > > + let repo = valid.init(url, settings)?;
> > > + (project, Some(repo))
> > > } else {
> > > - project::create(storage, whoami, payload, delegations)?
> > > + (
> > > + project::create(storage, whoami, payload, delegations)?,
> > > + None,
> > > + )
> > > }
> > > },
> > > Creation::Existing { path } => {
> > > let valid = git::existing::Existing::new(payload.clone(), path).validate()?;
> > > let project = project::create(storage, whoami, payload, delegations)?;
> > > - valid.init(url, settings)?;
> > > - project
> > > + let repo = valid.init(url, settings)?;
> > > + (project, Some(repo))
> > > },
> > > };
> > >
> > > - include::update(storage, &paths, &project)?;
> > > + let include_path = include::update(storage, &paths, &project)?;
> > > + if let Some(repo) = maybe_repo {
> > > + librad::git::include::set_include_path(&repo, include_path)?;
> > > + }
> >
> > Could this be done in the `valid.init` calls instead?
> >
>
> Yes but then we end up with duplicated code because the `init` calls are
> on separate types (i.e. `git::existing::Existing::init` and
> `git::new::New::init`). I suppose we could pass a callback into `init`?
I don't think I'd mind the duplication in that case. But also fine to
keep this as is!
Re: [PATCH v1 3/4] Make gitd accept the URL format of include files
Awesome, one thought around setting the `HEAD` from my perspective:
Currently, from what I understand, the latest commit amongst delegates
is the one chosen as the HEAD commit for the project. I'd propose that we eventually
also allow the caller to specify a threshold of delegates that should "agree"
on this commit (ie. it's in their published history). This could constitute
a sort of quorum, and would be more representative of the intent of the delegate
group than a single delegate. It would also prevent attacks where a single
delegate is able to patch `HEAD` without consent from other delegates.
The `HEAD` would then be set according to this threshold, which could either be
the same threshold that is used to find quorum when updating a project identity,
or a separate configurable threshold that is only used for the code. For projects
with lots of delegates, it might make sense to only require a small subset to
push a commit, to reduce coordination.
Just wanted to mention this as this is the strategy we've planned to use on our
own git bridge, though it hasn't been implemented yet, and we'd benefit from
aligning on this.
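For concreteness, the kind of check I have in mind -- purely a sketch, the
helper name and signature are made up here:
---
/// Does `candidate` have at least `threshold` delegates "agreeing", i.e. is
/// the candidate commit at, or in the ancestry of, their published tip?
fn has_quorum(
    repo: &git2::Repository,
    delegate_tips: &[git2::Oid],
    candidate: git2::Oid,
    threshold: usize,
) -> Result<bool, git2::Error> {
    let mut agreeing = 0;
    for tip in delegate_tips {
        if *tip == candidate || repo.graph_descendant_of(*tip, candidate)? {
            agreeing += 1;
        }
    }
    Ok(agreeing >= threshold)
}
---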
------- Original Message -------
On Monday, May 30th, 2022 at 17:41, Alex Good <alex@memoryandthought.me > wrote:
>
> The include files generated by librad create remotes with URLs of the
> form `rad://rad:git:<base32-z multihash>.git`. Modify gitd to accept URLs with
> a path component of `rad:git:<base32-z multihash>.git`. This allows
> using git's `url.rad.insteadOf` config to point all such URLs at a local
> `gitd`.
>
> Signed-off-by: Alex Good <alex@memoryandthought.me>
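(For anyone wiring this up locally: the idea is that setting git's
`url.<gitd address>.insteadOf = rad://` -- the gitd address being wherever
your gitd is listening, e.g. an ssh:// URL -- rewrites a remote like
`rad://rad:git:<base32-z multihash>.git` to
`<gitd address>rad:git:<base32-z multihash>.git`, whose path component is
exactly the form gitd now accepts.)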
>
> ---
> cli/gitd-lib/src/git_subprocess.rs | 12 +++---
> cli/gitd-lib/src/git_subprocess/command.rs | 12 +++---
> cli/gitd-lib/src/lib.rs | 1 +
> cli/gitd-lib/src/processes.rs | 20 +++++----
> cli/gitd-lib/src/server.rs | 5 +--
> cli/gitd-lib/src/ssh_service.rs | 50 ++++++++++++++++++++++
> 6 files changed, 77 insertions(+), 23 deletions(-)
> create mode 100644 cli/gitd-lib/src/ssh_service.rs
>
> diff --git a/cli/gitd-lib/src/git_subprocess.rs b/cli/gitd-lib/src/git_subprocess.rs
> index 27fedb11..731db465 100644
> --- a/cli/gitd-lib/src/git_subprocess.rs
> +++ b/cli/gitd-lib/src/git_subprocess.rs
> @@ -20,13 +20,13 @@ use tokio::{
> process::Child,
> };
>
> -use librad::git::{storage, Urn};
> +use librad::git::storage;
> use link_async::Spawner;
> -use link_git::service::SshService;
>
> use crate::{
> hooks::{self, Hooks},
> processes::ProcessReply,
> + ssh_service,
> };
>
> pub mod command;
> @@ -51,7 +51,7 @@ pub(crate) async fn run_git_subprocess<Replier, S>(
> pool: Arc<storage::Pool<storage::Storage>>,
> incoming: tokio::sync::mpsc::Receiver<Message>,
> mut out: Replier,
> - service: SshService<Urn>,
> + service: ssh_service::SshService,
> hooks: Hooks<S>,
> ) -> Result<(), Error<Replier::Error>>
> where
> @@ -74,7 +74,7 @@ async fn run_git_subprocess_inner<Replier, S>(
> pool: Arc<storage::Pool<storage::Storage>>,
> mut incoming: tokio::sync::mpsc::Receiver<Message>,
> out: &mut Replier,
> - service: SshService<Urn>,
> + service: ssh_service::SshService,
> hooks: Hooks<S>,
> ) -> Result<(), Error<Replier::Error>>
> where
> @@ -87,7 +87,7 @@ where
>
> if service.is_upload() {
> match hooks
> - .pre_upload(&mut progress_reporter, service.path.clone())
> + .pre_upload(&mut progress_reporter, service.path.clone().into())
> .await
> {
> Ok(()) => {},
>
> @@ -260,7 +260,7 @@ where
> // Run hooks
> if service.service == GitService::ReceivePack.into() {
> if let Err(e) = hooks
> - .post_receive(&mut progress_reporter, service.path.clone())
> + .post_receive(&mut progress_reporter, service.path.into())
> .await
> {
> match e {
> diff --git a/cli/gitd-lib/src/git_subprocess/command.rs b/cli/gitd-lib/src/git_subprocess/command.rs
> index 0c89b70e..215d2263 100644
> --- a/cli/gitd-lib/src/git_subprocess/command.rs
> +++ b/cli/gitd-lib/src/git_subprocess/command.rs
> @@ -16,9 +16,10 @@ use librad::{
> },
> reflike,
> };
> -use link_git::service::SshService;
> use radicle_git_ext as ext;
>
> +use crate::ssh_service;
> +
> #[derive(thiserror::Error, Debug)]
> pub enum Error {
> #[error("no such URN {0}")]
> @@ -44,13 +45,14 @@ pub enum Error {
> // crate.
> pub(super) fn create_command(
> storage: &storage::Storage,
> - service: SshService<Urn>,
>
> + service: ssh_service::SshService,
> ) -> Result<tokio::process::Command, Error> {
>
> - guard_has_urn(storage, &service.path)?;
> + let urn = service.path.into();
> + guard_has_urn(storage, &urn)?;
>
> let mut git = tokio::process::Command::new("git");
> git.current_dir(&storage.path()).args(&[
> - &format!("--namespace={}", Namespace::from(&service.path)),
> + &format!("--namespace={}", Namespace::from(&urn)),
> "-c",
> "transfer.hiderefs=refs/remotes",
> "-c",
> @@ -62,7 +64,7 @@ pub(super) fn create_command(
> match service.service.0 {
> GitService::UploadPack | GitService::UploadPackLs => {
>
> // Fetching remotes is ok, pushing is not
> - visible_remotes(storage, &service.path)?.for_each(|remote_ref| {
> + visible_remotes(storage, &urn)?.for_each(|remote_ref| {
> git.arg("-c")
> .arg(format!("uploadpack.hiderefs=!^{}", remote_ref));
> });
> diff --git a/cli/gitd-lib/src/lib.rs b/cli/gitd-lib/src/lib.rs
> index a4461f97..9163016e 100644
> --- a/cli/gitd-lib/src/lib.rs
> +++ b/cli/gitd-lib/src/lib.rs
> @@ -31,6 +31,7 @@ pub mod git_subprocess;
> pub mod hooks;
> mod processes;
> mod server;
> +mod ssh_service;
>
> #[derive(thiserror::Error, Debug)]
> pub enum RunError {
> diff --git a/cli/gitd-lib/src/processes.rs b/cli/gitd-lib/src/processes.rs
> index da40593e..2324d9f9 100644
> --- a/cli/gitd-lib/src/processes.rs
> +++ b/cli/gitd-lib/src/processes.rs
> @@ -25,15 +25,11 @@ use futures::{
> stream::{FuturesUnordered, StreamExt},
> FutureExt,
> };
> -use librad::git::{
> - storage::{pool::Pool, Storage},
> - Urn,
> -};
> +use librad::git::storage::{pool::Pool, Storage};
> use link_async::{Spawner, Task};
> -use link_git::service::SshService;
> use tracing::instrument;
>
> -use crate::{git_subprocess, hooks::Hooks};
> +use crate::{git_subprocess, hooks::Hooks, ssh_service};
>
> const MAX_IN_FLIGHT_GITS: usize = 10;
>
> @@ -69,7 +65,7 @@ enum Message<Id> {
>
> /// sent on a separate channel, which allows us to exert backpressure on
> /// incoming exec requests.
> struct ExecGit<Id, Reply, Signer> {
>
> - service: SshService<Urn>,
>
> + service: ssh_service::SshService,
> channel: Id,
> handle: Reply,
> hooks: Hooks<Signer>,
>
> @@ -112,7 +108,7 @@ where
> &self,
> channel: Id,
> handle: Reply,
> - service: SshService<Urn>,
>
> + service: ssh_service::SshService,
> hooks: Hooks<Signer>,
>
> ) -> Result<(), ProcessesLoopGone> {
>
> self.exec_git_send
> @@ -215,7 +211,13 @@ where
> }
>
> #[instrument(skip(self, handle, hooks))]
> - fn exec_git(&mut self, id: Id, handle: Reply, service: SshService<Urn>, hooks: Hooks<S>) {
>
> + fn exec_git(
> + &mut self,
> + id: Id,
> + handle: Reply,
> + service: ssh_service::SshService,
> + hooks: Hooks<S>,
>
> + ) {
> let (tx, rx) = tokio::sync::mpsc::channel(1);
> let task = self.spawner.spawn({
> let spawner = self.spawner.clone();
> diff --git a/cli/gitd-lib/src/server.rs b/cli/gitd-lib/src/server.rs
> index 87019468..0cfb5120 100644
> --- a/cli/gitd-lib/src/server.rs
> +++ b/cli/gitd-lib/src/server.rs
> @@ -13,9 +13,8 @@ use rand::Rng;
> use tokio::net::{TcpListener, TcpStream};
> use tracing::instrument;
>
> -use librad::{git::Urn, PeerId};
> +use librad::PeerId;
> use link_async::{incoming::TcpListenerExt, Spawner};
> -use link_git::service;
>
> use crate::{
> hooks::Hooks,
> @@ -268,7 +267,7 @@ where
> ) -> Self::FutureUnit {
>
> let exec_str = String::from_utf8_lossy(data);
> tracing::debug!(?exec_str, "received exec_request");
> - let ssh_service: service::SshService<Urn> = match exec_str.parse() {
>
> + let ssh_service: crate::ssh_service::SshService = match exec_str.parse() {
> Ok(s) => s,
>
> Err(e) => {
>
> tracing::error!(err=?e, ?exec_str, "unable to parse exec str for exec_request");
> diff --git a/cli/gitd-lib/src/ssh_service.rs b/cli/gitd-lib/src/ssh_service.rs
> new file mode 100644
> index 00000000..4a6ec444
> --- /dev/null
> +++ b/cli/gitd-lib/src/ssh_service.rs
> @@ -0,0 +1,50 @@
> +use std::str::FromStr;
> +
> +use librad::{git::Urn, git_ext};
> +
> +/// A wrapper around Urn which parses strings of the form "rad:git:<id>.git",
>
> +/// this is used as the path parameter of `link_git::SshService`.
> +#[derive(Debug, Clone)]
> +pub(crate) struct UrnPath(Urn);
> +
> +pub(crate) type SshService = link_git::service::SshService<UrnPath>;
>
> +
> +#[derive(thiserror::Error, Debug)]
> +pub(crate) enum Error {
> + #[error("path component of remote should end with '.git'")]
> + MissingSuffix,
> + #[error(transparent)]
> + Urn(#[from] librad::identities::urn::error::FromStr<git_ext::oid::FromMultihashError>),
>
> +}
> +
> +impl std::fmt::Display for UrnPath {
> + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
>
> + write!(f, "{}.git", self.0)
> + }
> +}
> +
> +impl AsRef<Urn> for UrnPath {
>
> + fn as_ref(&self) -> &Urn {
>
> + &self.0
> + }
> +}
> +
> +impl FromStr for UrnPath {
> + type Err = Error;
> +
> + fn from_str(s: &str) -> Result<Self, Self::Err> {
>
> + match s.strip_suffix(".git") {
> + Some(prefix) => {
>
> + let urn = Urn::from_str(prefix)?;
> + Ok(Self(urn))
> + },
> + None => Err(Error::MissingSuffix),
>
> + }
> + }
> +}
> +
> +impl From<UrnPath> for Urn {
>
> + fn from(u: UrnPath) -> Self {
>
> + u.0
> + }
> +}
> --
> 2.36.1
Re: [PATCH v1 3/4] Make gitd accept the URL format of include files
On Mon May 30, 2022 at 4:53 PM IST, Alexis Sellier wrote:
> Awesome, one thought around setting the `HEAD` from my perspective:
>
> Currently, from what I understand, the latest commit amongst delegates
> is the one chosen as the HEAD commit for the project. I'd propose that we eventually
> also allow the caller to specify a threshold of delegates that should "agree"
> on this commit (ie. it's in their published history). This could constitute
> a sort of quorum, and would be more representative of the intent of the delegate
> group than a single delegate. It would also prevent attacks where a single
> delegate is able to patch `HEAD` without consent from other delegates.
>
> The `HEAD` would then be set according to this threshold, which could either be
> the same threshold that is used to find quorum when updating a project identity,
> or a separate configurable threshold that is only used for the code. For projects
> with lots of delegates, it might make sense to only require a small subset to
> push a commit, to reduce coordination.
>
> Just wanted to mention this as this is the strategy we've planned to use on our
> own git bridge, though it hasn't been implemented yet, and we'd benefit from
> aligning on this.
If we were to support this, I think the best way would be to provide
the computation of histories and allow the caller to choose how they
threshold it.
So at the moment, it's implemented so that in one case you get back
that every delegate agreed with the latest commit -- where agreement
means that all the tips for each delegate are in the ancestry path of
each other delegate's tip. In the other case, you get back a series of
forked histories.
My thinking is that in the latter case we could have some adjacency
list that tells you which latest commits agree with each other for a
delegate -- I'm not sure if that's the right structure though.
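To make "agreement" concrete, the check is along these lines (sketch only,
not the literal code in the patch):
---
// tips agree if they all lie on a single history: for every pair, one is
// equal to, or an ancestor of, the other
fn tips_agree(repo: &git2::Repository, tips: &[git2::Oid]) -> Result<bool, git2::Error> {
    for (i, a) in tips.iter().enumerate() {
        for b in &tips[i + 1..] {
            let related = a == b
                || repo.graph_descendant_of(*a, *b)?
                || repo.graph_descendant_of(*b, *a)?;
            if !related {
                return Ok(false);
            }
        }
    }
    Ok(true)
}
---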
===
On another note, if the HEAD becomes a symref, then when would this be
updated and how would we prevent the attack Alexis mentioned above?
[PATCH v2 0/4] lnk-clone
Changes from v1:
* Default branch head
  * Add tracing::warn! if default branch commit not found for peer
  * Add `TestProject::create_with_payload` to simplify project setup
  * Add `TestProject::maintainers` to simplify setting up multi-delegate projects
  * Use `Self` instead of `History` in `librad::git::identities::project::heads::History::load`
* update path logic
  * remove note in cli/lnk-identities/src/git/existing.rs
  * rename `IdentityDir` -> `WorkingCopyDir`
  * Add a `AsRef<ReadOnly>` argument to `lnk_identities::git::checkout::checkout` to avoid having to open storage twice
* add lnk-clone
  * remove stray `pub async fn clone() {}`
  * Fix SOB
  * Add docs for CLI args
Published-At: https://github.com/alexjg/radicle-link/tree/patches/lnk-clone/v2
Alex Good (4):
Add default_branch_head and set_default_branch
lnk-identities: update path logic and set up include
Make gitd accept the URL format of include files
Add lnk clone
bins/Cargo.lock | 3 +
cli/gitd-lib/src/git_subprocess.rs | 12 +-
cli/gitd-lib/src/git_subprocess/command.rs | 12 +-
cli/gitd-lib/src/lib.rs | 1 +
cli/gitd-lib/src/processes.rs | 20 +-
cli/gitd-lib/src/server.rs | 5 +-
cli/gitd-lib/src/ssh_service.rs | 50 +++
cli/lnk-exe/src/cli/args.rs | 1 +
cli/lnk-identities/Cargo.toml | 3 +
cli/lnk-identities/src/cli/args.rs | 10 +-
cli/lnk-identities/src/cli/eval/person.rs | 7 +-
cli/lnk-identities/src/cli/eval/project.rs | 7 +-
cli/lnk-identities/src/git/checkout.rs | 65 ++--
cli/lnk-identities/src/git/existing.rs | 19 +-
cli/lnk-identities/src/git/new.rs | 20 +-
cli/lnk-identities/src/lib.rs | 1 +
cli/lnk-identities/src/person.rs | 5 +-
cli/lnk-identities/src/project.rs | 32 +-
cli/lnk-identities/src/working_copy_dir.rs | 40 +++
.../t/src/tests/git/checkout.rs | 18 +-
.../t/src/tests/git/existing.rs | 6 +-
cli/lnk-identities/t/src/tests/git/new.rs | 5 +-
cli/lnk-sync/Cargo.toml | 10 +-
cli/lnk-sync/src/cli/args.rs | 50 ++-
cli/lnk-sync/src/cli/main.rs | 46 ++-
cli/lnk-sync/src/forked.rs | 87 +++++
cli/lnk-sync/src/lib.rs | 1 +
librad/src/git/identities/project.rs | 2 +
librad/src/git/identities/project/heads.rs | 312 ++++++++++++++++++
librad/t/src/integration/scenario.rs | 1 +
.../scenario/default_branch_head.rs | 286 ++++++++++++++++
test/it-helpers/Cargo.toml | 4 +
test/it-helpers/src/fixed.rs | 2 +-
test/it-helpers/src/fixed/project.rs | 150 ++++++++-
test/it-helpers/src/lib.rs | 1 +
test/it-helpers/src/testnet.rs | 16 +-
test/it-helpers/src/working_copy.rs | 291 ++++++++++++++++
37 files changed, 1466 insertions(+), 135 deletions(-)
create mode 100644 cli/gitd-lib/src/ssh_service.rs
create mode 100644 cli/lnk-identities/src/working_copy_dir.rs
create mode 100644 cli/lnk-sync/src/forked.rs
create mode 100644 librad/src/git/identities/project/heads.rs
create mode 100644 librad/t/src/integration/scenario/default_branch_head.rs
create mode 100644 test/it-helpers/src/working_copy.rs
--
2.36.1
[PATCH v2 1/4] Add default_branch_head and set_default_branch
When checking out projects from the monorepo it is useful to set the
`refs/namespaces/<urn>/HEAD` reference to the default branch of the
project so that the resulting working copy is in a useful state (namely
pointing at the latest commit for the default branch).
In general this is not possible because delegates may have diverging
views of the project, but often they do not disagree. Add
`librad::git::identities::project::heads::default_branch_head` to
determine if there is an agreed on default branch commit and
`librad::git::identities::project::heads::set_default_branch` to set the
local `HEAD` ref where possible.
Signed-off-by: Alex Good <alex@memoryandthought.me>
---
librad/src/git/identities/project.rs | 2 +
librad/src/git/identities/project/heads.rs | 312 ++++++++++++++++++
librad/t/src/integration/scenario.rs | 1 +
.../scenario/default_branch_head.rs | 286 ++++++++++++++++
test/it-helpers/Cargo.toml | 4 +
test/it-helpers/src/fixed.rs | 2 +-
test/it-helpers/src/fixed/project.rs | 150 ++++++++-
test/it-helpers/src/lib.rs | 1 +
test/it-helpers/src/testnet.rs | 16 +-
test/it-helpers/src/working_copy.rs | 291 ++++++++++++++++
10 files changed, 1062 insertions(+), 3 deletions(-)
create mode 100644 librad/src/git/identities/project/heads.rs
create mode 100644 librad/t/src/integration/scenario/default_branch_head.rs
create mode 100644 test/it-helpers/src/working_copy.rs
diff --git a/librad/src/git/identities/project.rs b/librad/src/git/identities/project.rs
index 753358bf..ed9f003d 100644
--- a/librad/src/git/identities/project.rs
+++ b/librad/src/git/identities/project.rs
@@ -8,6 +8,8 @@ use std::{convert::TryFrom, fmt::Debug};
use either::Either;
use git_ext::{is_not_found_err, OneLevel};
+ pub mod heads;
+
use super::{
super::{
refs::Refs as Sigrefs,
diff --git a/librad/src/git/identities/project/heads.rs b/librad/src/git/identities/project/heads.rs
new file mode 100644
index 00000000..7f1e912e
--- /dev/null
+++ b/librad/src/git/identities/project/heads.rs
@@ -0,0 +1,312 @@
+ use std::{collections::BTreeSet, convert::TryFrom, fmt::Debug};
+
+ use crate::{
+ git::{
+ storage::{self, ReadOnlyStorage},
+ Urn,
+ },
+ identities::git::VerifiedProject,
+ PeerId,
+ };
+ use git_ext::RefLike;
+ use git_ref_format::{lit, name, Namespaced, Qualified, RefStr, RefString};
+
+ #[derive(Clone, Debug, PartialEq)]
+ pub enum DefaultBranchHead {
+ /// Not all delegates agreed on an ancestry tree. Each set of diverging
+ /// delegates is included as a `Fork`
+ Forked(BTreeSet<Fork>),
+ /// All the delegates agreed on an ancestry tree
+ Head {
+ /// The most recent commit for the tree
+ target: git2::Oid,
+ /// The branch name which is the default branch
+ branch: RefString,
+ },
+ }
+
+ #[derive(Clone, Debug, std::hash::Hash, PartialEq, Eq, PartialOrd, Ord)]
+ pub struct Fork {
+ /// Peers which are in the ancestry set of this fork but not the tips. This
+ /// means that these peers can appear in multiple forks
+ pub ancestor_peers: BTreeSet<PeerId>,
+ /// The peers pointing at the tip of this fork
+ pub tip_peers: BTreeSet<PeerId>,
+ /// The most recent tip
+ pub tip: git2::Oid,
+ }
+
+ pub mod error {
+ use git_ref_format as ref_format;
+ use std::collections::BTreeSet;
+
+ use crate::git::storage::read;
+
+ #[derive(thiserror::Error, Debug)]
+ pub enum FindDefaultBranch {
+ #[error("the project payload does not define a default branch")]
+ NoDefaultBranch,
+ #[error("no peers had published anything for the default branch")]
+ NoTips,
+ #[error(transparent)]
+ RefFormat(#[from] ref_format::Error),
+ #[error(transparent)]
+ Read(#[from] read::Error),
+ }
+
+ #[derive(thiserror::Error, Debug)]
+ pub enum SetDefaultBranch {
+ #[error(transparent)]
+ Find(#[from] FindDefaultBranch),
+ #[error(transparent)]
+ Git(#[from] git2::Error),
+ #[error("the delegates have forked")]
+ Forked(BTreeSet<super::Fork>),
+ }
+ }
+
+ /// Find the head of the default branch of `project`
+ ///
+ /// In general there can be a different view of the default branch of a project
+ /// for each peer ID of each delegate and there is no reason that these would
+ /// all be compatible. It's quite possible that two peers publish entirely
+ /// unrelated ancestry trees for a given branch. In this case this function will
+ /// return [`DefaultBranchHead::Forked`].
+ ///
+ /// However, often it's the case that delegates do agree on an ancestry tree for
+ /// a particular branch and the difference between peers is just that some are
+ /// ahead of others. In this case this function will return
+ /// [`DefaultBranchHead::Head`].
+ ///
+ /// # Errors
+ ///
+ /// * If the project contains no default branch definition
+ /// * If no peer has published anything for the default branch
+ pub fn default_branch_head(
+ storage: &storage::Storage,
+ project: VerifiedProject,
+ ) -> Result<DefaultBranchHead, error::FindDefaultBranch> {
+ if let Some(default_branch) = &project.payload().subject.default_branch {
+ let local = storage.peer_id();
+ let branch_refstring = RefString::try_from(default_branch.to_string())?;
+ let mut multiverse = Multiverse::new(branch_refstring.clone());
+ let peers =
+ project
+ .delegations()
+ .into_iter()
+ .flat_map(|d| -> Box<dyn Iterator<Item = PeerId>> {
+ use either::Either::*;
+ match d {
+ Left(key) => Box::new(std::iter::once(PeerId::from(*key))),
+ Right(person) => Box::new(
+ person
+ .delegations()
+ .into_iter()
+ .map(|key| PeerId::from(*key)),
+ ),
+ }
+ });
+ for peer_id in peers {
+ let tip = peer_commit(storage, project.urn(), peer_id, local, &branch_refstring)?;
+ if let Some(tip) = tip {
+ multiverse.add_peer(storage, peer_id, tip)?;
+ } else {
+ tracing::warn!(%peer_id, %default_branch, "no default branch commit found for peer");
+ }
+ }
+ multiverse.finish()
+ } else {
+ Err(error::FindDefaultBranch::NoDefaultBranch)
+ }
+ }
+
+ /// Determine the default branch for a project and set the local HEAD to this
+ /// branch
+ ///
+ /// In more detail, this function determines the local head using
+ /// [`default_branch_head`] and then sets the following references to the
+ /// `DefaultBranchHead::target` returned:
+ ///
+ /// * `refs/namespaces/<URN>/refs/HEAD`
+ /// * `refs/namespaces/<URN>/refs/heads/<default branch name>`
+ ///
+ /// # Why do this?
+ ///
+ /// When cloning from a namespace representing a project to a working copy we
+ /// would like, if possible, to omit the specification of which particular peer
+ /// we want to clone. Specifically we would like to clone
+ /// `refs/namespaces/<URN>/`. This does work, but the working copy we end up
+ /// with does not have any contents because git uses `refs/HEAD` of the source
+ /// repository to figure out what branch to set the new working copy to.
+ /// Therefore, by setting `refs/HEAD` and `refs/<default branch name>` of the
+ /// namespace, `git clone` (and any other clone-based workflows) does something
+ /// sensible and we end up with a working copy pointing at the default
+ /// branch of the project.
+ ///
+ /// # Errors
+ ///
+ /// * If no default branch could be determined
+ pub fn set_default_head(
+ storage: &storage::Storage,
+ project: VerifiedProject,
+ ) -> Result<git2::Oid, error::SetDefaultBranch> {
+ let urn = project.urn();
+ let default_head = default_branch_head(storage, project)?;
+ match default_head {
+ DefaultBranchHead::Head { target, branch } => {
+ // Note that we can't use `Namespaced` because `refs/HEAD` is not a `Qualified`
+ let head =
+ RefString::try_from(format!("refs/namespaces/{}/refs/HEAD", urn.encode_id()))
+ .expect("urn is valid namespace");
+ let branch_head = Namespaced::from(lit::refs_namespaces(
+ &urn,
+ Qualified::from(lit::refs_heads(branch)),
+ ));
+
+ let repo = storage.as_raw();
+ repo.reference(
+ &branch_head.clone().into_qualified(),
+ target,
+ true,
+ "set default branch head",
+ )?;
+ repo.reference_symbolic(head.as_str(), branch_head.as_str(), true, "set head")?;
+ Ok(target)
+ },
+ DefaultBranchHead::Forked(forks) => Err(error::SetDefaultBranch::Forked(forks)),
+ }
+ }
+
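+ /// Find the commit `branch` points at for `peer_id` within the namespace of
+ /// `urn`. For the local peer this looks at `refs/heads/<branch>`, for remote
+ /// peers at `refs/remotes/<peer id>/heads/<branch>`. Returns `Ok(None)` if
+ /// the reference does not exist.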
+ fn peer_commit(
+ storage: &storage::Storage,
+ urn: Urn,
+ peer_id: PeerId,
+ local: &PeerId,
+ branch: &RefStr,
+ ) -> Result<Option<git2::Oid>, error::FindDefaultBranch> {
+ let remote_name = RefString::try_from(peer_id.default_encoding())?;
+ let reference = if local == &peer_id {
+ RefString::from(Qualified::from(lit::refs_heads(branch)))
+ } else {
+ RefString::from(Qualified::from(lit::refs_remotes(remote_name)))
+ .join(name::HEADS)
+ .join(branch)
+ };
+ let urn = urn.with_path(Some(RefLike::from(reference)));
+ let tip = storage.tip(&urn, git2::ObjectType::Commit)?;
+ Ok(tip.map(|c| c.id()))
+ }
+
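+ /// The distinct histories seen so far for the default branch, built up one
+ /// peer at a time via `add_peer`. If exactly one history remains when
+ /// `finish` is called the delegates agree and we get a
+ /// [`DefaultBranchHead::Head`], otherwise each remaining history becomes a
+ /// [`Fork`].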
+ #[derive(Debug)]
+ struct Multiverse {
+ branch: RefString,
+ histories: Vec<History>,
+ }
+
+ impl Multiverse {
+ fn new(branch: RefString) -> Multiverse {
+ Multiverse {
+ branch,
+ histories: Vec::new(),
+ }
+ }
+
+ fn add_peer(
+ &mut self,
+ storage: &storage::Storage,
+ peer: PeerId,
+ tip: git2::Oid,
+ ) -> Result<(), error::FindDefaultBranch> {
+ // If this peer's tip is in the ancestors of any existing histories then we just
+ // add the peer to those histories
+ let mut found_descendant = false;
+ for history in &mut self.histories {
+ if history.ancestors.contains(&tip) {
+ found_descendant = true;
+ history.ancestor_peers.insert(peer);
+ } else if history.tip == tip {
+ found_descendant = true;
+ history.tip_peers.insert(peer);
+ }
+ }
+ if found_descendant {
+ return Ok(());
+ }
+
+ // Otherwise we load a new history
+ let mut history = History::load(storage, peer, tip)?;
+
+ // Then we go through existing histories and check if any of them are ancestors
+ // of the new history. If they are then we incorporate them as ancestors
+ // of the new history and remove them from the multiverse
+ let mut i = 0;
+ while i < self.histories.len() {
+ let other_history = &self.histories[i];
+ if history.ancestors.contains(&other_history.tip) {
+ let other_history = self.histories.remove(i);
+ history.ancestor_peers.extend(other_history.ancestor_peers);
+ history.ancestor_peers.extend(other_history.tip_peers);
+ } else {
+ i += 1;
+ }
+ }
+ self.histories.push(history);
+
+ Ok(())
+ }
+
+ fn finish(self) -> Result<DefaultBranchHead, error::FindDefaultBranch> {
+ if self.histories.is_empty() {
+ Err(error::FindDefaultBranch::NoTips)
+ } else if self.histories.len() == 1 {
+ Ok(DefaultBranchHead::Head {
+ target: self.histories[0].tip,
+ branch: self.branch,
+ })
+ } else {
+ Ok(DefaultBranchHead::Forked(
+ self.histories
+ .into_iter()
+ .map(|h| Fork {
+ ancestor_peers: h.ancestor_peers,
+ tip_peers: h.tip_peers,
+ tip: h.tip,
+ })
+ .collect(),
+ ))
+ }
+ }
+ }
+
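+ /// A single ancestry tree: its tip, the peers whose default branch points at
+ /// the tip, the peers whose tips are ancestors of it, and the set of ancestor
+ /// commits of the tip.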
+ #[derive(Debug)]
+ struct History {
+ tip: git2::Oid,
+ tip_peers: BTreeSet<PeerId>,
+ ancestor_peers: BTreeSet<PeerId>,
+ ancestors: BTreeSet<git2::Oid>,
+ }
+
+ impl History {
+ fn load(
+ storage: &storage::Storage,
+ peer: PeerId,
+ tip: git2::Oid,
+ ) -> Result<Self, storage::Error> {
+ let repo = storage.as_raw();
+ let mut walk = repo.revwalk()?;
+ walk.set_sorting(git2::Sort::TOPOLOGICAL)?;
+ walk.push(tip)?;
+ let mut ancestors = walk.collect::<Result<BTreeSet<git2::Oid>, _>>()?;
+ ancestors.remove(&tip);
+ let mut tip_peers = BTreeSet::new();
+ tip_peers.insert(peer);
+ Ok(Self {
+ tip,
+ tip_peers,
+ ancestors,
+ ancestor_peers: BTreeSet::new(),
+ })
+ }
+ }
diff --git a/librad/t/src/integration/scenario.rs b/librad/t/src/integration/scenario.rs
index 9bfdd2ad..c47720a0 100644
--- a/librad/t/src/integration/scenario.rs
+++ b/librad/t/src/integration/scenario.rs
@@ -5,6 +5,7 @@
mod collaboration;
mod collaborative_objects;
+ mod default_branch_head;
mod menage;
mod passive_replication;
#[cfg(feature = "replication-v3")]
diff --git a/librad/t/src/integration/scenario/default_branch_head.rs b/librad/t/src/integration/scenario/default_branch_head.rs
new file mode 100644
index 00000000..0dcb6578
--- /dev/null
+++ b/librad/t/src/integration/scenario/default_branch_head.rs
@@ -0,0 +1,286 @@
+ // Copyright © 2019-2020 The Radicle Foundation <hello@radicle.foundation>
+ //
+ // This file is part of radicle-link, distributed under the GPLv3 with Radicle
+ // Linking Exception. For full terms see the included LICENSE file.
+
+ use std::{convert::TryFrom, ops::Index as _};
+
+ use tempfile::tempdir;
+
+ use git_ref_format::{lit, name, Namespaced, Qualified, RefString};
+ use it_helpers::{
+ fixed::{TestPerson, TestProject},
+ testnet::{self, RunningTestPeer},
+ working_copy::{WorkingCopy, WorkingRemote as Remote},
+ };
+ use librad::git::{
+ identities::{self, local, project::heads},
+ storage::ReadOnlyStorage,
+ };
+ use link_identities::payload;
+ use test_helpers::logging;
+
+ fn config() -> testnet::Config {
+ testnet::Config {
+ num_peers: nonzero!(2usize),
+ min_connected: 2,
+ bootstrap: testnet::Bootstrap::from_env(),
+ }
+ }
+
+ /// This test checks that the logic of `librad::git::identities::project::heads`
+ /// is correct. To do this we need to set up various scenarios where the
+ /// delegates of a project agree or disagree on the default branch of a project.
+ #[test]
+ fn default_branch_head() {
+ logging::init();
+
+ let net = testnet::run(config()).unwrap();
+ net.enter(async {
+ // Setup a testnet with two peers
+ let peer1 = net.peers().index(0);
+ let peer2 = net.peers().index(1);
+
+ // Create an identity on peer2
+ let peer2_id = peer2
+ .using_storage::<_, anyhow::Result<TestPerson>>(|s| {
+ let person = TestPerson::create(s)?;
+ let local = local::load(s, person.owner.urn()).unwrap();
+ s.config()?.set_user(local)?;
+ Ok(person)
+ })
+ .await
+ .unwrap()
+ .unwrap();
+
+ peer2_id.pull(peer2, peer1).await.unwrap();
+
+ // Create a project on peer1
+ let proj = peer1
+ .using_storage(|s| {
+ TestProject::create_with_payload(
+ s,
+ payload::Project {
+ name: "venus".into(),
+ description: None,
+ default_branch: Some(name::MASTER.to_string().into()),
+ },
+ )
+ })
+ .await
+ .unwrap()
+ .unwrap();
+
+ // Add peer2 as a maintainer
+ proj.maintainers(peer1)
+ .add(&peer2_id, peer2)
+ .setup()
+ .await
+ .unwrap();
+
+ //// Okay, now we have a running testnet with two Peers, each of which has a
+ //// `Person` who is a delegate on the `TestProject`
+
+ // Create a commit on peer1 and pull it into peer2. On peer2, fetch it,
+ // create a new commit on top of it and pull that back into peer1. Then on
+ // peer1, fetch, fast forward, and push.
+ let tmp = tempdir().unwrap();
+ let tip = {
+ let mut working_copy1 =
+ WorkingCopy::new(&proj, tmp.path().join("peer1"), peer1).unwrap();
+ let mut working_copy2 =
+ WorkingCopy::new(&proj, tmp.path().join("peer2"), peer2).unwrap();
+
+ let mastor = Qualified::from(lit::refs_heads(name::MASTER));
+ working_copy1
+ .commit("peer 1 initial", mastor.clone())
+ .unwrap();
+ working_copy1.push().unwrap();
+ proj.pull(peer1, peer2).await.unwrap();
+
+ working_copy2.fetch(Remote::Peer(peer1.peer_id())).unwrap();
+ working_copy2
+ .create_remote_tracking_branch(Remote::Peer(peer1.peer_id()), name::MASTER)
+ .unwrap();
+ let tip = working_copy2
+ .commit("peer 2 initial", mastor.clone())
+ .unwrap();
+ working_copy2.push().unwrap();
+ proj.pull(peer2, peer1).await.unwrap();
+
+ working_copy1.fetch(Remote::Peer(peer2.peer_id())).unwrap();
+ working_copy1
+ .fast_forward_to(Remote::Peer(peer2.peer_id()), name::MASTER)
+ .unwrap();
+ working_copy1.push().unwrap();
+ tip
+ };
+
+ let default_branch = branch_head(peer1, &proj).await.unwrap();
+ // The two peers should have the same view of the default branch
+ assert_eq!(
+ default_branch,
+ identities::project::heads::DefaultBranchHead::Head {
+ target: tip,
+ branch: name::MASTER.to_owned(),
+ }
+ );
+
+ // now update peer1 and push to peer1's monorepo; we should get the tip of peer1
+ // as the head (because peer2 can be fast forwarded)
+ let tmp = tempdir().unwrap();
+ let tip = {
+ let mut working_copy1 =
+ WorkingCopy::new(&proj, tmp.path().join("peer1"), peer1).unwrap();
+ working_copy1
+ .create_remote_tracking_branch(Remote::Rad, name::MASTER)
+ .unwrap();
+
+ let mastor = Qualified::from(lit::refs_heads(name::MASTER));
+ let tip = working_copy1.commit("peer 1 fork", mastor.clone()).unwrap();
+ working_copy1.push().unwrap();
+
+ tip
+ };
+
+ let default_branch_peer1 = branch_head(peer1, &proj).await.unwrap();
+ assert_eq!(
+ default_branch_peer1,
+ identities::project::heads::DefaultBranchHead::Head {
+ target: tip,
+ branch: name::MASTER.to_owned(),
+ }
+ );
+
+ // now create an alternate commit on peer2 and sync with peer1; on peer1 we
+ // should get a fork
+ let tmp = tempdir().unwrap();
+ let forked_tip = {
+ let mut working_copy2 =
+ WorkingCopy::new(&proj, tmp.path().join("peer2"), peer2).unwrap();
+ working_copy2
+ .create_remote_tracking_branch(Remote::Rad, name::MASTER)
+ .unwrap();
+
+ let mastor = Qualified::from(lit::refs_heads(name::MASTER));
+ let forked_tip = working_copy2.commit("peer 2 fork", mastor.clone()).unwrap();
+ working_copy2.push().unwrap();
+
+ forked_tip
+ };
+
+ proj.pull(peer2, peer1).await.unwrap();
+
+ let default_branch_peer1 = branch_head(peer1, &proj).await.unwrap();
+ assert_eq!(
+ default_branch_peer1,
+ identities::project::heads::DefaultBranchHead::Forked(
+ vec![
+ identities::project::heads::Fork {
+ ancestor_peers: std::collections::BTreeSet::new(),
+ tip_peers: std::iter::once(peer1.peer_id()).collect(),
+ tip,
+ },
+ identities::project::heads::Fork {
+ ancestor_peers: std::collections::BTreeSet::new(),
+ tip_peers: std::iter::once(peer2.peer_id()).collect(),
+ tip: forked_tip,
+ }
+ ]
+ .into_iter()
+ .collect()
+ )
+ );
+
+ // now update peer1 to match peer2
+ let tmp = tempdir().unwrap();
+ let fixed_tip = {
+ let mut working_copy1 =
+ WorkingCopy::new(&proj, tmp.path().join("peer1"), peer1).unwrap();
+ working_copy1.fetch(Remote::Peer(peer2.peer_id())).unwrap();
+ working_copy1
+ .create_remote_tracking_branch(Remote::Peer(peer2.peer_id()), name::MASTER)
+ .unwrap();
+
+ working_copy1.fetch(Remote::Peer(peer2.peer_id())).unwrap();
+ let tip = working_copy1
+ .merge_remote(peer2.peer_id(), name::MASTER)
+ .unwrap();
+ working_copy1.push().unwrap();
+ tip
+ };
+
+ let default_branch_peer1 = branch_head(peer1, &proj).await.unwrap();
+ assert_eq!(
+ default_branch_peer1,
+ identities::project::heads::DefaultBranchHead::Head {
+ target: fixed_tip,
+ branch: name::MASTER.to_owned(),
+ }
+ );
+
+ // now set the head in the monorepo and check that the HEAD reference exists
+ let updated_tip = peer1
+ .using_storage::<_, anyhow::Result<_>>({
+ let urn = proj.project.urn();
+ move |s| {
+ let vp = identities::project::verify(s, &urn)?.ok_or_else(|| {
+ anyhow::anyhow!("failed to get project for default branch")
+ })?;
+ identities::project::heads::set_default_head(s, vp).map_err(anyhow::Error::from)
+ }
+ })
+ .await
+ .unwrap()
+ .unwrap();
+ assert_eq!(updated_tip, fixed_tip);
+
+ let head_ref = RefString::try_from(format!(
+ "refs/namespaces/{}/refs/HEAD",
+ proj.project.urn().encode_id()
+ ))
+ .unwrap();
+ let master_ref = Namespaced::from(lit::refs_namespaces(
+ &proj.project.urn(),
+ Qualified::from(lit::refs_heads(name::MASTER)),
+ ));
+ let (master_oid, head_target) = peer1
+ .using_storage::<_, anyhow::Result<_>>({
+ let master_ref = master_ref.clone();
+ move |s| {
+ let master_oid = s
+ .reference(&master_ref.into_qualified().into_refstring())?
+ .ok_or_else(|| anyhow::anyhow!("master ref not found"))?
+ .peel_to_commit()?
+ .id();
+ let head_target = s
+ .reference(&head_ref)?
+ .ok_or_else(|| anyhow::anyhow!("head ref not found"))?
+ .symbolic_target()
+ .map(|s| s.to_string());
+ Ok((master_oid, head_target))
+ }
+ })
+ .await
+ .unwrap()
+ .unwrap();
+ assert_eq!(master_oid, updated_tip);
+ assert_eq!(head_target, Some(master_ref.to_string()));
+ });
+ }
+
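+ /// Run `heads::default_branch_head` for `proj` against the storage of `peer`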
+ async fn branch_head(
+ peer: &RunningTestPeer,
+ proj: &TestProject,
+ ) -> anyhow::Result<heads::DefaultBranchHead> {
+ peer.using_storage::<_, anyhow::Result<_>>({
+ let urn = proj.project.urn();
+ move |s| {
+ let vp = identities::project::verify(s, &urn)?
+ .ok_or_else(|| anyhow::anyhow!("failed to get project for default branch"))?;
+ heads::default_branch_head(s, vp).map_err(anyhow::Error::from)
+ }
+ })
+ .await?
+ }
diff --git a/test/it-helpers/Cargo.toml b/test/it-helpers/Cargo.toml
index 32c789cd..04aa2166 100644
--- a/test/it-helpers/Cargo.toml
+++ b/test/it-helpers/Cargo.toml
@@ -18,6 +18,7 @@ once_cell = "1.10"
tempfile = "3.3"
tokio = "1.13"
tracing = "0.1"
+ either = "1.6"
[dependencies.git2]
version = "0.13.24"
@@ -40,5 +41,8 @@ path = "../../link-async"
[dependencies.lnk-clib]
path = "../../cli/lnk-clib"
+ [dependencies.radicle-git-ext]
+ path = "../../git-ext"
+
[dependencies.test-helpers]
path = "../test-helpers"
diff --git a/test/it-helpers/src/fixed.rs b/test/it-helpers/src/fixed.rs
index c36f5dd6..53006a31 100644
--- a/test/it-helpers/src/fixed.rs
+++ b/test/it-helpers/src/fixed.rs
@@ -2,7 +2,7 @@ mod person;
pub use person::TestPerson;
mod project;
- pub use project::TestProject;
+ pub use project::{Maintainers, TestProject};
pub mod repository;
pub use repository::{commit, repository};
diff --git a/test/it-helpers/src/fixed/project.rs b/test/it-helpers/src/fixed/project.rs
index 4ce3a41e..37a48832 100644
--- a/test/it-helpers/src/fixed/project.rs
+++ b/test/it-helpers/src/fixed/project.rs
@@ -4,6 +4,8 @@ use librad::{
git::{
identities::{self, Person, Project},
storage::Storage,
+ types::{Namespace, Reference},
+ Urn,
},
identities::{
delegation::{self, Direct},
@@ -18,6 +20,8 @@ use librad::{
};
use tracing::{info, instrument};
+ use crate::testnet::RunningTestPeer;
+
use super::TestPerson;
pub struct TestProject {
@@ -27,6 +31,13 @@ pub struct TestProject {
impl TestProject {
pub fn create(storage: &Storage) -> anyhow::Result<Self> {
+ Self::create_with_payload(storage, Self::default_payload())
+ }
+
+ pub fn create_with_payload(
+ storage: &Storage,
+ payload: payload::Project,
+ ) -> anyhow::Result<Self> {
let peer_id = storage.peer_id();
let alice = identities::person::create(
storage,
@@ -40,7 +51,7 @@ impl TestProject {
let proj = identities::project::create(
storage,
local_id,
- Self::default_payload(),
+ payload,
delegation::Indirect::from(alice.clone()),
)?;
@@ -120,4 +131,141 @@ impl TestProject {
.replicate((remote_peer, remote_addrs), urn, None)
.await?)
}
+
+ /// Add maintainers to a TestProject
+ ///
+ /// The `home` argument must be a peer which is already a delegate of the
+ /// project. The [`Maintainers`] struct which is returned can be used to
+ /// add maintainers using [`Maintainers::add`] before calling
+ /// [`Maintainers::setup`] to perform the cross signing which adds the
+ /// delegates to the project.
+ ///
+ /// # Example
+ ///
+ /// ```rust,no_run
+ /// # use it_helpers::{testnet::RunningTestPeer, fixed::{TestProject, TestPerson}};
+ /// # async fn doit() {
+ /// let peer: RunningTestPeer = unimplemented!();
+ /// let peer2: RunningTestPeer = unimplemented!();
+ ///
+ /// let project = peer.using_storage(TestProject::create).await.unwrap().unwrap();
+ /// let other_person = peer2.using_storage(TestPerson::create).await.unwrap().unwrap();
+ /// project.maintainers(&peer).add(&other_person, &peer2).setup().await.unwrap()
+ /// # }
+ /// ```
+ pub fn maintainers<'a>(&'a self, home: &'a RunningTestPeer) -> Maintainers<'a> {
+ Maintainers {
+ project: self,
+ home,
+ other_maintainers: Vec::new(),
+ }
+ }
+ }
+
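+ /// Builder returned by [`TestProject::maintainers`] which collects the peers
+ /// and `Person` identities to be added as delegates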
+ pub struct Maintainers<'a> {
+ project: &'a TestProject,
+ home: &'a RunningTestPeer,
+ other_maintainers: Vec<(&'a RunningTestPeer, &'a TestPerson)>,
+ }
+
+ impl<'a> Maintainers<'a> {
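+ /// Queue `person`, whose identity lives on `peer`, to be added as a
+ /// maintainer when [`Maintainers::setup`] runs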
+ pub fn add(mut self, person: &'a TestPerson, peer: &'a RunningTestPeer) -> Self {
+ self.other_maintainers.push((peer, person));
+ self
+ }
+
+ /// Perform the cross signing necessary to add all the maintainers to the
+ /// project.
+ ///
+ /// What this does is the following:
+ /// * Track each maintainer's peer on the `home` peer
+ /// * Add all of the `Person` identities as indirect delegates of the
+ /// project on the home peer
+ /// * For each maintainer:
+ /// * Pull the updated document into the maintainer's peer, then `update`
+ /// and `merge` the document there
+ /// * Pull the updated document back into the home peer
+ /// * On the home peer `merge` the document
+ /// * Finally pull the completed document back into each of the maintainer
+ /// peers
+ pub async fn setup(self) -> anyhow::Result<()> {
+ // make sure the home peer has all the other identities
+ for (peer, testperson) in &self.other_maintainers {
+ self.home
+ .track(self.project.project.urn(), Some(peer.peer_id()))
+ .await?;
+ testperson.pull(*peer, self.home).await?;
+ }
+ // Add the other identities as delegates of the project
+ self.home
+ .using_storage({
+ let urn = self.project.project.urn();
+ let owners = std::iter::once(self.project.owner.clone())
+ .chain(self.other_maintainers.iter().map(|(_, m)| m.owner.clone()))
+ .map(either::Either::Right)
+ .collect::<Vec<_>>();
+ move |storage| -> Result<(), anyhow::Error> {
+ identities::project::update(
+ storage,
+ &urn,
+ None,
+ None,
+ librad::identities::delegation::Indirect::try_from_iter(owners).unwrap(),
+ )?;
+ identities::project::verify(storage, &urn)?;
+ Ok(())
+ }
+ })
+ .await??;
+
+ // For each maintainer, sign the updated document and merge it back into the
+ // home peer
+ for (peer, _) in &self.other_maintainers {
+ // pull the document into the maintainer peer
+ self.project.pull(self.home, *peer).await?;
+ // Sign the project document using the maintainer's peer
+ peer.using_storage({
+ let urn = self.project.project.urn();
+ let peer_id = self.home.peer_id();
+ let rad =
+ Urn::try_from(Reference::rad_id(Namespace::from(&urn)).with_remote(peer_id))
+ .unwrap();
+ move |storage| -> Result<Option<identities::VerifiedProject>, anyhow::Error> {
+ let project = identities::project::get(&storage, &rad)?.unwrap();
+ identities::project::update(
+ storage,
+ &urn,
+ None,
+ None,
+ project.delegations().clone(),
+ )?;
+ identities::project::merge(storage, &urn, peer_id)?;
+ Ok(identities::project::verify(storage, &urn)?)
+ }
+ })
+ .await??;
+
+ // pull the signed update back into the home peer
+ self.project.pull(*peer, self.home).await?;
+
+ // Merge the signed update into the home peer
+ self.home
+ .using_storage({
+ let urn = self.project.project.urn();
+ let peer_id = peer.peer_id();
+ move |storage| -> Result<Option<identities::VerifiedProject>, anyhow::Error> {
+ identities::project::merge(storage, &urn, peer_id)?;
+ Ok(identities::project::verify(storage, &urn)?)
+ }
+ })
+ .await??;
+ }
+
+ // pull the finished document back to the maintainer peers
+ for (peer, _) in self.other_maintainers {
+ self.project.pull(self.home, peer).await?;
+ }
+ Ok(())
+ }
}
diff --git a/test/it-helpers/src/lib.rs b/test/it-helpers/src/lib.rs
index 981b922d..5012de39 100644
--- a/test/it-helpers/src/lib.rs
+++ b/test/it-helpers/src/lib.rs
@@ -7,3 +7,4 @@ pub mod layout;
pub mod ssh;
pub mod testnet;
pub mod tmp;
+ pub mod working_copy;
diff --git a/test/it-helpers/src/testnet.rs b/test/it-helpers/src/testnet.rs
index 9274b3f7..4888335b 100644
--- a/test/it-helpers/src/testnet.rs
+++ b/test/it-helpers/src/testnet.rs
@@ -21,7 +21,7 @@ use once_cell::sync::Lazy;
use tempfile::{tempdir, TempDir};
use librad::{
- git,
+ git::{self, tracking, Urn},
net::{
connection::{LocalAddr, LocalPeer},
discovery::{self, Discovery as _},
@@ -138,6 +138,20 @@ impl RunningTestPeer {
pub fn listen_addrs(&self) -> &[SocketAddr] {
&self.listen_addrs
}
+
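+ /// Add a tracking entry for `urn` in this peer's storage, scoped to `peer`
+ /// when one is given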
+ pub async fn track(&self, urn: Urn, peer: Option<PeerId>) -> anyhow::Result<()> {
+ self.using_storage(move |s| {
+ tracking::track(
+ s,
+ &urn,
+ peer,
+ tracking::Config::default(),
+ tracking::policy::Track::Any,
+ )??;
+ Ok(())
+ })
+ .await?
+ }
}
impl LocalPeer for RunningTestPeer {
diff --git a/test/it-helpers/src/working_copy.rs b/test/it-helpers/src/working_copy.rs
new file mode 100644
index 00000000..5fbef0dd
--- /dev/null
+++ b/test/it-helpers/src/working_copy.rs
@@ -0,0 +1,291 @@
+ use std::path::Path;
+
+ use git_ref_format::{lit, name, refspec, Qualified, RefStr, RefString};
+
+ use librad::{
+ git::{
+ local::url::LocalUrl,
+ types::{
+ remote::{LocalFetchspec, LocalPushspec},
+ Fetchspec,
+ Force,
+ Refspec,
+ Remote,
+ },
+ },
+ git_ext as ext,
+ net::{peer::Peer, protocol::RequestPullGuard},
+ refspec_pattern,
+ PeerId,
+ Signer,
+ };
+
+ use crate::fixed::TestProject;
+
+ /// A remote in the working copy
+ pub enum WorkingRemote {
+ /// A remote representing a remote peer, named `PeerId::encode_id`
+ Peer(PeerId),
+ /// A remote representing the local peer, named "rad"
+ Rad,
+ }
+
+ impl From<PeerId> for WorkingRemote {
+ fn from(p: PeerId) -> Self {
+ WorkingRemote::Peer(p)
+ }
+ }
+
+ impl WorkingRemote {
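+ /// The fetchspec used when fetching from the monorepo:
+ ///
+ /// * for `Peer(peer_id)`: `refs/remotes/<peer id>/heads/*:refs/remotes/<peer id>/*`
+ /// * for `Rad`: `refs/heads/*:refs/remotes/rad/*`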
+ fn fetchspec(&self) -> Fetchspec {
+ match self {
+ Self::Peer(peer_id) => {
+ let name = RefString::try_from(format!("{}", peer_id)).expect("peer is refstring");
+ let dst = RefString::from(Qualified::from(lit::refs_remotes(name.clone())))
+ .with_pattern(refspec::STAR);
+ let src = RefString::from(Qualified::from(lit::refs_remotes(name)))
+ .and(name::HEADS)
+ .with_pattern(refspec::STAR);
+ let refspec = Refspec {
+ src,
+ dst,
+ force: Force::True,
+ };
+ refspec.into_fetchspec()
+ },
+ Self::Rad => {
+ let name = RefString::try_from("rad").unwrap();
+ let src =
+ RefString::from_iter([name::REFS, name::HEADS]).with_pattern(refspec::STAR);
+ Refspec {
+ src,
+ dst: RefString::from(Qualified::from(lit::refs_remotes(name)))
+ .with_pattern(refspec::STAR),
+ force: Force::True,
+ }
+ .into_fetchspec()
+ },
+ }
+ }
+
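+ /// The remote tracking ref for `branch` in the working copy, i.e.
+ /// `refs/remotes/rad/<branch>` for `Rad` and `refs/remotes/<peer id>/<branch>`
+ /// for `Peer(peer_id)`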
+ fn remote_ref(&self, branch: &RefStr) -> RefString {
+ let name = match self {
+ Self::Rad => name::RAD.to_owned(),
+ Self::Peer(peer_id) => {
+ RefString::try_from(peer_id.to_string()).expect("peer id is refstring")
+ },
+ };
+ RefString::from(Qualified::from(lit::refs_remotes(name))).join(branch)
+ }
+ }
+
+ /// A `WorkingCopy` for test driving interactions with the monorepo where one
+ /// needs to update the tree of a project.
+ ///
+ /// Remotes are named after the peer ID, except in the case of the remote
+ /// representing the local Peer ID - which is called "rad".
+ pub struct WorkingCopy<'a, S, G> {
+ repo: git2::Repository,
+ peer: &'a Peer<S, G>,
+ project: &'a TestProject,
+ }
+
+ impl<'a, S, G> WorkingCopy<'a, S, G>
+ where
+ S: Signer + Clone,
+ G: RequestPullGuard,
+ {
+ /// Create a new working copy. This initializes a git repository and then
+ /// fetches the state of the local peer into `refs/remotes/rad/*`.
+ pub fn new<P: AsRef<Path>>(
+ project: &'a TestProject,
+ repo_path: P,
+ peer: &'a Peer<S, G>,
+ ) -> Result<WorkingCopy<'a, S, G>, anyhow::Error> {
+ let repo = git2::Repository::init(repo_path.as_ref())?;
+
+ let mut copy = WorkingCopy {
+ peer,
+ project,
+ repo,
+ };
+ copy.fetch(WorkingRemote::Rad)?;
+ Ok(copy)
+ }
+
+ /// Fetch changes from the monorepo into the working copy. The fetchspec
+ /// used depends on the peer ID.
+ ///
+ /// * If `from` is `WorkingRemote::Peer` then `refs/remotes/<peer
+ /// ID>/heads/*:refs/remotes/<peer ID>/*`
+ /// * If `from` is `WorkingRemote::Rad` then
+ /// `refs/heads/*:refs/remotes/rad/*`
+ ///
+ /// I.e. changes from remote peers end up in a remote called
+ /// `PeerId::encode_id` whilst changes from the local peer end up in a
+ /// remote called "rad".
+ pub fn fetch(&mut self, from: WorkingRemote) -> Result<(), anyhow::Error> {
+ let fetchspec = from.fetchspec();
+ let url = LocalUrl::from(self.project.project.urn());
+ let mut remote = Remote::rad_remote(url, fetchspec);
+ let _ = remote.fetch(self.peer.clone(), &self.repo, LocalFetchspec::Configured)?;
+ Ok(())
+ }
+
+ /// Push changes from `refs/heads/*` to the local peer
+ pub fn push(&mut self) -> Result<(), anyhow::Error> {
+ let url = LocalUrl::from(self.project.project.urn());
+ let name = RefString::try_from("rad").unwrap();
+ let fetchspec = Refspec {
+ src: RefString::from_iter([name::REFS, name::HEADS]).with_pattern(refspec::STAR),
+ dst: RefString::from(Qualified::from(lit::refs_remotes(name)))
+ .with_pattern(refspec::STAR),
+ force: Force::True,
+ }
+ .into_fetchspec();
+ let mut remote = Remote::rad_remote(url, fetchspec);
+ let _ = remote.push(
+ self.peer.clone(),
+ &self.repo,
+ LocalPushspec::Matching {
+ pattern: refspec_pattern!("refs/heads/*"),
+ force: Force::True,
+ },
+ )?;
+ Ok(())
+ }
+
+ /// Create a new commit on top of whichever commit is the head of
+ /// `on_branch`. If the branch does not exist this will create it.
+ pub fn commit(
+ &mut self,
+ message: &str,
+ on_branch: Qualified,
+ ) -> Result<git2::Oid, anyhow::Error> {
+ let branch_name = on_branch.non_empty_components().2;
+ let parent = match self.repo.find_branch(&branch_name, git2::BranchType::Local) {
+ Ok(b) => b.get().target().and_then(|o| self.repo.find_commit(o).ok()),
+ Err(e) if ext::error::is_not_found_err(&e) => None,
+ Err(e) => return Err(anyhow::Error::from(e)),
+ };
+ let empty_tree = {
+ let mut index = self.repo.index()?;
+ let oid = index.write_tree()?;
+ self.repo.find_tree(oid).unwrap()
+ };
+ let author = git2::Signature::now("The Animal", "animal@muppets.com").unwrap();
+ let parents = match &parent {
+ Some(p) => vec![p],
+ None => Vec::new(),
+ };
+ self.repo
+ .commit(
+ Some(&on_branch),
+ &author,
+ &author,
+ message,
+ &empty_tree,
+ &parents,
+ )
+ .map_err(anyhow::Error::from)
+ }
+
+ /// Create a branch at `refs/heads/<branch>` which tracks the given remote.
+ /// The remote branch name depends on `from`.
+ ///
+ /// * If `from` is `WorkingRemote::Rad` then `refs/remotes/rad/<branch>`
+ /// * If `from` is `WorkingRemote::Peer(peer_id)` then `refs/remotes/<peer
+ /// id>/<branch>`
+ pub fn create_remote_tracking_branch(
+ &self,
+ from: WorkingRemote,
+ branch: &RefStr,
+ ) -> Result<(), anyhow::Error> {
+ let target = self
+ .repo
+ .find_reference(from.remote_ref(branch).as_str())?
+ .target()
+ .ok_or_else(|| anyhow::anyhow!("remote ref is not a direct reference"))?;
+ let commit = self.repo.find_commit(target)?;
+ self.repo.branch(branch.as_str(), &commit, false)?;
+ Ok(())
+ }
+
+ /// Fast forward the local branch `refs/heads/<branch>` to whatever is
+ /// pointed to by `refs/remotes/<remote>/<branch>`
+ ///
+ /// * If `from` is `WorkingRemote::Peer(peer_id)` then `remote` is
+ /// `peer_id.encode_id()`
+ /// * If `from` is `WorkingRemote::Rad` then `remote` is `"rad"`
+ ///
+ /// # Errors
+ ///
+ /// * If the local branch does not exist
+ /// * If the remote branch does not exist
+ /// * If either of the branches does not point at a commit
+ /// * If the remote branch is not a descendant of the local branch
+ pub fn fast_forward_to(&self, from: WorkingRemote, branch: &RefStr) -> anyhow::Result<()> {
+ let remote_ref = from.remote_ref(branch);
+ let remote_target = self
+ .repo
+ .find_reference(&remote_ref)?
+ .target()
+ .ok_or_else(|| anyhow::anyhow!("remote ref had no target"))?;
+ let local_ref = RefString::from(Qualified::from(lit::refs_heads(branch)));
+ let local_target = self
+ .repo
+ .find_reference(&local_ref)?
+ .target()
+ .ok_or_else(|| anyhow::anyhow!("local ref had no target"))?;
+ if !self.repo.graph_descendant_of(remote_target, local_target)? {
+ anyhow::bail!("remote ref was not a descendant of local ref");
+ } else {
+ self.repo
+ .reference(&local_ref, remote_target, true, "fast forward")?;
+ }
+ Ok(())
+ }
+
+ /// Create a new commit which merges `refs/heads/<branch>` and
+ /// `refs/remotes/<remote>/<branch>`
+ ///
+ /// This will create a new commit with two parents, one for the remote
+ /// branch and one for the local branch
+ ///