~radicle-link/dev


[PATCH 0/2] cobs/remove-schemas

Message ID: <20220701101520.181107-1-alex@memoryandthought.me>
Re. our discussions on the mailing list, this change removes the schema
validation logic from COBs. This has the nice effect of allowing us to
remove jsonschema-rs and automerge-rs from our dependency footprint.

Published-At: https://github.com/alexjg/radicle-link/tree/patches/cobs/remove-schema/v1
Published-At:
    URN: rad:git:hnrkxafojjsz4m55qxbwigh1z8sdt7mai81gy
    peer: hydjhd8q9nkoxzkpddhcuue9xzpfr4bn6d44fo1f4q1japwm4brhh6
    seed: hydtac74mgo8xeh34cy7tmzzfejcybmxgfyawhnb4zj8wxxo4qckgh@seed.lnk.network:8799
    tag: patches/cobs/remove-schema/v1

Alex Good (2):
  cobs: Remove schemas
  cobs: Remove schemas from RFC

 bins/Cargo.lock                               | 330 +-----------
 cob/Cargo.toml                                |  10 -
 cob/src/cache.rs                              |   9 +-
 cob/src/cache/cached_change_graph.rs          |  87 +--
 cob/src/change.rs                             | 167 ++++--
 cob/src/change_graph.rs                       |  45 +-
 cob/src/change_graph/evaluation.rs            |  25 -
 cob/src/change_metadata.rs                    | 167 ------
 cob/src/lib.rs                                | 111 +---
 cob/src/schema.rs                             | 496 ------------------
 cob/src/schema_change.rs                      | 106 ----
 cob/src/validated_automerge.rs                | 128 -----
 cob/t/src/tests.rs                            |   1 -
 cob/t/src/tests/cache.rs                      |  21 +-
 cob/t/src/tests/cached_change_graph.rs        |  16 +-
 cob/t/src/tests/schema.rs                     | 258 ---------
 docs/rfc/0662-collaborative-objects.adoc      | 199 ++-----
 librad/src/collaborative_objects.rs           |  10 +-
 librad/src/git/tracking/refdb.rs              |  32 +-
 .../scenario/collaborative_objects.rs         |  15 -
 20 files changed, 214 insertions(+), 2019 deletions(-)
 delete mode 100644 cob/src/change_metadata.rs
 delete mode 100644 cob/src/schema.rs
 delete mode 100644 cob/src/schema_change.rs
 delete mode 100644 cob/src/validated_automerge.rs
 delete mode 100644 cob/t/src/tests/schema.rs

-- 
2.36.1

[PATCH 1/2] cobs: Remove schemas

Message ID: <20220701101520.181107-2-alex@memoryandthought.me>
In-Reply-To: <20220701101520.181107-1-alex@memoryandthought.me>
Patch: +181 -1853
Checking schemas in the cobs isn't actually that useful. Applications
have to validate each change in a COB anyway, as 1) they must check
that changes are authorized and 2) the schema the application cares
about may differ from the schema the object was published with.

This change removes the schema checking logic, as well as logic to
encode and ship a schema along with a collaborative object. This also
allows us to remove the dependencies on automerge and jsonschema.
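
To make the split of responsibilities concrete, here is a rough sketch of
what such an application level check could look like once the cob crate no
longer performs it. The JSONSchema::compile/validate calls mirror the code
this patch deletes from cob/src/schema.rs; the schema and the document_state
value (standing in for however the application renders the object's state as
JSON) are made up purely for illustration.

    use jsonschema::JSONSchema;
    use serde_json::json;

    fn main() {
        // The schema the *application* cares about, which may differ from
        // whatever schema the object was originally published with.
        let schema_json = json!({
            "type": "object",
            "properties": { "title": { "type": "string" } },
            "required": ["title"]
        });
        let schema = JSONSchema::compile(&schema_json).expect("schema should compile");

        // Hypothetical JSON rendering of the object state which the
        // application reconstructed from the COB's change history.
        let document_state = json!({ "title": "Remove schemas" });

        // Reject (or ignore) states which do not match the application's schema.
        if let Err(errors) = schema.validate(&document_state) {
            for err in errors {
                eprintln!("schema violation: {}", err);
            }
        }
    }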

Signed-off-by: Alex Good <alex@memoryandthought.me>
---
 bins/Cargo.lock                               | 330 +-----------
 cob/Cargo.toml                                |  10 -
 cob/src/cache.rs                              |   9 +-
 cob/src/cache/cached_change_graph.rs          |  87 +--
 cob/src/change.rs                             | 167 ++++--
 cob/src/change_graph.rs                       |  45 +-
 cob/src/change_graph/evaluation.rs            |  25 -
 cob/src/change_metadata.rs                    | 167 ------
 cob/src/lib.rs                                | 111 +---
 cob/src/schema.rs                             | 496 ------------------
 cob/src/schema_change.rs                      | 106 ----
 cob/src/validated_automerge.rs                | 128 -----
 cob/t/src/tests.rs                            |   1 -
 cob/t/src/tests/cache.rs                      |  21 +-
 cob/t/src/tests/cached_change_graph.rs        |  16 +-
 cob/t/src/tests/schema.rs                     | 258 ---------
 librad/src/collaborative_objects.rs           |  10 +-
 librad/src/git/tracking/refdb.rs              |  32 +-
 .../scenario/collaborative_objects.rs         |  15 -
 19 files changed, 181 insertions(+), 1853 deletions(-)
 delete mode 100644 cob/src/change_metadata.rs
 delete mode 100644 cob/src/schema.rs
 delete mode 100644 cob/src/schema_change.rs
 delete mode 100644 cob/src/validated_automerge.rs
 delete mode 100644 cob/t/src/tests/schema.rs

diff --git a/bins/Cargo.lock b/bins/Cargo.lock
index 6ab3de18..d5756144 100644
--- a/bins/Cargo.lock
+++ b/bins/Cargo.lock
@@ -36,18 +36,6 @@ version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8fd72866655d1904d6b0997d0b07ba561047d070fbe29de039031c641b61217"

[[package]]
name = "ahash"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47"
dependencies = [
 "getrandom 0.2.6",
 "once_cell",
 "serde",
 "version_check",
]

[[package]]
name = "aho-corasick"
version = "0.7.18"
@@ -226,75 +214,6 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"

[[package]]
name = "automerge"
version = "0.0.2"
source = "git+https://github.com/automerge/automerge-rs.git?rev=e72571962b51c2f0726fb534890ef3b4f7c74dfc#e72571962b51c2f0726fb534890ef3b4f7c74dfc"
dependencies = [
 "automerge-backend",
 "automerge-frontend",
 "automerge-protocol",
 "serde",
 "serde_json",
 "thiserror",
 "uuid",
]

[[package]]
name = "automerge-backend"
version = "0.0.1"
source = "git+https://github.com/automerge/automerge-rs.git?rev=e72571962b51c2f0726fb534890ef3b4f7c74dfc#e72571962b51c2f0726fb534890ef3b4f7c74dfc"
dependencies = [
 "automerge-protocol",
 "flate2",
 "fxhash",
 "hex",
 "itertools 0.9.0",
 "js-sys",
 "leb128",
 "maplit",
 "nonzero_ext 0.2.0",
 "rand",
 "serde",
 "serde_json",
 "sha2",
 "smol_str",
 "thiserror",
 "tracing",
 "wasm-bindgen",
 "web-sys",
]

[[package]]
name = "automerge-frontend"
version = "0.1.0"
source = "git+https://github.com/automerge/automerge-rs.git?rev=e72571962b51c2f0726fb534890ef3b4f7c74dfc#e72571962b51c2f0726fb534890ef3b4f7c74dfc"
dependencies = [
 "automerge-protocol",
 "getrandom 0.2.6",
 "maplit",
 "serde",
 "serde_json",
 "smol_str",
 "thiserror",
 "unicode-segmentation",
 "uuid",
]

[[package]]
name = "automerge-protocol"
version = "0.1.0"
source = "git+https://github.com/automerge/automerge-rs.git?rev=e72571962b51c2f0726fb534890ef3b4f7c74dfc#e72571962b51c2f0726fb534890ef3b4f7c74dfc"
dependencies = [
 "hex",
 "serde",
 "smol_str",
 "strum",
 "thiserror",
 "tinyvec",
 "uuid",
]

[[package]]
name = "backoff"
version = "0.3.0"
@@ -337,15 +256,6 @@ dependencies = [
 "zeroize",
]

[[package]]
name = "bit-set"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e11e16035ea35e4e5997b393eacbf6f63983188f7a2ad25bfb13465f5ad59de"
dependencies = [
 "bit-vec",
]

[[package]]
name = "bit-vec"
version = "0.6.3"
@@ -475,12 +385,6 @@ version = "3.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4a45a46ab1f2412e53d3a0ade76ffad2025804294569aae387231a0cd6e0899"

[[package]]
name = "bytecount"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72feb31ffc86498dacdbd0fcebb56138e7177a8cc5cea4516031d15ae85a742e"

[[package]]
name = "byteorder"
version = "1.4.3"
@@ -589,7 +493,7 @@ version = "3.1.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "25320346e922cffe59c0bbc5410c8d8784509efb321488971081313cb1e1a33c"
dependencies = [
 "heck 0.4.0",
 "heck",
 "proc-macro-error",
 "proc-macro2",
 "quote",
@@ -624,15 +528,12 @@ dependencies = [
name = "cob"
version = "0.1.0"
dependencies = [
 "automerge",
 "either",
 "git-trailers",
 "git2",
 "jsonschema",
 "lazy_static",
 "link-crypto",
 "link-identities",
 "lru",
 "minicbor",
 "multibase",
 "multihash",
@@ -943,16 +844,6 @@ version = "2.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77f3309417938f28bf8228fcff79a4a37103981e3e186d2ccd19c74b38f4eb71"

[[package]]
name = "fancy-regex"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d6b8560a05112eb52f04b00e5d3790c0dd75d9d980eb8a122fb23b92a623ccf"
dependencies = [
 "bit-set",
 "regex",
]

[[package]]
name = "fastrand"
version = "1.7.0"
@@ -1017,16 +908,6 @@ dependencies = [
 "percent-encoding",
]

[[package]]
name = "fraction"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aba3510011eee8825018be07f08d9643421de007eaf62a3bde58d89b058abfa7"
dependencies = [
 "lazy_static",
 "num",
]

[[package]]
name = "fsevent"
version = "0.4.0"
@@ -1184,15 +1065,6 @@ dependencies = [
 "pin-project 0.4.29",
]

[[package]]
name = "fxhash"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
dependencies = [
 "byteorder",
]

[[package]]
name = "generic-array"
version = "0.14.5"
@@ -1222,10 +1094,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9be70c98951c83b8d2f8f60d7065fa6d5146873094452a1008da8c2f1e4205ad"
dependencies = [
 "cfg-if 1.0.0",
 "js-sys",
 "libc",
 "wasi 0.10.2+wasi-snapshot-preview1",
 "wasm-bindgen",
]

[[package]]
@@ -1589,7 +1459,7 @@ version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e91b62f79061a0bc2e046024cb7ba44b08419ed238ecbd9adbd787434b9e8c25"
dependencies = [
 "ahash 0.3.8",
 "ahash",
 "autocfg",
]

@@ -1598,9 +1468,6 @@ name = "hashbrown"
version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
dependencies = [
 "ahash 0.7.6",
]

[[package]]
name = "hashbrown"
@@ -1608,15 +1475,6 @@ version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db0d4cf898abf0081f964436dc980e96670a0f36863e4b83aaacdb65c9d7ccc3"

[[package]]
name = "heck"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c"
dependencies = [
 "unicode-segmentation",
]

[[package]]
name = "heck"
version = "0.4.0"
@@ -1785,24 +1643,6 @@ version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "879d54834c8c76457ef4293a689b2a8c59b076067ad77b15efafbb05f92a592b"

[[package]]
name = "iso8601"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a59a3f2be6271b2a844cd0dd13bf8ccc88a9540482d872c7ce58ab1c4db9fab"
dependencies = [
 "nom",
]

[[package]]
name = "itertools"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b"
dependencies = [
 "either",
]

[[package]]
name = "itertools"
version = "0.10.3"
@@ -1842,31 +1682,6 @@ dependencies = [
 "wasm-bindgen",
]

[[package]]
name = "jsonschema"
version = "0.13.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "877e398ffb23c1c311c417ef5e72e8699c3822dbf835468f009c6ce91b6c206b"
dependencies = [
 "ahash 0.7.6",
 "base64",
 "bytecount",
 "fancy-regex",
 "fraction",
 "iso8601",
 "itoa 0.4.8",
 "lazy_static",
 "num-cmp",
 "parking_lot 0.12.1",
 "percent-encoding",
 "regex",
 "serde",
 "serde_json",
 "time",
 "url",
 "uuid",
]

[[package]]
name = "jwalk"
version = "0.6.0"
@@ -1905,12 +1720,6 @@ version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"

[[package]]
name = "leb128"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67"

[[package]]
name = "libc"
version = "0.2.126"
@@ -1955,7 +1764,7 @@ dependencies = [
 "governor",
 "if-watch",
 "indexmap",
 "itertools 0.10.3",
 "itertools",
 "lazy_static",
 "libc",
 "libgit2-sys",
@@ -2184,7 +1993,7 @@ dependencies = [
 "either",
 "futures-lite",
 "git-ref-format",
 "itertools 0.10.3",
 "itertools",
 "link-crypto",
 "link-git",
 "parking_lot 0.12.1",
@@ -2265,7 +2074,7 @@ version = "0.1.0"
dependencies = [
 "async-trait",
 "futures",
 "itertools 0.10.3",
 "itertools",
 "librad",
 "lnk-thrussh-agent",
 "minicbor",
@@ -2467,7 +2276,7 @@ dependencies = [
 "lnk-thrussh-libsodium",
 "log",
 "md5",
 "num-bigint 0.4.3",
 "num-bigint",
 "num-integer",
 "pbkdf2 0.8.0",
 "rand",
@@ -2512,21 +2321,6 @@ dependencies = [
 "cfg-if 1.0.0",
]

[[package]]
name = "lru"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8015d95cb7b2ddd3c0d32ca38283ceb1eea09b4713ee380bceb942d85a244228"
dependencies = [
 "hashbrown 0.11.2",
]

[[package]]
name = "maplit"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d"

[[package]]
name = "matchers"
version = "0.1.0"
@@ -2800,31 +2594,6 @@ dependencies = [
 "winapi 0.3.9",
]

[[package]]
name = "num"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8536030f9fea7127f841b45bb6243b27255787fb4eb83958aa1ef9d2fdc0c36"
dependencies = [
 "num-bigint 0.2.6",
 "num-complex",
 "num-integer",
 "num-iter",
 "num-rational",
 "num-traits",
]

[[package]]
name = "num-bigint"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "090c7f9998ee0ff65aa5b723e4009f7b217707f1fb5ea551329cc4d6231fb304"
dependencies = [
 "autocfg",
 "num-integer",
 "num-traits",
]

[[package]]
name = "num-bigint"
version = "0.4.3"
@@ -2836,22 +2605,6 @@ dependencies = [
 "num-traits",
]

[[package]]
name = "num-cmp"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63335b2e2c34fae2fb0aa2cecfd9f0832a1e24b3b32ecec612c3426d46dc8aaa"

[[package]]
name = "num-complex"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6b19411a9719e753aff12e5187b74d60d3dc449ec3f4dc21e3989c3f554bc95"
dependencies = [
 "autocfg",
 "num-traits",
]

[[package]]
name = "num-integer"
version = "0.1.45"
@@ -2862,29 +2615,6 @@ dependencies = [
 "num-traits",
]

[[package]]
name = "num-iter"
version = "0.1.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7d03e6c028c5dc5cac6e2dec0efda81fc887605bb3d884578bb6d6bf7514e252"
dependencies = [
 "autocfg",
 "num-integer",
 "num-traits",
]

[[package]]
name = "num-rational"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c000134b5dbf44adc5cb772486d335293351644b801551abe8f75c84cfa4aef"
dependencies = [
 "autocfg",
 "num-bigint 0.2.6",
 "num-integer",
 "num-traits",
]

[[package]]
name = "num-traits"
version = "0.2.15"
@@ -3752,15 +3482,6 @@ version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83"

[[package]]
name = "smol_str"
version = "0.1.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7475118a28b7e3a2e157ce0131ba8c5526ea96e90ee601d9f6bb2e286a35ab44"
dependencies = [
 "serde",
]

[[package]]
name = "socket2"
version = "0.3.19"
@@ -3794,27 +3515,6 @@ version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"

[[package]]
name = "strum"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aaf86bbcfd1fa9670b7a129f64fc0c9fcbbfe4f1bc4210e9e98fe71ffc12cde2"
dependencies = [
 "strum_macros",
]

[[package]]
name = "strum_macros"
version = "0.21.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d06aaeeee809dbc59eb4556183dd927df67db1540de5be8d3ec0b6636358a5ec"
dependencies = [
 "heck 0.3.3",
 "proc-macro2",
 "quote",
 "syn",
]

[[package]]
name = "subtle"
version = "2.4.1"
@@ -3910,15 +3610,8 @@ checksum = "c2702e08a7a860f005826c6815dcac101b19b5eb330c27fe4a5928fec1d20ddd"
dependencies = [
 "libc",
 "num_threads",
 "time-macros",
]

[[package]]
name = "time-macros"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42657b1a6f4d817cda8e7a0ace261fe0cc946cf3a80314390b22cc61ae080792"

[[package]]
name = "tinyvec"
version = "1.6.0"
@@ -3991,7 +3684,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d0ecdcb44a79f0fe9844f0c4f33a342cbcbb5117de8001e6ba0dc2351327d09"
dependencies = [
 "cfg-if 1.0.0",
 "log",
 "pin-project-lite",
 "tracing-attributes",
 "tracing-core",
@@ -4096,12 +3788,6 @@ dependencies = [
 "tinyvec",
]

[[package]]
name = "unicode-segmentation"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99"

[[package]]
name = "unicode-xid"
version = "0.2.3"
@@ -4177,7 +3863,7 @@ version = "3.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5cd9a7a22c45daf5aeb6bea3dff4ecbb8eb43e492582d467b18ce2979b512cbe"
dependencies = [
 "itertools 0.10.3",
 "itertools",
 "nom",
]

@@ -4420,7 +4106,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e262a29d0e61ccf2b6190d7050d4b237535fc76ce4c1210d9caa316f71dffa75"
dependencies = [
 "bit-vec",
 "num-bigint 0.4.3",
 "num-bigint",
]

[[package]]
diff --git a/cob/Cargo.toml b/cob/Cargo.toml
index 8c4f6f85..c840a7db 100644
--- a/cob/Cargo.toml
+++ b/cob/Cargo.toml
@@ -18,7 +18,6 @@ thiserror = "1.0"
toml = "0.5"
tracing = "0.1"
either = "1.6"
lru = "0.7.1"
multihash = "0.11"
multibase = "0.9"
lazy_static = "1.4"
@@ -33,11 +32,6 @@ version = "0.13.24"
default-features = false
features = ["vendored-libgit2"]

[dependencies.jsonschema]
version = "~0.13"
default-features = false
features = []

[dependencies.link-crypto]
path = "../link-crypto"

@@ -49,7 +43,3 @@ path = "../git-trailers"

[dependencies.radicle-git-ext]
path = "../git-ext"

[dependencies.automerge]
git = "https://github.com/automerge/automerge-rs.git"
rev = "e72571962b51c2f0726fb534890ef3b4f7c74dfc"
diff --git a/cob/src/cache.rs b/cob/src/cache.rs
index a3ecd457..a3586a71 100644
--- a/cob/src/cache.rs
+++ b/cob/src/cache.rs
@@ -18,8 +18,6 @@ pub enum Error {
    #[error(transparent)]
    Io(#[from] std::io::Error),
    #[error(transparent)]
    SchemaParse(#[from] super::schema::error::Parse),
    #[error(transparent)]
    Git(#[from] git2::Error),
    #[error(transparent)]
    MinicborDecode(#[from] minicbor::decode::Error),
@@ -36,10 +34,6 @@ pub trait Cache {
    /// are changed then we will not see those changes. However, we specify
    /// in the RFC that any peer updating a change must update their ref to
    /// the object, so this should not be a problem.
    ///
    /// We return an `Rc<RefCell<CachedChangeGraph>>`. This is so that changes
    /// can be made by calling `CachedChangeGraph::propose_change`, which
    /// mutates the `CachedChangeGraph`.
    fn load(
        &mut self,
        oid: ObjectId,
@@ -64,8 +58,7 @@ pub trait Cache {
///
/// Each file contains a CBOR encoding of a `CachedChangeGraph`. This file
/// contains the OIDs of the tips of the graph that were used to generate the
/// object, the validated automerge history that was generated using those tips,
/// the schema and the schema commit OID.
/// object and the individual automerge change blobs.
///
/// The `v1` directory means we can easily add a `v2` if we need to change the
/// cache layout in backwards incompatible ways.
diff --git a/cob/src/cache/cached_change_graph.rs b/cob/src/cache/cached_change_graph.rs
index e340578f..be2a3e17 100644
--- a/cob/src/cache/cached_change_graph.rs
+++ b/cob/src/cache/cached_change_graph.rs
@@ -3,19 +3,11 @@
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
// Linking Exception. For full terms see the included LICENSE file.

use crate::{
    validated_automerge::error::ProposalError,
    EntryContents,
    History,
    ObjectId,
    Schema,
    TypeName,
    ValidatedAutomerge,
};
use crate::{EntryContents, History, ObjectId, TypeName};

use link_identities::git::Urn;

use std::{cell::RefCell, collections::BTreeSet, ops::ControlFlow, rc::Rc};
use std::{cell::RefCell, collections::BTreeSet, rc::Rc};

/// A CBOR encoding of the change graph which was loaded when the heads of the
/// change graph were `refs`. The `history` contains the bytes of each change
@@ -27,11 +19,6 @@ pub struct CachedChangeGraph {
    #[n(1)]
    #[cbor(with = "encoding::oids")]
    pub refs: BTreeSet<git2::Oid>,
    #[n(2)]
    #[cbor(with = "encoding::oid")]
    pub schema_commit: git2::Oid,
    #[n(3)]
    pub schema: Schema,
    #[n(4)]
    #[cbor(with = "encoding::typename")]
    pub typename: TypeName,
@@ -45,8 +32,6 @@ pub struct CachedChangeGraph {
impl CachedChangeGraph {
    pub fn new(
        tips: impl IntoIterator<Item = git2::Oid>,
        schema: Schema,
        schema_commit: git2::Oid,
        history: History,
        typename: TypeName,
        object_id: ObjectId,
@@ -54,9 +39,7 @@ impl CachedChangeGraph {
    ) -> Rc<RefCell<CachedChangeGraph>> {
        let g = CachedChangeGraph {
            history,
            schema,
            refs: tips.into_iter().collect(),
            schema_commit,
            typename,
            object_id,
            authorizing_identity_urn,
@@ -68,32 +51,6 @@ impl CachedChangeGraph {
        &self.history
    }

    pub(crate) fn propose_change(&mut self, change: &EntryContents) -> Result<(), ProposalError> {
        match change {
            EntryContents::Automerge(change_bytes) => {
                let mut validated = self.history.traverse(
                    ValidatedAutomerge::new(self.schema.clone()),
                    |mut doc, entry| {
                        // This unwrap should be safe as we only save things in the cache when we've
                        // validated them
                        doc.propose_change(entry.contents().as_ref()).unwrap();
                        ControlFlow::Continue(doc)
                    },
                );
                validated.propose_change(change_bytes)?;
            },
        }
        Ok(())
    }

    pub fn schema(&self) -> &Schema {
        &self.schema
    }

    pub fn schema_commit(&self) -> git2::Oid {
        self.schema_commit
    }

    pub fn tips(&self) -> BTreeSet<git2::Oid> {
        self.refs.clone()
    }
@@ -130,8 +87,6 @@ impl CachedChangeGraph {
}

mod encoding {
    use crate::Schema;
    use std::convert::TryFrom;

    struct Json(serde_json::Value);

@@ -155,44 +110,6 @@ mod encoding {
        }
    }

    impl minicbor::Encode for Schema {
        fn encode<W: minicbor::encode::Write>(
            &self,
            e: &mut minicbor::Encoder<W>,
        ) -> Result<(), minicbor::encode::Error<W::Error>> {
            e.encode(self.json_bytes())?;
            Ok(())
        }
    }

    impl<'b> minicbor::Decode<'b> for Schema {
        fn decode(d: &mut minicbor::Decoder<'b>) -> Result<Self, minicbor::decode::Error> {
            let bytes: Vec<u8> = d.decode()?;
            Schema::try_from(&bytes[..])
                .map_err(|_| minicbor::decode::Error::Message("invalid schema JSON"))
        }
    }

    pub(super) mod oid {
        use minicbor::{
            decode::{Decode, Decoder, Error as DecodeError},
            encode::{Encode, Encoder, Error as EncodeError, Write},
        };
        use radicle_git_ext::Oid;

        pub fn encode<W: Write>(
            v: &git2::Oid,
            e: &mut Encoder<W>,
        ) -> Result<(), EncodeError<W::Error>> {
            Oid::from(*v).encode(e)
        }

        pub fn decode(d: &mut Decoder<'_>) -> Result<git2::Oid, DecodeError> {
            let ext = Oid::decode(d)?;
            Ok(ext.into())
        }
    }

    pub(super) mod oids {
        use minicbor::{
            decode::{Decode, Decoder, Error as DecodeError},
diff --git a/cob/src/change.rs b/cob/src/change.rs
index e49a79b9..b99b26b6 100644
--- a/cob/src/change.rs
+++ b/cob/src/change.rs
@@ -3,15 +3,11 @@
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
// Linking Exception. For full terms see the included LICENSE file.

use super::{
    change_metadata::{self, ChangeMetadata, CreateMetadataArgs},
    trailers,
    EntryContents,
    HistoryType,
    TypeName,
};
use super::{trailers, EntryContents, HistoryType, TypeName};

use git_trailers::{parse as parse_trailers, OwnedTrailer};
use link_crypto::BoxedSigner;
use link_identities::sign::Signatures;

use std::{convert::TryFrom, fmt};

@@ -21,14 +17,28 @@ use serde::{Deserialize, Serialize};
/// is specified in the RFC (docs/rfc/0662-collaborative-objects.adoc)
/// under "Change Commits".
pub struct Change {
    /// The OID of the parent commit which points at the schema_commit
    schema_commit: git2::Oid,
    /// The commit where this change lives
    commit: git2::Oid,
    /// The OID of the tree the commit points at, we need this to validate the
    /// signatures
    revision: git2::Oid,
    /// The signatures of this change
    signatures: Signatures,
    /// The OID of the parent commit of this change which points at the author
    /// identity
    author_commit: git2::Oid,
    /// The OID of the parent commit of this change which points at a schema.
    /// Schemas are no longer used but older implementations include a
    /// schema commit as a parent of the change and to stay backwards
    /// compatible we must exclude these commits when loading a change.
    schema_commit: Option<git2::Oid>,
    /// The OID of the parent commit which points at the identity this change
    /// was authorized with respect to at the time the change was authored.
    authorizing_identity_commit: git2::Oid,
    /// The manifest
    manifest: Manifest,
    /// The actual changes this change carries
    contents: EntryContents,
    /// The metadata for this change
    metadata: change_metadata::ChangeMetadata,
}

impl fmt::Display for Change {
@@ -38,9 +48,10 @@ impl fmt::Display for Change {
}

pub mod error {
    use super::{change_metadata, trailers};
    use super::trailers;
    use git_trailers::Error as TrailerError;
    use link_crypto::BoxedSignError;
    use link_identities::git::error::Signatures;
    use link_identities::sign::error::Signatures;
    use thiserror::Error;

    #[derive(Debug, Error)]
@@ -49,8 +60,6 @@ pub mod error {
        Git(#[from] git2::Error),
        #[error(transparent)]
        Signer(#[from] BoxedSignError),
        #[error(transparent)]
        Metadata(#[from] change_metadata::CreateError),
    }

    #[derive(Debug, Error)]
@@ -70,14 +79,21 @@ pub mod error {
        #[error("./change was not a blob")]
        ChangeNotBlob,
        #[error(transparent)]
        InvalidMetadata(#[from] change_metadata::LoadError),
        #[error(transparent)]
        SchemaCommitTrailer(#[from] trailers::error::InvalidSchemaTrailer),
        #[error(transparent)]
        AuthorTrailer(#[from] trailers::error::InvalidAuthorTrailer),
        #[error(transparent)]
        AuthorizingIdentityTrailer(
            #[from] super::trailers::error::InvalidAuthorizingIdentityTrailer,
        ),
        #[error("non utf-8 characters in commit message")]
        Utf8,
        #[error(transparent)]
        Trailer(#[from] TrailerError),
    }
}

pub struct NewChangeSpec {
    pub(crate) schema_commit: git2::Oid,
    pub(crate) typename: TypeName,
    pub(crate) tips: Option<Vec<git2::Oid>>,
    pub(crate) message: Option<String>,
@@ -90,8 +106,8 @@ const CHANGE_BLOB_NAME: &str = "change";
impl Change {
    /// Create a change in the git repo according to the spec
    pub fn create(
        authorizing_identity_commit: git2::Oid,
        author_identity_commit: git2::Oid,
        authorizing_identity_commit_id: git2::Oid,
        author_identity_commit_id: git2::Oid,
        repo: &git2::Repository,
        signer: &BoxedSigner,
        spec: NewChangeSpec,
@@ -116,35 +132,80 @@ impl Change {
        tb.insert(CHANGE_BLOB_NAME, change_blob, git2::FileMode::Blob.into())?;

        let revision = tb.write()?;

        let schema_trailer = trailers::SchemaCommitTrailer::from(spec.schema_commit).into();

        let mut tips = spec.tips.clone().unwrap_or_default();
        tips.push(spec.schema_commit);
        tips.push(authorizing_identity_commit);

        let metadata = ChangeMetadata::create(CreateMetadataArgs {
            revision,
            tips,
            message: spec.message.unwrap_or_else(|| "new change".to_string()),
            extra_trailers: vec![schema_trailer],
            authorizing_identity_commit,
            author_identity_commit,
            signer: signer.clone(),
            repo,
        })?;
        let tree = repo.find_tree(revision)?;

        let author_commit = repo.find_commit(author_identity_commit_id)?;
        let author = repo.signature()?;

        let authorizing_identity_commit = repo.find_commit(authorizing_identity_commit_id)?;

        let signatures = link_identities::git::sign(signer, revision.into())?.into();
        let mut parent_commits = spec
            .tips
            .iter()
            .flat_map(|cs| cs.iter())
            .map(|o| repo.find_commit(*o))
            .collect::<Result<Vec<git2::Commit>, git2::Error>>()?;
        parent_commits.push(authorizing_identity_commit);
        parent_commits.push(author_commit);

        let trailers = vec![
            super::trailers::AuthorCommitTrailer::from(author_identity_commit_id).into(),
            super::trailers::AuthorizingIdentityCommitTrailer::from(authorizing_identity_commit_id)
                .into(),
        ];

        let commit = repo.commit(
            None,
            &author,
            &author,
            &link_identities::git::sign::CommitMessage::new(
                spec.message
                    .unwrap_or_else(|| "new change".to_string())
                    .as_str(),
                &signatures,
                trailers,
            )
            .to_string(),
            &tree,
            &(parent_commits.iter().collect::<Vec<&git2::Commit>>())[..],
        )?;

        Ok(Change {
            schema_commit: spec.schema_commit,
            schema_commit: None,
            manifest,
            contents: spec.contents,
            metadata,
            commit,
            signatures,
            authorizing_identity_commit: authorizing_identity_commit_id,
            author_commit: author_identity_commit_id,
            revision,
        })
    }

    /// Load a change from the given commit
    pub fn load(repo: &git2::Repository, commit: &git2::Commit) -> Result<Change, error::Load> {
        let metadata = ChangeMetadata::try_from(commit)?;
        let trailers = commit
            .message()
            .ok_or(error::Load::Utf8)
            .and_then(|s| parse_trailers(s, ":").map_err(|e| e.into()))?;
        let owned_trailers: Vec<OwnedTrailer> = trailers.iter().map(OwnedTrailer::from).collect();
        let author_commit_trailer =
            super::trailers::AuthorCommitTrailer::try_from(&owned_trailers[..])?;
        let authorizing_identity_trailer =
            super::trailers::AuthorizingIdentityCommitTrailer::try_from(&owned_trailers[..])?;

        // We no longer support schema parents but to remain backwards compatible we
        // still load the commit trailer so we know to omit the schema parent
        // commits when evaluating old object histories which still have a
        // schema parent commit
        let schema_commit_trailer =
            match super::trailers::SchemaCommitTrailer::try_from(&owned_trailers[..]) {
                Ok(t) => Some(t),
                Err(super::trailers::error::InvalidSchemaTrailer::NoTrailer) => None,
                Err(e) => return Err(e.into()),
            };
        let signatures = Signatures::try_from(trailers)?;

        let tree = commit.tree()?;
        let manifest_tree_entry = tree
@@ -170,23 +231,24 @@ impl Change {
            },
        };

        let schema_commit_trailer =
            trailers::SchemaCommitTrailer::try_from(&metadata.trailers[..])?;

        Ok(Change {
            schema_commit: schema_commit_trailer.oid(),
            manifest,
            contents,
            metadata,
            commit: commit.id(),
            schema_commit: schema_commit_trailer.map(|s| s.oid()),
            author_commit: author_commit_trailer.oid(),
            authorizing_identity_commit: authorizing_identity_trailer.oid(),
            signatures,
            revision: tree.id(),
        })
    }

    pub fn commit(&self) -> &git2::Oid {
        &self.metadata.commit
        &self.commit
    }

    pub fn author_commit(&self) -> git2::Oid {
        self.metadata.author_commit
        self.author_commit
    }

    pub fn typename(&self) -> &TypeName {
@@ -197,16 +259,21 @@ impl Change {
        &self.contents
    }

    pub fn schema_commit(&self) -> git2::Oid {
    pub fn schema_commit(&self) -> Option<git2::Oid> {
        self.schema_commit
    }

    pub fn authorizing_identity_commit(&self) -> git2::Oid {
        self.metadata.authorizing_identity_commit
        self.authorizing_identity_commit
    }

    pub fn valid_signatures(&self) -> bool {
        self.metadata.valid_signatures()
        for (key, sig) in self.signatures.iter() {
            if !key.verify(sig, self.revision.as_bytes()) {
                return false;
            }
        }
        true
    }
}

diff --git a/cob/src/change_graph.rs b/cob/src/change_graph.rs
index b020f9e5..4c4bf4cb 100644
--- a/cob/src/change_graph.rs
+++ b/cob/src/change_graph.rs
+++ b/cob/src/change_graph.rs
@@ -4,17 +4,13 @@
// Linking Exception. For full terms see the included LICENSE file.

use super::{
    schema_change,
    AuthorizingIdentity,
    Change,
    CollaborativeObject,
    IdentityStorage,
    ObjectId,
    Schema,
    SchemaChange,
    TypeName,
};
use link_identities::git::Urn;
use petgraph::{
    visit::{EdgeRef, Topo, Walker},
    EdgeDirection,
@@ -34,14 +30,6 @@ pub enum Error {
    MissingRevision(git2::Oid),
    #[error(transparent)]
    Git(#[from] git2::Error),
    #[error(transparent)]
    LoadSchema(#[from] schema_change::error::Load),
    #[error("schema change is authorized by an incorrect identity URN, expected {expected} but was {actual}")]
    SchemaAuthorizingUrnIncorrect { expected: Urn, actual: Urn },
    #[error("no authorizing identity found for schema change")]
    NoSchemaAuthorizingIdentityFound,
    #[error("invalid signature on schema change")]
    InvalidSchemaSignatures,
}

/// The graph of changes for a particular collaborative object
@@ -50,7 +38,6 @@ pub(super) struct ChangeGraph<'a> {
    object_id: ObjectId,
    authorizing_identity: &'a dyn AuthorizingIdentity,
    graph: petgraph::Graph<Change, ()>,
    schema_change: SchemaChange,
}

impl<'a> ChangeGraph<'a> {
@@ -119,12 +106,8 @@ impl<'a> ChangeGraph<'a> {
            let first_node = &self.graph[*root];
            first_node.typename().clone()
        };
        let evaluating = evaluation::Evaluating::new(
            identities,
            self.authorizing_identity,
            self.repo,
            self.schema().clone(),
        );
        let evaluating =
            evaluation::Evaluating::new(identities, self.authorizing_identity, self.repo);
        let topo = Topo::new(&self.graph);
        let items = topo.iter(&self.graph).map(|idx| {
            let node = &self.graph[idx];
@@ -143,7 +126,6 @@ impl<'a> ChangeGraph<'a> {
            typename,
            history,
            id: self.object_id,
            schema: self.schema_change.schema().clone(),
        }
    }

@@ -166,14 +148,6 @@ impl<'a> ChangeGraph<'a> {
        let for_display = self.graph.map(|_ix, n| n.to_string(), |_ix, _e| "");
        petgraph::dot::Dot::new(&for_display).to_string()
    }

    pub(super) fn schema_commit(&self) -> git2::Oid {
        self.schema_change.commit()
    }

    pub(super) fn schema(&self) -> &Schema {
        self.schema_change.schema()
    }
}

struct GraphBuilder {
@@ -209,7 +183,7 @@ impl GraphBuilder {
            .parents()
            .filter_map(|parent| {
                if parent.id() != author_commit
                    && parent.id() != schema_commit
                    && Some(parent.id()) != schema_commit
                    && parent.id() != authorizing_identity_commit
                    && !self.has_edge(parent.id(), commit.id())
                {
@@ -243,15 +217,14 @@ impl GraphBuilder {
        object_id: ObjectId,
        authorizing_identity: &'b dyn AuthorizingIdentity,
    ) -> Result<Option<ChangeGraph<'b>>, Error> {
        if let Some(root) = self.graph.externals(petgraph::Direction::Incoming).next() {
            let root_change = &self.graph[root];
            let schema_change = SchemaChange::load(root_change.schema_commit(), repo)?;
            if !schema_change.valid_signatures() {
                return Err(Error::InvalidSchemaSignatures);
            }
        if self
            .graph
            .externals(petgraph::Direction::Incoming)
            .next()
            .is_some()
        {
            Ok(Some(ChangeGraph {
                repo,
                schema_change,
                object_id,
                authorizing_identity,
                graph: self.graph,
diff --git a/cob/src/change_graph/evaluation.rs b/cob/src/change_graph/evaluation.rs
index a9e91bc0..6058071b 100644
--- a/cob/src/change_graph/evaluation.rs
+++ b/cob/src/change_graph/evaluation.rs
@@ -10,18 +10,15 @@ use crate::{
    history,
    identity_storage::{lookup_authorizing_identity, lookup_person},
    pruning_fold,
    validated_automerge::{error::ProposalError, ValidatedAutomerge},
    AuthDecision,
    AuthorizingIdentity,
    IdentityStorage,
    Schema,
};

pub struct Evaluating<'a, I: IdentityStorage> {
    identities: &'a I,
    authorizing_identity: &'a dyn AuthorizingIdentity,
    repo: &'a git2::Repository,
    in_progress_history: ValidatedAutomerge,
}

impl<'a, I: IdentityStorage> Evaluating<'a, I> {
@@ -29,13 +26,11 @@ impl<'a, I: IdentityStorage> Evaluating<'a, I> {
        identities: &'a I,
        authorizer: &'a dyn AuthorizingIdentity,
        repo: &'a git2::Repository,
        schema: Schema,
    ) -> Evaluating<'a, I> {
        Evaluating {
            identities,
            authorizing_identity: authorizer,
            repo,
            in_progress_history: ValidatedAutomerge::new(schema),
        }
    }

@@ -111,19 +106,6 @@ impl<'a, I: IdentityStorage> Evaluating<'a, I> {
            },
        };

        // Check that the history the change carries is well formed and does not violate
        // the schema
        match &change.contents() {
            history::EntryContents::Automerge(bytes) => {
                match self.in_progress_history.propose_change(bytes) {
                    Ok(()) => {},
                    Err(e) => {
                        return Err(RejectionReason::InvalidChange(e));
                    },
                }
            },
        };

        Ok(history::HistoryEntry::new(
            *change.commit(),
            author.urn(),
@@ -164,7 +146,6 @@ enum RejectionReason {
    Unauthorized {
        reason: &'static str,
    },
    InvalidChange(ProposalError),
}

impl RejectionReason {
@@ -214,12 +195,6 @@ impl RejectionReason {
                    "rejecting change as it was not authorized"
                );
            },
            RejectionReason::InvalidChange(error) => {
                tracing::warn!(
                    err=?error,
                    "rejecting invalid change"
                );
            },
        }
    }
}
diff --git a/cob/src/change_metadata.rs b/cob/src/change_metadata.rs
deleted file mode 100644
index c7855cdb..00000000
--- a/cob/src/change_metadata.rs
+++ /dev/null
@@ -1,167 +0,0 @@
// Copyright © 2019-2020 The Radicle Foundation <hello@radicle.foundation>
//
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
// Linking Exception. For full terms see the included LICENSE file.

use git_trailers::{parse as parse_trailers, Error as TrailerError, OwnedTrailer, Trailer};
use link_crypto::{BoxedSignError, BoxedSigner};
use link_identities::sign::{error::Signatures as SignaturesError, Signatures};

use thiserror::Error as ThisError;

use std::convert::TryFrom;

/// We represent both changes to a collaborative object and changes to the
/// objects schema as commits. `ChangeMetadata` captures the metadata which is
/// common to both object changes and schema changes
pub(super) struct ChangeMetadata {
    /// The commit where this change lives
    pub(super) commit: git2::Oid,
    /// The OID of the tree the commit points at, we need this to validate the
    /// signatures
    pub(super) revision: git2::Oid,
    /// The signatures of this change
    pub(super) signatures: Signatures,
    /// The OID of the parent commit of this change which points at the author
    /// identity
    pub(super) author_commit: git2::Oid,
    /// The OID of the parent commit which points at the identity this change
    /// was authorized with respect to at the time the change was authored.
    pub(super) authorizing_identity_commit: git2::Oid,
    /// The trailers of the commit. We need to hold on to these so more specific
    /// processing can be performed by specific change types. E.g. the
    /// parsing of the `SchemaCommitTrailer` in `Change::load`
    pub(super) trailers: Vec<OwnedTrailer>,
}

#[derive(Debug, ThisError)]
pub enum LoadError {
    #[error(transparent)]
    Git(#[from] git2::Error),
    #[error(transparent)]
    Signatures(#[from] SignaturesError),
    #[error("non utf-8 characters in commit message")]
    Utf8,
    #[error(transparent)]
    Trailer(#[from] TrailerError),
    #[error(transparent)]
    AuthorTrailer(#[from] super::trailers::error::InvalidAuthorTrailer),
    #[error(transparent)]
    AuthorizingIdentityTrailer(#[from] super::trailers::error::InvalidAuthorizingIdentityTrailer),
}

#[derive(Debug, ThisError)]
pub enum CreateError {
    #[error(transparent)]
    Git(#[from] git2::Error),
    #[error(transparent)]
    Signer(#[from] BoxedSignError),
}

impl TryFrom<&git2::Commit<'_>> for ChangeMetadata {
    type Error = LoadError;

    fn try_from(commit: &git2::Commit) -> Result<Self, Self::Error> {
        let trailers = commit
            .message()
            .ok_or(LoadError::Utf8)
            .and_then(|s| parse_trailers(s, ":").map_err(|e| e.into()))?;
        let owned_trailers: Vec<OwnedTrailer> = trailers.iter().map(OwnedTrailer::from).collect();
        let author_commit_trailer =
            super::trailers::AuthorCommitTrailer::try_from(&owned_trailers[..])?;
        let authorizing_identity_trailer =
            super::trailers::AuthorizingIdentityCommitTrailer::try_from(&owned_trailers[..])?;
        let signatures = Signatures::try_from(trailers)?;
        Ok(ChangeMetadata {
            commit: commit.id(),
            revision: commit.tree_id(),
            signatures,
            author_commit: author_commit_trailer.oid(),
            authorizing_identity_commit: authorizing_identity_trailer.oid(),
            trailers: owned_trailers,
        })
    }
}

pub struct CreateMetadataArgs<'a> {
    pub revision: git2::Oid,
    pub tips: Vec<git2::Oid>,
    pub message: String,
    pub extra_trailers: Vec<Trailer<'a>>,
    pub authorizing_identity_commit: git2::Oid,
    pub author_identity_commit: git2::Oid,
    pub signer: BoxedSigner,
    pub repo: &'a git2::Repository,
}

impl ChangeMetadata {
    /// Create a commit in the underlying repository and return the
    /// corresponding metadata
    pub fn create(
        CreateMetadataArgs {
            revision,
            tips,
            message,
            extra_trailers,
            authorizing_identity_commit,
            author_identity_commit,
            signer,
            repo,
        }: CreateMetadataArgs<'_>,
    ) -> Result<ChangeMetadata, CreateError> {
        let owned_trailers = extra_trailers.iter().map(OwnedTrailer::from).collect();

        let author_commit = repo.find_commit(author_identity_commit)?;
        let tree = repo.find_tree(revision)?;

        let author = repo.signature()?;

        let signatures = link_identities::git::sign(&signer, revision.into())?.into();
        let mut parent_commits = Vec::new();
        let tip_commits = tips
            .iter()
            .map(|o| repo.find_commit(*o))
            .collect::<Result<Vec<git2::Commit>, git2::Error>>()?;
        parent_commits.extend(tip_commits);
        parent_commits.push(author_commit.clone());

        let mut trailers = extra_trailers.clone();
        trailers.push(super::trailers::AuthorCommitTrailer::from(author_commit.id()).into());
        trailers.push(
            super::trailers::AuthorizingIdentityCommitTrailer::from(authorizing_identity_commit)
                .into(),
        );

        let commit = repo.commit(
            None,
            &author,
            &author,
            &link_identities::git::sign::CommitMessage::new(
                message.as_str(),
                &signatures,
                trailers,
            )
            .to_string(),
            &tree,
            &(parent_commits.iter().collect::<Vec<&git2::Commit>>())[..],
        )?;

        Ok(ChangeMetadata {
            revision,
            commit,
            author_commit: author_commit.id(),
            authorizing_identity_commit,
            signatures,
            trailers: owned_trailers,
        })
    }

    pub fn valid_signatures(&self) -> bool {
        for (key, sig) in self.signatures.iter() {
            if !key.verify(sig, self.revision.as_bytes()) {
                return false;
            }
        }
        true
    }
}
diff --git a/cob/src/lib.rs b/cob/src/lib.rs
index 0eb479cf..dd1df8d5 100644
--- a/cob/src/lib.rs
+++ b/cob/src/lib.rs
@@ -20,49 +20,36 @@
//! ## Caching
//!
//! When loading a collaborative object we verify that every change in the hash
//! graph is signed and respects the schema of the object. For repositories with
//! a large number of objects, or a smaller number of objects with a large
//! number of changes, this can become a computationally intensive task. To
//! avoid recalculating the state of every object every time we make a change
//! then, we implement a caching layer. Each of the CRUD methods takes an
//! optional cache directory, this cache implements some basic locking so it's
//! safe to use from multiple processes. We also commit to not making backwards
//! incompatible changes to the chache, so it is safe to upgrade
//! without deleting caches (though the cache may need to be regenerated, we
//! only guarantee that applications will not crash).
//! graph is signed. For repositories with a large number of objects, or a
//! smaller number of objects with a large number of changes, this can become a
//! computationally intensive task. To avoid recalculating the state of every
//! object every time we make a change then, we implement a caching layer. Each
//! of the CRUD methods takes an optional cache directory, this cache implements
//! some basic locking so it's safe to use from multiple processes. We also
//! commit to not making backwards incompatible changes to the cache, so it is
//! safe to upgrade without deleting caches (though the cache may need to be
//! regenerated, we only guarantee that applications will not crash).
//!
//! # Implementation Notes
//!
//! This module starts with the basic value types which are part of the public
//! API: `ObjectId`, `TypeName`, `Schema`, all of which compose a
//! API: `ObjectId`, and `TypeName`, all of which compose a
//! `CollaborativeObject`. When loading a `CollaborativeObject` we attempt to
//! load a graph of the automerge changes that make up the object from
//! references to the object ID in the `RefsStorage` we have been passed. There
//! are two representations of a change graph. Firstly there is
//! `change_graph::ChangeGraph`, which is a full directed graph containing all
//! the commits we can find for the given object. `ChangeGraph`
//! has an `evaluate` method which traverses this directed graph validating each
//! change with respect to their signatures, the schema, and the access control
//! policy (only maintainers may make changes). Secondly there is the
//! `cache::ThinChangeGraph`, this is a representation that contains only the
//! automerge history of a fully evaluated change graph and the OIDs of the tips
//! has an `evaluate` method which traverses this directed graph validating that
//! each change has a valid signature. Secondly there is the
//! `cache::CachedChangeGraph`, this is a representation that contains only the
//! all the valid changes in a valid change graph and the OIDs of the tips
//! of the graph that was used to generate the changes. For any of the CRUD
//! methods we first attempt to load a `ThinChangeGraph` from the cache, and if
//! that fails (either because there is no cached object at all, or because the
//! reference to the tips returned by the `RefsStorage` is different to those
//! that were used to generate the cache) then we fall back to evaluating the
//! full change graph of the object.
//!
//! Individual changes within a `ChangeGraph` are represented by a
//! `change::Change`; whereas changes to a schema (of which we currently only
//! support a single initial change per object) are represented by a
//! `schema_change::SchemaChange`. These types both represent commits with a
//! particular set of trailers and which point to trees containing a particular
//! set of objects. Both `SchemaChange`s and `Change`s share some common data,
//! so they are both implemented as extensions to a
//! `change_metadata::ChangeMetadata`, which encapsulates the common logic.
//! These types make use of the logic in `trailers`, which defines some
//! wrapper types around trailers which are `git2::Oid` valued.
//! methods we first attempt to load a `CachedChangeGraph` from the cache, and
//! if that fails (either because there is no cached object at all, or because
//! the reference to the tips returned by the `RefsStorage` is different to
//! those that were used to generate the cache) then we fall back to evaluating
//! the full change graph of the object.

use std::{cell::RefCell, collections::BTreeSet, convert::TryFrom, fmt, rc::Rc, str::FromStr};

@@ -75,30 +62,20 @@ use radicle_git_ext as ext;
mod authorizing_identity;
pub use authorizing_identity::{AuthDecision, AuthorizingIdentity};

mod change_metadata;
mod trailers;

mod change_graph;
use change_graph::ChangeGraph;

pub mod schema;
pub use schema::Schema;

mod change;
use change::Change;

mod schema_change;
use schema_change::SchemaChange;

mod refs_storage;
pub use refs_storage::{ObjectRefs, RefsStorage};

mod cache;
use cache::{Cache, CachedChangeGraph};

mod validated_automerge;
use validated_automerge::ValidatedAutomerge;

mod identity_storage;
pub use identity_storage::IdentityStorage;

@@ -111,10 +88,7 @@ pub mod internals {
    //! This module exposes implementation details of the collaborative object
    //! crate for use in testing

    pub use super::{
        cache::{Cache, CachedChangeGraph, FileSystemCache},
        validated_automerge::ValidatedAutomerge,
    };
    pub use super::cache::{Cache, CachedChangeGraph, FileSystemCache};
}

/// The typename of an object. Valid typenames MUST be sequences of alphanumeric
@@ -230,9 +204,6 @@ pub struct CollaborativeObject {
    history: History,
    /// The id of the object
    id: ObjectId,
    /// The schema any changes to this object must respect
    #[allow(unused)]
    schema: Schema,
}

impl From<Rc<RefCell<CachedChangeGraph>>> for CollaborativeObject {
@@ -243,7 +214,6 @@ impl From<Rc<RefCell<CachedChangeGraph>>> for CollaborativeObject {
            typename: tg.typename().clone(),
            history: tg.history().clone(),
            id: tg.object_id(),
            schema: tg.schema().clone(),
        }
    }
}
@@ -276,13 +246,7 @@ pub struct ChangeGraphInfo {
}

pub mod error {
    pub use super::schema::error::Parse as SchemaParse;
    use super::{
        cache::Error as CacheError,
        change,
        change_graph::Error as ChangeGraphError,
        schema_change,
    };
    use super::{cache::Error as CacheError, change, change_graph::Error as ChangeGraphError};
    use thiserror::Error;

    use radicle_git_ext::FromMultihashError as ExtOidFromMultiHashError;
@@ -296,14 +260,10 @@ pub mod error {
        #[error("Invalid automerge history")]
        InvalidAutomergeHistory,
        #[error(transparent)]
        CreateSchemaChange(#[from] schema_change::error::Create),
        #[error(transparent)]
        CreateChange(#[from] change::error::Create),
        #[error(transparent)]
        Refs(RefsError),
        #[error(transparent)]
        Propose(#[from] super::validated_automerge::error::ProposalError),
        #[error(transparent)]
        Cache(#[from] CacheError),
        #[error(transparent)]
        Io(#[from] std::io::Error),
@@ -340,8 +300,6 @@ pub mod error {
        #[error(transparent)]
        Git(#[from] git2::Error),
        #[error(transparent)]
        Propose(#[from] super::validated_automerge::error::ProposalError),
        #[error(transparent)]
        Io(#[from] std::io::Error),
        #[error("signer must belong to the author")]
        SignerIsNotAuthor,
@@ -362,8 +320,6 @@ pub mod error {

/// The data required to create a new object
pub struct CreateObjectArgs<'a, R: RefsStorage, P: AsRef<std::path::Path>> {
    /// A valid JSON schema which uses the vocabulary at <https://alexjg.github.io/automerge-jsonschema/spec>
    pub schema: Schema,
    /// The CRDT history to initialize this object with
    pub contents: EntryContents,
    /// The typename for this object
@@ -387,9 +343,8 @@ pub struct CreateObjectArgs<'a, R: RefsStorage, P: AsRef<std::path::Path>> {
}

impl<'a, R: RefsStorage, P: AsRef<std::path::Path>> CreateObjectArgs<'a, R, P> {
    fn change_spec(&self, schema_commit: git2::Oid) -> change::NewChangeSpec {
    fn change_spec(&self) -> change::NewChangeSpec {
        change::NewChangeSpec {
            schema_commit,
            typename: self.typename.clone(),
            tips: None,
            message: self.message.clone(),
@@ -409,29 +364,17 @@ pub fn create_object<R: RefsStorage, P: AsRef<std::path::Path>>(
        authorizing_identity,
        ref contents,
        ref typename,
        ref schema,
        ..
    } = args;
    if !is_signer_for(signer, author) {
        return Err(error::Create::SignerIsNotAuthor);
    }
    let schema_change = schema_change::SchemaChange::create(
        authorizing_identity.content_id(),
        author.content_id.into(),
        repo,
        signer,
        schema.clone(),
    )?;

    let mut valid_history = ValidatedAutomerge::new(schema.clone());
    valid_history.propose_change(contents.as_ref())?;

    let init_change = change::Change::create(
        authorizing_identity.content_id(),
        author.content_id.into(),
        repo,
        signer,
        args.change_spec(schema_change.commit()),
        args.change_spec(),
    )
    .map_err(error::Create::from)?;

@@ -449,8 +392,6 @@ pub fn create_object<R: RefsStorage, P: AsRef<std::path::Path>>(
    let mut cache = open_cache(args.cache_dir)?;
    let cached_graph = CachedChangeGraph::new(
        std::iter::once(init_change.author_commit()),
        schema.clone(),
        init_change.schema_commit(),
        history,
        typename.clone(),
        object_id,
@@ -462,7 +403,6 @@ pub fn create_object<R: RefsStorage, P: AsRef<std::path::Path>>(
        authorizing_identity_urn: authorizing_identity.urn(),
        typename: args.typename,
        history,
        schema: args.schema,
        id: init_change.commit().into(),
    })
}
@@ -603,8 +543,6 @@ pub fn update<R: RefsStorage, I: IdentityStorage, P: AsRef<std::path::Path>>(
    .load_or_materialize::<error::Update<R::Error>, _>(identity_storage, cache.as_mut(), repo)?
    .ok_or(error::Update::NoSuchObject)?;

    cached.borrow_mut().propose_change(&changes)?;

    let change = change::Change::create(
        authorizing_identity.content_id(),
        author.content_id.into(),
@@ -612,7 +550,6 @@ pub fn update<R: RefsStorage, I: IdentityStorage, P: AsRef<std::path::Path>>(
        signer,
        change::NewChangeSpec {
            tips: Some(cached.borrow().tips().iter().cloned().collect()),
            schema_commit: cached.borrow().schema_commit(),
            contents: changes.clone(),
            typename: typename.clone(),
            message,
@@ -722,8 +659,6 @@ impl<'a> CobRefs<'a> {
                    let object = graph.evaluate(identity_storage);
                    let cached = cache::CachedChangeGraph::new(
                        tip_oids,
                        graph.schema().clone(),
                        graph.schema_commit(),
                        object.history.clone(),
                        self.typename.clone(),
                        self.oid,
diff --git a/cob/src/schema.rs b/cob/src/schema.rs
deleted file mode 100644
index 37dd738e..00000000
--- a/cob/src/schema.rs
@@ -1,496 +0,0 @@
// Copyright © 2021 The Radicle Link Contributors
//
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
// Linking Exception. For full terms see the included LICENSE file.

use std::{
    convert::{TryFrom, TryInto},
    fmt,
};

#[derive(Debug)]
pub struct Schema {
    json: serde_json::Value,
    schema: jsonschema::JSONSchema,
}

impl PartialEq for Schema {
    fn eq(&self, other: &Self) -> bool {
        self.json == other.json
    }
}

impl Schema {
    pub fn json_bytes(&self) -> Vec<u8> {
        self.json.to_string().as_bytes().into()
    }

    pub fn validate(&self, doc: &mut automerge::Frontend) -> Result<(), error::ValidationErrors> {
        let value = doc.state().to_json();
        let output = self.schema.apply(&value).basic();
        match output {
            jsonschema::output::BasicOutput::Invalid(_) => self
                .schema
                .validate(&value)
                .map_err(error::ValidationErrors::from),
            jsonschema::output::BasicOutput::Valid(annotations) => {
                for annotation in annotations {
                    if let serde_json::Value::Object(kvs) = annotation.value().as_ref() {
                        if let Some(serde_json::Value::String(s)) = kvs.get("automerge_type") {
                            if s.as_str() == "string" {
                                let value = lookup_value(doc, annotation.instance_location());
                                if !matches!(
                                    value,
                                    Some(automerge::Value::Primitive(automerge::Primitive::Str(_)))
                                ) {
                                    return Err(error::ValidationErrors {
                                        errors: vec![ValidationError {
                                            instance_path: annotation.instance_location().clone(),
                                            description: "Value must be of type 'string'"
                                                .to_string(),
                                        }],
                                    });
                                }
                            }
                        }
                    }
                }
                Ok(())
            },
        }
    }
}

impl Clone for Schema {
    fn clone(&self) -> Self {
        Schema {
            json: self.json.clone(),
            // The unwrap here is fine as we've already validated the schema during construction
            schema: jsonschema::JSONSchema::compile(&self.json).unwrap(),
        }
    }
}

#[derive(Debug)]
pub struct ValidationError {
    instance_path: jsonschema::paths::JSONPointer,
    description: String,
}

impl fmt::Display for ValidationError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}: {}", self.instance_path, self.description)
    }
}

impl<'a> From<jsonschema::ValidationError<'a>> for ValidationError {
    fn from(e: jsonschema::ValidationError<'a>) -> Self {
        ValidationError {
            instance_path: e.instance_path.clone(),
            description: e.to_string(),
        }
    }
}

pub mod error {
    use super::ValidationError;
    use thiserror::Error;

    #[derive(Debug, Error)]
    pub enum Parse {
        #[error(transparent)]
        Serde(#[from] serde_json::error::Error),
        #[error("invalid schema: {0}")]
        Validation(String),
        #[error("schemas must have exactly one $vocabulary: https://alexjg.github.io/automerge-jsonschema/spec")]
        InvalidVocabulary,
        #[error("invalid keyword {keyword} at {path}")]
        InvalidKeyword { path: String, keyword: String },
    }

    #[derive(Debug, Error)]
    #[error("{errors:?}")]
    pub struct ValidationErrors {
        pub(super) errors: Vec<ValidationError>,
    }

    impl<'a, I> From<I> for ValidationErrors
    where
        I: Iterator<Item = jsonschema::ValidationError<'a>>,
    {
        fn from(errors: I) -> Self {
            ValidationErrors {
                errors: errors.map(ValidationError::from).collect(),
            }
        }
    }
}

impl TryFrom<&serde_json::Value> for Schema {
    type Error = error::Parse;

    fn try_from(value: &serde_json::Value) -> Result<Self, Self::Error> {
        if let serde_json::Value::Object(kvs) = value {
            if let Some(serde_json::Value::Object(vocabs)) = kvs.get("$vocabulary") {
                if vocabs.len() != 1 {
                    return Err(error::Parse::InvalidVocabulary);
                }
                if let Some(serde_json::Value::Bool(true)) =
                    vocabs.get("https://alexjg.github.io/automerge-jsonschema/spec")
                {
                } else {
                    return Err(error::Parse::InvalidVocabulary);
                }
                validate_keywords(Path::Root, value)?;
            } else {
                return Err(error::Parse::InvalidVocabulary);
            }
        }
        jsonschema::JSONSchema::compile(value)
            .map(|s| Schema {
                json: value.clone(),
                schema: s,
            })
            .map_err(|e| error::Parse::Validation(e.to_string()))
    }
}

impl TryFrom<&[u8]> for Schema {
    type Error = error::Parse;

    fn try_from(bytes: &[u8]) -> Result<Self, Self::Error> {
        let json: serde_json::Value = serde_json::from_slice(bytes)?;
        (&json).try_into()
    }
}

#[derive(Clone)]
enum PathChunk<'a> {
    Keyword(&'static str),
    ArrayIndex(usize),
    ObjectProperty(&'a String),
}

impl<'a> fmt::Display for PathChunk<'a> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Keyword(k) => write!(f, "{}", k),
            Self::ArrayIndex(i) => write!(f, "{}", i),
            Self::ObjectProperty(p) => write!(f, "{}", p),
        }
    }
}

#[derive(Clone)]
enum Path<'a> {
    Root,
    Child {
        chunk: PathChunk<'a>,
        parent: &'a Path<'a>,
    },
}

impl<'a> Path<'a> {
    fn push(&'a self, chunk: PathChunk<'a>) -> Path<'a> {
        Path::Child {
            parent: self,
            chunk,
        }
    }

    fn to_vec(&'a self) -> Vec<&'a PathChunk<'a>> {
        match self {
            Self::Root => Vec::new(),
            Self::Child { chunk, parent } => {
                let mut result = vec![chunk];
                let mut current_parent = parent;
                while let Path::Child { chunk, parent } = current_parent {
                    current_parent = parent;
                    result.push(chunk);
                }
                result.reverse();
                result
            },
        }
    }
}

impl<'a> fmt::Display for Path<'a> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let as_str = self
            .to_vec()
            .iter()
            .map(|c| c.to_string())
            .collect::<Vec<String>>()
            .join("/");
        write!(f, "{}", as_str)
    }
}

/// This enum represents all the applicator keywords defined in the core json
/// vocabulary with the exception of "contains" and "prefixItems", which are not
/// allowed by the automerge vocabulary.
#[derive(Debug)]
enum Applicator<'a> {
    AllOf(&'a [serde_json::Value]),
    AnyOf(&'a [serde_json::Value]),
    OneOf(&'a [serde_json::Value]),
    Not(&'a serde_json::Value),
    IfCondition(&'a serde_json::Value),
    ThenClause(&'a serde_json::Value),
    ElseClause(&'a serde_json::Value),
    DependentSchemas(&'a serde_json::Value),
    Items(&'a serde_json::Value),
    Properties(&'a serde_json::Map<String, serde_json::Value>),
    PatternProperties(&'a serde_json::Value),
    AdditionalProperties(&'a serde_json::Value),
    PropertyNames(&'a serde_json::Value),
    UnevaluatedItems(&'a serde_json::Value),
}

impl<'a> Applicator<'a> {
    fn from_keyword<A: AsRef<str>>(
        keyword: A,
        value: &'a serde_json::Value,
    ) -> Option<Applicator<'a>> {
        match (keyword.as_ref(), value) {
            ("allOf", serde_json::Value::Array(vals)) => Some(Applicator::AllOf(vals)),
            ("anyOf", serde_json::Value::Array(vals)) => Some(Applicator::AnyOf(vals)),
            ("OneOf", serde_json::Value::Array(vals)) => Some(Applicator::OneOf(vals)),
            ("not", props) => Some(Applicator::Not(props)),
            ("if", value) => Some(Applicator::IfCondition(value)),
            ("then", value) => Some(Applicator::ThenClause(value)),
            ("else", value) => Some(Applicator::ElseClause(value)),
            ("dependentSchemas", props) => Some(Applicator::DependentSchemas(props)),
            ("items", value) => Some(Applicator::Items(value)),
            ("properties", serde_json::Value::Object(props)) => Some(Applicator::Properties(props)),
            ("patternProperties", props) => Some(Applicator::PatternProperties(props)),
            ("additionalProperties", props) => Some(Applicator::AdditionalProperties(props)),
            ("propertyNames", props) => Some(Applicator::PropertyNames(props)),
            ("unevaluatedItems", props) => Some(Applicator::UnevaluatedItems(props)),
            _ => None,
        }
    }

    fn keyword(&self) -> &'static str {
        match self {
            Self::AllOf(..) => "allOf",
            Self::AnyOf(..) => "anyOf",
            Self::OneOf(..) => "oneOf",
            Self::Not(..) => "not",
            Self::IfCondition(..) => "ifCondition",
            Self::ThenClause(..) => "thenClause",
            Self::ElseClause(..) => "elseClause",
            Self::DependentSchemas(..) => "dependentSchemas",
            Self::Items(..) => "items",
            Self::Properties(..) => "properties",
            Self::PatternProperties(..) => "patternProperties",
            Self::AdditionalProperties(..) => "additionalProperties",
            Self::PropertyNames(..) => "propertyNames",
            Self::UnevaluatedItems(..) => "unevaluatedItems",
        }
    }

    fn children(&'a self) -> ApplicatorChildren<'a> {
        match self {
            Applicator::AllOf(values) => self.array_children(values),
            Applicator::AnyOf(values) => self.array_children(values),
            Applicator::OneOf(values) => self.array_children(values),
            Applicator::Not(value) => self.object_children(value),
            Applicator::IfCondition(cond) => self.object_children(cond),
            Applicator::ThenClause(clause) => self.object_children(clause),
            Applicator::ElseClause(clause) => self.object_children(clause),
            Applicator::DependentSchemas(value) => self.object_children(value),
            Applicator::Items(items) => self.object_children(items),
            Applicator::Properties(kvs) => ApplicatorChildren::Multiple(Box::new(
                kvs.iter().map(|(k, v)| (PathChunk::ObjectProperty(k), v)),
            )),
            Applicator::PatternProperties(kvs) => self.object_children(kvs),
            Applicator::AdditionalProperties(schema) => self.object_children(schema),
            Applicator::PropertyNames(schema) => self.object_children(schema),
            Applicator::UnevaluatedItems(schema) => self.object_children(schema),
        }
    }

    fn object_children(&'a self, props: &'a serde_json::Value) -> ApplicatorChildren<'a> {
        ApplicatorChildren::Single(props)
    }

    fn array_children(&'a self, values: &'a [serde_json::Value]) -> ApplicatorChildren<'a> {
        ApplicatorChildren::Multiple(Box::new(
            values
                .iter()
                .enumerate()
                .map(|(i, v)| (PathChunk::ArrayIndex(i), v)),
        ))
    }
}

enum ApplicatorChildren<'a> {
    Multiple(Box<dyn Iterator<Item = (PathChunk<'a>, &'a serde_json::Value)> + 'a>),
    Single(&'a serde_json::Value),
}

/// Validator keywords allowed by <https://alexjg.github.io/automerge-jsonschema/spec>
enum Validator {
    Type,
    Enum,
    Const,
    MultipleOf,
    Maximum,
    ExclusiveMaximum,
    Minimum,
    ExclusiveMinimum,
    Required,
    DependentRequired,
    AutomergeType,
}

impl Validator {
    fn from_keyword<A: AsRef<str>>(keyword: A) -> Option<Validator> {
        match keyword.as_ref() {
            "type" => Some(Validator::Type),
            "enum" => Some(Validator::Enum),
            "const" => Some(Validator::Const),
            "multipleOf" => Some(Validator::MultipleOf),
            "maximum" => Some(Validator::Maximum),
            "exclusiveMaximum" => Some(Validator::ExclusiveMaximum),
            "minimum" => Some(Validator::Minimum),
            "exclusiveMinimum" => Some(Validator::ExclusiveMinimum),
            "required" => Some(Validator::Required),
            "dependentRequired" => Some(Validator::DependentRequired),
            "automerge_type" => Some(Validator::AutomergeType),
            _ => None,
        }
    }
}

/// Validator keywords which are allowed provided the underlying automerge type
/// is "string"
enum StringValidator {
    MaxLength,
    MinLength,
    Pattern,
    Format,
    ContentEncoding,
    ContentMediaType,
    ContentSchema,
}

impl StringValidator {
    fn from_keyword<A: AsRef<str>>(keyword: A) -> Option<StringValidator> {
        match keyword.as_ref() {
            "maxLength" => Some(StringValidator::MaxLength),
            "minLength" => Some(StringValidator::MinLength),
            "pattern" => Some(StringValidator::Pattern),
            "format" => Some(StringValidator::Format),
            "contentEncoding" => Some(StringValidator::ContentEncoding),
            "contentMediaType" => Some(StringValidator::ContentMediaType),
            "contentSchema" => Some(StringValidator::ContentSchema),
            _ => None,
        }
    }
}

enum MetaKeyword {
    Schema,
    Vocabulary,
    Id,
    Defs,
    Ref,
    DynamicRef,
    Comment,
}

impl MetaKeyword {
    fn from_keyword<A: AsRef<str>>(keyword: A) -> Option<MetaKeyword> {
        match keyword.as_ref() {
            "$schema" => Some(Self::Schema),
            "$vocabulary" => Some(Self::Vocabulary),
            "$id" => Some(Self::Id),
            "$defs" => Some(Self::Defs),
            "$ref" => Some(Self::Ref),
            "$dynamicRef" => Some(Self::DynamicRef),
            "$comment" => Some(Self::Comment),
            _ => None,
        }
    }
}

/// Check that the schema is a valid <https://alexjg.github.io/automerge-jsonschema/spec> schema. We
/// iterate over each of the keys in the object and:
///
/// - If we encounter a validator keyword (i.e a keyword which is not an
///   applicator) we check that it is one of the keywords allowed by the
///   vocabulary. Some keywords are only allowed if the underlying automerge
///   type is "string", which is asserted by a sibling keyword "automerge_type",
///   so we check that sibling is present for the relevant keywords.
/// - If we encounter an applicator keyword (a keyword which composes
///   subschemas) we check that the applicator is allowed by the vocabulary.
///   Then we check that the subschemas it is composed of are valid with respect
///   to the vocabulary
fn validate_keywords(path: Path<'_>, value: &serde_json::Value) -> Result<(), error::Parse> {
    if let serde_json::Value::Object(props) = value {
        for (prop, value) in props {
            if Validator::from_keyword(prop).is_some() {
                continue;
            }
            if let Some(meta_kw) = MetaKeyword::from_keyword(prop) {
                if let MetaKeyword::Defs = meta_kw {
                    if let serde_json::Value::Object(kvs) = value {
                        let path = path.push(PathChunk::Keyword("$defs"));
                        for (prop, value) in kvs {
                            validate_keywords(path.push(PathChunk::ObjectProperty(prop)), value)?
                        }
                    }
                };
                continue;
            }
            if StringValidator::from_keyword(prop).is_some() {
                if let Some("string") = props.get("automerge_type").and_then(|v| v.as_str()) {
                    continue;
                }
            }
            if let Some(applicator) = Applicator::from_keyword(prop, value) {
                let path = path.push(PathChunk::Keyword(applicator.keyword()));
                match applicator.children() {
                    ApplicatorChildren::Single(props) => {
                        validate_keywords(path.push(PathChunk::ObjectProperty(prop)), props)?;
                    },
                    ApplicatorChildren::Multiple(values) => {
                        for (chunk, value) in values {
                            validate_keywords(path.push(chunk), value)?;
                        }
                    },
                }
                continue;
            }
            return Err(error::Parse::InvalidKeyword {
                path: path.push(PathChunk::ObjectProperty(prop)).to_string(),
                keyword: prop.clone(),
            });
        }
    }
    Ok(())
}

fn lookup_value(
    doc: &automerge::Frontend,
    path: &jsonschema::paths::JSONPointer,
) -> Option<automerge::Value> {
    let mut automerge_path = automerge::Path::root();
    for chunk in path.iter() {
        match chunk {
            jsonschema::paths::PathChunk::Keyword(s) => {
                automerge_path = automerge_path.key(*s);
            },
            jsonschema::paths::PathChunk::Property(s) => {
                automerge_path = automerge_path.key(s.as_ref());
            },
            jsonschema::paths::PathChunk::Index(i) => {
                automerge_path = automerge_path.index((*i) as u32);
            },
        }
    }
    doc.get_value(&automerge_path)
}
diff --git a/cob/src/schema_change.rs b/cob/src/schema_change.rs
deleted file mode 100644
index 9162a3d1..00000000
--- a/cob/src/schema_change.rs
@@ -1,106 +0,0 @@
// Copyright © 2019-2020 The Radicle Foundation <hello@radicle.foundation>
//
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
// Linking Exception. For full terms see the included LICENSE file.

use std::convert::TryFrom;

use super::{
    change_metadata::{self, ChangeMetadata, CreateMetadataArgs},
    Schema,
};

use link_crypto::BoxedSigner;

pub mod error {
    use super::{super::schema::error as schema_error, change_metadata};
    use thiserror::Error as ThisError;

    #[derive(Debug, ThisError)]
    pub enum Create {
        #[error(transparent)]
        Git(#[from] git2::Error),
        #[error(transparent)]
        Commit(#[from] change_metadata::CreateError),
    }

    #[derive(Debug, ThisError)]
    pub enum Load {
        #[error(transparent)]
        Git(#[from] git2::Error),
        #[error(transparent)]
        Metadata(#[from] change_metadata::LoadError),
        #[error("no schema.json in commit tree")]
        NoSchemaJson,
        #[error("schema.json was not a blob")]
        SchemaNotBlob,
        #[error("invalid schema in schema.json: {0}")]
        InvalidSchema(#[from] schema_error::Parse),
    }
}

pub(super) struct SchemaChange {
    metadata: ChangeMetadata,
    schema: Schema,
}

const SCHEMA_BLOB_NAME: &str = "schema.json";

impl SchemaChange {
    pub fn create(
        authorizing_identity_commit: git2::Oid,
        author_identity_commit: git2::Oid,
        repo: &git2::Repository,
        signer: &BoxedSigner,
        schema: Schema,
    ) -> Result<SchemaChange, error::Create> {
        let mut tb = repo.treebuilder(None)?;
        let schema_oid = repo.blob(&schema.json_bytes())?;
        tb.insert(SCHEMA_BLOB_NAME, schema_oid, git2::FileMode::Blob.into())?;

        let revision = tb.write()?;

        let metadata = ChangeMetadata::create(CreateMetadataArgs {
            revision,
            tips: Vec::new(),
            message: "create schema".to_string(),
            extra_trailers: Vec::new(),
            authorizing_identity_commit,
            author_identity_commit,
            signer: signer.clone(),
            repo,
        })?;

        Ok(SchemaChange { metadata, schema })
    }

    pub fn load(
        commit_id: git2::Oid,
        repo: &git2::Repository,
    ) -> Result<SchemaChange, error::Load> {
        let commit = repo.find_commit(commit_id)?;
        let metadata = change_metadata::ChangeMetadata::try_from(&commit)?;
        let tree = repo.find_tree(metadata.revision)?;

        let schema_tree_entry = tree
            .get_name(SCHEMA_BLOB_NAME)
            .ok_or(error::Load::NoSchemaJson)?;
        let schema_object = schema_tree_entry.to_object(repo)?;
        let schema_blob = schema_object.as_blob().ok_or(error::Load::SchemaNotBlob)?;
        let schema = Schema::try_from(schema_blob.content())?;

        Ok(SchemaChange { metadata, schema })
    }

    pub fn commit(&self) -> git2::Oid {
        self.metadata.commit
    }

    pub fn schema(&self) -> &Schema {
        &self.schema
    }

    pub fn valid_signatures(&self) -> bool {
        self.metadata.valid_signatures()
    }
}
diff --git a/cob/src/validated_automerge.rs b/cob/src/validated_automerge.rs
deleted file mode 100644
index da3ce869..00000000
--- a/cob/src/validated_automerge.rs
@@ -1,128 +0,0 @@
// Copyright © 2021 The Radicle Link Contributors
//
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
// Linking Exception. For full terms see the included LICENSE file.

use super::Schema;

use std::convert::TryFrom;

pub mod error {
    use super::super::schema::error::Parse as SchemaParseError;
    use thiserror::Error;

    #[derive(Debug, Error)]
    pub enum LoadError {
        #[error(transparent)]
        AutomergeBackend(#[from] automerge::BackendError),
        #[error(transparent)]
        AutomergeFrontend(#[from] automerge::FrontendError),
        #[error(transparent)]
        InvalidPatch(#[from] automerge::InvalidPatch),
        #[error(transparent)]
        SchemaParse(#[from] SchemaParseError),
    }

    #[derive(Debug, Error)]
    pub enum ProposalError {
        #[error("invalid change: {0}")]
        InvalidChange(Box<dyn std::error::Error>),
        #[error("invalidates schema: {0}")]
        InvalidatesSchema(Box<dyn std::error::Error>),
        #[error("there are missing dependencies: {missing:?}")]
        MissingDependencies { missing: Vec<automerge::ChangeHash> },
    }
}

/// A history which is valid with respect to a schema and allows fallibly
/// proposing a new change
///
/// The main purpose of this is to cache the backend and frontend for use when
/// the change does not invalidate the schema (presumably the common case). This
/// is necessary because loading a schema invalidating change requires throwing
/// away the backend and reloading it, which is very wasteful for the happy
/// path.
///
/// There are a number of unwraps which are currently unavoidable due to
/// deficiencies in Automerge's API. Let me explain. Automerge is currently
/// architected as a "frontend" and a "backend". These are components which can
/// run in different processes and even in different languages, so they
/// communicate with each other via (possibly) serialized data structures. The
/// backend stores the entire history of the document and emits patch to the
/// frontend which just has the currently realized state, the frontend in turn
/// generates "changes", which are sent to the backend.  Consequently there are
/// a lot of methods on the frontend and backend which are fallible, even though
/// in our case (everything on one thread, with no serialization or other
/// messing with the data structures involved) there is no possibility of an
/// error.
///
/// This is not an ideal situation and there are plans to update the automerge
/// API to fix this unfortunate state of affairs, as well as increasing
/// performance by an order of magnitude or more. Until then we must make do
/// with a long prose explanation of why the unwraps are okay.
#[derive(Debug)]
pub struct ValidatedAutomerge {
    backend: automerge::Backend,
    frontend: automerge::Frontend,
    schema: Schema,
    valid_history: Vec<u8>,
}

impl ValidatedAutomerge {
    pub(crate) fn new(schema: Schema) -> ValidatedAutomerge {
        ValidatedAutomerge {
            backend: automerge::Backend::new(),
            frontend: automerge::Frontend::new(),
            valid_history: Vec::new(),
            schema,
        }
    }

    pub(crate) fn propose_change(
        &mut self,
        change_bytes: &[u8],
    ) -> Result<(), error::ProposalError> {
        let change = automerge::Change::try_from(change_bytes)
            .map_err(|e| error::ProposalError::InvalidChange(Box::new(e)))?;
        let old_backend = self.backend.clone();
        let patch = self
            .backend
            .apply_changes(vec![change])
            .map_err(|e| error::ProposalError::InvalidChange(Box::new(e)))?;
        // This can only go wrong if the patch is delivered out of order, which we
        // promise we aren't doing
        self.frontend.apply_patch(patch).unwrap();
        let validation_error = self.schema.validate(&mut self.frontend).err();
        match validation_error {
            None => {
                self.valid_history.extend(change_bytes);
            },
            Some(e) => {
                let value = self.frontend.state();
                tracing::debug!(invalid_json=?value.to_json().to_string(), "change invalidated schema");
                self.reset(old_backend);
                return Err(error::ProposalError::InvalidatesSchema(Box::new(e)));
            },
        }
        let missing_deps = self.backend.get_missing_deps(&[]);
        if !missing_deps.is_empty() {
            self.reset(old_backend);
            return Err(error::ProposalError::MissingDependencies {
                missing: missing_deps,
            });
        }
        self.valid_history = self.backend.save().unwrap();
        Ok(())
    }

    fn reset(&mut self, old_backend: automerge::Backend) {
        self.backend = old_backend;
        let mut old_frontend = automerge::Frontend::new();
        // This can only happen if an invalid document is loaded, but we know the
        // backend is in a good state as we had already previously generated a
        // patch from it.
        let patch = self.backend.get_patch().unwrap();
        old_frontend.apply_patch(patch).unwrap();
        self.frontend = old_frontend;
    }
}
diff --git a/cob/t/src/tests.rs b/cob/t/src/tests.rs
index 2e2f39bb..b846c7ff 100644
--- a/cob/t/src/tests.rs
+++ b/cob/t/src/tests.rs
@@ -5,7 +5,6 @@

mod cache;
mod cached_change_graph;
mod schema;

use cob::TypeName;
use std::str::FromStr;
diff --git a/cob/t/src/tests/cache.rs b/cob/t/src/tests/cache.rs
index 5052970c..b8c98d9b 100644
--- a/cob/t/src/tests/cache.rs
+++ b/cob/t/src/tests/cache.rs
@@ -6,28 +6,12 @@
use cob::{
    internals::{Cache, CachedChangeGraph, FileSystemCache},
    ObjectId,
    Schema,
};
use lazy_static::lazy_static;
use rand::Rng;
use std::{cell::RefCell, convert::TryFrom, env::temp_dir, rc::Rc};
use std::{cell::RefCell, env::temp_dir, rc::Rc};

use crate::helpers::{random_history, random_oid};

lazy_static! {
    static ref SCHEMA: Schema = Schema::try_from(&serde_json::json!({
        "$vocabulary": {
            "https://alexjg.github.io/automerge-jsonschema/spec": true,
        },
        "type": "object",
        "properties": {
            "name": { "type": "string" }
        },
        "required": ["name"]
    }))
    .unwrap();
}

struct CacheTestEnv {
    states: Vec<CachedChangeGraph>,
    dir: std::path::PathBuf,
@@ -82,14 +66,11 @@ fn test_load_returns_none_if_refs_dont_match() {

fn object_state(name: &'static str) -> CachedChangeGraph {
    let tips = [0..10].iter().map(|_| random_oid());
    let schema_commit = random_oid();
    let history = random_history(name);
    let urn = radicle_git_ext::Oid::from(random_oid()).into();
    CachedChangeGraph {
        refs: tips.collect(),
        history,
        schema_commit,
        schema: SCHEMA.clone(),
        typename: "some.type.name".parse().unwrap(),
        object_id: random_oid().into(),
        authorizing_identity_urn: urn,
diff --git a/cob/t/src/tests/cached_change_graph.rs b/cob/t/src/tests/cached_change_graph.rs
index d8986f01..5dbf17b8 100644
--- a/cob/t/src/tests/cached_change_graph.rs
+++ b/cob/t/src/tests/cached_change_graph.rs
@@ -3,8 +3,8 @@
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
// Linking Exception. For full terms see the included LICENSE file.

use cob::{internals::CachedChangeGraph, Schema};
use std::{collections::BTreeSet, convert::TryFrom};
use cob::internals::CachedChangeGraph;
use std::collections::BTreeSet;

use minicbor::Decode;

@@ -14,21 +14,9 @@ use crate::helpers::random_history;
fn test_cached_change_graph_encode_decode() {
    let commit = git2::Oid::from_str("f41a052ad0a6b8a17ddae486cf2322cc48215222").unwrap();
    let some_urn = radicle_git_ext::Oid::from(commit).into();
    let schema = Schema::try_from(&serde_json::json!({
        "$vocabulary": {
            "https://alexjg.github.io/automerge-jsonschema/spec": true,
        },
        "type": "object",
        "properties": {
            "name": {"type": "string"}
        }
    }))
    .unwrap();
    let g = CachedChangeGraph {
        history: random_history("somename"),
        refs: BTreeSet::new(),
        schema_commit: commit,
        schema,
        object_id: commit.into(),
        typename: "some.type.name".parse().unwrap(),
        authorizing_identity_urn: some_urn,
diff --git a/cob/t/src/tests/schema.rs b/cob/t/src/tests/schema.rs
deleted file mode 100644
index db5592d0..00000000
--- a/cob/t/src/tests/schema.rs
@@ -1,258 +0,0 @@
// Copyright © 2021 The Radicle Link Contributors
//
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
// Linking Exception. For full terms see the included LICENSE file.

use std::convert::TryFrom;

#[test]
fn valid_schema_can_be_parsed() {
    let schema = serde_json::json!({
        "$vocabulary": {
            "https://alexjg.github.io/automerge-jsonschema/spec": true,
        },
        "type": "object",
        "properties": {
            "name": {"type": "string"}
        }
    });
    assert!(cob::Schema::try_from(&schema).is_ok());
}

#[test]
fn missing_vocab_fails() {
    let schema = serde_json::json!({
        "type": "object",
        "properties": {
            "name": {"type": "string"}
        }
    });
    assert!(matches!(
        cob::Schema::try_from(&schema),
        Err(cob::schema::error::Parse::InvalidVocabulary)
    ));
}

#[test]
fn non_required_automerge_vocab_fails() {
    let schema = serde_json::json!({
        "$vocabulary": {
            "https://alexjg.github.io/automerge-jsonschema/spec": false,
        },
        "type": "object",
        "properties": {
            "name": {"type": "string"}
        }
    });
    assert!(matches!(
        cob::Schema::try_from(&schema),
        Err(cob::schema::error::Parse::InvalidVocabulary)
    ));
}

#[test]
fn other_vocabs_fails() {
    let schema = serde_json::json!({
        "$vocabulary": {
            "https://alexjg.github.io/automerge-jsonschema/spec": true,
            "https://json-schema.org/draft/2020-12/schema": true
        },
        "type": "object",
        "properties": {
            "name": {"type": "string"}
        }
    });
    assert!(matches!(
        cob::Schema::try_from(&schema),
        Err(cob::schema::error::Parse::InvalidVocabulary)
    ));
}

#[test]
fn invalid_keywords_raise_error() {
    let schema = serde_json::json!({
        "$vocabulary": {
            "https://alexjg.github.io/automerge-jsonschema/spec": true,
        },
        "type": "object",
        "properties": {
            "addresses": {
                "type": "array",
                "maxLength": 10,
                "items": {
                    "type": "object",
                    "properties": {
                        "line_one": {"type": "string"}
                    }
                }
            }
        }
    });
    let err = cob::schema::Schema::try_from(&schema).err();
    if let Some(cob::schema::error::Parse::InvalidKeyword { path, keyword }) = err {
        assert_eq!(path, "properties/addresses/maxLength".to_string());
        assert_eq!(keyword, "maxLength".to_string());
    } else {
        panic!("expected an InvalidKeyword error, got {:?}", err);
    }
}

#[test]
fn invalid_keywords_in_all_of_raises_error() {
    let schema = serde_json::json!({
        "$vocabulary": {
            "https://alexjg.github.io/automerge-jsonschema/spec": true,
        },
        "type": "object",
        "properties": {
            "addresses": {
                "allOf": [
                    {"type": "array"},
                    {"maxLength": 10 },
                ]
            }
        }
    });
    let err = cob::schema::Schema::try_from(&schema).err();
    if let Some(cob::schema::error::Parse::InvalidKeyword { path, keyword }) = err {
        assert_eq!(path, "properties/addresses/allOf/1/maxLength".to_string());
        assert_eq!(keyword, "maxLength".to_string());
    } else {
        panic!("expected an InvalidKeyword error, got {:?}", err);
    }
}

#[test]
fn invalid_keywords_in_definitions_raises_error() {
    let schema = serde_json::json!({
        "$vocabulary": {
            "https://alexjg.github.io/automerge-jsonschema/spec": true,
        },
        "type": "object",
        "$defs": {
            "address": {
                "type": "array",
                "maxLength": 10
            }
        },
        "properties": {
            "addresses": {
                "$ref": "#/$defs/address"
            }
        }
    });
    let err = cob::schema::Schema::try_from(&schema).err();
    if let Some(cob::schema::error::Parse::InvalidKeyword { path, keyword }) = err {
        assert_eq!(path, "$defs/address/maxLength".to_string());
        assert_eq!(keyword, "maxLength".to_string());
    } else {
        panic!("expected an InvalidKeyword error, got {:?}", err);
    }
}

#[test]
fn string_validation_keywords_valid_if_automerge_type_string() {
    let schema = serde_json::json!({
        "$vocabulary": {
            "https://alexjg.github.io/automerge-jsonschema/spec": true,
        },
        "type": "object",
        "properties": {
            "name": {
                "automerge_type": "string",
                "type": "string",
                "maxLength": 10
            }
        }
    });
    assert!(cob::schema::Schema::try_from(&schema).is_ok())
}

#[test]
fn string_validation_keywords_invalid_if_not_automerge_type_string() {
    let schema = serde_json::json!({
        "$vocabulary": {
            "https://alexjg.github.io/automerge-jsonschema/spec": true,
        },
        "type": "object",
        "properties": {
            "name": {
                "type": "string",
                "maxLength": 10
            }
        }
    });
    let err = cob::schema::Schema::try_from(&schema).err();
    if let Some(cob::schema::error::Parse::InvalidKeyword { path, keyword }) = err {
        assert_eq!(path, "properties/name/maxLength".to_string());
        assert_eq!(keyword, "maxLength".to_string());
    } else {
        panic!("expected an InvalidKeyword error, got {:?}", err);
    }
}

#[test]
fn automerge_document_with_automerge_type_string() {
    let schema = cob::schema::Schema::try_from(&serde_json::json!({
        "$vocabulary": {
            "https://alexjg.github.io/automerge-jsonschema/spec": true,
        },
        "type": "object",
        "properties": {
            "name": {
                "type": "string",
                "automerge_type": "string",
                "maxLength": 10
            }
        }
    }))
    .unwrap();
    let mut good_doc = automerge::Frontend::new();
    let mut good_doc_backend = automerge::Backend::new();
    good_doc
        .apply_patch(good_doc_backend.get_patch().unwrap())
        .unwrap();
    let (_, change) = good_doc
        .change::<_, _, automerge::InvalidChangeRequest>(None, |doc| {
            doc.add_change(automerge::LocalChange::set(
                automerge::Path::root().key("name"),
                automerge::Value::Primitive(automerge::Primitive::Str("somename".into())),
            ))?;
            Ok(())
        })
        .unwrap();
    let (patch, _) = good_doc_backend
        .apply_local_change(change.unwrap())
        .unwrap();
    good_doc.apply_patch(patch).unwrap();
    assert!(schema.validate(&mut good_doc).is_ok());

    let mut bad_doc = automerge::Frontend::new();
    let mut bad_doc_backend = automerge::Backend::new();
    bad_doc
        .apply_patch(bad_doc_backend.get_patch().unwrap())
        .unwrap();
    let (_, change) = bad_doc
        .change::<_, _, automerge::InvalidChangeRequest>(None, |doc| {
            doc.add_change(automerge::LocalChange::set(
                automerge::Path::root().key("name"),
                automerge::Value::Text("some name".chars().map(|c| c.to_string().into()).collect()),
            ))?;
            Ok(())
        })
        .unwrap();
    let (patch, _) = bad_doc_backend.apply_local_change(change.unwrap()).unwrap();
    bad_doc.apply_patch(patch).unwrap();
    assert!(schema.validate(&mut bad_doc).is_err());

    //let mut bad_doc = automerge::Automerge::new();
    //bad_doc.change::<_, _, automerge::InvalidChangeRequest>(None, |doc| {
    //doc.add_change(automerge::LocalChange::set(
    //automerge::Path::root().key("name"),
    //automerge::Value::Text("some name".chars().map(|c|
    // c.to_string().into()).collect()),
    //))?;
    //Ok(())
    //}).unwrap();
    //assert!(schema.validate(&bad_doc).is_err());
}
diff --git a/librad/src/collaborative_objects.rs b/librad/src/collaborative_objects.rs
index 6a09718c..35049a7d 100644
--- a/librad/src/collaborative_objects.rs
+++ b/librad/src/collaborative_objects.rs
@@ -10,7 +10,7 @@ use crate::git::{
    types::{Namespace, Reference, RefsCategory},
};

use std::{collections::HashMap, convert::TryFrom, str::FromStr};
use std::{collections::HashMap, str::FromStr};

pub use cob::{
    AuthorizingIdentity,
@@ -23,7 +23,6 @@ pub use cob::{
    ObjectId,
    ObjectRefs,
    RefsStorage,
    Schema,
    TypeName,
};
use link_crypto::BoxedSigner;
@@ -32,7 +31,6 @@ use link_identities::git::{SomeIdentity, Urn};
pub mod error {
    use super::RefsError;
    use crate::git::identities::Error as IdentitiesError;
    use cob::error::SchemaParse;
    use link_identities::git::Urn;
    use thiserror::Error;

@@ -43,8 +41,6 @@ pub mod error {
        Cob(#[from] cob::error::Create<RefsError>),
        #[error(transparent)]
        ResolveAuth(#[from] ResolveAuthorizer),
        #[error(transparent)]
        InvalidSchema(#[from] SchemaParse),
    }

    #[allow(clippy::large_enum_variant)]
@@ -81,8 +77,6 @@ pub mod error {

/// The data required to create a new object
pub struct NewObjectSpec {
    /// A valid JSON schema which uses the vocabulary at <https://alexjg.github.io/automerge-jsonschema/spec>
    pub schema_json: serde_json::Value,
    /// The CRDT history to initialize this object with
    pub history: EntryContents,
    /// The typename for this object
@@ -129,7 +123,6 @@ impl<'a> CollaborativeObjects<'a> {
        within_identity: &Urn,
        spec: NewObjectSpec,
    ) -> Result<cob::CollaborativeObject, error::Create> {
        let schema = Schema::try_from(&spec.schema_json)?;
        cob::create_object(cob::CreateObjectArgs {
            refs_storage: self,
            repo: self.store.as_raw(),
@@ -137,7 +130,6 @@ impl<'a> CollaborativeObjects<'a> {
            author: whoami,
            authorizing_identity: resolve_authorizing_identity(self.store, within_identity)?
                .as_ref(),
            schema,
            contents: spec.history,
            typename: spec.typename,
            message: spec.message,
diff --git a/librad/src/git/tracking/refdb.rs b/librad/src/git/tracking/refdb.rs
index ffe9808a..e6cd5ca7 100644
--- a/librad/src/git/tracking/refdb.rs
+++ b/librad/src/git/tracking/refdb.rs
@@ -257,21 +257,23 @@ impl Write for Storage {
                        })
                    };
                    match self.reference(&RefString::from(&name))? {
                        Some(r) => reject_or_update(
                            previous
                                .guard(r.target().map(ext::Oid::from).as_ref(), delete)?
                                .map_or(
                                    Ok(Updated::Deleted {
                                        name,
                                        previous: r
                                            .target()
                                            .map(Ok)
                                            .unwrap_or(Err(error::SymbolicRef))?
                                            .into(),
                                    }),
                                    Err,
                                ),
                        ),
                        Some(r) => {
                            reject_or_update(
                                previous
                                    .guard(r.target().map(ext::Oid::from).as_ref(), delete)?
                                    .map_or(
                                        Ok(Updated::Deleted {
                                            name,
                                            previous: r
                                                .target()
                                                .map(Ok)
                                                .unwrap_or(Err(error::SymbolicRef))?
                                                .into(),
                                        }),
                                        Err,
                                    ),
                            )
                        },
                        None => match previous {
                            refdb::PreviousValue::Any
                            | refdb::PreviousValue::MustNotExist
diff --git a/librad/t/src/integration/scenario/collaborative_objects.rs b/librad/t/src/integration/scenario/collaborative_objects.rs
index 36d29e24..999ff65b 100644
--- a/librad/t/src/integration/scenario/collaborative_objects.rs
+++ b/librad/t/src/integration/scenario/collaborative_objects.rs
@@ -35,20 +35,6 @@ macro_rules! assert_state {
}

lazy_static! {
    static ref SCHEMA: serde_json::Value = serde_json::json!({
        "$vocabulary": {
            "https://alexjg.github.io/automerge-jsonschema/spec": true,
        },
        "type": "object",
        "properties": {
            "items": {
                "type": "array",
                "items": {
                    "type": "string",
                }
            }
        }
    });
    static ref TYPENAME: TypeName = FromStr::from_str("xyz.radicle.testobject").unwrap();
    static ref KEY_ONE: SecretKey = SecretKey::from_seed([
        100, 107, 14, 43, 237, 25, 113, 215, 236, 197, 160, 60, 169, 174, 81, 58, 143, 74, 42, 201,
@@ -199,7 +185,6 @@ fn collab_object_crud() {
                                history: init_history(),
                                message: Some("first change".to_string()),
                                typename: TYPENAME.clone(),
                                schema_json: SCHEMA.clone(),
                            },
                        )
                        .unwrap()
-- 
2.36.1

[PATCH 2/2] cobs: Remove schemas from RFC

Details
Message ID
<20220701101520.181107-3-alex@memoryandthought.me>
In-Reply-To
<20220701101520.181107-1-alex@memoryandthought.me> (view parent)
DKIM signature
missing
Download raw message
Patch: +33 -166
As we no longer support schema validation or shipping JSON schemas with
collaborative objects, we update the RFC to merely mention the use of
the `X-Rad-Schema` trailer for backwards compatibility.

Signed-off-by: Alex Good <alex@memoryandthought.me>
---
 docs/rfc/0662-collaborative-objects.adoc | 199 ++++-------------------
 1 file changed, 33 insertions(+), 166 deletions(-)

diff --git a/docs/rfc/0662-collaborative-objects.adoc b/docs/rfc/0662-collaborative-objects.adoc
index 17c287c5..89fc258c 100644
--- a/docs/rfc/0662-collaborative-objects.adoc
+++ b/docs/rfc/0662-collaborative-objects.adoc
@@ -15,9 +15,7 @@ exactly what such metadata look like, different organisations and people will
have different requirements and one of the promises of decentralisation is to
increase user choice. Therefore we should remain agnostic at the protocol level
about exactly what such metadata looks like, instead we should build a single
API for applications to use metadata associated with a project. The schemas and
interpretations of these data types then become composable at the application
layer.
API for applications to use metadata associated with a project. 

== Design Goals

@@ -60,70 +58,8 @@ that the application communicates with a radicle implementation via an HTTP
RPC. However, the HTTP RPC is not a proposal of this RFC, the proposed API will
be specified in terms of `radicle-link` later in this document.

The first thing the developer must do is decide on the schema of their data and
represent it as a JSON schema. We use this simple schema:

[source,json]
----
{
    "$vocabulary": {
        "https://alexjg.github.io/automerge-jsonschema/spec": true
    },
    "type": "object",
    "properties": {
        "id": {
            "type": "string"
        },
        "title": {
            "type": "string",
            "automerge_type": "text"
        },
        "description": {
            "type": "string",
            "automerge_type": "text"
        },
        "author": {
            "description": "The radicle URN of the author of the issue",
            "type": "string"
        },
        "comments": {
            "type": "array",
            "items": {
                "type": "object",
                "properties": {
                    "text": {
                        "type": "string"
                    },
                    "id": "string",
                    "author": {
                        "type": "string",
                        "description": "Radicle URN of the author of the comment"
                    }
                }
            }
        }
    }
}
----

An issue consists of a title, description, and author. This schema may well be
the subject of its own mini standardisation process as it is very likely that
many different applications will want to interoperate with the same issue
model. The important thing is that this standardisation process can happen
independently of the radicle protocol. Note the `$vocabulary`, this is a
vocabulary which only contains keywords which can be used in a CRDT such as 
automerge.

In addition to the schema, the developer must choose a name for their type.
This is similar to an XML namespace and probably standardised as part of the
same process which produces the schema. In this case let's choose
`xyz.radicle.issue` as the type name.

This schema ensures that the data is well formed. In this iteration of 
collaborative objects, only project maintainers can modify an issue, we assume
that project maintainers can be trusted not to modify the issue in malicious
ways.

The developer must choose a name for their type. This is similar to an XML
namespace. In this case let's choose `xyz.radicle.issue` as the type name.

=== Creating an issue

@@ -151,7 +87,6 @@ with the following content:
----
{
    "typename": "xyz.radicle.issue",
    "schema": <the schema above>,
    "auth_policy": "<the policy above>",
    "history": {
        "type": "automerge",
@@ -160,10 +95,9 @@ with the following content:
}
----

This endpoint returns an error if the data does not match the schema. Otherwise 
the endpoint returns an identifier for the newly created object and announces
the new data to the network, anyone tracking the project will pull those 
changes.
The endpoint returns an identifier for the newly created object and announces
the new data to the network; anyone tracking the project will pull those
changes.

=== Retrieving an issue

@@ -184,7 +118,6 @@ This will return something like this:
    {
        "id": "<some long string>",
        "typename": "xyz.radicle.issue",
        "schema": <the schema above>,
        "data": {
            "title": "Librad doesn't implement the metadata RFC",
            "description": "It's in the name",
@@ -244,9 +177,7 @@ the following contents:
}
----

This endpoint will return an error if the change does not match the schema of
the object. Otherwise the change will be merged in to the object and announced
to the network.
The change will be merged into the object and announced to the network.

== Implementation

@@ -305,14 +236,12 @@ Therefore a valid change commit must have four trailers:
* `X-Rad-Author`, this is the multibase-multihash of the parent commit which
  references the author identity. We use this trailer to avoid following the
  author commit reference when constructing the automerge change graph
* `X-Rad-Schema`, this is the multibase-multihash of the parent commit which contains 
  the schema of this object. See <<Schema Commits>>.
* `X-Rad-Schema`, this is the multibase-multihash of the parent commit which
  contains the schema of this object. See <<Schema Commits>>. This trailer is
  retained for backwards compatibility and MUST be omitted in new changes.
* `X-Rad-Authorizing-Identity`, the multibase-multihash of the parent commit
  which points to the identity which authorizes this change.

Furthermore a change commit MUST be signed by a delegate of the authorizing
identity.
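
For illustration, the trailers on a new-style change commit might look like the
following (all values here are placeholders, not real hashes):

[source]
----
Add a comment to xyz.radicle.issue

X-Rad-Signature: <multibase encoded signature>
X-Rad-Author: <multibase-multihash of the author identity commit>
X-Rad-Authorizing-Identity: <multibase-multihash of the authorizing identity commit>
----

Legacy changes may additionally carry the `X-Rad-Schema` trailer described
above; new changes omit it.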

==== `manifest.toml`

The manifest is a TOML file containing some metadata about the object.
@@ -323,11 +252,6 @@ Specifically it will contain:
  CRDT implementations in the future.


Each object is also created with a JSON schema. The schema is represented by an
initial `schema.json` and a series of schema migrations which extend that
initial schema. Schema migrations will not be addressed in detail in this RFC
but we will show their feasibility.

==== `change`

This is the automerge change which this commit introduces. It is a binary file
@@ -337,17 +261,11 @@ referenced by the parents of the commit.

=== Object IDs

We require that there only be one root of the change graph we're replicating.
Whilst we could merge change graphs with multiple roots this would be insecure.
Imagine that an honest peer creates an object, if an attacker could create 
another root node in the change graph and arrange for it to come before the
honest root in a topological sort then the attacker could override the schema
and other properties of the object.

To have a single root we need an object ID which is derived from the attributes
of the object, that way an attacker cannot manipulate the attributes of an
object with the same ID. We achieve this by using the hash of the initial
commit of the object as the ID.
We require that there only be one root of the change graph we're replicating. To
have a single root we need an object ID which is derived from the attributes of
the object; that way an attacker cannot manipulate the attributes of an object
with the same ID. We achieve this by using the hash of the initial commit of the
object as the ID.
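
A minimal sketch of this derivation (the `ObjectId` newtype here is purely
illustrative; the reference layout below encodes the ID as a
multibase-multihash when it is rendered into a ref name):

[source,rust]
----
// Illustrative only: the object ID is the OID of the object's initial commit.
struct ObjectId(git2::Oid);

impl From<git2::Oid> for ObjectId {
    fn from(initial_commit: git2::Oid) -> Self {
        ObjectId(initial_commit)
    }
}
----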

It is entirely plausible that a peer would create two distinct objects with
identical initial states. Under many content addressing schemes this would lead
@@ -365,12 +283,10 @@ ID in this reference is a multibase-multihash encoding of the object ID using
the base32-z encoding.

At this point we have the hash linked graph of automerge changes, but we need
to make sure that the merged document is authenticated and valid with respect
to it's schema. To do this we start at the root of the hash graph and walk
down the tree. As we encounter each change we check it's signature, apply it
and check that the new document does not violate the schema. If it does violate
the schema we discard the change and all dependent changes. Finally, we have an
authenticated document which respects a given schema.
to make sure that the merged document is authenticated. To do this we start at
the root of the hash graph and walk down the tree. As we encounter each change
we check its signature and discard it if it is not valid; otherwise we merge it
with the document state.
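
As a rough sketch of that walk (the node type and the signature check are
stand-ins, not the actual change graph types used by the implementation):

[source,rust]
----
// Stand-in node type: each change knows whether its signatures verified and
// which changes depend on it.
struct ChangeNode {
    signatures_valid: bool,
    automerge_change: Vec<u8>,
    dependents: Vec<ChangeNode>,
}

// Collect the raw automerge changes to merge into the document, starting at
// the root. An invalid change is discarded along with everything that depends
// on it. (The real graph is a DAG, so the implementation also has to
// deduplicate and topologically order the changes it visits.)
fn collect_valid_changes(node: &ChangeNode, out: &mut Vec<Vec<u8>>) {
    if !node.signatures_valid {
        return;
    }
    out.push(node.automerge_change.clone());
    for dep in &node.dependents {
        collect_valid_changes(dep, out);
    }
}
----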

It is important to note that this merging is at this point not stored in the
repository - it can be performed in memory and may be cached. The result is
@@ -406,58 +322,19 @@ ID.

To make a change to an object we load the existing messages for an object. The 
application developer provides us with the binary representation of the change
to that object. We apply the change and ensure that the new object state still
matches the object schema. At this point the state of the object may depend on
many contributions from the tracking graph - not just the ones in our own view
of the project. We now create a commit with our new change in it, referencing
all the commits containing the direct dependencies of the change as parents.
to that object. At this point the state of the object may depend on many
contributions from the tracking graph - not just the ones in our own view of the
project. We now create a commit with our new change in it, referencing all the
commits containing the direct dependencies of the change as parents.
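
A hedged git2 sketch of that last step (the flat list of parent OIDs and the
signature are illustrative; the real change commit also carries the trailers
described above):

[source,rust]
----
use git2::{Repository, Signature};

// Sketch: write a change commit whose parents are the commits containing the
// direct dependencies of the new automerge change.
fn commit_change(
    repo: &Repository,
    tree: git2::Oid,
    parents: &[git2::Oid],
    message: &str,
) -> Result<git2::Oid, git2::Error> {
    let tree = repo.find_tree(tree)?;
    let parents = parents
        .iter()
        .map(|oid| repo.find_commit(*oid))
        .collect::<Result<Vec<_>, _>>()?;
    let parent_refs: Vec<&git2::Commit<'_>> = parents.iter().collect();
    let sig = Signature::now("example author", "author@example.com")?;
    repo.commit(None, &sig, &sig, message, &tree, &parent_refs)
}
----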

=== Schema Commits

Schemas are important for the interoperability of the system. We need
applications to be able to rely on the data they are working with being valid,
otherwise we impose the problem of schema validation on application developers.

Schemas will need to be able to change over time. Schema migration is out of 
scope for this RFC but we need a minimal mechanism to support it in future. To
this end schemas are represented using their own hash graph. For the purposes
of this RFC a schema is a commit with a tree that contains a `schema.json` and
a `manifest.toml` blob:

[source]
----
|
|-- schema.json
|-- manifest.toml
----

`schema.json` contains a draft 2020-12 JSON schema. This schema MUST use the
https://alexjg.github.io/automerge-jsonschema/spec[Automerge JSON schema vocabulary],
which specifies a subset of the keywords from the JSON schema spec which 
distribute across the merge operation and therefore can be used to validate
automerge documents.

`manifest.toml` is a TOML file with the following contents:

[source,toml]
----
type: jsonschema
version: 1
----

This can be extended in future by creating schema commits that reference this
schema commit and add migrations.

As with change commits the schema commit is signed and references an author
commit, therefore the commit has two trailers:

- `X-Rad-Author`
- `X-Rad-Signature`

With the same definition as for change commits.

Change commits have a schema commit as one of their parents and reference that
commit via the `X-Rad-Schema` trailer.
An earlier version of this RFC specified a separate schema which a collaborative
object should respect. This schema was encoded as a commit which each change in
a collaborative object referenced in its parents. We no longer include a schema,
but in order to remain compatible with older objects, implementations MUST parse
the `X-Rad-Schema` trailer in order to discard this parent when evaluating the
change graph of an object.
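
A minimal sketch of that filtering step, assuming the schema commit OID has
already been recovered from the `X-Rad-Schema` trailer (`None` for new-style
changes):

[source,rust]
----
use git2::Oid;

// Drop the legacy schema parent, if any, before treating the remaining
// parents as edges in the change graph.
fn change_graph_parents(parents: &[Oid], schema_commit: Option<Oid>) -> Vec<Oid> {
    parents
        .iter()
        .copied()
        .filter(|parent| Some(*parent) != schema_commit)
        .collect()
}
----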


=== Strange Perspectives
@@ -482,7 +359,7 @@ The APIs librad will provide:
  applications which wish to write data
* update an object by providing the bytes of an automerge change which updates
  the document
* create a new object from a JSON object, a JSON schema, and a type name
* create a new object from a JSON object and a type name
  
Note that I am referring to "the binary representation of an automerge x" 
because the automerge API works in terms of binary changes.
@@ -503,11 +380,9 @@ enum History {

struct ObjectId(String);
struct TypeName(String);
struct Schema(..);

struct CollaborativeObject {
    typename: TypeName,
    schema: Schema,
    id: ObjectId,
    author: Person, 
    json: serde_json::Value,
@@ -517,7 +392,6 @@ struct CollaborativeObject {
struct NewObjectSpec {
    typename: TypeName,
    history: History,
    schema_json: serde_json::Value,
}

impl CollaborativeObjectStore {
@@ -538,16 +412,9 @@ a few core extensions with librad - issues for example.

== Further work

This RFC limits participants in collaborative objects to project maintainers.
This is a significant limitation: we can't reproduce the common behaviour of
many issue trackers, where an issue can be created by anyone. In order to allow
this behaviour we need the ability to make authorization decisions about
different parts of the document. One way to achieve this would be to add
authorization logic à la https://content.iospress.com/articles/journal-of-computer-security/jcs364[SecPAL]
to the collaborative object definition. This would be used in a similar manner
to the schema, to validate that changes to a document are authorized by the
authorization logic.

This RFC does not specify authorization logic and applications must instead
implement this themselves. It may be possible to implement a general
authorization DSL in librad, freeing applications of this logic. 

[[alt-approaches,Alternative Approaches]]
==  Alternative Approaches
-- 
2.36.1

Re: [PATCH 1/2] cobs: Remove schemas

Details
Message ID
<CL495U4TJM1O.222I6A7KESD83@haptop>
In-Reply-To
<20220701101520.181107-2-alex@memoryandthought.me> (view parent)
DKIM signature
missing
Download raw message
On Fri Jul 1, 2022 at 11:15 AM IST, Alex Good wrote:
> --- a/cob/src/change.rs
> +++ b/cob/src/change.rs
> @@ -3,15 +3,11 @@
>  // This file is part of radicle-link, distributed under the GPLv3 with Radicle
>  // Linking Exception. For full terms see the included LICENSE file.
>  
> -use super::{
> -    change_metadata::{self, ChangeMetadata, CreateMetadataArgs},
> -    trailers,
> -    EntryContents,
> -    HistoryType,
> -    TypeName,
> -};
> +use super::{trailers, EntryContents, HistoryType, TypeName};
>  
> +use git_trailers::{parse as parse_trailers, OwnedTrailer};
>  use link_crypto::BoxedSigner;
> +use link_identities::sign::Signatures;
>  
>  use std::{convert::TryFrom, fmt};
>  
> @@ -21,14 +17,28 @@ use serde::{Deserialize, Serialize};
>  /// is specified in the RFC (docs/rfc/0662-collaborative-objects.adoc)
>  /// under "Change Commits".
>  pub struct Change {
> -    /// The OID of the parent commit which points at the schema_commit
> -    schema_commit: git2::Oid,
> +    /// The commit where this change lives
> +    commit: git2::Oid,
> +    /// The OID of the tree the commit points at, we need this to validate the
> +    /// signatures
> +    revision: git2::Oid,
> +    /// The signatures of this change
> +    signatures: Signatures,
> +    /// The OID of the parent commit of this change which points at the author
> +    /// identity
> +    author_commit: git2::Oid,
> +    /// The OID of the parent commit of this change which points at a schema.
> +    /// Schemas are no longer used but older implementations include a
> +    /// schema commit as a parent of the change and to stay backwards
> +    /// compatible we must exclude these commits when loading a change.
> +    schema_commit: Option<git2::Oid>,
> +    /// The OID of the parent commit which points at the identity this change
> +    /// was authorized with respect to at the time the change was authored.
> +    authorizing_identity_commit: git2::Oid,
>      /// The manifest
>      manifest: Manifest,
>      /// The actual changes this change carries
>      contents: EntryContents,
> -    /// The metadata for this change
> -    metadata: change_metadata::ChangeMetadata,
>  }
>  
>  impl fmt::Display for Change {
> @@ -38,9 +48,10 @@ impl fmt::Display for Change {
>  }
>  
>  pub mod error {
> -    use super::{change_metadata, trailers};
> +    use super::trailers;
> +    use git_trailers::Error as TrailerError;
>      use link_crypto::BoxedSignError;
> -    use link_identities::git::error::Signatures;
> +    use link_identities::sign::error::Signatures;
>      use thiserror::Error;
>  
>      #[derive(Debug, Error)]
> @@ -49,8 +60,6 @@ pub mod error {
>          Git(#[from] git2::Error),
>          #[error(transparent)]
>          Signer(#[from] BoxedSignError),
> -        #[error(transparent)]
> -        Metadata(#[from] change_metadata::CreateError),
>      }
>  
>      #[derive(Debug, Error)]
> @@ -70,14 +79,21 @@ pub mod error {
>          #[error("./change was not a blob")]
>          ChangeNotBlob,
>          #[error(transparent)]
> -        InvalidMetadata(#[from] change_metadata::LoadError),
> -        #[error(transparent)]
>          SchemaCommitTrailer(#[from] trailers::error::InvalidSchemaTrailer),
> +        #[error(transparent)]
> +        AuthorTrailer(#[from] trailers::error::InvalidAuthorTrailer),
> +        #[error(transparent)]
> +        AuthorizingIdentityTrailer(
> +            #[from] super::trailers::error::InvalidAuthorizingIdentityTrailer,
> +        ),
> +        #[error("non utf-8 characters in commit message")]
> +        Utf8,
> +        #[error(transparent)]
> +        Trailer(#[from] TrailerError),
>      }
>  }
>  
>  pub struct NewChangeSpec {
> -    pub(crate) schema_commit: git2::Oid,
>      pub(crate) typename: TypeName,
>      pub(crate) tips: Option<Vec<git2::Oid>>,
>      pub(crate) message: Option<String>,
> @@ -90,8 +106,8 @@ const CHANGE_BLOB_NAME: &str = "change";
>  impl Change {
>      /// Create a change in the git repo according to the spec
>      pub fn create(
> -        authorizing_identity_commit: git2::Oid,
> -        author_identity_commit: git2::Oid,
> +        authorizing_identity_commit_id: git2::Oid,
> +        author_identity_commit_id: git2::Oid,
>          repo: &git2::Repository,
>          signer: &BoxedSigner,
>          spec: NewChangeSpec,
> @@ -116,35 +132,80 @@ impl Change {
>          tb.insert(CHANGE_BLOB_NAME, change_blob, git2::FileMode::Blob.into())?;
>  
>          let revision = tb.write()?;
> -
> -        let schema_trailer = trailers::SchemaCommitTrailer::from(spec.schema_commit).into();
> -
> -        let mut tips = spec.tips.clone().unwrap_or_default();
> -        tips.push(spec.schema_commit);
> -        tips.push(authorizing_identity_commit);
> -
> -        let metadata = ChangeMetadata::create(CreateMetadataArgs {
> -            revision,
> -            tips,
> -            message: spec.message.unwrap_or_else(|| "new change".to_string()),
> -            extra_trailers: vec![schema_trailer],
> -            authorizing_identity_commit,
> -            author_identity_commit,
> -            signer: signer.clone(),
> -            repo,
> -        })?;
> +        let tree = repo.find_tree(revision)?;
> +
> +        let author_commit = repo.find_commit(author_identity_commit_id)?;
> +        let author = repo.signature()?;
> +
> +        let authorizing_identity_commit = repo.find_commit(authorizing_identity_commit_id)?;
> +
> +        let signatures = link_identities::git::sign(signer, revision.into())?.into();
> +        let mut parent_commits = spec
> +            .tips
> +            .iter()
> +            .flat_map(|cs| cs.iter())
> +            .map(|o| repo.find_commit(*o))
> +            .collect::<Result<Vec<git2::Commit>, git2::Error>>()?;
> +        parent_commits.push(authorizing_identity_commit);
> +        parent_commits.push(author_commit);
> +
> +        let trailers = vec![
> +            super::trailers::AuthorCommitTrailer::from(author_identity_commit_id).into(),
> +            super::trailers::AuthorizingIdentityCommitTrailer::from(authorizing_identity_commit_id)
> +                .into(),
> +        ];
> +
> +        let commit = repo.commit(
> +            None,
> +            &author,
> +            &author,
> +            &link_identities::git::sign::CommitMessage::new(
> +                spec.message
> +                    .unwrap_or_else(|| "new change".to_string())
> +                    .as_str(),
> +                &signatures,
> +                trailers,
> +            )
> +            .to_string(),
> +            &tree,
> +            &(parent_commits.iter().collect::<Vec<&git2::Commit>>())[..],
> +        )?;
>  
>          Ok(Change {
> -            schema_commit: spec.schema_commit,
> +            schema_commit: None,
>              manifest,
>              contents: spec.contents,
> -            metadata,
> +            commit,
> +            signatures,
> +            authorizing_identity_commit: authorizing_identity_commit_id,
> +            author_commit: author_identity_commit_id,
> +            revision,
>          })
>      }
>  
>      /// Load a change from the given commit
>      pub fn load(repo: &git2::Repository, commit: &git2::Commit) -> Result<Change, error::Load> {
> -        let metadata = ChangeMetadata::try_from(commit)?;
> +        let trailers = commit
> +            .message()
> +            .ok_or(error::Load::Utf8)
> +            .and_then(|s| parse_trailers(s, ":").map_err(|e| e.into()))?;
> +        let owned_trailers: Vec<OwnedTrailer> = trailers.iter().map(OwnedTrailer::from).collect();
> +        let author_commit_trailer =
> +            super::trailers::AuthorCommitTrailer::try_from(&owned_trailers[..])?;
> +        let authorizing_identity_trailer =
> +            super::trailers::AuthorizingIdentityCommitTrailer::try_from(&owned_trailers[..])?;
> +
> +        // We no longer support schema parents but to remain backwards compatible we
> +        // still load the commit trailer so we know to omit the schema parent
> +        // commits when evaluating old object histories which still have a
> +        // schema parent commit
> +        let schema_commit_trailer =
> +            match super::trailers::SchemaCommitTrailer::try_from(&owned_trailers[..]) {
> +                Ok(t) => Some(t),
> +                Err(super::trailers::error::InvalidSchemaTrailer::NoTrailer) => None,
> +                Err(e) => return Err(e.into()),
> +            };
> +        let signatures = Signatures::try_from(trailers)?;
>  
>          let tree = commit.tree()?;
>          let manifest_tree_entry = tree
> @@ -170,23 +231,24 @@ impl Change {
>              },
>          };
>  
> -        let schema_commit_trailer =
> -            trailers::SchemaCommitTrailer::try_from(&metadata.trailers[..])?;
> -
>          Ok(Change {
> -            schema_commit: schema_commit_trailer.oid(),
>              manifest,
>              contents,
> -            metadata,
> +            commit: commit.id(),
> +            schema_commit: schema_commit_trailer.map(|s| s.oid()),
> +            author_commit: author_commit_trailer.oid(),
> +            authorizing_identity_commit: authorizing_identity_trailer.oid(),
> +            signatures,
> +            revision: tree.id(),
>          })
>      }
>  
>      pub fn commit(&self) -> &git2::Oid {
> -        &self.metadata.commit
> +        &self.commit
>      }
>  
>      pub fn author_commit(&self) -> git2::Oid {
> -        self.metadata.author_commit
> +        self.author_commit
>      }
>  
>      pub fn typename(&self) -> &TypeName {
> @@ -197,16 +259,21 @@ impl Change {
>          &self.contents
>      }
>  
> -    pub fn schema_commit(&self) -> git2::Oid {
> +    pub fn schema_commit(&self) -> Option<git2::Oid> {
>          self.schema_commit
>      }
>  
>      pub fn authorizing_identity_commit(&self) -> git2::Oid {
> -        self.metadata.authorizing_identity_commit
> +        self.authorizing_identity_commit
>      }
>  
>      pub fn valid_signatures(&self) -> bool {
> -        self.metadata.valid_signatures()
> +        for (key, sig) in self.signatures.iter() {
> +            if !key.verify(sig, self.revision.as_bytes()) {
> +                return false;
> +            }
> +        }

I think this could be expressed as:

  self.signatures
      .iter()
      .all(|(key, sig)| key.verify(sig, self.revision.as_bytes()))
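
(`all` short-circuits on the first failing signature, so the behaviour matches
the explicit loop.)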

> +        true
>      }
>  }
Details
Message ID
<CL49NO0HKPYW.1O2MWWC4X3L0C@haptop>
In-Reply-To
<20220701101520.181107-1-alex@memoryandthought.me> (view parent)
DKIM signature
missing
Download raw message
Nice, looks good to me!
Details
Message ID
<CL49XFJXIQSK.34VDCM1Z0LKL7@haptop>
In-Reply-To
<20220701101520.181107-1-alex@memoryandthought.me> (view parent)
DKIM signature
missing
Download raw message
Actually, the `cargo fmt` step failed for me. Could you check if it's
the same on your side?
Details
Message ID
<CAH_DpYRFqkwdiZvU4ww6niL=-OY=K22WwEfHCADogRRbw9SxFg@mail.gmail.com>
In-Reply-To
<CL49XFJXIQSK.34VDCM1Z0LKL7@haptop> (view parent)
DKIM signature
missing
Download raw message
On 01/07/22 12:13pm, Fintan Halpenny wrote:
> Actually, the `cargo fmt` step failed for me. Could you check if it's
> the same on your side?

Well, I ran `rustup update` and now le clippy is complaining about
something in `gitd`. I can fix that in a separate patch or I'm also happy
for you to sort the whole thing in the merge.

[PATCH v2 0/2] cobs/remove-schemas

Details
Message ID
<20220708114620.947031-1-alex@memoryandthought.me>
In-Reply-To
<20220701101520.181107-1-alex@memoryandthought.me> (view parent)
DKIM signature
missing
Download raw message
Changes from v1:

* librad/src/git/tracking/refdb.rs - fix formatting

Alex Good (2):
  cobs: Remove schemas from
  cobs: Remove schemas from RFC

 bins/Cargo.lock                               | 330 +-----------
 cob/Cargo.toml                                |  10 -
 cob/src/cache.rs                              |   9 +-
 cob/src/cache/cached_change_graph.rs          |  87 +--
 cob/src/change.rs                             | 167 ++++--
 cob/src/change_graph.rs                       |  45 +-
 cob/src/change_graph/evaluation.rs            |  25 -
 cob/src/change_metadata.rs                    | 167 ------
 cob/src/lib.rs                                | 111 +---
 cob/src/schema.rs                             | 496 ------------------
 cob/src/schema_change.rs                      | 106 ----
 cob/src/validated_automerge.rs                | 128 -----
 cob/t/src/tests.rs                            |   1 -
 cob/t/src/tests/cache.rs                      |  21 +-
 cob/t/src/tests/cached_change_graph.rs        |  16 +-
 cob/t/src/tests/schema.rs                     | 258 ---------
 docs/rfc/0662-collaborative-objects.adoc      | 199 ++-----
 librad/src/collaborative_objects.rs           |  10 +-
 .../scenario/collaborative_objects.rs         |  15 -
 19 files changed, 197 insertions(+), 2004 deletions(-)
 delete mode 100644 cob/src/change_metadata.rs
 delete mode 100644 cob/src/schema.rs
 delete mode 100644 cob/src/schema_change.rs
 delete mode 100644 cob/src/validated_automerge.rs
 delete mode 100644 cob/t/src/tests/schema.rs

-- 
2.36.1

[PATCH v2 1/2] cobs: Remove schemas from

Details
Message ID
<20220708114620.947031-2-alex@memoryandthought.me>
In-Reply-To
<20220708114620.947031-1-alex@memoryandthought.me> (view parent)
DKIM signature
missing
Download raw message
Patch: +164 -1838
Checking schemas in the cobs isn't actually that useful. Applications
have to validate each change in a COB anyway as 1) they must validate
that changes are authorized and 2) the schema the application cares
about might be different to the schema the object was published with.

This change removes the schema checking logic, as well as logic to
encode and ship a schema along with a collaborative object. This also
allows us to remove the dependencies on automerge and jsonschema.

Signed-off-by: Alex Good <alex@memoryandthought.me>
---
 bins/Cargo.lock                               | 330 +-----------
 cob/Cargo.toml                                |  10 -
 cob/src/cache.rs                              |   9 +-
 cob/src/cache/cached_change_graph.rs          |  87 +--
 cob/src/change.rs                             | 167 ++++--
 cob/src/change_graph.rs                       |  45 +-
 cob/src/change_graph/evaluation.rs            |  25 -
 cob/src/change_metadata.rs                    | 167 ------
 cob/src/lib.rs                                | 111 +---
 cob/src/schema.rs                             | 496 ------------------
 cob/src/schema_change.rs                      | 106 ----
 cob/src/validated_automerge.rs                | 128 -----
 cob/t/src/tests.rs                            |   1 -
 cob/t/src/tests/cache.rs                      |  21 +-
 cob/t/src/tests/cached_change_graph.rs        |  16 +-
 cob/t/src/tests/schema.rs                     | 258 ---------
 librad/src/collaborative_objects.rs           |  10 +-
 .../scenario/collaborative_objects.rs         |  15 -
 18 files changed, 164 insertions(+), 1838 deletions(-)
 delete mode 100644 cob/src/change_metadata.rs
 delete mode 100644 cob/src/schema.rs
 delete mode 100644 cob/src/schema_change.rs
 delete mode 100644 cob/src/validated_automerge.rs
 delete mode 100644 cob/t/src/tests/schema.rs

diff --git a/bins/Cargo.lock b/bins/Cargo.lock
index 6ab3de18..d5756144 100644
--- a/bins/Cargo.lock
+++ b/bins/Cargo.lock
@@ -36,18 +36,6 @@ version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8fd72866655d1904d6b0997d0b07ba561047d070fbe29de039031c641b61217"

[[package]]
name = "ahash"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47"
dependencies = [
 "getrandom 0.2.6",
 "once_cell",
 "serde",
 "version_check",
]

[[package]]
name = "aho-corasick"
version = "0.7.18"
@@ -226,75 +214,6 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"

[[package]]
name = "automerge"
version = "0.0.2"
source = "git+https://github.com/automerge/automerge-rs.git?rev=e72571962b51c2f0726fb534890ef3b4f7c74dfc#e72571962b51c2f0726fb534890ef3b4f7c74dfc"
dependencies = [
 "automerge-backend",
 "automerge-frontend",
 "automerge-protocol",
 "serde",
 "serde_json",
 "thiserror",
 "uuid",
]

[[package]]
name = "automerge-backend"
version = "0.0.1"
source = "git+https://github.com/automerge/automerge-rs.git?rev=e72571962b51c2f0726fb534890ef3b4f7c74dfc#e72571962b51c2f0726fb534890ef3b4f7c74dfc"
dependencies = [
 "automerge-protocol",
 "flate2",
 "fxhash",
 "hex",
 "itertools 0.9.0",
 "js-sys",
 "leb128",
 "maplit",
 "nonzero_ext 0.2.0",
 "rand",
 "serde",
 "serde_json",
 "sha2",
 "smol_str",
 "thiserror",
 "tracing",
 "wasm-bindgen",
 "web-sys",
]

[[package]]
name = "automerge-frontend"
version = "0.1.0"
source = "git+https://github.com/automerge/automerge-rs.git?rev=e72571962b51c2f0726fb534890ef3b4f7c74dfc#e72571962b51c2f0726fb534890ef3b4f7c74dfc"
dependencies = [
 "automerge-protocol",
 "getrandom 0.2.6",
 "maplit",
 "serde",
 "serde_json",
 "smol_str",
 "thiserror",
 "unicode-segmentation",
 "uuid",
]

[[package]]
name = "automerge-protocol"
version = "0.1.0"
source = "git+https://github.com/automerge/automerge-rs.git?rev=e72571962b51c2f0726fb534890ef3b4f7c74dfc#e72571962b51c2f0726fb534890ef3b4f7c74dfc"
dependencies = [
 "hex",
 "serde",
 "smol_str",
 "strum",
 "thiserror",
 "tinyvec",
 "uuid",
]

[[package]]
name = "backoff"
version = "0.3.0"
@@ -337,15 +256,6 @@ dependencies = [
 "zeroize",
]

[[package]]
name = "bit-set"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e11e16035ea35e4e5997b393eacbf6f63983188f7a2ad25bfb13465f5ad59de"
dependencies = [
 "bit-vec",
]

[[package]]
name = "bit-vec"
version = "0.6.3"
@@ -475,12 +385,6 @@ version = "3.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4a45a46ab1f2412e53d3a0ade76ffad2025804294569aae387231a0cd6e0899"

[[package]]
name = "bytecount"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72feb31ffc86498dacdbd0fcebb56138e7177a8cc5cea4516031d15ae85a742e"

[[package]]
name = "byteorder"
version = "1.4.3"
@@ -589,7 +493,7 @@ version = "3.1.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "25320346e922cffe59c0bbc5410c8d8784509efb321488971081313cb1e1a33c"
dependencies = [
 "heck 0.4.0",
 "heck",
 "proc-macro-error",
 "proc-macro2",
 "quote",
@@ -624,15 +528,12 @@ dependencies = [
name = "cob"
version = "0.1.0"
dependencies = [
 "automerge",
 "either",
 "git-trailers",
 "git2",
 "jsonschema",
 "lazy_static",
 "link-crypto",
 "link-identities",
 "lru",
 "minicbor",
 "multibase",
 "multihash",
@@ -943,16 +844,6 @@ version = "2.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77f3309417938f28bf8228fcff79a4a37103981e3e186d2ccd19c74b38f4eb71"

[[package]]
name = "fancy-regex"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d6b8560a05112eb52f04b00e5d3790c0dd75d9d980eb8a122fb23b92a623ccf"
dependencies = [
 "bit-set",
 "regex",
]

[[package]]
name = "fastrand"
version = "1.7.0"
@@ -1017,16 +908,6 @@ dependencies = [
 "percent-encoding",
]

[[package]]
name = "fraction"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aba3510011eee8825018be07f08d9643421de007eaf62a3bde58d89b058abfa7"
dependencies = [
 "lazy_static",
 "num",
]

[[package]]
name = "fsevent"
version = "0.4.0"
@@ -1184,15 +1065,6 @@ dependencies = [
 "pin-project 0.4.29",
]

[[package]]
name = "fxhash"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
dependencies = [
 "byteorder",
]

[[package]]
name = "generic-array"
version = "0.14.5"
@@ -1222,10 +1094,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9be70c98951c83b8d2f8f60d7065fa6d5146873094452a1008da8c2f1e4205ad"
dependencies = [
 "cfg-if 1.0.0",
 "js-sys",
 "libc",
 "wasi 0.10.2+wasi-snapshot-preview1",
 "wasm-bindgen",
]

[[package]]
@@ -1589,7 +1459,7 @@ version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e91b62f79061a0bc2e046024cb7ba44b08419ed238ecbd9adbd787434b9e8c25"
dependencies = [
 "ahash 0.3.8",
 "ahash",
 "autocfg",
]

@@ -1598,9 +1468,6 @@ name = "hashbrown"
version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
dependencies = [
 "ahash 0.7.6",
]

[[package]]
name = "hashbrown"
@@ -1608,15 +1475,6 @@ version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db0d4cf898abf0081f964436dc980e96670a0f36863e4b83aaacdb65c9d7ccc3"

[[package]]
name = "heck"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c"
dependencies = [
 "unicode-segmentation",
]

[[package]]
name = "heck"
version = "0.4.0"
@@ -1785,24 +1643,6 @@ version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "879d54834c8c76457ef4293a689b2a8c59b076067ad77b15efafbb05f92a592b"

[[package]]
name = "iso8601"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a59a3f2be6271b2a844cd0dd13bf8ccc88a9540482d872c7ce58ab1c4db9fab"
dependencies = [
 "nom",
]

[[package]]
name = "itertools"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b"
dependencies = [
 "either",
]

[[package]]
name = "itertools"
version = "0.10.3"
@@ -1842,31 +1682,6 @@ dependencies = [
 "wasm-bindgen",
]

[[package]]
name = "jsonschema"
version = "0.13.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "877e398ffb23c1c311c417ef5e72e8699c3822dbf835468f009c6ce91b6c206b"
dependencies = [
 "ahash 0.7.6",
 "base64",
 "bytecount",
 "fancy-regex",
 "fraction",
 "iso8601",
 "itoa 0.4.8",
 "lazy_static",
 "num-cmp",
 "parking_lot 0.12.1",
 "percent-encoding",
 "regex",
 "serde",
 "serde_json",
 "time",
 "url",
 "uuid",
]

[[package]]
name = "jwalk"
version = "0.6.0"
@@ -1905,12 +1720,6 @@ version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"

[[package]]
name = "leb128"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67"

[[package]]
name = "libc"
version = "0.2.126"
@@ -1955,7 +1764,7 @@ dependencies = [
 "governor",
 "if-watch",
 "indexmap",
 "itertools 0.10.3",
 "itertools",
 "lazy_static",
 "libc",
 "libgit2-sys",
@@ -2184,7 +1993,7 @@ dependencies = [
 "either",
 "futures-lite",
 "git-ref-format",
 "itertools 0.10.3",
 "itertools",
 "link-crypto",
 "link-git",
 "parking_lot 0.12.1",
@@ -2265,7 +2074,7 @@ version = "0.1.0"
dependencies = [
 "async-trait",
 "futures",
 "itertools 0.10.3",
 "itertools",
 "librad",
 "lnk-thrussh-agent",
 "minicbor",
@@ -2467,7 +2276,7 @@ dependencies = [
 "lnk-thrussh-libsodium",
 "log",
 "md5",
 "num-bigint 0.4.3",
 "num-bigint",
 "num-integer",
 "pbkdf2 0.8.0",
 "rand",
@@ -2512,21 +2321,6 @@ dependencies = [
 "cfg-if 1.0.0",
]

[[package]]
name = "lru"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8015d95cb7b2ddd3c0d32ca38283ceb1eea09b4713ee380bceb942d85a244228"
dependencies = [
 "hashbrown 0.11.2",
]

[[package]]
name = "maplit"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d"

[[package]]
name = "matchers"
version = "0.1.0"
@@ -2800,31 +2594,6 @@ dependencies = [
 "winapi 0.3.9",
]

[[package]]
name = "num"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8536030f9fea7127f841b45bb6243b27255787fb4eb83958aa1ef9d2fdc0c36"
dependencies = [
 "num-bigint 0.2.6",
 "num-complex",
 "num-integer",
 "num-iter",
 "num-rational",
 "num-traits",
]

[[package]]
name = "num-bigint"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "090c7f9998ee0ff65aa5b723e4009f7b217707f1fb5ea551329cc4d6231fb304"
dependencies = [
 "autocfg",
 "num-integer",
 "num-traits",
]

[[package]]
name = "num-bigint"
version = "0.4.3"
@@ -2836,22 +2605,6 @@ dependencies = [
 "num-traits",
]

[[package]]
name = "num-cmp"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63335b2e2c34fae2fb0aa2cecfd9f0832a1e24b3b32ecec612c3426d46dc8aaa"

[[package]]
name = "num-complex"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6b19411a9719e753aff12e5187b74d60d3dc449ec3f4dc21e3989c3f554bc95"
dependencies = [
 "autocfg",
 "num-traits",
]

[[package]]
name = "num-integer"
version = "0.1.45"
@@ -2862,29 +2615,6 @@ dependencies = [
 "num-traits",
]

[[package]]
name = "num-iter"
version = "0.1.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7d03e6c028c5dc5cac6e2dec0efda81fc887605bb3d884578bb6d6bf7514e252"
dependencies = [
 "autocfg",
 "num-integer",
 "num-traits",
]

[[package]]
name = "num-rational"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c000134b5dbf44adc5cb772486d335293351644b801551abe8f75c84cfa4aef"
dependencies = [
 "autocfg",
 "num-bigint 0.2.6",
 "num-integer",
 "num-traits",
]

[[package]]
name = "num-traits"
version = "0.2.15"
@@ -3752,15 +3482,6 @@ version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83"

[[package]]
name = "smol_str"
version = "0.1.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7475118a28b7e3a2e157ce0131ba8c5526ea96e90ee601d9f6bb2e286a35ab44"
dependencies = [
 "serde",
]

[[package]]
name = "socket2"
version = "0.3.19"
@@ -3794,27 +3515,6 @@ version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"

[[package]]
name = "strum"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aaf86bbcfd1fa9670b7a129f64fc0c9fcbbfe4f1bc4210e9e98fe71ffc12cde2"
dependencies = [
 "strum_macros",
]

[[package]]
name = "strum_macros"
version = "0.21.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d06aaeeee809dbc59eb4556183dd927df67db1540de5be8d3ec0b6636358a5ec"
dependencies = [
 "heck 0.3.3",
 "proc-macro2",
 "quote",
 "syn",
]

[[package]]
name = "subtle"
version = "2.4.1"
@@ -3910,15 +3610,8 @@ checksum = "c2702e08a7a860f005826c6815dcac101b19b5eb330c27fe4a5928fec1d20ddd"
dependencies = [
 "libc",
 "num_threads",
 "time-macros",
]

[[package]]
name = "time-macros"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42657b1a6f4d817cda8e7a0ace261fe0cc946cf3a80314390b22cc61ae080792"

[[package]]
name = "tinyvec"
version = "1.6.0"
@@ -3991,7 +3684,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d0ecdcb44a79f0fe9844f0c4f33a342cbcbb5117de8001e6ba0dc2351327d09"
dependencies = [
 "cfg-if 1.0.0",
 "log",
 "pin-project-lite",
 "tracing-attributes",
 "tracing-core",
@@ -4096,12 +3788,6 @@ dependencies = [
 "tinyvec",
]

[[package]]
name = "unicode-segmentation"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99"

[[package]]
name = "unicode-xid"
version = "0.2.3"
@@ -4177,7 +3863,7 @@ version = "3.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5cd9a7a22c45daf5aeb6bea3dff4ecbb8eb43e492582d467b18ce2979b512cbe"
dependencies = [
 "itertools 0.10.3",
 "itertools",
 "nom",
]

@@ -4420,7 +4106,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e262a29d0e61ccf2b6190d7050d4b237535fc76ce4c1210d9caa316f71dffa75"
dependencies = [
 "bit-vec",
 "num-bigint 0.4.3",
 "num-bigint",
]

[[package]]
diff --git a/cob/Cargo.toml b/cob/Cargo.toml
index 8c4f6f85..c840a7db 100644
--- a/cob/Cargo.toml
+++ b/cob/Cargo.toml
@@ -18,7 +18,6 @@ thiserror = "1.0"
toml = "0.5"
tracing = "0.1"
either = "1.6"
lru = "0.7.1"
multihash = "0.11"
multibase = "0.9"
lazy_static = "1.4"
@@ -33,11 +32,6 @@ version = "0.13.24"
default-features = false
features = ["vendored-libgit2"]

[dependencies.jsonschema]
version = "~0.13"
default-features = false
features = []

[dependencies.link-crypto]
path = "../link-crypto"

@@ -49,7 +43,3 @@ path = "../git-trailers"

[dependencies.radicle-git-ext]
path = "../git-ext"

[dependencies.automerge]
git = "https://github.com/automerge/automerge-rs.git"
rev = "e72571962b51c2f0726fb534890ef3b4f7c74dfc"
diff --git a/cob/src/cache.rs b/cob/src/cache.rs
index a3ecd457..a3586a71 100644
--- a/cob/src/cache.rs
+++ b/cob/src/cache.rs
@@ -18,8 +18,6 @@ pub enum Error {
    #[error(transparent)]
    Io(#[from] std::io::Error),
    #[error(transparent)]
    SchemaParse(#[from] super::schema::error::Parse),
    #[error(transparent)]
    Git(#[from] git2::Error),
    #[error(transparent)]
    MinicborDecode(#[from] minicbor::decode::Error),
@@ -36,10 +34,6 @@ pub trait Cache {
    /// are changed then we will not see those changes. However, we specify
    /// in the RFC that any peer updating a change must update their ref to
    /// the object, so this should not be a problem.
    ///
    /// We return an `Rc<RefCell<CachedChangeGraph>>`. This is so that changes
    /// can be made by calling `CachedChangeGraph::propose_change`, which
    /// mutates the `CachedChangeGraph`.
    fn load(
        &mut self,
        oid: ObjectId,
@@ -64,8 +58,7 @@ pub trait Cache {
///
/// Each file contains a CBOR encoding of a `CachedChangeGraph`. This file
/// contains the OIDs of the tips of the graph that were used to generate the
/// object, the validated automerge history that was generated using those tips,
/// the schema and the schema commit OID.
/// object and the individual automerge change blobs.
///
/// The `v1` directory means we can easily add a `v2` if we need to change the
/// cache layout in backwards incompatible ways.
diff --git a/cob/src/cache/cached_change_graph.rs b/cob/src/cache/cached_change_graph.rs
index e340578f..be2a3e17 100644
--- a/cob/src/cache/cached_change_graph.rs
+++ b/cob/src/cache/cached_change_graph.rs
@@ -3,19 +3,11 @@
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
// Linking Exception. For full terms see the included LICENSE file.

use crate::{
    validated_automerge::error::ProposalError,
    EntryContents,
    History,
    ObjectId,
    Schema,
    TypeName,
    ValidatedAutomerge,
};
use crate::{EntryContents, History, ObjectId, TypeName};

use link_identities::git::Urn;

use std::{cell::RefCell, collections::BTreeSet, ops::ControlFlow, rc::Rc};
use std::{cell::RefCell, collections::BTreeSet, rc::Rc};

/// A CBOR encoding of the change graph which was loaded when the heads of the
/// change graph were `refs`. The `history` contains the bytes of each change
@@ -27,11 +19,6 @@ pub struct CachedChangeGraph {
    #[n(1)]
    #[cbor(with = "encoding::oids")]
    pub refs: BTreeSet<git2::Oid>,
    #[n(2)]
    #[cbor(with = "encoding::oid")]
    pub schema_commit: git2::Oid,
    #[n(3)]
    pub schema: Schema,
    #[n(4)]
    #[cbor(with = "encoding::typename")]
    pub typename: TypeName,
@@ -45,8 +32,6 @@ pub struct CachedChangeGraph {
impl CachedChangeGraph {
    pub fn new(
        tips: impl IntoIterator<Item = git2::Oid>,
        schema: Schema,
        schema_commit: git2::Oid,
        history: History,
        typename: TypeName,
        object_id: ObjectId,
@@ -54,9 +39,7 @@ impl CachedChangeGraph {
    ) -> Rc<RefCell<CachedChangeGraph>> {
        let g = CachedChangeGraph {
            history,
            schema,
            refs: tips.into_iter().collect(),
            schema_commit,
            typename,
            object_id,
            authorizing_identity_urn,
@@ -68,32 +51,6 @@ impl CachedChangeGraph {
        &self.history
    }

    pub(crate) fn propose_change(&mut self, change: &EntryContents) -> Result<(), ProposalError> {
        match change {
            EntryContents::Automerge(change_bytes) => {
                let mut validated = self.history.traverse(
                    ValidatedAutomerge::new(self.schema.clone()),
                    |mut doc, entry| {
                        // This unwrap should be safe as we only save things in the cache when we've
                        // validated them
                        doc.propose_change(entry.contents().as_ref()).unwrap();
                        ControlFlow::Continue(doc)
                    },
                );
                validated.propose_change(change_bytes)?;
            },
        }
        Ok(())
    }

    pub fn schema(&self) -> &Schema {
        &self.schema
    }

    pub fn schema_commit(&self) -> git2::Oid {
        self.schema_commit
    }

    pub fn tips(&self) -> BTreeSet<git2::Oid> {
        self.refs.clone()
    }
@@ -130,8 +87,6 @@ impl CachedChangeGraph {
}

mod encoding {
    use crate::Schema;
    use std::convert::TryFrom;

    struct Json(serde_json::Value);

@@ -155,44 +110,6 @@ mod encoding {
        }
    }

    impl minicbor::Encode for Schema {
        fn encode<W: minicbor::encode::Write>(
            &self,
            e: &mut minicbor::Encoder<W>,
        ) -> Result<(), minicbor::encode::Error<W::Error>> {
            e.encode(self.json_bytes())?;
            Ok(())
        }
    }

    impl<'b> minicbor::Decode<'b> for Schema {
        fn decode(d: &mut minicbor::Decoder<'b>) -> Result<Self, minicbor::decode::Error> {
            let bytes: Vec<u8> = d.decode()?;
            Schema::try_from(&bytes[..])
                .map_err(|_| minicbor::decode::Error::Message("invalid schema JSON"))
        }
    }

    pub(super) mod oid {
        use minicbor::{
            decode::{Decode, Decoder, Error as DecodeError},
            encode::{Encode, Encoder, Error as EncodeError, Write},
        };
        use radicle_git_ext::Oid;

        pub fn encode<W: Write>(
            v: &git2::Oid,
            e: &mut Encoder<W>,
        ) -> Result<(), EncodeError<W::Error>> {
            Oid::from(*v).encode(e)
        }

        pub fn decode(d: &mut Decoder<'_>) -> Result<git2::Oid, DecodeError> {
            let ext = Oid::decode(d)?;
            Ok(ext.into())
        }
    }

    pub(super) mod oids {
        use minicbor::{
            decode::{Decode, Decoder, Error as DecodeError},
diff --git a/cob/src/change.rs b/cob/src/change.rs
index e49a79b9..b99b26b6 100644
--- a/cob/src/change.rs
+++ b/cob/src/change.rs
@@ -3,15 +3,11 @@
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
// Linking Exception. For full terms see the included LICENSE file.

use super::{
    change_metadata::{self, ChangeMetadata, CreateMetadataArgs},
    trailers,
    EntryContents,
    HistoryType,
    TypeName,
};
use super::{trailers, EntryContents, HistoryType, TypeName};

use git_trailers::{parse as parse_trailers, OwnedTrailer};
use link_crypto::BoxedSigner;
use link_identities::sign::Signatures;

use std::{convert::TryFrom, fmt};

@@ -21,14 +17,28 @@ use serde::{Deserialize, Serialize};
/// is specified in the RFC (docs/rfc/0662-collaborative-objects.adoc)
/// under "Change Commits".
pub struct Change {
    /// The OID of the parent commit which points at the schema_commit
    schema_commit: git2::Oid,
    /// The commit where this change lives
    commit: git2::Oid,
    /// The OID of the tree the commit points at, we need this to validate the
    /// signatures
    revision: git2::Oid,
    /// The signatures of this change
    signatures: Signatures,
    /// The OID of the parent commit of this change which points at the author
    /// identity
    author_commit: git2::Oid,
    /// The OID of the parent commit of this change which points at a schema.
    /// Schemas are no longer used but older implementations include a
    /// schema commit as a parent of the change and to stay backwards
    /// compatible we must exclude these commits when loading a change.
    schema_commit: Option<git2::Oid>,
    /// The OID of the parent commit which points at the identity this change
    /// was authorized with respect to at the time the change was authored.
    authorizing_identity_commit: git2::Oid,
    /// The manifest
    manifest: Manifest,
    /// The actual changes this change carries
    contents: EntryContents,
    /// The metadata for this change
    metadata: change_metadata::ChangeMetadata,
}

impl fmt::Display for Change {
@@ -38,9 +48,10 @@ impl fmt::Display for Change {
}

pub mod error {
    use super::{change_metadata, trailers};
    use super::trailers;
    use git_trailers::Error as TrailerError;
    use link_crypto::BoxedSignError;
    use link_identities::git::error::Signatures;
    use link_identities::sign::error::Signatures;
    use thiserror::Error;

    #[derive(Debug, Error)]
@@ -49,8 +60,6 @@ pub mod error {
        Git(#[from] git2::Error),
        #[error(transparent)]
        Signer(#[from] BoxedSignError),
        #[error(transparent)]
        Metadata(#[from] change_metadata::CreateError),
    }

    #[derive(Debug, Error)]
@@ -70,14 +79,21 @@ pub mod error {
        #[error("./change was not a blob")]
        ChangeNotBlob,
        #[error(transparent)]
        InvalidMetadata(#[from] change_metadata::LoadError),
        #[error(transparent)]
        SchemaCommitTrailer(#[from] trailers::error::InvalidSchemaTrailer),
        #[error(transparent)]
        AuthorTrailer(#[from] trailers::error::InvalidAuthorTrailer),
        #[error(transparent)]
        AuthorizingIdentityTrailer(
            #[from] super::trailers::error::InvalidAuthorizingIdentityTrailer,
        ),
        #[error("non utf-8 characters in commit message")]
        Utf8,
        #[error(transparent)]
        Trailer(#[from] TrailerError),
    }
}

pub struct NewChangeSpec {
    pub(crate) schema_commit: git2::Oid,
    pub(crate) typename: TypeName,
    pub(crate) tips: Option<Vec<git2::Oid>>,
    pub(crate) message: Option<String>,
@@ -90,8 +106,8 @@ const CHANGE_BLOB_NAME: &str = "change";
impl Change {
    /// Create a change in the git repo according to the spec
    pub fn create(
        authorizing_identity_commit: git2::Oid,
        author_identity_commit: git2::Oid,
        authorizing_identity_commit_id: git2::Oid,
        author_identity_commit_id: git2::Oid,
        repo: &git2::Repository,
        signer: &BoxedSigner,
        spec: NewChangeSpec,
@@ -116,35 +132,80 @@ impl Change {
        tb.insert(CHANGE_BLOB_NAME, change_blob, git2::FileMode::Blob.into())?;

        let revision = tb.write()?;

        let schema_trailer = trailers::SchemaCommitTrailer::from(spec.schema_commit).into();

        let mut tips = spec.tips.clone().unwrap_or_default();
        tips.push(spec.schema_commit);
        tips.push(authorizing_identity_commit);

        let metadata = ChangeMetadata::create(CreateMetadataArgs {
            revision,
            tips,
            message: spec.message.unwrap_or_else(|| "new change".to_string()),
            extra_trailers: vec![schema_trailer],
            authorizing_identity_commit,
            author_identity_commit,
            signer: signer.clone(),
            repo,
        })?;
        let tree = repo.find_tree(revision)?;

        let author_commit = repo.find_commit(author_identity_commit_id)?;
        let author = repo.signature()?;

        let authorizing_identity_commit = repo.find_commit(authorizing_identity_commit_id)?;

        let signatures = link_identities::git::sign(signer, revision.into())?.into();
        let mut parent_commits = spec
            .tips
            .iter()
            .flat_map(|cs| cs.iter())
            .map(|o| repo.find_commit(*o))
            .collect::<Result<Vec<git2::Commit>, git2::Error>>()?;
        parent_commits.push(authorizing_identity_commit);
        parent_commits.push(author_commit);

        let trailers = vec![
            super::trailers::AuthorCommitTrailer::from(author_identity_commit_id).into(),
            super::trailers::AuthorizingIdentityCommitTrailer::from(authorizing_identity_commit_id)
                .into(),
        ];

        let commit = repo.commit(
            None,
            &author,
            &author,
            &link_identities::git::sign::CommitMessage::new(
                spec.message
                    .unwrap_or_else(|| "new change".to_string())
                    .as_str(),
                &signatures,
                trailers,
            )
            .to_string(),
            &tree,
            &(parent_commits.iter().collect::<Vec<&git2::Commit>>())[..],
        )?;

        Ok(Change {
            schema_commit: spec.schema_commit,
            schema_commit: None,
            manifest,
            contents: spec.contents,
            metadata,
            commit,
            signatures,
            authorizing_identity_commit: authorizing_identity_commit_id,
            author_commit: author_identity_commit_id,
            revision,
        })
    }

    /// Load a change from the given commit
    pub fn load(repo: &git2::Repository, commit: &git2::Commit) -> Result<Change, error::Load> {
        let metadata = ChangeMetadata::try_from(commit)?;
        let trailers = commit
            .message()
            .ok_or(error::Load::Utf8)
            .and_then(|s| parse_trailers(s, ":").map_err(|e| e.into()))?;
        let owned_trailers: Vec<OwnedTrailer> = trailers.iter().map(OwnedTrailer::from).collect();
        let author_commit_trailer =
            super::trailers::AuthorCommitTrailer::try_from(&owned_trailers[..])?;
        let authorizing_identity_trailer =
            super::trailers::AuthorizingIdentityCommitTrailer::try_from(&owned_trailers[..])?;

        // We no longer support schema parents but to remain backwards compatible we
        // still load the commit trailer so we know to omit the schema parent
        // commits when evaluating old object histories which still have a
        // schema parent commit
        let schema_commit_trailer =
            match super::trailers::SchemaCommitTrailer::try_from(&owned_trailers[..]) {
                Ok(t) => Some(t),
                Err(super::trailers::error::InvalidSchemaTrailer::NoTrailer) => None,
                Err(e) => return Err(e.into()),
            };
        let signatures = Signatures::try_from(trailers)?;

        let tree = commit.tree()?;
        let manifest_tree_entry = tree
@@ -170,23 +231,24 @@ impl Change {
            },
        };

        let schema_commit_trailer =
            trailers::SchemaCommitTrailer::try_from(&metadata.trailers[..])?;

        Ok(Change {
            schema_commit: schema_commit_trailer.oid(),
            manifest,
            contents,
            metadata,
            commit: commit.id(),
            schema_commit: schema_commit_trailer.map(|s| s.oid()),
            author_commit: author_commit_trailer.oid(),
            authorizing_identity_commit: authorizing_identity_trailer.oid(),
            signatures,
            revision: tree.id(),
        })
    }

    pub fn commit(&self) -> &git2::Oid {
        &self.metadata.commit
        &self.commit
    }

    pub fn author_commit(&self) -> git2::Oid {
        self.metadata.author_commit
        self.author_commit
    }

    pub fn typename(&self) -> &TypeName {
@@ -197,16 +259,21 @@ impl Change {
        &self.contents
    }

    pub fn schema_commit(&self) -> git2::Oid {
    pub fn schema_commit(&self) -> Option<git2::Oid> {
        self.schema_commit
    }

    pub fn authorizing_identity_commit(&self) -> git2::Oid {
        self.metadata.authorizing_identity_commit
        self.authorizing_identity_commit
    }

    pub fn valid_signatures(&self) -> bool {
        self.metadata.valid_signatures()
        for (key, sig) in self.signatures.iter() {
            if !key.verify(sig, self.revision.as_bytes()) {
                return false;
            }
        }
        true
    }
}

diff --git a/cob/src/change_graph.rs b/cob/src/change_graph.rs
index b020f9e5..4c4bf4cb 100644
--- a/cob/src/change_graph.rs
+++ b/cob/src/change_graph.rs
@@ -4,17 +4,13 @@
// Linking Exception. For full terms see the included LICENSE file.

use super::{
    schema_change,
    AuthorizingIdentity,
    Change,
    CollaborativeObject,
    IdentityStorage,
    ObjectId,
    Schema,
    SchemaChange,
    TypeName,
};
use link_identities::git::Urn;
use petgraph::{
    visit::{EdgeRef, Topo, Walker},
    EdgeDirection,
@@ -34,14 +30,6 @@ pub enum Error {
    MissingRevision(git2::Oid),
    #[error(transparent)]
    Git(#[from] git2::Error),
    #[error(transparent)]
    LoadSchema(#[from] schema_change::error::Load),
    #[error("schema change is authorized by an incorrect identity URN, expected {expected} but was {actual}")]
    SchemaAuthorizingUrnIncorrect { expected: Urn, actual: Urn },
    #[error("no authorizing identity found for schema change")]
    NoSchemaAuthorizingIdentityFound,
    #[error("invalid signature on schema change")]
    InvalidSchemaSignatures,
}

/// The graph of changes for a particular collaborative object
@@ -50,7 +38,6 @@ pub(super) struct ChangeGraph<'a> {
    object_id: ObjectId,
    authorizing_identity: &'a dyn AuthorizingIdentity,
    graph: petgraph::Graph<Change, ()>,
    schema_change: SchemaChange,
}

impl<'a> ChangeGraph<'a> {
@@ -119,12 +106,8 @@ impl<'a> ChangeGraph<'a> {
            let first_node = &self.graph[*root];
            first_node.typename().clone()
        };
        let evaluating = evaluation::Evaluating::new(
            identities,
            self.authorizing_identity,
            self.repo,
            self.schema().clone(),
        );
        let evaluating =
            evaluation::Evaluating::new(identities, self.authorizing_identity, self.repo);
        let topo = Topo::new(&self.graph);
        let items = topo.iter(&self.graph).map(|idx| {
            let node = &self.graph[idx];
@@ -143,7 +126,6 @@ impl<'a> ChangeGraph<'a> {
            typename,
            history,
            id: self.object_id,
            schema: self.schema_change.schema().clone(),
        }
    }

@@ -166,14 +148,6 @@ impl<'a> ChangeGraph<'a> {
        let for_display = self.graph.map(|_ix, n| n.to_string(), |_ix, _e| "");
        petgraph::dot::Dot::new(&for_display).to_string()
    }

    pub(super) fn schema_commit(&self) -> git2::Oid {
        self.schema_change.commit()
    }

    pub(super) fn schema(&self) -> &Schema {
        self.schema_change.schema()
    }
}

struct GraphBuilder {
@@ -209,7 +183,7 @@ impl GraphBuilder {
            .parents()
            .filter_map(|parent| {
                if parent.id() != author_commit
                    && parent.id() != schema_commit
                    && Some(parent.id()) != schema_commit
                    && parent.id() != authorizing_identity_commit
                    && !self.has_edge(parent.id(), commit.id())
                {
@@ -243,15 +217,14 @@ impl GraphBuilder {
        object_id: ObjectId,
        authorizing_identity: &'b dyn AuthorizingIdentity,
    ) -> Result<Option<ChangeGraph<'b>>, Error> {
        if let Some(root) = self.graph.externals(petgraph::Direction::Incoming).next() {
            let root_change = &self.graph[root];
            let schema_change = SchemaChange::load(root_change.schema_commit(), repo)?;
            if !schema_change.valid_signatures() {
                return Err(Error::InvalidSchemaSignatures);
            }
        if self
            .graph
            .externals(petgraph::Direction::Incoming)
            .next()
            .is_some()
        {
            Ok(Some(ChangeGraph {
                repo,
                schema_change,
                object_id,
                authorizing_identity,
                graph: self.graph,
diff --git a/cob/src/change_graph/evaluation.rs b/cob/src/change_graph/evaluation.rs
index a9e91bc0..6058071b 100644
--- a/cob/src/change_graph/evaluation.rs
+++ b/cob/src/change_graph/evaluation.rs
@@ -10,18 +10,15 @@ use crate::{
    history,
    identity_storage::{lookup_authorizing_identity, lookup_person},
    pruning_fold,
    validated_automerge::{error::ProposalError, ValidatedAutomerge},
    AuthDecision,
    AuthorizingIdentity,
    IdentityStorage,
    Schema,
};

pub struct Evaluating<'a, I: IdentityStorage> {
    identities: &'a I,
    authorizing_identity: &'a dyn AuthorizingIdentity,
    repo: &'a git2::Repository,
    in_progress_history: ValidatedAutomerge,
}

impl<'a, I: IdentityStorage> Evaluating<'a, I> {
@@ -29,13 +26,11 @@ impl<'a, I: IdentityStorage> Evaluating<'a, I> {
        identities: &'a I,
        authorizer: &'a dyn AuthorizingIdentity,
        repo: &'a git2::Repository,
        schema: Schema,
    ) -> Evaluating<'a, I> {
        Evaluating {
            identities,
            authorizing_identity: authorizer,
            repo,
            in_progress_history: ValidatedAutomerge::new(schema),
        }
    }

@@ -111,19 +106,6 @@ impl<'a, I: IdentityStorage> Evaluating<'a, I> {
            },
        };

        // Check that the history the change carries is well formed and does not violate
        // the schema
        match &change.contents() {
            history::EntryContents::Automerge(bytes) => {
                match self.in_progress_history.propose_change(bytes) {
                    Ok(()) => {},
                    Err(e) => {
                        return Err(RejectionReason::InvalidChange(e));
                    },
                }
            },
        };

        Ok(history::HistoryEntry::new(
            *change.commit(),
            author.urn(),
@@ -164,7 +146,6 @@ enum RejectionReason {
    Unauthorized {
        reason: &'static str,
    },
    InvalidChange(ProposalError),
}

impl RejectionReason {
@@ -214,12 +195,6 @@ impl RejectionReason {
                    "rejecting change as it was not authorized"
                );
            },
            RejectionReason::InvalidChange(error) => {
                tracing::warn!(
                    err=?error,
                    "rejecting invalid change"
                );
            },
        }
    }
}
diff --git a/cob/src/change_metadata.rs b/cob/src/change_metadata.rs
deleted file mode 100644
index c7855cdb..00000000
--- a/cob/src/change_metadata.rs
+++ /dev/null
@@ -1,167 +0,0 @@
// Copyright © 2019-2020 The Radicle Foundation <hello@radicle.foundation>
//
// This file is part of radicle-link, distributed under the GPLv3 with Radicle
// Linking Exception. For full terms see the included LICENSE file.

use git_trailers::{parse as parse_trailers, Error as TrailerError, OwnedTrailer, Trailer};
use link_crypto::{BoxedSignError, BoxedSigner};
use link_identities::sign::{error::Signatures as SignaturesError, Signatures};

use thiserror::Error as ThisError;

use std::convert::TryFrom;

/// We represent both changes to a collaborative object and changes to the
/// objects schema as commits. `ChangeMetadata` captures the metadata which is
/// common to both object changes and schema changes
pub(super) struct ChangeMetadata {
    /// The commit where this change lives
    pub(super) commit: git2::Oid,
    /// The OID of the tree the commit points at, we need this to validate the
    /// signatures
    pub(super) revision: git2::Oid,
    /// The signatures of this change
    pub(super) signatures: Signatures,
    /// The OID of the parent commit of this change which points at the author
    /// identity
    pub(super) author_commit: git2::Oid,
    /// The OID of the parent commit which points at the identity this change
    /// was authorized with respect to at the time the change was authored.
    pub(super) authorizing_identity_commit: git2::Oid,
    /// The trailers of the commit. We need to hold on to these so more specific
    /// processing can be performed by specific change types. E.g. the
    /// parsing of the `SchemaCommitTrailer` in `Change::load`
    pub(super) trailers: Vec<OwnedTrailer>,
}

#[derive(Debug, ThisError)]
pub enum LoadError {
    #[error(transparent)]
    Git(#[from] git2::Error),
    #[error(transparent)]
    Signatures(#[from] SignaturesError),
    #[error("non utf-8 characters in commit message")]
    Utf8,
    #[error(transparent)]
    Trailer(#[from] TrailerError),
    #[error(transparent)]
    AuthorTrailer(#[from] super::trailers::error::InvalidAuthorTrailer),
    #[error(transparent)]
    AuthorizingIdentityTrailer(#[from] super::trailers::error::InvalidAuthorizingIdentityTrailer),
}

#[derive(Debug, ThisError)]
pub enum CreateError {
    #[error(transparent)]
    Git(#[from] git2::Error),
    #[error(transparent)]
    Signer(#[from] BoxedSignError),
}

impl TryFrom<&git2::Commit<'_>> for ChangeMetadata {
    type Error = LoadError;

    fn try_from(commit: &git2::Commit) -> Result<Self, Self::Error> {
        let trailers = commit
            .message()
            .ok_or(LoadError::Utf8)
            .and_then(|s| parse_trailers(s, ":").map_err(|e| e.into()))?;
        let owned_trailers: Vec<OwnedTrailer> = trailers.iter().map(OwnedTrailer::from).collect();
        let author_commit_trailer =
            super::trailers::AuthorCommitTrailer::try_from(&owned_trailers[..])?;
        let authorizing_identity_trailer =
            super::trailers::AuthorizingIdentityCommitTrailer::try_from(&owned_trailers[..])?;
        let signatures = Signatures::try_from(trailers)?;
        Ok(ChangeMetadata {
            commit: commit.id(),
            revision: commit.tree_id(),
            signatures,
            author_commit: author_commit_trailer.oid(),
            authorizing_identity_commit: authorizing_identity_trailer.oid(),
            trailers: owned_trailers,